You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@airflow.apache.org by po...@apache.org on 2020/06/16 20:56:23 UTC

[airflow] branch master updated: Introduce 'transfers' packages (#9320)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/master by this push:
     new f6bd817  Introduce 'transfers' packages (#9320)
f6bd817 is described below

commit f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Tue Jun 16 22:55:42 2020 +0200

    Introduce 'transfers' packages (#9320)
    
    * Consistent naming of transfer operators
    
    Transfer operators have consistent names and are grouped in
    the 'transfer' packages.
    
    * fixup! Consistent naming of transfer operators
    
    * Introduces 'transfers' packages.
    
    Closes #9161 and #8620
    
    * fixup! Introduces 'transfers' packages.
    
    * fixup! fixup! Introduces 'transfers' packages.
    
    * fixup! fixup! fixup! Introduces 'transfers' packages.
---
 .pre-commit-config.yaml                            |   2 +
 CONTRIBUTING.rst                                   |  68 +++-
 UPDATING.md                                        |   4 +-
 airflow/contrib/operators/adls_to_gcs.py           |  10 +-
 airflow/contrib/operators/bigquery_to_bigquery.py  |   6 +-
 airflow/contrib/operators/bigquery_to_gcs.py       |  10 +-
 .../operators/bigquery_to_mysql_operator.py        |   6 +-
 airflow/contrib/operators/cassandra_to_gcs.py      |  10 +-
 airflow/contrib/operators/dynamodb_to_s3.py        |   6 +-
 airflow/contrib/operators/file_to_gcs.py           |  10 +-
 airflow/contrib/operators/file_to_wasb.py          |   6 +-
 airflow/contrib/operators/gcp_transfer_operator.py |   4 +-
 airflow/contrib/operators/gcs_download_operator.py |   8 +-
 airflow/contrib/operators/gcs_to_bq.py             |  10 +-
 airflow/contrib/operators/gcs_to_gcs.py            |  10 +-
 .../contrib/operators/gcs_to_gdrive_operator.py    |   6 +-
 airflow/contrib/operators/gcs_to_s3.py             |  10 +-
 airflow/contrib/operators/hive_to_dynamodb.py      |   6 +-
 .../operators/imap_attachment_to_s3_operator.py    |   6 +-
 airflow/contrib/operators/mongo_to_s3.py           |   6 +-
 airflow/contrib/operators/mssql_to_gcs.py          |  10 +-
 airflow/contrib/operators/mysql_to_gcs.py          |  10 +-
 .../oracle_to_azure_data_lake_transfer.py          |   8 +-
 .../contrib/operators/oracle_to_oracle_transfer.py |  12 +-
 .../contrib/operators/postgres_to_gcs_operator.py  |  10 +-
 airflow/contrib/operators/s3_to_gcs_operator.py    |   6 +-
 airflow/contrib/operators/s3_to_sftp_operator.py   |   6 +-
 airflow/contrib/operators/sftp_to_s3_operator.py   |   6 +-
 airflow/contrib/operators/sql_to_gcs.py            |  10 +-
 airflow/contrib/operators/vertica_to_hive.py       |  12 +-
 airflow/contrib/operators/vertica_to_mysql.py      |  12 +-
 airflow/operators/gcs_to_s3.py                     |   6 +-
 airflow/operators/google_api_to_s3_transfer.py     |  14 +-
 airflow/operators/hive_to_druid.py                 |  12 +-
 airflow/operators/hive_to_mysql.py                 |  12 +-
 airflow/operators/hive_to_samba_operator.py        |   6 +-
 airflow/operators/mssql_to_hive.py                 |  12 +-
 airflow/operators/mysql_to_hive.py                 |  12 +-
 airflow/operators/presto_to_mysql.py               |  12 +-
 airflow/operators/redshift_to_s3_operator.py       |  12 +-
 airflow/operators/s3_to_hive_operator.py           |  12 +-
 airflow/operators/s3_to_redshift_operator.py       |  10 +-
 .../amazon/PROVIDERS_CHANGES_2020.05.20.md         |   8 +
 airflow/providers/amazon/README.md                 |  57 ++--
 .../example_google_api_to_s3_transfer_advanced.py  |   6 +-
 .../example_google_api_to_s3_transfer_basic.py     |   4 +-
 .../example_dags/example_imap_attachment_to_s3.py  |   2 +-
 .../aws/example_dags/example_s3_to_redshift.py     |   6 +-
 airflow/providers/amazon/aws/hooks/batch_client.py |   3 +-
 .../providers/amazon/aws/transfers}/__init__.py    |   1 -
 .../aws/{operators => transfers}/dynamodb_to_s3.py |   0
 .../aws/{operators => transfers}/gcs_to_s3.py      |   0
 .../google_api_to_s3.py}                           |   2 +-
 .../{operators => transfers}/hive_to_dynamodb.py   |   2 +-
 .../imap_attachment_to_s3.py                       |   0
 .../aws/{operators => transfers}/mongo_to_s3.py    |   0
 .../aws/{operators => transfers}/redshift_to_s3.py |   2 +-
 .../aws/{operators => transfers}/s3_to_redshift.py |   4 +-
 .../aws/{operators => transfers}/s3_to_sftp.py     |   0
 .../aws/{operators => transfers}/sftp_to_s3.py     |   0
 .../cassandra/PROVIDERS_CHANGES_2020.05.20.md      |   1 +
 airflow/providers/apache/cassandra/README.md       |   3 +
 .../apache/druid/PROVIDERS_CHANGES_2020.05.20.md   |   1 +
 airflow/providers/apache/druid/README.md           |  24 +-
 .../providers/apache/druid/transfers}/__init__.py  |   1 -
 .../{operators => transfers}/hive_to_druid.py      |   2 +-
 .../apache/hdfs/PROVIDERS_CHANGES_2020.05.20.md    |   1 +
 airflow/providers/apache/hdfs/README.md            |   3 +
 .../apache/hive/PROVIDERS_CHANGES_2020.05.20.md    |   3 +
 airflow/providers/apache/hive/README.md            |  38 ++-
 .../providers/apache/hive/transfers}/__init__.py   |   1 -
 .../hive/{operators => transfers}/hive_to_mysql.py |   2 +-
 .../hive/{operators => transfers}/hive_to_samba.py |   2 +-
 .../hive/{operators => transfers}/mssql_to_hive.py |   2 +-
 .../hive/{operators => transfers}/mysql_to_hive.py |   2 +-
 .../hive/{operators => transfers}/s3_to_hive.py    |   2 +-
 .../{operators => transfers}/vertica_to_hive.py    |   2 +-
 .../apache/livy/PROVIDERS_CHANGES_2020.05.20.md    |   1 +
 airflow/providers/apache/livy/README.md            |   3 +
 .../apache/pig/PROVIDERS_CHANGES_2020.05.20.md     |   1 +
 airflow/providers/apache/pig/README.md             |   3 +
 .../apache/pinot/PROVIDERS_CHANGES_2020.05.20.md   |   1 +
 airflow/providers/apache/pinot/README.md           |   3 +
 .../apache/spark/PROVIDERS_CHANGES_2020.05.20.md   |   1 +
 airflow/providers/apache/spark/README.md           |   3 +
 .../apache/sqoop/PROVIDERS_CHANGES_2020.05.20.md   |   1 +
 airflow/providers/apache/sqoop/README.md           |   3 +
 .../celery/PROVIDERS_CHANGES_2020.05.20.md         |   1 +
 airflow/providers/celery/README.md                 |   3 +
 .../cloudant/PROVIDERS_CHANGES_2020.05.20.md       |   1 +
 airflow/providers/cloudant/README.md               |   3 +
 .../databricks/PROVIDERS_CHANGES_2020.05.20.md     |   1 +
 airflow/providers/databricks/README.md             |   3 +
 .../datadog/PROVIDERS_CHANGES_2020.05.20.md        |   1 +
 airflow/providers/datadog/README.md                |   3 +
 .../dingding/PROVIDERS_CHANGES_2020.05.20.md       |   1 +
 airflow/providers/dingding/README.md               |   3 +
 .../discord/PROVIDERS_CHANGES_2020.05.20.md        |   1 +
 airflow/providers/discord/README.md                |   3 +
 .../docker/PROVIDERS_CHANGES_2020.05.20.md         |   3 +
 airflow/providers/docker/README.md                 |   5 +
 .../elasticsearch/PROVIDERS_CHANGES_2020.05.20.md  |   1 +
 airflow/providers/elasticsearch/README.md          |   3 +
 .../email/PROVIDERS_CHANGES_2020.05.20.md          |   1 +
 airflow/providers/email/README.md                  |   3 +
 .../exasol/PROVIDERS_CHANGES_2020.05.20.md         |   1 +
 airflow/providers/exasol/README.md                 |   3 +
 .../facebook/PROVIDERS_CHANGES_2020.05.20.md       |   1 +
 airflow/providers/facebook/README.md               |   3 +
 .../providers/ftp/PROVIDERS_CHANGES_2020.05.20.md  |   1 +
 airflow/providers/ftp/README.md                    |   3 +
 .../google/PROVIDERS_CHANGES_2020.05.20.md         |  20 ++
 airflow/providers/google/README.md                 | 110 +++++--
 .../google/ads/example_dags/example_ads.py         |   3 +-
 airflow/providers/google/ads/operators/ads.py      |  96 +-----
 .../providers/google/ads/transfers}/__init__.py    |   1 -
 .../{operators/ads.py => transfers/ads_to_gcs.py}  |  85 +----
 .../example_dags/example_bigquery_to_bigquery.py   |   2 +-
 .../cloud/example_dags/example_bigquery_to_gcs.py  |   2 +-
 .../example_dags/example_bigquery_transfer.py      |   4 +-
 .../google/cloud/example_dags/example_dataflow.py  |   4 +-
 .../example_dags/example_facebook_ads_to_gcs.py    |   4 +-
 .../google/cloud/example_dags/example_gcs.py       |  11 +-
 .../cloud/example_dags/example_gcs_to_bigquery.py  |   2 +-
 .../cloud/example_dags/example_gcs_to_gcs.py       |   5 +-
 .../cloud/example_dags/example_gcs_to_sftp.py      |   2 +-
 .../cloud/example_dags/example_local_to_gcs.py     |   2 +-
 .../cloud/example_dags/example_postgres_to_gcs.py  |   2 +-
 .../cloud/example_dags/example_presto_to_gcs.py    |   2 +-
 .../cloud/example_dags/example_sftp_to_gcs.py      |   2 +-
 .../cloud/example_dags/example_sheets_to_gcs.py    |   2 +-
 airflow/providers/google/cloud/operators/gcs.py    | 188 +++++------
 .../providers/google/cloud/transfers}/__init__.py  |   1 -
 .../cloud/{operators => transfers}/adls_to_gcs.py  |   0
 .../bigquery_to_bigquery.py                        |   0
 .../{operators => transfers}/bigquery_to_gcs.py    |   0
 .../{operators => transfers}/bigquery_to_mysql.py  |   0
 .../{operators => transfers}/cassandra_to_gcs.py   |   0
 .../facebook_ads_to_gcs.py                         |   0
 .../{operators => transfers}/gcs_to_bigquery.py    |   0
 .../cloud/{operators => transfers}/gcs_to_gcs.py   |  92 ------
 .../google/cloud/transfers/gcs_to_local.py         | 122 +++++++
 .../cloud/{operators => transfers}/gcs_to_sftp.py  |   0
 .../cloud/{operators => transfers}/local_to_gcs.py |   0
 .../cloud/{operators => transfers}/mssql_to_gcs.py |   2 +-
 .../cloud/{operators => transfers}/mysql_to_gcs.py |   2 +-
 .../{operators => transfers}/postgres_to_gcs.py    |   2 +-
 .../{operators => transfers}/presto_to_gcs.py      |   2 +-
 .../cloud/{operators => transfers}/s3_to_gcs.py    |   0
 .../cloud/{operators => transfers}/sftp_to_gcs.py  |   0
 .../{operators => transfers}/sheets_to_gcs.py      |   0
 .../cloud/{operators => transfers}/sql_to_gcs.py   |   0
 .../example_dags/example_display_video.py          |   2 +-
 .../suite/example_dags/example_gcs_to_gdrive.py    |   2 +-
 .../suite/example_dags/example_gcs_to_sheets.py    |   4 +-
 .../google/suite/example_dags/example_sheets.py    |   4 +-
 .../providers/google/suite/transfers}/__init__.py  |   1 -
 .../{operators => transfers}/gcs_to_gdrive.py      |   0
 .../{operators => transfers}/gcs_to_sheets.py      |   0
 .../providers/grpc/PROVIDERS_CHANGES_2020.05.20.md |   1 +
 airflow/providers/grpc/README.md                   |   3 +
 .../hashicorp/PROVIDERS_CHANGES_2020.05.20.md      |   1 +
 airflow/providers/hashicorp/README.md              |   3 +
 .../providers/http/PROVIDERS_CHANGES_2020.05.20.md |   1 +
 airflow/providers/http/README.md                   |   3 +
 .../providers/imap/PROVIDERS_CHANGES_2020.05.20.md |   1 +
 airflow/providers/imap/README.md                   |   3 +
 .../providers/jdbc/PROVIDERS_CHANGES_2020.05.20.md |   1 +
 airflow/providers/jdbc/README.md                   |   3 +
 .../jenkins/PROVIDERS_CHANGES_2020.05.20.md        |   1 +
 airflow/providers/jenkins/README.md                |   3 +
 .../providers/jira/PROVIDERS_CHANGES_2020.05.20.md |   1 +
 airflow/providers/jira/README.md                   |   3 +
 .../azure/PROVIDERS_CHANGES_2020.05.20.md          |   1 +
 airflow/providers/microsoft/azure/README.md        |  34 +-
 .../microsoft/azure/transfers}/__init__.py         |   1 -
 .../azure/{operators => transfers}/file_to_wasb.py |   0
 .../oracle_to_azure_data_lake.py}                  |   2 +-
 .../mssql/PROVIDERS_CHANGES_2020.05.20.md          |   1 +
 airflow/providers/microsoft/mssql/README.md        |   3 +
 .../winrm/PROVIDERS_CHANGES_2020.05.20.md          |   1 +
 airflow/providers/microsoft/winrm/README.md        |   3 +
 .../mongo/PROVIDERS_CHANGES_2020.05.20.md          |   1 +
 airflow/providers/mongo/README.md                  |   3 +
 .../mysql/PROVIDERS_CHANGES_2020.05.20.md          |   1 +
 airflow/providers/mysql/README.md                  |  36 +-
 .../providers/mysql/transfers}/__init__.py         |   1 -
 .../{operators => transfers}/presto_to_mysql.py    |   2 +-
 .../mysql/{operators => transfers}/s3_to_mysql.py  |   2 +-
 .../{operators => transfers}/vertica_to_mysql.py   |   2 +-
 .../providers/odbc/PROVIDERS_CHANGES_2020.05.20.md |   1 +
 airflow/providers/odbc/README.md                   |   3 +
 .../openfaas/PROVIDERS_CHANGES_2020.05.20.md       |   1 +
 airflow/providers/openfaas/README.md               |   3 +
 .../opsgenie/PROVIDERS_CHANGES_2020.05.20.md       |   1 +
 airflow/providers/opsgenie/README.md               |   3 +
 .../oracle/PROVIDERS_CHANGES_2020.05.20.md         |   1 +
 airflow/providers/oracle/README.md                 |  22 +-
 .../providers/oracle/transfers}/__init__.py        |   1 -
 .../oracle_to_oracle.py}                           |   2 +-
 .../pagerduty/PROVIDERS_CHANGES_2020.05.20.md      |   1 +
 airflow/providers/pagerduty/README.md              |   3 +
 .../postgres/PROVIDERS_CHANGES_2020.05.20.md       |   1 +
 airflow/providers/postgres/README.md               |   3 +
 .../presto/PROVIDERS_CHANGES_2020.05.20.md         |   1 +
 airflow/providers/presto/README.md                 |   3 +
 .../qubole/PROVIDERS_CHANGES_2020.05.20.md         |   1 +
 airflow/providers/qubole/README.md                 |   3 +
 .../redis/PROVIDERS_CHANGES_2020.05.20.md          |   1 +
 airflow/providers/redis/README.md                  |   3 +
 .../salesforce/PROVIDERS_CHANGES_2020.05.20.md     |   1 +
 airflow/providers/salesforce/README.md             |   3 +
 .../samba/PROVIDERS_CHANGES_2020.05.20.md          |   1 +
 airflow/providers/samba/README.md                  |   3 +
 .../segment/PROVIDERS_CHANGES_2020.05.20.md        |   1 +
 airflow/providers/segment/README.md                |   3 +
 .../providers/sftp/PROVIDERS_CHANGES_2020.05.20.md |   1 +
 airflow/providers/sftp/README.md                   |   3 +
 .../singularity/PROVIDERS_CHANGES_2020.05.20.md    |   1 +
 airflow/providers/singularity/README.md            |   3 +
 .../slack/PROVIDERS_CHANGES_2020.05.20.md          |   2 +
 airflow/providers/slack/README.md                  |  11 +
 .../snowflake/PROVIDERS_CHANGES_2020.05.20.md      |   2 +
 airflow/providers/snowflake/README.md              |  40 ++-
 .../snowflake/example_dags/example_snowflake.py    |   6 +-
 .../providers/snowflake/transfers}/__init__.py     |   1 -
 .../{operators => transfers}/s3_to_snowflake.py    |   2 +-
 .../{operators => transfers}/snowflake_to_slack.py |   0
 .../sqlite/PROVIDERS_CHANGES_2020.05.20.md         |   1 +
 airflow/providers/sqlite/README.md                 |   3 +
 .../providers/ssh/PROVIDERS_CHANGES_2020.05.20.md  |   1 +
 airflow/providers/ssh/README.md                    |   3 +
 .../vertica/PROVIDERS_CHANGES_2020.05.20.md        |   1 +
 airflow/providers/vertica/README.md                |   3 +
 .../yandex/PROVIDERS_CHANGES_2020.05.20.md         |   1 +
 airflow/providers/yandex/README.md                 |   3 +
 .../zendesk/PROVIDERS_CHANGES_2020.05.20.md        |   1 +
 airflow/providers/zendesk/README.md                |   3 +
 .../PROVIDERS_CLASSES_TEMPLATE.md.jinja2           |  27 +-
 .../PROVIDERS_README_TEMPLATE.md.jinja2            |  18 +-
 backport_packages/refactor_backport_packages.py    |  26 +-
 backport_packages/setup_backport_packages.py       | 245 +++++++++-----
 docs/autoapi_templates/index.rst                   |  20 ++
 docs/build                                         |   3 +-
 docs/concepts.rst                                  |   2 +-
 docs/howto/define_extra_link.rst                   |   4 +-
 .../amazon/aws/google_api_to_s3_transfer.rst       |   2 +-
 .../operator/amazon/aws/imap_attachment_to_s3.rst  |   2 +-
 docs/howto/operator/amazon/aws/s3_to_redshift.rst  |   8 +-
 docs/howto/operator/gcp/ads.rst                    |   8 +-
 docs/howto/operator/gcp/facebook_ads_to_gcs.rst    |   2 +-
 docs/howto/operator/gcp/gcs.rst                    |   2 +-
 docs/howto/operator/gcp/gcs_to_gcs.rst             |   4 +-
 docs/howto/operator/gcp/gcs_to_gdrive.rst          |   4 +-
 .../gcp/{local_to_gcs.rst => gcs_to_local.rst}     |  21 +-
 docs/howto/operator/gcp/gcs_to_sftp.rst            |   4 +-
 docs/howto/operator/gcp/gcs_to_sheets.rst          |   4 +-
 docs/howto/operator/gcp/local_to_gcs.rst           |   2 +-
 docs/howto/operator/gcp/presto_to_gcs.rst          |   4 +-
 docs/howto/operator/gcp/sftp_to_gcs.rst            |   4 +-
 docs/howto/operator/gcp/sheets_to_gcs.rst          |   4 +-
 docs/installation.rst                              |  98 +++---
 docs/operators-and-hooks-ref.rst                   | 153 +++++----
 docs/plugins.rst                                   |   2 +-
 requirements/requirements-python3.6.txt            |  42 +--
 requirements/requirements-python3.7.txt            |  42 +--
 requirements/requirements-python3.8.txt            |  44 +--
 scripts/ci/in_container/_in_container_utils.sh     |   2 +-
 .../operators => amazon/aws/transfers}/__init__.py |   1 -
 .../test_dynamodb_to_s3.py                         |   6 +-
 .../aws/{operators => transfers}/test_gcs_to_s3.py |  12 +-
 .../test_google_api_to_s3.py}                      |  30 +-
 .../test_google_api_to_s3_system.py}               |   0
 .../test_hive_to_dynamodb.py                       |   8 +-
 .../test_imap_attachment_to_s3.py                  |   6 +-
 .../test_imap_attachment_to_s3_system.py           |   0
 .../{operators => transfers}/test_mongo_to_s3.py   |   6 +-
 .../test_redshift_to_s3.py                         |   4 +-
 .../test_s3_to_redshift.py                         |   4 +-
 .../test_s3_to_redshift_system.py                  |   0
 .../{operators => transfers}/test_s3_to_sftp.py    |   2 +-
 .../{operators => transfers}/test_sftp_to_s3.py    |   2 +-
 .../druid/transfers}/__init__.py                   |   1 -
 .../{operators => transfers}/test_hive_to_druid.py |   4 +-
 .../hive/transfers}/__init__.py                    |   1 -
 .../{operators => transfers}/test_hive_to_mysql.py |  40 +--
 .../{operators => transfers}/test_hive_to_samba.py |  16 +-
 .../{operators => transfers}/test_mssql_to_hive.py |  30 +-
 .../{operators => transfers}/test_mysql_to_hive.py |  14 +-
 .../{operators => transfers}/test_s3_to_hive.py    |  24 +-
 .../apache/hive/transfers/test_vertica_to_hive.py  |  68 ++++
 tests/providers/google/ads/operators/test_ads.py   |  29 +-
 .../operators => google/ads/transfers}/__init__.py |   1 -
 .../google/ads/transfers/test_ads_to_gcs.py        |  50 +++
 tests/providers/google/cloud/operators/test_gcs.py |  50 ++-
 .../cloud/transfers}/__init__.py                   |   1 -
 .../{operators => transfers}/test_adls_to_gcs.py   |  10 +-
 .../test_bigquery_to_bigquery.py                   |   4 +-
 .../test_bigquery_to_bigquery_system.py            |   0
 .../test_bigquery_to_gcs.py                        |   4 +-
 .../test_bigquery_to_gcs_system.py                 |   0
 .../test_bigquery_to_mysql.py                      |   4 +-
 .../test_cassandra_to_gcs.py                       |   8 +-
 .../test_facebook_ads_to_gcs.py                    |   6 +-
 .../test_facebook_ads_to_gcs_system.py             |   0
 .../test_gcs_to_bigquery.py                        |   6 +-
 .../test_gcs_to_bigquery_system.py                 |   0
 .../{operators => transfers}/test_gcs_to_gcs.py    |  92 ++----
 .../test_gcs_to_gcs_system.py                      |   0
 .../google/cloud/transfers/test_gcs_to_local.py    |  47 +++
 .../{operators => transfers}/test_gcs_to_sftp.py   |  22 +-
 .../test_gcs_to_sftp_system.py                     |   0
 .../{operators => transfers}/test_local_to_gcs.py  |   4 +-
 .../test_local_to_gcs_system.py                    |   0
 .../{operators => transfers}/test_mssql_to_gcs.py  |  14 +-
 .../{operators => transfers}/test_mysql_to_gcs.py  |  38 +--
 .../test_postgres_to_gcs.py                        |   8 +-
 .../test_postgres_to_gcs_system.py                 |   0
 .../{operators => transfers}/test_presto_to_gcs.py |  26 +-
 .../test_presto_to_gcs_system.py                   |   0
 .../{operators => transfers}/test_s3_to_gcs.py     |  10 +-
 .../{operators => transfers}/test_sftp_to_gcs.py   |  22 +-
 .../test_sftp_to_gcs_system.py                     |   0
 .../{operators => transfers}/test_sheets_to_gcs.py |  14 +-
 .../test_sheets_to_gcs_system.py                   |   0
 .../suite/transfers}/__init__.py                   |   1 -
 .../{operators => transfers}/test_gcs_to_gdrive.py |   4 +-
 .../{operators => transfers}/test_gcs_to_sheets.py |  10 +-
 .../test_gcs_to_sheets_system.py                   |   0
 .../azure/transfers}/__init__.py                   |   1 -
 .../{operators => transfers}/test_file_to_wasb.py  |   4 +-
 .../test_oracle_to_azure_data_lake.py}             |  10 +-
 .../operators => mysql/transfers}/__init__.py      |   1 -
 .../test_presto_to_mysql.py                        |  16 +-
 .../{operators => transfers}/test_s3_to_mysql.py   |  18 +-
 .../test_vertica_to_mysql.py                       |  44 +--
 .../oracle/{operators => transfers}/__init__.py    |   0
 .../test_oracle_to_oracle.py}                      |   4 +-
 .../operators => snowflake/transfers}/__init__.py  |   1 -
 .../test_s3_to_snowflake.py                        |   6 +-
 .../test_snowflake_to_slack.py                     |   6 +-
 tests/test_core_to_contrib.py                      | 366 ++++++++++-----------
 tests/test_project_structure.py                    |   3 +-
 343 files changed, 2143 insertions(+), 1658 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f08907b..befff35 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -264,6 +264,8 @@ metastore_browser/templates/.*\\.html$|.*\\.jinja2"
           (?x)
           ^airflow/providers/apache/cassandra/hooks/cassandra.py$|
           ^airflow/providers/apache/hive/operators/hive_stats.py$|
+          ^airflow/providers/apache/hive/PROVIDERS_CHANGES_*|
+          ^airflow/providers/apache/hive/README.md$|
           ^tests/providers/apache/cassandra/hooks/test_cassandra.py
       - id: consistent-pylint
         language: pygrep
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 7ae2e98..b3e9dfa 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -542,8 +542,71 @@ We support the following types of tests:
 
 For details on running different types of Airflow tests, see `TESTING.rst <TESTING.rst>`_.
 
+
+Naming Conventions for provider packages
+========================================
+
+In Airflow 2.0 we standardized and enforced naming for provider packages, modules and classes.
+Those rules (introduced as AIP-21) are not only documented but also enforced using automated checks
+that verify if the naming conventions are followed. Here is a brief summary of the rules, for
+detailed discussion you can go to [AIP-21 Changes in import paths](https://cwiki.apache.org/confluence/display/AIRFLOW/AIP-21%3A+Changes+in+import+paths)
+
+The rules are as follows:
+
+* Provider packages are all placed in 'airflow.providers'
+
+* Providers are usually direct sub-packages of the 'airflow.providers' package but in some cases they can be
+  further split into sub-packages (for example 'apache' package has 'cassandra', 'druid' ... providers ) out
+  of which several different provider packages are produced (apache.cassandra, apache.druid). This is
+  the case when the providers are connected under a common umbrella but very loosely coupled on the code level.
+
+* In some cases the package can have sub-packages but they are all delivered as single provider
+  package (for example 'google' package contains 'ads', 'cloud' etc. sub-packages). This is in case
+  the providers are connected under common umbrella and they are also tightly coupled on the code level.
+
+* Typical structure of provider package:
+    * example_dags -> example DAGs are stored here (used for documentation and System Tests)
+    * hooks -> hooks are stored here
+    * operators -> operators are stored here
+    * sensors -> sensors are stored here
+    * secrets -> secret backends are stored here
+    * transfers -> transfer operators are stored here
+
+* Module names do not contain the word "hooks", "operators" etc. The right type comes from
+  the package. For example 'hooks.datastore' module contains DataStore hook and 'operators.datastore'
+  contains DataStore operators.
+
+* Class names contain 'Operator', 'Hook', 'Sensor' - for example DataStoreHook, DataStoreExportOperator
+
+* Operator name usually follows the convention: <Subject><Action><Entity>Operator
+  (BigQueryExecuteQueryOperator) is a good example
+
+* Transfer Operators are those that actively push data from one service/provider and send it to another
+  service (might be for the same or another provider). This usually involves two hooks. The convention
+  for those is <Source>To<Destination>Operator. They are not named *TransferOperator nor *Transfer.
+
+* Operators that use an external service to perform the transfer (for example CloudDataTransferService
+  operators) are not placed in the "transfers" package and do not have to follow the naming convention for
+  transfer operators.
+
+* It is often debatable where to put transfer operators but we agreed to the following criteria:
+
+  * We use "maintainability" of the operators as the main criteria - so the transfer operator
+    should be kept at the provider which has highest "interest" in the transfer operator
+
+  * For Cloud Providers or Service providers that usually means that the transfer operators
+    should land at the "target" side of the transfer
+
+* Secret Backend name follows the convention: <SecretEngine>Backend.
+
+* Tests are grouped in parallel packages under "tests.providers" top level package.  Module name is usually
+  "test_<object_to_test>.py',
+
+* System tests (not yet fully automated but allowing e2e testing of a particular provider) are
+  named with _system.py suffix.
+
 Metadata Database Updates
-==============================
+=========================
 
 When developing features, you may need to persist information to the metadata
 database. Airflow has `Alembic <https://github.com/sqlalchemy/alembic>`__ built-in
@@ -623,7 +686,7 @@ could get a reproducible build. See the `Yarn docs
 
 
 Generate Bundled Files with yarn
-----------------------------------
+--------------------------------
 
 To parse and generate bundled files for Airflow, run either of the following
 commands:
@@ -910,6 +973,7 @@ You can join the channels via links at the `Airflow Community page <https://airf
 * The deprecated `JIRA issues <https://issues.apache.org/jira/projects/AIRFLOW/issues/AIRFLOW-4470?filter=allopenissues>`_ for:
    * checking out old but still valuable issues that are not on Github yet
    * mentioning the JIRA issue number in the title of the related PR you would like to open on Github
+
 **IMPORTANT**
 We don't create new issues on JIRA anymore. The reason we still look at JIRA issues is that there are valuable tickets inside of it. However, each new PR should be created on `Github issues <https://github.com/apache/airflow/issues>`_ as stated in `Contribution Workflow Example <https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#contribution-workflow-example>`_
 
diff --git a/UPDATING.md b/UPDATING.md
index 8de9064..01d69f0 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -912,7 +912,7 @@ The following table shows changes in import paths.
 |airflow.contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateOperator                           |airflow.providers.google.cloud.operators.dataproc.DataprocInstantiateWorkflowTemplateOperator                                 |
 |airflow.contrib.operators.datastore_export_operator.DatastoreExportOperator                                       |airflow.providers.google.cloud.operators.datastore.DatastoreExportOperator                                                    |
 |airflow.contrib.operators.datastore_import_operator.DatastoreImportOperator                                       |airflow.providers.google.cloud.operators.datastore.DatastoreImportOperator                                                    |
-|airflow.contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator                                            |airflow.providers.google.cloud.operators.local_to_gcs.FileToGoogleCloudStorageOperator                                        |
+|airflow.contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator                                            |airflow.providers.google.cloud.transfers.local_to_gcs.FileToGoogleCloudStorageOperator                                        |
 |airflow.contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator                                     |airflow.providers.google.cloud.operators.bigtable.BigtableUpdateClusterOperator                                               |
 |airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator                                    |airflow.providers.google.cloud.operators.bigtable.BigtableCreateInstanceOperator                                              |
 |airflow.contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator                                    |airflow.providers.google.cloud.operators.bigtable.BigtableDeleteInstanceOperator                                              |
@@ -1006,7 +1006,7 @@ The following table shows changes in import paths.
 |airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageBucketCreateAclEntryOperator                         |airflow.providers.google.cloud.operators.gcs.GCSBucketCreateAclEntryOperator                                                  |
 |airflow.contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator                         |airflow.providers.google.cloud.operators.gcs.GCSObjectCreateAclEntryOperator                                                  |
 |airflow.contrib.operators.gcs_delete_operator.GoogleCloudStorageDeleteOperator                                    |airflow.providers.google.cloud.operators.gcs.GCSDeleteObjectsOperator                                                         |
-|airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator                                |airflow.providers.google.cloud.operators.gcs.GCSToLocalOperator                                                               |
+|airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator                                |airflow.providers.google.cloud.operators.gcs.GCSToLocalFilesystemOperator                                                               |
 |airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator                                        |airflow.providers.google.cloud.operators.gcs.GCSListObjectsOperator                                                           |
 |airflow.contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator                                     |airflow.providers.google.cloud.operators.gcs.GCSCreateBucketOperator                                                          |
 |airflow.contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator                                          |airflow.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator                                                              |
diff --git a/airflow/contrib/operators/adls_to_gcs.py b/airflow/contrib/operators/adls_to_gcs.py
index c0f3b6a..f3def74 100644
--- a/airflow/contrib/operators/adls_to_gcs.py
+++ b/airflow/contrib/operators/adls_to_gcs.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.adls_to_gcs`."""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.adls_to_gcs`."""
 
 import warnings
 
-from airflow.providers.google.cloud.operators.adls_to_gcs import ADLSToGCSOperator
+from airflow.providers.google.cloud.transfers.adls_to_gcs import ADLSToGCSOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.google.cloud.operators.adls_to_gcs`.",
+    "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.adls_to_gcs`.",
     DeprecationWarning, stacklevel=2
 )
 
@@ -30,13 +30,13 @@ warnings.warn(
 class AdlsToGoogleCloudStorageOperator(ADLSToGCSOperator):
     """
     This class is deprecated.
-    Please use `airflow.providers.google.cloud.operators.adls_to_gcs.ADLSToGCSOperator`.
+    Please use `airflow.providers.google.cloud.transfers.adls_to_gcs.ADLSToGCSOperator`.
     """
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
-            Please use `airflow.providers.google.cloud.operators.adls_to_gcs.ADLSToGCSOperator`.""",
+            Please use `airflow.providers.google.cloud.transfers.adls_to_gcs.ADLSToGCSOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/contrib/operators/bigquery_to_bigquery.py b/airflow/contrib/operators/bigquery_to_bigquery.py
index e985c47..e585ccd 100644
--- a/airflow/contrib/operators/bigquery_to_bigquery.py
+++ b/airflow/contrib/operators/bigquery_to_bigquery.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.bigquery_to_bigquery`."""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.bigquery_to_bigquery`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.google.cloud.operators.bigquery_to_bigquery import BigQueryToBigQueryOperator  # noqa
+from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import BigQueryToBigQueryOperator  # noqa
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.google.cloud.operators.bigquery_to_bigquery`.",
+    "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.bigquery_to_bigquery`.",
     DeprecationWarning, stacklevel=2
 )
diff --git a/airflow/contrib/operators/bigquery_to_gcs.py b/airflow/contrib/operators/bigquery_to_gcs.py
index f8f718d..cd3b571 100644
--- a/airflow/contrib/operators/bigquery_to_gcs.py
+++ b/airflow/contrib/operators/bigquery_to_gcs.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.bigquery_to_gcs`."""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.bigquery_to_gcs`."""
 
 import warnings
 
-from airflow.providers.google.cloud.operators.bigquery_to_gcs import BigQueryToGCSOperator
+from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.google.cloud.operators.bigquery_to_gcs`.",
+    "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.bigquery_to_gcs`.",
     DeprecationWarning, stacklevel=2
 )
 
@@ -30,13 +30,13 @@ warnings.warn(
 class BigQueryToCloudStorageOperator(BigQueryToGCSOperator):
     """
     This class is deprecated.
-    Please use `airflow.providers.google.cloud.operators.bigquery_to_gcs.BigQueryToGCSOperator`.
+    Please use `airflow.providers.google.cloud.transfers.bigquery_to_gcs.BigQueryToGCSOperator`.
     """
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
-            Please use `airflow.providers.google.cloud.operators.bigquery_to_gcs.BigQueryToGCSOperator`.""",
+            Please use `airflow.providers.google.cloud.transfers.bigquery_to_gcs.BigQueryToGCSOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/contrib/operators/bigquery_to_mysql_operator.py b/airflow/contrib/operators/bigquery_to_mysql_operator.py
index e9bf576..37fec73 100644
--- a/airflow/contrib/operators/bigquery_to_mysql_operator.py
+++ b/airflow/contrib/operators/bigquery_to_mysql_operator.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.bigquery_to_mysql`."""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.bigquery_to_mysql`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.google.cloud.operators.bigquery_to_mysql import BigQueryToMySqlOperator  # noqa
+from airflow.providers.google.cloud.transfers.bigquery_to_mysql import BigQueryToMySqlOperator  # noqa
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.google.cloud.operators.bigquery_to_mysql`.",
+    "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.bigquery_to_mysql`.",
     DeprecationWarning, stacklevel=2
 )
diff --git a/airflow/contrib/operators/cassandra_to_gcs.py b/airflow/contrib/operators/cassandra_to_gcs.py
index 3e32006..c36fd5f 100644
--- a/airflow/contrib/operators/cassandra_to_gcs.py
+++ b/airflow/contrib/operators/cassandra_to_gcs.py
@@ -16,15 +16,15 @@
 # specific language governing permissions and limitations
 # under the License.
 """
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.cassandra_to_gcs`.
+This module is deprecated. Please use `airflow.providers.google.cloud.transfers.cassandra_to_gcs`.
 """
 
 import warnings
 
-from airflow.providers.google.cloud.operators.cassandra_to_gcs import CassandraToGCSOperator
+from airflow.providers.google.cloud.transfers.cassandra_to_gcs import CassandraToGCSOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.google.cloud.operators.cassandra_to_gcs`.",
+    "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.cassandra_to_gcs`.",
     DeprecationWarning, stacklevel=2
 )
 
@@ -32,13 +32,13 @@ warnings.warn(
 class CassandraToGoogleCloudStorageOperator(CassandraToGCSOperator):
     """
     This class is deprecated.
-    Please use `airflow.providers.google.cloud.operators.cassandra_to_gcs.CassandraToGCSOperator`.
+    Please use `airflow.providers.google.cloud.transfers.cassandra_to_gcs.CassandraToGCSOperator`.
     """
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
-            Please use `airflow.providers.google.cloud.operators.cassandra_to_gcs.CassandraToGCSOperator`.""",
+            Please use `airflow.providers.google.cloud.transfers.cassandra_to_gcs.CassandraToGCSOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/contrib/operators/dynamodb_to_s3.py b/airflow/contrib/operators/dynamodb_to_s3.py
index 41b2981..748a69d 100644
--- a/airflow/contrib/operators/dynamodb_to_s3.py
+++ b/airflow/contrib/operators/dynamodb_to_s3.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.amazon.aws.operators.dynamodb_to_s3`."""
+"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.dynamodb_to_s3`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.amazon.aws.operators.dynamodb_to_s3 import DynamoDBToS3Operator  # noqa
+from airflow.providers.amazon.aws.transfers.dynamodb_to_s3 import DynamoDBToS3Operator  # noqa
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.dynamodb_to_s3`.",
+    "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.dynamodb_to_s3`.",
     DeprecationWarning, stacklevel=2
 )
diff --git a/airflow/contrib/operators/file_to_gcs.py b/airflow/contrib/operators/file_to_gcs.py
index 6bf4986..be7db23 100644
--- a/airflow/contrib/operators/file_to_gcs.py
+++ b/airflow/contrib/operators/file_to_gcs.py
@@ -16,15 +16,15 @@
 # specific language governing permissions and limitations
 # under the License.
 """
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.local_to_gcs`.
+This module is deprecated. Please use `airflow.providers.google.cloud.transfers.local_to_gcs`.
 """
 
 import warnings
 
-from airflow.providers.google.cloud.operators.local_to_gcs import LocalFilesystemToGCSOperator
+from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.google.cloud.operators.local_to_gcs`,",
+    "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.local_to_gcs`.",
     DeprecationWarning, stacklevel=2
 )
 
@@ -32,14 +32,14 @@ warnings.warn(
 class FileToGoogleCloudStorageOperator(LocalFilesystemToGCSOperator):
     """
     This class is deprecated.
-    Please use `airflow.providers.google.cloud.operators.local_to_gcs.LocalFilesystemToGCSOperator`.
+    Please use `airflow.providers.google.cloud.transfers.local_to_gcs.LocalFilesystemToGCSOperator`.
     """
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
             Please use
-            `airflow.providers.google.cloud.operators.local_to_gcs.LocalFilesystemToGCSOperator`.""",
+            `airflow.providers.google.cloud.transfers.local_to_gcs.LocalFilesystemToGCSOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/contrib/operators/file_to_wasb.py b/airflow/contrib/operators/file_to_wasb.py
index b7368da..8ed0da7 100644
--- a/airflow/contrib/operators/file_to_wasb.py
+++ b/airflow/contrib/operators/file_to_wasb.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.microsoft.azure.operators.file_to_wasb`."""
+"""This module is deprecated. Please use `airflow.providers.microsoft.azure.transfers.file_to_wasb`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.microsoft.azure.operators.file_to_wasb import FileToWasbOperator  # noqa
+from airflow.providers.microsoft.azure.transfers.file_to_wasb import FileToWasbOperator  # noqa
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.microsoft.azure.operators.file_to_wasb`.",
+    "This module is deprecated. Please use `airflow.providers.microsoft.azure.transfers.file_to_wasb`.",
     DeprecationWarning, stacklevel=2
 )
diff --git a/airflow/contrib/operators/gcp_transfer_operator.py b/airflow/contrib/operators/gcp_transfer_operator.py
index 70f81b8..0d9c5d0 100644
--- a/airflow/contrib/operators/gcp_transfer_operator.py
+++ b/airflow/contrib/operators/gcp_transfer_operator.py
@@ -180,8 +180,8 @@ class GcpTransferServiceOperationsListOperator(CloudDataTransferServiceListOpera
 class GoogleCloudStorageToGoogleCloudStorageTransferOperator(CloudDataTransferServiceGCSToGCSOperator):
     """
     This class is deprecated.
-    Please use `airflow.providers.google.cloud.operators.data_transfe
-    r.CloudDataTransferServiceGCSToGCSOperator`.
+    Please use `airflow.providers.google.cloud.operators.data_transfer
+    .CloudDataTransferServiceGCSToGCSOperator`.
     """
 
     def __init__(self, *args, **kwargs):
diff --git a/airflow/contrib/operators/gcs_download_operator.py b/airflow/contrib/operators/gcs_download_operator.py
index 687f84c..6103e6c 100644
--- a/airflow/contrib/operators/gcs_download_operator.py
+++ b/airflow/contrib/operators/gcs_download_operator.py
@@ -21,7 +21,7 @@ This module is deprecated. Please use `airflow.providers.google.cloud.operators.
 
 import warnings
 
-from airflow.providers.google.cloud.operators.gcs import GCSToLocalOperator
+from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator
 
 warnings.warn(
     "This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs`.",
@@ -29,16 +29,16 @@ warnings.warn(
 )
 
 
-class GoogleCloudStorageDownloadOperator(GCSToLocalOperator):
+class GoogleCloudStorageDownloadOperator(GCSToLocalFilesystemOperator):
     """
     This class is deprecated.
-    Please use `airflow.providers.google.cloud.operators.gcs.GCSToLocalOperator`.
+    Please use `airflow.providers.google.cloud.transfers.gcs_to_local.GCSToLocalFilesystemOperator`.
     """
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
-            Please use `airflow.providers.google.cloud.operators.gcs.GCSToLocalOperator`.""",
+            Please use `airflow.providers.google.cloud.transfers.gcs_to_local.GCSToLocalFilesystemOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/contrib/operators/gcs_to_bq.py b/airflow/contrib/operators/gcs_to_bq.py
index 333e5aa..99c339e 100644
--- a/airflow/contrib/operators/gcs_to_bq.py
+++ b/airflow/contrib/operators/gcs_to_bq.py
@@ -16,15 +16,15 @@
 # specific language governing permissions and limitations
 # under the License.
 """
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs_to_bigquery`.
+This module is deprecated. Please use `airflow.providers.google.cloud.transfers.gcs_to_bigquery`.
 """
 
 import warnings
 
-from airflow.providers.google.cloud.operators.gcs_to_bigquery import GCSToBigQueryOperator
+from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs_to_bigquery`.",
+    "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.gcs_to_bigquery`.",
     DeprecationWarning, stacklevel=2
 )
 
@@ -32,13 +32,13 @@ warnings.warn(
 class GoogleCloudStorageToBigQueryOperator(GCSToBigQueryOperator):
     """
     This class is deprecated.
-    Please use `airflow.providers.google.cloud.operators.gcs_to_bq.GCSToBigQueryOperator`.
+    Please use `airflow.providers.google.cloud.transfers.gcs_to_bigquery.GCSToBigQueryOperator`.
     """
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
-            Please use `airflow.providers.google.cloud.operators.gcs_to_bq.GCSToBigQueryOperator`.""",
+            Please use `airflow.providers.google.cloud.transfers.gcs_to_bigquery.GCSToBigQueryOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/contrib/operators/gcs_to_gcs.py b/airflow/contrib/operators/gcs_to_gcs.py
index 309353f..4f6b4fb 100644
--- a/airflow/contrib/operators/gcs_to_gcs.py
+++ b/airflow/contrib/operators/gcs_to_gcs.py
@@ -16,15 +16,15 @@
 # specific language governing permissions and limitations
 # under the License.
 """
-This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs_to_gcs`.
+This module is deprecated. Please use `airflow.providers.google.cloud.transfers.gcs_to_gcs`.
 """
 
 import warnings
 
-from airflow.providers.google.cloud.operators.gcs_to_gcs import GCSToGCSOperator
+from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.google.cloud.operators.gcs_to_gcs`.",
+    "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.gcs_to_gcs`.",
     DeprecationWarning, stacklevel=2
 )
 
@@ -32,13 +32,13 @@ warnings.warn(
 class GoogleCloudStorageToGoogleCloudStorageOperator(GCSToGCSOperator):
     """
     This class is deprecated.
-    Please use `airflow.providers.google.cloud.operators.gcs_to_gcs.GCSToGCSOperator`.
+    Please use `airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSToGCSOperator`.
     """
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
-            Please use `airflow.providers.google.cloud.operators.gcs_to_gcs.GCSToGCSOperator`.""",
+            Please use `airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSToGCSOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/contrib/operators/gcs_to_gdrive_operator.py b/airflow/contrib/operators/gcs_to_gdrive_operator.py
index c741591..6ec7462 100644
--- a/airflow/contrib/operators/gcs_to_gdrive_operator.py
+++ b/airflow/contrib/operators/gcs_to_gdrive_operator.py
@@ -15,15 +15,15 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.google.suite.operators.gcs_to_gdrive`."""
+"""This module is deprecated. Please use `airflow.providers.google.suite.transfers.gcs_to_gdrive`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.google.suite.operators.gcs_to_gdrive import GCSToGoogleDriveOperator  # noqa
+from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator  # noqa
 
 warnings.warn(
     "This module is deprecated. "
-    "Please use `airflow.providers.google.suite.operators.gcs_to_gdrive.",
+    "Please use `airflow.providers.google.suite.transfers.gcs_to_gdrive`.",
     DeprecationWarning, stacklevel=2
 )
diff --git a/airflow/contrib/operators/gcs_to_s3.py b/airflow/contrib/operators/gcs_to_s3.py
index e80bc96..593be1f 100644
--- a/airflow/contrib/operators/gcs_to_s3.py
+++ b/airflow/contrib/operators/gcs_to_s3.py
@@ -16,27 +16,27 @@
 # specific language governing permissions and limitations
 # under the License.
 """
-This module is deprecated. Please use `airflow.providers.amazon.aws.operators.gcs_to_s3`.
+This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.gcs_to_s3`.
 """
 
 import warnings
 
-from airflow.providers.amazon.aws.operators.gcs_to_s3 import GCSToS3Operator
+from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.gcs_to_s3`.",
+    "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.gcs_to_s3`.",
     DeprecationWarning, stacklevel=2
 )
 
 
 class GoogleCloudStorageToS3Operator(GCSToS3Operator):
     """
-    This class is deprecated. Please use `airflow.providers.amazon.aws.operators.gcs_to_s3.GCSToS3Operator`.
+    This class is deprecated. Please use `airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Operator`.
     """
     def __init__(self, *args, **kwargs):
         warnings.warn(
             "This class is deprecated. "
-            "Please use `airflow.providers.amazon.aws.operators.gcs_to_s3.GCSToS3Operator`.",
+            "Please use `airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Operator`.",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/contrib/operators/hive_to_dynamodb.py b/airflow/contrib/operators/hive_to_dynamodb.py
index 80fe6c3..6784680 100644
--- a/airflow/contrib/operators/hive_to_dynamodb.py
+++ b/airflow/contrib/operators/hive_to_dynamodb.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.amazon.aws.operators.hive_to_dynamodb`."""
+"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.hive_to_dynamodb`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.amazon.aws.operators.hive_to_dynamodb import HiveToDynamoDBTransferOperator  # noqa
+from airflow.providers.amazon.aws.transfers.hive_to_dynamodb import HiveToDynamoDBOperator  # noqa
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.hive_to_dynamodb`.",
+    "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.hive_to_dynamodb`.",
     DeprecationWarning, stacklevel=2
 )
diff --git a/airflow/contrib/operators/imap_attachment_to_s3_operator.py b/airflow/contrib/operators/imap_attachment_to_s3_operator.py
index 6f0975e..597d6be 100644
--- a/airflow/contrib/operators/imap_attachment_to_s3_operator.py
+++ b/airflow/contrib/operators/imap_attachment_to_s3_operator.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.amazon.aws.operators.imap_attachment_to_s3`."""
+"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.imap_attachment_to_s3`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.amazon.aws.operators.imap_attachment_to_s3 import ImapAttachmentToS3Operator  # noqa
+from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator  # noqa
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.imap_attachment_to_s3`.",
+    "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.imap_attachment_to_s3`.",
     DeprecationWarning, stacklevel=2
 )
diff --git a/airflow/contrib/operators/mongo_to_s3.py b/airflow/contrib/operators/mongo_to_s3.py
index 20e33cd..82449ee 100644
--- a/airflow/contrib/operators/mongo_to_s3.py
+++ b/airflow/contrib/operators/mongo_to_s3.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.amazon.aws.operators.mongo_to_s3`."""
+"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.mongo_to_s3`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.amazon.aws.operators.mongo_to_s3 import MongoToS3Operator  # noqa
+from airflow.providers.amazon.aws.transfers.mongo_to_s3 import MongoToS3Operator  # noqa
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.mongo_to_s3`.",
+    "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.mongo_to_s3`.",
     DeprecationWarning, stacklevel=2
 )
diff --git a/airflow/contrib/operators/mssql_to_gcs.py b/airflow/contrib/operators/mssql_to_gcs.py
index 14698f7..20be043 100644
--- a/airflow/contrib/operators/mssql_to_gcs.py
+++ b/airflow/contrib/operators/mssql_to_gcs.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.mssql_to_gcs`."""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.mssql_to_gcs`."""
 
 import warnings
 
-from airflow.providers.google.cloud.operators.mssql_to_gcs import MSSQLToGCSOperator
+from airflow.providers.google.cloud.transfers.mssql_to_gcs import MSSQLToGCSOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.google.cloud.operators.mssql_to_gcs`.",
+    "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.mssql_to_gcs`.",
     DeprecationWarning, stacklevel=2
 )
 
@@ -30,13 +30,13 @@ warnings.warn(
 class MsSqlToGoogleCloudStorageOperator(MSSQLToGCSOperator):
     """
     This class is deprecated.
-    Please use `airflow.providers.google.cloud.operators.mssql_to_gcs.MSSQLToGCSOperator`.
+    Please use `airflow.providers.google.cloud.transfers.mssql_to_gcs.MSSQLToGCSOperator`.
     """
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
-            Please use `airflow.providers.google.cloud.operators.mssql_to_gcs.MSSQLToGCSOperator`.""",
+            Please use `airflow.providers.google.cloud.transfers.mssql_to_gcs.MSSQLToGCSOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/contrib/operators/mysql_to_gcs.py b/airflow/contrib/operators/mysql_to_gcs.py
index 4935efe..25206a1 100644
--- a/airflow/contrib/operators/mysql_to_gcs.py
+++ b/airflow/contrib/operators/mysql_to_gcs.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.mysql_to_gcs`."""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.mysql_to_gcs`."""
 
 import warnings
 
-from airflow.providers.google.cloud.operators.mysql_to_gcs import MySQLToGCSOperator
+from airflow.providers.google.cloud.transfers.mysql_to_gcs import MySQLToGCSOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.google.cloud.operators.mysql_to_gcs`.",
+    "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.mysql_to_gcs`.",
     DeprecationWarning, stacklevel=2
 )
 
@@ -30,13 +30,13 @@ warnings.warn(
 class MySqlToGoogleCloudStorageOperator(MySQLToGCSOperator):
     """
     This class is deprecated.
-    Please use `airflow.providers.google.cloud.operators.mysql_to_gcs.MySQLToGCSOperator`.
+    Please use `airflow.providers.google.cloud.transfers.mysql_to_gcs.MySQLToGCSOperator`.
     """
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
-            Please use `airflow.providers.google.cloud.operators.mysql_to_gcs.MySQLToGCSOperator`.""",
+            Please use `airflow.providers.google.cloud.transfers.mysql_to_gcs.MySQLToGCSOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py b/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py
index 9f4e554..2cdca43 100644
--- a/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py
+++ b/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py
@@ -17,18 +17,18 @@
 # under the License.
 """
 This module is deprecated.
-Please use `airflow.providers.microsoft.azure.operators.oracle_to_azure_data_lake_transfer`.
+Please use `airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake`.
 """
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.microsoft.azure.operators.oracle_to_azure_data_lake_transfer import (  # noqa
-    OracleToAzureDataLakeTransferOperator,
+from airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake import (  # noqa
+    OracleToAzureDataLakeOperator,
 )
 
 warnings.warn(
     "This module is deprecated. "
-    "Please use `airflow.providers.microsoft.azure.operators.oracle_to_azure_data_lake_transfer`.",
+    "Please use `airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake`.",
     DeprecationWarning, stacklevel=2
 )
diff --git a/airflow/contrib/operators/oracle_to_oracle_transfer.py b/airflow/contrib/operators/oracle_to_oracle_transfer.py
index 12e7b21..f1749bc 100644
--- a/airflow/contrib/operators/oracle_to_oracle_transfer.py
+++ b/airflow/contrib/operators/oracle_to_oracle_transfer.py
@@ -15,30 +15,30 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.oracle.operators.oracle_to_oracle_transfer`."""
+"""This module is deprecated. Please use `airflow.providers.oracle.transfers.oracle_to_oracle`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.oracle.operators.oracle_to_oracle_transfer import OracleToOracleTransferOperator
+from airflow.providers.oracle.transfers.oracle_to_oracle import OracleToOracleOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.oracle.operators.oracle_to_oracle_transfer`.",
+    "This module is deprecated. Please use `airflow.providers.oracle.transfers.oracle_to_oracle`.",
     DeprecationWarning, stacklevel=2
 )
 
 
-class OracleToOracleTransfer(OracleToOracleTransferOperator):
+class OracleToOracleTransfer(OracleToOracleOperator):
     """
     This class is deprecated.
     Please use:
-    `airflow.providers.oracle.operators.oracle_to_oracle_transfer.OracleToOracleTransferOperator`."""
+    `airflow.providers.oracle.transfers.oracle_to_oracle.OracleToOracleOperator`."""
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
             Please use
-            `airflow.providers.oracle.operators.oracle_to_oracle_transfer.OracleToOracleTransferOperator`.""",
+            `airflow.providers.oracle.transfers.oracle_to_oracle.OracleToOracleOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/contrib/operators/postgres_to_gcs_operator.py b/airflow/contrib/operators/postgres_to_gcs_operator.py
index e36def3..9ad3b31 100644
--- a/airflow/contrib/operators/postgres_to_gcs_operator.py
+++ b/airflow/contrib/operators/postgres_to_gcs_operator.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.postgres_to_gcs`."""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.postgres_to_gcs`."""
 
 import warnings
 
-from airflow.providers.google.cloud.operators.postgres_to_gcs import PostgresToGCSOperator
+from airflow.providers.google.cloud.transfers.postgres_to_gcs import PostgresToGCSOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.google.cloud.operators.postgres_to_gcs`.",
+    "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.postgres_to_gcs`.",
     DeprecationWarning, stacklevel=2
 )
 
@@ -30,12 +30,12 @@ warnings.warn(
 class PostgresToGoogleCloudStorageOperator(PostgresToGCSOperator):
     """
     This class is deprecated.
-    Please use `airflow.providers.google.cloud.operators.postgres_to_gcs.PostgresToGCSOperator`.
+    Please use `airflow.providers.google.cloud.transfers.postgres_to_gcs.PostgresToGCSOperator`.
     """
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
-            Please use `airflow.providers.google.cloud.operators.postgres_to_gcs.PostgresToGCSOperator`.""",
+            Please use `airflow.providers.google.cloud.transfers.postgres_to_gcs.PostgresToGCSOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/contrib/operators/s3_to_gcs_operator.py b/airflow/contrib/operators/s3_to_gcs_operator.py
index cedeaf3..7a43f76 100644
--- a/airflow/contrib/operators/s3_to_gcs_operator.py
+++ b/airflow/contrib/operators/s3_to_gcs_operator.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.s3_to_gcs`."""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.s3_to_gcs`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.google.cloud.operators.s3_to_gcs import S3ToGCSOperator  # noqa
+from airflow.providers.google.cloud.transfers.s3_to_gcs import S3ToGCSOperator  # noqa
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.google.cloud.operators.s3_to_gcs`.",
+    "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.s3_to_gcs`.",
     DeprecationWarning, stacklevel=2
 )
diff --git a/airflow/contrib/operators/s3_to_sftp_operator.py b/airflow/contrib/operators/s3_to_sftp_operator.py
index 3cc7d93..b247ce5 100644
--- a/airflow/contrib/operators/s3_to_sftp_operator.py
+++ b/airflow/contrib/operators/s3_to_sftp_operator.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.amazon.aws.operators.s3_to_sftp`."""
+"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.s3_to_sftp`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.amazon.aws.operators.s3_to_sftp import S3ToSFTPOperator  # noqa
+from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator  # noqa
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.s3_to_sftp`.",
+    "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.s3_to_sftp`.",
     DeprecationWarning, stacklevel=2
 )
diff --git a/airflow/contrib/operators/sftp_to_s3_operator.py b/airflow/contrib/operators/sftp_to_s3_operator.py
index 94b218f..817880c 100644
--- a/airflow/contrib/operators/sftp_to_s3_operator.py
+++ b/airflow/contrib/operators/sftp_to_s3_operator.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.amazon.aws.operators.sftp_to_s3`."""
+"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.sftp_to_s3`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.amazon.aws.operators.sftp_to_s3 import SFTPToS3Operator  # noqa
+from airflow.providers.amazon.aws.transfers.sftp_to_s3 import SFTPToS3Operator  # noqa
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.sftp_to_s3`.",
+    "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.sftp_to_s3`.",
     DeprecationWarning, stacklevel=2
 )
diff --git a/airflow/contrib/operators/sql_to_gcs.py b/airflow/contrib/operators/sql_to_gcs.py
index d7f1762..2206ba9 100644
--- a/airflow/contrib/operators/sql_to_gcs.py
+++ b/airflow/contrib/operators/sql_to_gcs.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.sql_to_gcs`."""
+"""This module is deprecated. Please use `airflow.providers.google.cloud.transfers.sql_to_gcs`."""
 
 import warnings
 
-from airflow.providers.google.cloud.operators.sql_to_gcs import BaseSQLToGCSOperator
+from airflow.providers.google.cloud.transfers.sql_to_gcs import BaseSQLToGCSOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.google.cloud.operators.sql_to_gcs`.",
+    "This module is deprecated. Please use `airflow.providers.google.cloud.transfers.sql_to_gcs`.",
     DeprecationWarning, stacklevel=2
 )
 
@@ -30,13 +30,13 @@ warnings.warn(
 class BaseSQLToGoogleCloudStorageOperator(BaseSQLToGCSOperator):
     """
     This class is deprecated.
-    Please use `airflow.providers.google.cloud.operators.sql_to_gcs.BaseSQLToGCSOperator`.
+    Please use `airflow.providers.google.cloud.transfers.sql_to_gcs.BaseSQLToGCSOperator`.
     """
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
-            Please use `airflow.providers.google.cloud.operators.sql_to_gcs.BaseSQLToGCSOperator`.""",
+            Please use `airflow.providers.google.cloud.transfers.sql_to_gcs.BaseSQLToGCSOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/contrib/operators/vertica_to_hive.py b/airflow/contrib/operators/vertica_to_hive.py
index 6492c63..4c6acbd 100644
--- a/airflow/contrib/operators/vertica_to_hive.py
+++ b/airflow/contrib/operators/vertica_to_hive.py
@@ -15,29 +15,29 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.apache.hive.operators.vertica_to_hive`."""
+"""This module is deprecated. Please use `airflow.providers.apache.hive.transfers.vertica_to_hive`."""
 
 import warnings
 
-from airflow.providers.apache.hive.operators.vertica_to_hive import VerticaToHiveTransferOperator
+from airflow.providers.apache.hive.transfers.vertica_to_hive import VerticaToHiveOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.apache.hive.operators.vertica_to_hive`.",
+    "This module is deprecated. Please use `airflow.providers.apache.hive.transfers.vertica_to_hive`.",
     DeprecationWarning, stacklevel=2
 )
 
 
-class VerticaToHiveTransfer(VerticaToHiveTransferOperator):
+class VerticaToHiveTransfer(VerticaToHiveOperator):
     """
     This class is deprecated.
     Please use:
-    `airflow.providers.apache.hive.operators.vertica_to_hive.VerticaToHiveTransferOperator`."""
+    `airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaToHiveOperator`."""
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
             Please use
-            `airflow.providers.apache.hive.operators.vertica_to_hive.VerticaToHiveTransferOperator`.""",
+            `airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaToHiveOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/contrib/operators/vertica_to_mysql.py b/airflow/contrib/operators/vertica_to_mysql.py
index efd6d64..9f28561 100644
--- a/airflow/contrib/operators/vertica_to_mysql.py
+++ b/airflow/contrib/operators/vertica_to_mysql.py
@@ -15,30 +15,30 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.mysql.operators.vertica_to_mysql`."""
+"""This module is deprecated. Please use `airflow.providers.mysql.transfers.vertica_to_mysql`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.mysql.operators.vertica_to_mysql import VerticaToMySqlTransferOperator
+from airflow.providers.mysql.transfers.vertica_to_mysql import VerticaToMySqlOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.mysql.operators.vertica_to_mysql`.",
+    "This module is deprecated. Please use `airflow.providers.mysql.transfers.vertica_to_mysql`.",
     DeprecationWarning, stacklevel=2
 )
 
 
-class VerticaToMySqlTransfer(VerticaToMySqlTransferOperator):
+class VerticaToMySqlTransfer(VerticaToMySqlOperator):
     """
     This class is deprecated.
     Please use:
-    `airflow.providers.mysql.operators.vertica_to_mysql.VerticaToMySqlTransferOperator`."""
+    `airflow.providers.mysql.transfers.vertica_to_mysql.VerticaToMySqlOperator`."""
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
             Please use
-            `airflow.providers.mysql.operators.vertica_to_mysql.VerticaToMySqlTransferOperator`.""",
+            `airflow.providers.mysql.transfers.vertica_to_mysql.VerticaToMySqlOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/operators/gcs_to_s3.py b/airflow/operators/gcs_to_s3.py
index 6dc7f7f..19affc7 100644
--- a/airflow/operators/gcs_to_s3.py
+++ b/airflow/operators/gcs_to_s3.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.amazon.aws.operators.gcs_to_s3`."""
+"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.gcs_to_s3`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.amazon.aws.operators.gcs_to_s3 import GCSToS3Operator  # noqa
+from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator  # noqa
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.gcs_to_s3`.",
+    "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.gcs_to_s3`.",
     DeprecationWarning, stacklevel=2
 )
diff --git a/airflow/operators/google_api_to_s3_transfer.py b/airflow/operators/google_api_to_s3_transfer.py
index c1c22a5..a8fbf0b 100644
--- a/airflow/operators/google_api_to_s3_transfer.py
+++ b/airflow/operators/google_api_to_s3_transfer.py
@@ -17,32 +17,32 @@
 # under the License.
 """
 This module is deprecated.
-Please use `airflow.providers.amazon.aws.operators.google_api_to_s3_transfer`.
+Please use `airflow.providers.amazon.aws.transfers.google_api_to_s3`.
 """
 
 import warnings
 
-from airflow.providers.amazon.aws.operators.google_api_to_s3_transfer import GoogleApiToS3TransferOperator
+from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator
 
 warnings.warn(
     "This module is deprecated. "
-    "Please use `airflow.providers.amazon.aws.operators.google_api_to_s3_transfer`.",
+    "Please use `airflow.providers.amazon.aws.transfers.google_api_to_s3`.",
     DeprecationWarning, stacklevel=2
 )
 
 
-class GoogleApiToS3Transfer(GoogleApiToS3TransferOperator):
+class GoogleApiToS3Transfer(GoogleApiToS3Operator):
     """
     This class is deprecated.
     Please use:
-    `airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.GoogleApiToS3TransferOperator`."""
+    `airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleApiToS3Operator`."""
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
             Please use
-            `airflow.providers.amazon.aws.operators.""" +
-            "google_api_to_s3_transfer.GoogleApiToS3TransferOperator`.",
+            `airflow.providers.amazon.aws.transfers.""" +
+            "google_api_to_s3.GoogleApiToS3Operator`.",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/operators/hive_to_druid.py b/airflow/operators/hive_to_druid.py
index 1cdd162..1fd70fb 100644
--- a/airflow/operators/hive_to_druid.py
+++ b/airflow/operators/hive_to_druid.py
@@ -15,30 +15,30 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.apache.druid.operators.hive_to_druid`."""
+"""This module is deprecated. Please use `airflow.providers.apache.druid.transfers.hive_to_druid`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.apache.druid.operators.hive_to_druid import HiveToDruidTransferOperator
+from airflow.providers.apache.druid.transfers.hive_to_druid import HiveToDruidOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.apache.druid.operators.hive_to_druid`.",
+    "This module is deprecated. Please use `airflow.providers.apache.druid.transfers.hive_to_druid`.",
     DeprecationWarning, stacklevel=2
 )
 
 
-class HiveToDruidTransfer(HiveToDruidTransferOperator):
+class HiveToDruidTransfer(HiveToDruidOperator):
     """
     This class is deprecated.
     Please use:
-    `airflow.providers.apache.druid.operators.hive_to_druid.HiveToDruidTransferOperator`."""
+    `airflow.providers.apache.druid.transfers.hive_to_druid.HiveToDruidOperator`."""
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
             Please use
-            `airflow.providers.apache.druid.operators.hive_to_druid.HiveToDruidTransferOperator`.""",
+            `airflow.providers.apache.druid.transfers.hive_to_druid.HiveToDruidOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/operators/hive_to_mysql.py b/airflow/operators/hive_to_mysql.py
index bca1cf8..f27a1f8 100644
--- a/airflow/operators/hive_to_mysql.py
+++ b/airflow/operators/hive_to_mysql.py
@@ -15,30 +15,30 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.apache.hive.operators.hive_to_mysql`."""
+"""This module is deprecated. Please use `airflow.providers.apache.hive.transfers.hive_to_mysql`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.apache.hive.operators.hive_to_mysql import HiveToMySqlTransferOperator
+from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.apache.hive.operators.hive_to_mysql`.",
+    "This module is deprecated. Please use `airflow.providers.apache.hive.transfers.hive_to_mysql`.",
     DeprecationWarning, stacklevel=2
 )
 
 
-class HiveToMySqlTransfer(HiveToMySqlTransferOperator):
+class HiveToMySqlTransfer(HiveToMySqlOperator):
     """
     This class is deprecated.
     Please use:
-    `airflow.providers.apache.hive.operators.hive_to_mysql.HiveToMySqlTransferOperator`."""
+    `airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator`."""
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
             Please use
-            `airflow.providers.apache.hive.operators.hive_to_mysql.HiveToMySqlTransferOperator`.""",
+            `airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/operators/hive_to_samba_operator.py b/airflow/operators/hive_to_samba_operator.py
index ba4dc3d..d1d1e94 100644
--- a/airflow/operators/hive_to_samba_operator.py
+++ b/airflow/operators/hive_to_samba_operator.py
@@ -15,14 +15,14 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.apache.hive.operators.hive_to_samba`."""
+"""This module is deprecated. Please use `airflow.providers.apache.hive.transfers.hive_to_samba`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.apache.hive.operators.hive_to_samba import Hive2SambaOperator  # noqa
+from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator  # noqa
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.apache.hive.operators.hive_to_samba`.",
+    "This module is deprecated. Please use `airflow.providers.apache.hive.transfers.hive_to_samba`.",
     DeprecationWarning, stacklevel=2
 )
diff --git a/airflow/operators/mssql_to_hive.py b/airflow/operators/mssql_to_hive.py
index 756ce30..50a9884 100644
--- a/airflow/operators/mssql_to_hive.py
+++ b/airflow/operators/mssql_to_hive.py
@@ -15,29 +15,29 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.apache.hive.operators.mssql_to_hive`."""
+"""This module is deprecated. Please use `airflow.providers.apache.hive.transfers.mssql_to_hive`."""
 
 import warnings
 
-from airflow.providers.apache.hive.operators.mssql_to_hive import MsSqlToHiveTransferOperator
+from airflow.providers.apache.hive.transfers.mssql_to_hive import MsSqlToHiveOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.apache.hive.operators.mssql_to_hive`.",
+    "This module is deprecated. Please use `airflow.providers.apache.hive.transfers.mssql_to_hive`.",
     DeprecationWarning, stacklevel=2
 )
 
 
-class MsSqlToHiveTransfer(MsSqlToHiveTransferOperator):
+class MsSqlToHiveTransfer(MsSqlToHiveOperator):
     """
     This class is deprecated.
     Please use:
-    `airflow.providers.apache.hive.operators.mssql_to_hive.MsSqlToHiveTransferOperator`."""
+    `airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator`."""
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
             Please use
-            `airflow.providers.apache.hive.operators.mssql_to_hive.MsSqlToHiveTransferOperator`.""",
+            `airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/operators/mysql_to_hive.py b/airflow/operators/mysql_to_hive.py
index 5c45075..82b2082 100644
--- a/airflow/operators/mysql_to_hive.py
+++ b/airflow/operators/mysql_to_hive.py
@@ -15,28 +15,28 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.apache.hive.operators.mysql_to_hive`."""
+"""This module is deprecated. Please use `airflow.providers.apache.hive.transfers.mysql_to_hive`."""
 
 import warnings
 
-from airflow.providers.apache.hive.operators.mysql_to_hive import MySqlToHiveTransferOperator
+from airflow.providers.apache.hive.transfers.mysql_to_hive import MySqlToHiveOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.apache.hive.operators.mysql_to_hive`.",
+    "This module is deprecated. Please use `airflow.providers.apache.hive.transfers.mysql_to_hive`.",
     DeprecationWarning, stacklevel=2
 )
 
 
-class MySqlToHiveTransfer(MySqlToHiveTransferOperator):
+class MySqlToHiveTransfer(MySqlToHiveOperator):
     """
     This class is deprecated.
-    Please use `airflow.providers.apache.hive.operators.mysql_to_hive.MySqlToHiveTransferOperator`.
+    Please use `airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator`.
     """
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
-           Please use `airflow.providers.apache.hive.operators.mysql_to_hive.MySqlToHiveTransferOperator`.""",
+           Please use `airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/operators/presto_to_mysql.py b/airflow/operators/presto_to_mysql.py
index a7b4aee..55e10f2 100644
--- a/airflow/operators/presto_to_mysql.py
+++ b/airflow/operators/presto_to_mysql.py
@@ -15,30 +15,30 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.mysql.operators.presto_to_mysql`."""
+"""This module is deprecated. Please use `airflow.providers.mysql.transfers.presto_to_mysql`."""
 
 import warnings
 
 # pylint: disable=unused-import
-from airflow.providers.mysql.operators.presto_to_mysql import PrestoToMySqlTransferOperator
+from airflow.providers.mysql.transfers.presto_to_mysql import PrestoToMySqlOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.mysql.operators.presto_to_mysql`.",
+    "This module is deprecated. Please use `airflow.providers.mysql.transfers.presto_to_mysql`.",
     DeprecationWarning, stacklevel=2
 )
 
 
-class PrestoToMySqlTransfer(PrestoToMySqlTransferOperator):
+class PrestoToMySqlTransfer(PrestoToMySqlOperator):
     """
     This class is deprecated.
     Please use:
-    `airflow.providers.mysql.operators.presto_to_mysql.PrestoToMySqlTransferOperator`."""
+    `airflow.providers.mysql.transfers.presto_to_mysql.PrestoToMySqlOperator`."""
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
             Please use
-            `airflow.providers.mysql.operators.presto_to_mysql.PrestoToMySqlTransferOperator`.""",
+            `airflow.providers.mysql.transfers.presto_to_mysql.PrestoToMySqlOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/operators/redshift_to_s3_operator.py b/airflow/operators/redshift_to_s3_operator.py
index 9b4eb3e..2b9fcc0 100644
--- a/airflow/operators/redshift_to_s3_operator.py
+++ b/airflow/operators/redshift_to_s3_operator.py
@@ -15,29 +15,29 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.amazon.aws.operators.redshift_to_s3`."""
+"""This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.redshift_to_s3`."""
 
 import warnings
 
-from airflow.providers.amazon.aws.operators.redshift_to_s3 import RedshiftToS3TransferOperator
+from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.redshift_to_s3`.",
+    "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.redshift_to_s3`.",
     DeprecationWarning, stacklevel=2
 )
 
 
-class RedshiftToS3Transfer(RedshiftToS3TransferOperator):
+class RedshiftToS3Transfer(RedshiftToS3Operator):
     """
     This class is deprecated.
     Please use:
-    `airflow.providers.amazon.aws.operators.redshift_to_s3.RedshiftToS3TransferOperator`."""
+    `airflow.providers.amazon.aws.transfers.redshift_to_s3.RedshiftToS3Operator`."""
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
             Please use
-            `airflow.providers.amazon.aws.operators.redshift_to_s3.RedshiftToS3TransferOperator`.""",
+            `airflow.providers.amazon.aws.transfers.redshift_to_s3.RedshiftToS3Operator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/operators/s3_to_hive_operator.py b/airflow/operators/s3_to_hive_operator.py
index d353b4e..f1305e9 100644
--- a/airflow/operators/s3_to_hive_operator.py
+++ b/airflow/operators/s3_to_hive_operator.py
@@ -15,28 +15,28 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module is deprecated. Please use `airflow.providers.apache.hive.operators.s3_to_hive`."""
+"""This module is deprecated. Please use `airflow.providers.apache.hive.transfers.s3_to_hive`."""
 
 import warnings
 
-from airflow.providers.apache.hive.operators.s3_to_hive import S3ToHiveTransferOperator
+from airflow.providers.apache.hive.transfers.s3_to_hive import S3ToHiveOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.apache.hive.operators.s3_to_hive`.",
+    "This module is deprecated. Please use `airflow.providers.apache.hive.transfers.s3_to_hive`.",
     DeprecationWarning, stacklevel=2
 )
 
 
-class S3ToHiveTransfer(S3ToHiveTransferOperator):
+class S3ToHiveTransfer(S3ToHiveOperator):
     """
     This class is deprecated.
-    Please use `airflow.providers.apache.hive.operators.s3_to_hive.S3ToHiveTransferOperator`.
+    Please use `airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator`.
     """
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
-           Please use `airflow.providers.apache.hive.operators.s3_to_hive.S3ToHiveTransferOperator`.""",
+           Please use `airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/operators/s3_to_redshift_operator.py b/airflow/operators/s3_to_redshift_operator.py
index 74d37da..4272ae8 100644
--- a/airflow/operators/s3_to_redshift_operator.py
+++ b/airflow/operators/s3_to_redshift_operator.py
@@ -19,25 +19,25 @@
 
 import warnings
 
-from airflow.providers.amazon.aws.operators.s3_to_redshift import S3ToRedshiftTransferOperator
+from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator
 
 warnings.warn(
-    "This module is deprecated. Please use `airflow.providers.amazon.aws.operators.s3_to_redshift`.",
+    "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.s3_to_redshift`.",
     DeprecationWarning, stacklevel=2
 )
 
 
-class S3ToRedshiftTransfer(S3ToRedshiftTransferOperator):
+class S3ToRedshiftTransfer(S3ToRedshiftOperator):
     """
     This class is deprecated.
     Please use:
-    `airflow.providers.amazon.aws.operators.s3_to_redshift.S3ToRedshiftTransferOperator`."""
+    `airflow.providers.amazon.aws.transfers.s3_to_redshift.S3ToRedshiftOperator`."""
 
     def __init__(self, *args, **kwargs):
         warnings.warn(
             """This class is deprecated.
             Please use
-            `airflow.providers.amazon.aws.operators.s3_to_redshift.S3ToRedshiftTransferOperator`.""",
+            `airflow.providers.amazon.aws.transfers.s3_to_redshift.S3ToRedshiftOperator`.""",
             DeprecationWarning, stacklevel=2
         )
         super().__init__(*args, **kwargs)
diff --git a/airflow/providers/amazon/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/amazon/PROVIDERS_CHANGES_2020.05.20.md
index 5caa145..1961258 100644
--- a/airflow/providers/amazon/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/amazon/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,14 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [58a8ec0e4](https://github.com/apache/airflow/commit/58a8ec0e46f624ee0369dd156dd8fb4f81884a21) | 2020-06-16  | AWSBatchOperator &lt;&gt; ClientHook relation changed to composition (#9306)                                                                                             |
+| [a80cd25e8](https://github.com/apache/airflow/commit/a80cd25e8eb7f8b5d89af26cdcd62a5bbe44d65c) | 2020-06-15  | Close/Flush byte stream in s3 hook load_string and load_bytes (#9211)                                                                                              |
+| [ffb857403](https://github.com/apache/airflow/commit/ffb85740373f7adb70d28ec7d5a8886380170e5e) | 2020-06-14  | Decrypt secrets from SystemsManagerParameterStoreBackend (#9214)                                                                                                   |
+| [a69b031f2](https://github.com/apache/airflow/commit/a69b031f20c5a1cd032f9873394374f661811e8f) | 2020-06-10  | Add S3ToRedshift example dag and system test (#8877)                                                                                                               |
+| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02  | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106)                                                                                     |
+| [357e11e0c](https://github.com/apache/airflow/commit/357e11e0cfb4c02833018e073bc4f5e5b52fae4f) | 2020-05-29  | Add Delete/Create S3 bucket operators (#8895)                                                                                                                      |
+| [1ed171bfb](https://github.com/apache/airflow/commit/1ed171bfb265ded8674058bdc425640d25f1f4fc) | 2020-05-28  | Add script_args for S3FileTransformOperator (#9019)                                                                                                                |
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                                                                                                     |
 | [f946f96da](https://github.com/apache/airflow/commit/f946f96da45d8e6101805450d8cab7ccb2774ad0) | 2020-05-23  | Old json boto compat removed from dynamodb_to_s3 operator (#8987)                                                                                                  |
diff --git a/airflow/providers/amazon/README.md b/airflow/providers/amazon/README.md
index 23df486..466449d 100644
--- a/airflow/providers/amazon/README.md
+++ b/airflow/providers/amazon/README.md
@@ -33,14 +33,14 @@ Release: 2020.5.20
     - [Operators](#operators)
         - [New operators](#new-operators)
         - [Moved operators](#moved-operators)
+    - [Transfer operators](#transfers)
+        - [Moved transfer operators](#moved-transfers)
     - [Sensors](#sensors)
         - [New sensors](#new-sensors)
         - [Moved sensors](#moved-sensors)
     - [Hooks](#hooks)
         - [New hooks](#new-hooks)
         - [Moved hooks](#moved-hooks)
-    - [Protocols](#protocols)
-        - [Moved protocols](#moved-protocols)
     - [Secrets](#secrets)
         - [Moved secrets](#moved-secrets)
 - [Releases](#releases)
@@ -114,6 +114,8 @@ All classes in Airflow 2.0 are in `airflow.providers.amazon` package.
 | [aws.operators.ec2_stop_instance.EC2StopInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/ec2_stop_instance.py)       |
 | [aws.operators.emr_modify_cluster.EmrModifyClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_modify_cluster.py)    |
 | [aws.operators.glue.AwsGlueJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/glue.py)                                      |
+| [aws.operators.s3_bucket.S3CreateBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_bucket.py)                        |
+| [aws.operators.s3_bucket.S3DeleteBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_bucket.py)                        |
 | [aws.operators.s3_file_transform.S3FileTransformOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_file_transform.py)       |
 
 
@@ -124,22 +126,13 @@ All classes in Airflow 2.0 are in `airflow.providers.amazon` package.
 |:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
 | [aws.operators.athena.AWSAthenaOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/athena.py)                                                     | [contrib.operators.aws_athena_operator.AWSAthenaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/aws_athena_operator.py)                                             |
 | [aws.operators.batch.AwsBatchOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/batch.py)                                                        | [contrib.operators.awsbatch_operator.AWSBatchOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/awsbatch_operator.py)                                                  |
-| [aws.operators.dynamodb_to_s3.DynamoDBToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/dynamodb_to_s3.py)                                  | [contrib.operators.dynamodb_to_s3.DynamoDBToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dynamodb_to_s3.py)                                                    |
 | [aws.operators.ecs.ECSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/ecs.py)                                                                 | [contrib.operators.ecs_operator.ECSOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/ecs_operator.py)                                                                 |
 | [aws.operators.emr_add_steps.EmrAddStepsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_add_steps.py)                                     | [contrib.operators.emr_add_steps_operator.EmrAddStepsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/emr_add_steps_operator.py)                                     |
 | [aws.operators.emr_create_job_flow.EmrCreateJobFlowOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_create_job_flow.py)                    | [contrib.operators.emr_create_job_flow_operator.EmrCreateJobFlowOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/emr_create_job_flow_operator.py)                    |
 | [aws.operators.emr_terminate_job_flow.EmrTerminateJobFlowOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py)           | [contrib.operators.emr_terminate_job_flow_operator.EmrTerminateJobFlowOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/emr_terminate_job_flow_operator.py)           |
-| [aws.operators.gcs_to_s3.GCSToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/gcs_to_s3.py)                                                 | [operators.gcs_to_s3.GCSToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/gcs_to_s3.py)                                                                                   |
-| [aws.operators.google_api_to_s3_transfer.GoogleApiToS3TransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/google_api_to_s3_transfer.py)   | [operators.google_api_to_s3_transfer.GoogleApiToS3Transfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/google_api_to_s3_transfer.py)                                             |
-| [aws.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/hive_to_dynamodb.py)                    | [contrib.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/hive_to_dynamodb.py)                                      |
-| [aws.operators.imap_attachment_to_s3.ImapAttachmentToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/imap_attachment_to_s3.py)              | [contrib.operators.imap_attachment_to_s3_operator.ImapAttachmentToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/imap_attachment_to_s3_operator.py)              |
-| [aws.operators.mongo_to_s3.MongoToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/mongo_to_s3.py)                                           | [contrib.operators.mongo_to_s3.MongoToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mongo_to_s3.py)                                                             |
-| [aws.operators.redshift_to_s3.RedshiftToS3TransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/redshift_to_s3.py)                          | [operators.redshift_to_s3_operator.RedshiftToS3Transfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/redshift_to_s3_operator.py)                                                  |
 | [aws.operators.s3_copy_object.S3CopyObjectOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_copy_object.py)                                  | [contrib.operators.s3_copy_object_operator.S3CopyObjectOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_copy_object_operator.py)                                  |
 | [aws.operators.s3_delete_objects.S3DeleteObjectsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_delete_objects.py)                         | [contrib.operators.s3_delete_objects_operator.S3DeleteObjectsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_delete_objects_operator.py)                         |
 | [aws.operators.s3_list.S3ListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_list.py)                                                      | [contrib.operators.s3_list_operator.S3ListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_list_operator.py)                                                      |
-| [aws.operators.s3_to_redshift.S3ToRedshiftTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_to_redshift.py)                          | [operators.s3_to_redshift_operator.S3ToRedshiftTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/s3_to_redshift_operator.py)                                                  |
-| [aws.operators.s3_to_sftp.S3ToSFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_to_sftp.py)                                              | [contrib.operators.s3_to_sftp_operator.S3ToSFTPOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_to_sftp_operator.py)                                              |
 | [aws.operators.sagemaker_base.SageMakerBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_base.py)                                 | [contrib.operators.sagemaker_base_operator.SageMakerBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_base_operator.py)                                 |
 | [aws.operators.sagemaker_endpoint.SageMakerEndpointOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py)                     | [contrib.operators.sagemaker_endpoint_operator.SageMakerEndpointOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_endpoint_operator.py)                     |
 | [aws.operators.sagemaker_endpoint_config.SageMakerEndpointConfigOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py) | [contrib.operators.sagemaker_endpoint_config_operator.SageMakerEndpointConfigOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_endpoint_config_operator.py) |
@@ -147,13 +140,33 @@ All classes in Airflow 2.0 are in `airflow.providers.amazon` package.
 | [aws.operators.sagemaker_training.SageMakerTrainingOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_training.py)                     | [contrib.operators.sagemaker_training_operator.SageMakerTrainingOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_training_operator.py)                     |
 | [aws.operators.sagemaker_transform.SageMakerTransformOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_transform.py)                  | [contrib.operators.sagemaker_transform_operator.SageMakerTransformOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_transform_operator.py)                  |
 | [aws.operators.sagemaker_tuning.SageMakerTuningOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_tuning.py)                           | [contrib.operators.sagemaker_tuning_operator.SageMakerTuningOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_tuning_operator.py)                           |
-| [aws.operators.sftp_to_s3.SFTPToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sftp_to_s3.py)                                              | [contrib.operators.sftp_to_s3_operator.SFTPToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sftp_to_s3_operator.py)                                              |
 | [aws.operators.sns.SnsPublishOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sns.py)                                                          | [contrib.operators.sns_publish_operator.SnsPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sns_publish_operator.py)                                          |
 | [aws.operators.sqs.SQSPublishOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sqs.py)                                                          | [contrib.operators.aws_sqs_publish_operator.SQSPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/aws_sqs_publish_operator.py)                                  |
 
 
 
 
+
+
+
+### Moved transfer operators
+
+| Airflow 2.0 transfers: `airflow.providers.amazon` package                                                                                                                       | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                                   |
+|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [aws.transfers.dynamodb_to_s3.DynamoDBToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py)                     | [contrib.operators.dynamodb_to_s3.DynamoDBToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dynamodb_to_s3.py)                                       |
+| [aws.transfers.gcs_to_s3.GCSToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/gcs_to_s3.py)                                    | [operators.gcs_to_s3.GCSToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/gcs_to_s3.py)                                                                      |
+| [aws.transfers.google_api_to_s3.GoogleApiToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/google_api_to_s3.py)                | [operators.google_api_to_s3_transfer.GoogleApiToS3Transfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/google_api_to_s3_transfer.py)                                |
+| [aws.transfers.hive_to_dynamodb.HiveToDynamoDBOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py)               | [contrib.operators.hive_to_dynamodb.HiveToDynamoDBOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/hive_to_dynamodb.py)                                 |
+| [aws.transfers.imap_attachment_to_s3.ImapAttachmentToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py) | [contrib.operators.imap_attachment_to_s3_operator.ImapAttachmentToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/imap_attachment_to_s3_operator.py) |
+| [aws.transfers.mongo_to_s3.MongoToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/mongo_to_s3.py)                              | [contrib.operators.mongo_to_s3.MongoToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mongo_to_s3.py)                                                |
+| [aws.transfers.redshift_to_s3.RedshiftToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/redshift_to_s3.py)                     | [operators.redshift_to_s3_operator.RedshiftToS3Transfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/redshift_to_s3_operator.py)                                     |
+| [aws.transfers.s3_to_redshift.S3ToRedshiftOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/s3_to_redshift.py)                     | [operators.s3_to_redshift_operator.S3ToRedshiftTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/s3_to_redshift_operator.py)                                     |
+| [aws.transfers.s3_to_sftp.S3ToSFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/s3_to_sftp.py)                                 | [contrib.operators.s3_to_sftp_operator.S3ToSFTPOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_to_sftp_operator.py)                                 |
+| [aws.transfers.sftp_to_s3.SFTPToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/sftp_to_s3.py)                                 | [contrib.operators.sftp_to_s3_operator.SFTPToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sftp_to_s3_operator.py)                                 |
+
+
+
+
 ## Sensors
 
 
@@ -223,18 +236,6 @@ All classes in Airflow 2.0 are in `airflow.providers.amazon` package.
 
 
 
-## Protocols
-
-
-
-### Moved protocols
-
-| Airflow 2.0 protocols: `airflow.providers.amazon` package                                                                                   | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                            |
-|:--------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [aws.hooks.batch_client.AwsBatchProtocol](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/batch_client.py) | [contrib.operators.awsbatch_operator.BatchProtocol](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/awsbatch_operator.py) |
-| [aws.operators.ecs.ECSProtocol](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/ecs.py)                | [contrib.operators.ecs_operator.ECSProtocol](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/ecs_operator.py)             |
-
-
 
 ## Secrets
 
@@ -256,6 +257,14 @@ All classes in Airflow 2.0 are in `airflow.providers.amazon` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [58a8ec0e4](https://github.com/apache/airflow/commit/58a8ec0e46f624ee0369dd156dd8fb4f81884a21) | 2020-06-16  | AWSBatchOperator &lt;&gt; ClientHook relation changed to composition (#9306)                                                                                             |
+| [a80cd25e8](https://github.com/apache/airflow/commit/a80cd25e8eb7f8b5d89af26cdcd62a5bbe44d65c) | 2020-06-15  | Close/Flush byte stream in s3 hook load_string and load_bytes (#9211)                                                                                              |
+| [ffb857403](https://github.com/apache/airflow/commit/ffb85740373f7adb70d28ec7d5a8886380170e5e) | 2020-06-14  | Decrypt secrets from SystemsManagerParameterStoreBackend (#9214)                                                                                                   |
+| [a69b031f2](https://github.com/apache/airflow/commit/a69b031f20c5a1cd032f9873394374f661811e8f) | 2020-06-10  | Add S3ToRedshift example dag and system test (#8877)                                                                                                               |
+| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02  | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106)                                                                                     |
+| [357e11e0c](https://github.com/apache/airflow/commit/357e11e0cfb4c02833018e073bc4f5e5b52fae4f) | 2020-05-29  | Add Delete/Create S3 bucket operators (#8895)                                                                                                                      |
+| [1ed171bfb](https://github.com/apache/airflow/commit/1ed171bfb265ded8674058bdc425640d25f1f4fc) | 2020-05-28  | Add script_args for S3FileTransformOperator (#9019)                                                                                                                |
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                                                                                                     |
 | [f946f96da](https://github.com/apache/airflow/commit/f946f96da45d8e6101805450d8cab7ccb2774ad0) | 2020-05-23  | Old json boto compat removed from dynamodb_to_s3 operator (#8987)                                                                                                  |
diff --git a/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py b/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py
index 6dd9117..057f29b 100644
--- a/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py
+++ b/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py
@@ -38,7 +38,7 @@ from os import getenv
 from airflow import DAG
 from airflow.operators.dummy_operator import DummyOperator
 from airflow.operators.python import BranchPythonOperator
-from airflow.providers.amazon.aws.operators.google_api_to_s3_transfer import GoogleApiToS3TransferOperator
+from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator
 from airflow.utils.dates import days_ago
 
 # [START howto_operator_google_api_to_s3_transfer_advanced_env_variables]
@@ -79,7 +79,7 @@ with DAG(
     tags=['example']
 ) as dag:
     # [START howto_operator_google_api_to_s3_transfer_advanced_task_1]
-    task_video_ids_to_s3 = GoogleApiToS3TransferOperator(
+    task_video_ids_to_s3 = GoogleApiToS3Operator(
         gcp_conn_id=YOUTUBE_CONN_ID,
         google_api_service_name='youtube',
         google_api_service_version='v3',
@@ -109,7 +109,7 @@ with DAG(
     )
     # [END howto_operator_google_api_to_s3_transfer_advanced_task_1_1]
     # [START howto_operator_google_api_to_s3_transfer_advanced_task_2]
-    task_video_data_to_s3 = GoogleApiToS3TransferOperator(
+    task_video_data_to_s3 = GoogleApiToS3Operator(
         gcp_conn_id=YOUTUBE_CONN_ID,
         google_api_service_name='youtube',
         google_api_service_version='v3',
diff --git a/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_basic.py b/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_basic.py
index f329f1d..07290ca 100644
--- a/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_basic.py
+++ b/airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_basic.py
@@ -23,7 +23,7 @@ You need to set all env variables to request the data.
 from os import getenv
 
 from airflow import DAG
-from airflow.providers.amazon.aws.operators.google_api_to_s3_transfer import GoogleApiToS3TransferOperator
+from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator
 from airflow.utils.dates import days_ago
 
 # [START howto_operator_google_api_to_s3_transfer_basic_env_variables]
@@ -41,7 +41,7 @@ with DAG(
     tags=['example']
 ) as dag:
     # [START howto_operator_google_api_to_s3_transfer_basic_task_1]
-    task_google_sheets_values_to_s3 = GoogleApiToS3TransferOperator(
+    task_google_sheets_values_to_s3 = GoogleApiToS3Operator(
         google_api_service_name='sheets',
         google_api_service_version='v4',
         google_api_endpoint_path='sheets.spreadsheets.values.get',
diff --git a/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py b/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py
index 7a0d86c..636d360 100644
--- a/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py
+++ b/airflow/providers/amazon/aws/example_dags/example_imap_attachment_to_s3.py
@@ -23,7 +23,7 @@ protocol from a mail server to S3 Bucket.
 from os import getenv
 
 from airflow import DAG
-from airflow.providers.amazon.aws.operators.imap_attachment_to_s3 import ImapAttachmentToS3Operator
+from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator
 from airflow.utils.dates import days_ago
 
 # [START howto_operator_imap_attachment_to_s3_env_variables]
diff --git a/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py b/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py
index c36d443..c9cc2bd 100644
--- a/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py
+++ b/airflow/providers/amazon/aws/example_dags/example_s3_to_redshift.py
@@ -16,7 +16,7 @@
 # under the License.
 
 """
-This is an example dag for using `S3ToRedshiftTransferOperator` to copy a S3 key into a Redshift table.
+This is an example dag for using `S3ToRedshiftOperator` to copy an S3 key into a Redshift table.
 """
 
 from os import getenv
@@ -24,7 +24,7 @@ from os import getenv
 from airflow import DAG
 from airflow.operators.python import PythonOperator
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.aws.operators.s3_to_redshift import S3ToRedshiftTransferOperator
+from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator
 from airflow.providers.postgres.operators.postgres import PostgresOperator
 from airflow.utils.dates import days_ago
 
@@ -64,7 +64,7 @@ with DAG(
         task_id='setup__create_table'
     )
     # [START howto_operator_s3_to_redshift_task_1]
-    task_transfer_s3_to_redshift = S3ToRedshiftTransferOperator(
+    task_transfer_s3_to_redshift = S3ToRedshiftOperator(
         s3_bucket=S3_BUCKET,
         s3_key=S3_KEY,
         schema="PUBLIC",
diff --git a/airflow/providers/amazon/aws/hooks/batch_client.py b/airflow/providers/amazon/aws/hooks/batch_client.py
index 653854e..2069bc9 100644
--- a/airflow/providers/amazon/aws/hooks/batch_client.py
+++ b/airflow/providers/amazon/aws/hooks/batch_client.py
@@ -38,7 +38,8 @@ from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.typing_compat import Protocol, runtime_checkable
 
-# Add exceptions to pylint for the boto3 protocol only; ideally the boto3 library could provide
+# Add exceptions to pylint for the boto3 protocol only; ideally the boto3 library
+# could provide
 # protocols for all their dynamically generated classes (try to migrate this to a PR on botocore).
 # Note that the use of invalid-name parameters should be restricted to the boto3 mappings only;
 # all the Airflow wrappers of boto3 clients should not adopt invalid-names to match boto3.
diff --git a/tests/providers/oracle/operators/__init__.py b/airflow/providers/amazon/aws/transfers/__init__.py
similarity index 99%
copy from tests/providers/oracle/operators/__init__.py
copy to airflow/providers/amazon/aws/transfers/__init__.py
index 217e5db..13a8339 100644
--- a/tests/providers/oracle/operators/__init__.py
+++ b/airflow/providers/amazon/aws/transfers/__init__.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
diff --git a/airflow/providers/amazon/aws/operators/dynamodb_to_s3.py b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
similarity index 100%
rename from airflow/providers/amazon/aws/operators/dynamodb_to_s3.py
rename to airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py
diff --git a/airflow/providers/amazon/aws/operators/gcs_to_s3.py b/airflow/providers/amazon/aws/transfers/gcs_to_s3.py
similarity index 100%
rename from airflow/providers/amazon/aws/operators/gcs_to_s3.py
rename to airflow/providers/amazon/aws/transfers/gcs_to_s3.py
diff --git a/airflow/providers/amazon/aws/operators/google_api_to_s3_transfer.py b/airflow/providers/amazon/aws/transfers/google_api_to_s3.py
similarity index 99%
rename from airflow/providers/amazon/aws/operators/google_api_to_s3_transfer.py
rename to airflow/providers/amazon/aws/transfers/google_api_to_s3.py
index e0f13e5..e30e39b 100644
--- a/airflow/providers/amazon/aws/operators/google_api_to_s3_transfer.py
+++ b/airflow/providers/amazon/aws/transfers/google_api_to_s3.py
@@ -29,7 +29,7 @@ from airflow.providers.google.common.hooks.discovery_api import GoogleDiscoveryA
 from airflow.utils.decorators import apply_defaults
 
 
-class GoogleApiToS3TransferOperator(BaseOperator):
+class GoogleApiToS3Operator(BaseOperator):
     """
     Basic class for transferring data from a Google API endpoint into a S3 Bucket.
 
diff --git a/airflow/providers/amazon/aws/operators/hive_to_dynamodb.py b/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py
similarity index 98%
rename from airflow/providers/amazon/aws/operators/hive_to_dynamodb.py
rename to airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py
index b1b8ab7..882a6fb 100644
--- a/airflow/providers/amazon/aws/operators/hive_to_dynamodb.py
+++ b/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py
@@ -28,7 +28,7 @@ from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
 from airflow.utils.decorators import apply_defaults
 
 
-class HiveToDynamoDBTransferOperator(BaseOperator):
+class HiveToDynamoDBOperator(BaseOperator):
     """
     Moves data from Hive to DynamoDB, note that for now the data is loaded
     into memory before being pushed to DynamoDB, so this operator should
diff --git a/airflow/providers/amazon/aws/operators/imap_attachment_to_s3.py b/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py
similarity index 100%
rename from airflow/providers/amazon/aws/operators/imap_attachment_to_s3.py
rename to airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py
diff --git a/airflow/providers/amazon/aws/operators/mongo_to_s3.py b/airflow/providers/amazon/aws/transfers/mongo_to_s3.py
similarity index 100%
rename from airflow/providers/amazon/aws/operators/mongo_to_s3.py
rename to airflow/providers/amazon/aws/transfers/mongo_to_s3.py
diff --git a/airflow/providers/amazon/aws/operators/redshift_to_s3.py b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
similarity index 99%
rename from airflow/providers/amazon/aws/operators/redshift_to_s3.py
rename to airflow/providers/amazon/aws/transfers/redshift_to_s3.py
index a456560..5c85506 100644
--- a/airflow/providers/amazon/aws/operators/redshift_to_s3.py
+++ b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py
@@ -26,7 +26,7 @@ from airflow.providers.postgres.hooks.postgres import PostgresHook
 from airflow.utils.decorators import apply_defaults
 
 
-class RedshiftToS3TransferOperator(BaseOperator):
+class RedshiftToS3Operator(BaseOperator):
     """
     Executes an UNLOAD command to s3 as a CSV with headers
 
diff --git a/airflow/providers/amazon/aws/operators/s3_to_redshift.py b/airflow/providers/amazon/aws/transfers/s3_to_redshift.py
similarity index 98%
rename from airflow/providers/amazon/aws/operators/s3_to_redshift.py
rename to airflow/providers/amazon/aws/transfers/s3_to_redshift.py
index 5a32af5..f8d28a1 100644
--- a/airflow/providers/amazon/aws/operators/s3_to_redshift.py
+++ b/airflow/providers/amazon/aws/transfers/s3_to_redshift.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -15,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+
 from typing import List, Optional, Union
 
 from airflow.models import BaseOperator
@@ -23,7 +23,7 @@ from airflow.providers.postgres.hooks.postgres import PostgresHook
 from airflow.utils.decorators import apply_defaults
 
 
-class S3ToRedshiftTransferOperator(BaseOperator):
+class S3ToRedshiftOperator(BaseOperator):
     """
     Executes an COPY command to load files from s3 to Redshift
 
diff --git a/airflow/providers/amazon/aws/operators/s3_to_sftp.py b/airflow/providers/amazon/aws/transfers/s3_to_sftp.py
similarity index 100%
rename from airflow/providers/amazon/aws/operators/s3_to_sftp.py
rename to airflow/providers/amazon/aws/transfers/s3_to_sftp.py
diff --git a/airflow/providers/amazon/aws/operators/sftp_to_s3.py b/airflow/providers/amazon/aws/transfers/sftp_to_s3.py
similarity index 100%
rename from airflow/providers/amazon/aws/operators/sftp_to_s3.py
rename to airflow/providers/amazon/aws/transfers/sftp_to_s3.py
diff --git a/airflow/providers/apache/cassandra/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/cassandra/PROVIDERS_CHANGES_2020.05.20.md
index 4ae75e8..fe4163a8 100644
--- a/airflow/providers/apache/cassandra/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/apache/cassandra/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                          |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                    |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                      |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                     |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                          |
diff --git a/airflow/providers/apache/cassandra/README.md b/airflow/providers/apache/cassandra/README.md
index 484f2b7..fb2bb65 100644
--- a/airflow/providers/apache/cassandra/README.md
+++ b/airflow/providers/apache/cassandra/README.md
@@ -71,6 +71,8 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.cassandra` package.
 
 
 
+
+
 ## Sensors
 
 
@@ -105,6 +107,7 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.cassandra` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                          |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                    |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                      |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                     |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                          |
diff --git a/airflow/providers/apache/druid/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/druid/PROVIDERS_CHANGES_2020.05.20.md
index a2542e2..493a62e 100644
--- a/airflow/providers/apache/druid/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/apache/druid/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                     |
 |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                               |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                 |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                     |
diff --git a/airflow/providers/apache/druid/README.md b/airflow/providers/apache/druid/README.md
index df7b70c..f14aaab 100644
--- a/airflow/providers/apache/druid/README.md
+++ b/airflow/providers/apache/druid/README.md
@@ -32,6 +32,8 @@ Release: 2020.5.20
 - [Provider class summary](#provider-class-summary)
     - [Operators](#operators)
         - [Moved operators](#moved-operators)
+    - [Transfer operators](#transfers)
+        - [Moved transfer operators](#moved-transfers)
     - [Hooks](#hooks)
         - [Moved hooks](#moved-hooks)
 - [Releases](#releases)
@@ -92,11 +94,22 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.druid` package.
 
 ### Moved operators
 
-| Airflow 2.0 operators: `airflow.providers.apache.druid` package                                                                                                | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                       |
-|:---------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.druid.DruidOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/operators/druid.py)                               | [contrib.operators.druid_operator.DruidOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/druid_operator.py)  |
-| [operators.druid_check.DruidCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/operators/druid_check.py)              | [operators.druid_check_operator.DruidCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/druid_check_operator.py) |
-| [operators.hive_to_druid.HiveToDruidTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/operators/hive_to_druid.py) | [operators.hive_to_druid.HiveToDruidTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_druid.py)              |
+| Airflow 2.0 operators: `airflow.providers.apache.druid` package                                                                                   | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                       |
+|:--------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------|
+| [operators.druid.DruidOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/operators/druid.py)                  | [contrib.operators.druid_operator.DruidOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/druid_operator.py)  |
+| [operators.druid_check.DruidCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/operators/druid_check.py) | [operators.druid_check_operator.DruidCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/druid_check_operator.py) |
+
+
+
+
+
+
+
+### Moved transfer operators
+
+| Airflow 2.0 transfers: `airflow.providers.apache.druid` package                                                                                        | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                          |
+|:-------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------|
+| [transfers.hive_to_druid.HiveToDruidOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/transfers/hive_to_druid.py) | [operators.hive_to_druid.HiveToDruidTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_druid.py) |
 
 
 
@@ -124,6 +137,7 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.druid` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                     |
 |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                               |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                 |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                     |
diff --git a/tests/providers/oracle/operators/__init__.py b/airflow/providers/apache/druid/transfers/__init__.py
similarity index 99%
copy from tests/providers/oracle/operators/__init__.py
copy to airflow/providers/apache/druid/transfers/__init__.py
index 217e5db..13a8339 100644
--- a/tests/providers/oracle/operators/__init__.py
+++ b/airflow/providers/apache/druid/transfers/__init__.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
diff --git a/airflow/providers/apache/druid/operators/hive_to_druid.py b/airflow/providers/apache/druid/transfers/hive_to_druid.py
similarity index 99%
rename from airflow/providers/apache/druid/operators/hive_to_druid.py
rename to airflow/providers/apache/druid/transfers/hive_to_druid.py
index d021a88..e8ba9bc 100644
--- a/airflow/providers/apache/druid/operators/hive_to_druid.py
+++ b/airflow/providers/apache/druid/transfers/hive_to_druid.py
@@ -31,7 +31,7 @@ LOAD_CHECK_INTERVAL = 5
 DEFAULT_TARGET_PARTITION_SIZE = 5000000
 
 
-class HiveToDruidTransferOperator(BaseOperator):
+class HiveToDruidOperator(BaseOperator):
     """
     Moves data from Hive to Druid, note that for now the data is loaded
     into memory before being pushed to Druid, so this operator should
diff --git a/airflow/providers/apache/hdfs/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/hdfs/PROVIDERS_CHANGES_2020.05.20.md
index 2bb90a1..e6ea33a 100644
--- a/airflow/providers/apache/hdfs/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/apache/hdfs/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                        |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
diff --git a/airflow/providers/apache/hdfs/README.md b/airflow/providers/apache/hdfs/README.md
index b215487..4781ada 100644
--- a/airflow/providers/apache/hdfs/README.md
+++ b/airflow/providers/apache/hdfs/README.md
@@ -71,6 +71,8 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.hdfs` package.
 
 
 
+
+
 ## Sensors
 
 
@@ -108,6 +110,7 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.hdfs` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                        |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
diff --git a/airflow/providers/apache/hive/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/hive/PROVIDERS_CHANGES_2020.05.20.md
index 7ae7ae4..742055b 100644
--- a/airflow/providers/apache/hive/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/apache/hive/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,9 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [6350fd6eb](https://github.com/apache/airflow/commit/6350fd6ebb9958982cb3fa1d466168fc31708035) | 2020-06-08  | Don't use the term "whitelist" - language matters (#9174)                                                                                                          |
+| [10796cb7c](https://github.com/apache/airflow/commit/10796cb7ce52c8ac2f68024e531fdda779547bdf) | 2020-06-03  | Remove Hive/Hadoop/Java dependency from unit tests (#9029)                                                                                                         |
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26  | All classes in backport providers are now importable in Airflow 1.10 (#8991)                                                                                       |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
diff --git a/airflow/providers/apache/hive/README.md b/airflow/providers/apache/hive/README.md
index 8b2af2c..3e83273 100644
--- a/airflow/providers/apache/hive/README.md
+++ b/airflow/providers/apache/hive/README.md
@@ -32,6 +32,8 @@ Release: 2020.5.20
 - [Provider class summary](#provider-class-summary)
     - [Operators](#operators)
         - [Moved operators](#moved-operators)
+    - [Transfer operators](#transfers)
+        - [Moved transfer operators](#moved-transfers)
     - [Sensors](#sensors)
         - [Moved sensors](#moved-sensors)
     - [Hooks](#hooks)
@@ -66,7 +68,7 @@ For full compatibility and test status of the backport packages check
 | PIP package   | Version required   |
 |:--------------|:-------------------|
 | hmsclient     | >=0.1.0               |
-| pyhive        | >=0.6.0               |
+| pyhive[hive]  | >=0.6.0               |
 
 ## Cross provider package dependencies
 
@@ -100,16 +102,27 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.hive` package.
 
 ### Moved operators
 
-| Airflow 2.0 operators: `airflow.providers.apache.hive` package                                                                                                      | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                |
-|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.hive.HiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive.py)                                        | [operators.hive_operator.HiveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_operator.py)                              |
-| [operators.hive_stats.HiveStatsCollectionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive_stats.py)             | [operators.hive_stats_operator.HiveStatsCollectionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_stats_operator.py)   |
-| [operators.hive_to_mysql.HiveToMySqlTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive_to_mysql.py)       | [operators.hive_to_mysql.HiveToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_mysql.py)                       |
-| [operators.hive_to_samba.Hive2SambaOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive_to_samba.py)                | [operators.hive_to_samba_operator.Hive2SambaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_samba_operator.py)      |
-| [operators.mssql_to_hive.MsSqlToHiveTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/mssql_to_hive.py)       | [operators.mssql_to_hive.MsSqlToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mssql_to_hive.py)                       |
-| [operators.mysql_to_hive.MySqlToHiveTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/mysql_to_hive.py)       | [operators.mysql_to_hive.MySqlToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mysql_to_hive.py)                       |
-| [operators.s3_to_hive.S3ToHiveTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/s3_to_hive.py)                | [operators.s3_to_hive_operator.S3ToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/s3_to_hive_operator.py)              |
-| [operators.vertica_to_hive.VerticaToHiveTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/vertica_to_hive.py) | [contrib.operators.vertica_to_hive.VerticaToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/vertica_to_hive.py) |
+| Airflow 2.0 operators: `airflow.providers.apache.hive` package                                                                                          | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                              |
+|:--------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [operators.hive.HiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive.py)                            | [operators.hive_operator.HiveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_operator.py)                            |
+| [operators.hive_stats.HiveStatsCollectionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive_stats.py) | [operators.hive_stats_operator.HiveStatsCollectionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_stats_operator.py) |
+
+
+
+
+
+
+
+### Moved transfer operators
+
+| Airflow 2.0 transfers: `airflow.providers.apache.hive` package                                                                                              | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                |
+|:------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [transfers.hive_to_mysql.HiveToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/hive_to_mysql.py)       | [operators.hive_to_mysql.HiveToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_mysql.py)                       |
+| [transfers.hive_to_samba.HiveToSambaOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/hive_to_samba.py)       | [operators.hive_to_samba_operator.HiveToSambaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_samba_operator.py)     |
+| [transfers.mssql_to_hive.MsSqlToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/mssql_to_hive.py)       | [operators.mssql_to_hive.MsSqlToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mssql_to_hive.py)                       |
+| [transfers.mysql_to_hive.MySqlToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/mysql_to_hive.py)       | [operators.mysql_to_hive.MySqlToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mysql_to_hive.py)                       |
+| [transfers.s3_to_hive.S3ToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/s3_to_hive.py)                | [operators.s3_to_hive_operator.S3ToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/s3_to_hive_operator.py)              |
+| [transfers.vertica_to_hive.VerticaToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/vertica_to_hive.py) | [contrib.operators.vertica_to_hive.VerticaToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/vertica_to_hive.py) |
 
 
 
@@ -151,6 +164,9 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.hive` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [6350fd6eb](https://github.com/apache/airflow/commit/6350fd6ebb9958982cb3fa1d466168fc31708035) | 2020-06-08  | Don't use the term "whitelist" - language matters (#9174)                                                                                                          |
+| [10796cb7c](https://github.com/apache/airflow/commit/10796cb7ce52c8ac2f68024e531fdda779547bdf) | 2020-06-03  | Remove Hive/Hadoop/Java dependency from unit tests (#9029)                                                                                                         |
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26  | All classes in backport providers are now importable in Airflow 1.10 (#8991)                                                                                       |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
diff --git a/tests/providers/oracle/operators/__init__.py b/airflow/providers/apache/hive/transfers/__init__.py
similarity index 99%
copy from tests/providers/oracle/operators/__init__.py
copy to airflow/providers/apache/hive/transfers/__init__.py
index 217e5db..13a8339 100644
--- a/tests/providers/oracle/operators/__init__.py
+++ b/airflow/providers/apache/hive/transfers/__init__.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
diff --git a/airflow/providers/apache/hive/operators/hive_to_mysql.py b/airflow/providers/apache/hive/transfers/hive_to_mysql.py
similarity index 99%
rename from airflow/providers/apache/hive/operators/hive_to_mysql.py
rename to airflow/providers/apache/hive/transfers/hive_to_mysql.py
index d46c800..524f9b1 100644
--- a/airflow/providers/apache/hive/operators/hive_to_mysql.py
+++ b/airflow/providers/apache/hive/transfers/hive_to_mysql.py
@@ -29,7 +29,7 @@ from airflow.utils.decorators import apply_defaults
 from airflow.utils.operator_helpers import context_to_airflow_vars
 
 
-class HiveToMySqlTransferOperator(BaseOperator):
+class HiveToMySqlOperator(BaseOperator):
     """
     Moves data from Hive to MySQL, note that for now the data is loaded
     into memory before being pushed to MySQL, so this operator should
diff --git a/airflow/providers/apache/hive/operators/hive_to_samba.py b/airflow/providers/apache/hive/transfers/hive_to_samba.py
similarity index 98%
rename from airflow/providers/apache/hive/operators/hive_to_samba.py
rename to airflow/providers/apache/hive/transfers/hive_to_samba.py
index 4e8f714..4426da1 100644
--- a/airflow/providers/apache/hive/operators/hive_to_samba.py
+++ b/airflow/providers/apache/hive/transfers/hive_to_samba.py
@@ -29,7 +29,7 @@ from airflow.utils.decorators import apply_defaults
 from airflow.utils.operator_helpers import context_to_airflow_vars
 
 
-class Hive2SambaOperator(BaseOperator):
+class HiveToSambaOperator(BaseOperator):
     """
     Executes hql code in a specific Hive database and loads the
     results of the query as a csv to a Samba location.
diff --git a/airflow/providers/apache/hive/operators/mssql_to_hive.py b/airflow/providers/apache/hive/transfers/mssql_to_hive.py
similarity index 99%
rename from airflow/providers/apache/hive/operators/mssql_to_hive.py
rename to airflow/providers/apache/hive/transfers/mssql_to_hive.py
index fba48db..7af6ec6 100644
--- a/airflow/providers/apache/hive/operators/mssql_to_hive.py
+++ b/airflow/providers/apache/hive/transfers/mssql_to_hive.py
@@ -33,7 +33,7 @@ from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook
 from airflow.utils.decorators import apply_defaults
 
 
-class MsSqlToHiveTransferOperator(BaseOperator):
+class MsSqlToHiveOperator(BaseOperator):
     """
     Moves data from Microsoft SQL Server to Hive. The operator runs
     your query against Microsoft SQL Server, stores the file locally
diff --git a/airflow/providers/apache/hive/operators/mysql_to_hive.py b/airflow/providers/apache/hive/transfers/mysql_to_hive.py
similarity index 99%
rename from airflow/providers/apache/hive/operators/mysql_to_hive.py
rename to airflow/providers/apache/hive/transfers/mysql_to_hive.py
index 1d44611..083c2a3 100644
--- a/airflow/providers/apache/hive/operators/mysql_to_hive.py
+++ b/airflow/providers/apache/hive/transfers/mysql_to_hive.py
@@ -33,7 +33,7 @@ from airflow.providers.mysql.hooks.mysql import MySqlHook
 from airflow.utils.decorators import apply_defaults
 
 
-class MySqlToHiveTransferOperator(BaseOperator):
+class MySqlToHiveOperator(BaseOperator):
     """
     Moves data from MySql to Hive. The operator runs your query against
     MySQL, stores the file locally before loading it into a Hive table.
diff --git a/airflow/providers/apache/hive/operators/s3_to_hive.py b/airflow/providers/apache/hive/transfers/s3_to_hive.py
similarity index 99%
rename from airflow/providers/apache/hive/operators/s3_to_hive.py
rename to airflow/providers/apache/hive/transfers/s3_to_hive.py
index 2e35321..032ff5b 100644
--- a/airflow/providers/apache/hive/operators/s3_to_hive.py
+++ b/airflow/providers/apache/hive/transfers/s3_to_hive.py
@@ -35,7 +35,7 @@ from airflow.utils.compression import uncompress_file
 from airflow.utils.decorators import apply_defaults
 
 
-class S3ToHiveTransferOperator(BaseOperator):  # pylint: disable=too-many-instance-attributes
+class S3ToHiveOperator(BaseOperator):  # pylint: disable=too-many-instance-attributes
     """
     Moves data from S3 to Hive. The operator downloads a file from S3,
     stores the file locally before loading it into a Hive table.
diff --git a/airflow/providers/apache/hive/operators/vertica_to_hive.py b/airflow/providers/apache/hive/transfers/vertica_to_hive.py
similarity index 99%
rename from airflow/providers/apache/hive/operators/vertica_to_hive.py
rename to airflow/providers/apache/hive/transfers/vertica_to_hive.py
index 5bd7314..5d1d9be 100644
--- a/airflow/providers/apache/hive/operators/vertica_to_hive.py
+++ b/airflow/providers/apache/hive/transfers/vertica_to_hive.py
@@ -31,7 +31,7 @@ from airflow.providers.vertica.hooks.vertica import VerticaHook
 from airflow.utils.decorators import apply_defaults
 
 
-class VerticaToHiveTransferOperator(BaseOperator):
+class VerticaToHiveOperator(BaseOperator):
     """
     Moves data from Vertica to Hive. The operator runs
     your query against Vertica, stores the file locally
diff --git a/airflow/providers/apache/livy/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/livy/PROVIDERS_CHANGES_2020.05.20.md
index eef2cfd..384646c 100644
--- a/airflow/providers/apache/livy/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/apache/livy/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
diff --git a/airflow/providers/apache/livy/README.md b/airflow/providers/apache/livy/README.md
index cf0c3d5..96ec762 100644
--- a/airflow/providers/apache/livy/README.md
+++ b/airflow/providers/apache/livy/README.md
@@ -94,6 +94,8 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.livy` package.
 
 
 
+
+
 ## Sensors
 
 
@@ -127,6 +129,7 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.livy` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
diff --git a/airflow/providers/apache/pig/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/pig/PROVIDERS_CHANGES_2020.05.20.md
index 7fae213..dbc4872 100644
--- a/airflow/providers/apache/pig/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/apache/pig/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                  |
 |:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                            |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                              |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                             |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                  |
diff --git a/airflow/providers/apache/pig/README.md b/airflow/providers/apache/pig/README.md
index 03a1d66..7f81852 100644
--- a/airflow/providers/apache/pig/README.md
+++ b/airflow/providers/apache/pig/README.md
@@ -77,6 +77,8 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.pig` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -98,6 +100,7 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.pig` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                  |
 |:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                            |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                              |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                             |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                  |
diff --git a/airflow/providers/apache/pinot/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/pinot/PROVIDERS_CHANGES_2020.05.20.md
index c1f59aa..a6b8e6e 100644
--- a/airflow/providers/apache/pinot/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/apache/pinot/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                        |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
diff --git a/airflow/providers/apache/pinot/README.md b/airflow/providers/apache/pinot/README.md
index f3f08e0..d342959 100644
--- a/airflow/providers/apache/pinot/README.md
+++ b/airflow/providers/apache/pinot/README.md
@@ -70,6 +70,8 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.pinot` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -92,6 +94,7 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.pinot` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                        |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
diff --git a/airflow/providers/apache/spark/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/spark/PROVIDERS_CHANGES_2020.05.20.md
index 8c025a2..dd49fd3 100644
--- a/airflow/providers/apache/spark/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/apache/spark/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                          |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                    |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                      |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                     |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                          |
diff --git a/airflow/providers/apache/spark/README.md b/airflow/providers/apache/spark/README.md
index eac1eaf..d9da28b 100644
--- a/airflow/providers/apache/spark/README.md
+++ b/airflow/providers/apache/spark/README.md
@@ -86,6 +86,8 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.spark` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -109,6 +111,7 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.spark` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                          |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                    |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                      |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                     |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                          |
diff --git a/airflow/providers/apache/sqoop/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/apache/sqoop/PROVIDERS_CHANGES_2020.05.20.md
index 26c70b0..dd3d4fd 100644
--- a/airflow/providers/apache/sqoop/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/apache/sqoop/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                        |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
diff --git a/airflow/providers/apache/sqoop/README.md b/airflow/providers/apache/sqoop/README.md
index d1d8160..4e90245 100644
--- a/airflow/providers/apache/sqoop/README.md
+++ b/airflow/providers/apache/sqoop/README.md
@@ -77,6 +77,8 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.sqoop` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -98,6 +100,7 @@ All classes in Airflow 2.0 are in `airflow.providers.apache.sqoop` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                        |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                  |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                    |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                   |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                        |
diff --git a/airflow/providers/celery/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/celery/PROVIDERS_CHANGES_2020.05.20.md
index dd18437..9ac9a7b 100644
--- a/airflow/providers/celery/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/celery/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/celery/README.md b/airflow/providers/celery/README.md
index 0520fa7..f77c536 100644
--- a/airflow/providers/celery/README.md
+++ b/airflow/providers/celery/README.md
@@ -71,6 +71,8 @@ All classes in Airflow 2.0 are in `airflow.providers.celery` package.
 
 
 
+
+
 ## Sensors
 
 
@@ -93,6 +95,7 @@ All classes in Airflow 2.0 are in `airflow.providers.celery` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/cloudant/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/cloudant/PROVIDERS_CHANGES_2020.05.20.md
index e45e43f..06713f6 100644
--- a/airflow/providers/cloudant/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/cloudant/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                 |
diff --git a/airflow/providers/cloudant/README.md b/airflow/providers/cloudant/README.md
index 0962864..9cc3ae1 100644
--- a/airflow/providers/cloudant/README.md
+++ b/airflow/providers/cloudant/README.md
@@ -70,6 +70,8 @@ All classes in Airflow 2.0 are in `airflow.providers.cloudant` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -91,6 +93,7 @@ All classes in Airflow 2.0 are in `airflow.providers.cloudant` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                 |
diff --git a/airflow/providers/databricks/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/databricks/PROVIDERS_CHANGES_2020.05.20.md
index a15a8d7..8ec7de2 100644
--- a/airflow/providers/databricks/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/databricks/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [f1073381e](https://github.com/apache/airflow/commit/f1073381ed764a218b2502d15ca28a5b326f9f2d) | 2020-05-22  | Add support for spark python and submit tasks in Databricks operator(#8846)                                                                                        |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
diff --git a/airflow/providers/databricks/README.md b/airflow/providers/databricks/README.md
index 9d2cf93..678d141 100644
--- a/airflow/providers/databricks/README.md
+++ b/airflow/providers/databricks/README.md
@@ -85,6 +85,8 @@ All classes in Airflow 2.0 are in `airflow.providers.databricks` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -106,6 +108,7 @@ All classes in Airflow 2.0 are in `airflow.providers.databricks` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [f1073381e](https://github.com/apache/airflow/commit/f1073381ed764a218b2502d15ca28a5b326f9f2d) | 2020-05-22  | Add support for spark python and submit tasks in Databricks operator(#8846)                                                                                        |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
diff --git a/airflow/providers/datadog/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/datadog/PROVIDERS_CHANGES_2020.05.20.md
index a9c72ea..9c1eac6 100644
--- a/airflow/providers/datadog/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/datadog/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                  |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                            |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)              |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)             |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                  |
diff --git a/airflow/providers/datadog/README.md b/airflow/providers/datadog/README.md
index b56c7df..d6ca490 100644
--- a/airflow/providers/datadog/README.md
+++ b/airflow/providers/datadog/README.md
@@ -71,6 +71,8 @@ All classes in Airflow 2.0 are in `airflow.providers.datadog` package.
 
 
 
+
+
 ## Sensors
 
 
@@ -104,6 +106,7 @@ All classes in Airflow 2.0 are in `airflow.providers.datadog` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                  |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                            |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)              |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)             |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                  |
diff --git a/airflow/providers/dingding/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/dingding/PROVIDERS_CHANGES_2020.05.20.md
index 20c9610..58def40 100644
--- a/airflow/providers/dingding/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/dingding/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
diff --git a/airflow/providers/dingding/README.md b/airflow/providers/dingding/README.md
index fcec208..9db9b09 100644
--- a/airflow/providers/dingding/README.md
+++ b/airflow/providers/dingding/README.md
@@ -93,6 +93,8 @@ All classes in Airflow 2.0 are in `airflow.providers.dingding` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -114,6 +116,7 @@ All classes in Airflow 2.0 are in `airflow.providers.dingding` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
diff --git a/airflow/providers/discord/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/discord/PROVIDERS_CHANGES_2020.05.20.md
index 8100422..b32b8c4 100644
--- a/airflow/providers/discord/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/discord/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                    |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                              |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)               |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                    |
diff --git a/airflow/providers/discord/README.md b/airflow/providers/discord/README.md
index d5369d2..b64b7d5 100644
--- a/airflow/providers/discord/README.md
+++ b/airflow/providers/discord/README.md
@@ -93,6 +93,8 @@ All classes in Airflow 2.0 are in `airflow.providers.discord` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -114,6 +116,7 @@ All classes in Airflow 2.0 are in `airflow.providers.discord` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                    |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                              |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)               |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                    |
diff --git a/airflow/providers/docker/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/docker/PROVIDERS_CHANGES_2020.05.20.md
index f321a1a..30de358 100644
--- a/airflow/providers/docker/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/docker/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,9 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [4a74cf1a3](https://github.com/apache/airflow/commit/4a74cf1a34cf20e49383f27e7cdc3ae80b9b0cde) | 2020-06-08  | Fix xcom in DockerOperator when auto_remove is used (#9173)                                                                                                        |
+| [b4b84a193](https://github.com/apache/airflow/commit/b4b84a1933d055a2803b80b990482a7257a203ff) | 2020-06-07  | Add kernel capabilities in DockerOperator(#9142)                                                                                                                   |
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
diff --git a/airflow/providers/docker/README.md b/airflow/providers/docker/README.md
index 8d28424..c24bfb2 100644
--- a/airflow/providers/docker/README.md
+++ b/airflow/providers/docker/README.md
@@ -85,6 +85,8 @@ All classes in Airflow 2.0 are in `airflow.providers.docker` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -106,6 +108,9 @@ All classes in Airflow 2.0 are in `airflow.providers.docker` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [4a74cf1a3](https://github.com/apache/airflow/commit/4a74cf1a34cf20e49383f27e7cdc3ae80b9b0cde) | 2020-06-08  | Fix xcom in DockerOperator when auto_remove is used (#9173)                                                                                                        |
+| [b4b84a193](https://github.com/apache/airflow/commit/b4b84a1933d055a2803b80b990482a7257a203ff) | 2020-06-07  | Add kernel capabilities in DockerOperator(#9142)                                                                                                                   |
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
diff --git a/airflow/providers/elasticsearch/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/elasticsearch/PROVIDERS_CHANGES_2020.05.20.md
index e4da8b0..04b5c89 100644
--- a/airflow/providers/elasticsearch/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/elasticsearch/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/elasticsearch/README.md b/airflow/providers/elasticsearch/README.md
index 5e06f3b..83bfa30 100644
--- a/airflow/providers/elasticsearch/README.md
+++ b/airflow/providers/elasticsearch/README.md
@@ -63,6 +63,8 @@ All classes in Airflow 2.0 are in `airflow.providers.elasticsearch` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -84,6 +86,7 @@ All classes in Airflow 2.0 are in `airflow.providers.elasticsearch` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/email/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/email/PROVIDERS_CHANGES_2020.05.20.md
index f0e3f1c..7331213 100644
--- a/airflow/providers/email/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/email/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                    |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                              |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)               |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                    |
diff --git a/airflow/providers/email/README.md b/airflow/providers/email/README.md
index 5738044..b89030b 100644
--- a/airflow/providers/email/README.md
+++ b/airflow/providers/email/README.md
@@ -79,12 +79,15 @@ All classes in Airflow 2.0 are in `airflow.providers.email` package.
 
 
 
+
+
 ## Releases
 
 ### Release 2020.5.20
 
 | Commit                                                                                         | Committed   | Subject                                                                    |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                              |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)               |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                    |
diff --git a/airflow/providers/exasol/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/exasol/PROVIDERS_CHANGES_2020.05.20.md
index b4658cc..3da1352 100644
--- a/airflow/providers/exasol/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/exasol/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                          |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
diff --git a/airflow/providers/exasol/README.md b/airflow/providers/exasol/README.md
index 608bd05..31e00bd 100644
--- a/airflow/providers/exasol/README.md
+++ b/airflow/providers/exasol/README.md
@@ -84,6 +84,8 @@ All classes in Airflow 2.0 are in `airflow.providers.exasol` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -105,6 +107,7 @@ All classes in Airflow 2.0 are in `airflow.providers.exasol` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                          |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
diff --git a/airflow/providers/facebook/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/facebook/PROVIDERS_CHANGES_2020.05.20.md
index 2960030..385aaa8 100644
--- a/airflow/providers/facebook/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/facebook/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/facebook/README.md b/airflow/providers/facebook/README.md
index 40c2bb6..26afa6b 100644
--- a/airflow/providers/facebook/README.md
+++ b/airflow/providers/facebook/README.md
@@ -70,6 +70,8 @@ All classes in Airflow 2.0 are in `airflow.providers.facebook` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -91,6 +93,7 @@ All classes in Airflow 2.0 are in `airflow.providers.facebook` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/ftp/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/ftp/PROVIDERS_CHANGES_2020.05.20.md
index d26e9e9..06b6365 100644
--- a/airflow/providers/ftp/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/ftp/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                    |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                              |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)               |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                    |
diff --git a/airflow/providers/ftp/README.md b/airflow/providers/ftp/README.md
index 3645623..b983ddd 100644
--- a/airflow/providers/ftp/README.md
+++ b/airflow/providers/ftp/README.md
@@ -64,6 +64,8 @@ All classes in Airflow 2.0 are in `airflow.providers.ftp` package.
 
 
 
+
+
 ## Sensors
 
 
@@ -99,6 +101,7 @@ All classes in Airflow 2.0 are in `airflow.providers.ftp` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                    |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                              |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)               |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                    |
diff --git a/airflow/providers/google/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/google/PROVIDERS_CHANGES_2020.05.20.md
index 4c0012b..15320b1 100644
--- a/airflow/providers/google/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/google/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,26 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [639972d99](https://github.com/apache/airflow/commit/639972d995d848b16a3f283576efdbde28b8fdef) | 2020-06-16  | Add support for latest Apache Beam SDK in Dataflow operators (#9323)                                                                                               |
+| [1459970b3](https://github.com/apache/airflow/commit/1459970b3b9780e139ce029ae889fd8f69a37bc7) | 2020-06-15  | Rename CloudBuildCreateBuildOperator to CloudBuildCreateOperator (#9314)                                                                                           |
+| [431ea3291](https://github.com/apache/airflow/commit/431ea3291c9bf236bccdf8446d753c630ada2b25) | 2020-06-15  | Resolve upstream tasks when template field is XComArg (#8805)                                                                                                      |
+| [aee6ab94e](https://github.com/apache/airflow/commit/aee6ab94eb956347ad560cfe2673bc6011074513) | 2020-06-15  | Wait for pipeline state in Data Fusion operators (#8954)                                                                                                           |
+| [fb1c8b83d](https://github.com/apache/airflow/commit/fb1c8b83d400506a16c10e3d6623a913847e5cf5) | 2020-06-10  | Add test for BQ operations using location (#9206)                                                                                                                  |
+| [a26afbfa5](https://github.com/apache/airflow/commit/a26afbfa51b0981ae742c6171938b57a80aace2b) | 2020-06-10  | Make generated job_id more informative in BQ insert_job (#9203)                                                                                                    |
+| [c41192fa1](https://github.com/apache/airflow/commit/c41192fa1fc5c2b3e7b8414c59f656ab67bbef28) | 2020-06-10  | Upgrade pendulum to latest major version ~2.0 (#9184)                                                                                                              |
+| [b1c8c5ed5](https://github.com/apache/airflow/commit/b1c8c5ed5bba3a852a5446f3fdd1131b4b22637a) | 2020-06-09  | Allows using private endpoints in GKEStartPodOperator (#9169)                                                                                                      |
+| [5918efc86](https://github.com/apache/airflow/commit/5918efc86a2217caa641a6ada289eee1c21407f8) | 2020-06-05  | Add 3.8 to the test matrices (#8836)                                                                                                                               |
+| [9bcdadaf7](https://github.com/apache/airflow/commit/9bcdadaf7e6e73d3d2246fbbd32a9f30a1b43ca9) | 2020-06-05  | Add &#39;main&#39; param to template_fields in DataprocSubmitPySparkJobOperator (#9154)                                                                                    |
+| [f56811dff](https://github.com/apache/airflow/commit/f56811dff3af66cbceb0418f11e00507bab58674) | 2020-06-05  | [AIRFLOW-6290] Create guide for GKE operators (#8883)                                                                                                              |
+| [76962867b](https://github.com/apache/airflow/commit/76962867b5877cf5ffd1b6004453f783c0732ab1) | 2020-06-04  | Fix sql_to_gcs hook gzip of schema_file (#9140)                                                                                                                    |
+| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02  | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106)                                                                                     |
+| [789852546](https://github.com/apache/airflow/commit/78985254683c359f7444a7eb5f6ee4967c37d61f) | 2020-06-01  | Add BigQueryInsertJobOperator (#8868)                                                                                                                              |
+| [29eb68b90](https://github.com/apache/airflow/commit/29eb68b90b5df692ac322be0939af5e7fa9b71bc) | 2020-05-31  | Create guide for Dataproc Operators (#9037)                                                                                                                        |
+| [886afaf62](https://github.com/apache/airflow/commit/886afaf622602aa97f925bc3ee4fc27aa995c445) | 2020-05-29  | Add example dag and system test for LocalFilesystemToGCSOperator (#9043)                                                                                           |
+| [a779c4dfc](https://github.com/apache/airflow/commit/a779c4dfc278d6ece480b012764ea5814dc78dee) | 2020-05-29  | add separate example dags and system tests for GCSToGoogleSheetsOperator (#9066)                                                                                   |
+| [ada26be23](https://github.com/apache/airflow/commit/ada26be23c913796c2ae77b91cb7d113dfec75a6) | 2020-05-29  | Add correct description for dst param in LocalFilesystemToGCSOperator (#9055)                                                                                      |
+| [81b2761b8](https://github.com/apache/airflow/commit/81b2761b86dae2d21a6ee859d49c08d46fea6def) | 2020-05-29  | add example dag and system test for GoogleSheetsToGCSOperator (#9056)                                                                                              |
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [3994030ea](https://github.com/apache/airflow/commit/3994030ea678727daaf9c2bfed0ca94a096f8d2a) | 2020-05-26  | Refactor BigQuery operators (#8858)                                                                                                                                |
 | [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26  | All classes in backport providers are now importable in Airflow 1.10 (#8991)                                                                                       |
diff --git a/airflow/providers/google/README.md b/airflow/providers/google/README.md
index ea08844..c313861 100644
--- a/airflow/providers/google/README.md
+++ b/airflow/providers/google/README.md
@@ -33,6 +33,9 @@ Release: 2020.5.20
     - [Operators](#operators)
         - [New operators](#new-operators)
         - [Moved operators](#moved-operators)
+    - [Transfer operators](#transfers)
+        - [New transfer operators](#new-transfers)
+        - [Moved transfer operators](#moved-transfers)
     - [Sensors](#sensors)
         - [New sensors](#new-sensors)
         - [Moved sensors](#moved-sensors)
@@ -100,12 +103,11 @@ For full compatibility and test status of the backport packages check
 | google-cloud-speech                | &gt;=0.36.3           |
 | google-cloud-storage               | &gt;=1.16             |
 | google-cloud-tasks                 | &gt;=1.2.1            |
-| google-cloud-texttospeech          | &gt;=0.4.0            |
+| google-cloud-texttospeech          | &gt;=0.4.0,&lt;2         |
 | google-cloud-translate             | &gt;=1.5.0            |
 | google-cloud-videointelligence     | &gt;=1.7.0            |
 | google-cloud-vision                | &gt;=0.35.2           |
 | grpcio-gcp                         | &gt;=0.2.2            |
-| httplib2                           | ~=0.15             |
 | pandas-gbq                         |                    |
 
 ## Cross provider package dependencies
@@ -145,7 +147,6 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package.
 | New Airflow 2.0 operators: `airflow.providers.google` package                                                                                                                                                                |
 |:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
 | [ads.operators.ads.GoogleAdsListAccountsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/ads/operators/ads.py)                                                                               |
-| [ads.operators.ads.GoogleAdsToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/ads/operators/ads.py)                                                                                      |
 | [cloud.operators.automl.AutoMLBatchPredictOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                        |
 | [cloud.operators.automl.AutoMLCreateDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                       |
 | [cloud.operators.automl.AutoMLDeleteDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                       |
@@ -159,6 +160,7 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package.
 | [cloud.operators.automl.AutoMLTablesListTableSpecsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                |
 | [cloud.operators.automl.AutoMLTablesUpdateDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                 |
 | [cloud.operators.automl.AutoMLTrainModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py)                                                                          |
+| [cloud.operators.bigquery.BigQueryInsertJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                     |
 | [cloud.operators.bigquery_dts.BigQueryCreateDataTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery_dts.py)                                                    |
 | [cloud.operators.bigquery_dts.BigQueryDataTransferServiceStartTransferRunsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery_dts.py)                                  |
 | [cloud.operators.bigquery_dts.BigQueryDeleteDataTransferConfigOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery_dts.py)                                              |
@@ -173,6 +175,8 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package.
 | [cloud.operators.cloud_memorystore.CloudMemorystoreListInstancesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                                       |
 | [cloud.operators.cloud_memorystore.CloudMemorystoreScaleInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                                       |
 | [cloud.operators.cloud_memorystore.CloudMemorystoreUpdateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py)                                      |
+| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceGCSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)          |
+| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceS3ToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)           |
 | [cloud.operators.datacatalog.CloudDataCatalogCreateEntryGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                |
 | [cloud.operators.datacatalog.CloudDataCatalogCreateEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                     |
 | [cloud.operators.datacatalog.CloudDataCatalogCreateTagOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py)                                                       |
@@ -206,12 +210,10 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package.
 | [cloud.operators.datafusion.CloudDataFusionUpdateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py)                                                     |
 | [cloud.operators.dataproc.DataprocSubmitJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py)                                                                     |
 | [cloud.operators.dataproc.DataprocUpdateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py)                                                                 |
-| [cloud.operators.facebook_ads_to_gcs.FacebookAdsReportToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/facebook_ads_to_gcs.py)                                          |
 | [cloud.operators.functions.CloudFunctionInvokeFunctionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/functions.py)                                                         |
 | [cloud.operators.gcs.GCSDeleteBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py)                                                                                 |
 | [cloud.operators.gcs.GCSFileTransformOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py)                                                                                |
-| [cloud.operators.gcs_to_gcs.GCSSynchronizeBucketsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs_to_gcs.py)                                                             |
-| [cloud.operators.gcs_to_sftp.GCSToSFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs_to_sftp.py)                                                                       |
+| [cloud.operators.gcs.GCSSynchronizeBucketsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py)                                                                           |
 | [cloud.operators.life_sciences.LifeSciencesRunPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/life_sciences.py)                                                     |
 | [cloud.operators.mlengine.MLEngineCreateModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                                   |
 | [cloud.operators.mlengine.MLEngineCreateVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                                 |
@@ -221,10 +223,7 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package.
 | [cloud.operators.mlengine.MLEngineListVersionsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                                  |
 | [cloud.operators.mlengine.MLEngineSetDefaultVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                             |
 | [cloud.operators.mlengine.MLEngineTrainingCancelJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                             |
-| [cloud.operators.presto_to_gcs.PrestoToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/presto_to_gcs.py)                                                                 |
 | [cloud.operators.pubsub.PubSubPullOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py)                                                                                |
-| [cloud.operators.sftp_to_gcs.SFTPToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/sftp_to_gcs.py)                                                                       |
-| [cloud.operators.sheets_to_gcs.GoogleSheetsToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/sheets_to_gcs.py)                                                           |
 | [cloud.operators.stackdriver.StackdriverDeleteAlertOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py)                                                          |
 | [cloud.operators.stackdriver.StackdriverDeleteNotificationChannelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py)                                            |
 | [cloud.operators.stackdriver.StackdriverDisableAlertPoliciesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py)                                                 |
@@ -248,6 +247,7 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package.
 | [cloud.operators.tasks.CloudTasksTaskGetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py)                                                                           |
 | [cloud.operators.tasks.CloudTasksTaskRunOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py)                                                                           |
 | [cloud.operators.tasks.CloudTasksTasksListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py)                                                                         |
+| [cloud.operators.vision.CloudVisionAddProductToProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                         |
 | [firebase.operators.firestore.CloudFirestoreExportDatabaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/firebase/operators/firestore.py)                                                  |
 | [marketing_platform.operators.analytics.GoogleAnalyticsDataImportUploadOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py)                           |
 | [marketing_platform.operators.analytics.GoogleAnalyticsDeletePreviousDataUploadsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py)                  |
@@ -271,7 +271,6 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package.
 | [marketing_platform.operators.display_video.GoogleDisplayVideo360UploadLineItemsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py)              |
 | [marketing_platform.operators.search_ads.GoogleSearchAdsDownloadReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/search_ads.py)                           |
 | [marketing_platform.operators.search_ads.GoogleSearchAdsInsertReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/search_ads.py)                             |
-| [suite.operators.gcs_to_sheets.GCSToGoogleSheetsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/operators/gcs_to_sheets.py)                                                           |
 | [suite.operators.sheets.GoogleSheetsCreateSpreadsheetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/operators/sheets.py)                                                             |
 
 
@@ -280,14 +279,27 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package.
 
 | Airflow 2.0 operators: `airflow.providers.google` package                                                                                                                                                                  | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                                                                   |
 |:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.operators.adls_to_gcs.ADLSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/adls_to_gcs.py)                                                                     | [contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/adls_to_gcs.py)                                                                 |
+| [cloud.operators.bigquery.BigQueryCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                       | [contrib.operators.bigquery_check_operator.BigQueryCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_check_operator.py)                                                    |
+| [cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                          | [contrib.operators.bigquery_operator.BigQueryCreateEmptyDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                   |
+| [cloud.operators.bigquery.BigQueryCreateEmptyTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                            | [contrib.operators.bigquery_operator.BigQueryCreateEmptyTableOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                     |
+| [cloud.operators.bigquery.BigQueryCreateExternalTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                         | [contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                  |
+| [cloud.operators.bigquery.BigQueryDeleteDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                               | [contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                        |
+| [cloud.operators.bigquery.BigQueryDeleteTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                 | [contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_table_delete_operator.py)                                |
+| [cloud.operators.bigquery.BigQueryExecuteQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                | [contrib.operators.bigquery_operator.BigQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                                     |
+| [cloud.operators.bigquery.BigQueryGetDataOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                     | [contrib.operators.bigquery_get_data.BigQueryGetDataOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_get_data.py)                                                              |
+| [cloud.operators.bigquery.BigQueryGetDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                  | [contrib.operators.bigquery_operator.BigQueryGetDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                           |
+| [cloud.operators.bigquery.BigQueryGetDatasetTablesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                            | [contrib.operators.bigquery_operator.BigQueryGetDatasetTablesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                     |
+| [cloud.operators.bigquery.BigQueryIntervalCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                               | [contrib.operators.bigquery_check_operator.BigQueryIntervalCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_check_operator.py)                                            |
+| [cloud.operators.bigquery.BigQueryPatchDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                | [contrib.operators.bigquery_operator.BigQueryPatchDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                         |
+| [cloud.operators.bigquery.BigQueryUpdateDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                               | [contrib.operators.bigquery_operator.BigQueryUpdateDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                        |
+| [cloud.operators.bigquery.BigQueryUpsertTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                 | [contrib.operators.bigquery_operator.BigQueryUpsertTableOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py)                                                          |
+| [cloud.operators.bigquery.BigQueryValueCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py)                                                                  | [contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_check_operator.py)                                               |
 | [cloud.operators.bigtable.BigtableCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py)                                                              | [contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py)                                               |
 | [cloud.operators.bigtable.BigtableCreateTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py)                                                                 | [contrib.operators.gcp_bigtable_operator.BigtableTableCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py)                                                  |
 | [cloud.operators.bigtable.BigtableDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py)                                                              | [contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py)                                               |
 | [cloud.operators.bigtable.BigtableDeleteTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py)                                                                 | [contrib.operators.gcp_bigtable_operator.BigtableTableDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py)                                                  |
 | [cloud.operators.bigtable.BigtableUpdateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py)                                                               | [contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py)                                                |
-| [cloud.operators.cassandra_to_gcs.CassandraToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cassandra_to_gcs.py)                                                      | [contrib.operators.cassandra_to_gcs.CassandraToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/cassandra_to_gcs.py)                                                  |
-| [cloud.operators.cloud_build.CloudBuildCreateBuildOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_build.py)                                                         | [contrib.operators.gcp_cloud_build_operator.CloudBuildCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_cloud_build_operator.py)                                          |
+| [cloud.operators.cloud_build.CloudBuildCreateBuildOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_build.py)                                                         | [contrib.operators.gcp_cloud_build_operator.CloudBuildCreateBuildOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_cloud_build_operator.py)                                          |
 | [cloud.operators.cloud_sql.CloudSQLBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py)                                                                      | [contrib.operators.gcp_sql_operator.CloudSqlBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py)                                                                   |
 | [cloud.operators.cloud_sql.CloudSQLCreateInstanceDatabaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py)                                                    | [contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py)                                                 |
 | [cloud.operators.cloud_sql.CloudSQLCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py)                                                            | [contrib.operators.gcp_sql_operator.CloudSqlInstanceCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py)                                                         |
@@ -301,12 +313,10 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package.
 | [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceCancelOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationCancelOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                                    |
 | [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceCreateJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)       | [contrib.operators.gcp_transfer_operator.GcpTransferServiceJobCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                                          |
 | [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceDeleteJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)       | [contrib.operators.gcp_transfer_operator.GcpTransferServiceJobDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                                          |
-| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceGCSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)        | [contrib.operators.gcp_transfer_operator.GoogleCloudStorageToGoogleCloudStorageTransferOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                       |
 | [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceGetOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)    | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationGetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                                       |
 | [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceListOperationsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)  | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationsListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                                     |
 | [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServicePauseOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)  | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationPauseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                                     |
 | [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceResumeOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationResumeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                                    |
-| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceS3ToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)         | [contrib.operators.s3_to_gcs_transfer_operator.CloudDataTransferServiceS3ToGCSOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_to_gcs_transfer_operator.py)                          |
 | [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceUpdateJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py)       | [contrib.operators.gcp_transfer_operator.GcpTransferServiceJobUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py)                                          |
 | [cloud.operators.compute.ComputeEngineBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py)                                                                     | [contrib.operators.gcp_compute_operator.GceBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py)                                                                |
 | [cloud.operators.compute.ComputeEngineCopyInstanceTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py)                                                     | [contrib.operators.gcp_compute_operator.GceInstanceTemplateCopyOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py)                                                |
@@ -368,29 +378,22 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package.
 | [cloud.operators.gcs.GCSDeleteObjectsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py)                                                                              | [contrib.operators.gcs_delete_operator.GoogleCloudStorageDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_delete_operator.py)                                                 |
 | [cloud.operators.gcs.GCSListObjectsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py)                                                                                | [contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_list_operator.py)                                                       |
 | [cloud.operators.gcs.GCSObjectCreateAclEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py)                                                                       | [contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_acl_operator.py)                                         |
-| [cloud.operators.gcs.GCSToLocalOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py)                                                                                    | [contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_download_operator.py)                                           |
-| [cloud.operators.gcs_to_gcs.GCSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs_to_gcs.py)                                                                        | [contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_gcs.py)                                                     |
 | [cloud.operators.kubernetes_engine.GKECreateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/kubernetes_engine.py)                                                  | [contrib.operators.gcp_container_operator.GKEClusterCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_container_operator.py)                                                   |
 | [cloud.operators.kubernetes_engine.GKEDeleteClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/kubernetes_engine.py)                                                  | [contrib.operators.gcp_container_operator.GKEClusterDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_container_operator.py)                                                   |
 | [cloud.operators.kubernetes_engine.GKEStartPodOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/kubernetes_engine.py)                                                       | [contrib.operators.gcp_container_operator.GKEPodOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_container_operator.py)                                                             |
-| [cloud.operators.local_to_gcs.LocalFilesystemToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/local_to_gcs.py)                                                        | [contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/file_to_gcs.py)                                                                 |
 | [cloud.operators.mlengine.MLEngineManageModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                                 | [contrib.operators.mlengine_operator.MLEngineModelOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py)                                                                |
 | [cloud.operators.mlengine.MLEngineManageVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                               | [contrib.operators.mlengine_operator.MLEngineVersionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py)                                                              |
 | [cloud.operators.mlengine.MLEngineStartBatchPredictionJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                     | [contrib.operators.mlengine_operator.MLEngineBatchPredictionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py)                                                      |
 | [cloud.operators.mlengine.MLEngineStartTrainingJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py)                                                            | [contrib.operators.mlengine_operator.MLEngineTrainingOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py)                                                             |
-| [cloud.operators.mssql_to_gcs.MSSQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mssql_to_gcs.py)                                                                  | [contrib.operators.mssql_to_gcs.MsSqlToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mssql_to_gcs.py)                                                              |
-| [cloud.operators.mysql_to_gcs.MySQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mysql_to_gcs.py)                                                                  | [contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mysql_to_gcs.py)                                                              |
 | [cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitiesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py)                                 | [contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeEntitiesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py)                         |
 | [cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitySentimentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py)                          | [contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeEntitySentimentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py)                  |
 | [cloud.operators.natural_language.CloudNaturalLanguageAnalyzeSentimentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py)                                | [contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeSentimentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py)                        |
 | [cloud.operators.natural_language.CloudNaturalLanguageClassifyTextOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py)                                    | [contrib.operators.gcp_natural_language_operator.CloudLanguageClassifyTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py)                            |
-| [cloud.operators.postgres_to_gcs.PostgresToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/postgres_to_gcs.py)                                                         | [contrib.operators.postgres_to_gcs_operator.PostgresToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/postgres_to_gcs_operator.py)                                   |
 | [cloud.operators.pubsub.PubSubCreateSubscriptionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py)                                                                | [contrib.operators.pubsub_operator.PubSubSubscriptionCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py)                                                         |
 | [cloud.operators.pubsub.PubSubCreateTopicOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py)                                                                       | [contrib.operators.pubsub_operator.PubSubTopicCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py)                                                                |
 | [cloud.operators.pubsub.PubSubDeleteSubscriptionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py)                                                                | [contrib.operators.pubsub_operator.PubSubSubscriptionDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py)                                                         |
 | [cloud.operators.pubsub.PubSubDeleteTopicOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py)                                                                       | [contrib.operators.pubsub_operator.PubSubTopicDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py)                                                                |
 | [cloud.operators.pubsub.PubSubPublishMessageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py)                                                                    | [contrib.operators.pubsub_operator.PubSubPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py)                                                                    |
-| [cloud.operators.s3_to_gcs.S3ToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/s3_to_gcs.py)                                                                           | [contrib.operators.s3_to_gcs_operator.S3ToGCSOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_to_gcs_operator.py)                                                                    |
 | [cloud.operators.spanner.SpannerDeleteDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py)                                                         | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py)                                     |
 | [cloud.operators.spanner.SpannerDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py)                                                                 | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py)                                             |
 | [cloud.operators.spanner.SpannerDeployDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py)                                                         | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeployOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py)                                     |
@@ -398,14 +401,12 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package.
 | [cloud.operators.spanner.SpannerQueryDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py)                                                          | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py)                                      |
 | [cloud.operators.spanner.SpannerUpdateDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py)                                                         | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py)                                     |
 | [cloud.operators.speech_to_text.CloudSpeechToTextRecognizeSpeechOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/speech_to_text.py)                                        | [contrib.operators.gcp_speech_to_text_operator.GcpSpeechToTextRecognizeSpeechOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_speech_to_text_operator.py)                           |
-| [cloud.operators.sql_to_gcs.BaseSQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/sql_to_gcs.py)                                                                    | [contrib.operators.sql_to_gcs.BaseSQLToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sql_to_gcs.py)                                                                |
 | [cloud.operators.text_to_speech.CloudTextToSpeechSynthesizeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/text_to_speech.py)                                             | [contrib.operators.gcp_text_to_speech_operator.GcpTextToSpeechSynthesizeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_text_to_speech_operator.py)                                |
 | [cloud.operators.translate.CloudTranslateTextOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/translate.py)                                                                | [contrib.operators.gcp_translate_operator.CloudTranslateTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_translate_operator.py)                                                 |
 | [cloud.operators.translate_speech.CloudTranslateSpeechOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/translate_speech.py)                                                | [contrib.operators.gcp_translate_speech_operator.CloudTranslateSpeechOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_translate_speech_operator.py)                                 |
 | [cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoExplicitContentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/video_intelligence.py)                | [contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoExplicitContentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_video_intelligence_operator.py) |
 | [cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoLabelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/video_intelligence.py)                         | [contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoLabelsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_video_intelligence_operator.py)          |
 | [cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoShotsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/video_intelligence.py)                          | [contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoShotsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_video_intelligence_operator.py)           |
-| [cloud.operators.vision.CloudVisionAddProductToProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                       | [contrib.operators.gcp_vision_operator.CloudVisionAddProductToProductSetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                        |
 | [cloud.operators.vision.CloudVisionCreateProductOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                                | [contrib.operators.gcp_vision_operator.CloudVisionProductCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                                 |
 | [cloud.operators.vision.CloudVisionCreateProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                             | [contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                              |
 | [cloud.operators.vision.CloudVisionCreateReferenceImageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                         | [contrib.operators.gcp_vision_operator.CloudVisionReferenceImageCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                          |
@@ -421,7 +422,44 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package.
 | [cloud.operators.vision.CloudVisionTextDetectOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                                   | [contrib.operators.gcp_vision_operator.CloudVisionDetectDocumentTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                            |
 | [cloud.operators.vision.CloudVisionUpdateProductOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                                | [contrib.operators.gcp_vision_operator.CloudVisionProductUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                                 |
 | [cloud.operators.vision.CloudVisionUpdateProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py)                                                             | [contrib.operators.gcp_vision_operator.CloudVisionProductSetUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py)                                              |
-| [suite.operators.gcs_to_gdrive.GCSToGoogleDriveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/operators/gcs_to_gdrive.py)                                                          | [contrib.operators.gcs_to_gdrive_operator.GCSToGoogleDriveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_gdrive_operator.py)                                                   |
+
+
+
+
+
+### New transfer operators
+
+| New Airflow 2.0 transfers: `airflow.providers.google` package                                                                                                                       |
+|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [ads.transfers.ads_to_gcs.GoogleAdsToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/ads/transfers/ads_to_gcs.py)                               |
+| [cloud.transfers.facebook_ads_to_gcs.FacebookAdsReportToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py) |
+| [cloud.transfers.gcs_to_local.GCSToLocalFilesystemOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_local.py)                 |
+| [cloud.transfers.gcs_to_sftp.GCSToSFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_sftp.py)                              |
+| [cloud.transfers.presto_to_gcs.PrestoToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/presto_to_gcs.py)                        |
+| [cloud.transfers.sftp_to_gcs.SFTPToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/sftp_to_gcs.py)                              |
+| [cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/sheets_to_gcs.py)                  |
+| [suite.transfers.gcs_to_sheets.GCSToGoogleSheetsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/transfers/gcs_to_sheets.py)                  |
+
+
+
+### Moved transfer operators
+
+| Airflow 2.0 transfers: `airflow.providers.google` package                                                                                                                         | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                                 |
+|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [cloud.transfers.adls_to_gcs.ADLSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/adls_to_gcs.py)                            | [contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/adls_to_gcs.py)                               |
+| [cloud.transfers.bigquery_to_bigquery.BigQueryToBigQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py) | [contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_to_bigquery.py)                   |
+| [cloud.transfers.bigquery_to_gcs.BigQueryToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py)                | [contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_to_gcs.py)                         |
+| [cloud.transfers.bigquery_to_mysql.BigQueryToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py)          | [contrib.operators.bigquery_to_mysql_operator.BigQueryToMySqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_to_mysql_operator.py)          |
+| [cloud.transfers.cassandra_to_gcs.CassandraToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py)             | [contrib.operators.cassandra_to_gcs.CassandraToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/cassandra_to_gcs.py)                |
+| [cloud.transfers.gcs_to_bigquery.GCSToBigQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py)                | [contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_bq.py)                               |
+| [cloud.transfers.gcs_to_gcs.GCSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_gcs.py)                               | [contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_gcs.py)                   |
+| [cloud.transfers.local_to_gcs.LocalFilesystemToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/local_to_gcs.py)               | [contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/file_to_gcs.py)                               |
+| [cloud.transfers.mssql_to_gcs.MSSQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/mssql_to_gcs.py)                         | [contrib.operators.mssql_to_gcs.MsSqlToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mssql_to_gcs.py)                            |
+| [cloud.transfers.mysql_to_gcs.MySQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/mysql_to_gcs.py)                         | [contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mysql_to_gcs.py)                            |
+| [cloud.transfers.postgres_to_gcs.PostgresToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/postgres_to_gcs.py)                | [contrib.operators.postgres_to_gcs_operator.PostgresToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/postgres_to_gcs_operator.py) |
+| [cloud.transfers.s3_to_gcs.S3ToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/s3_to_gcs.py)                                  | [contrib.operators.s3_to_gcs_operator.S3ToGCSOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_to_gcs_operator.py)                                  |
+| [cloud.transfers.sql_to_gcs.BaseSQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/sql_to_gcs.py)                           | [contrib.operators.sql_to_gcs.BaseSQLToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sql_to_gcs.py)                              |
+| [suite.transfers.gcs_to_gdrive.GCSToGoogleDriveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/transfers/gcs_to_gdrive.py)                 | [contrib.operators.gcs_to_gdrive_operator.GCSToGoogleDriveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_gdrive_operator.py)                 |
 
 
 
@@ -444,6 +482,7 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package.
 
 | Airflow 2.0 sensors: `airflow.providers.google` package                                                                                                                                                        | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                            |
 |:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [cloud.sensors.bigquery.BigQueryTableExistenceSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/bigquery.py)                                                        | [contrib.sensors.bigquery_sensor.BigQueryTableSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/bigquery_sensor.py)                                   |
 | [cloud.sensors.bigtable.BigtableTableReplicationCompletedSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/bigtable.py)                                             | [contrib.operators.gcp_bigtable_operator.BigtableTableWaitForReplicationSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) |
 | [cloud.sensors.cloud_storage_transfer_service.CloudDataTransferServiceJobStatusSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py) | [contrib.sensors.gcp_transfer_sensor.GCPTransferServiceWaitForJobStatusSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcp_transfer_sensor.py)      |
 | [cloud.sensors.gcs.GCSObjectExistenceSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/gcs.py)                                                                      | [contrib.sensors.gcs_sensor.GoogleCloudStorageObjectSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcs_sensor.py)                                  |
@@ -482,6 +521,7 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package.
 
 | Airflow 2.0 hooks: `airflow.providers.google` package                                                                                                                                           | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                     |
 |:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [cloud.hooks.bigquery.BigQueryHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/bigquery.py)                                                             | [contrib.hooks.bigquery_hook.BigQueryHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/bigquery_hook.py)                                           |
 | [cloud.hooks.bigtable.BigtableHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/bigtable.py)                                                             | [contrib.hooks.gcp_bigtable_hook.BigtableHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_bigtable_hook.py)                                   |
 | [cloud.hooks.cloud_build.CloudBuildHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_build.py)                                                     | [contrib.hooks.gcp_cloud_build_hook.CloudBuildHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_cloud_build_hook.py)                           |
 | [cloud.hooks.cloud_sql.CloudSQLDatabaseHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_sql.py)                                                   | [contrib.hooks.gcp_sql_hook.CloudSqlDatabaseHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_sql_hook.py)                                     |
@@ -531,6 +571,26 @@ All classes in Airflow 2.0 are in `airflow.providers.google` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [639972d99](https://github.com/apache/airflow/commit/639972d995d848b16a3f283576efdbde28b8fdef) | 2020-06-16  | Add support for latest Apache Beam SDK in Dataflow operators (#9323)                                                                                               |
+| [1459970b3](https://github.com/apache/airflow/commit/1459970b3b9780e139ce029ae889fd8f69a37bc7) | 2020-06-15  | Rename CloudBuildCreateBuildOperator to CloudBuildCreateOperator (#9314)                                                                                           |
+| [431ea3291](https://github.com/apache/airflow/commit/431ea3291c9bf236bccdf8446d753c630ada2b25) | 2020-06-15  | Resolve upstream tasks when template field is XComArg (#8805)                                                                                                      |
+| [aee6ab94e](https://github.com/apache/airflow/commit/aee6ab94eb956347ad560cfe2673bc6011074513) | 2020-06-15  | Wait for pipeline state in Data Fusion operators (#8954)                                                                                                           |
+| [fb1c8b83d](https://github.com/apache/airflow/commit/fb1c8b83d400506a16c10e3d6623a913847e5cf5) | 2020-06-10  | Add test for BQ operations using location (#9206)                                                                                                                  |
+| [a26afbfa5](https://github.com/apache/airflow/commit/a26afbfa51b0981ae742c6171938b57a80aace2b) | 2020-06-10  | Make generated job_id more informative in BQ insert_job (#9203)                                                                                                    |
+| [c41192fa1](https://github.com/apache/airflow/commit/c41192fa1fc5c2b3e7b8414c59f656ab67bbef28) | 2020-06-10  | Upgrade pendulum to latest major version ~2.0 (#9184)                                                                                                              |
+| [b1c8c5ed5](https://github.com/apache/airflow/commit/b1c8c5ed5bba3a852a5446f3fdd1131b4b22637a) | 2020-06-09  | Allows using private endpoints in GKEStartPodOperator (#9169)                                                                                                      |
+| [5918efc86](https://github.com/apache/airflow/commit/5918efc86a2217caa641a6ada289eee1c21407f8) | 2020-06-05  | Add 3.8 to the test matrices (#8836)                                                                                                                               |
+| [9bcdadaf7](https://github.com/apache/airflow/commit/9bcdadaf7e6e73d3d2246fbbd32a9f30a1b43ca9) | 2020-06-05  | Add 'main' param to template_fields in DataprocSubmitPySparkJobOperator (#9154)                                                                                    |
+| [f56811dff](https://github.com/apache/airflow/commit/f56811dff3af66cbceb0418f11e00507bab58674) | 2020-06-05  | [AIRFLOW-6290] Create guide for GKE operators (#8883)                                                                                                              |
+| [76962867b](https://github.com/apache/airflow/commit/76962867b5877cf5ffd1b6004453f783c0732ab1) | 2020-06-04  | Fix sql_to_gcs hook gzip of schema_file (#9140)                                                                                                                    |
+| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02  | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106)                                                                                     |
+| [789852546](https://github.com/apache/airflow/commit/78985254683c359f7444a7eb5f6ee4967c37d61f) | 2020-06-01  | Add BigQueryInsertJobOperator (#8868)                                                                                                                              |
+| [29eb68b90](https://github.com/apache/airflow/commit/29eb68b90b5df692ac322be0939af5e7fa9b71bc) | 2020-05-31  | Create guide for Dataproc Operators (#9037)                                                                                                                        |
+| [886afaf62](https://github.com/apache/airflow/commit/886afaf622602aa97f925bc3ee4fc27aa995c445) | 2020-05-29  | Add example dag and system test for LocalFilesystemToGCSOperator (#9043)                                                                                           |
+| [a779c4dfc](https://github.com/apache/airflow/commit/a779c4dfc278d6ece480b012764ea5814dc78dee) | 2020-05-29  | add separate example dags and system tests for GCSToGoogleSheetsOperator (#9066)                                                                                   |
+| [ada26be23](https://github.com/apache/airflow/commit/ada26be23c913796c2ae77b91cb7d113dfec75a6) | 2020-05-29  | Add correct description for dst param in LocalFilesystemToGCSOperator (#9055)                                                                                      |
+| [81b2761b8](https://github.com/apache/airflow/commit/81b2761b86dae2d21a6ee859d49c08d46fea6def) | 2020-05-29  | add example dag and system test for GoogleSheetsToGCSOperator (#9056)                                                                                              |
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [3994030ea](https://github.com/apache/airflow/commit/3994030ea678727daaf9c2bfed0ca94a096f8d2a) | 2020-05-26  | Refactor BigQuery operators (#8858)                                                                                                                                |
 | [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26  | All classes in backport providers are now importable in Airflow 1.10 (#8991)                                                                                       |
diff --git a/airflow/providers/google/ads/example_dags/example_ads.py b/airflow/providers/google/ads/example_dags/example_ads.py
index a22738a..f1682e7 100644
--- a/airflow/providers/google/ads/example_dags/example_ads.py
+++ b/airflow/providers/google/ads/example_dags/example_ads.py
@@ -21,7 +21,8 @@ Example Airflow DAG that shows how to use GoogleAdsToGcsOperator.
 import os
 
 from airflow import models
-from airflow.providers.google.ads.operators.ads import GoogleAdsListAccountsOperator, GoogleAdsToGcsOperator
+from airflow.providers.google.ads.operators.ads import GoogleAdsListAccountsOperator
+from airflow.providers.google.ads.transfers.ads_to_gcs import GoogleAdsToGcsOperator
 from airflow.utils import dates
 
 # [START howto_google_ads_env_variables]
diff --git a/airflow/providers/google/ads/operators/ads.py b/airflow/providers/google/ads/operators/ads.py
index 8876ecf..950dc1f 100644
--- a/airflow/providers/google/ads/operators/ads.py
+++ b/airflow/providers/google/ads/operators/ads.py
@@ -19,9 +19,8 @@
 This module contains Google Ad to GCS operators.
 """
 import csv
-from operator import attrgetter
 from tempfile import NamedTemporaryFile
-from typing import Dict, List
+from typing import Dict
 
 from airflow.models import BaseOperator
 from airflow.providers.google.ads.hooks.ads import GoogleAdsHook
@@ -29,99 +28,6 @@ from airflow.providers.google.cloud.hooks.gcs import GCSHook
 from airflow.utils.decorators import apply_defaults
 
 
-class GoogleAdsToGcsOperator(BaseOperator):
-    """
-    Fetches the daily results from the Google Ads API for 1-n clients
-    Converts and saves the data as a temporary CSV file
-    Uploads the CSV to Google Cloud Storage
-
-    .. seealso::
-        For more information on the Google Ads API, take a look at the API docs:
-        https://developers.google.com/google-ads/api/docs/start
-
-    .. seealso::
-        For more information on how to use this operator, take a look at the guide:
-        :ref:`howto/operator:GoogleAdsToGcsOperator`
-
-    :param client_ids: Google Ads client IDs to query
-    :type client_ids: List[str]
-    :param query: Google Ads Query Language API query
-    :type query: str
-    :param attributes: List of Google Ads Row attributes to extract
-    :type attributes: List[str]
-    :param bucket: The GCS bucket to upload to
-    :type bucket: str
-    :param obj: GCS path to save the object. Must be the full file path (ex. `path/to/file.txt`)
-    :type obj: str
-    :param gcp_conn_id: Airflow Google Cloud Platform connection ID
-    :type gcp_conn_id: str
-    :param google_ads_conn_id: Airflow Google Ads connection ID
-    :type google_ads_conn_id: str
-    :param page_size: The number of results per API page request. Max 10,000
-    :type page_size: int
-    :param gzip: Option to compress local file or file data for upload
-    :type gzip: bool
-    """
-
-    template_fields = ("client_ids", "query", "attributes", "bucket", "obj")
-
-    @apply_defaults
-    def __init__(
-        self,
-        client_ids: List[str],
-        query: str,
-        attributes: List[str],
-        bucket: str,
-        obj: str,
-        gcp_conn_id: str = "google_cloud_default",
-        google_ads_conn_id: str = "google_ads_default",
-        page_size: int = 10000,
-        gzip: bool = False,
-        *args,
-        **kwargs,
-    ) -> None:
-        super().__init__(*args, **kwargs)
-        self.client_ids = client_ids
-        self.query = query
-        self.attributes = attributes
-        self.bucket = bucket
-        self.obj = obj
-        self.gcp_conn_id = gcp_conn_id
-        self.google_ads_conn_id = google_ads_conn_id
-        self.page_size = page_size
-        self.gzip = gzip
-
-    def execute(self, context: Dict):
-        service = GoogleAdsHook(
-            gcp_conn_id=self.gcp_conn_id,
-            google_ads_conn_id=self.google_ads_conn_id
-        )
-        rows = service.search(
-            client_ids=self.client_ids, query=self.query, page_size=self.page_size
-        )
-
-        try:
-            getter = attrgetter(*self.attributes)
-            converted_rows = [getter(row) for row in rows]
-        except Exception as e:
-            self.log.error("An error occurred in converting the Google Ad Rows. \n Error %s", e)
-            raise
-
-        with NamedTemporaryFile("w", suffix=".csv") as csvfile:
-            writer = csv.writer(csvfile)
-            writer.writerows(converted_rows)
-            csvfile.flush()
-
-            hook = GCSHook(gcp_conn_id=self.gcp_conn_id)
-            hook.upload(
-                bucket_name=self.bucket,
-                object_name=self.obj,
-                filename=csvfile.name,
-                gzip=self.gzip,
-            )
-            self.log.info("%s uploaded to GCS", self.obj)
-
-
 class GoogleAdsListAccountsOperator(BaseOperator):
     """
     Saves list of customers on GCS in form of a csv file.
diff --git a/tests/providers/oracle/operators/__init__.py b/airflow/providers/google/ads/transfers/__init__.py
similarity index 99%
copy from tests/providers/oracle/operators/__init__.py
copy to airflow/providers/google/ads/transfers/__init__.py
index 217e5db..13a8339 100644
--- a/tests/providers/oracle/operators/__init__.py
+++ b/airflow/providers/google/ads/transfers/__init__.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
diff --git a/airflow/providers/google/ads/operators/ads.py b/airflow/providers/google/ads/transfers/ads_to_gcs.py
similarity index 60%
copy from airflow/providers/google/ads/operators/ads.py
copy to airflow/providers/google/ads/transfers/ads_to_gcs.py
index 8876ecf..90a57b2 100644
--- a/airflow/providers/google/ads/operators/ads.py
+++ b/airflow/providers/google/ads/transfers/ads_to_gcs.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -15,9 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""
-This module contains Google Ad to GCS operators.
-"""
+
 import csv
 from operator import attrgetter
 from tempfile import NamedTemporaryFile
@@ -120,83 +117,3 @@ class GoogleAdsToGcsOperator(BaseOperator):
                 gzip=self.gzip,
             )
             self.log.info("%s uploaded to GCS", self.obj)
-
-
-class GoogleAdsListAccountsOperator(BaseOperator):
-    """
-    Saves list of customers on GCS in form of a csv file.
-
-    The resulting list of customers is based on your OAuth credentials. The request returns a list
-    of all accounts that you are able to act upon directly given your current credentials. This will
-    not necessarily include all accounts within the account hierarchy; rather, it will only include
-    accounts where your authenticated user has been added with admin or other rights in the account.
-
-    ..seealso::
-        https://developers.google.com/google-ads/api/reference/rpc
-
-
-    .. seealso::
-        For more information on how to use this operator, take a look at the guide:
-        :ref:`howto/operator:GoogleAdsListAccountsOperator`
-
-    :param bucket: The GCS bucket to upload to
-    :type bucket: str
-    :param object_name: GCS path to save the csv file. Must be the full file path (ex. `path/to/file.csv`)
-    :type object_name: str
-    :param gcp_conn_id: Airflow Google Cloud Platform connection ID
-    :type gcp_conn_id: str
-    :param google_ads_conn_id: Airflow Google Ads connection ID
-    :type google_ads_conn_id: str
-    :param page_size: The number of results per API page request. Max 10,000
-    :type page_size: int
-    :param gzip: Option to compress local file or file data for upload
-    :type gzip: bool
-    """
-
-    template_fields = ("bucket", "object_name")
-
-    @apply_defaults
-    def __init__(
-        self,
-        bucket: str,
-        object_name: str,
-        gcp_conn_id: str = "google_cloud_default",
-        google_ads_conn_id: str = "google_ads_default",
-        gzip: bool = False,
-        *args,
-        **kwargs,
-    ) -> None:
-        super().__init__(*args, **kwargs)
-        self.bucket = bucket
-        self.object_name = object_name
-        self.gcp_conn_id = gcp_conn_id
-        self.google_ads_conn_id = google_ads_conn_id
-        self.gzip = gzip
-
-    def execute(self, context: Dict):
-        uri = f"gs://{self.bucket}/{self.object_name}"
-
-        ads_hook = GoogleAdsHook(
-            gcp_conn_id=self.gcp_conn_id,
-            google_ads_conn_id=self.google_ads_conn_id
-        )
-
-        gcs_hook = GCSHook(gcp_conn_id=self.gcp_conn_id)
-
-        with NamedTemporaryFile("w+") as temp_file:
-            # Download accounts
-            accounts = ads_hook.list_accessible_customers()
-            writer = csv.writer(temp_file)
-            writer.writerows(accounts)
-            temp_file.flush()
-
-            # Upload to GCS
-            gcs_hook.upload(
-                bucket_name=self.bucket,
-                object_name=self.object_name,
-                gzip=self.gzip,
-                filename=temp_file.name
-            )
-            self.log.info("Uploaded %s to %s", len(accounts), uri)
-
-        return uri
diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_to_bigquery.py b/airflow/providers/google/cloud/example_dags/example_bigquery_to_bigquery.py
index c075dc2..3171d30 100644
--- a/airflow/providers/google/cloud/example_dags/example_bigquery_to_bigquery.py
+++ b/airflow/providers/google/cloud/example_dags/example_bigquery_to_bigquery.py
@@ -25,7 +25,7 @@ from airflow import models
 from airflow.providers.google.cloud.operators.bigquery import (
     BigQueryCreateEmptyDatasetOperator, BigQueryCreateEmptyTableOperator, BigQueryDeleteDatasetOperator,
 )
-from airflow.providers.google.cloud.operators.bigquery_to_bigquery import BigQueryToBigQueryOperator
+from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import BigQueryToBigQueryOperator
 from airflow.utils.dates import days_ago
 
 PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project")
diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_bigquery_to_gcs.py
index 0b6ac88..221138f 100644
--- a/airflow/providers/google/cloud/example_dags/example_bigquery_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_bigquery_to_gcs.py
@@ -25,7 +25,7 @@ from airflow import models
 from airflow.providers.google.cloud.operators.bigquery import (
     BigQueryCreateEmptyDatasetOperator, BigQueryCreateEmptyTableOperator, BigQueryDeleteDatasetOperator,
 )
-from airflow.providers.google.cloud.operators.bigquery_to_gcs import BigQueryToGCSOperator
+from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator
 from airflow.utils.dates import days_ago
 
 PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project")
diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_transfer.py b/airflow/providers/google/cloud/example_dags/example_bigquery_transfer.py
index 79cd8de..4667919 100644
--- a/airflow/providers/google/cloud/example_dags/example_bigquery_transfer.py
+++ b/airflow/providers/google/cloud/example_dags/example_bigquery_transfer.py
@@ -25,8 +25,8 @@ from airflow import models
 from airflow.providers.google.cloud.operators.bigquery import (
     BigQueryCreateEmptyDatasetOperator, BigQueryCreateEmptyTableOperator, BigQueryDeleteDatasetOperator,
 )
-from airflow.providers.google.cloud.operators.bigquery_to_bigquery import BigQueryToBigQueryOperator
-from airflow.providers.google.cloud.operators.bigquery_to_gcs import BigQueryToGCSOperator
+from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import BigQueryToBigQueryOperator
+from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator
 from airflow.utils.dates import days_ago
 
 PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project")
diff --git a/airflow/providers/google/cloud/example_dags/example_dataflow.py b/airflow/providers/google/cloud/example_dags/example_dataflow.py
index d05318d..c9034cf 100644
--- a/airflow/providers/google/cloud/example_dags/example_dataflow.py
+++ b/airflow/providers/google/cloud/example_dags/example_dataflow.py
@@ -27,7 +27,7 @@ from airflow.providers.google.cloud.operators.dataflow import (
     CheckJobRunning, DataflowCreateJavaJobOperator, DataflowCreatePythonJobOperator,
     DataflowTemplatedJobStartOperator,
 )
-from airflow.providers.google.cloud.operators.gcs import GCSToLocalOperator
+from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator
 from airflow.utils.dates import days_ago
 
 GCS_TMP = os.environ.get('GCP_DATAFLOW_GCS_TMP', 'gs://test-dataflow-example/temp/')
@@ -70,7 +70,7 @@ with models.DAG(
     )
     # [END howto_operator_start_java_job]
 
-    jar_to_local = GCSToLocalOperator(
+    jar_to_local = GCSToLocalFilesystemOperator(
         task_id="jar-to-local",
         bucket=GCS_JAR_BUCKET_NAME,
         object_name=GCS_JAR_OBJECT_NAME,
diff --git a/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py
index b23d155..0bebd35 100644
--- a/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py
@@ -27,9 +27,9 @@ from airflow.providers.google.cloud.operators.bigquery import (
     BigQueryCreateEmptyDatasetOperator, BigQueryCreateEmptyTableOperator, BigQueryDeleteDatasetOperator,
     BigQueryExecuteQueryOperator,
 )
-from airflow.providers.google.cloud.operators.facebook_ads_to_gcs import FacebookAdsReportToGcsOperator
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
-from airflow.providers.google.cloud.operators.gcs_to_bigquery import GCSToBigQueryOperator
+from airflow.providers.google.cloud.transfers.facebook_ads_to_gcs import FacebookAdsReportToGcsOperator
+from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator
 from airflow.utils.dates import days_ago
 
 # [START howto_GCS_env_variables]
diff --git a/airflow/providers/google/cloud/example_dags/example_gcs.py b/airflow/providers/google/cloud/example_dags/example_gcs.py
index 55e24d6..4cdac36 100644
--- a/airflow/providers/google/cloud/example_dags/example_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_gcs.py
@@ -26,10 +26,11 @@ from airflow.operators.bash import BashOperator
 from airflow.providers.google.cloud.operators.gcs import (
     GCSBucketCreateAclEntryOperator, GCSCreateBucketOperator, GCSDeleteBucketOperator,
     GCSDeleteObjectsOperator, GCSFileTransformOperator, GCSListObjectsOperator,
-    GCSObjectCreateAclEntryOperator, GCSToLocalOperator,
+    GCSObjectCreateAclEntryOperator,
 )
-from airflow.providers.google.cloud.operators.gcs_to_gcs import GCSToGCSOperator
-from airflow.providers.google.cloud.operators.local_to_gcs import LocalFilesystemToGCSOperator
+from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
+from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator
+from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
 from airflow.utils.dates import days_ago
 
 default_args = {"start_date": days_ago(1)}
@@ -106,12 +107,14 @@ with models.DAG(
     )
     # [END howto_operator_gcs_object_create_acl_entry_task]
 
-    download_file = GCSToLocalOperator(
+    # [START howto_operator_gcs_download_file_task]
+    download_file = GCSToLocalFilesystemOperator(
         task_id="download_file",
         object_name=BUCKET_FILE_LOCATION,
         bucket=BUCKET_1,
         filename=PATH_TO_SAVED_FILE,
     )
+    # [END howto_operator_gcs_download_file_task]
 
     copy_file = GCSToGCSOperator(
         task_id="copy_file",
diff --git a/airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py b/airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py
index 33b9c69..9035f2d 100644
--- a/airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py
+++ b/airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py
@@ -26,7 +26,7 @@ from airflow import models
 from airflow.providers.google.cloud.operators.bigquery import (
     BigQueryCreateEmptyDatasetOperator, BigQueryDeleteDatasetOperator,
 )
-from airflow.providers.google.cloud.operators.gcs_to_bigquery import GCSToBigQueryOperator
+from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator
 from airflow.utils.dates import days_ago
 
 DATASET_NAME = os.environ.get("GCP_DATASET_NAME", 'airflow_test')
diff --git a/airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py
index 30a542f..20f0b2b 100644
--- a/airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py
@@ -22,9 +22,8 @@ Example Airflow DAG for Google Cloud Storage to Google Cloud Storage transfer op
 import os
 
 from airflow import models
-from airflow.providers.google.cloud.operators.gcs_to_gcs import (
-    GCSSynchronizeBucketsOperator, GCSToGCSOperator,
-)
+from airflow.providers.google.cloud.operators.gcs import GCSSynchronizeBucketsOperator
+from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
 from airflow.utils.dates import days_ago
 
 default_args = {"start_date": days_ago(1)}
diff --git a/airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py b/airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py
index bb2e6e8..d325e9c 100644
--- a/airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py
+++ b/airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py
@@ -22,7 +22,7 @@ Example Airflow DAG for Google Cloud Storage to SFTP transfer operators.
 import os
 
 from airflow import models
-from airflow.providers.google.cloud.operators.gcs_to_sftp import GCSToSFTPOperator
+from airflow.providers.google.cloud.transfers.gcs_to_sftp import GCSToSFTPOperator
 from airflow.utils.dates import days_ago
 
 default_args = {"start_date": days_ago(1)}
diff --git a/airflow/providers/google/cloud/example_dags/example_local_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_local_to_gcs.py
index 5e8d7b0..eb3a8df 100644
--- a/airflow/providers/google/cloud/example_dags/example_local_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_local_to_gcs.py
@@ -19,7 +19,7 @@
 import os
 
 from airflow import models
-from airflow.providers.google.cloud.operators.local_to_gcs import LocalFilesystemToGCSOperator
+from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
 from airflow.utils import dates
 
 # [START howto_gcs_environment_variables]
diff --git a/airflow/providers/google/cloud/example_dags/example_postgres_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_postgres_to_gcs.py
index 81f414f..d4df933 100644
--- a/airflow/providers/google/cloud/example_dags/example_postgres_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_postgres_to_gcs.py
@@ -19,7 +19,7 @@
 Example DAG using PostgresToGoogleCloudStorageOperator.
 """
 from airflow import models
-from airflow.providers.google.cloud.operators.postgres_to_gcs import PostgresToGCSOperator
+from airflow.providers.google.cloud.transfers.postgres_to_gcs import PostgresToGCSOperator
 from airflow.utils.dates import days_ago
 
 GCS_BUCKET = "postgres_to_gcs_example"
diff --git a/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py
index 385cdc3..ca4ae0f 100644
--- a/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py
@@ -26,7 +26,7 @@ from airflow.providers.google.cloud.operators.bigquery import (
     BigQueryCreateEmptyDatasetOperator, BigQueryCreateExternalTableOperator, BigQueryDeleteDatasetOperator,
     BigQueryExecuteQueryOperator,
 )
-from airflow.providers.google.cloud.operators.presto_to_gcs import PrestoToGCSOperator
+from airflow.providers.google.cloud.transfers.presto_to_gcs import PrestoToGCSOperator
 from airflow.utils.dates import days_ago
 
 GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", 'example-project')
diff --git a/airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py
index ee3a488..9c6f315 100644
--- a/airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py
@@ -22,7 +22,7 @@ Example Airflow DAG for Google Cloud Storage to SFTP transfer operators.
 import os
 
 from airflow import models
-from airflow.providers.google.cloud.operators.sftp_to_gcs import SFTPToGCSOperator
+from airflow.providers.google.cloud.transfers.sftp_to_gcs import SFTPToGCSOperator
 from airflow.utils.dates import days_ago
 
 default_args = {"start_date": days_ago(1)}
diff --git a/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py
index 18c2d92..b4ecfae 100644
--- a/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py
@@ -19,7 +19,7 @@
 import os
 
 from airflow import models
-from airflow.providers.google.cloud.operators.sheets_to_gcs import GoogleSheetsToGCSOperator
+from airflow.providers.google.cloud.transfers.sheets_to_gcs import GoogleSheetsToGCSOperator
 from airflow.utils.dates import days_ago
 
 BUCKET = os.environ.get("GCP_GCS_BUCKET", "test28397yeo")
diff --git a/airflow/providers/google/cloud/operators/gcs.py b/airflow/providers/google/cloud/operators/gcs.py
index dace60f..e76eb3c 100644
--- a/airflow/providers/google/cloud/operators/gcs.py
+++ b/airflow/providers/google/cloud/operators/gcs.py
@@ -28,7 +28,6 @@ from google.api_core.exceptions import Conflict
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
-from airflow.models.xcom import MAX_XCOM_SIZE
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
 from airflow.utils.decorators import apply_defaults
 
@@ -227,102 +226,6 @@ class GCSListObjectsOperator(BaseOperator):
                          delimiter=self.delimiter)
 
 
-class GCSToLocalOperator(BaseOperator):
-    """
-    Downloads a file from Google Cloud Storage.
-
-    If a filename is supplied, it writes the file to the specified location, alternatively one can
-    set the ``store_to_xcom_key`` parameter to True push the file content into xcom. When the file size
-    exceeds the maximum size for xcom it is recommended to write to a file.
-
-    :param bucket: The Google Cloud Storage bucket where the object is.
-        Must not contain 'gs://' prefix. (templated)
-    :type bucket: str
-    :param object: The name of the object to download in the Google cloud
-        storage bucket. (templated)
-    :type object: str
-    :param filename: The file path, including filename,  on the local file system (where the
-        operator is being executed) that the file should be downloaded to. (templated)
-        If no filename passed, the downloaded data will not be stored on the local file
-        system.
-    :type filename: str
-    :param store_to_xcom_key: If this param is set, the operator will push
-        the contents of the downloaded file to XCom with the key set in this
-        parameter. If not set, the downloaded data will not be pushed to XCom. (templated)
-    :type store_to_xcom_key: str
-    :param gcp_conn_id: (Optional) The connection ID used to connect to Google Cloud Platform.
-    :type gcp_conn_id: str
-    :param google_cloud_storage_conn_id: (Deprecated) The connection ID used to connect to Google Cloud
-        Platform. This parameter has been deprecated. You should pass the gcp_conn_id parameter instead.
-    :type google_cloud_storage_conn_id: str
-    :param delegate_to: The account to impersonate, if any.
-        For this to work, the service account making the request must have
-        domain-wide delegation enabled.
-    :type delegate_to: str
-    """
-    template_fields = ('bucket', 'object', 'filename', 'store_to_xcom_key',)
-    ui_color = '#f0eee4'
-
-    @apply_defaults
-    def __init__(self,
-                 bucket: str,
-                 object_name: Optional[str] = None,
-                 filename: Optional[str] = None,
-                 store_to_xcom_key: Optional[str] = None,
-                 gcp_conn_id: str = 'google_cloud_default',
-                 google_cloud_storage_conn_id: Optional[str] = None,
-                 delegate_to: Optional[str] = None,
-                 *args,
-                 **kwargs) -> None:
-        # To preserve backward compatibility
-        # TODO: Remove one day
-        if object_name is None:
-            if 'object' in kwargs:
-                object_name = kwargs['object']
-                DeprecationWarning("Use 'object_name' instead of 'object'.")
-            else:
-                TypeError("__init__() missing 1 required positional argument: 'object_name'")
-
-        if filename is not None and store_to_xcom_key is not None:
-            raise ValueError("Either filename or store_to_xcom_key can be set")
-
-        if google_cloud_storage_conn_id:
-            warnings.warn(
-                "The google_cloud_storage_conn_id parameter has been deprecated. You should pass "
-                "the gcp_conn_id parameter.", DeprecationWarning, stacklevel=3)
-            gcp_conn_id = google_cloud_storage_conn_id
-
-        super().__init__(*args, **kwargs)
-        self.bucket = bucket
-        self.object = object_name
-        self.filename = filename
-        self.store_to_xcom_key = store_to_xcom_key
-        self.gcp_conn_id = gcp_conn_id
-        self.delegate_to = delegate_to
-
-    def execute(self, context):
-        self.log.info('Executing download: %s, %s, %s', self.bucket,
-                      self.object, self.filename)
-        hook = GCSHook(
-            google_cloud_storage_conn_id=self.gcp_conn_id,
-            delegate_to=self.delegate_to
-        )
-
-        if self.store_to_xcom_key:
-            file_bytes = hook.download(bucket_name=self.bucket,
-                                       object_name=self.object)
-            if sys.getsizeof(file_bytes) < MAX_XCOM_SIZE:
-                context['ti'].xcom_push(key=self.store_to_xcom_key, value=file_bytes)
-            else:
-                raise AirflowException(
-                    'The size of the downloaded file is too large to push to XCom!'
-                )
-        else:
-            hook.download(bucket_name=self.bucket,
-                          object_name=self.object,
-                          filename=self.filename)
-
-
 class GCSDeleteObjectsOperator(BaseOperator):
     """
     Deletes objects from a Google Cloud Storage bucket, either
@@ -663,3 +566,94 @@ class GCSDeleteBucketOperator(BaseOperator):
     def execute(self, context):
         hook = GCSHook(gcp_conn_id=self.gcp_conn_id)
         hook.delete_bucket(bucket_name=self.bucket_name, force=self.force)
+
+
+class GCSSynchronizeBucketsOperator(BaseOperator):
+    """
+    Synchronizes the contents of the buckets or bucket's directories in the Google Cloud Services.
+
+    Parameters ``source_object`` and ``destination_object`` describe the root sync directory. If they are
+    not passed, the entire bucket will be synchronized. They should point to directories.
+
+    .. note::
+        The synchronization of individual files is not supported. Only entire directories can be
+        synchronized.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:GCSSynchronizeBuckets`
+
+    :param source_bucket: The name of the bucket containing the source objects.
+    :type source_bucket: str
+    :param destination_bucket: The name of the bucket containing the destination objects.
+    :type destination_bucket: str
+    :param source_object: The root sync directory in the source bucket.
+    :type source_object: Optional[str]
+    :param destination_object: The root sync directory in the destination bucket.
+    :type destination_object: Optional[str]
+    :param recursive: If True, subdirectories will be considered
+    :type recursive: bool
+    :param allow_overwrite: if True, the files will be overwritten if a mismatched file is found.
+        By default, overwriting files is not allowed
+    :type allow_overwrite: bool
+    :param delete_extra_files: if True, deletes additional files from the source that are not found in the
+        destination. By default extra files are not deleted.
+
+        .. note::
+            This option can delete data quickly if you specify the wrong source/destination combination.
+
+    :type delete_extra_files: bool
+    """
+
+    template_fields = (
+        'source_bucket',
+        'destination_bucket',
+        'source_object',
+        'destination_object',
+        'recursive',
+        'delete_extra_files',
+        'allow_overwrite',
+        'gcp_conn_id',
+        'delegate_to',
+    )
+
+    @apply_defaults
+    def __init__(
+        self,
+        source_bucket: str,
+        destination_bucket: str,
+        source_object: Optional[str] = None,
+        destination_object: Optional[str] = None,
+        recursive: bool = True,
+        delete_extra_files: bool = False,
+        allow_overwrite: bool = False,
+        gcp_conn_id: str = 'google_cloud_default',
+        delegate_to: Optional[str] = None,
+        *args,
+        **kwargs
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.source_bucket = source_bucket
+        self.destination_bucket = destination_bucket
+        self.source_object = source_object
+        self.destination_object = destination_object
+        self.recursive = recursive
+        self.delete_extra_files = delete_extra_files
+        self.allow_overwrite = allow_overwrite
+        self.gcp_conn_id = gcp_conn_id
+        self.delegate_to = delegate_to
+
+    def execute(self, context):
+        hook = GCSHook(
+            google_cloud_storage_conn_id=self.gcp_conn_id,
+            delegate_to=self.delegate_to
+        )
+        hook.sync(
+            source_bucket=self.source_bucket,
+            destination_bucket=self.destination_bucket,
+            source_object=self.source_object,
+            destination_object=self.destination_object,
+            recursive=self.recursive,
+            delete_extra_files=self.delete_extra_files,
+            allow_overwrite=self.allow_overwrite
+        )
diff --git a/tests/providers/oracle/operators/__init__.py b/airflow/providers/google/cloud/transfers/__init__.py
similarity index 99%
copy from tests/providers/oracle/operators/__init__.py
copy to airflow/providers/google/cloud/transfers/__init__.py
index 217e5db..13a8339 100644
--- a/tests/providers/oracle/operators/__init__.py
+++ b/airflow/providers/google/cloud/transfers/__init__.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
diff --git a/airflow/providers/google/cloud/operators/adls_to_gcs.py b/airflow/providers/google/cloud/transfers/adls_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/adls_to_gcs.py
rename to airflow/providers/google/cloud/transfers/adls_to_gcs.py
diff --git a/airflow/providers/google/cloud/operators/bigquery_to_bigquery.py b/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/bigquery_to_bigquery.py
rename to airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py
diff --git a/airflow/providers/google/cloud/operators/bigquery_to_gcs.py b/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/bigquery_to_gcs.py
rename to airflow/providers/google/cloud/transfers/bigquery_to_gcs.py
diff --git a/airflow/providers/google/cloud/operators/bigquery_to_mysql.py b/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/bigquery_to_mysql.py
rename to airflow/providers/google/cloud/transfers/bigquery_to_mysql.py
diff --git a/airflow/providers/google/cloud/operators/cassandra_to_gcs.py b/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/cassandra_to_gcs.py
rename to airflow/providers/google/cloud/transfers/cassandra_to_gcs.py
diff --git a/airflow/providers/google/cloud/operators/facebook_ads_to_gcs.py b/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/facebook_ads_to_gcs.py
rename to airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py
diff --git a/airflow/providers/google/cloud/operators/gcs_to_bigquery.py b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/gcs_to_bigquery.py
rename to airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
diff --git a/airflow/providers/google/cloud/operators/gcs_to_gcs.py b/airflow/providers/google/cloud/transfers/gcs_to_gcs.py
similarity index 82%
rename from airflow/providers/google/cloud/operators/gcs_to_gcs.py
rename to airflow/providers/google/cloud/transfers/gcs_to_gcs.py
index 207a535..a3d6db8 100644
--- a/airflow/providers/google/cloud/operators/gcs_to_gcs.py
+++ b/airflow/providers/google/cloud/transfers/gcs_to_gcs.py
@@ -19,7 +19,6 @@
 This module contains a Google Cloud Storage operator.
 """
 import warnings
-from typing import Optional
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
@@ -342,94 +341,3 @@ class GCSToGCSOperator(BaseOperator):
 
         if self.move_object:
             hook.delete(self.source_bucket, source_object)
-
-
-class GCSSynchronizeBucketsOperator(BaseOperator):
-    """
-    Synchronizes the contents of the buckets or bucket's directories in the Google Cloud Services.
-
-    Parameters ``source_object`` and ``destination_object`` describe the root sync directory. If they are
-    not passed, the entire bucket will be synchronized. They should point to directories.
-
-    .. note::
-        The synchronization of individual files is not supported. Only entire directories can be
-        synchronized.
-
-    .. seealso::
-        For more information on how to use this operator, take a look at the guide:
-        :ref:`howto/operator:GCSSynchronizeBuckets`
-
-    :param source_bucket: The name of the bucket containing the source objects.
-    :type source_bucket: str
-    :param destination_bucket: The name of the bucket containing the destination objects.
-    :type destination_bucket: str
-    :param source_object: The root sync directory in the source bucket.
-    :type source_object: Optional[str]
-    :param destination_object: The root sync directory in the destination bucket.
-    :type destination_object: Optional[str]
-    :param recursive: If True, subdirectories will be considered
-    :type recursive: bool
-    :param allow_overwrite: if True, the files will be overwritten if a mismatched file is found.
-        By default, overwriting files is not allowed
-    :type allow_overwrite: bool
-    :param delete_extra_files: if True, deletes additional files from the source that not found in the
-        destination. By default extra files are not deleted.
-
-        .. note::
-            This option can delete data quickly if you specify the wrong source/destination combination.
-
-    :type delete_extra_files: bool
-    """
-
-    template_fields = (
-        'source_bucket',
-        'destination_bucket',
-        'source_object',
-        'destination_object',
-        'recursive',
-        'delete_extra_files',
-        'allow_overwrite',
-        'gcp_conn_id',
-        'delegate_to',
-    )
-
-    @apply_defaults
-    def __init__(
-        self,
-        source_bucket: str,
-        destination_bucket: str,
-        source_object: Optional[str] = None,
-        destination_object: Optional[str] = None,
-        recursive: bool = True,
-        delete_extra_files: bool = False,
-        allow_overwrite: bool = False,
-        gcp_conn_id: str = 'google_cloud_default',
-        delegate_to: Optional[str] = None,
-        *args,
-        **kwargs
-    ) -> None:
-        super().__init__(*args, **kwargs)
-        self.source_bucket = source_bucket
-        self.destination_bucket = destination_bucket
-        self.source_object = source_object
-        self.destination_object = destination_object
-        self.recursive = recursive
-        self.delete_extra_files = delete_extra_files
-        self.allow_overwrite = allow_overwrite
-        self.gcp_conn_id = gcp_conn_id
-        self.delegate_to = delegate_to
-
-    def execute(self, context):
-        hook = GCSHook(
-            google_cloud_storage_conn_id=self.gcp_conn_id,
-            delegate_to=self.delegate_to
-        )
-        hook.sync(
-            source_bucket=self.source_bucket,
-            destination_bucket=self.destination_bucket,
-            source_object=self.source_object,
-            destination_object=self.destination_object,
-            recursive=self.recursive,
-            delete_extra_files=self.delete_extra_files,
-            allow_overwrite=self.allow_overwrite
-        )
diff --git a/airflow/providers/google/cloud/transfers/gcs_to_local.py b/airflow/providers/google/cloud/transfers/gcs_to_local.py
new file mode 100644
index 0000000..ffd3315
--- /dev/null
+++ b/airflow/providers/google/cloud/transfers/gcs_to_local.py
@@ -0,0 +1,122 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import sys
+import warnings
+from typing import Optional
+
+from airflow.exceptions import AirflowException
+from airflow.models import BaseOperator
+from airflow.models.xcom import MAX_XCOM_SIZE
+from airflow.providers.google.cloud.hooks.gcs import GCSHook
+from airflow.sensors.base_sensor_operator import apply_defaults
+
+
+class GCSToLocalFilesystemOperator(BaseOperator):
+    """
+    Downloads a file from Google Cloud Storage.
+
+    If a filename is supplied, it writes the file to the specified location, alternatively one can
+    set the ``store_to_xcom_key`` parameter to True to push the file content into xcom. When the file size
+    exceeds the maximum size for xcom it is recommended to write to a file.
+
+    :param bucket: The Google Cloud Storage bucket where the object is.
+        Must not contain 'gs://' prefix. (templated)
+    :type bucket: str
+    :param object: The name of the object to download in the Google cloud
+        storage bucket. (templated)
+    :type object: str
+    :param filename: The file path, including filename, on the local file system (where the
+        operator is being executed) that the file should be downloaded to. (templated)
+        If no filename passed, the downloaded data will not be stored on the local file
+        system.
+    :type filename: str
+    :param store_to_xcom_key: If this param is set, the operator will push
+        the contents of the downloaded file to XCom with the key set in this
+        parameter. If not set, the downloaded data will not be pushed to XCom. (templated)
+    :type store_to_xcom_key: str
+    :param gcp_conn_id: (Optional) The connection ID used to connect to Google Cloud Platform.
+    :type gcp_conn_id: str
+    :param google_cloud_storage_conn_id: (Deprecated) The connection ID used to connect to Google Cloud
+        Platform. This parameter has been deprecated. You should pass the gcp_conn_id parameter instead.
+    :type google_cloud_storage_conn_id: str
+    :param delegate_to: The account to impersonate, if any.
+        For this to work, the service account making the request must have
+        domain-wide delegation enabled.
+    :type delegate_to: str
+    """
+    template_fields = ('bucket', 'object', 'filename', 'store_to_xcom_key',)
+    ui_color = '#f0eee4'
+
+    @apply_defaults
+    def __init__(self,
+                 bucket: str,
+                 object_name: Optional[str] = None,
+                 filename: Optional[str] = None,
+                 store_to_xcom_key: Optional[str] = None,
+                 gcp_conn_id: str = 'google_cloud_default',
+                 google_cloud_storage_conn_id: Optional[str] = None,
+                 delegate_to: Optional[str] = None,
+                 *args,
+                 **kwargs) -> None:
+        # To preserve backward compatibility
+        # TODO: Remove one day
+        if object_name is None:
+            if 'object' in kwargs:
+                object_name = kwargs['object']
+                DeprecationWarning("Use 'object_name' instead of 'object'.")
+            else:
+                TypeError("__init__() missing 1 required positional argument: 'object_name'")
+
+        if filename is not None and store_to_xcom_key is not None:
+            raise ValueError("Either filename or store_to_xcom_key can be set")
+
+        if google_cloud_storage_conn_id:
+            warnings.warn(
+                "The google_cloud_storage_conn_id parameter has been deprecated. You should pass "
+                "the gcp_conn_id parameter.", DeprecationWarning, stacklevel=3)
+            gcp_conn_id = google_cloud_storage_conn_id
+
+        super().__init__(*args, **kwargs)
+        self.bucket = bucket
+        self.object = object_name
+        self.filename = filename  # noqa
+        self.store_to_xcom_key = store_to_xcom_key  # noqa
+        self.gcp_conn_id = gcp_conn_id
+        self.delegate_to = delegate_to
+
+    def execute(self, context):
+        self.log.info('Executing download: %s, %s, %s', self.bucket,
+                      self.object, self.filename)
+        hook = GCSHook(
+            google_cloud_storage_conn_id=self.gcp_conn_id,
+            delegate_to=self.delegate_to
+        )
+
+        if self.store_to_xcom_key:
+            file_bytes = hook.download(bucket_name=self.bucket,
+                                       object_name=self.object)
+            if sys.getsizeof(file_bytes) < MAX_XCOM_SIZE:
+                context['ti'].xcom_push(key=self.store_to_xcom_key, value=file_bytes)
+            else:
+                raise AirflowException(
+                    'The size of the downloaded file is too large to push to XCom!'
+                )
+        else:
+            hook.download(bucket_name=self.bucket,
+                          object_name=self.object,
+                          filename=self.filename)
diff --git a/airflow/providers/google/cloud/operators/gcs_to_sftp.py b/airflow/providers/google/cloud/transfers/gcs_to_sftp.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/gcs_to_sftp.py
rename to airflow/providers/google/cloud/transfers/gcs_to_sftp.py
diff --git a/airflow/providers/google/cloud/operators/local_to_gcs.py b/airflow/providers/google/cloud/transfers/local_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/local_to_gcs.py
rename to airflow/providers/google/cloud/transfers/local_to_gcs.py
diff --git a/airflow/providers/google/cloud/operators/mssql_to_gcs.py b/airflow/providers/google/cloud/transfers/mssql_to_gcs.py
similarity index 97%
rename from airflow/providers/google/cloud/operators/mssql_to_gcs.py
rename to airflow/providers/google/cloud/transfers/mssql_to_gcs.py
index 4265dbd..d6d1c80 100644
--- a/airflow/providers/google/cloud/operators/mssql_to_gcs.py
+++ b/airflow/providers/google/cloud/transfers/mssql_to_gcs.py
@@ -21,7 +21,7 @@ MsSQL to GCS operator.
 
 import decimal
 
-from airflow.providers.google.cloud.operators.sql_to_gcs import BaseSQLToGCSOperator
+from airflow.providers.google.cloud.transfers.sql_to_gcs import BaseSQLToGCSOperator
 from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook
 from airflow.utils.decorators import apply_defaults
 
diff --git a/airflow/providers/google/cloud/operators/mysql_to_gcs.py b/airflow/providers/google/cloud/transfers/mysql_to_gcs.py
similarity index 98%
rename from airflow/providers/google/cloud/operators/mysql_to_gcs.py
rename to airflow/providers/google/cloud/transfers/mysql_to_gcs.py
index 87fe5d4..8ab7539 100644
--- a/airflow/providers/google/cloud/operators/mysql_to_gcs.py
+++ b/airflow/providers/google/cloud/transfers/mysql_to_gcs.py
@@ -26,7 +26,7 @@ from decimal import Decimal
 
 from MySQLdb.constants import FIELD_TYPE
 
-from airflow.providers.google.cloud.operators.sql_to_gcs import BaseSQLToGCSOperator
+from airflow.providers.google.cloud.transfers.sql_to_gcs import BaseSQLToGCSOperator
 from airflow.providers.mysql.hooks.mysql import MySqlHook
 from airflow.utils.decorators import apply_defaults
 
diff --git a/airflow/providers/google/cloud/operators/postgres_to_gcs.py b/airflow/providers/google/cloud/transfers/postgres_to_gcs.py
similarity index 98%
rename from airflow/providers/google/cloud/operators/postgres_to_gcs.py
rename to airflow/providers/google/cloud/transfers/postgres_to_gcs.py
index 2938eeb..1f0aabf 100644
--- a/airflow/providers/google/cloud/operators/postgres_to_gcs.py
+++ b/airflow/providers/google/cloud/transfers/postgres_to_gcs.py
@@ -26,7 +26,7 @@ from decimal import Decimal
 
 import pendulum
 
-from airflow.providers.google.cloud.operators.sql_to_gcs import BaseSQLToGCSOperator
+from airflow.providers.google.cloud.transfers.sql_to_gcs import BaseSQLToGCSOperator
 from airflow.providers.postgres.hooks.postgres import PostgresHook
 from airflow.utils.decorators import apply_defaults
 
diff --git a/airflow/providers/google/cloud/operators/presto_to_gcs.py b/airflow/providers/google/cloud/transfers/presto_to_gcs.py
similarity index 99%
rename from airflow/providers/google/cloud/operators/presto_to_gcs.py
rename to airflow/providers/google/cloud/transfers/presto_to_gcs.py
index 49c931c..855b99a 100644
--- a/airflow/providers/google/cloud/operators/presto_to_gcs.py
+++ b/airflow/providers/google/cloud/transfers/presto_to_gcs.py
@@ -19,7 +19,7 @@ from typing import Any, List, Tuple
 
 from prestodb.dbapi import Cursor as PrestoCursor
 
-from airflow.providers.google.cloud.operators.sql_to_gcs import BaseSQLToGCSOperator
+from airflow.providers.google.cloud.transfers.sql_to_gcs import BaseSQLToGCSOperator
 from airflow.providers.presto.hooks.presto import PrestoHook
 from airflow.utils.decorators import apply_defaults
 
diff --git a/airflow/providers/google/cloud/operators/s3_to_gcs.py b/airflow/providers/google/cloud/transfers/s3_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/s3_to_gcs.py
rename to airflow/providers/google/cloud/transfers/s3_to_gcs.py
diff --git a/airflow/providers/google/cloud/operators/sftp_to_gcs.py b/airflow/providers/google/cloud/transfers/sftp_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/sftp_to_gcs.py
rename to airflow/providers/google/cloud/transfers/sftp_to_gcs.py
diff --git a/airflow/providers/google/cloud/operators/sheets_to_gcs.py b/airflow/providers/google/cloud/transfers/sheets_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/sheets_to_gcs.py
rename to airflow/providers/google/cloud/transfers/sheets_to_gcs.py
diff --git a/airflow/providers/google/cloud/operators/sql_to_gcs.py b/airflow/providers/google/cloud/transfers/sql_to_gcs.py
similarity index 100%
rename from airflow/providers/google/cloud/operators/sql_to_gcs.py
rename to airflow/providers/google/cloud/transfers/sql_to_gcs.py
diff --git a/airflow/providers/google/marketing_platform/example_dags/example_display_video.py b/airflow/providers/google/marketing_platform/example_dags/example_display_video.py
index 0e8fd84..df4e538 100644
--- a/airflow/providers/google/marketing_platform/example_dags/example_display_video.py
+++ b/airflow/providers/google/marketing_platform/example_dags/example_display_video.py
@@ -22,7 +22,7 @@ import os
 from typing import Dict
 
 from airflow import models
-from airflow.providers.google.cloud.operators.gcs_to_bigquery import GCSToBigQueryOperator
+from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator
 from airflow.providers.google.marketing_platform.hooks.display_video import GoogleDisplayVideo360Hook
 from airflow.providers.google.marketing_platform.operators.display_video import (
     GoogleDisplayVideo360CreateReportOperator, GoogleDisplayVideo360CreateSDFDownloadTaskOperator,
diff --git a/airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py b/airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py
index 565ab4a..0d838d4 100644
--- a/airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py
+++ b/airflow/providers/google/suite/example_dags/example_gcs_to_gdrive.py
@@ -21,7 +21,7 @@ Example DAG using GoogleCloudStorageToGoogleDriveOperator.
 import os
 
 from airflow import models
-from airflow.providers.google.suite.operators.gcs_to_gdrive import GCSToGoogleDriveOperator
+from airflow.providers.google.suite.transfers.gcs_to_gdrive import GCSToGoogleDriveOperator
 from airflow.utils.dates import days_ago
 
 GCS_TO_GDRIVE_BUCKET = os.environ.get("GCS_TO_DRIVE_BUCKET", "example-object")
diff --git a/airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py b/airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py
index 5aca951..52430c2 100644
--- a/airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py
+++ b/airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py
@@ -19,8 +19,8 @@
 import os
 
 from airflow import models
-from airflow.providers.google.cloud.operators.sheets_to_gcs import GoogleSheetsToGCSOperator
-from airflow.providers.google.suite.operators.gcs_to_sheets import GCSToGoogleSheetsOperator
+from airflow.providers.google.cloud.transfers.sheets_to_gcs import GoogleSheetsToGCSOperator
+from airflow.providers.google.suite.transfers.gcs_to_sheets import GCSToGoogleSheetsOperator
 from airflow.utils.dates import days_ago
 
 BUCKET = os.environ.get("GCP_GCS_BUCKET", "example-test-bucket3")
diff --git a/airflow/providers/google/suite/example_dags/example_sheets.py b/airflow/providers/google/suite/example_dags/example_sheets.py
index e5a51b7..fcb266b 100644
--- a/airflow/providers/google/suite/example_dags/example_sheets.py
+++ b/airflow/providers/google/suite/example_dags/example_sheets.py
@@ -20,9 +20,9 @@ import os
 
 from airflow import models
 from airflow.operators.bash import BashOperator
-from airflow.providers.google.cloud.operators.sheets_to_gcs import GoogleSheetsToGCSOperator
-from airflow.providers.google.suite.operators.gcs_to_sheets import GCSToGoogleSheetsOperator
+from airflow.providers.google.cloud.transfers.sheets_to_gcs import GoogleSheetsToGCSOperator
 from airflow.providers.google.suite.operators.sheets import GoogleSheetsCreateSpreadsheetOperator
+from airflow.providers.google.suite.transfers.gcs_to_sheets import GCSToGoogleSheetsOperator
 from airflow.utils.dates import days_ago
 
 GCS_BUCKET = os.environ.get("SHEETS_GCS_BUCKET", "test28397ye")
diff --git a/tests/providers/oracle/operators/__init__.py b/airflow/providers/google/suite/transfers/__init__.py
similarity index 99%
copy from tests/providers/oracle/operators/__init__.py
copy to airflow/providers/google/suite/transfers/__init__.py
index 217e5db..13a8339 100644
--- a/tests/providers/oracle/operators/__init__.py
+++ b/airflow/providers/google/suite/transfers/__init__.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
diff --git a/airflow/providers/google/suite/operators/gcs_to_gdrive.py b/airflow/providers/google/suite/transfers/gcs_to_gdrive.py
similarity index 100%
rename from airflow/providers/google/suite/operators/gcs_to_gdrive.py
rename to airflow/providers/google/suite/transfers/gcs_to_gdrive.py
diff --git a/airflow/providers/google/suite/operators/gcs_to_sheets.py b/airflow/providers/google/suite/transfers/gcs_to_sheets.py
similarity index 100%
rename from airflow/providers/google/suite/operators/gcs_to_sheets.py
rename to airflow/providers/google/suite/transfers/gcs_to_sheets.py
diff --git a/airflow/providers/grpc/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/grpc/PROVIDERS_CHANGES_2020.05.20.md
index bef9d4e..78a4c74 100644
--- a/airflow/providers/grpc/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/grpc/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                       |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                 |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                   |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                  |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                       |
diff --git a/airflow/providers/grpc/README.md b/airflow/providers/grpc/README.md
index 5e5a477..121231d 100644
--- a/airflow/providers/grpc/README.md
+++ b/airflow/providers/grpc/README.md
@@ -86,6 +86,8 @@ All classes in Airflow 2.0 are in `airflow.providers.grpc` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -107,6 +109,7 @@ All classes in Airflow 2.0 are in `airflow.providers.grpc` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                       |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                 |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                   |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                  |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                       |
diff --git a/airflow/providers/hashicorp/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/hashicorp/PROVIDERS_CHANGES_2020.05.20.md
index fa6cd84..cd666b6 100644
--- a/airflow/providers/hashicorp/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/hashicorp/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                   |
 |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                             |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)               |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)              |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                   |
diff --git a/airflow/providers/hashicorp/README.md b/airflow/providers/hashicorp/README.md
index 8b5bad5..69ffdea 100644
--- a/airflow/providers/hashicorp/README.md
+++ b/airflow/providers/hashicorp/README.md
@@ -88,6 +88,8 @@ All classes in Airflow 2.0 are in `airflow.providers.hashicorp` package.
 
 
 
+
+
 ## Secrets
 
 
@@ -107,6 +109,7 @@ All classes in Airflow 2.0 are in `airflow.providers.hashicorp` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                   |
 |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                             |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)               |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)              |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                   |
diff --git a/airflow/providers/http/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/http/PROVIDERS_CHANGES_2020.05.20.md
index 47f1073..d6130dc 100644
--- a/airflow/providers/http/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/http/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
diff --git a/airflow/providers/http/README.md b/airflow/providers/http/README.md
index 498536e..e205fe9 100644
--- a/airflow/providers/http/README.md
+++ b/airflow/providers/http/README.md
@@ -78,6 +78,8 @@ All classes in Airflow 2.0 are in `airflow.providers.http` package.
 
 
 
+
+
 ## Sensors
 
 
@@ -111,6 +113,7 @@ All classes in Airflow 2.0 are in `airflow.providers.http` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
diff --git a/airflow/providers/imap/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/imap/PROVIDERS_CHANGES_2020.05.20.md
index 68bb8bf..e38a740 100644
--- a/airflow/providers/imap/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/imap/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                                                                                                     |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
diff --git a/airflow/providers/imap/README.md b/airflow/providers/imap/README.md
index bf02199..2f70cbb 100644
--- a/airflow/providers/imap/README.md
+++ b/airflow/providers/imap/README.md
@@ -64,6 +64,8 @@ All classes in Airflow 2.0 are in `airflow.providers.imap` package.
 
 
 
+
+
 ## Sensors
 
 
@@ -97,6 +99,7 @@ All classes in Airflow 2.0 are in `airflow.providers.imap` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                                                                                                     |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
diff --git a/airflow/providers/jdbc/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/jdbc/PROVIDERS_CHANGES_2020.05.20.md
index f332e61..6c01c42 100644
--- a/airflow/providers/jdbc/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/jdbc/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                    |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                              |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)               |
diff --git a/airflow/providers/jdbc/README.md b/airflow/providers/jdbc/README.md
index 544becc..3558dc0 100644
--- a/airflow/providers/jdbc/README.md
+++ b/airflow/providers/jdbc/README.md
@@ -84,6 +84,8 @@ All classes in Airflow 2.0 are in `airflow.providers.jdbc` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -105,6 +107,7 @@ All classes in Airflow 2.0 are in `airflow.providers.jdbc` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                    |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                              |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)               |
diff --git a/airflow/providers/jenkins/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/jenkins/PROVIDERS_CHANGES_2020.05.20.md
index 7a63064..e21875a 100644
--- a/airflow/providers/jenkins/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/jenkins/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
diff --git a/airflow/providers/jenkins/README.md b/airflow/providers/jenkins/README.md
index 70215b7..91b92c7 100644
--- a/airflow/providers/jenkins/README.md
+++ b/airflow/providers/jenkins/README.md
@@ -84,6 +84,8 @@ All classes in Airflow 2.0 are in `airflow.providers.jenkins` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -105,6 +107,7 @@ All classes in Airflow 2.0 are in `airflow.providers.jenkins` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
diff --git a/airflow/providers/jira/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/jira/PROVIDERS_CHANGES_2020.05.20.md
index ba65a7f..f48488c 100644
--- a/airflow/providers/jira/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/jira/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/jira/README.md b/airflow/providers/jira/README.md
index 93c3951..d780445 100644
--- a/airflow/providers/jira/README.md
+++ b/airflow/providers/jira/README.md
@@ -85,6 +85,8 @@ All classes in Airflow 2.0 are in `airflow.providers.jira` package.
 
 
 
+
+
 ## Sensors
 
 
@@ -119,6 +121,7 @@ All classes in Airflow 2.0 are in `airflow.providers.jira` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/microsoft/azure/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/microsoft/azure/PROVIDERS_CHANGES_2020.05.20.md
index 029b9ee..8a5afdc 100644
--- a/airflow/providers/microsoft/azure/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/microsoft/azure/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
diff --git a/airflow/providers/microsoft/azure/README.md b/airflow/providers/microsoft/azure/README.md
index 81c2724..474afe1 100644
--- a/airflow/providers/microsoft/azure/README.md
+++ b/airflow/providers/microsoft/azure/README.md
@@ -33,6 +33,8 @@ Release: 2020.5.20
     - [Operators](#operators)
         - [New operators](#new-operators)
         - [Moved operators](#moved-operators)
+    - [Transfer operators](#transfers)
+        - [Moved transfer operators](#moved-transfers)
     - [Sensors](#sensors)
         - [New sensors](#new-sensors)
         - [Moved sensors](#moved-sensors)
@@ -71,11 +73,11 @@ For full compatibility and test status of the backport packages check
 | azure-batch                  | &gt;=8.0.0            |
 | azure-cosmos                 | &gt;=3.0.1,&lt;4         |
 | azure-datalake-store         | &gt;=0.0.45           |
-| azure-kusto-data             | &gt;=0.0.43           |
+| azure-kusto-data             | &gt;=0.0.43,&lt;0.1      |
 | azure-mgmt-containerinstance | &gt;=1.5.0            |
 | azure-mgmt-datalake-store    | &gt;=0.5.0            |
 | azure-mgmt-resource          | &gt;=2.2.0            |
-| azure-storage                | &gt;=0.34.0,&lt;0.37.0   |
+| azure-storage                | &gt;=0.34.0, &lt;0.37.0  |
 | azure-storage-blob           | &lt;12.0              |
 
 ## Cross provider package dependencies
@@ -112,14 +114,25 @@ All classes in Airflow 2.0 are in `airflow.providers.microsoft.azure` package.
 
 ### Moved operators
 
-| Airflow 2.0 operators: `airflow.providers.microsoft.azure` package                                                                                                                                                    | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                                                      |
-|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.adls_list.AzureDataLakeStorageListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/adls_list.py)                                                        | [contrib.operators.adls_list_operator.AzureDataLakeStorageListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/adls_list_operator.py)                                      |
-| [operators.azure_container_instances.AzureContainerInstancesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/azure_container_instances.py)                         | [contrib.operators.azure_container_instances_operator.AzureContainerInstancesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/azure_container_instances_operator.py)       |
-| [operators.azure_cosmos.AzureCosmosInsertDocumentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/azure_cosmos.py)                                                 | [contrib.operators.azure_cosmos_operator.AzureCosmosInsertDocumentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/azure_cosmos_operator.py)                               |
-| [operators.file_to_wasb.FileToWasbOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/file_to_wasb.py)                                                                | [contrib.operators.file_to_wasb.FileToWasbOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/file_to_wasb.py)                                                                |
-| [operators.oracle_to_azure_data_lake_transfer.OracleToAzureDataLakeTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/oracle_to_azure_data_lake_transfer.py) | [contrib.operators.oracle_to_azure_data_lake_transfer.OracleToAzureDataLakeTransferOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py) |
-| [operators.wasb_delete_blob.WasbDeleteBlobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py)                                                    | [contrib.operators.wasb_delete_blob_operator.WasbDeleteBlobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/wasb_delete_blob_operator.py)                                  |
+| Airflow 2.0 operators: `airflow.providers.microsoft.azure` package                                                                                                                            | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                                                |
+|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [operators.adls_list.AzureDataLakeStorageListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/adls_list.py)                                | [contrib.operators.adls_list_operator.AzureDataLakeStorageListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/adls_list_operator.py)                                |
+| [operators.azure_container_instances.AzureContainerInstancesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/azure_container_instances.py) | [contrib.operators.azure_container_instances_operator.AzureContainerInstancesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/azure_container_instances_operator.py) |
+| [operators.azure_cosmos.AzureCosmosInsertDocumentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/azure_cosmos.py)                         | [contrib.operators.azure_cosmos_operator.AzureCosmosInsertDocumentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/azure_cosmos_operator.py)                         |
+| [operators.wasb_delete_blob.WasbDeleteBlobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py)                            | [contrib.operators.wasb_delete_blob_operator.WasbDeleteBlobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/wasb_delete_blob_operator.py)                            |
+
+
+
+
+
+
+
+### Moved transfer operators
+
+| Airflow 2.0 transfers: `airflow.providers.microsoft.azure` package                                                                                                                          | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                                              |
+|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [transfers.file_to_wasb.FileToWasbOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/transfers/file_to_wasb.py)                                      | [contrib.operators.file_to_wasb.FileToWasbOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/file_to_wasb.py)                                                        |
+| [transfers.oracle_to_azure_data_lake.OracleToAzureDataLakeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py) | [contrib.operators.oracle_to_azure_data_lake_transfer.OracleToAzureDataLakeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py) |
 
 
 
@@ -177,6 +190,7 @@ All classes in Airflow 2.0 are in `airflow.providers.microsoft.azure` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
diff --git a/tests/providers/oracle/operators/__init__.py b/airflow/providers/microsoft/azure/transfers/__init__.py
similarity index 99%
copy from tests/providers/oracle/operators/__init__.py
copy to airflow/providers/microsoft/azure/transfers/__init__.py
index 217e5db..13a8339 100644
--- a/tests/providers/oracle/operators/__init__.py
+++ b/airflow/providers/microsoft/azure/transfers/__init__.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
diff --git a/airflow/providers/microsoft/azure/operators/file_to_wasb.py b/airflow/providers/microsoft/azure/transfers/file_to_wasb.py
similarity index 100%
rename from airflow/providers/microsoft/azure/operators/file_to_wasb.py
rename to airflow/providers/microsoft/azure/transfers/file_to_wasb.py
diff --git a/airflow/providers/microsoft/azure/operators/oracle_to_azure_data_lake_transfer.py b/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py
similarity index 98%
rename from airflow/providers/microsoft/azure/operators/oracle_to_azure_data_lake_transfer.py
rename to airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py
index 5f59030..11013d5 100644
--- a/airflow/providers/microsoft/azure/operators/oracle_to_azure_data_lake_transfer.py
+++ b/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py
@@ -27,7 +27,7 @@ from airflow.providers.oracle.hooks.oracle import OracleHook
 from airflow.utils.decorators import apply_defaults
 
 
-class OracleToAzureDataLakeTransferOperator(BaseOperator):
+class OracleToAzureDataLakeOperator(BaseOperator):
     """
     Moves data from Oracle to Azure Data Lake. The operator runs the query against
     Oracle and stores the file locally before loading it into Azure Data Lake.
diff --git a/airflow/providers/microsoft/mssql/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/microsoft/mssql/PROVIDERS_CHANGES_2020.05.20.md
index d942c5b..aef3d8b 100644
--- a/airflow/providers/microsoft/mssql/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/microsoft/mssql/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                                                                                                     |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
diff --git a/airflow/providers/microsoft/mssql/README.md b/airflow/providers/microsoft/mssql/README.md
index 32cc3fc..d76b174 100644
--- a/airflow/providers/microsoft/mssql/README.md
+++ b/airflow/providers/microsoft/mssql/README.md
@@ -100,6 +100,8 @@ All classes in Airflow 2.0 are in `airflow.providers.microsoft.mssql` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -121,6 +123,7 @@ All classes in Airflow 2.0 are in `airflow.providers.microsoft.mssql` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                                                                                                     |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
diff --git a/airflow/providers/microsoft/winrm/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/microsoft/winrm/PROVIDERS_CHANGES_2020.05.20.md
index dc9ea5e..518dcbc 100644
--- a/airflow/providers/microsoft/winrm/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/microsoft/winrm/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                          |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                    |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                      |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                     |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                          |
diff --git a/airflow/providers/microsoft/winrm/README.md b/airflow/providers/microsoft/winrm/README.md
index d760db8..036a1fd 100644
--- a/airflow/providers/microsoft/winrm/README.md
+++ b/airflow/providers/microsoft/winrm/README.md
@@ -84,6 +84,8 @@ All classes in Airflow 2.0 are in `airflow.providers.microsoft.winrm` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -105,6 +107,7 @@ All classes in Airflow 2.0 are in `airflow.providers.microsoft.winrm` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                          |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                    |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                      |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                     |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                          |
diff --git a/airflow/providers/mongo/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/mongo/PROVIDERS_CHANGES_2020.05.20.md
index 1419261..c2162a1 100644
--- a/airflow/providers/mongo/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/mongo/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/mongo/README.md b/airflow/providers/mongo/README.md
index 13f3042..870398a 100644
--- a/airflow/providers/mongo/README.md
+++ b/airflow/providers/mongo/README.md
@@ -72,6 +72,8 @@ All classes in Airflow 2.0 are in `airflow.providers.mongo` package.
 
 
 
+
+
 ## Sensors
 
 
@@ -105,6 +107,7 @@ All classes in Airflow 2.0 are in `airflow.providers.mongo` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/mysql/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/mysql/PROVIDERS_CHANGES_2020.05.20.md
index 6af033e..5bc13ed 100644
--- a/airflow/providers/mysql/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/mysql/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                     |
 |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                               |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                 |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                              |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                |
diff --git a/airflow/providers/mysql/README.md b/airflow/providers/mysql/README.md
index fb57a3a..4d722d3 100644
--- a/airflow/providers/mysql/README.md
+++ b/airflow/providers/mysql/README.md
@@ -31,8 +31,10 @@ Release: 2020.5.20
 - [Cross provider package dependencies](#cross-provider-package-dependencies)
 - [Provider class summary](#provider-class-summary)
     - [Operators](#operators)
-        - [New operators](#new-operators)
         - [Moved operators](#moved-operators)
+    - [Transfer operators](#transfers)
+        - [New transfer operators](#new-transfers)
+        - [Moved transfer operators](#moved-transfers)
     - [Hooks](#hooks)
         - [Moved hooks](#moved-hooks)
 - [Releases](#releases)
@@ -92,21 +94,32 @@ All classes in Airflow 2.0 are in `airflow.providers.mysql` package.
 ## Operators
 
 
-### New operators
 
-| New Airflow 2.0 operators: `airflow.providers.mysql` package                                                                                      |
-|:--------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.s3_to_mysql.S3ToMySqlTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/operators/s3_to_mysql.py) |
 
+### Moved operators
 
+| Airflow 2.0 operators: `airflow.providers.mysql` package                                                                  | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                      |
+|:--------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------|
+| [operators.mysql.MySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/operators/mysql.py) | [operators.mysql_operator.MySqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mysql_operator.py) |
 
-### Moved operators
 
-| Airflow 2.0 operators: `airflow.providers.mysql` package                                                                                                         | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                   |
-|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.mysql.MySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/operators/mysql.py)                                        | [operators.mysql_operator.MySqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mysql_operator.py)                              |
-| [operators.presto_to_mysql.PrestoToMySqlTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/operators/presto_to_mysql.py)    | [operators.presto_to_mysql.PrestoToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/presto_to_mysql.py)                    |
-| [operators.vertica_to_mysql.VerticaToMySqlTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/operators/vertica_to_mysql.py) | [contrib.operators.vertica_to_mysql.VerticaToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/vertica_to_mysql.py) |
+
+
+
+### New transfer operators
+
+| New Airflow 2.0 transfers: `airflow.providers.mysql` package                                                                              |
+|:------------------------------------------------------------------------------------------------------------------------------------------|
+| [transfers.s3_to_mysql.S3ToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/transfers/s3_to_mysql.py) |
+
+
+
+### Moved transfer operators
+
+| Airflow 2.0 transfers: `airflow.providers.mysql` package                                                                                                 | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                   |
+|:---------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [transfers.presto_to_mysql.PrestoToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/transfers/presto_to_mysql.py)    | [operators.presto_to_mysql.PrestoToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/presto_to_mysql.py)                    |
+| [transfers.vertica_to_mysql.VerticaToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/transfers/vertica_to_mysql.py) | [contrib.operators.vertica_to_mysql.VerticaToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/vertica_to_mysql.py) |
 
 
 
@@ -133,6 +146,7 @@ All classes in Airflow 2.0 are in `airflow.providers.mysql` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                     |
 |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                               |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                 |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                              |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                |
diff --git a/tests/providers/oracle/operators/__init__.py b/airflow/providers/mysql/transfers/__init__.py
similarity index 99%
copy from tests/providers/oracle/operators/__init__.py
copy to airflow/providers/mysql/transfers/__init__.py
index 217e5db..13a8339 100644
--- a/tests/providers/oracle/operators/__init__.py
+++ b/airflow/providers/mysql/transfers/__init__.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
diff --git a/airflow/providers/mysql/operators/presto_to_mysql.py b/airflow/providers/mysql/transfers/presto_to_mysql.py
similarity index 98%
rename from airflow/providers/mysql/operators/presto_to_mysql.py
rename to airflow/providers/mysql/transfers/presto_to_mysql.py
index c727fb9..156ff31 100644
--- a/airflow/providers/mysql/operators/presto_to_mysql.py
+++ b/airflow/providers/mysql/transfers/presto_to_mysql.py
@@ -23,7 +23,7 @@ from airflow.providers.presto.hooks.presto import PrestoHook
 from airflow.utils.decorators import apply_defaults
 
 
-class PrestoToMySqlTransferOperator(BaseOperator):
+class PrestoToMySqlOperator(BaseOperator):
     """
     Moves data from Presto to MySQL, note that for now the data is loaded
     into memory before being pushed to MySQL, so this operator should
diff --git a/airflow/providers/mysql/operators/s3_to_mysql.py b/airflow/providers/mysql/transfers/s3_to_mysql.py
similarity index 98%
rename from airflow/providers/mysql/operators/s3_to_mysql.py
rename to airflow/providers/mysql/transfers/s3_to_mysql.py
index e8a3415..53cb144 100644
--- a/airflow/providers/mysql/operators/s3_to_mysql.py
+++ b/airflow/providers/mysql/transfers/s3_to_mysql.py
@@ -24,7 +24,7 @@ from airflow.providers.mysql.hooks.mysql import MySqlHook
 from airflow.utils.decorators import apply_defaults
 
 
-class S3ToMySqlTransferOperator(BaseOperator):
+class S3ToMySqlOperator(BaseOperator):
     """
     Loads a file from S3 into a MySQL table.
 
diff --git a/airflow/providers/mysql/operators/vertica_to_mysql.py b/airflow/providers/mysql/transfers/vertica_to_mysql.py
similarity index 99%
rename from airflow/providers/mysql/operators/vertica_to_mysql.py
rename to airflow/providers/mysql/transfers/vertica_to_mysql.py
index 7e952b8..0e6b8a6 100644
--- a/airflow/providers/mysql/operators/vertica_to_mysql.py
+++ b/airflow/providers/mysql/transfers/vertica_to_mysql.py
@@ -28,7 +28,7 @@ from airflow.providers.vertica.hooks.vertica import VerticaHook
 from airflow.utils.decorators import apply_defaults
 
 
-class VerticaToMySqlTransferOperator(BaseOperator):
+class VerticaToMySqlOperator(BaseOperator):
     """
     Moves data from Vertica to MySQL.
 
diff --git a/airflow/providers/odbc/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/odbc/PROVIDERS_CHANGES_2020.05.20.md
index b7c4c76..658a3d1 100644
--- a/airflow/providers/odbc/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/odbc/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/odbc/README.md b/airflow/providers/odbc/README.md
index bda579a..ae79968 100644
--- a/airflow/providers/odbc/README.md
+++ b/airflow/providers/odbc/README.md
@@ -70,6 +70,8 @@ All classes in Airflow 2.0 are in `airflow.providers.odbc` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -91,6 +93,7 @@ All classes in Airflow 2.0 are in `airflow.providers.odbc` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/openfaas/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/openfaas/PROVIDERS_CHANGES_2020.05.20.md
index 077e678..95d1ad8 100644
--- a/airflow/providers/openfaas/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/openfaas/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/openfaas/README.md b/airflow/providers/openfaas/README.md
index abcc8e7..b44dc99 100644
--- a/airflow/providers/openfaas/README.md
+++ b/airflow/providers/openfaas/README.md
@@ -63,6 +63,8 @@ All classes in Airflow 2.0 are in `airflow.providers.openfaas` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -84,6 +86,7 @@ All classes in Airflow 2.0 are in `airflow.providers.openfaas` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/opsgenie/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/opsgenie/PROVIDERS_CHANGES_2020.05.20.md
index 78bd5ae..667607a 100644
--- a/airflow/providers/opsgenie/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/opsgenie/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
diff --git a/airflow/providers/opsgenie/README.md b/airflow/providers/opsgenie/README.md
index 8f27ed8..f41652f 100644
--- a/airflow/providers/opsgenie/README.md
+++ b/airflow/providers/opsgenie/README.md
@@ -93,6 +93,8 @@ All classes in Airflow 2.0 are in `airflow.providers.opsgenie` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -114,6 +116,7 @@ All classes in Airflow 2.0 are in `airflow.providers.opsgenie` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                                                                                                            |
diff --git a/airflow/providers/oracle/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/oracle/PROVIDERS_CHANGES_2020.05.20.md
index dd65bf5..9a20422 100644
--- a/airflow/providers/oracle/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/oracle/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                     |
 |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                               |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                 |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                              |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                |
diff --git a/airflow/providers/oracle/README.md b/airflow/providers/oracle/README.md
index 6281383..35de449 100644
--- a/airflow/providers/oracle/README.md
+++ b/airflow/providers/oracle/README.md
@@ -31,6 +31,8 @@ Release: 2020.5.20
 - [Provider class summary](#provider-class-summary)
     - [Operators](#operators)
         - [Moved operators](#moved-operators)
+    - [Transfer operators](#transfers)
+        - [Moved transfer operators](#moved-transfers)
     - [Hooks](#hooks)
         - [Moved hooks](#moved-hooks)
 - [Releases](#releases)
@@ -76,10 +78,21 @@ All classes in Airflow 2.0 are in `airflow.providers.oracle` package.
 
 ### Moved operators
 
-| Airflow 2.0 operators: `airflow.providers.oracle` package                                                                                                                           | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                     |
-|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.oracle.OracleOperator](https://github.com/apache/airflow/blob/master/airflow/providers/oracle/operators/oracle.py)                                                       | [operators.oracle_operator.OracleOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/oracle_operator.py)                                             |
-| [operators.oracle_to_oracle_transfer.OracleToOracleTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/oracle/operators/oracle_to_oracle_transfer.py) | [contrib.operators.oracle_to_oracle_transfer.OracleToOracleTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/oracle_to_oracle_transfer.py) |
+| Airflow 2.0 operators: `airflow.providers.oracle` package                                                                     | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                         |
+|:------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------|
+| [operators.oracle.OracleOperator](https://github.com/apache/airflow/blob/master/airflow/providers/oracle/operators/oracle.py) | [operators.oracle_operator.OracleOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/oracle_operator.py) |
+
+
+
+
+
+
+
+### Moved transfer operators
+
+| Airflow 2.0 transfers: `airflow.providers.oracle` package                                                                                                 | Airflow 1.10.* previous location (usually `airflow.contrib`)                                                                                                                     |
+|:----------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [transfers.oracle_to_oracle.OracleToOracleOperator](https://github.com/apache/airflow/blob/master/airflow/providers/oracle/transfers/oracle_to_oracle.py) | [contrib.operators.oracle_to_oracle_transfer.OracleToOracleTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/oracle_to_oracle_transfer.py) |
 
 
 
@@ -106,6 +119,7 @@ All classes in Airflow 2.0 are in `airflow.providers.oracle` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                     |
 |:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                               |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                 |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                              |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                |
diff --git a/tests/providers/oracle/operators/__init__.py b/airflow/providers/oracle/transfers/__init__.py
similarity index 99%
copy from tests/providers/oracle/operators/__init__.py
copy to airflow/providers/oracle/transfers/__init__.py
index 217e5db..13a8339 100644
--- a/tests/providers/oracle/operators/__init__.py
+++ b/airflow/providers/oracle/transfers/__init__.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
diff --git a/airflow/providers/oracle/operators/oracle_to_oracle_transfer.py b/airflow/providers/oracle/transfers/oracle_to_oracle.py
similarity index 98%
rename from airflow/providers/oracle/operators/oracle_to_oracle_transfer.py
rename to airflow/providers/oracle/transfers/oracle_to_oracle.py
index 7973e4a..6f845d3 100644
--- a/airflow/providers/oracle/operators/oracle_to_oracle_transfer.py
+++ b/airflow/providers/oracle/transfers/oracle_to_oracle.py
@@ -21,7 +21,7 @@ from airflow.providers.oracle.hooks.oracle import OracleHook
 from airflow.utils.decorators import apply_defaults
 
 
-class OracleToOracleTransferOperator(BaseOperator):
+class OracleToOracleOperator(BaseOperator):
     """
     Moves data from Oracle to Oracle.
 
diff --git a/airflow/providers/pagerduty/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/pagerduty/PROVIDERS_CHANGES_2020.05.20.md
index eff10ce..0a2fbd1 100644
--- a/airflow/providers/pagerduty/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/pagerduty/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                  |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                            |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)              |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                           |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)             |
diff --git a/airflow/providers/pagerduty/README.md b/airflow/providers/pagerduty/README.md
index 71dd9c8..7f967a9 100644
--- a/airflow/providers/pagerduty/README.md
+++ b/airflow/providers/pagerduty/README.md
@@ -70,6 +70,8 @@ All classes in Airflow 2.0 are in `airflow.providers.pagerduty` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -91,6 +93,7 @@ All classes in Airflow 2.0 are in `airflow.providers.pagerduty` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                  |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                            |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)              |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                           |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)             |
diff --git a/airflow/providers/postgres/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/postgres/PROVIDERS_CHANGES_2020.05.20.md
index 7fdb7ae..4acd97c 100644
--- a/airflow/providers/postgres/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/postgres/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                  |
 |:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                            |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                              |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                           |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                             |
diff --git a/airflow/providers/postgres/README.md b/airflow/providers/postgres/README.md
index 66b2ca9..6bdf413 100644
--- a/airflow/providers/postgres/README.md
+++ b/airflow/providers/postgres/README.md
@@ -100,6 +100,8 @@ All classes in Airflow 2.0 are in `airflow.providers.postgres` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -121,6 +123,7 @@ All classes in Airflow 2.0 are in `airflow.providers.postgres` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                  |
 |:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                            |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                              |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                           |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                             |
diff --git a/airflow/providers/presto/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/presto/PROVIDERS_CHANGES_2020.05.20.md
index 45bca41..c7a4f78 100644
--- a/airflow/providers/presto/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/presto/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/presto/README.md b/airflow/providers/presto/README.md
index 689a51b..dd3ac0d 100644
--- a/airflow/providers/presto/README.md
+++ b/airflow/providers/presto/README.md
@@ -70,6 +70,8 @@ All classes in Airflow 2.0 are in `airflow.providers.presto` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -91,6 +93,7 @@ All classes in Airflow 2.0 are in `airflow.providers.presto` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/qubole/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/qubole/PROVIDERS_CHANGES_2020.05.20.md
index 7d7c8bd..b44865f 100644
--- a/airflow/providers/qubole/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/qubole/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                                                                                                     |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
diff --git a/airflow/providers/qubole/README.md b/airflow/providers/qubole/README.md
index eb57f8a..5d18994 100644
--- a/airflow/providers/qubole/README.md
+++ b/airflow/providers/qubole/README.md
@@ -87,6 +87,8 @@ All classes in Airflow 2.0 are in `airflow.providers.qubole` package.
 
 
 
+
+
 ## Sensors
 
 
@@ -123,6 +125,7 @@ All classes in Airflow 2.0 are in `airflow.providers.qubole` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                                                                                                     |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
diff --git a/airflow/providers/redis/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/redis/PROVIDERS_CHANGES_2020.05.20.md
index 131848f..70a4104 100644
--- a/airflow/providers/redis/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/redis/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                          |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/redis/README.md b/airflow/providers/redis/README.md
index 83d92c0..98bbeb3 100644
--- a/airflow/providers/redis/README.md
+++ b/airflow/providers/redis/README.md
@@ -85,6 +85,8 @@ All classes in Airflow 2.0 are in `airflow.providers.redis` package.
 
 
 
+
+
 ## Sensors
 
 
@@ -119,6 +121,7 @@ All classes in Airflow 2.0 are in `airflow.providers.redis` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                          |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/salesforce/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/salesforce/PROVIDERS_CHANGES_2020.05.20.md
index e3786ff..bf54e1f 100644
--- a/airflow/providers/salesforce/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/salesforce/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                          |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                                   |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                      |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                   |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                     |
diff --git a/airflow/providers/salesforce/README.md b/airflow/providers/salesforce/README.md
index 00307f3..20bcf6e 100644
--- a/airflow/providers/salesforce/README.md
+++ b/airflow/providers/salesforce/README.md
@@ -86,6 +86,8 @@ All classes in Airflow 2.0 are in `airflow.providers.salesforce` package.
 
 
 
+
+
 ## Sensors
 
 
@@ -125,6 +127,7 @@ All classes in Airflow 2.0 are in `airflow.providers.salesforce` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                          |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                                   |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                      |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                   |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                     |
diff --git a/airflow/providers/samba/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/samba/PROVIDERS_CHANGES_2020.05.20.md
index 752fed9..cd65a92 100644
--- a/airflow/providers/samba/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/samba/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                          |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/samba/README.md b/airflow/providers/samba/README.md
index 50253ed..a17a54e 100644
--- a/airflow/providers/samba/README.md
+++ b/airflow/providers/samba/README.md
@@ -70,6 +70,8 @@ All classes in Airflow 2.0 are in `airflow.providers.samba` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -91,6 +93,7 @@ All classes in Airflow 2.0 are in `airflow.providers.samba` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                          |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                 |
diff --git a/airflow/providers/segment/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/segment/PROVIDERS_CHANGES_2020.05.20.md
index a9c72ea..9c1eac6 100644
--- a/airflow/providers/segment/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/segment/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                  |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)              |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)             |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                  |
diff --git a/airflow/providers/segment/README.md b/airflow/providers/segment/README.md
index fb5e31c..66cecd9 100644
--- a/airflow/providers/segment/README.md
+++ b/airflow/providers/segment/README.md
@@ -84,6 +84,8 @@ All classes in Airflow 2.0 are in `airflow.providers.segment` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -105,6 +107,7 @@ All classes in Airflow 2.0 are in `airflow.providers.segment` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                  |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)              |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)             |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                  |
diff --git a/airflow/providers/sftp/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/sftp/PROVIDERS_CHANGES_2020.05.20.md
index 2cad633..a59e463 100644
--- a/airflow/providers/sftp/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/sftp/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                    |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                             |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)               |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                    |
diff --git a/airflow/providers/sftp/README.md b/airflow/providers/sftp/README.md
index 2121228..689c984 100644
--- a/airflow/providers/sftp/README.md
+++ b/airflow/providers/sftp/README.md
@@ -103,6 +103,8 @@ All classes in Airflow 2.0 are in `airflow.providers.sftp` package.
 
 
 
+
+
 ## Sensors
 
 
@@ -136,6 +138,7 @@ All classes in Airflow 2.0 are in `airflow.providers.sftp` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                    |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                             |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)               |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                    |
diff --git a/airflow/providers/singularity/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/singularity/PROVIDERS_CHANGES_2020.05.20.md
index d310578..ead7f4d 100644
--- a/airflow/providers/singularity/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/singularity/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                          |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                                   |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                      |
 | [e742ef7c7](https://github.com/apache/airflow/commit/e742ef7c704c18bf69b7a7235adb7f75e742f902) | 2020-05-23  | Fix typo in test_project_structure (#8978)                                       |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                     |
diff --git a/airflow/providers/singularity/README.md b/airflow/providers/singularity/README.md
index 9b1d8f8..81fc560 100644
--- a/airflow/providers/singularity/README.md
+++ b/airflow/providers/singularity/README.md
@@ -86,12 +86,15 @@ All classes in Airflow 2.0 are in `airflow.providers.singularity` package.
 
 
 
+
+
 ## Releases
 
 ### Release 2020.5.20
 
 | Commit                                                                                         | Committed   | Subject                                                                          |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                                   |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                      |
 | [e742ef7c7](https://github.com/apache/airflow/commit/e742ef7c704c18bf69b7a7235adb7f75e742f902) | 2020-05-23  | Fix typo in test_project_structure (#8978)                                       |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                     |
diff --git a/airflow/providers/slack/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/slack/PROVIDERS_CHANGES_2020.05.20.md
index f55384a..755504b 100644
--- a/airflow/providers/slack/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/slack/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,8 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                       |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------|
+| [5cf46fad1](https://github.com/apache/airflow/commit/5cf46fad1e0a9cdde213258b2064e16d30d3160e) | 2020-05-29  | Add SlackAPIFileOperator implementing files.upload from Slack API (#9004)     |
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                                |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                   |
 | [427257c2e](https://github.com/apache/airflow/commit/427257c2e2ffc886ef9f516e9c4d015a4ede9bbd) | 2020-05-24  | Remove defunct code from setup.py (#8982)                                     |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                  |
diff --git a/airflow/providers/slack/README.md b/airflow/providers/slack/README.md
index 36484db..6f8004c 100644
--- a/airflow/providers/slack/README.md
+++ b/airflow/providers/slack/README.md
@@ -31,6 +31,7 @@ Release: 2020.5.20
 - [Cross provider package dependencies](#cross-provider-package-dependencies)
 - [Provider class summary](#provider-class-summary)
     - [Operators](#operators)
+        - [New operators](#new-operators)
         - [Moved operators](#moved-operators)
     - [Hooks](#hooks)
         - [Moved hooks](#moved-hooks)
@@ -88,6 +89,12 @@ All classes in Airflow 2.0 are in `airflow.providers.slack` package.
 ## Operators
 
 
+### New operators
+
+| New Airflow 2.0 operators: `airflow.providers.slack` package                                                                     |
+|:---------------------------------------------------------------------------------------------------------------------------------|
+| [operators.slack.SlackAPIFileOperator](https://github.com/apache/airflow/blob/master/airflow/providers/slack/operators/slack.py) |
+
 
 
 ### Moved operators
@@ -102,6 +109,8 @@ All classes in Airflow 2.0 are in `airflow.providers.slack` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -124,6 +133,8 @@ All classes in Airflow 2.0 are in `airflow.providers.slack` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                       |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------|
+| [5cf46fad1](https://github.com/apache/airflow/commit/5cf46fad1e0a9cdde213258b2064e16d30d3160e) | 2020-05-29  | Add SlackAPIFileOperator implementing files.upload from Slack API (#9004)     |
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                                |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                   |
 | [427257c2e](https://github.com/apache/airflow/commit/427257c2e2ffc886ef9f516e9c4d015a4ede9bbd) | 2020-05-24  | Remove defunct code from setup.py (#8982)                                     |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                  |
diff --git a/airflow/providers/snowflake/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/snowflake/PROVIDERS_CHANGES_2020.05.20.md
index 992d413..a5e944a 100644
--- a/airflow/providers/snowflake/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/snowflake/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,8 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [1c9374d25](https://github.com/apache/airflow/commit/1c9374d2573483dd66f5c35032e24140864e72c0) | 2020-06-03  | Add snowflake to slack operator (#9023)                                 |
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                          |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                          |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
diff --git a/airflow/providers/snowflake/README.md b/airflow/providers/snowflake/README.md
index b800a87..1dd193a 100644
--- a/airflow/providers/snowflake/README.md
+++ b/airflow/providers/snowflake/README.md
@@ -28,9 +28,12 @@ Release: 2020.5.20
 - [Installation](#installation)
 - [Compatibility](#compatibility)
 - [PIP requirements](#pip-requirements)
+- [Cross provider package dependencies](#cross-provider-package-dependencies)
 - [Provider class summary](#provider-class-summary)
     - [Operators](#operators)
         - [New operators](#new-operators)
+    - [Transfer operators](#transfers)
+        - [New transfer operators](#new-transfers)
     - [Hooks](#hooks)
         - [New hooks](#new-hooks)
 - [Releases](#releases)
@@ -65,6 +68,21 @@ For full compatibility and test status of the backport packages check
 | snowflake-connector-python | &gt;=1.5.2            |
 | snowflake-sqlalchemy       | &gt;=1.1.0            |
 
+## Cross provider package dependencies
+
+Those are dependencies that might be needed in order to use all the features of the package.
+You need to install the specified backport providers package in order to use them.
+
+You can install such cross-provider dependencies when installing from PyPI. For example:
+
+```bash
+pip install apache-airflow-backport-providers-snowflake[slack]
+```
+
+| Dependent package                                                                                                | Extra   |
+|:-----------------------------------------------------------------------------------------------------------------|:--------|
+| [apache-airflow-backport-providers-slack](https://github.com/apache/airflow/tree/master/airflow/providers/slack) | slack   |
+
 # Provider class summary
 
 All classes in Airflow 2.0 are in `airflow.providers.snowflake` package.
@@ -75,10 +93,22 @@ All classes in Airflow 2.0 are in `airflow.providers.snowflake` package.
 
 ### New operators
 
-| New Airflow 2.0 operators: `airflow.providers.snowflake` package                                                                                                  |
-|:------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [operators.s3_to_snowflake.S3ToSnowflakeTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/operators/s3_to_snowflake.py) |
-| [operators.snowflake.SnowflakeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/operators/snowflake.py)                         |
+| New Airflow 2.0 operators: `airflow.providers.snowflake` package                                                                          |
+|:------------------------------------------------------------------------------------------------------------------------------------------|
+| [operators.snowflake.SnowflakeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/operators/snowflake.py) |
+
+
+
+
+
+
+
+### New transfer operators
+
+| New Airflow 2.0 transfers: `airflow.providers.snowflake` package                                                                                                   |
+|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [transfers.s3_to_snowflake.S3ToSnowflakeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/transfers/s3_to_snowflake.py)          |
+| [transfers.snowflake_to_slack.SnowflakeToSlackOperator](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/transfers/snowflake_to_slack.py) |
 
 
 
@@ -107,6 +137,8 @@ All classes in Airflow 2.0 are in `airflow.providers.snowflake` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [1c9374d25](https://github.com/apache/airflow/commit/1c9374d2573483dd66f5c35032e24140864e72c0) | 2020-06-03  | Add snowflake to slack operator (#9023)                                 |
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 release of backports (#9026)                          |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                          |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
diff --git a/airflow/providers/snowflake/example_dags/example_snowflake.py b/airflow/providers/snowflake/example_dags/example_snowflake.py
index 944efad..7026101 100644
--- a/airflow/providers/snowflake/example_dags/example_snowflake.py
+++ b/airflow/providers/snowflake/example_dags/example_snowflake.py
@@ -21,9 +21,9 @@ Example use of Snowflake related operators.
 import os
 
 from airflow import DAG
-from airflow.providers.snowflake.operators.s3_to_snowflake import S3ToSnowflakeTransferOperator
 from airflow.providers.snowflake.operators.snowflake import SnowflakeOperator
-from airflow.providers.snowflake.operators.snowflake_to_slack import SnowflakeToSlackOperator
+from airflow.providers.snowflake.transfers.s3_to_snowflake import S3ToSnowflakeOperator
+from airflow.providers.snowflake.transfers.snowflake_to_slack import SnowflakeToSlackOperator
 from airflow.utils.dates import days_ago
 
 SNOWFLAKE_CONN_ID = os.environ.get('SNOWFLAKE_CONN_ID', 'snowflake_default')
@@ -76,7 +76,7 @@ create_table = SnowflakeOperator(
     dag=dag,
 )
 
-copy_into_table = S3ToSnowflakeTransferOperator(
+copy_into_table = S3ToSnowflakeOperator(
     task_id='copy_into_table',
     snowflake_conn_id=SNOWFLAKE_CONN_ID,
     s3_keys=[SNOWFLAKE_LOAD_JSON_PATH],
diff --git a/tests/providers/oracle/operators/__init__.py b/airflow/providers/snowflake/transfers/__init__.py
similarity index 99%
copy from tests/providers/oracle/operators/__init__.py
copy to airflow/providers/snowflake/transfers/__init__.py
index 217e5db..13a8339 100644
--- a/tests/providers/oracle/operators/__init__.py
+++ b/airflow/providers/snowflake/transfers/__init__.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
diff --git a/airflow/providers/snowflake/operators/s3_to_snowflake.py b/airflow/providers/snowflake/transfers/s3_to_snowflake.py
similarity index 98%
rename from airflow/providers/snowflake/operators/s3_to_snowflake.py
rename to airflow/providers/snowflake/transfers/s3_to_snowflake.py
index 4f2cc81..bbd6192 100644
--- a/airflow/providers/snowflake/operators/s3_to_snowflake.py
+++ b/airflow/providers/snowflake/transfers/s3_to_snowflake.py
@@ -25,7 +25,7 @@ from airflow.providers.snowflake.hooks.snowflake import SnowflakeHook
 from airflow.utils.decorators import apply_defaults
 
 
-class S3ToSnowflakeTransferOperator(BaseOperator):
+class S3ToSnowflakeOperator(BaseOperator):
     """
     Executes an COPY command to load files from s3 to Snowflake
 
diff --git a/airflow/providers/snowflake/operators/snowflake_to_slack.py b/airflow/providers/snowflake/transfers/snowflake_to_slack.py
similarity index 100%
rename from airflow/providers/snowflake/operators/snowflake_to_slack.py
rename to airflow/providers/snowflake/transfers/snowflake_to_slack.py
diff --git a/airflow/providers/sqlite/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/sqlite/PROVIDERS_CHANGES_2020.05.20.md
index f22f50d..0260584 100644
--- a/airflow/providers/sqlite/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/sqlite/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                          |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
diff --git a/airflow/providers/sqlite/README.md b/airflow/providers/sqlite/README.md
index 1bfed7c..7b88ffe 100644
--- a/airflow/providers/sqlite/README.md
+++ b/airflow/providers/sqlite/README.md
@@ -77,6 +77,8 @@ All classes in Airflow 2.0 are in `airflow.providers.sqlite` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -98,6 +100,7 @@ All classes in Airflow 2.0 are in `airflow.providers.sqlite` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                 |
 |:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                           |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)             |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                          |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)            |
diff --git a/airflow/providers/ssh/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/ssh/PROVIDERS_CHANGES_2020.05.20.md
index e1bfc46..8322cdd 100644
--- a/airflow/providers/ssh/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/ssh/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                    |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                              |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)               |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                    |
diff --git a/airflow/providers/ssh/README.md b/airflow/providers/ssh/README.md
index b936eb0..92bb81c 100644
--- a/airflow/providers/ssh/README.md
+++ b/airflow/providers/ssh/README.md
@@ -86,6 +86,8 @@ All classes in Airflow 2.0 are in `airflow.providers.ssh` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -107,6 +109,7 @@ All classes in Airflow 2.0 are in `airflow.providers.ssh` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                    |
 |:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                              |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)               |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                    |
diff --git a/airflow/providers/vertica/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/vertica/PROVIDERS_CHANGES_2020.05.20.md
index 14789c6..50c95df 100644
--- a/airflow/providers/vertica/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/vertica/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                  |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                            |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)              |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)             |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                  |
diff --git a/airflow/providers/vertica/README.md b/airflow/providers/vertica/README.md
index 7b4e136..90c86d9 100644
--- a/airflow/providers/vertica/README.md
+++ b/airflow/providers/vertica/README.md
@@ -84,6 +84,8 @@ All classes in Airflow 2.0 are in `airflow.providers.vertica` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -105,6 +107,7 @@ All classes in Airflow 2.0 are in `airflow.providers.vertica` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                  |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                            |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)              |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)             |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                  |
diff --git a/airflow/providers/yandex/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/yandex/PROVIDERS_CHANGES_2020.05.20.md
index 6a0606f..b1a69b7 100644
--- a/airflow/providers/yandex/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/yandex/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                                                                                                     |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
diff --git a/airflow/providers/yandex/README.md b/airflow/providers/yandex/README.md
index 72b8f2d..d5ab9f1 100644
--- a/airflow/providers/yandex/README.md
+++ b/airflow/providers/yandex/README.md
@@ -89,6 +89,8 @@ All classes in Airflow 2.0 are in `airflow.providers.yandex` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -111,6 +113,7 @@ All classes in Airflow 2.0 are in `airflow.providers.yandex` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                                                                                                            |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                                                                                                                      |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)                                                                                                        |
 | [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23  | Fix references in docs (#8984)                                                                                                                                     |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)                                                                                                       |
diff --git a/airflow/providers/zendesk/PROVIDERS_CHANGES_2020.05.20.md b/airflow/providers/zendesk/PROVIDERS_CHANGES_2020.05.20.md
index a9c72ea..9c1eac6 100644
--- a/airflow/providers/zendesk/PROVIDERS_CHANGES_2020.05.20.md
+++ b/airflow/providers/zendesk/PROVIDERS_CHANGES_2020.05.20.md
@@ -4,6 +4,7 @@
 
 | Commit                                                                                         | Committed   | Subject                                                                  |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                            |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)              |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)             |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                  |
diff --git a/airflow/providers/zendesk/README.md b/airflow/providers/zendesk/README.md
index 933b8c2..bc8162d 100644
--- a/airflow/providers/zendesk/README.md
+++ b/airflow/providers/zendesk/README.md
@@ -70,6 +70,8 @@ All classes in Airflow 2.0 are in `airflow.providers.zendesk` package.
 
 
 
+
+
 ## Hooks
 
 
@@ -91,6 +93,7 @@ All classes in Airflow 2.0 are in `airflow.providers.zendesk` package.
 
 | Commit                                                                                         | Committed   | Subject                                                                  |
 |:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------|
+| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26  | Preparing for RC3 relase of backports (#9026)                            |
 | [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26  | Fixed name of 20 remaining wrongly named operators. (#8994)              |
 | [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19  | Release candidate 2 for backport packages 2020.05.20 (#8898)             |
 | [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17  | Prepare release candidate for backport packages (#8891)                  |
diff --git a/backport_packages/PROVIDERS_CLASSES_TEMPLATE.md.jinja2 b/backport_packages/PROVIDERS_CLASSES_TEMPLATE.md.jinja2
index 6363e71..3fa7613 100644
--- a/backport_packages/PROVIDERS_CLASSES_TEMPLATE.md.jinja2
+++ b/backport_packages/PROVIDERS_CLASSES_TEMPLATE.md.jinja2
@@ -37,6 +37,20 @@ All classes in Airflow 2.0 are in `{{FULL_PACKAGE_NAME}}` package.
 {% endif %}
 {% endif %}
 
+{% if NEW_TRANSFERS or MOVED_TRANSFERS %}
+{% if NEW_TRANSFERS %}
+### New transfer operators
+
+{{NEW_TRANSFERS_TABLE}}
+{% endif %}
+
+{% if MOVED_TRANSFERS %}
+### Moved transfer operators
+
+{{MOVED_TRANSFERS_TABLE}}
+{% endif %}
+{% endif %}
+
 {% if NEW_SENSORS or MOVED_SENSORS %}
 ## Sensors
 
@@ -65,20 +79,7 @@ All classes in Airflow 2.0 are in `{{FULL_PACKAGE_NAME}}` package.
 {{MOVED_HOOKS_TABLE}}
 {% endif %}
 {% endif %}
-{% if NEW_PROTOCOLS or MOVED_PROTOCOLS %}
-## Protocols
-
-{% if NEW_PROTOCOLS %}
-### New protocols
-
-{{NEW_PROTOCOLS_TABLE}}
-{% endif %}
-{% if MOVED_PROTOCOLS %}
-### Moved protocols
 
-{{MOVED_PROTOCOLS_TABLE}}
-{% endif %}
-{% endif %}
 {% if NEW_SECRETS or MOVED_SECRETS %}
 ## Secrets
 
diff --git a/backport_packages/PROVIDERS_README_TEMPLATE.md.jinja2 b/backport_packages/PROVIDERS_README_TEMPLATE.md.jinja2
index fecf6a5..ae5a138 100644
--- a/backport_packages/PROVIDERS_README_TEMPLATE.md.jinja2
+++ b/backport_packages/PROVIDERS_README_TEMPLATE.md.jinja2
@@ -42,6 +42,15 @@ Release: {{ RELEASE_NO_LEADING_ZEROS }}
         - [Moved operators](#moved-operators)
     {%- endif %}
 {%- endif %}
+{%- if NEW_TRANSFERS or MOVED_TRANSFERS %}
+    - [Transfer operators](#transfers)
+    {%- if NEW_TRANSFERS %}
+        - [New transfer operators](#new-transfers)
+    {%- endif %}
+    {%- if MOVED_TRANSFERS %}
+        - [Moved transfer operators](#moved-transfers)
+    {%- endif %}
+{%- endif %}
 {%- if NEW_SENSORS or MOVED_SENSORS %}
     - [Sensors](#sensors)
     {%- if NEW_SENSORS %}
@@ -60,15 +69,6 @@ Release: {{ RELEASE_NO_LEADING_ZEROS }}
         - [Moved hooks](#moved-hooks)
     {%- endif %}
 {%- endif %}
-{%- if NEW_PROTOCOLS or MOVED_PROTOCOLS %}
-    - [Protocols](#protocols)
-    {%- if NEW_PROTOCOLS %}
-        - [New protocols](#new-protocols)
-    {%- endif %}
-    {%- if MOVED_PROTOCOLS %}
-        - [Moved protocols](#moved-protocols)
-    {%- endif %}
-{%- endif %}
 {%- if NEW_SECRETS or MOVED_SECRETS %}
     - [Secrets](#secrets)
     {%- if NEW_SECRETS %}
diff --git a/backport_packages/refactor_backport_packages.py b/backport_packages/refactor_backport_packages.py
index ef97174..37a0d0a 100755
--- a/backport_packages/refactor_backport_packages.py
+++ b/backport_packages/refactor_backport_packages.py
@@ -25,7 +25,7 @@ from typing import List
 
 from backport_packages.setup_backport_packages import (
     get_source_airflow_folder, get_source_providers_folder, get_target_providers_folder,
-    get_target_providers_package_folder, is_bigquery_non_dts_module,
+    get_target_providers_package_folder,
 )
 from bowler import LN, TOKEN, Capture, Filename, Query
 from fissix.fixer_util import Comma, KeywordArg, Name
@@ -46,28 +46,6 @@ def copy_provider_sources() -> None:
         if os.path.isdir(build_dir):
             rmtree(build_dir)
 
-    def ignore_bigquery_files(src: str, names: List[str]) -> List[str]:
-        """
-        Ignore files with bigquery
-        :param src: source file
-        :param names: Name of the file
-        :return:
-        """
-        ignored_names = []
-        if any([src.endswith(os.path.sep + class_type) for class_type in CLASS_TYPES]):
-            ignored_names = [name for name in names
-                             if is_bigquery_non_dts_module(module_name=name)]
-        if src.endswith(os.path.sep + "example_dags"):
-            for file_name in names:
-                file_path = src + os.path.sep + file_name
-                with open(file_path, "rt") as file:
-                    text = file.read()
-                if any([f"airflow.providers.google.cloud.{class_type}.bigquery" in text
-                        for class_type in CLASS_TYPES]) or "_to_bigquery" in text:
-                    print(f"Ignoring {file_path}")
-                    ignored_names.append(file_name)
-        return ignored_names
-
     def ignore_kubernetes_files(src: str, names: List[str]) -> List[str]:
         ignored_names = []
         if src.endswith(os.path.sep + "example_dags"):
@@ -77,7 +55,7 @@ def copy_provider_sources() -> None:
         return ignored_names
 
     def ignore_some_files(src: str, names: List[str]) -> List[str]:
-        ignored_list = ignore_bigquery_files(src=src, names=names)
+        ignored_list = []
         ignored_list.extend(ignore_kubernetes_files(src=src, names=names))
         return ignored_list
 
diff --git a/backport_packages/setup_backport_packages.py b/backport_packages/setup_backport_packages.py
index 420be3a..6c991fa 100644
--- a/backport_packages/setup_backport_packages.py
+++ b/backport_packages/setup_backport_packages.py
@@ -36,7 +36,10 @@ from backport_packages.import_all_provider_classes import import_all_provider_cl
 from setup import PROVIDERS_REQUIREMENTS
 from setuptools import Command, find_packages, setup as setuptools_setup
 
-from tests.test_core_to_contrib import HOOK, OPERATOR, PROTOCOLS, SECRETS, SENSOR
+from tests.test_core_to_contrib import HOOKS, OPERATORS, SECRETS, SENSORS, TRANSFERS
+
+# Note - we do not test protocols as they are not really part of the official API of
+# Apache Airflow
 
 # noinspection DuplicatedCode
 logger = logging.getLogger(__name__)  # noqa
@@ -50,6 +53,23 @@ AIRFLOW_PATH = os.path.join(SOURCE_DIR_PATH, "airflow")
 PROVIDERS_PATH = os.path.join(AIRFLOW_PATH, "providers")
 
 
+OPERATORS_PATTERN = r".*Operator$"
+SENSORS_PATTERN = r".*Sensor$"
+HOOKS_PATTERN = r".*Hook$"
+SECRETS_PATTERN = r".*Backend$"
+TRANSFERS_PATTERN = r".*To[A-Z0-9].*Operator$"
+WRONG_TRANSFERS_PATTERN = r".*Transfer$|.*TransferOperator$"
+
+ALL_PATTERNS = {
+    OPERATORS_PATTERN,
+    SENSORS_PATTERN,
+    HOOKS_PATTERN,
+    SECRETS_PATTERN,
+    TRANSFERS_PATTERN,
+    WRONG_TRANSFERS_PATTERN,
+}
+
+
 def get_source_airflow_folder() -> str:
     """
     Returns source directory for whole airflow (from the main airflow project).
@@ -115,11 +135,11 @@ import setup  # From AIRFLOW_SOURCES/setup.py # noqa  # isort:skip
 
 DEPENDENCIES_JSON_FILE = os.path.join(PROVIDERS_PATH, "dependencies.json")
 
-MOVED_OPERATORS_DICT = {value[0]: value[1] for value in OPERATOR}
-MOVED_SENSORS_DICT = {value[0]: value[1] for value in SENSOR}
-MOVED_HOOKS_DICT = {value[0]: value[1] for value in HOOK}
-MOVED_PROTOCOLS_DICT = {value[0]: value[1] for value in PROTOCOLS}
+MOVED_OPERATORS_DICT = {value[0]: value[1] for value in OPERATORS}
+MOVED_SENSORS_DICT = {value[0]: value[1] for value in SENSORS}
+MOVED_HOOKS_DICT = {value[0]: value[1] for value in HOOKS}
 MOVED_SECRETS_DICT = {value[0]: value[1] for value in SECRETS}
+MOVED_TRANSFERS_DICT = {value[0]: value[1] for value in TRANSFERS}
 
 
 def get_pip_package_name(provider_package_id: str) -> str:
@@ -132,19 +152,6 @@ def get_pip_package_name(provider_package_id: str) -> str:
     return "apache-airflow-backport-providers-" + provider_package_id.replace(".", "-")
 
 
-def is_bigquery_non_dts_module(module_name: str) -> bool:
-    """
-    Returns true if the module name indicates this is a bigquery module that should be skipped
-    for now.
-    TODO: this method should be removed as soon as BigQuery rewrite is finished.
-
-    :param module_name: name of the module
-    :return: true if module is a bigquery module (but not bigquery_dts)
-    """
-    return module_name.startswith("bigquery") and "bigquery_dts" not in module_name \
-        or "_to_bigquery" in module_name
-
-
 def get_long_description(provider_package_id: str) -> str:
     """
     Gets long description of the package.
@@ -279,7 +286,7 @@ def usage() -> None:
     print()
     print("  list-providers-packages       - lists all provider packages")
     print("  list-backportable-packages    - lists all packages that are backportable")
-    print("  update-package-release-notes YYYY.MM.DD [PACKAGES] - updates package release notes")
+    print("  update-package-release-notes [YYYY.MM.DD] [PACKAGES] - updates package release notes")
     print("  --version-suffix <SUFFIX>     - adds version suffix to version of the packages.")
     print()
 
@@ -305,7 +312,7 @@ def is_example_dag(imported_name: str) -> bool:
     return ".example_dags." in imported_name
 
 
-def is_from_the_expected_package(the_class: Type, expected_package: str) -> bool:
+def is_from_the_expected_base_package(the_class: Type, expected_package: str) -> bool:
     """
     Returns true if the class is from the package expected.
     :param the_class: the class object
@@ -339,55 +346,68 @@ def is_class(the_class: Type) -> bool:
     return inspect.isclass(the_class)
 
 
-def is_bigquery_class(imported_name: str) -> bool:
+def package_name_matches(the_class: Type, expected_pattern: Optional[str]) -> bool:
     """
-    Returns true if the object passed is a class
-    :param imported_name: name of the class imported
-    :return: true if it is a class
+    In case expected_pattern is set, it checks if the package name matches the pattern.
+    .
+    :param the_class: imported class
+    :param expected_pattern: the pattern that should match the package
+    :return: true if the expected_pattern is None or the pattern matches the package
     """
-    return is_bigquery_non_dts_module(module_name=imported_name.split(".")[-2])
+    return expected_pattern is None or re.match(expected_pattern, the_class.__module__)
 
 
-def has_expected_string_in_name(the_class: Type, expected_string: Optional[str]) -> bool:
-    """
-    In case expected_string is different than None then it checks for presence of the string in the
-    imported_name.
-    :param the_class: name of the imported object
-    :param expected_string: string to expect
-    :return: true if the expected_string is None or the expected string is found in the imported name
-    """
-    return expected_string is None or expected_string in the_class.__module__
-
-
-def find_all_subclasses(imported_classes: List[str],
-                        expected_package: str,
-                        expected_ancestor: Type,
-                        expected_string: Optional[str] = None,
-                        exclude_class_type=None) -> Set[str]:
+def find_all_classes(imported_classes: List[str],
+                     base_package: str,
+                     ancestor_match: Type,
+                     sub_package_pattern_match: str,
+                     expected_class_name_pattern: str,
+                     unexpected_class_name_patterns: Set[str],
+                     exclude_class_type: Type = None,
+                     false_positive_class_names: Optional[Set[str]] = None,
+                     ) -> Tuple[Set[str], List[Tuple[type, str]]]:
     """
     Returns set of classes containing all subclasses in package specified.
 
     :param imported_classes: classes imported from providers
-    :param expected_package: full package name where to look for the classes
-    :param expected_ancestor: type of the object the method looks for
-    :param expected_string: this string is expected to appear in the package name
-    :param exclude_class_type: exclude class of this type (Sensor are also Operators so they should be
-           excluded from the Operator list)
-    """
-    subclasses = set()
+    :param base_package: base package name where to start looking for the classes
+    :param sub_package_pattern_match: this string is expected to appear in the sub-package name
+    :param ancestor_match: type of the object the method looks for
+    :param expected_class_name_pattern: regexp of class name pattern to expect
+    :param unexpected_class_name_patterns: set of regexp of class name pattern that are not expected
+    :param exclude_class_type: exclude class of this type (Sensor are also Operators so
+           they should be excluded from the list)
+    :param false_positive_class_names: set of class names that are wrongly recognised as badly named
+    """
+    found_classes: Set[str] = set()
+    wrong_classes: List[Tuple[type, str]] = []
     for imported_name in imported_classes:
         module, class_name = imported_name.rsplit(".", maxsplit=1)
         the_class = getattr(importlib.import_module(module), class_name)
         if is_class(the_class=the_class) \
             and not is_example_dag(imported_name=imported_name) \
-            and is_from_the_expected_package(the_class=the_class, expected_package=expected_package) \
+            and is_from_the_expected_base_package(the_class=the_class, expected_package=base_package) \
             and is_imported_from_same_module(the_class=the_class, imported_name=imported_name) \
-            and has_expected_string_in_name(the_class=the_class, expected_string=expected_string) \
-            and inherits_from(the_class=the_class, expected_ancestor=expected_ancestor) \
+            and inherits_from(the_class=the_class, expected_ancestor=ancestor_match) \
             and not inherits_from(the_class=the_class, expected_ancestor=exclude_class_type) \
-                and not is_bigquery_class(imported_name=imported_name):
-            subclasses.add(imported_name)
-    return subclasses
+                and package_name_matches(the_class=the_class, expected_pattern=sub_package_pattern_match):
+
+            if not false_positive_class_names or class_name not in false_positive_class_names:
+                if not re.match(expected_class_name_pattern, class_name):
+                    wrong_classes.append(
+                        (the_class, f"The class name {class_name} is wrong. "
+                                    f"It should match {expected_class_name_pattern}"))
+                    continue
+                if unexpected_class_name_patterns:
+                    for unexpected_class_name_pattern in unexpected_class_name_patterns:
+                        if re.match(unexpected_class_name_pattern, class_name):
+                            wrong_classes.append(
+                                (the_class,
+                                 f"The class name {class_name} is wrong. "
+                                 f"It should not match {unexpected_class_name_pattern}"))
+                        continue
+            found_classes.add(imported_name)
+    return found_classes, wrong_classes
 
 
 def get_new_and_moved_classes(classes: Set[str],
@@ -482,49 +502,94 @@ def convert_moved_objects_to_table(class_dict: Dict[str, str],
     return tabulate(table, headers=headers, tablefmt="pipe")
 
 
-def get_package_class_summary(full_package_name: str, imported_classes: List[str]) -> Dict[str, Any]:
+def print_wrong_naming(class_type: str, wrong_classes: List[Tuple[type, str]]):
+    """
+    Prints wrong classes of a given type if there are any
+    :param class_type: type of the class to print
+    :param wrong_classes: list of wrong classes
+    """
+    if wrong_classes:
+        print(f"\nThere are wrongly named classes of type {class_type}:\n", file=sys.stderr)
+        for class_type, message in wrong_classes:
+            print(f"{class_type}: {message}", file=sys.stderr)
+
+
+def get_package_class_summary(full_package_name: str, imported_classes: List[str]) \
+        -> Tuple[Dict[str, Any], int]:
     """
     Gets summary of the package in the form of dictionary containing all types of classes
     :param full_package_name: full package name
     :param imported_classes: classes imported_from providers
-    :return: dictionary of objects usable as context for Jinja2 templates
+    :return: Tuple of (dictionary of objects usable as context for Jinja2 templates, number of naming errors found)
     """
     from airflow.secrets import BaseSecretsBackend
     from airflow.sensors.base_sensor_operator import BaseSensorOperator
     from airflow.hooks.base_hook import BaseHook
     from airflow.models.baseoperator import BaseOperator
-    from typing_extensions import Protocol
-    operators = find_all_subclasses(
+
+    operators, wrong_operators = find_all_classes(
         imported_classes=imported_classes,
-        expected_package=full_package_name,
-        expected_ancestor=BaseOperator,
-        expected_string=".operators.",
-        exclude_class_type=BaseSensorOperator)
-    sensors = find_all_subclasses(
+        base_package=full_package_name,
+        sub_package_pattern_match=r".*\.operators\..*",
+        ancestor_match=BaseOperator,
+        expected_class_name_pattern=OPERATORS_PATTERN,
+        unexpected_class_name_patterns=ALL_PATTERNS - {OPERATORS_PATTERN},
+        exclude_class_type=BaseSensorOperator,
+        false_positive_class_names={
+            'CloudVisionAddProductToProductSetOperator',
+            'CloudDataTransferServiceGCSToGCSOperator',
+            'CloudDataTransferServiceS3ToGCSOperator',
+            'BigQueryCreateDataTransferOperator',
+            'CloudTextToSpeechSynthesizeOperator',
+            'CloudSpeechToTextRecognizeSpeechOperator',
+        }
+    )
+    sensors, wrong_sensors = find_all_classes(
         imported_classes=imported_classes,
-        expected_package=full_package_name,
-        expected_ancestor=BaseSensorOperator,
-        expected_string='.sensors.')
-    hooks = find_all_subclasses(
+        base_package=full_package_name,
+        sub_package_pattern_match=r".*\.sensors\..*",
+        ancestor_match=BaseSensorOperator,
+        expected_class_name_pattern=SENSORS_PATTERN,
+        unexpected_class_name_patterns=ALL_PATTERNS - {OPERATORS_PATTERN, SENSORS_PATTERN}
+    )
+    hooks, wrong_hooks = find_all_classes(
         imported_classes=imported_classes,
-        expected_package=full_package_name,
-        expected_ancestor=BaseHook,
-        expected_string='.hooks.')
-    protocols = find_all_subclasses(
+        base_package=full_package_name,
+        sub_package_pattern_match=r".*\.hooks\..*",
+        ancestor_match=BaseHook,
+        expected_class_name_pattern=HOOKS_PATTERN,
+        unexpected_class_name_patterns=ALL_PATTERNS - {HOOKS_PATTERN}
+    )
+    secrets, wrong_secrets = find_all_classes(
         imported_classes=imported_classes,
-        expected_package=full_package_name,
-        expected_ancestor=Protocol,
+        sub_package_pattern_match=r".*\.secrets\..*",
+        base_package=full_package_name,
+        ancestor_match=BaseSecretsBackend,
+        expected_class_name_pattern=SECRETS_PATTERN,
+        unexpected_class_name_patterns=ALL_PATTERNS - {SECRETS_PATTERN},
     )
-    secrets = find_all_subclasses(
+    transfers, wrong_transfers = find_all_classes(
         imported_classes=imported_classes,
-        expected_package=full_package_name,
-        expected_ancestor=BaseSecretsBackend,
+        base_package=full_package_name,
+        sub_package_pattern_match=r".*\.transfers\..*",
+        ancestor_match=BaseOperator,
+        expected_class_name_pattern=TRANSFERS_PATTERN,
+        unexpected_class_name_patterns=ALL_PATTERNS - {OPERATORS_PATTERN, TRANSFERS_PATTERN},
     )
+    print_wrong_naming("Operators", wrong_operators)
+    print_wrong_naming("Sensors", wrong_sensors)
+    print_wrong_naming("Hooks", wrong_hooks)
+    print_wrong_naming("Secrets", wrong_secrets)
+    print_wrong_naming("Transfers", wrong_transfers)
+
+    num_errors = len(wrong_operators) + len(wrong_sensors) + len(wrong_hooks) + \
+        len(wrong_secrets) + len(wrong_transfers)
+
     new_operators, moved_operators = get_new_and_moved_classes(operators, MOVED_OPERATORS_DICT)
     new_sensors, moved_sensors = get_new_and_moved_classes(sensors, MOVED_SENSORS_DICT)
     new_hooks, moved_hooks = get_new_and_moved_classes(hooks, MOVED_HOOKS_DICT)
-    new_protocols, moved_protocols = get_new_and_moved_classes(protocols, MOVED_PROTOCOLS_DICT)
     new_secrets, moved_secrets = get_new_and_moved_classes(secrets, MOVED_SECRETS_DICT)
+    new_transfers, moved_transfers = get_new_and_moved_classes(transfers, MOVED_TRANSFERS_DICT)
     class_summary = {
         "NEW_OPERATORS": new_operators,
         "MOVED_OPERATORS": moved_operators,
@@ -532,23 +597,22 @@ def get_package_class_summary(full_package_name: str, imported_classes: List[str
         "MOVED_SENSORS": moved_sensors,
         "NEW_HOOKS": new_hooks,
         "MOVED_HOOKS": moved_hooks,
-        "NEW_PROTOCOLS": new_protocols,
-        "MOVED_PROTOCOLS": moved_protocols,
         "NEW_SECRETS": new_secrets,
         "MOVED_SECRETS": moved_secrets,
+        "NEW_TRANSFERS": new_transfers,
+        "MOVED_TRANSFERS": moved_transfers,
         "OPERATORS": operators,
         "HOOKS": hooks,
         "SENSORS": sensors,
-        "PROTOCOLS": protocols,
         "SECRETS": secrets,
-
+        "TRANSFERS": transfers,
     }
     for from_name, to_name, object_type in [
         ("NEW_OPERATORS", "NEW_OPERATORS_TABLE", "operators"),
         ("NEW_SENSORS", "NEW_SENSORS_TABLE", "sensors"),
         ("NEW_HOOKS", "NEW_HOOKS_TABLE", "hooks"),
-        ("NEW_PROTOCOLS", "NEW_PROTOCOLS_TABLE", "protocols"),
         ("NEW_SECRETS", "NEW_SECRETS_TABLE", "secrets"),
+        ("NEW_TRANSFERS", "NEW_TRANSFERS_TABLE", "transfers"),
     ]:
         class_summary[to_name] = convert_new_classes_to_table(class_summary[from_name],
                                                               full_package_name,
@@ -557,13 +621,13 @@ def get_package_class_summary(full_package_name: str, imported_classes: List[str
         ("MOVED_OPERATORS", "MOVED_OPERATORS_TABLE", "operators"),
         ("MOVED_SENSORS", "MOVED_SENSORS_TABLE", "sensors"),
         ("MOVED_HOOKS", "MOVED_HOOKS_TABLE", "hooks"),
-        ("MOVED_PROTOCOLS", "MOVED_PROTOCOLS_TABLE", "protocols"),
         ("MOVED_SECRETS", "MOVED_SECRETS_TABLE", "protocols"),
+        ("MOVED_TRANSFERS", "MOVED_TRANSFERS_TABLE", "transfers"),
     ]:
         class_summary[to_name] = convert_moved_objects_to_table(class_summary[from_name],
                                                                 full_package_name,
                                                                 object_type)
-    return class_summary
+    return class_summary, num_errors
 
 
 def render_template(template_name: str, context: Dict[str, Any]) -> str:
@@ -741,13 +805,11 @@ def get_previous_release_info(previous_release_version: str,
 
 def check_if_release_version_ok(
         past_releases: List[ReleaseInfo],
-        current_release_version: str,
-        package_id: str) -> Tuple[str, Optional[str]]:
+        current_release_version: str) -> Tuple[str, Optional[str]]:
     """
     Check if the release version passed is not later than the last release version
     :param past_releases: all past releases (if there are any)
     :param current_release_version: release version to check
-    :param package_id: package id
     :return: Tuple of current/previous_release (previous might be None if there are no releases)
     """
     previous_release_version = past_releases[0].release_version if past_releases else None
@@ -864,8 +926,8 @@ EXPECTED_SUFFIXES: Dict[str, str] = {
     "OPERATORS": "Operator",
     "HOOKS": "Hook",
     "SENSORS": "Sensor",
-    "PROTOCOLS": "Protocol",
     "SECRETS": "Backend",
+    "TRANSFERS": "Operator",
 }
 
 
@@ -920,10 +982,10 @@ def update_release_notes_for_package(provider_package_id: str, current_release_v
     """
     full_package_name = f"airflow.providers.{provider_package_id}"
     provider_package_path = get_package_path(provider_package_id)
-    class_summary = get_package_class_summary(full_package_name, imported_classes)
+    class_summary, num_errors = get_package_class_summary(full_package_name, imported_classes)
     past_releases = get_all_releases(provider_package_path=provider_package_path)
     current_release_version, previous_release = check_if_release_version_ok(
-        past_releases, current_release_version, provider_package_id)
+        past_releases, current_release_version)
     cross_providers_dependencies = \
         get_cross_provider_dependent_packages(provider_package_id=provider_package_id)
     previous_release = get_previous_release_info(previous_release_version=previous_release,
@@ -982,9 +1044,10 @@ def update_release_notes_for_package(provider_package_id: str, current_release_v
         finally:
             os.remove(temp_file_path)
     total, bad = check_if_classes_are_properly_named(class_summary)
+    bad = bad + num_errors
     if bad != 0:
         print()
-        print(f"ERROR! There are {bad} classes badly named out of {total} classes for {provider_package_id}")
+        print(f"ERROR! There are {bad} errors of {total} classes for {provider_package_id}")
         print()
     return total, bad
 
diff --git a/docs/autoapi_templates/index.rst b/docs/autoapi_templates/index.rst
index ed1a910..d3d102a 100644
--- a/docs/autoapi_templates/index.rst
+++ b/docs/autoapi_templates/index.rst
@@ -70,16 +70,22 @@ All operators are in the following packages:
 
   airflow/providers/amazon/aws/sensors/index
 
+  airflow/providers/amazon/aws/transfers/index
+
   airflow/providers/apache/cassandra/sensors/index
 
   airflow/providers/apache/druid/operators/index
 
+  airflow/providers/apache/druid/transfers/index
+
   airflow/providers/apache/hdfs/sensors/index
 
   airflow/providers/apache/hive/operators/index
 
   airflow/providers/apache/hive/sensors/index
 
+  airflow/providers/apache/hive/transfers/index
+
   airflow/providers/apache/livy/operators/index
 
   airflow/providers/apache/livy/sensors/index
@@ -114,10 +120,14 @@ All operators are in the following packages:
 
   airflow/providers/google/ads/operators/index
 
+  airflow/providers/google/ads/transfers/index
+
   airflow/providers/google/cloud/operators/index
 
   airflow/providers/google/cloud/sensors/index
 
+  airflow/providers/google/cloud/transfers/index
+
   airflow/providers/google/firebase/operators/index
 
   airflow/providers/google/marketing_platform/operators/index
@@ -126,6 +136,8 @@ All operators are in the following packages:
 
   airflow/providers/google/suite/operators/index
 
+  airflow/providers/google/suite/transfers/index
+
   airflow/providers/grpc/operators/index
 
   airflow/providers/http/operators/index
@@ -146,6 +158,8 @@ All operators are in the following packages:
 
   airflow/providers/microsoft/azure/sensors/index
 
+  airflow/providers/microsoft/azure/transfers/index
+
   airflow/providers/microsoft/mssql/operators/index
 
   airflow/providers/microsoft/winrm/operators/index
@@ -154,10 +168,14 @@ All operators are in the following packages:
 
   airflow/providers/mysql/operators/index
 
+  airflow/providers/mysql/transfers/index
+
   airflow/providers/opsgenie/operators/index
 
   airflow/providers/oracle/operators/index
 
+  airflow/providers/oracle/transfers/index
+
   airflow/providers/papermill/operators/index
 
   airflow/providers/postgres/operators/index
@@ -186,6 +204,8 @@ All operators are in the following packages:
 
   airflow/providers/snowflake/operators/index
 
+  airflow/providers/snowflake/transfers/index
+
   airflow/providers/sqlite/operators/index
 
   airflow/providers/ssh/operators/index
diff --git a/docs/build b/docs/build
index 7e0720a..8266243 100755
--- a/docs/build
+++ b/docs/build
@@ -168,7 +168,8 @@ def check_class_links_in_operators_and_hooks_ref() -> None:
 
     airflow_modules = find_modules() - find_modules(deprecated_only=True)
     airflow_modules = {
-        o for o in airflow_modules if any(f".{d}." in o for d in ["operators", "hooks", "sensors"])
+        o for o in airflow_modules if any(f".{d}." in o for d in
+                                          ["operators", "hooks", "sensors", "transfers"])
     }
 
     missing_modules = airflow_modules - current_modules_in_file
diff --git a/docs/concepts.rst b/docs/concepts.rst
index 79bc024..ca66a40 100644
--- a/docs/concepts.rst
+++ b/docs/concepts.rst
@@ -278,7 +278,7 @@ Airflow provides operators for many common tasks, including:
 In addition to these basic building blocks, there are many more specific
 operators: :class:`~airflow.providers.docker.operators.docker.DockerOperator`,
 :class:`~airflow.providers.apache.hive.operators.hive.HiveOperator`, :class:`~airflow.providers.amazon.aws.operators.s3_file_transform.S3FileTransformOperator`,
-:class:`~airflow.providers.mysql.operators.presto_to_mysql.PrestoToMySqlTransferOperator`,
+:class:`~airflow.providers.mysql.transfers.presto_to_mysql.PrestoToMySqlOperator`,
 :class:`~airflow.providers.slack.operators.slack.SlackAPIOperator`... you get the idea!
 
 Operators are only loaded by Airflow if they are assigned to a DAG.
diff --git a/docs/howto/define_extra_link.rst b/docs/howto/define_extra_link.rst
index ee6506a..47bb5ed 100644
--- a/docs/howto/define_extra_link.rst
+++ b/docs/howto/define_extra_link.rst
@@ -66,7 +66,7 @@ You can also add (or override) an extra link to an existing operators
 through an Airflow plugin.
 
 For example, the following Airflow plugin will add an Operator Link on all
-tasks using :class:`~airflow.providers.amazon.aws.operators.gcs_to_s3.GCSToS3Operator` operator.
+tasks using :class:`~airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSToS3Operator` operator.
 
 **Adding Operator Links to Existing Operators**
 ``plugins/extra_link.py``:
@@ -75,7 +75,7 @@ tasks using :class:`~airflow.providers.amazon.aws.operators.gcs_to_s3.GCSToS3Ope
 
   from airflow.plugins_manager import AirflowPlugin
   from airflow.models.baseoperator import BaseOperatorLink
-  from airflow.providers.amazon.aws.operators.gcs_to_s3 import GCSToS3Operator
+  from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator
 
   class S3LogLink(BaseOperatorLink):
       name = 'S3'
diff --git a/docs/howto/operator/amazon/aws/google_api_to_s3_transfer.rst b/docs/howto/operator/amazon/aws/google_api_to_s3_transfer.rst
index 0c095d5..ff7f354 100644
--- a/docs/howto/operator/amazon/aws/google_api_to_s3_transfer.rst
+++ b/docs/howto/operator/amazon/aws/google_api_to_s3_transfer.rst
@@ -31,7 +31,7 @@ Overview
 The ``GoogleApiToS3Transfer`` can call requests to any Google API which supports discovery and save its response on S3.
 
 Two example_dags are provided which showcase the
-:class:`~airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.GoogleApiToS3Transfer`
+:class:`~airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleApiToS3Transfer`
 in action.
 
  - example_google_api_to_s3_transfer_basic.py
diff --git a/docs/howto/operator/amazon/aws/imap_attachment_to_s3.rst b/docs/howto/operator/amazon/aws/imap_attachment_to_s3.rst
index 94cfea3..cf28ad5 100644
--- a/docs/howto/operator/amazon/aws/imap_attachment_to_s3.rst
+++ b/docs/howto/operator/amazon/aws/imap_attachment_to_s3.rst
@@ -32,7 +32,7 @@ The ``ImapAttachmentToS3Operator`` can transfer an email attachment via IMAP
 protocol from a mail server to S3 Bucket.
 
 An example dag ``example_imap_attachment_to_s3.py`` is provided which showcase the
-:class:`~airflow.providers.amazon.aws.operators.imap_attachment_to_s3.ImapAttachmentToS3Operator`
+:class:`~airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.ImapAttachmentToS3Operator`
 in action.
 
 example_imap_attachment_to_s3.py
diff --git a/docs/howto/operator/amazon/aws/s3_to_redshift.rst b/docs/howto/operator/amazon/aws/s3_to_redshift.rst
index 7a634da..dc5089e 100644
--- a/docs/howto/operator/amazon/aws/s3_to_redshift.rst
+++ b/docs/howto/operator/amazon/aws/s3_to_redshift.rst
@@ -16,7 +16,7 @@
     under the License.
 
 
-.. _howto/operator:S3ToRedshiftTransferOperator:
+.. _howto/operator:S3ToRedshiftOperator:
 
 S3 To Redshift Transfer Operator
 ================================
@@ -28,10 +28,10 @@ S3 To Redshift Transfer Operator
 Overview
 --------
 
-The ``S3ToRedshiftTransferOperator`` copies data from a S3 Bucket into a Redshift table.
+The ``S3ToRedshiftOperator`` copies data from an S3 Bucket into a Redshift table.
 
 The example dag provided showcases the
-:class:`~airflow.providers.amazon.aws.operators.s3_to_redshift.S3ToRedshiftTransferOperator`
+:class:`~airflow.providers.amazon.aws.transfers.s3_to_redshift.S3ToRedshiftOperator`
 in action.
 
  - example_s3_to_redshift.py
@@ -42,7 +42,7 @@ example_s3_to_redshift.py
 Purpose
 """""""
 
-This is a basic example dag for using ``S3ToRedshiftTransferOperator`` to copies data from a S3 Bucket into a Redshift table.
+This is a basic example dag for using ``S3ToRedshiftOperator`` to copy data from an S3 Bucket into a Redshift table.
 
 Environment variables
 """""""""""""""""""""
diff --git a/docs/howto/operator/gcp/ads.rst b/docs/howto/operator/gcp/ads.rst
index 311304b..ad2c376 100644
--- a/docs/howto/operator/gcp/ads.rst
+++ b/docs/howto/operator/gcp/ads.rst
@@ -35,7 +35,7 @@ Google Ads to GCS
 ^^^^^^^^^^^^^^^^^
 
 To query the Google Ads API and generate a CSV report of the results use
-:class:`~airflow.providers.google.ads.operators.ads.GoogleAdsToGcsOperator`.
+:class:`~airflow.providers.google.ads.transfers.ads_to_gcs.GoogleAdsToGcsOperator`.
 
 .. exampleinclude:: ../../../../airflow/providers/google/ads/example_dags/example_ads.py
     :language: python
@@ -44,7 +44,7 @@ To query the Google Ads API and generate a CSV report of the results use
     :end-before: [END howto_google_ads_to_gcs_operator]
 
 Use :ref:`Jinja templating <jinja-templating>` with
-:template-fields:`airflow.providers.google.ads.operators.ads.GoogleAdsToGcsOperator`
+:template-fields:`airflow.providers.google.ads.transfers.ads_to_gcs.GoogleAdsToGcsOperator`
 parameters which allow you to dynamically determine values.
 The result is saved to :ref:`XCom <concepts:xcom>`, which allows the result to be used by other operators.
 
@@ -54,7 +54,7 @@ Upload Google Ads Accounts to GCS
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 To upload Google Ads accounts to Google Cloud Storage bucket use the
-:class:`~airflow.providers.google.ads.operators.ads.GoogleAdsListAccountsOperator`.
+:class:`~airflow.providers.google.ads.transfers.ads_to_gcs.GoogleAdsListAccountsOperator`.
 
 .. exampleinclude:: ../../../../airflow/providers/google/ads/example_dags/example_ads.py
     :language: python
@@ -63,6 +63,6 @@ To upload Google Ads accounts to Google Cloud Storage bucket use the
     :end-before: [END howto_ads_list_accounts_operator]
 
 Use :ref:`Jinja templating <jinja-templating>` with
-:template-fields:`airflow.providers.google.ads.operators.ads.GoogleAdsToGcsOperator`
+:template-fields:`airflow.providers.google.ads.transfers.ads_to_gcs.GoogleAdsToGcsOperator`
 parameters which allow you to dynamically determine values.
 The result is saved to :ref:`XCom <concepts:xcom>`, which allows the result to be used by other operators.
diff --git a/docs/howto/operator/gcp/facebook_ads_to_gcs.rst b/docs/howto/operator/gcp/facebook_ads_to_gcs.rst
index a686878..7d6e50b 100644
--- a/docs/howto/operator/gcp/facebook_ads_to_gcs.rst
+++ b/docs/howto/operator/gcp/facebook_ads_to_gcs.rst
@@ -35,7 +35,7 @@ FacebookAdsReportToGcsOperator
 ------------------------------
 
 Use the
-:class:`~airflow.providers.google.cloud.operators.facebook_ads_to_gcs.FacebookAdsReportToGcsOperator`
+:class:`~airflow.providers.google.cloud.transfers.facebook_ads_to_gcs.FacebookAdsReportToGcsOperator`
 to execute a Facebook ads report fetch and load to GCS.
 
 .. exampleinclude:: ../../../../airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py
diff --git a/docs/howto/operator/gcp/gcs.rst b/docs/howto/operator/gcp/gcs.rst
index b35659f..675bbf6 100644
--- a/docs/howto/operator/gcp/gcs.rst
+++ b/docs/howto/operator/gcp/gcs.rst
@@ -35,7 +35,7 @@ GCSToBigQueryOperator
 ---------------------
 
 Use the
-:class:`~airflow.providers.google.cloud.operators.gcs_to_bigquery.GCSToBigQueryOperator`
+:class:`~airflow.providers.google.cloud.transfers.gcs_to_bigquery.GCSToBigQueryOperator`
 to execute a BigQuery load job.
 
 .. exampleinclude:: ../../../../airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py
diff --git a/docs/howto/operator/gcp/gcs_to_gcs.rst b/docs/howto/operator/gcp/gcs_to_gcs.rst
index e17bedf..a23b37c 100644
--- a/docs/howto/operator/gcp/gcs_to_gcs.rst
+++ b/docs/howto/operator/gcp/gcs_to_gcs.rst
@@ -72,7 +72,7 @@ Operators
 GCSToGCSOperator
 ~~~~~~~~~~~~~~~~
 
-:class:`~airflow.providers.google.cloud.operators.gcs_to_gcs.GCSToGCSOperator` allows you to copy
+:class:`~airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSToGCSOperator` allows you to copy
 one or more files within GCS. The files may be copied between two different buckets or within one bucket.
 The copying always takes place without taking into account the initial state of the destination bucket.
 
@@ -165,7 +165,7 @@ the ``delimiter`` argument apply to moves as well as copies.
 GCSSynchronizeBuckets
 ~~~~~~~~~~~~~~~~~~~~~
 
-The :class:`~airflow.providers.google.cloud.operators.gcs_to_gcs.GCSSynchronizeBuckets`
+The :class:`~airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSSynchronizeBuckets`
 operator checks the initial state of the destination bucket, and then compares it with the source bucket.
 Based on this, it creates an operation plan that describes which objects should be deleted from
 the destination bucket, which should be overwritten, and which should be copied.
diff --git a/docs/howto/operator/gcp/gcs_to_gdrive.rst b/docs/howto/operator/gcp/gcs_to_gdrive.rst
index 61e7501..4c4c43f 100644
--- a/docs/howto/operator/gcp/gcs_to_gdrive.rst
+++ b/docs/howto/operator/gcp/gcs_to_gdrive.rst
@@ -41,10 +41,10 @@ Operator
 ^^^^^^^^
 
 Transfer files between Google Storage and Google Drive is performed with the
-:class:`~airflow.providers.google.suite.operators.gcs_to_gdrive.GCSToGoogleDriveOperator` operator.
+:class:`~airflow.providers.google.suite.transfers.gcs_to_gdrive.GCSToGoogleDriveOperator` operator.
 
 You can use :ref:`Jinja templating <jinja-templating>` with
-:template-fields:`airflow.providers.google.suite.operators.gcs_to_gdrive.GCSToGoogleDriveOperator`
+:template-fields:`airflow.providers.google.suite.transfers.gcs_to_gdrive.GCSToGoogleDriveOperator`
 parameters which allows you to dynamically determine values.
 
 Copy single files
diff --git a/docs/howto/operator/gcp/local_to_gcs.rst b/docs/howto/operator/gcp/gcs_to_local.rst
similarity index 68%
copy from docs/howto/operator/gcp/local_to_gcs.rst
copy to docs/howto/operator/gcp/gcs_to_local.rst
index 098618c..860fc06 100644
--- a/docs/howto/operator/gcp/local_to_gcs.rst
+++ b/docs/howto/operator/gcp/gcs_to_local.rst
@@ -16,10 +16,10 @@
     under the License.
 
 
-Upload data from Local Filesystem to Google Cloud Storage
-=========================================================
+Download data from Google Cloud Storage to Local Filesystem
+============================================================
 The `Google Cloud Storage <https://cloud.google.com/storage/>`__  (GCS) is used to store large data from various applications.
-This page shows how to upload data from local filesystem to GCS.
+This page shows how to download data from GCS to local filesystem.
 
 .. contents::
   :depth: 1
@@ -31,23 +31,22 @@ Prerequisite Tasks
 
 .. include:: _partials/prerequisite_tasks.rst
 
-.. _howto/operator:LocalFilesystemToGCSOperator:
+.. _howto/operator:GCSToLocalFilesystemOperator:
 
-LocalFileSystemToGCSOperator
+GCSToLocalFilesystemOperator
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-:class:`~airflow.providers.google.cloud.operators.local_to_gcs.LocalFilesystemToGCSOperator` allows you to upload
-data from local filesystem to GCS.
+:class:`~airflow.providers.google.cloud.transfers.gcs_to_local.GCSToLocalFilesystemOperator` allows you to download
+data from GCS to local filesystem.
 
-When you use this operator, you can optionally compress the data being uploaded.
 
 Below is an example of using this operator to download a file from GCS.
 
-.. exampleinclude:: ../../../../airflow/providers/google/cloud/example_dags/example_local_to_gcs.py
+.. exampleinclude:: ../../../../airflow/providers/google/cloud/example_dags/example_gcs.py
     :language: python
     :dedent: 0
-    :start-after: [START howto_operator_local_filesystem_to_gcs]
-    :end-before: [END howto_operator_local_filesystem_to_gcs]
+    :start-after: [START howto_operator_gcs_download_file_task]
+    :end-before: [END howto_operator_gcs_download_file_task]
 
 
 Reference
diff --git a/docs/howto/operator/gcp/gcs_to_sftp.rst b/docs/howto/operator/gcp/gcs_to_sftp.rst
index 682be89..d50f0ad 100644
--- a/docs/howto/operator/gcp/gcs_to_sftp.rst
+++ b/docs/howto/operator/gcp/gcs_to_sftp.rst
@@ -41,10 +41,10 @@ Operator
 ^^^^^^^^
 
 Transfer files between SFTP and Google Storage is performed with the
-:class:`~airflow.providers.google.cloud.operators.gcs_to_sftp.GCSToSFTPOperator` operator.
+:class:`~airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSToSFTPOperator` operator.
 
 Use :ref:`Jinja templating <jinja-templating>` with
-:template-fields:`airflow.providers.google.cloud.operators.gcs_to_sftp.GCSToSFTPOperator`
+:template-fields:`airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSToSFTPOperator`
 to define values dynamically.
 
 
diff --git a/docs/howto/operator/gcp/gcs_to_sheets.rst b/docs/howto/operator/gcp/gcs_to_sheets.rst
index fdfd268..2af5042 100644
--- a/docs/howto/operator/gcp/gcs_to_sheets.rst
+++ b/docs/howto/operator/gcp/gcs_to_sheets.rst
@@ -40,7 +40,7 @@ Upload data from GCS to Google Sheets
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 To upload data from Google Cloud Storage to Google Spreadsheet you can use the
-:class:`~airflow.providers.google.suite.operators.gcs_to_sheets.GCSToGoogleSheetsOperator`.
+:class:`~airflow.providers.google.suite.transfers.gcs_to_sheets.GCSToGoogleSheetsOperator`.
 
 .. exampleinclude:: ../../../../airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py
     :language: python
@@ -49,4 +49,4 @@ To upload data from Google Cloud Storage to Google Spreadsheet you can use the
     :end-before: [END upload_gcs_to_sheets]
 
 You can use :ref:`Jinja templating <jinja-templating>` with
-:template-fields:`airflow.providers.google.suite.operators.gcs_to_sheets.GCSToGoogleSheetsOperator`.
+:template-fields:`airflow.providers.google.suite.transfers.gcs_to_sheets.GCSToGoogleSheetsOperator`.
diff --git a/docs/howto/operator/gcp/local_to_gcs.rst b/docs/howto/operator/gcp/local_to_gcs.rst
index 098618c..a52c7c7 100644
--- a/docs/howto/operator/gcp/local_to_gcs.rst
+++ b/docs/howto/operator/gcp/local_to_gcs.rst
@@ -36,7 +36,7 @@ Prerequisite Tasks
 LocalFileSystemToGCSOperator
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-:class:`~airflow.providers.google.cloud.operators.local_to_gcs.LocalFilesystemToGCSOperator` allows you to upload
+:class:`~airflow.providers.google.cloud.transfers.local_to_gcs.LocalFilesystemToGCSOperator` allows you to upload
 data from local filesystem to GCS.
 
 When you use this operator, you can optionally compress the data being uploaded.
diff --git a/docs/howto/operator/gcp/presto_to_gcs.rst b/docs/howto/operator/gcp/presto_to_gcs.rst
index d961e3c..e6fc954 100644
--- a/docs/howto/operator/gcp/presto_to_gcs.rst
+++ b/docs/howto/operator/gcp/presto_to_gcs.rst
@@ -35,7 +35,7 @@ Data transfer
 -------------
 
 Transfer files between Presto and Google Storage is performed with the
-:class:`~airflow.providers.google.cloud.operators.presto_to_gcs.PrestoToGCSOperator` operator.
+:class:`~airflow.providers.google.cloud.transfers.presto_to_gcs.PrestoToGCSOperator` operator.
 
 This operator has 3 required parameters:
 
@@ -45,7 +45,7 @@ This operator has 3 required parameters:
   A ``{}`` should be specified in the filename to allow the operator to inject file
   numbers in cases where the file is split due to size.
 
-All parameters are described in the reference documentation - :class:`~airflow.providers.google.cloud.operators.presto_to_gcs.PrestoToGCSOperator`.
+All parameters are described in the reference documentation - :class:`~airflow.providers.google.cloud.transfers.presto_to_gcs.PrestoToGCSOperator`.
 
 An example operator call might look like this:
 
diff --git a/docs/howto/operator/gcp/sftp_to_gcs.rst b/docs/howto/operator/gcp/sftp_to_gcs.rst
index f93c833..e807e62 100644
--- a/docs/howto/operator/gcp/sftp_to_gcs.rst
+++ b/docs/howto/operator/gcp/sftp_to_gcs.rst
@@ -40,10 +40,10 @@ Operator
 ^^^^^^^^
 
 Transfer files between SFTP and Google Storage is performed with the
-:class:`~airflow.providers.google.cloud.operators.sftp_to_gcs.SFTPToGCSOperator` operator.
+:class:`~airflow.providers.google.cloud.transfers.sftp_to_gcs.SFTPToGCSOperator` operator.
 
 Use :ref:`Jinja templating <jinja-templating>` with
-:template-fields:`airflow.providers.google.cloud.operators.sftp_to_gcs.SFTPToGCSOperator`
+:template-fields:`airflow.providers.google.cloud.transfers.sftp_to_gcs.SFTPToGCSOperator`
 to define values dynamically.
 
 Copying single files
diff --git a/docs/howto/operator/gcp/sheets_to_gcs.rst b/docs/howto/operator/gcp/sheets_to_gcs.rst
index b42c796..373bb01 100644
--- a/docs/howto/operator/gcp/sheets_to_gcs.rst
+++ b/docs/howto/operator/gcp/sheets_to_gcs.rst
@@ -40,7 +40,7 @@ Upload data from Google Sheets to GCS
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 To upload data from Google Spreadsheet to Google Cloud Storage you can use the
-:class:`~airflow.providers.google.cloud.operators.sheets_to_gcs.GoogleSheetsToGCSOperator`.
+:class:`~airflow.providers.google.cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator`.
 
 .. exampleinclude:: ../../../../airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py
     :language: python
@@ -49,4 +49,4 @@ To upload data from Google Spreadsheet to Google Cloud Storage you can use the
     :end-before: [END upload_sheet_to_gcs]
 
 You can use :ref:`Jinja templating <jinja-templating>` with
-:template-fields:`airflow.providers.google.cloud.operators.sheets_to_gcs.GoogleSheetsToGCSOperator`.
+:template-fields:`airflow.providers.google.cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator`.
diff --git a/docs/installation.rst b/docs/installation.rst
index 29a228a..8913943 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -188,55 +188,55 @@ Here's the list of the subpackages and what they enable:
 
 **Software:**
 
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| subpackage          | install command                                     | enables                                                                           |
-+=====================+=====================================================+===================================================================================+
-| async               | ``pip install 'apache-airflow[async]'``             | Async worker classes for Gunicorn                                                 |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| celery              | ``pip install 'apache-airflow[celery]'``            | CeleryExecutor                                                                    |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| dask                | ``pip install 'apache-airflow[dask]'``              | DaskExecutor                                                                      |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| docker              | ``pip install 'apache-airflow[docker]'``            | Docker hooks and operators                                                        |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| elasticsearch       | ``pip install 'apache-airflow[elasticsearch]'``     | Elasticsearch hooks and Log Handler                                               |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| exasol              | ``pip install 'apache-airflow[exasol]'``            | Exasol hooks and operators                                                        |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| kubernetes          | ``pip install 'apache-airflow[cncf.kubernetes]'``   | Kubernetes Executor and operator                                                  |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| mongo               | ``pip install 'apache-airflow[mongo]'``             | Mongo hooks and operators                                                         |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| mssql (deprecated)  | ``pip install 'apache-airflow[microsoft.mssql]'``   | Microsoft SQL Server operators and hook,                                          |
-|                     |                                                     | support as an Airflow backend.  Uses pymssql.                                     |
-|                     |                                                     | Will be replaced by subpackage ``odbc``.                                          |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| mysql               | ``pip install 'apache-airflow[mysql]'``             | MySQL operators and hook, support as an Airflow                                   |
-|                     |                                                     | backend. The version of MySQL server has to be                                    |
-|                     |                                                     | 5.6.4+. The exact version upper bound depends                                     |
-|                     |                                                     | on version of ``mysqlclient`` package. For                                        |
-|                     |                                                     | example, ``mysqlclient`` 1.3.12 can only be                                       |
-|                     |                                                     | used with MySQL server 5.6.4 through 5.7.                                         |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| odbc                | ``pip install 'apache-airflow[odbc]'``              | ODBC data sources including MS SQL Server.  Can use MsSqlOperator,                |
-|                     |                                                     | or as metastore database backend.  Uses pyodbc.                                   |
-|                     |                                                     | See :ref:`howto/connection/odbc` for more info.                                   |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| oracle              | ``pip install 'apache-airflow[oracle]'``            | Oracle hooks and operators                                                        |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| pinot               | ``pip install 'apache-airflow[pinot]'``             | Pinot DB hook                                                                     |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| postgres            | ``pip install 'apache-airflow[postgres]'``          | PostgreSQL operators and hook, support as an                                      |
-|                     |                                                     | Airflow backend                                                                   |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| rabbitmq            | ``pip install 'apache-airflow[rabbitmq]'``          | RabbitMQ support as a Celery backend                                              |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| redis               | ``pip install 'apache-airflow[redis]'``             | Redis hooks and sensors                                                           |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| samba               | ``pip install 'apache-airflow[samba]'``             | :class:`airflow.providers.apache.hive.operators.hive_to_samba.Hive2SambaOperator` |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
-| statsd              | ``pip install 'apache-airflow[statsd]'``            | Needed by StatsD metrics                                                          |
-+---------------------+-----------------------------------------------------+-----------------------------------------------------------------------------------+
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| subpackage          | install command                                     | enables                                                                            |
++=====================+=====================================================+====================================================================================+
+| async               | ``pip install 'apache-airflow[async]'``             | Async worker classes for Gunicorn                                                  |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| celery              | ``pip install 'apache-airflow[celery]'``            | CeleryExecutor                                                                     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| dask                | ``pip install 'apache-airflow[dask]'``              | DaskExecutor                                                                       |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| docker              | ``pip install 'apache-airflow[docker]'``            | Docker hooks and operators                                                         |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| elasticsearch       | ``pip install 'apache-airflow[elasticsearch]'``     | Elasticsearch hooks and Log Handler                                                |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| exasol              | ``pip install 'apache-airflow[exasol]'``            | Exasol hooks and operators                                                         |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| kubernetes          | ``pip install 'apache-airflow[cncf.kubernetes]'``   | Kubernetes Executor and operator                                                   |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| mongo               | ``pip install 'apache-airflow[mongo]'``             | Mongo hooks and operators                                                          |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| mssql (deprecated)  | ``pip install 'apache-airflow[microsoft.mssql]'``   | Microsoft SQL Server operators and hook,                                           |
+|                     |                                                     | support as an Airflow backend.  Uses pymssql.                                      |
+|                     |                                                     | Will be replaced by subpackage ``odbc``.                                           |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| mysql               | ``pip install 'apache-airflow[mysql]'``             | MySQL operators and hook, support as an Airflow                                    |
+|                     |                                                     | backend. The version of MySQL server has to be                                     |
+|                     |                                                     | 5.6.4+. The exact version upper bound depends                                      |
+|                     |                                                     | on version of ``mysqlclient`` package. For                                         |
+|                     |                                                     | example, ``mysqlclient`` 1.3.12 can only be                                        |
+|                     |                                                     | used with MySQL server 5.6.4 through 5.7.                                          |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| odbc                | ``pip install 'apache-airflow[odbc]'``              | ODBC data sources including MS SQL Server.  Can use MsSqlOperator,                 |
+|                     |                                                     | or as metastore database backend.  Uses pyodbc.                                    |
+|                     |                                                     | See :ref:`howto/connection/odbc` for more info.                                    |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| oracle              | ``pip install 'apache-airflow[oracle]'``            | Oracle hooks and operators                                                         |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| pinot               | ``pip install 'apache-airflow[pinot]'``             | Pinot DB hook                                                                      |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| postgres            | ``pip install 'apache-airflow[postgres]'``          | PostgreSQL operators and hook, support as an                                       |
+|                     |                                                     | Airflow backend                                                                    |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| rabbitmq            | ``pip install 'apache-airflow[rabbitmq]'``          | RabbitMQ support as a Celery backend                                               |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| redis               | ``pip install 'apache-airflow[redis]'``             | Redis hooks and sensors                                                            |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| samba               | ``pip install 'apache-airflow[samba]'``             | :class:`airflow.providers.apache.hive.transfers.hive_to_samba.HiveToSambaOperator` |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
+| statsd              | ``pip install 'apache-airflow[statsd]'``            | Needed by StatsD metrics                                                           |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
 
 
 **Other:**
diff --git a/docs/operators-and-hooks-ref.rst b/docs/operators-and-hooks-ref.rst
index 46f5f1d..01b1401 100644
--- a/docs/operators-and-hooks-ref.rst
+++ b/docs/operators-and-hooks-ref.rst
@@ -213,52 +213,52 @@ Foundation.
    * - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`_
      - `Apache Hive <https://hive.apache.org/>`__
      -
-     - :mod:`airflow.providers.apache.hive.operators.s3_to_hive`
+     - :mod:`airflow.providers.apache.hive.transfers.s3_to_hive`
 
    * - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`_
      - `MySQL <https://www.mysql.com/>`__
      -
-     - :mod:`airflow.providers.mysql.operators.s3_to_mysql`
+     - :mod:`airflow.providers.mysql.transfers.s3_to_mysql`
 
    * - `Apache Cassandra <http://cassandra.apache.org/>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.cassandra_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.cassandra_to_gcs`
 
    * - `Apache Hive <https://hive.apache.org/>`__
      - `Amazon DynamoDB <https://aws.amazon.com/dynamodb/>`__
      -
-     - :mod:`airflow.providers.amazon.aws.operators.hive_to_dynamodb`
+     - :mod:`airflow.providers.amazon.aws.transfers.hive_to_dynamodb`
 
    * - `Apache Hive <https://hive.apache.org/>`__
      - `Apache Druid <https://druid.apache.org/>`__
      -
-     - :mod:`airflow.providers.apache.druid.operators.hive_to_druid`
+     - :mod:`airflow.providers.apache.druid.transfers.hive_to_druid`
 
    * - `Apache Hive <https://hive.apache.org/>`__
      - `MySQL <https://www.mysql.com/>`__
      -
-     - :mod:`airflow.providers.apache.hive.operators.hive_to_mysql`
+     - :mod:`airflow.providers.apache.hive.transfers.hive_to_mysql`
 
    * - `Apache Hive <https://hive.apache.org/>`__
      - `Samba <https://www.samba.org/>`__
      -
-     - :mod:`airflow.providers.apache.hive.operators.hive_to_samba`
+     - :mod:`airflow.providers.apache.hive.transfers.hive_to_samba`
 
    * - `Microsoft SQL Server (MSSQL) <https://www.microsoft.com/pl-pl/sql-server/sql-server-downloads>`__
      - `Apache Hive <https://hive.apache.org/>`__
      -
-     - :mod:`airflow.providers.apache.hive.operators.mssql_to_hive`
+     - :mod:`airflow.providers.apache.hive.transfers.mssql_to_hive`
 
    * - `MySQL <https://www.mysql.com/>`__
      - `Apache Hive <https://hive.apache.org/>`__
      -
-     - :mod:`airflow.providers.apache.hive.operators.mysql_to_hive`
+     - :mod:`airflow.providers.apache.hive.transfers.mysql_to_hive`
 
    * - `Vertica <https://www.vertica.com/>`__
      - `Apache Hive <https://hive.apache.org/>`__
      -
-     - :mod:`airflow.providers.apache.hive.operators.vertica_to_hive`
+     - :mod:`airflow.providers.apache.hive.transfers.vertica_to_hive`
 
 .. _Azure:
 
@@ -335,17 +335,17 @@ These integrations allow you to copy data from/to Microsoft Azure.
    * - `Azure Data Lake Storage <https://azure.microsoft.com/en-us/services/storage/data-lake-storage/>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.adls_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.adls_to_gcs`
 
    * - Local
      - `Azure Blob Storage <https://azure.microsoft.com/en-us/services/storage/blobs/>`__
      -
-     - :mod:`airflow.providers.microsoft.azure.operators.file_to_wasb`
+     - :mod:`airflow.providers.microsoft.azure.transfers.file_to_wasb`
 
    * - `Oracle <https://www.oracle.com/pl/database/>`__
      - `Azure Data Lake Storage <https://azure.microsoft.com/en-us/services/storage/data-lake-storage/>`__
      -
-     - :mod:`airflow.providers.microsoft.azure.operators.oracle_to_azure_data_lake_transfer`
+     - :mod:`airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake`
 
 
 .. _AWS:
@@ -520,7 +520,7 @@ These integrations allow you to copy data from/to Amazon Web Services.
        All GCP services :ref:`[1] <integration:GCP-Discovery>`
      - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`__
      - :doc:`How to use <howto/operator/amazon/aws/google_api_to_s3_transfer>`
-     - :mod:`airflow.providers.amazon.aws.operators.google_api_to_s3_transfer`
+     - :mod:`airflow.providers.amazon.aws.transfers.google_api_to_s3`
 
    * - `Amazon DataSync <https://aws.amazon.com/datasync/>`__
      - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`_
@@ -530,63 +530,63 @@ These integrations allow you to copy data from/to Amazon Web Services.
    * - `Amazon DynamoDB <https://aws.amazon.com/dynamodb/>`__
      - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`_
      -
-     - :mod:`airflow.providers.amazon.aws.operators.dynamodb_to_s3`
+     - :mod:`airflow.providers.amazon.aws.transfers.dynamodb_to_s3`
 
    * - `Amazon Redshift <https://aws.amazon.com/redshift/>`__
      - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`_
      -
-     - :mod:`airflow.providers.amazon.aws.operators.redshift_to_s3`
+     - :mod:`airflow.providers.amazon.aws.transfers.redshift_to_s3`
 
    * - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`_
      - `Amazon Redshift <https://aws.amazon.com/redshift/>`__
      - :doc:`How to use <howto/operator/amazon/aws/s3_to_redshift>`
-     - :mod:`airflow.providers.amazon.aws.operators.s3_to_redshift`
+     - :mod:`airflow.providers.amazon.aws.transfers.s3_to_redshift`
 
    * - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`_
      - `Snowflake <https://snowflake.com/>`__
      -
-     - :mod:`airflow.providers.snowflake.operators.s3_to_snowflake`
+     - :mod:`airflow.providers.snowflake.transfers.s3_to_snowflake`
 
    * - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`_
      - `Apache Hive <https://hive.apache.org/>`__
      -
-     - :mod:`airflow.providers.apache.hive.operators.s3_to_hive`
+     - :mod:`airflow.providers.apache.hive.transfers.s3_to_hive`
 
    * - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      - :doc:`How to use <howto/operator/gcp/cloud_storage_transfer_service>`
-     - :mod:`airflow.providers.google.cloud.operators.s3_to_gcs`,
+     - :mod:`airflow.providers.google.cloud.transfers.s3_to_gcs`,
        :mod:`airflow.providers.google.cloud.operators.cloud_storage_transfer_service`
 
    * - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`_
      - `SSH File Transfer Protocol (SFTP) <https://tools.ietf.org/wg/secsh/draft-ietf-secsh-filexfer/>`__
      -
-     - :mod:`airflow.providers.amazon.aws.operators.s3_to_sftp`
+     - :mod:`airflow.providers.amazon.aws.transfers.s3_to_sftp`
 
    * - `Apache Hive <https://hive.apache.org/>`__
      - `Amazon DynamoDB <https://aws.amazon.com/dynamodb/>`__
      -
-     - :mod:`airflow.providers.amazon.aws.operators.hive_to_dynamodb`
+     - :mod:`airflow.providers.amazon.aws.transfers.hive_to_dynamodb`
 
    * - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`__
      -
-     - :mod:`airflow.providers.amazon.aws.operators.gcs_to_s3`
+     - :mod:`airflow.providers.amazon.aws.transfers.gcs_to_s3`
 
    * - `Internet Message Access Protocol (IMAP) <https://tools.ietf.org/html/rfc3501>`__
      - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`__
      - :doc:`How to use <howto/operator/amazon/aws/imap_attachment_to_s3>`
-     - :mod:`airflow.providers.amazon.aws.operators.imap_attachment_to_s3`
+     - :mod:`airflow.providers.amazon.aws.transfers.imap_attachment_to_s3`
 
    * - `MongoDB <https://www.mongodb.com/what-is-mongodb>`__
      - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`__
      -
-     - :mod:`airflow.providers.amazon.aws.operators.mongo_to_s3`
+     - :mod:`airflow.providers.amazon.aws.transfers.mongo_to_s3`
 
    * - `SSH File Transfer Protocol (SFTP) <https://tools.ietf.org/wg/secsh/draft-ietf-secsh-filexfer/>`__
      - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`_
      -
-     - :mod:`airflow.providers.amazon.aws.operators.sftp_to_s3`
+     - :mod:`airflow.providers.amazon.aws.transfers.sftp_to_s3`
 
 :ref:`[1] <integration:AWS-Discovery-ref>` Those discovery-based operators use
 :class:`~airflow.providers.google.common.hooks.discovery_api.GoogleDiscoveryApiHook` to communicate with Google
@@ -850,43 +850,49 @@ These integrations allow you to copy data from/to Google Cloud Platform.
        All services :ref:`[1] <integration:GCP-Discovery>`
      - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`__
      - :doc:`How to use <howto/operator/amazon/aws/google_api_to_s3_transfer>`
-     - :mod:`airflow.providers.amazon.aws.operators.google_api_to_s3_transfer`
+     - :mod:`airflow.providers.amazon.aws.transfers.google_api_to_s3`
 
    * - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      - :doc:`How to use <howto/operator/gcp/cloud_storage_transfer_service>`
-     - :mod:`airflow.providers.google.cloud.operators.s3_to_gcs`,
+     - :mod:`airflow.providers.google.cloud.transfers.s3_to_gcs`,
        :mod:`airflow.providers.google.cloud.operators.cloud_storage_transfer_service`
 
    * - `Apache Cassandra <http://cassandra.apache.org/>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.cassandra_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.cassandra_to_gcs`
 
    * - `Azure Data Lake Storage <https://azure.microsoft.com/pl-pl/services/storage/data-lake-storage/>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.adls_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.adls_to_gcs`
 
    * - `Facebook Ads <http://business.facebook.com>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      - :doc:`How to use <howto/operator/gcp/facebook_ads_to_gcs>`
-     - :mod:`airflow.providers.google.cloud.operators.facebook_ads_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.facebook_ads_to_gcs`
+
+
+   * - `Google Ads <https://ads.google.com/>`__
+     - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
+     - :doc:`How to use <howto/operator/gcp/ads>`
+     - :mod:`airflow.providers.google.ads.transfers.ads_to_gcs`
 
    * - `Google BigQuery <https://cloud.google.com/bigquery/>`__
      - `MySQL <https://www.mysql.com/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.bigquery_to_mysql`
+     - :mod:`airflow.providers.google.cloud.transfers.bigquery_to_mysql`
 
    * - `Google BigQuery <https://cloud.google.com/bigquery/>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.bigquery_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.bigquery_to_gcs`
 
    * - `Google BigQuery <https://cloud.google.com/bigquery/>`__
      - `Google BigQuery <https://cloud.google.com/bigquery/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.bigquery_to_bigquery`
+     - :mod:`airflow.providers.google.cloud.transfers.bigquery_to_bigquery`
 
    * - `Cloud Firestore <https://firebase.google.com/docs/firestore>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
@@ -896,74 +902,79 @@ These integrations allow you to copy data from/to Google Cloud Platform.
    * - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`__
      -
-     - :mod:`airflow.providers.amazon.aws.operators.gcs_to_s3`
+     - :mod:`airflow.providers.amazon.aws.transfers.gcs_to_s3`
 
    * - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      - `Google BigQuery <https://cloud.google.com/bigquery/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.gcs_to_bigquery`
+     - :mod:`airflow.providers.google.cloud.transfers.gcs_to_bigquery`
 
    * - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      - :doc:`How to use <howto/operator/gcp/gcs_to_gcs>`,
        :doc:`How to use <howto/operator/gcp/cloud_storage_transfer_service>`
-     - :mod:`airflow.providers.google.cloud.operators.gcs_to_gcs`,
+     - :mod:`airflow.providers.google.cloud.transfers.gcs_to_gcs`,
        :mod:`airflow.providers.google.cloud.operators.cloud_storage_transfer_service`
 
    * - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
+     - Local
+     - :doc:`How to use <howto/operator/gcp/gcs_to_local>`
+     - :mod:`airflow.providers.google.cloud.transfers.gcs_to_local`
+
+   * - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      - `Google Drive <https://www.google.com/drive/>`__
      -
-     - :mod:`airflow.providers.google.suite.operators.gcs_to_gdrive`
+     - :mod:`airflow.providers.google.suite.transfers.gcs_to_gdrive`
 
    * - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      - SFTP
      - :doc:`How to use <howto/operator/gcp/gcs_to_sftp>`
-     - :mod:`airflow.providers.google.cloud.operators.gcs_to_sftp`
+     - :mod:`airflow.providers.google.cloud.transfers.gcs_to_sftp`
 
    * - Local
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      - :doc:`How to use <howto/operator/gcp/local_to_gcs>`
-     - :mod:`airflow.providers.google.cloud.operators.local_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.local_to_gcs`
 
    * - `Microsoft SQL Server (MSSQL) <https://www.microsoft.com/pl-pl/sql-server/sql-server-downloads>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.mssql_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.mssql_to_gcs`
 
    * - `MySQL <https://www.mysql.com/>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.mysql_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.mysql_to_gcs`
 
   * - `PostgreSQL <https://www.postgresql.org/>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.postgres_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.postgres_to_gcs`
 
    * - `Presto <https://prestodb.io/>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      - :doc:`How to use <howto/operator/gcp/presto_to_gcs>`
-     - :mod:`airflow.providers.google.cloud.operators.presto_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.presto_to_gcs`
 
    * - SFTP
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      - :doc:`How to use <howto/operator/gcp/sftp_to_gcs>`
-     - :mod:`airflow.providers.google.cloud.operators.sftp_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.sftp_to_gcs`
 
    * - SQL
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.sql_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.sql_to_gcs`
 
    * - `Google Spreadsheet <https://www.google.com/intl/en/sheets/about/>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      - :doc:`How to use <howto/operator/gcp/sheets_to_gcs>`
-     - :mod:`airflow.providers.google.cloud.operators.sheets_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.sheets_to_gcs`
 
    * - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      - `Google Spreadsheet <https://www.google.com/intl/en/sheets/about/>`__
      - :doc:`How to use <howto/operator/gcp/gcs_to_sheets>`
-     - :mod:`airflow.providers.google.suite.operators.gcs_to_sheets`
+     - :mod:`airflow.providers.google.suite.transfers.gcs_to_sheets`
 
 .. _integration:GCP-Discovery:
 
@@ -1219,7 +1230,7 @@ These integrations allow you to perform various operations within various servic
      -
      - :mod:`airflow.providers.snowflake.hooks.snowflake`
      - :mod:`airflow.providers.snowflake.operators.snowflake`,
-       :mod:`airflow.providers.snowflake.operators.snowflake_to_slack`
+       :mod:`airflow.providers.snowflake.transfers.snowflake_to_slack`
      -
 
    * - `Vertica <https://www.vertica.com/>`__
@@ -1250,17 +1261,17 @@ These integrations allow you to perform various operations within various servic
    * - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      - `Google Drive <https://www.google.com/drive/>`__
      - :doc:`How to use <howto/operator/gcp/gcs_to_gdrive>`
-     - :mod:`airflow.providers.google.suite.operators.gcs_to_gdrive`
+     - :mod:`airflow.providers.google.suite.transfers.gcs_to_gdrive`
 
    * - `Vertica <https://www.vertica.com/>`__
      - `Apache Hive <https://hive.apache.org/>`__
      -
-     - :mod:`airflow.providers.apache.hive.operators.vertica_to_hive`
+     - :mod:`airflow.providers.apache.hive.transfers.vertica_to_hive`
 
    * - `Vertica <https://www.vertica.com/>`__
      - `MySQL <https://www.mysql.com/>`__
      -
-     - :mod:`airflow.providers.mysql.operators.vertica_to_mysql`
+     - :mod:`airflow.providers.mysql.transfers.vertica_to_mysql`
 
 .. _software:
 
@@ -1425,72 +1436,72 @@ These integrations allow you to copy data.
    * - `Apache Hive <https://hive.apache.org/>`__
      - `Samba <https://www.samba.org/>`__
      -
-     - :mod:`airflow.providers.apache.hive.operators.hive_to_samba`
+     - :mod:`airflow.providers.apache.hive.transfers.hive_to_samba`
 
    * - `BigQuery <https://cloud.google.com/bigquery/>`__
      - `MySQL <https://www.mysql.com/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.bigquery_to_mysql`
+     - :mod:`airflow.providers.google.cloud.transfers.bigquery_to_mysql`
 
    * - `Microsoft SQL Server (MSSQL) <https://www.microsoft.com/pl-pl/sql-server/sql-server-downloads>`__
      - `Apache Hive <https://hive.apache.org/>`__
      -
-     - :mod:`airflow.providers.apache.hive.operators.mssql_to_hive`
+     - :mod:`airflow.providers.apache.hive.transfers.mssql_to_hive`
 
    * - `Microsoft SQL Server (MSSQL) <https://www.microsoft.com/pl-pl/sql-server/sql-server-downloads>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.mssql_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.mssql_to_gcs`
 
    * - `MongoDB <https://www.mongodb.com/what-is-mongodb>`__
      - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`__
      -
-     - :mod:`airflow.providers.amazon.aws.operators.mongo_to_s3`
+     - :mod:`airflow.providers.amazon.aws.transfers.mongo_to_s3`
 
    * - `MySQL <https://www.mysql.com/>`__
      - `Apache Hive <https://hive.apache.org/>`__
      -
-     - :mod:`airflow.providers.apache.hive.operators.mysql_to_hive`
+     - :mod:`airflow.providers.apache.hive.transfers.mysql_to_hive`
 
    * - `MySQL <https://www.mysql.com/>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.mysql_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.mysql_to_gcs`
 
    * - `Oracle <https://www.oracle.com/pl/database/>`__
      - `Azure Data Lake Storage <https://azure.microsoft.com/en-us/services/storage/data-lake-storage/>`__
      -
-     - :mod:`airflow.providers.microsoft.azure.operators.oracle_to_azure_data_lake_transfer`
+     - :mod:`airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake`
 
    * - `Oracle <https://www.oracle.com/pl/database/>`__
      - `Oracle <https://www.oracle.com/pl/database/>`__
      -
-     - :mod:`airflow.providers.oracle.operators.oracle_to_oracle_transfer`
+     - :mod:`airflow.providers.oracle.transfers.oracle_to_oracle`
 
    * - `PostgresSQL <https://www.postgresql.org/>`__
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.postgres_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.postgres_to_gcs`
 
    * - `Presto <https://prestodb.github.io/>`__
      - `MySQL <https://www.mysql.com/>`__
      -
-     - :mod:`airflow.providers.mysql.operators.presto_to_mysql`
+     - :mod:`airflow.providers.mysql.transfers.presto_to_mysql`
 
    * - SQL
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.sql_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.sql_to_gcs`
 
    * - `Vertica <https://www.vertica.com/>`__
      - `Apache Hive <https://hive.apache.org/>`__
      -
-     - :mod:`airflow.providers.apache.hive.operators.vertica_to_hive`
+     - :mod:`airflow.providers.apache.hive.transfers.vertica_to_hive`
 
    * - `Vertica <https://www.vertica.com/>`__
      - `MySQL <https://www.mysql.com/>`__
      -
-     - :mod:`airflow.providers.mysql.operators.vertica_to_mysql`
+     - :mod:`airflow.providers.mysql.transfers.vertica_to_mysql`
 
 .. _protocol:
 
@@ -1588,24 +1599,24 @@ These integrations allow you to copy data.
    * - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`_
      - `SSH File Transfer Protocol (SFTP) <https://tools.ietf.org/wg/secsh/draft-ietf-secsh-filexfer/>`__
      -
-     - :mod:`airflow.providers.amazon.aws.operators.s3_to_sftp`
+     - :mod:`airflow.providers.amazon.aws.transfers.s3_to_sftp`
 
    * - Filesystem
      - `Azure Blob Storage <https://azure.microsoft.com/en-us/services/storage/blobs/>`__
      -
-     - :mod:`airflow.providers.microsoft.azure.operators.file_to_wasb`
+     - :mod:`airflow.providers.microsoft.azure.transfers.file_to_wasb`
 
    * - Filesystem
      - `Google Cloud Storage (GCS) <https://cloud.google.com/gcs/>`__
      -
-     - :mod:`airflow.providers.google.cloud.operators.local_to_gcs`
+     - :mod:`airflow.providers.google.cloud.transfers.local_to_gcs`
 
    * - `Internet Message Access Protocol (IMAP) <https://tools.ietf.org/html/rfc3501>`__
      - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`__
      - :doc:`How to use <howto/operator/amazon/aws/imap_attachment_to_s3>`
-     - :mod:`airflow.providers.amazon.aws.operators.imap_attachment_to_s3`
+     - :mod:`airflow.providers.amazon.aws.transfers.imap_attachment_to_s3`
 
    * - `SSH File Transfer Protocol (SFTP) <https://tools.ietf.org/wg/secsh/draft-ietf-secsh-filexfer/>`__
      - `Amazon Simple Storage Service (S3) <https://aws.amazon.com/s3/>`_
      -
-     - :mod:`airflow.providers.amazon.aws.operators.sftp_to_s3`
+     - :mod:`airflow.providers.amazon.aws.transfers.sftp_to_s3`
diff --git a/docs/plugins.rst b/docs/plugins.rst
index c67ead9..63719d5 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -161,7 +161,7 @@ definitions in Airflow.
     from airflow.hooks.base_hook import BaseHook
     from airflow.models import BaseOperator
     from airflow.models.baseoperator import BaseOperatorLink
-    from airflow.providers.amazon.aws.operators.gcs_to_s3 import GCSToS3Operator
+    from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator
     from airflow.sensors.base_sensor_operator import BaseSensorOperator
 
     # Will show up under airflow.hooks.test_plugin.PluginHook
diff --git a/requirements/requirements-python3.6.txt b/requirements/requirements-python3.6.txt
index fe07f34..0a4e142 100644
--- a/requirements/requirements-python3.6.txt
+++ b/requirements/requirements-python3.6.txt
@@ -15,7 +15,7 @@ Flask==1.1.2
 GitPython==3.1.3
 HeapDict==1.0.1
 JPype1==0.7.5
-JayDeBeApi==1.2.2
+JayDeBeApi==1.2.3
 Jinja2==2.10.3
 Mako==1.1.3
 Markdown==2.6.11
@@ -29,7 +29,7 @@ Pygments==2.6.1
 SQLAlchemy-JSONField==0.9.0
 SQLAlchemy-Utils==0.36.6
 SQLAlchemy==1.3.17
-Sphinx==3.1.0
+Sphinx==3.1.1
 Unidecode==1.1.1
 WTForms==2.3.1
 Werkzeug==0.16.1
@@ -72,9 +72,9 @@ beautifulsoup4==4.7.1
 billiard==3.6.3.0
 black==19.10b0
 blinker==1.4
-boto3==1.14.0
+boto3==1.14.3
 boto==2.49.0
-botocore==1.17.0
+botocore==1.17.3
 bowler==0.8.0
 cached-property==1.5.1
 cachetools==4.1.0
@@ -96,7 +96,7 @@ colorlog==4.0.2
 connexion==2.7.0
 contextvars==2.4
 coverage==5.1
-croniter==0.3.32
+croniter==0.3.33
 cryptography==2.9.2
 curlify==2.2.1
 cx-Oracle==7.3.0
@@ -104,7 +104,7 @@ dask==2.18.1
 datadog==0.36.0
 decorator==4.4.2
 defusedxml==0.6.0
-dill==0.3.1.1
+dill==0.3.2
 distlib==0.3.0
 distributed==2.18.0
 dnspython==1.16.0
@@ -135,14 +135,14 @@ funcsigs==1.0.2
 future-fstrings==1.2.0
 future==0.18.2
 gcsfs==0.6.2
-gevent==20.6.0
+gevent==20.6.1
 gitdb==4.0.5
 google-ads==4.0.0
 google-api-core==1.20.0
-google-api-python-client==1.9.2
+google-api-python-client==1.9.3
 google-auth-httplib2==0.0.3
 google-auth-oauthlib==0.4.1
-google-auth==1.16.1
+google-auth==1.17.2
 google-cloud-automl==0.10.0
 google-cloud-bigquery-datatransfer==1.0.0
 google-cloud-bigquery==1.25.0
@@ -161,7 +161,7 @@ google-cloud-redis==1.0.0
 google-cloud-secret-manager==1.0.0
 google-cloud-spanner==1.17.0
 google-cloud-speech==1.3.2
-google-cloud-storage==1.28.1
+google-cloud-storage==1.29.0
 google-cloud-tasks==1.5.0
 google-cloud-texttospeech==1.0.1
 google-cloud-translate==2.0.1
@@ -187,7 +187,7 @@ ijson==2.6.1
 imagesize==1.2.0
 immutables==0.14
 importlib-metadata==1.6.1
-importlib-resources==2.0.0
+importlib-resources==2.0.1
 inflection==0.5.0
 ipdb==0.13.2
 ipython-genutils==0.2.0
@@ -220,7 +220,7 @@ mccabe==0.6.1
 mock==4.0.2
 mongomock==3.19.0
 monotonic==1.5
-more-itertools==8.3.0
+more-itertools==8.4.0
 moto==1.3.14
 msgpack==1.0.0
 msrest==0.6.16
@@ -257,7 +257,7 @@ pep562==1.0
 pexpect==4.8.0
 pickleshare==0.7.5
 pinotdb==0.1.1
-pipdeptree==0.13.2
+pipdeptree==1.0.0
 pluggy==0.13.1
 pre-commit==2.5.1
 presto-python-client==0.7.0
@@ -268,7 +268,7 @@ psutil==5.7.0
 psycopg2-binary==2.8.5
 ptyprocess==0.6.0
 py4j==0.10.7
-py==1.8.1
+py==1.8.2
 pyOpenSSL==19.1.0
 pyarrow==0.17.1
 pyasn1-modules==0.2.8
@@ -291,11 +291,11 @@ pypd==1.1.0
 pyrsistent==0.16.0
 pysftp==0.2.9
 pyspark==2.4.6
-pytest-cov==2.9.0
+pytest-cov==2.10.0
 pytest-forked==1.1.3
-pytest-instafail==0.4.1.post0
+pytest-instafail==0.4.2
 pytest-rerunfailures==9.0
-pytest-timeout==1.3.4
+pytest-timeout==1.4.1
 pytest-xdist==1.32.0
 pytest==5.4.3
 python-daemon==2.1.2
@@ -320,11 +320,11 @@ requests-ntlm==1.1.0
 requests-oauthlib==1.1.0
 requests-toolbelt==0.9.1
 requests==2.23.0
-responses==0.10.14
-rsa==4.0
+responses==0.10.15
+rsa==4.6
 s3transfer==0.3.3
 sasl==0.2.1
-semver==2.10.1
+semver==2.10.2
 sendgrid==6.3.1
 sentinels==1.0.0
 sentry-sdk==0.14.4
@@ -379,7 +379,7 @@ uritemplate==3.0.1
 urllib3==1.25.9
 vertica-python==0.10.4
 vine==1.3.0
-virtualenv==20.0.21
+virtualenv==20.0.23
 watchtower==0.7.3
 wcwidth==0.2.4
 websocket-client==0.57.0
diff --git a/requirements/requirements-python3.7.txt b/requirements/requirements-python3.7.txt
index aa1b97b..24487b5 100644
--- a/requirements/requirements-python3.7.txt
+++ b/requirements/requirements-python3.7.txt
@@ -15,7 +15,7 @@ Flask==1.1.2
 GitPython==3.1.3
 HeapDict==1.0.1
 JPype1==0.7.5
-JayDeBeApi==1.2.2
+JayDeBeApi==1.2.3
 Jinja2==2.10.3
 Mako==1.1.3
 Markdown==2.6.11
@@ -29,7 +29,7 @@ Pygments==2.6.1
 SQLAlchemy-JSONField==0.9.0
 SQLAlchemy-Utils==0.36.6
 SQLAlchemy==1.3.17
-Sphinx==3.1.0
+Sphinx==3.1.1
 Unidecode==1.1.1
 WTForms==2.3.1
 Werkzeug==0.16.1
@@ -45,7 +45,7 @@ apispec==1.3.3
 appdirs==1.4.4
 argcomplete==1.11.1
 asn1crypto==1.3.0
-astroid==2.4.2
+astroid==2.3.3
 async-generator==1.10
 async-timeout==3.0.1
 atlasclient==1.0.0
@@ -72,9 +72,9 @@ beautifulsoup4==4.7.1
 billiard==3.6.3.0
 black==19.10b0
 blinker==1.4
-boto3==1.14.0
+boto3==1.14.3
 boto==2.49.0
-botocore==1.17.0
+botocore==1.17.3
 bowler==0.8.0
 cached-property==1.5.1
 cachetools==4.1.0
@@ -95,7 +95,7 @@ colorama==0.4.3
 colorlog==4.0.2
 connexion==2.7.0
 coverage==5.1
-croniter==0.3.32
+croniter==0.3.33
 cryptography==2.9.2
 curlify==2.2.1
 cx-Oracle==7.3.0
@@ -103,7 +103,7 @@ dask==2.18.1
 datadog==0.36.0
 decorator==4.4.2
 defusedxml==0.6.0
-dill==0.3.1.1
+dill==0.3.2
 distlib==0.3.0
 distributed==2.18.0
 dnspython==1.16.0
@@ -134,14 +134,14 @@ funcsigs==1.0.2
 future-fstrings==1.2.0
 future==0.18.2
 gcsfs==0.6.2
-gevent==20.6.0
+gevent==20.6.1
 gitdb==4.0.5
 google-ads==5.1.0
 google-api-core==1.20.0
-google-api-python-client==1.9.2
+google-api-python-client==1.9.3
 google-auth-httplib2==0.0.3
 google-auth-oauthlib==0.4.1
-google-auth==1.16.1
+google-auth==1.17.2
 google-cloud-automl==0.10.0
 google-cloud-bigquery-datatransfer==1.0.0
 google-cloud-bigquery==1.25.0
@@ -160,7 +160,7 @@ google-cloud-redis==1.0.0
 google-cloud-secret-manager==1.0.0
 google-cloud-spanner==1.17.0
 google-cloud-speech==1.3.2
-google-cloud-storage==1.28.1
+google-cloud-storage==1.29.0
 google-cloud-tasks==1.5.0
 google-cloud-texttospeech==1.0.1
 google-cloud-translate==2.0.1
@@ -216,7 +216,7 @@ mccabe==0.6.1
 mock==4.0.2
 mongomock==3.19.0
 monotonic==1.5
-more-itertools==8.3.0
+more-itertools==8.4.0
 moto==1.3.14
 msgpack==1.0.0
 msrest==0.6.16
@@ -252,7 +252,7 @@ pendulum==2.1.0
 pexpect==4.8.0
 pickleshare==0.7.5
 pinotdb==0.1.1
-pipdeptree==0.13.2
+pipdeptree==1.0.0
 pluggy==0.13.1
 pre-commit==2.5.1
 presto-python-client==0.7.0
@@ -263,7 +263,7 @@ psutil==5.7.0
 psycopg2-binary==2.8.5
 ptyprocess==0.6.0
 py4j==0.10.7
-py==1.8.1
+py==1.8.2
 pyOpenSSL==19.1.0
 pyarrow==0.17.1
 pyasn1-modules==0.2.8
@@ -286,11 +286,11 @@ pypd==1.1.0
 pyrsistent==0.16.0
 pysftp==0.2.9
 pyspark==2.4.6
-pytest-cov==2.9.0
+pytest-cov==2.10.0
 pytest-forked==1.1.3
-pytest-instafail==0.4.1.post0
+pytest-instafail==0.4.2
 pytest-rerunfailures==9.0
-pytest-timeout==1.3.4
+pytest-timeout==1.4.1
 pytest-xdist==1.32.0
 pytest==5.4.3
 python-daemon==2.1.2
@@ -315,11 +315,11 @@ requests-ntlm==1.1.0
 requests-oauthlib==1.1.0
 requests-toolbelt==0.9.1
 requests==2.23.0
-responses==0.10.14
-rsa==4.0
+responses==0.10.15
+rsa==4.6
 s3transfer==0.3.3
 sasl==0.2.1
-semver==2.10.1
+semver==2.10.2
 sendgrid==6.3.1
 sentinels==1.0.0
 sentry-sdk==0.14.4
@@ -373,7 +373,7 @@ uritemplate==3.0.1
 urllib3==1.25.9
 vertica-python==0.10.4
 vine==1.3.0
-virtualenv==20.0.21
+virtualenv==20.0.23
 watchtower==0.7.3
 wcwidth==0.2.4
 websocket-client==0.57.0
diff --git a/requirements/requirements-python3.8.txt b/requirements/requirements-python3.8.txt
index 976bf7f..4e7317f 100644
--- a/requirements/requirements-python3.8.txt
+++ b/requirements/requirements-python3.8.txt
@@ -15,7 +15,7 @@ Flask==1.1.2
 GitPython==3.1.3
 HeapDict==1.0.1
 JPype1==0.7.5
-JayDeBeApi==1.2.2
+JayDeBeApi==1.2.3
 Jinja2==2.10.3
 Mako==1.1.3
 Markdown==2.6.11
@@ -29,7 +29,7 @@ Pygments==2.6.1
 SQLAlchemy-JSONField==0.9.0
 SQLAlchemy-Utils==0.36.6
 SQLAlchemy==1.3.17
-Sphinx==3.1.0
+Sphinx==3.1.1
 Unidecode==1.1.1
 WTForms==2.3.1
 Werkzeug==0.16.1
@@ -45,7 +45,7 @@ apispec==1.3.3
 appdirs==1.4.4
 argcomplete==1.11.1
 asn1crypto==1.3.0
-astroid==2.4.2
+astroid==2.3.3
 async-generator==1.10
 async-timeout==3.0.1
 atlasclient==1.0.0
@@ -72,9 +72,9 @@ beautifulsoup4==4.7.1
 billiard==3.6.3.0
 black==19.10b0
 blinker==1.4
-boto3==1.14.0
+boto3==1.14.3
 boto==2.49.0
-botocore==1.17.0
+botocore==1.17.3
 bowler==0.8.0
 cached-property==1.5.1
 cachetools==4.1.0
@@ -95,7 +95,7 @@ colorama==0.4.3
 colorlog==4.0.2
 connexion==2.7.0
 coverage==5.1
-croniter==0.3.32
+croniter==0.3.33
 cryptography==2.9.2
 curlify==2.2.1
 cx-Oracle==7.3.0
@@ -103,7 +103,7 @@ dask==2.18.1
 datadog==0.36.0
 decorator==4.4.2
 defusedxml==0.6.0
-dill==0.3.1.1
+dill==0.3.2
 distlib==0.3.0
 distributed==2.18.0
 dnspython==1.16.0
@@ -134,14 +134,14 @@ funcsigs==1.0.2
 future-fstrings==1.2.0
 future==0.18.2
 gcsfs==0.6.2
-gevent==20.6.0
+gevent==20.6.1
 gitdb==4.0.5
 google-ads==5.1.0
 google-api-core==1.20.0
-google-api-python-client==1.9.2
+google-api-python-client==1.9.3
 google-auth-httplib2==0.0.3
 google-auth-oauthlib==0.4.1
-google-auth==1.16.1
+google-auth==1.17.2
 google-cloud-automl==0.10.0
 google-cloud-bigquery-datatransfer==1.0.0
 google-cloud-bigquery==1.25.0
@@ -160,7 +160,7 @@ google-cloud-redis==1.0.0
 google-cloud-secret-manager==1.0.0
 google-cloud-spanner==1.17.0
 google-cloud-speech==1.3.2
-google-cloud-storage==1.28.1
+google-cloud-storage==1.29.0
 google-cloud-tasks==1.5.0
 google-cloud-texttospeech==1.0.1
 google-cloud-translate==2.0.1
@@ -216,7 +216,7 @@ mccabe==0.6.1
 mock==4.0.2
 mongomock==3.19.0
 monotonic==1.5
-more-itertools==8.3.0
+more-itertools==8.4.0
 moto==1.3.14
 msgpack==1.0.0
 msrest==0.6.16
@@ -252,7 +252,7 @@ pendulum==2.1.0
 pexpect==4.8.0
 pickleshare==0.7.5
 pinotdb==0.1.1
-pipdeptree==0.13.2
+pipdeptree==1.0.0
 pluggy==0.13.1
 pre-commit==2.5.1
 presto-python-client==0.7.0
@@ -263,7 +263,7 @@ psutil==5.7.0
 psycopg2-binary==2.8.5
 ptyprocess==0.6.0
 py4j==0.10.7
-py==1.8.1
+py==1.8.2
 pyOpenSSL==19.1.0
 pyarrow==0.17.1
 pyasn1-modules==0.2.8
@@ -285,11 +285,11 @@ pypd==1.1.0
 pyrsistent==0.16.0
 pysftp==0.2.9
 pyspark==2.4.6
-pytest-cov==2.9.0
+pytest-cov==2.10.0
 pytest-forked==1.1.3
-pytest-instafail==0.4.1.post0
+pytest-instafail==0.4.2
 pytest-rerunfailures==9.0
-pytest-timeout==1.3.4
+pytest-timeout==1.4.1
 pytest-xdist==1.32.0
 pytest==5.4.3
 python-daemon==2.1.2
@@ -314,11 +314,11 @@ requests-ntlm==1.1.0
 requests-oauthlib==1.1.0
 requests-toolbelt==0.9.1
 requests==2.23.0
-responses==0.10.14
-rsa==4.1
+responses==0.10.15
+rsa==4.6
 s3transfer==0.3.3
 sasl==0.2.1
-semver==2.10.1
+semver==2.10.2
 sendgrid==6.3.1
 sentinels==1.0.0
 sentry-sdk==0.14.4
@@ -372,11 +372,11 @@ uritemplate==3.0.1
 urllib3==1.25.9
 vertica-python==0.10.4
 vine==1.3.0
-virtualenv==20.0.21
+virtualenv==20.0.23
 watchtower==0.7.3
 wcwidth==0.2.4
 websocket-client==0.57.0
-wrapt==1.12.1
+wrapt==1.11.2
 xmltodict==0.12.0
 yamllint==1.23.0
 yandexcloud==0.41.0
diff --git a/scripts/ci/in_container/_in_container_utils.sh b/scripts/ci/in_container/_in_container_utils.sh
index bb11fc5..35f6d62 100644
--- a/scripts/ci/in_container/_in_container_utils.sh
+++ b/scripts/ci/in_container/_in_container_utils.sh
@@ -97,7 +97,7 @@ function in_container_fix_ownership() {
         set +o pipefail
         echo "Fixing ownership of mounted files"
         sudo find "${AIRFLOW_SOURCES}" -print0 -user root \
-        | sudo xargs --null chown "${HOST_USER_ID}.${HOST_GROUP_ID}" --no-dereference >/dev/null 2>&1
+        | sudo xargs --null chown "${HOST_USER_ID}.${HOST_GROUP_ID}" --no-dereference || true >/dev/null 2>&1
         sudo find "/root/.aws" "/root/.azure" "/root/.config" "/root/.docker" -print0 -user root \
         | sudo xargs --null chown "${HOST_USER_ID}.${HOST_GROUP_ID}" --no-dereference || true >/dev/null 2>&1
         set -o pipefail
diff --git a/tests/providers/oracle/operators/__init__.py b/tests/providers/amazon/aws/transfers/__init__.py
similarity index 99%
copy from tests/providers/oracle/operators/__init__.py
copy to tests/providers/amazon/aws/transfers/__init__.py
index 217e5db..13a8339 100644
--- a/tests/providers/oracle/operators/__init__.py
+++ b/tests/providers/amazon/aws/transfers/__init__.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
diff --git a/tests/providers/amazon/aws/operators/test_dynamodb_to_s3.py b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
similarity index 92%
rename from tests/providers/amazon/aws/operators/test_dynamodb_to_s3.py
rename to tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
index d51231c..ee27aa0 100644
--- a/tests/providers/amazon/aws/operators/test_dynamodb_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_dynamodb_to_s3.py
@@ -20,7 +20,7 @@ import json
 import unittest
 from unittest.mock import MagicMock, patch
 
-from airflow.providers.amazon.aws.operators.dynamodb_to_s3 import DynamoDBToS3Operator
+from airflow.providers.amazon.aws.transfers.dynamodb_to_s3 import DynamoDBToS3Operator
 
 
 class DynamodbToS3Test(unittest.TestCase):
@@ -34,8 +34,8 @@ class DynamodbToS3Test(unittest.TestCase):
             for line in lines:
                 self.output_queue.append(json.loads(line))
 
-    @patch('airflow.providers.amazon.aws.operators.dynamodb_to_s3.S3Hook')
-    @patch('airflow.providers.amazon.aws.operators.dynamodb_to_s3.AwsDynamoDBHook')
+    @patch('airflow.providers.amazon.aws.transfers.dynamodb_to_s3.S3Hook')
+    @patch('airflow.providers.amazon.aws.transfers.dynamodb_to_s3.AwsDynamoDBHook')
     def test_dynamodb_to_s3_success(self, mock_aws_dynamodb_hook, mock_s3_hook):
         responses = [
             {
diff --git a/tests/providers/amazon/aws/operators/test_gcs_to_s3.py b/tests/providers/amazon/aws/transfers/test_gcs_to_s3.py
similarity index 95%
rename from tests/providers/amazon/aws/operators/test_gcs_to_s3.py
rename to tests/providers/amazon/aws/transfers/test_gcs_to_s3.py
index 97b7668..eb5d058 100644
--- a/tests/providers/amazon/aws/operators/test_gcs_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_gcs_to_s3.py
@@ -21,7 +21,7 @@ import unittest
 import mock
 
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.aws.operators.gcs_to_s3 import GCSToS3Operator
+from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator
 
 try:
     from moto import mock_s3
@@ -41,7 +41,7 @@ class TestGCSToS3Operator(unittest.TestCase):
     # Test1: incremental behaviour (just some files missing)
     @mock_s3
     @mock.patch('airflow.providers.google.cloud.operators.gcs.GCSHook')
-    @mock.patch('airflow.providers.amazon.aws.operators.gcs_to_s3.GCSHook')
+    @mock.patch('airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSHook')
     def test_execute_incremental(self, mock_hook, mock_hook2):
         mock_hook.return_value.list.return_value = MOCK_FILES
         mock_hook.return_value.download.return_value = b"testing"
@@ -71,7 +71,7 @@ class TestGCSToS3Operator(unittest.TestCase):
     # Test2: All the files are already in origin and destination without replace
     @mock_s3
     @mock.patch('airflow.providers.google.cloud.operators.gcs.GCSHook')
-    @mock.patch('airflow.providers.amazon.aws.operators.gcs_to_s3.GCSHook')
+    @mock.patch('airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSHook')
     def test_execute_without_replace(self, mock_hook, mock_hook2):
         mock_hook.return_value.list.return_value = MOCK_FILES
         mock_hook.return_value.download.return_value = b"testing"
@@ -102,7 +102,7 @@ class TestGCSToS3Operator(unittest.TestCase):
     # Test3: There are no files in destination bucket
     @mock_s3
     @mock.patch('airflow.providers.google.cloud.operators.gcs.GCSHook')
-    @mock.patch('airflow.providers.amazon.aws.operators.gcs_to_s3.GCSHook')
+    @mock.patch('airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSHook')
     def test_execute(self, mock_hook, mock_hook2):
         mock_hook.return_value.list.return_value = MOCK_FILES
         mock_hook.return_value.download.return_value = b"testing"
@@ -131,7 +131,7 @@ class TestGCSToS3Operator(unittest.TestCase):
     # Test4: Destination and Origin are in sync but replace all files in destination
     @mock_s3
     @mock.patch('airflow.providers.google.cloud.operators.gcs.GCSHook')
-    @mock.patch('airflow.providers.amazon.aws.operators.gcs_to_s3.GCSHook')
+    @mock.patch('airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSHook')
     def test_execute_with_replace(self, mock_hook, mock_hook2):
         mock_hook.return_value.list.return_value = MOCK_FILES
         mock_hook.return_value.download.return_value = b"testing"
@@ -162,7 +162,7 @@ class TestGCSToS3Operator(unittest.TestCase):
     # Test5: Incremental sync with replace
     @mock_s3
     @mock.patch('airflow.providers.google.cloud.operators.gcs.GCSHook')
-    @mock.patch('airflow.providers.amazon.aws.operators.gcs_to_s3.GCSHook')
+    @mock.patch('airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSHook')
     def test_execute_incremental_with_replace(self, mock_hook, mock_hook2):
         mock_hook.return_value.list.return_value = MOCK_FILES
         mock_hook.return_value.download.return_value = b"testing"
diff --git a/tests/providers/amazon/aws/operators/test_google_api_to_s3_transfer.py b/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py
similarity index 81%
rename from tests/providers/amazon/aws/operators/test_google_api_to_s3_transfer.py
rename to tests/providers/amazon/aws/transfers/test_google_api_to_s3.py
index 8db9c44..0283937 100644
--- a/tests/providers/amazon/aws/operators/test_google_api_to_s3_transfer.py
+++ b/tests/providers/amazon/aws/transfers/test_google_api_to_s3.py
@@ -22,11 +22,11 @@ from unittest.mock import Mock, patch
 from airflow import models
 from airflow.configuration import load_test_config
 from airflow.models.xcom import MAX_XCOM_SIZE
-from airflow.providers.amazon.aws.operators.google_api_to_s3_transfer import GoogleApiToS3TransferOperator
+from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator
 from airflow.utils import db
 
 
-class TestGoogleApiToS3Transfer(unittest.TestCase):
+class TestGoogleApiToS3(unittest.TestCase):
 
     def setUp(self):
         load_test_config()
@@ -66,13 +66,13 @@ class TestGoogleApiToS3Transfer(unittest.TestCase):
             'dag': None
         }
 
-    @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.GoogleDiscoveryApiHook.query')
-    @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.S3Hook.load_string')
-    @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.json.dumps')
+    @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleDiscoveryApiHook.query')
+    @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.S3Hook.load_string')
+    @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.json.dumps')
     def test_execute(self, mock_json_dumps, mock_s3_hook_load_string, mock_google_api_hook_query):
         context = {'task_instance': Mock()}
 
-        GoogleApiToS3TransferOperator(**self.kwargs).execute(context)
+        GoogleApiToS3Operator(**self.kwargs).execute(context)
 
         mock_google_api_hook_query.assert_called_once_with(
             endpoint=self.kwargs['google_api_endpoint_path'],
@@ -89,9 +89,9 @@ class TestGoogleApiToS3Transfer(unittest.TestCase):
         context['task_instance'].xcom_pull.assert_not_called()
         context['task_instance'].xcom_push.assert_not_called()
 
-    @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.GoogleDiscoveryApiHook.query')
-    @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.S3Hook.load_string')
-    @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.json.dumps')
+    @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleDiscoveryApiHook.query')
+    @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.S3Hook.load_string')
+    @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.json.dumps')
     def test_execute_with_xcom(self, mock_json_dumps, mock_s3_hook_load_string, mock_google_api_hook_query):
         context = {'task_instance': Mock()}
         xcom_kwargs = {
@@ -101,7 +101,7 @@ class TestGoogleApiToS3Transfer(unittest.TestCase):
         }
         context['task_instance'].xcom_pull.return_value = {}
 
-        GoogleApiToS3TransferOperator(**self.kwargs, **xcom_kwargs).execute(context)
+        GoogleApiToS3Operator(**self.kwargs, **xcom_kwargs).execute(context)
 
         mock_google_api_hook_query.assert_called_once_with(
             endpoint=self.kwargs['google_api_endpoint_path'],
@@ -124,11 +124,11 @@ class TestGoogleApiToS3Transfer(unittest.TestCase):
             value=mock_google_api_hook_query.return_value
         )
 
-    @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.GoogleDiscoveryApiHook.query')
-    @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.S3Hook.load_string')
-    @patch('airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.json.dumps')
+    @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.GoogleDiscoveryApiHook.query')
+    @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.S3Hook.load_string')
+    @patch('airflow.providers.amazon.aws.transfers.google_api_to_s3.json.dumps')
     @patch(
-        'airflow.providers.amazon.aws.operators.google_api_to_s3_transfer.sys.getsizeof',
+        'airflow.providers.amazon.aws.transfers.google_api_to_s3.sys.getsizeof',
         return_value=MAX_XCOM_SIZE
     )
     def test_execute_with_xcom_exceeded_max_xcom_size(
@@ -147,7 +147,7 @@ class TestGoogleApiToS3Transfer(unittest.TestCase):
         context['task_instance'].xcom_pull.return_value = {}
 
         self.assertRaises(RuntimeError,
-                          GoogleApiToS3TransferOperator(**self.kwargs, **xcom_kwargs).execute, context)
+                          GoogleApiToS3Operator(**self.kwargs, **xcom_kwargs).execute, context)
 
         mock_google_api_hook_query.assert_called_once_with(
             endpoint=self.kwargs['google_api_endpoint_path'],
diff --git a/tests/providers/amazon/aws/operators/test_google_api_to_s3_transfer_system.py b/tests/providers/amazon/aws/transfers/test_google_api_to_s3_system.py
similarity index 100%
rename from tests/providers/amazon/aws/operators/test_google_api_to_s3_transfer_system.py
rename to tests/providers/amazon/aws/transfers/test_google_api_to_s3_system.py
diff --git a/tests/providers/amazon/aws/operators/test_hive_to_dynamodb.py b/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
similarity index 93%
rename from tests/providers/amazon/aws/operators/test_hive_to_dynamodb.py
rename to tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
index bb75724..b27a9c4 100644
--- a/tests/providers/amazon/aws/operators/test_hive_to_dynamodb.py
+++ b/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
@@ -24,7 +24,7 @@ from unittest import mock
 
 import pandas as pd
 
-import airflow.providers.amazon.aws.operators.hive_to_dynamodb
+import airflow.providers.amazon.aws.transfers.hive_to_dynamodb
 from airflow.models.dag import DAG
 from airflow.providers.amazon.aws.hooks.aws_dynamodb import AwsDynamoDBHook
 
@@ -38,7 +38,7 @@ except ImportError:
     mock_dynamodb2 = None
 
 
-class TestHiveToDynamoDBTransferOperator(unittest.TestCase):
+class TestHiveToDynamoDBOperator(unittest.TestCase):
 
     def setUp(self):
         args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
@@ -84,7 +84,7 @@ class TestHiveToDynamoDBTransferOperator(unittest.TestCase):
             }
         )
 
-        operator = airflow.providers.amazon.aws.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator(
+        operator = airflow.providers.amazon.aws.transfers.hive_to_dynamodb.HiveToDynamoDBOperator(
             sql=self.sql,
             table_name="test_airflow",
             task_id='hive_to_dynamodb_check',
@@ -124,7 +124,7 @@ class TestHiveToDynamoDBTransferOperator(unittest.TestCase):
             }
         )
 
-        operator = airflow.providers.amazon.aws.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator(
+        operator = airflow.providers.amazon.aws.transfers.hive_to_dynamodb.HiveToDynamoDBOperator(
             sql=self.sql,
             table_name='test_airflow',
             task_id='hive_to_dynamodb_check',
diff --git a/tests/providers/amazon/aws/operators/test_imap_attachment_to_s3.py b/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py
similarity index 92%
rename from tests/providers/amazon/aws/operators/test_imap_attachment_to_s3.py
rename to tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py
index 8cf35df..18e8788 100644
--- a/tests/providers/amazon/aws/operators/test_imap_attachment_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3.py
@@ -19,7 +19,7 @@
 import unittest
 from unittest.mock import patch
 
-from airflow.providers.amazon.aws.operators.imap_attachment_to_s3 import ImapAttachmentToS3Operator
+from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator
 
 
 class TestImapAttachmentToS3Operator(unittest.TestCase):
@@ -36,8 +36,8 @@ class TestImapAttachmentToS3Operator(unittest.TestCase):
             dag=None
         )
 
-    @patch('airflow.providers.amazon.aws.operators.imap_attachment_to_s3.S3Hook')
-    @patch('airflow.providers.amazon.aws.operators.imap_attachment_to_s3.ImapHook')
+    @patch('airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.S3Hook')
+    @patch('airflow.providers.amazon.aws.transfers.imap_attachment_to_s3.ImapHook')
     def test_execute(self, mock_imap_hook, mock_s3_hook):
         mock_imap_hook.return_value.__enter__ = mock_imap_hook
         mock_imap_hook.return_value.retrieve_mail_attachments.return_value = [('test_file', b'Hello World')]
diff --git a/tests/providers/amazon/aws/operators/test_imap_attachment_to_s3_system.py b/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3_system.py
similarity index 100%
rename from tests/providers/amazon/aws/operators/test_imap_attachment_to_s3_system.py
rename to tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3_system.py
diff --git a/tests/providers/amazon/aws/operators/test_mongo_to_s3.py b/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py
similarity index 95%
rename from tests/providers/amazon/aws/operators/test_mongo_to_s3.py
rename to tests/providers/amazon/aws/transfers/test_mongo_to_s3.py
index ea39888..81db174 100644
--- a/tests/providers/amazon/aws/operators/test_mongo_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_mongo_to_s3.py
@@ -21,7 +21,7 @@ import mock
 
 from airflow.models import TaskInstance
 from airflow.models.dag import DAG
-from airflow.providers.amazon.aws.operators.mongo_to_s3 import MongoToS3Operator
+from airflow.providers.amazon.aws.transfers.mongo_to_s3 import MongoToS3Operator
 from airflow.utils import timezone
 
 TASK_ID = 'test_mongo_to_s3_operator'
@@ -83,8 +83,8 @@ class TestMongoToS3Operator(unittest.TestCase):
             getattr(self.mock_operator, 'mongo_query')
         )
 
-    @mock.patch('airflow.providers.amazon.aws.operators.mongo_to_s3.MongoHook')
-    @mock.patch('airflow.providers.amazon.aws.operators.mongo_to_s3.S3Hook')
+    @mock.patch('airflow.providers.amazon.aws.transfers.mongo_to_s3.MongoHook')
+    @mock.patch('airflow.providers.amazon.aws.transfers.mongo_to_s3.S3Hook')
     def test_execute(self, mock_s3_hook, mock_mongo_hook):
         operator = self.mock_operator
 
diff --git a/tests/providers/amazon/aws/operators/test_redshift_to_s3.py b/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
similarity index 95%
rename from tests/providers/amazon/aws/operators/test_redshift_to_s3.py
rename to tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
index f1ee3e2..84783fa 100644
--- a/tests/providers/amazon/aws/operators/test_redshift_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py
@@ -23,7 +23,7 @@ from unittest import mock
 from boto3.session import Session
 from parameterized import parameterized
 
-from airflow.providers.amazon.aws.operators.redshift_to_s3 import RedshiftToS3TransferOperator
+from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator
 from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces
 
 
@@ -45,7 +45,7 @@ class TestRedshiftToS3Transfer(unittest.TestCase):
         s3_key = "key"
         unload_options = ['HEADER', ]
 
-        RedshiftToS3TransferOperator(
+        RedshiftToS3Operator(
             schema=schema,
             table=table,
             s3_bucket=s3_bucket,
diff --git a/tests/providers/amazon/aws/operators/test_s3_to_redshift.py b/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py
similarity index 94%
rename from tests/providers/amazon/aws/operators/test_s3_to_redshift.py
rename to tests/providers/amazon/aws/transfers/test_s3_to_redshift.py
index 992c133..32da61d 100644
--- a/tests/providers/amazon/aws/operators/test_s3_to_redshift.py
+++ b/tests/providers/amazon/aws/transfers/test_s3_to_redshift.py
@@ -22,7 +22,7 @@ from unittest import mock
 
 from boto3.session import Session
 
-from airflow.providers.amazon.aws.operators.s3_to_redshift import S3ToRedshiftTransferOperator
+from airflow.providers.amazon.aws.transfers.s3_to_redshift import S3ToRedshiftOperator
 from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces
 
 
@@ -41,7 +41,7 @@ class TestS3ToRedshiftTransfer(unittest.TestCase):
         s3_key = "key"
         copy_options = ""
 
-        op = S3ToRedshiftTransferOperator(
+        op = S3ToRedshiftOperator(
             schema=schema,
             table=table,
             s3_bucket=s3_bucket,
diff --git a/tests/providers/amazon/aws/operators/test_s3_to_redshift_system.py b/tests/providers/amazon/aws/transfers/test_s3_to_redshift_system.py
similarity index 100%
rename from tests/providers/amazon/aws/operators/test_s3_to_redshift_system.py
rename to tests/providers/amazon/aws/transfers/test_s3_to_redshift_system.py
diff --git a/tests/providers/amazon/aws/operators/test_s3_to_sftp.py b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
similarity index 98%
rename from tests/providers/amazon/aws/operators/test_s3_to_sftp.py
rename to tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
index 24eda72..de9c9e5 100644
--- a/tests/providers/amazon/aws/operators/test_s3_to_sftp.py
+++ b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
@@ -22,7 +22,7 @@ import boto3
 from moto import mock_s3
 
 from airflow.models import DAG, TaskInstance
-from airflow.providers.amazon.aws.operators.s3_to_sftp import S3ToSFTPOperator
+from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator
 from airflow.providers.ssh.operators.ssh import SSHOperator
 from airflow.utils import timezone
 from airflow.utils.timezone import datetime
diff --git a/tests/providers/amazon/aws/operators/test_sftp_to_s3.py b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
similarity index 98%
rename from tests/providers/amazon/aws/operators/test_sftp_to_s3.py
rename to tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
index ebd5cde..3621104 100644
--- a/tests/providers/amazon/aws/operators/test_sftp_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
@@ -23,7 +23,7 @@ from moto import mock_s3
 
 from airflow.models import DAG, TaskInstance
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.aws.operators.sftp_to_s3 import SFTPToS3Operator
+from airflow.providers.amazon.aws.transfers.sftp_to_s3 import SFTPToS3Operator
 from airflow.providers.ssh.hooks.ssh import SSHHook
 from airflow.providers.ssh.operators.ssh import SSHOperator
 from airflow.utils import timezone
diff --git a/tests/providers/oracle/operators/__init__.py b/tests/providers/apache/druid/transfers/__init__.py
similarity index 99%
copy from tests/providers/oracle/operators/__init__.py
copy to tests/providers/apache/druid/transfers/__init__.py
index 217e5db..13a8339 100644
--- a/tests/providers/oracle/operators/__init__.py
+++ b/tests/providers/apache/druid/transfers/__init__.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
diff --git a/tests/providers/apache/druid/operators/test_hive_to_druid.py b/tests/providers/apache/druid/transfers/test_hive_to_druid.py
similarity index 97%
rename from tests/providers/apache/druid/operators/test_hive_to_druid.py
rename to tests/providers/apache/druid/transfers/test_hive_to_druid.py
index 11433e7..8951fe6 100644
--- a/tests/providers/apache/druid/operators/test_hive_to_druid.py
+++ b/tests/providers/apache/druid/transfers/test_hive_to_druid.py
@@ -23,7 +23,7 @@ import requests
 import requests_mock
 
 from airflow.models.dag import DAG
-from airflow.providers.apache.druid.operators.hive_to_druid import HiveToDruidTransferOperator
+from airflow.providers.apache.druid.transfers.hive_to_druid import HiveToDruidOperator
 
 
 class TestDruidHook(unittest.TestCase):
@@ -74,7 +74,7 @@ class TestDruidHook(unittest.TestCase):
         session.mount('mock', adapter)
 
     def test_construct_ingest_query(self):
-        operator = HiveToDruidTransferOperator(
+        operator = HiveToDruidOperator(
             task_id='hive_to_druid',
             dag=self.dag,
             **self.hook_config
diff --git a/tests/providers/oracle/operators/__init__.py b/tests/providers/apache/hive/transfers/__init__.py
similarity index 99%
copy from tests/providers/oracle/operators/__init__.py
copy to tests/providers/apache/hive/transfers/__init__.py
index 217e5db..13a8339 100644
--- a/tests/providers/oracle/operators/__init__.py
+++ b/tests/providers/apache/hive/transfers/__init__.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
diff --git a/tests/providers/apache/hive/operators/test_hive_to_mysql.py b/tests/providers/apache/hive/transfers/test_hive_to_mysql.py
similarity index 83%
rename from tests/providers/apache/hive/operators/test_hive_to_mysql.py
rename to tests/providers/apache/hive/transfers/test_hive_to_mysql.py
index 25a79d2..1a437d8 100644
--- a/tests/providers/apache/hive/operators/test_hive_to_mysql.py
+++ b/tests/providers/apache/hive/transfers/test_hive_to_mysql.py
@@ -20,7 +20,7 @@ import re
 import unittest
 from unittest.mock import MagicMock, PropertyMock, patch
 
-from airflow.providers.apache.hive.operators.hive_to_mysql import HiveToMySqlTransferOperator
+from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator
 from airflow.utils import timezone
 from airflow.utils.operator_helpers import context_to_airflow_vars
 from tests.providers.apache.hive import TestHiveEnvironment
@@ -41,10 +41,10 @@ class TestHiveToMySqlTransfer(TestHiveEnvironment):
         )
         super().setUp()
 
-    @patch('airflow.providers.apache.hive.operators.hive_to_mysql.MySqlHook')
-    @patch('airflow.providers.apache.hive.operators.hive_to_mysql.HiveServer2Hook')
+    @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.MySqlHook')
+    @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.HiveServer2Hook')
     def test_execute(self, mock_hive_hook, mock_mysql_hook):
-        HiveToMySqlTransferOperator(**self.kwargs).execute(context={})
+        HiveToMySqlOperator(**self.kwargs).execute(context={})
 
         mock_hive_hook.assert_called_once_with(hiveserver2_conn_id=self.kwargs['hiveserver2_conn_id'])
         mock_hive_hook.return_value.get_records.assert_called_once_with('sql', hive_conf={})
@@ -54,33 +54,33 @@ class TestHiveToMySqlTransfer(TestHiveEnvironment):
             rows=mock_hive_hook.return_value.get_records.return_value
         )
 
-    @patch('airflow.providers.apache.hive.operators.hive_to_mysql.MySqlHook')
-    @patch('airflow.providers.apache.hive.operators.hive_to_mysql.HiveServer2Hook')
+    @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.MySqlHook')
+    @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.HiveServer2Hook')
     def test_execute_mysql_preoperator(self, mock_hive_hook, mock_mysql_hook):
         self.kwargs.update(dict(mysql_preoperator='preoperator'))
 
-        HiveToMySqlTransferOperator(**self.kwargs).execute(context={})
+        HiveToMySqlOperator(**self.kwargs).execute(context={})
 
         mock_mysql_hook.return_value.run.assert_called_once_with(self.kwargs['mysql_preoperator'])
 
-    @patch('airflow.providers.apache.hive.operators.hive_to_mysql.MySqlHook')
-    @patch('airflow.providers.apache.hive.operators.hive_to_mysql.HiveServer2Hook')
+    @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.MySqlHook')
+    @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.HiveServer2Hook')
     def test_execute_with_mysql_postoperator(self, mock_hive_hook, mock_mysql_hook):
         self.kwargs.update(dict(mysql_postoperator='postoperator'))
 
-        HiveToMySqlTransferOperator(**self.kwargs).execute(context={})
+        HiveToMySqlOperator(**self.kwargs).execute(context={})
 
         mock_mysql_hook.return_value.run.assert_called_once_with(self.kwargs['mysql_postoperator'])
 
-    @patch('airflow.providers.apache.hive.operators.hive_to_mysql.MySqlHook')
-    @patch('airflow.providers.apache.hive.operators.hive_to_mysql.NamedTemporaryFile')
-    @patch('airflow.providers.apache.hive.operators.hive_to_mysql.HiveServer2Hook')
+    @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.MySqlHook')
+    @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.NamedTemporaryFile')
+    @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.HiveServer2Hook')
     def test_execute_bulk_load(self, mock_hive_hook, mock_tmp_file, mock_mysql_hook):
         type(mock_tmp_file).name = PropertyMock(return_value='tmp_file')
         context = {}
         self.kwargs.update(dict(bulk_load=True))
 
-        HiveToMySqlTransferOperator(**self.kwargs).execute(context=context)
+        HiveToMySqlOperator(**self.kwargs).execute(context=context)
 
         mock_tmp_file.assert_called_once_with()
         mock_hive_hook.return_value.to_csv.assert_called_once_with(
@@ -97,7 +97,7 @@ class TestHiveToMySqlTransfer(TestHiveEnvironment):
         )
         mock_tmp_file.return_value.close.assert_called_once_with()
 
-    @patch('airflow.providers.apache.hive.operators.hive_to_mysql.MySqlHook')
+    @patch('airflow.providers.apache.hive.transfers.hive_to_mysql.MySqlHook')
     def test_execute_with_hive_conf(self, mock_mysql_hook):
         context = {}
         mock_hive_hook = MockHiveServer2Hook()
@@ -105,9 +105,9 @@ class TestHiveToMySqlTransfer(TestHiveEnvironment):
 
         self.kwargs.update(dict(hive_conf={'mapreduce.job.queuename': 'fake_queue'}))
 
-        with patch('airflow.providers.apache.hive.operators.hive_to_mysql.HiveServer2Hook',
+        with patch('airflow.providers.apache.hive.transfers.hive_to_mysql.HiveServer2Hook',
                    return_value=mock_hive_hook):
-            HiveToMySqlTransferOperator(**self.kwargs).execute(context=context)
+            HiveToMySqlOperator(**self.kwargs).execute(context=context)
 
             hive_conf = context_to_airflow_vars(context)
             hive_conf.update(self.kwargs['hive_conf'])
@@ -130,12 +130,12 @@ class TestHiveToMySqlTransfer(TestHiveEnvironment):
         mock_mysql_hook.run = MagicMock()
         mock_mysql_hook.insert_rows = MagicMock()
 
-        with patch('airflow.providers.apache.hive.operators.hive_to_mysql.HiveServer2Hook',
+        with patch('airflow.providers.apache.hive.transfers.hive_to_mysql.HiveServer2Hook',
                    return_value=mock_hive_hook):
-            with patch('airflow.providers.apache.hive.operators.hive_to_mysql.MySqlHook',
+            with patch('airflow.providers.apache.hive.transfers.hive_to_mysql.MySqlHook',
                        return_value=mock_mysql_hook):
 
-                op = HiveToMySqlTransferOperator(
+                op = HiveToMySqlOperator(
                     mysql_conn_id='airflow_db',
                     task_id='hive_to_mysql_check',
                     sql="""
diff --git a/tests/providers/apache/hive/operators/test_hive_to_samba.py b/tests/providers/apache/hive/transfers/test_hive_to_samba.py
similarity index 87%
rename from tests/providers/apache/hive/operators/test_hive_to_samba.py
rename to tests/providers/apache/hive/transfers/test_hive_to_samba.py
index f14ae6d..dbd71f5 100644
--- a/tests/providers/apache/hive/operators/test_hive_to_samba.py
+++ b/tests/providers/apache/hive/transfers/test_hive_to_samba.py
@@ -19,7 +19,7 @@ import os
 import unittest
 from unittest.mock import MagicMock, Mock, PropertyMock, patch
 
-from airflow.providers.apache.hive.operators.hive_to_samba import Hive2SambaOperator
+from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator
 from airflow.utils.operator_helpers import context_to_airflow_vars
 from tests.providers.apache.hive import DEFAULT_DATE, TestHiveEnvironment
 from tests.test_utils.mock_hooks import MockHiveServer2Hook, MockSambaHook
@@ -37,15 +37,15 @@ class TestHive2SambaOperator(TestHiveEnvironment):
         )
         super().setUp()
 
-    @patch('airflow.providers.apache.hive.operators.hive_to_samba.SambaHook')
-    @patch('airflow.providers.apache.hive.operators.hive_to_samba.HiveServer2Hook')
-    @patch('airflow.providers.apache.hive.operators.hive_to_samba.NamedTemporaryFile')
+    @patch('airflow.providers.apache.hive.transfers.hive_to_samba.SambaHook')
+    @patch('airflow.providers.apache.hive.transfers.hive_to_samba.HiveServer2Hook')
+    @patch('airflow.providers.apache.hive.transfers.hive_to_samba.NamedTemporaryFile')
     def test_execute(self, mock_tmp_file, mock_hive_hook, mock_samba_hook):
         type(mock_tmp_file).name = PropertyMock(return_value='tmp_file')
         mock_tmp_file.return_value.__enter__ = Mock(return_value=mock_tmp_file)
         context = {}
 
-        Hive2SambaOperator(**self.kwargs).execute(context)
+        HiveToSambaOperator(**self.kwargs).execute(context)
 
         mock_hive_hook.assert_called_once_with(
             hiveserver2_conn_id=self.kwargs['hiveserver2_conn_id'])
@@ -63,7 +63,7 @@ class TestHive2SambaOperator(TestHiveEnvironment):
         "Skipped because AIRFLOW_RUNALL_TESTS is not set")
     @patch('tempfile.tempdir', '/tmp/')
     @patch('tempfile._RandomNameSequence.__next__')
-    @patch('airflow.providers.apache.hive.operators.hive_to_samba.HiveServer2Hook',
+    @patch('airflow.providers.apache.hive.transfers.hive_to_samba.HiveServer2Hook',
            side_effect=MockHiveServer2Hook)
     def test_hive2samba(self, mock_hive_server_hook, mock_temp_dir):
         mock_temp_dir.return_value = "tst"
@@ -71,10 +71,10 @@ class TestHive2SambaOperator(TestHiveEnvironment):
         samba_hook = MockSambaHook(self.kwargs['samba_conn_id'])
         samba_hook.upload = MagicMock()
 
-        with patch('airflow.providers.apache.hive.operators.hive_to_samba.SambaHook',
+        with patch('airflow.providers.apache.hive.transfers.hive_to_samba.SambaHook',
                    return_value=samba_hook):
             samba_hook.conn.upload = MagicMock()
-            op = Hive2SambaOperator(
+            op = HiveToSambaOperator(
                 task_id='hive2samba_check',
                 samba_conn_id='tableau_samba',
                 hql="SELECT * FROM airflow.static_babynames LIMIT 10000",
diff --git a/tests/providers/apache/hive/operators/test_mssql_to_hive.py b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py
similarity index 83%
rename from tests/providers/apache/hive/operators/test_mssql_to_hive.py
rename to tests/providers/apache/hive/transfers/test_mssql_to_hive.py
index fb7d2ce..e6795ea 100644
--- a/tests/providers/apache/hive/operators/test_mssql_to_hive.py
+++ b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py
@@ -26,7 +26,7 @@ from airflow import PY38
 if PY38:
     MsSqlToHiveTransferOperator = None
 else:
-    from airflow.providers.apache.hive.operators.mssql_to_hive import MsSqlToHiveTransferOperator
+    from airflow.providers.apache.hive.transfers.mssql_to_hive import MsSqlToHiveOperator
 
 try:
     import pymssql
@@ -48,32 +48,32 @@ class TestMsSqlToHiveTransfer(unittest.TestCase):
 
     # pylint: disable=c-extension-no-member
     def test_type_map_binary(self):
-        mapped_type = MsSqlToHiveTransferOperator(
+        mapped_type = MsSqlToHiveOperator(
             **self.kwargs).type_map(pymssql.BINARY.value)  # pylint: disable=c-extension-no-member
 
         self.assertEqual(mapped_type, 'INT')
 
     def test_type_map_decimal(self):
-        mapped_type = MsSqlToHiveTransferOperator(
+        mapped_type = MsSqlToHiveOperator(
             **self.kwargs).type_map(pymssql.DECIMAL.value)  # pylint: disable=c-extension-no-member
 
         self.assertEqual(mapped_type, 'FLOAT')
 
     def test_type_map_number(self):
-        mapped_type = MsSqlToHiveTransferOperator(
+        mapped_type = MsSqlToHiveOperator(
             **self.kwargs).type_map(pymssql.NUMBER.value)  # pylint: disable=c-extension-no-member
 
         self.assertEqual(mapped_type, 'INT')
 
     def test_type_map_string(self):
-        mapped_type = MsSqlToHiveTransferOperator(**self.kwargs).type_map(None)
+        mapped_type = MsSqlToHiveOperator(**self.kwargs).type_map(None)
 
         self.assertEqual(mapped_type, 'STRING')
 
-    @patch('airflow.providers.apache.hive.operators.mssql_to_hive.csv')
-    @patch('airflow.providers.apache.hive.operators.mssql_to_hive.NamedTemporaryFile')
-    @patch('airflow.providers.apache.hive.operators.mssql_to_hive.MsSqlHook')
-    @patch('airflow.providers.apache.hive.operators.mssql_to_hive.HiveCliHook')
+    @patch('airflow.providers.apache.hive.transfers.mssql_to_hive.csv')
+    @patch('airflow.providers.apache.hive.transfers.mssql_to_hive.NamedTemporaryFile')
+    @patch('airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlHook')
+    @patch('airflow.providers.apache.hive.transfers.mssql_to_hive.HiveCliHook')
     def test_execute(self, mock_hive_hook, mock_mssql_hook, mock_tmp_file, mock_csv):
         type(mock_tmp_file).name = PropertyMock(return_value='tmp_file')
         mock_tmp_file.return_value.__enter__ = Mock(return_value=mock_tmp_file)
@@ -81,7 +81,7 @@ class TestMsSqlToHiveTransfer(unittest.TestCase):
         mock_mssql_hook_cursor = mock_mssql_hook_get_conn.return_value.cursor.return_value.__enter__
         mock_mssql_hook_cursor.return_value.description = [('te', 'st')]
 
-        mssql_to_hive_transfer = MsSqlToHiveTransferOperator(**self.kwargs)
+        mssql_to_hive_transfer = MsSqlToHiveOperator(**self.kwargs)
         mssql_to_hive_transfer.execute(context={})
 
         mock_mssql_hook_cursor.return_value.execute.assert_called_once_with(mssql_to_hive_transfer.sql)
@@ -101,10 +101,10 @@ class TestMsSqlToHiveTransfer(unittest.TestCase):
             recreate=mssql_to_hive_transfer.recreate,
             tblproperties=mssql_to_hive_transfer.tblproperties)
 
-    @patch('airflow.providers.apache.hive.operators.mssql_to_hive.csv')
-    @patch('airflow.providers.apache.hive.operators.mssql_to_hive.NamedTemporaryFile')
-    @patch('airflow.providers.apache.hive.operators.mssql_to_hive.MsSqlHook')
-    @patch('airflow.providers.apache.hive.operators.mssql_to_hive.HiveCliHook')
+    @patch('airflow.providers.apache.hive.transfers.mssql_to_hive.csv')
+    @patch('airflow.providers.apache.hive.transfers.mssql_to_hive.NamedTemporaryFile')
+    @patch('airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlHook')
+    @patch('airflow.providers.apache.hive.transfers.mssql_to_hive.HiveCliHook')
     def test_execute_empty_description_field(self, mock_hive_hook, mock_mssql_hook, mock_tmp_file, mock_csv):
         type(mock_tmp_file).name = PropertyMock(return_value='tmp_file')
         mock_tmp_file.return_value.__enter__ = Mock(return_value=mock_tmp_file)
@@ -112,7 +112,7 @@ class TestMsSqlToHiveTransfer(unittest.TestCase):
         mock_mssql_hook_cursor = mock_mssql_hook_get_conn.return_value.cursor.return_value.__enter__
         mock_mssql_hook_cursor.return_value.description = [('', '')]
 
-        mssql_to_hive_transfer = MsSqlToHiveTransferOperator(**self.kwargs)
+        mssql_to_hive_transfer = MsSqlToHiveOperator(**self.kwargs)
         mssql_to_hive_transfer.execute(context={})
 
         field_dict = OrderedDict()
diff --git a/tests/providers/apache/hive/operators/test_mysql_to_hive.py b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py
similarity index 98%
rename from tests/providers/apache/hive/operators/test_mysql_to_hive.py
rename to tests/providers/apache/hive/transfers/test_mysql_to_hive.py
index 680bc25..e5d6ec6 100644
--- a/tests/providers/apache/hive/operators/test_mysql_to_hive.py
+++ b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py
@@ -23,7 +23,7 @@ from unittest import mock
 import pytest
 
 from airflow.models.dag import DAG
-from airflow.providers.apache.hive.operators.mysql_to_hive import MySqlToHiveTransferOperator
+from airflow.providers.apache.hive.transfers.mysql_to_hive import MySqlToHiveOperator
 from airflow.providers.mysql.hooks.mysql import MySqlHook
 from airflow.utils import timezone
 from tests.test_utils.mock_hooks import MockHiveServer2Hook
@@ -134,7 +134,7 @@ class TestTransfer(unittest.TestCase):
 
         with mock.patch.dict('os.environ', self.env_vars):
             sql = "SELECT * FROM baby_names LIMIT 1000;"
-            op = MySqlToHiveTransferOperator(
+            op = MySqlToHiveOperator(
                 task_id='test_m2h',
                 hive_cli_conn_id='hive_cli_default',
                 sql=sql,
@@ -172,7 +172,7 @@ class TestTransfer(unittest.TestCase):
 
         with mock.patch.dict('os.environ', self.env_vars):
             sql = "SELECT * FROM baby_names LIMIT 1000;"
-            op = MySqlToHiveTransferOperator(
+            op = MySqlToHiveOperator(
                 task_id='test_m2h',
                 hive_cli_conn_id='hive_cli_default',
                 sql=sql,
@@ -212,7 +212,7 @@ class TestTransfer(unittest.TestCase):
 
         with mock.patch.dict('os.environ', self.env_vars):
             sql = "SELECT * FROM baby_names LIMIT 1000;"
-            op = MySqlToHiveTransferOperator(
+            op = MySqlToHiveOperator(
                 task_id='test_m2h',
                 hive_cli_conn_id='hive_cli_default',
                 sql=sql,
@@ -261,7 +261,7 @@ class TestTransfer(unittest.TestCase):
                     )
                 """.format(mysql_table))
 
-            op = MySqlToHiveTransferOperator(
+            op = MySqlToHiveOperator(
                 task_id='test_m2h',
                 hive_cli_conn_id='hive_cli_default',
                 sql="SELECT * FROM {}".format(mysql_table),
@@ -318,7 +318,7 @@ class TestTransfer(unittest.TestCase):
 
             with mock.patch.dict('os.environ', self.env_vars):
                 import unicodecsv as csv
-                op = MySqlToHiveTransferOperator(
+                op = MySqlToHiveOperator(
                     task_id='test_m2h',
                     hive_cli_conn_id='hive_cli_default',
                     sql="SELECT * FROM {}".format(mysql_table),
@@ -410,7 +410,7 @@ class TestTransfer(unittest.TestCase):
                 """.format(mysql_table, *minmax))
 
             with mock.patch.dict('os.environ', self.env_vars):
-                op = MySqlToHiveTransferOperator(
+                op = MySqlToHiveOperator(
                     task_id='test_m2h',
                     hive_cli_conn_id='hive_cli_default',
                     sql="SELECT * FROM {}".format(mysql_table),
diff --git a/tests/providers/apache/hive/operators/test_s3_to_hive.py b/tests/providers/apache/hive/transfers/test_s3_to_hive.py
similarity index 93%
rename from tests/providers/apache/hive/operators/test_s3_to_hive.py
rename to tests/providers/apache/hive/transfers/test_s3_to_hive.py
index d475b25..67aec2a 100644
--- a/tests/providers/apache/hive/operators/test_s3_to_hive.py
+++ b/tests/providers/apache/hive/transfers/test_s3_to_hive.py
@@ -30,7 +30,7 @@ from tempfile import NamedTemporaryFile, mkdtemp
 import mock
 
 from airflow.exceptions import AirflowException
-from airflow.providers.apache.hive.operators.s3_to_hive import S3ToHiveTransferOperator
+from airflow.providers.apache.hive.transfers.s3_to_hive import S3ToHiveOperator
 
 try:
     import boto3
@@ -157,18 +157,18 @@ class TestS3ToHiveTransfer(unittest.TestCase):
         self.kwargs['check_headers'] = True
         self.kwargs['headers'] = False
         self.assertRaisesRegex(AirflowException, "To check_headers.*",
-                               S3ToHiveTransferOperator, **self.kwargs)
+                               S3ToHiveOperator, **self.kwargs)
 
     def test__get_top_row_as_list(self):
         self.kwargs['delimiter'] = '\t'
         fn_txt = self._get_fn('.txt', True)
-        header_list = S3ToHiveTransferOperator(**self.kwargs). \
+        header_list = S3ToHiveOperator(**self.kwargs). \
             _get_top_row_as_list(fn_txt)
         self.assertEqual(header_list, ['Sno', 'Some,Text'],
                          msg="Top row from file doesnt matched expected value")
 
         self.kwargs['delimiter'] = ','
-        header_list = S3ToHiveTransferOperator(**self.kwargs). \
+        header_list = S3ToHiveOperator(**self.kwargs). \
             _get_top_row_as_list(fn_txt)
         self.assertEqual(header_list, ['Sno\tSome', 'Text'],
                          msg="Top row from file doesnt matched expected value")
@@ -176,20 +176,20 @@ class TestS3ToHiveTransfer(unittest.TestCase):
     def test__match_headers(self):
         self.kwargs['field_dict'] = OrderedDict([('Sno', 'BIGINT'),
                                                  ('Some,Text', 'STRING')])
-        self.assertTrue(S3ToHiveTransferOperator(**self.kwargs).
+        self.assertTrue(S3ToHiveOperator(**self.kwargs).
                         _match_headers(['Sno', 'Some,Text']),
                         msg="Header row doesnt match expected value")
         # Testing with different column order
-        self.assertFalse(S3ToHiveTransferOperator(**self.kwargs).
+        self.assertFalse(S3ToHiveOperator(**self.kwargs).
                          _match_headers(['Some,Text', 'Sno']),
                          msg="Header row doesnt match expected value")
         # Testing with extra column in header
-        self.assertFalse(S3ToHiveTransferOperator(**self.kwargs).
+        self.assertFalse(S3ToHiveOperator(**self.kwargs).
                          _match_headers(['Sno', 'Some,Text', 'ExtraColumn']),
                          msg="Header row doesnt match expected value")
 
     def test__delete_top_row_and_compress(self):
-        s32hive = S3ToHiveTransferOperator(**self.kwargs)
+        s32hive = S3ToHiveOperator(**self.kwargs)
         # Testing gz file type
         fn_txt = self._get_fn('.txt', True)
         gz_txt_nh = s32hive._delete_top_row_and_compress(fn_txt,
@@ -208,7 +208,7 @@ class TestS3ToHiveTransfer(unittest.TestCase):
 
     @unittest.skipIf(mock is None, 'mock package not present')
     @unittest.skipIf(mock_s3 is None, 'moto package not present')
-    @mock.patch('airflow.providers.apache.hive.operators.s3_to_hive.HiveCliHook')
+    @mock.patch('airflow.providers.apache.hive.transfers.s3_to_hive.HiveCliHook')
     @mock_s3
     def test_execute(self, mock_hiveclihook):
         conn = boto3.client('s3')
@@ -234,12 +234,12 @@ class TestS3ToHiveTransfer(unittest.TestCase):
                     self._check_file_equality(args[0], op_fn, ext),
                     msg='{0} output file not as expected'.format(ext))
             # Execute S3ToHiveTransfer
-            s32hive = S3ToHiveTransferOperator(**self.kwargs)
+            s32hive = S3ToHiveOperator(**self.kwargs)
             s32hive.execute(None)
 
     @unittest.skipIf(mock is None, 'mock package not present')
     @unittest.skipIf(mock_s3 is None, 'moto package not present')
-    @mock.patch('airflow.providers.apache.hive.operators.s3_to_hive.HiveCliHook')
+    @mock.patch('airflow.providers.apache.hive.transfers.s3_to_hive.HiveCliHook')
     @mock_s3
     def test_execute_with_select_expression(self, mock_hiveclihook):
         conn = boto3.client('s3')
@@ -278,7 +278,7 @@ class TestS3ToHiveTransfer(unittest.TestCase):
             with mock.patch('airflow.providers.amazon.aws.hooks.s3.S3Hook.select_key',
                             return_value="") as mock_select_key:
                 # Execute S3ToHiveTransfer
-                s32hive = S3ToHiveTransferOperator(**self.kwargs)
+                s32hive = S3ToHiveOperator(**self.kwargs)
                 s32hive.execute(None)
 
                 mock_select_key.assert_called_once_with(
diff --git a/tests/providers/apache/hive/transfers/test_vertica_to_hive.py b/tests/providers/apache/hive/transfers/test_vertica_to_hive.py
new file mode 100644
index 0000000..a454030
--- /dev/null
+++ b/tests/providers/apache/hive/transfers/test_vertica_to_hive.py
@@ -0,0 +1,68 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import unittest
+from unittest import mock
+
+from airflow.models.dag import DAG
+from airflow.providers.apache.hive.transfers.vertica_to_hive import VerticaToHiveOperator
+
+
+def mock_get_conn():
+    commit_mock = mock.MagicMock(
+    )
+    cursor_mock = mock.MagicMock(
+        execute=[],
+        fetchall=[['1', '2', '3']],
+        description=['a', 'b', 'c'],
+        iterate=[['1', '2', '3']],
+    )
+    conn_mock = mock.MagicMock(
+        commit=commit_mock,
+        cursor=cursor_mock,
+    )
+    return conn_mock
+
+
+class TestVerticaToHiveTransfer(unittest.TestCase):
+    def setUp(self):
+        args = {
+            'owner': 'airflow',
+            'start_date': datetime.datetime(2017, 1, 1)
+        }
+        self.dag = DAG('test_dag_id', default_args=args)
+
+    @mock.patch(
+        'airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaHook.get_conn',
+        side_effect=mock_get_conn)
+    @mock.patch(
+        'airflow.providers.apache.hive.transfers.vertica_to_hive.HiveCliHook.load_file')
+    def test_select_insert_transfer(self, *args):
+        """
+        Test that data is selected from Vertica into memory and
+        then inserted into Hive
+        """
+        task = VerticaToHiveOperator(
+            task_id='test_task_id',
+            sql='select a, b, c',
+            hive_table='test_table',
+            vertica_conn_id='test_vertica_conn_id',
+            hive_cli_conn_id='hive_cli_default',
+            dag=self.dag)
+        task.execute(None)
diff --git a/tests/providers/google/ads/operators/test_ads.py b/tests/providers/google/ads/operators/test_ads.py
index 5154e66..43f07ef 100644
--- a/tests/providers/google/ads/operators/test_ads.py
+++ b/tests/providers/google/ads/operators/test_ads.py
@@ -16,7 +16,7 @@
 # under the License.
 from unittest import mock
 
-from airflow.providers.google.ads.operators.ads import GoogleAdsListAccountsOperator, GoogleAdsToGcsOperator
+from airflow.providers.google.ads.operators.ads import GoogleAdsListAccountsOperator
 
 CLIENT_IDS = ["1111111111", "2222222222"]
 BUCKET = "gs://test-google-ads-bucket"
@@ -38,33 +38,6 @@ gcp_conn_id = "gcp_conn_id"
 google_ads_conn_id = "google_ads_conn_id"
 
 
-class TestGoogleAdsToGcsOperator:
-    @mock.patch("airflow.providers.google.ads.operators.ads.GoogleAdsHook")
-    @mock.patch("airflow.providers.google.ads.operators.ads.GCSHook")
-    def test_execute(self, mock_gcs_hook, mock_ads_hook):
-        op = GoogleAdsToGcsOperator(
-            gcp_conn_id=gcp_conn_id,
-            google_ads_conn_id=google_ads_conn_id,
-            client_ids=CLIENT_IDS,
-            query=QUERY,
-            attributes=FIELDS_TO_EXTRACT,
-            obj=GCS_OBJ_PATH,
-            bucket=BUCKET,
-            task_id="run_operator",
-        )
-        op.execute({})
-        mock_ads_hook.assert_called_once_with(
-            gcp_conn_id=gcp_conn_id, google_ads_conn_id=google_ads_conn_id
-        )
-        mock_ads_hook.return_value.search.assert_called_once_with(
-            client_ids=CLIENT_IDS, query=QUERY, page_size=10000
-        )
-        mock_gcs_hook.assert_called_once_with(gcp_conn_id=gcp_conn_id)
-        mock_gcs_hook.return_value.upload.assert_called_once_with(
-            bucket_name=BUCKET, object_name=GCS_OBJ_PATH, filename=mock.ANY, gzip=False
-        )
-
-
 class TestGoogleAdsListAccountsOperator:
     @mock.patch("airflow.providers.google.ads.operators.ads.GoogleAdsHook")
     @mock.patch("airflow.providers.google.ads.operators.ads.GCSHook")
diff --git a/tests/providers/oracle/operators/__init__.py b/tests/providers/google/ads/transfers/__init__.py
similarity index 99%
copy from tests/providers/oracle/operators/__init__.py
copy to tests/providers/google/ads/transfers/__init__.py
index 217e5db..13a8339 100644
--- a/tests/providers/oracle/operators/__init__.py
+++ b/tests/providers/google/ads/transfers/__init__.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
diff --git a/tests/providers/google/ads/transfers/test_ads_to_gcs.py b/tests/providers/google/ads/transfers/test_ads_to_gcs.py
new file mode 100644
index 0000000..947f414
--- /dev/null
+++ b/tests/providers/google/ads/transfers/test_ads_to_gcs.py
@@ -0,0 +1,50 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from unittest import mock
+
+from airflow.providers.google.ads.transfers.ads_to_gcs import GoogleAdsToGcsOperator
+from tests.providers.google.ads.operators.test_ads import (
+    BUCKET, CLIENT_IDS, FIELDS_TO_EXTRACT, GCS_OBJ_PATH, QUERY, gcp_conn_id, google_ads_conn_id,
+)
+
+
+class TestGoogleAdsToGcsOperator:
+    @mock.patch("airflow.providers.google.ads.transfers.ads_to_gcs.GoogleAdsHook")
+    @mock.patch("airflow.providers.google.ads.transfers.ads_to_gcs.GCSHook")
+    def test_execute(self, mock_gcs_hook, mock_ads_hook):
+        op = GoogleAdsToGcsOperator(
+            gcp_conn_id=gcp_conn_id,
+            google_ads_conn_id=google_ads_conn_id,
+            client_ids=CLIENT_IDS,
+            query=QUERY,
+            attributes=FIELDS_TO_EXTRACT,
+            obj=GCS_OBJ_PATH,
+            bucket=BUCKET,
+            task_id="run_operator",
+        )
+        op.execute({})
+        mock_ads_hook.assert_called_once_with(
+            gcp_conn_id=gcp_conn_id, google_ads_conn_id=google_ads_conn_id
+        )
+        mock_ads_hook.return_value.search.assert_called_once_with(
+            client_ids=CLIENT_IDS, query=QUERY, page_size=10000
+        )
+        mock_gcs_hook.assert_called_once_with(gcp_conn_id=gcp_conn_id)
+        mock_gcs_hook.return_value.upload.assert_called_once_with(
+            bucket_name=BUCKET, object_name=GCS_OBJ_PATH, filename=mock.ANY, gzip=False
+        )
diff --git a/tests/providers/google/cloud/operators/test_gcs.py b/tests/providers/google/cloud/operators/test_gcs.py
index e73de5f..067157e 100644
--- a/tests/providers/google/cloud/operators/test_gcs.py
+++ b/tests/providers/google/cloud/operators/test_gcs.py
@@ -23,7 +23,7 @@ import mock
 from airflow.providers.google.cloud.operators.gcs import (
     GCSBucketCreateAclEntryOperator, GCSCreateBucketOperator, GCSDeleteBucketOperator,
     GCSDeleteObjectsOperator, GCSFileTransformOperator, GCSListObjectsOperator,
-    GCSObjectCreateAclEntryOperator, GCSToLocalOperator,
+    GCSObjectCreateAclEntryOperator, GCSSynchronizeBucketsOperator,
 )
 
 TASK_ID = "test-gcs-operator"
@@ -145,22 +145,6 @@ class TestGoogleCloudStorageDeleteOperator(unittest.TestCase):
         )
 
 
-class TestGoogleCloudStorageDownloadOperator(unittest.TestCase):
-    @mock.patch("airflow.providers.google.cloud.operators.gcs.GCSHook")
-    def test_execute(self, mock_hook):
-        operator = GCSToLocalOperator(
-            task_id=TASK_ID,
-            bucket=TEST_BUCKET,
-            object_name=TEST_OBJECT,
-            filename=LOCAL_FILE_PATH,
-        )
-
-        operator.execute(None)
-        mock_hook.return_value.download.assert_called_once_with(
-            bucket_name=TEST_BUCKET, object_name=TEST_OBJECT, filename=LOCAL_FILE_PATH
-        )
-
-
 class TestGoogleCloudStorageListOperator(unittest.TestCase):
     @mock.patch("airflow.providers.google.cloud.operators.gcs.GCSHook")
     def test_execute(self, mock_hook):
@@ -241,3 +225,35 @@ class TestGCSDeleteBucketOperator(unittest.TestCase):
 
         operator.execute(None)
         mock_hook.return_value.delete_bucket.assert_called_once_with(bucket_name=TEST_BUCKET, force=True)
+
+
+class TestGoogleCloudStorageSync(unittest.TestCase):
+
+    @mock.patch('airflow.providers.google.cloud.operators.gcs.GCSHook')
+    def test_execute(self, mock_hook):
+        task = GCSSynchronizeBucketsOperator(
+            task_id="task-id",
+            source_bucket="SOURCE_BUCKET",
+            destination_bucket="DESTINATION_BUCKET",
+            source_object="SOURCE_OBJECT",
+            destination_object="DESTINATION_OBJECT",
+            recursive=True,
+            delete_extra_files=True,
+            allow_overwrite=True,
+            gcp_conn_id="GCP_CONN_ID",
+            delegate_to="DELEGATE_TO",
+        )
+        task.execute({})
+        mock_hook.assert_called_once_with(
+            google_cloud_storage_conn_id='GCP_CONN_ID',
+            delegate_to='DELEGATE_TO'
+        )
+        mock_hook.return_value.sync.assert_called_once_with(
+            source_bucket='SOURCE_BUCKET',
+            source_object='SOURCE_OBJECT',
+            destination_bucket='DESTINATION_BUCKET',
+            destination_object='DESTINATION_OBJECT',
+            delete_extra_files=True,
+            recursive=True,
+            allow_overwrite=True,
+        )
diff --git a/tests/providers/oracle/operators/__init__.py b/tests/providers/google/cloud/transfers/__init__.py
similarity index 99%
copy from tests/providers/oracle/operators/__init__.py
copy to tests/providers/google/cloud/transfers/__init__.py
index 217e5db..13a8339 100644
--- a/tests/providers/oracle/operators/__init__.py
+++ b/tests/providers/google/cloud/transfers/__init__.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
diff --git a/tests/providers/google/cloud/operators/test_adls_to_gcs.py b/tests/providers/google/cloud/transfers/test_adls_to_gcs.py
similarity index 94%
rename from tests/providers/google/cloud/operators/test_adls_to_gcs.py
rename to tests/providers/google/cloud/transfers/test_adls_to_gcs.py
index ae16af2..72589a1 100644
--- a/tests/providers/google/cloud/operators/test_adls_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_adls_to_gcs.py
@@ -20,7 +20,7 @@ import unittest
 
 import mock
 
-from airflow.providers.google.cloud.operators.adls_to_gcs import ADLSToGCSOperator
+from airflow.providers.google.cloud.transfers.adls_to_gcs import ADLSToGCSOperator
 
 TASK_ID = 'test-adls-gcs-operator'
 ADLS_PATH_1 = '*'
@@ -51,10 +51,10 @@ class TestAdlsToGoogleCloudStorageOperator(unittest.TestCase):
         self.assertEqual(operator.gcp_conn_id, GCS_CONN_ID)
         self.assertEqual(operator.azure_data_lake_conn_id, AZURE_CONN_ID)
 
-    @mock.patch('airflow.providers.google.cloud.operators.adls_to_gcs.AzureDataLakeHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.adls_to_gcs.AzureDataLakeHook')
     @mock.patch('airflow.providers.microsoft.azure.operators.adls_list.AzureDataLakeHook')
     @mock.patch(
-        'airflow.providers.google.cloud.operators.adls_to_gcs.GCSHook')
+        'airflow.providers.google.cloud.transfers.adls_to_gcs.GCSHook')
     def test_execute(self, gcs_mock_hook, adls_one_mock_hook, adls_two_mock_hook):
         """Test the execute function when the run is successful."""
 
@@ -100,10 +100,10 @@ class TestAdlsToGoogleCloudStorageOperator(unittest.TestCase):
         # we expect MOCK_FILES to be uploaded
         self.assertEqual(sorted(MOCK_FILES), sorted(uploaded_files))
 
-    @mock.patch('airflow.providers.google.cloud.operators.adls_to_gcs.AzureDataLakeHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.adls_to_gcs.AzureDataLakeHook')
     @mock.patch('airflow.providers.microsoft.azure.operators.adls_list.AzureDataLakeHook')
     @mock.patch(
-        'airflow.providers.google.cloud.operators.adls_to_gcs.GCSHook')
+        'airflow.providers.google.cloud.transfers.adls_to_gcs.GCSHook')
     def test_execute_with_gzip(self, gcs_mock_hook, adls_one_mock_hook, adls_two_mock_hook):
         """Test the execute function when the run is successful."""
 
diff --git a/tests/providers/google/cloud/operators/test_bigquery_to_bigquery.py b/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py
similarity index 95%
rename from tests/providers/google/cloud/operators/test_bigquery_to_bigquery.py
rename to tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py
index 740d1ad..049befd 100644
--- a/tests/providers/google/cloud/operators/test_bigquery_to_bigquery.py
+++ b/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py
@@ -20,7 +20,7 @@ import unittest
 
 import mock
 
-from airflow.providers.google.cloud.operators.bigquery_to_bigquery import BigQueryToBigQueryOperator
+from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import BigQueryToBigQueryOperator
 
 TASK_ID = 'test-bq-create-table-operator'
 TEST_DATASET = 'test-dataset'
@@ -28,7 +28,7 @@ TEST_TABLE_ID = 'test-table-id'
 
 
 class TestBigQueryToBigQueryOperator(unittest.TestCase):
-    @mock.patch('airflow.providers.google.cloud.operators.bigquery_to_bigquery.BigQueryHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.bigquery_to_bigquery.BigQueryHook')
     def test_execute(self, mock_hook):
         source_project_dataset_tables = '{}.{}'.format(
             TEST_DATASET, TEST_TABLE_ID)
diff --git a/tests/providers/google/cloud/operators/test_bigquery_to_bigquery_system.py b/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery_system.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_bigquery_to_bigquery_system.py
rename to tests/providers/google/cloud/transfers/test_bigquery_to_bigquery_system.py
diff --git a/tests/providers/google/cloud/operators/test_bigquery_to_gcs.py b/tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py
similarity index 94%
rename from tests/providers/google/cloud/operators/test_bigquery_to_gcs.py
rename to tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py
index d58228f..e95d623 100644
--- a/tests/providers/google/cloud/operators/test_bigquery_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py
@@ -20,7 +20,7 @@ import unittest
 
 import mock
 
-from airflow.providers.google.cloud.operators.bigquery_to_gcs import BigQueryToGCSOperator
+from airflow.providers.google.cloud.transfers.bigquery_to_gcs import BigQueryToGCSOperator
 
 TASK_ID = 'test-bq-create-table-operator'
 TEST_DATASET = 'test-dataset'
@@ -28,7 +28,7 @@ TEST_TABLE_ID = 'test-table-id'
 
 
 class TestBigQueryToCloudStorageOperator(unittest.TestCase):
-    @mock.patch('airflow.providers.google.cloud.operators.bigquery_to_gcs.BigQueryHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.bigquery_to_gcs.BigQueryHook')
     def test_execute(self, mock_hook):
         source_project_dataset_table = '{}.{}'.format(
             TEST_DATASET, TEST_TABLE_ID)
diff --git a/tests/providers/google/cloud/operators/test_bigquery_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_bigquery_to_gcs_system.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_bigquery_to_gcs_system.py
rename to tests/providers/google/cloud/transfers/test_bigquery_to_gcs_system.py
diff --git a/tests/providers/google/cloud/operators/test_bigquery_to_mysql.py b/tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py
similarity index 93%
rename from tests/providers/google/cloud/operators/test_bigquery_to_mysql.py
rename to tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py
index 68b1745..f21f249 100644
--- a/tests/providers/google/cloud/operators/test_bigquery_to_mysql.py
+++ b/tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py
@@ -19,7 +19,7 @@ import unittest
 
 import mock
 
-from airflow.providers.google.cloud.operators.bigquery_to_mysql import BigQueryToMySqlOperator
+from airflow.providers.google.cloud.transfers.bigquery_to_mysql import BigQueryToMySqlOperator
 
 TASK_ID = 'test-bq-create-table-operator'
 TEST_DATASET = 'test-dataset'
@@ -28,7 +28,7 @@ TEST_DAG_ID = 'test-bigquery-operators'
 
 
 class TestBigQueryToMySqlOperator(unittest.TestCase):
-    @mock.patch('airflow.providers.google.cloud.operators.bigquery_to_mysql.BigQueryHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.bigquery_to_mysql.BigQueryHook')
     def test_execute_good_request_to_bq(self, mock_hook):
         destination_table = 'table'
         operator = BigQueryToMySqlOperator(
diff --git a/tests/providers/google/cloud/operators/test_cassandra_to_gcs.py b/tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py
similarity index 93%
rename from tests/providers/google/cloud/operators/test_cassandra_to_gcs.py
rename to tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py
index 390a9fe..7eca09d 100644
--- a/tests/providers/google/cloud/operators/test_cassandra_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py
@@ -21,17 +21,17 @@ from unittest import mock
 
 from mock import call
 
-from airflow.providers.google.cloud.operators.cassandra_to_gcs import CassandraToGCSOperator
+from airflow.providers.google.cloud.transfers.cassandra_to_gcs import CassandraToGCSOperator
 
 TMP_FILE_NAME = "temp-file"
 
 
 class TestCassandraToGCS(unittest.TestCase):
-    @mock.patch("airflow.providers.google.cloud.operators.cassandra_to_gcs.NamedTemporaryFile")
+    @mock.patch("airflow.providers.google.cloud.transfers.cassandra_to_gcs.NamedTemporaryFile")
     @mock.patch(
-        "airflow.providers.google.cloud.operators.cassandra_to_gcs.GCSHook.upload"
+        "airflow.providers.google.cloud.transfers.cassandra_to_gcs.GCSHook.upload"
     )
-    @mock.patch("airflow.providers.google.cloud.operators.cassandra_to_gcs.CassandraHook")
+    @mock.patch("airflow.providers.google.cloud.transfers.cassandra_to_gcs.CassandraHook")
     def test_execute(self, mock_hook, mock_upload, mock_tempfile):
         test_bucket = "test-bucket"
         schema = "schema.json"
diff --git a/tests/providers/google/cloud/operators/test_facebook_ads_to_gcs.py b/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs.py
similarity index 93%
rename from tests/providers/google/cloud/operators/test_facebook_ads_to_gcs.py
rename to tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs.py
index a2b8b6b..fdf18f9 100644
--- a/tests/providers/google/cloud/operators/test_facebook_ads_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs.py
@@ -16,7 +16,7 @@
 # under the License.
 from unittest import mock
 
-from airflow.providers.google.cloud.operators.facebook_ads_to_gcs import FacebookAdsReportToGcsOperator
+from airflow.providers.google.cloud.transfers.facebook_ads_to_gcs import FacebookAdsReportToGcsOperator
 
 GCS_BUCKET = "airflow_bucket_fb"
 GCS_OBJ_PATH = "Temp/this_is_my_report_json.json"
@@ -47,8 +47,8 @@ FACEBOOK_RETURN_VALUE = [
 
 class TestFacebookAdsReportToGcsOperator:
 
-    @mock.patch("airflow.providers.google.cloud.operators.facebook_ads_to_gcs.FacebookAdsReportingHook")
-    @mock.patch("airflow.providers.google.cloud.operators.facebook_ads_to_gcs.GCSHook")
+    @mock.patch("airflow.providers.google.cloud.transfers.facebook_ads_to_gcs.FacebookAdsReportingHook")
+    @mock.patch("airflow.providers.google.cloud.transfers.facebook_ads_to_gcs.GCSHook")
     def test_execute(self, mock_gcs_hook, mock_ads_hook):
         mock_ads_hook.return_value.bulk_facebook_report.return_value = FACEBOOK_RETURN_VALUE
         op = FacebookAdsReportToGcsOperator(facebook_conn_id=FACEBOOK_ADS_CONN_ID,
diff --git a/tests/providers/google/cloud/operators/test_facebook_ads_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_facebook_ads_to_gcs_system.py
rename to tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py
diff --git a/tests/providers/google/cloud/operators/test_gcs_to_bigquery.py b/tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py
similarity index 93%
rename from tests/providers/google/cloud/operators/test_gcs_to_bigquery.py
rename to tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py
index dd9a0ba..b327cec 100644
--- a/tests/providers/google/cloud/operators/test_gcs_to_bigquery.py
+++ b/tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py
@@ -20,7 +20,7 @@ import unittest
 
 import mock
 
-from airflow.providers.google.cloud.operators.gcs_to_bigquery import GCSToBigQueryOperator
+from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator
 
 TASK_ID = 'test-gcs-to-bq-operator'
 TEST_EXPLICIT_DEST = 'test-project.dataset.table'
@@ -31,7 +31,7 @@ TEST_SOURCE_OBJECTS = ['test/objects/*']
 
 class TestGoogleCloudStorageToBigQueryOperator(unittest.TestCase):
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_bigquery.BigQueryHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_bigquery.BigQueryHook')
     def test_execute_explicit_project_legacy(self, bq_hook):
         operator = GCSToBigQueryOperator(task_id=TASK_ID,
                                          bucket=TEST_BUCKET,
@@ -50,7 +50,7 @@ class TestGoogleCloudStorageToBigQueryOperator(unittest.TestCase):
             .execute \
             .assert_called_once_with("SELECT MAX(id) FROM [test-project.dataset.table]")
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_bigquery.BigQueryHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_bigquery.BigQueryHook')
     def test_execute_explicit_project(self, bq_hook):
         operator = GCSToBigQueryOperator(task_id=TASK_ID,
                                          bucket=TEST_BUCKET,
diff --git a/tests/providers/google/cloud/operators/test_gcs_to_bigquery_system.py b/tests/providers/google/cloud/transfers/test_gcs_to_bigquery_system.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_gcs_to_bigquery_system.py
rename to tests/providers/google/cloud/transfers/test_gcs_to_bigquery_system.py
diff --git a/tests/providers/google/cloud/operators/test_gcs_to_gcs.py b/tests/providers/google/cloud/transfers/test_gcs_to_gcs.py
similarity index 87%
rename from tests/providers/google/cloud/operators/test_gcs_to_gcs.py
rename to tests/providers/google/cloud/transfers/test_gcs_to_gcs.py
index 73815ee..49c067b 100644
--- a/tests/providers/google/cloud/operators/test_gcs_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_gcs_to_gcs.py
@@ -22,9 +22,7 @@ from datetime import datetime
 import mock
 
 from airflow.exceptions import AirflowException
-from airflow.providers.google.cloud.operators.gcs_to_gcs import (
-    WILDCARD, GCSSynchronizeBucketsOperator, GCSToGCSOperator,
-)
+from airflow.providers.google.cloud.transfers.gcs_to_gcs import WILDCARD, GCSToGCSOperator
 
 TASK_ID = 'test-gcs-to-gcs-operator'
 TEST_BUCKET = 'test-bucket'
@@ -64,7 +62,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         Also tests the destination_object as prefix when the wildcard is used.
         """
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_execute_no_prefix(self, mock_hook):
         operator = GCSToGCSOperator(
             task_id=TASK_ID, source_bucket=TEST_BUCKET,
@@ -76,7 +74,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
             TEST_BUCKET, prefix="", delimiter="test_object"
         )
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_execute_no_suffix(self, mock_hook):
         operator = GCSToGCSOperator(
             task_id=TASK_ID, source_bucket=TEST_BUCKET,
@@ -88,7 +86,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
             TEST_BUCKET, prefix="test_object", delimiter=""
         )
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_execute_prefix_and_suffix(self, mock_hook):
         operator = GCSToGCSOperator(
             task_id=TASK_ID, source_bucket=TEST_BUCKET,
@@ -101,7 +99,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         )
 
     # copy with wildcard
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_execute_wildcard_with_destination_object(self, mock_hook):
         mock_hook.return_value.list.return_value = SOURCE_FILES_LIST
         operator = GCSToGCSOperator(
@@ -119,7 +117,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         ]
         mock_hook.return_value.rewrite.assert_has_calls(mock_calls)
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_execute_wildcard_with_destination_object_retained_prefix(self, mock_hook):
         mock_hook.return_value.list.return_value = SOURCE_FILES_LIST
         operator = GCSToGCSOperator(
@@ -139,7 +137,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         ]
         mock_hook.return_value.rewrite.assert_has_calls(mock_calls_retained)
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_execute_wildcard_without_destination_object(self, mock_hook):
         mock_hook.return_value.list.return_value = SOURCE_FILES_LIST
         operator = GCSToGCSOperator(
@@ -156,7 +154,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         ]
         mock_hook.return_value.rewrite.assert_has_calls(mock_calls_none)
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_execute_wildcard_empty_destination_object(self, mock_hook):
         mock_hook.return_value.list.return_value = SOURCE_FILES_LIST
         operator = GCSToGCSOperator(
@@ -174,7 +172,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         ]
         mock_hook.return_value.rewrite.assert_has_calls(mock_calls_empty)
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_execute_last_modified_time(self, mock_hook):
         mock_hook.return_value.list.return_value = SOURCE_FILES_LIST
         operator = GCSToGCSOperator(
@@ -192,7 +190,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         ]
         mock_hook.return_value.rewrite.assert_has_calls(mock_calls_none)
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_wc_with_last_modified_time_with_all_true_cond(self, mock_hook):
         mock_hook.return_value.list.return_value = SOURCE_FILES_LIST
         mock_hook.return_value.is_updated_after.side_effect = [True, True, True]
@@ -211,7 +209,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         ]
         mock_hook.return_value.rewrite.assert_has_calls(mock_calls_none)
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_wc_with_last_modified_time_with_one_true_cond(self, mock_hook):
         mock_hook.return_value.list.return_value = SOURCE_FILES_LIST
         mock_hook.return_value.is_updated_after.side_effect = [True, False, False]
@@ -226,7 +224,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
             TEST_BUCKET, 'test_object/file1.txt',
             DESTINATION_BUCKET, 'test_object/file1.txt')
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_wc_with_no_last_modified_time(self, mock_hook):
         mock_hook.return_value.list.return_value = SOURCE_FILES_LIST
         operator = GCSToGCSOperator(
@@ -244,7 +242,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         ]
         mock_hook.return_value.rewrite.assert_has_calls(mock_calls_none)
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_no_prefix_with_last_modified_time_with_true_cond(self, mock_hook):
         mock_hook.return_value.is_updated_after.return_value = True
         operator = GCSToGCSOperator(
@@ -258,7 +256,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         mock_hook.return_value.rewrite.assert_called_once_with(
             TEST_BUCKET, 'test_object.txt', DESTINATION_BUCKET, 'test_object.txt')
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_no_prefix_with_maximum_modified_time_with_true_cond(self, mock_hook):
         mock_hook.return_value.is_updated_before.return_value = True
         operator = GCSToGCSOperator(
@@ -272,7 +270,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         mock_hook.return_value.rewrite.assert_called_once_with(
             TEST_BUCKET, 'test_object.txt', DESTINATION_BUCKET, 'test_object.txt')
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_exe_last_modified_time_and_maximum_modified_time_with_true_cond(self, mock_hook):
         mock_hook.return_value.is_updated_between.return_value = True
         operator = GCSToGCSOperator(
@@ -287,7 +285,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         mock_hook.return_value.rewrite.assert_called_once_with(
             TEST_BUCKET, 'test_object.txt', DESTINATION_BUCKET, 'test_object.txt')
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_execute_no_prefix_with_no_last_modified_time(self, mock_hook):
         operator = GCSToGCSOperator(
             task_id=TASK_ID, source_bucket=TEST_BUCKET,
@@ -300,7 +298,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         mock_hook.return_value.rewrite.assert_called_once_with(
             TEST_BUCKET, 'test_object.txt', DESTINATION_BUCKET, 'test_object.txt')
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_no_prefix_with_last_modified_time_with_false_cond(self, mock_hook):
         mock_hook.return_value.is_updated_after.return_value = False
         operator = GCSToGCSOperator(
@@ -313,7 +311,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         operator.execute(None)
         mock_hook.return_value.rewrite.assert_not_called()
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_executes_with_is_older_than_with_true_cond(self, mock_hook):
         mock_hook.return_value.is_older_than.return_value = True
         operator = GCSToGCSOperator(
@@ -329,7 +327,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         mock_hook.return_value.rewrite.assert_called_once_with(
             TEST_BUCKET, 'test_object.txt', DESTINATION_BUCKET, 'test_object.txt')
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_execute_more_than_1_wildcard(self, mock_hook):
         mock_hook.return_value.list.return_value = SOURCE_FILES_LIST
         operator = GCSToGCSOperator(
@@ -346,7 +344,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         with self.assertRaisesRegex(AirflowException, error_msg):
             operator.execute(None)
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_execute_with_empty_destination_bucket(self, mock_hook):
         mock_hook.return_value.list.return_value = SOURCE_FILES_LIST
         operator = GCSToGCSOperator(
@@ -364,7 +362,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
             self.assertEqual(operator.destination_bucket, operator.source_bucket)
 
     # Tests the use of delimiter and source object as list
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_executes_with_empty_source_objects(self, mock_hook):
         operator = GCSToGCSOperator(
             task_id=TASK_ID, source_bucket=TEST_BUCKET,
@@ -375,7 +373,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
             TEST_BUCKET, prefix='', delimiter=None
         )
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_raises_exception_with_two_empty_list_inside_source_objects(self, mock_hook):
         mock_hook.return_value.list.return_value = SOURCE_OBJECTS_LIST
         operator = GCSToGCSOperator(
@@ -386,7 +384,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
                                     "You can't have two empty strings inside source_object"):
             operator.execute(None)
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_executes_with_single_item_in_source_objects(self, mock_hook):
         operator = GCSToGCSOperator(
             task_id=TASK_ID, source_bucket=TEST_BUCKET,
@@ -396,7 +394,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
             TEST_BUCKET, prefix=SOURCE_OBJECTS_SINGLE_FILE[0], delimiter=None
         )
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_executes_with_multiple_items_in_source_objects(self, mock_hook):
         operator = GCSToGCSOperator(
             task_id=TASK_ID, source_bucket=TEST_BUCKET,
@@ -410,7 +408,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
             any_order=True
         )
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_executes_with_a_delimiter(self, mock_hook):
         operator = GCSToGCSOperator(
             task_id=TASK_ID, source_bucket=TEST_BUCKET,
@@ -421,7 +419,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         )
 
     # COPY
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_executes_with_delimiter_and_destination_object(self, mock_hook):
         mock_hook.return_value.list.return_value = ['test_object/file3.json']
         operator = GCSToGCSOperator(
@@ -438,7 +436,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         ]
         mock_hook.return_value.rewrite.assert_has_calls(mock_calls)
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_executes_with_different_delimiter_and_destination_object(self, mock_hook):
         mock_hook.return_value.list.return_value = ['test_object/file1.txt', 'test_object/file2.txt']
         operator = GCSToGCSOperator(
@@ -458,7 +456,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
         ]
         mock_hook.return_value.rewrite.assert_has_calls(mock_calls)
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_executes_with_no_destination_bucket_and_no_destination_object(self, mock_hook):
         mock_hook.return_value.list.return_value = SOURCE_OBJECTS_LIST
         operator = GCSToGCSOperator(
@@ -474,7 +472,7 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
                       TEST_BUCKET, 'test_object/file3.json'), ]
         mock_hook.return_value.rewrite.assert_has_calls(mock_calls)
 
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.gcs_to_gcs.GCSHook')
     def test_wc_with_last_modified_time_with_all_true_cond_no_file(self, mock_hook):
         mock_hook.return_value.list.return_value = SOURCE_OBJECTS_LIST
         mock_hook.return_value.is_updated_after.side_effect = [True, True, True]
@@ -499,35 +497,3 @@ class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
                 DESTINATION_BUCKET, 'test_object/file3.json'
             ), ]
         mock_hook.return_value.rewrite.assert_has_calls(mock_calls_none)
-
-
-class TestGoogleCloudStorageSync(unittest.TestCase):
-
-    @mock.patch('airflow.providers.google.cloud.operators.gcs_to_gcs.GCSHook')
-    def test_execute(self, mock_hook):
-        task = GCSSynchronizeBucketsOperator(
-            task_id="task-id",
-            source_bucket="SOURCE_BUCKET",
-            destination_bucket="DESTINATION_BUCKET",
-            source_object="SOURCE_OBJECT",
-            destination_object="DESTINATION_OBJECT",
-            recursive=True,
-            delete_extra_files=True,
-            allow_overwrite=True,
-            gcp_conn_id="GCP_CONN_ID",
-            delegate_to="DELEGATE_TO",
-        )
-        task.execute({})
-        mock_hook.assert_called_once_with(
-            google_cloud_storage_conn_id='GCP_CONN_ID',
-            delegate_to='DELEGATE_TO'
-        )
-        mock_hook.return_value.sync.assert_called_once_with(
-            source_bucket='SOURCE_BUCKET',
-            source_object='SOURCE_OBJECT',
-            destination_bucket='DESTINATION_BUCKET',
-            destination_object='DESTINATION_OBJECT',
-            delete_extra_files=True,
-            recursive=True,
-            allow_overwrite=True,
-        )
diff --git a/tests/providers/google/cloud/operators/test_gcs_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_gcs_to_gcs_system.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_gcs_to_gcs_system.py
rename to tests/providers/google/cloud/transfers/test_gcs_to_gcs_system.py
diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_local.py b/tests/providers/google/cloud/transfers/test_gcs_to_local.py
new file mode 100644
index 0000000..4763cae
--- /dev/null
+++ b/tests/providers/google/cloud/transfers/test_gcs_to_local.py
@@ -0,0 +1,47 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import unittest
+
+import mock
+
+from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator
+
+TASK_ID = "test-gcs-operator"
+TEST_BUCKET = "test-bucket"
+TEST_PROJECT = "test-project"
+DELIMITER = ".csv"
+PREFIX = "TEST"
+MOCK_FILES = ["TEST1.csv", "TEST2.csv", "TEST3.csv"]
+TEST_OBJECT = "dir1/test-object"
+LOCAL_FILE_PATH = "/home/airflow/gcp/test-object"
+
+
+class TestGoogleCloudStorageDownloadOperator(unittest.TestCase):
+    @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_local.GCSHook")
+    def test_execute(self, mock_hook):
+        operator = GCSToLocalFilesystemOperator(
+            task_id=TASK_ID,
+            bucket=TEST_BUCKET,
+            object_name=TEST_OBJECT,
+            filename=LOCAL_FILE_PATH,
+        )
+
+        operator.execute(None)
+        mock_hook.return_value.download.assert_called_once_with(
+            bucket_name=TEST_BUCKET, object_name=TEST_OBJECT, filename=LOCAL_FILE_PATH
+        )
diff --git a/tests/providers/google/cloud/operators/test_gcs_to_sftp.py b/tests/providers/google/cloud/transfers/test_gcs_to_sftp.py
similarity index 90%
rename from tests/providers/google/cloud/operators/test_gcs_to_sftp.py
rename to tests/providers/google/cloud/transfers/test_gcs_to_sftp.py
index 9b5b42e..5b15b41 100644
--- a/tests/providers/google/cloud/operators/test_gcs_to_sftp.py
+++ b/tests/providers/google/cloud/transfers/test_gcs_to_sftp.py
@@ -23,7 +23,7 @@ import unittest
 import mock
 
 from airflow.exceptions import AirflowException
-from airflow.providers.google.cloud.operators.gcs_to_sftp import GCSToSFTPOperator
+from airflow.providers.google.cloud.transfers.gcs_to_sftp import GCSToSFTPOperator
 
 TASK_ID = "test-gcs-to-sftp-operator"
 GCP_CONN_ID = "GCP_CONN_ID"
@@ -46,8 +46,8 @@ DESTINATION_SFTP = "destination_path"
 
 # pylint: disable=unused-argument
 class TestGoogleCloudStorageToSFTPOperator(unittest.TestCase):
-    @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.GCSHook")
-    @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.SFTPHook")
+    @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSHook")
+    @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.SFTPHook")
     def test_execute_copy_single_file(self, sftp_hook, gcs_hook):
         task = GCSToSFTPOperator(
             task_id=TASK_ID,
@@ -76,8 +76,8 @@ class TestGoogleCloudStorageToSFTPOperator(unittest.TestCase):
 
         gcs_hook.return_value.delete.assert_not_called()
 
-    @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.GCSHook")
-    @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.SFTPHook")
+    @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSHook")
+    @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.SFTPHook")
     def test_execute_move_single_file(self, sftp_hook, gcs_hook):
         task = GCSToSFTPOperator(
             task_id=TASK_ID,
@@ -108,8 +108,8 @@ class TestGoogleCloudStorageToSFTPOperator(unittest.TestCase):
             TEST_BUCKET, SOURCE_OBJECT_NO_WILDCARD
         )
 
-    @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.GCSHook")
-    @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.SFTPHook")
+    @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSHook")
+    @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.SFTPHook")
     def test_execute_copy_with_wildcard(self, sftp_hook, gcs_hook):
         gcs_hook.return_value.list.return_value = SOURCE_FILES_LIST[:2]
         operator = GCSToSFTPOperator(
@@ -135,8 +135,8 @@ class TestGoogleCloudStorageToSFTPOperator(unittest.TestCase):
         self.assertEqual(call_two[1]["bucket_name"], TEST_BUCKET)
         self.assertEqual(call_two[1]["object_name"], "test_object/file2.txt")
 
-    @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.GCSHook")
-    @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.SFTPHook")
+    @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSHook")
+    @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.SFTPHook")
     def test_execute_move_with_wildcard(self, sftp_hook, gcs_hook):
         gcs_hook.return_value.list.return_value = SOURCE_FILES_LIST[:2]
         operator = GCSToSFTPOperator(
@@ -159,8 +159,8 @@ class TestGoogleCloudStorageToSFTPOperator(unittest.TestCase):
         self.assertEqual(call_one[0], (TEST_BUCKET, "test_object/file1.txt"))
         self.assertEqual(call_two[0], (TEST_BUCKET, "test_object/file2.txt"))
 
-    @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.GCSHook")
-    @mock.patch("airflow.providers.google.cloud.operators.gcs_to_sftp.SFTPHook")
+    @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSHook")
+    @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.SFTPHook")
     def test_execute_more_than_one_wildcard_exception(self, sftp_hook, gcs_hook):
         gcs_hook.return_value.list.return_value = SOURCE_FILES_LIST[:2]
         operator = GCSToSFTPOperator(
diff --git a/tests/providers/google/cloud/operators/test_gcs_to_sftp_system.py b/tests/providers/google/cloud/transfers/test_gcs_to_sftp_system.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_gcs_to_sftp_system.py
rename to tests/providers/google/cloud/transfers/test_gcs_to_sftp_system.py
diff --git a/tests/providers/google/cloud/operators/test_local_to_gcs.py b/tests/providers/google/cloud/transfers/test_local_to_gcs.py
similarity index 95%
rename from tests/providers/google/cloud/operators/test_local_to_gcs.py
rename to tests/providers/google/cloud/transfers/test_local_to_gcs.py
index d8d4f97..95bee74 100644
--- a/tests/providers/google/cloud/operators/test_local_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_local_to_gcs.py
@@ -23,7 +23,7 @@ import unittest
 import mock
 
 from airflow.models.dag import DAG
-from airflow.providers.google.cloud.operators.local_to_gcs import LocalFilesystemToGCSOperator
+from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
 
 
 class TestFileToGcsOperator(unittest.TestCase):
@@ -55,7 +55,7 @@ class TestFileToGcsOperator(unittest.TestCase):
         self.assertEqual(operator.mime_type, self._config['mime_type'])
         self.assertEqual(operator.gzip, self._config['gzip'])
 
-    @mock.patch('airflow.providers.google.cloud.operators.local_to_gcs.GCSHook',
+    @mock.patch('airflow.providers.google.cloud.transfers.local_to_gcs.GCSHook',
                 autospec=True)
     def test_execute(self, mock_hook):
         mock_instance = mock_hook.return_value
diff --git a/tests/providers/google/cloud/operators/test_local_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_local_to_gcs_system.py
similarity index 100%
rename from tests/providers/google/cloud/operators/test_local_to_gcs_system.py
rename to tests/providers/google/cloud/transfers/test_local_to_gcs_system.py
diff --git a/tests/providers/google/cloud/operators/test_mssql_to_gcs.py b/tests/providers/google/cloud/transfers/test_mssql_to_gcs.py
similarity index 92%
rename from tests/providers/google/cloud/operators/test_mssql_to_gcs.py
rename to tests/providers/google/cloud/transfers/test_mssql_to_gcs.py
index 08a3e3f..e23836f 100644
--- a/tests/providers/google/cloud/operators/test_mssql_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_mssql_to_gcs.py
@@ -23,7 +23,7 @@ import mock
 from airflow import PY38
 
 if not PY38:
-    from airflow.providers.google.cloud.operators.mssql_to_gcs import MSSQLToGCSOperator
+    from airflow.providers.google.cloud.transfers.mssql_to_gcs import MSSQLToGCSOperator
 
 TASK_ID = 'test-mssql-to-gcs'
 MSSQL_CONN_ID = 'mssql_conn_test'
@@ -65,8 +65,8 @@ class TestMsSqlToGoogleCloudStorageOperator(unittest.TestCase):
         self.assertEqual(op.bucket, BUCKET)
         self.assertEqual(op.filename, JSON_FILENAME)
 
-    @mock.patch('airflow.providers.google.cloud.operators.mssql_to_gcs.MsSqlHook')
-    @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.mssql_to_gcs.MsSqlHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook')
     def test_exec_success_json(self, gcs_hook_mock_class, mssql_hook_mock_class):
         """Test successful run of execute function for JSON"""
         op = MSSQLToGCSOperator(
@@ -97,8 +97,8 @@ class TestMsSqlToGoogleCloudStorageOperator(unittest.TestCase):
         mssql_hook_mock_class.assert_called_once_with(mssql_conn_id=MSSQL_CONN_ID)
         mssql_hook_mock.get_conn().cursor().execute.assert_called_once_with(SQL)
 
-    @mock.patch('airflow.providers.google.cloud.operators.mssql_to_gcs.MsSqlHook')
-    @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.mssql_to_gcs.MsSqlHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook')
     def test_file_splitting(self, gcs_hook_mock_class, mssql_hook_mock_class):
         """Test that ndjson is split by approx_max_file_size_bytes param."""
         mssql_hook_mock = mssql_hook_mock_class.return_value
@@ -128,8 +128,8 @@ class TestMsSqlToGoogleCloudStorageOperator(unittest.TestCase):
             approx_max_file_size_bytes=len(expected_upload[JSON_FILENAME.format(0)]))
         op.execute(None)
 
-    @mock.patch('airflow.providers.google.cloud.operators.mssql_to_gcs.MsSqlHook')
-    @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.mssql_to_gcs.MsSqlHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook')
     def test_schema_file(self, gcs_hook_mock_class, mssql_hook_mock_class):
         """Test writing schema files."""
         mssql_hook_mock = mssql_hook_mock_class.return_value
diff --git a/tests/providers/google/cloud/operators/test_mysql_to_gcs.py b/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py
similarity index 91%
rename from tests/providers/google/cloud/operators/test_mysql_to_gcs.py
rename to tests/providers/google/cloud/transfers/test_mysql_to_gcs.py
index 03a75d3..abd8a18 100644
--- a/tests/providers/google/cloud/operators/test_mysql_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py
@@ -24,7 +24,7 @@ import mock
 from _mysql_exceptions import ProgrammingError
 from parameterized import parameterized
 
-from airflow.providers.google.cloud.operators.mysql_to_gcs import MySQLToGCSOperator
+from airflow.providers.google.cloud.transfers.mysql_to_gcs import MySQLToGCSOperator
 
 TASK_ID = 'test-mysql-to-gcs'
 MYSQL_CONN_ID = 'mysql_conn_test'
@@ -110,8 +110,8 @@ class TestMySqlToGoogleCloudStorageOperator(unittest.TestCase):
             op.convert_type(value, schema_type),
             expected)
 
-    @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook')
-    @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook')
     def test_exec_success_json(self, gcs_hook_mock_class, mysql_hook_mock_class):
         """Test successful run of execute function for JSON"""
         op = MySQLToGCSOperator(
@@ -142,8 +142,8 @@ class TestMySqlToGoogleCloudStorageOperator(unittest.TestCase):
         mysql_hook_mock_class.assert_called_once_with(mysql_conn_id=MYSQL_CONN_ID)
         mysql_hook_mock.get_conn().cursor().execute.assert_called_once_with(SQL)
 
-    @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook')
-    @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook')
     def test_exec_success_csv(self, gcs_hook_mock_class, mysql_hook_mock_class):
         """Test successful run of execute function for CSV"""
         op = MySQLToGCSOperator(
@@ -175,8 +175,8 @@ class TestMySqlToGoogleCloudStorageOperator(unittest.TestCase):
         mysql_hook_mock_class.assert_called_once_with(mysql_conn_id=MYSQL_CONN_ID)
         mysql_hook_mock.get_conn().cursor().execute.assert_called_once_with(SQL)
 
-    @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook')
-    @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook')
     def test_exec_success_csv_ensure_utc(self, gcs_hook_mock_class, mysql_hook_mock_class):
         """Test successful run of execute function for CSV"""
         op = MySQLToGCSOperator(
@@ -209,8 +209,8 @@ class TestMySqlToGoogleCloudStorageOperator(unittest.TestCase):
         mysql_hook_mock_class.assert_called_once_with(mysql_conn_id=MYSQL_CONN_ID)
         mysql_hook_mock.get_conn().cursor().execute.assert_has_calls([mock.call(TZ_QUERY), mock.call(SQL)])
 
-    @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook')
-    @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook')
     def test_exec_success_csv_with_delimiter(self, gcs_hook_mock_class, mysql_hook_mock_class):
         """Test successful run of execute function for CSV with a field delimiter"""
         op = MySQLToGCSOperator(
@@ -243,8 +243,8 @@ class TestMySqlToGoogleCloudStorageOperator(unittest.TestCase):
         mysql_hook_mock_class.assert_called_once_with(mysql_conn_id=MYSQL_CONN_ID)
         mysql_hook_mock.get_conn().cursor().execute.assert_called_once_with(SQL)
 
-    @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook')
-    @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook')
     def test_file_splitting(self, gcs_hook_mock_class, mysql_hook_mock_class):
         """Test that ndjson is split by approx_max_file_size_bytes param."""
         mysql_hook_mock = mysql_hook_mock_class.return_value
@@ -274,8 +274,8 @@ class TestMySqlToGoogleCloudStorageOperator(unittest.TestCase):
             approx_max_file_size_bytes=len(expected_upload[JSON_FILENAME.format(0)]))
         op.execute(None)
 
-    @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook')
-    @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook')
     def test_schema_file(self, gcs_hook_mock_class, mysql_hook_mock_class):
         """Test writing schema files."""
         mysql_hook_mock = mysql_hook_mock_class.return_value
@@ -303,8 +303,8 @@ class TestMySqlToGoogleCloudStorageOperator(unittest.TestCase):
         # once for the file and once for the schema
         self.assertEqual(2, gcs_hook_mock.upload.call_count)
 
-    @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook')
-    @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook')
     def test_schema_file_with_custom_schema(self, gcs_hook_mock_class, mysql_hook_mock_class):
         """Test writing schema files with customized schema"""
         mysql_hook_mock = mysql_hook_mock_class.return_value
@@ -333,8 +333,8 @@ class TestMySqlToGoogleCloudStorageOperator(unittest.TestCase):
         # once for the file and once for the schema
         self.assertEqual(2, gcs_hook_mock.upload.call_count)
 
-    @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook')
-    @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook')
     def test_query_with_error(self, mock_gcs_hook, mock_mysql_hook):
         mock_mysql_hook.return_value.get_conn.\
             return_value.cursor.return_value.execute.side_effect = ProgrammingError
@@ -347,8 +347,8 @@ class TestMySqlToGoogleCloudStorageOperator(unittest.TestCase):
         with self.assertRaises(ProgrammingError):
             op.query()
 
-    @mock.patch('airflow.providers.google.cloud.operators.mysql_to_gcs.MySqlHook')
-    @mock.patch('airflow.providers.google.cloud.operators.sql_to_gcs.GCSHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.mysql_to_gcs.MySqlHook')
+    @mock.patch('airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook')
     def test_execute_with_query_error(self, mock_gcs_hook, mock_mysql_hook):
         mock_mysql_hook.return_value.get_conn.\
             return_value.cursor.return_value.execute.side_effect = ProgrammingError
diff --git a/tests/providers/google/cloud/operators/test_postgres_to_gcs.py b/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py
similarity index 95%
rename from tests/providers/google/cloud/operators/test_postgres_to_gcs.py
rename to tests/providers/google/cloud/transfers/test_postgres_to_gcs.py
index 9587676..9fc0da5 100644
--- a/tests/providers/google/cloud/operators/test_postgres_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py
@@ -21,7 +21,7 @@ import unittest
 import pytest
 from mock import patch
 
-from airflow.providers.google.cloud.operators.postgres_to_gcs import PostgresToGCSOperator
+from airflow.providers.google.cloud.transfers.postgres_to_gcs import PostgresToGCSOperator
 from airflow.providers.postgres.hooks.postgres import PostgresHook
 
 TABLES = {'postgres_to_gcs_operator', 'postgres_to_gcs_operator_empty'}
@@ -84,7 +84,7 @@ class TestPostgresToGoogleCloudStorageOperator(unittest.TestCase):
         self.assertEqual(op.bucket, BUCKET)
         self.assertEqual(op.filename, FILENAME)
... 1244 lines suppressed ...