Posted to commits@airflow.apache.org by po...@apache.org on 2020/12/28 18:10:38 UTC

[airflow] branch master updated: Refactored setup.py to better reflect changes in providers (#13314)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/master by this push:
     new 0d21457  Refactored setup.py to better reflect changes in providers (#13314)
0d21457 is described below

commit 0d214575a144356a8a83a462d6d9fb68bf4999c7
Author: Jarek Potiuk <ja...@polidea.com>
AuthorDate: Mon Dec 28 19:10:27 2020 +0100

    Refactored setup.py to better reflect changes in providers (#13314)
    
    This is a complete refactor of the providers/dependencies handling in setup.py.

    It much better reflects the current setup, where most of the
    extras map 1-1 to providers, but some extras do not have their
    own providers.
    
    The pre-commits that verify setup.py against the documentation
    can now be vastly simplified (there is no more need to parse
    comments, as we can import setup.py variables directly rather
    than extract them via regexps). We can also better categorize
    the extras - separate out (and verify) that deprecated extras
    are correctly described, and mark extras that install
    additional providers as such.
    
    Fixes: #13309
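
A rough sketch of the "import setup.py variables directly" approach described
above - illustrative only, assuming it is run from the repository root. The
PROVIDERS_REQUIREMENTS and PREINSTALLED_PROVIDERS names are taken from this
commit; the check itself is hypothetical and is not one of the actual
scripts/ci/pre_commit/ scripts.

    import sys

    # Import the constants the same way prepare_provider_packages.py does
    # in the diff below, instead of regex-parsing setup.py.
    from setup import PREINSTALLED_PROVIDERS, PROVIDERS_REQUIREMENTS


    def check_preinstalled_providers_are_known() -> int:
        """Fail if a preinstalled provider has no entry in PROVIDERS_REQUIREMENTS."""
        unknown = [p for p in PREINSTALLED_PROVIDERS if p not in PROVIDERS_REQUIREMENTS]
        if unknown:
            print(f"Preinstalled providers missing from PROVIDERS_REQUIREMENTS: {unknown}")
            return 1
        return 0


    if __name__ == "__main__":
        sys.exit(check_preinstalled_providers_are_known())
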
---
 .pre-commit-config.yaml                            |   2 +
 CONTRIBUTING.rst                                   |  18 +-
 INSTALL                                            |  18 +-
 airflow/providers/mysql/ADDITIONAL_INFO.md         |  24 +
 dev/provider_packages/prepare_provider_packages.py |  13 +-
 docs/apache-airflow/extra-packages-ref.rst         | 409 ++++++++--------
 .../pre_commit_check_extras_have_providers.py      |  69 ++-
 .../ci/pre_commit/pre_commit_check_order_setup.py  | 129 +++--
 .../pre_commit_check_setup_extra_packages_ref.py   | 273 ++++++++---
 setup.cfg                                          |   4 -
 setup.py                                           | 536 +++++++++------------
 tests/build_provider_packages_dependencies.py      |   4 +-
 12 files changed, 792 insertions(+), 707 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6d4aea5..b8e10d5 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -239,6 +239,7 @@ repos:
         files: ^setup.py$|^setup.cfg$
         pass_filenames: false
         entry: ./scripts/ci/pre_commit/pre_commit_check_order_setup.py
+        additional_dependencies: ['rich']
       - id: setup-extra-packages
         name: Checks setup extra packages
         description: Checks if all the libraries in setup.py are listed in extra-packages-ref.rst file
@@ -482,6 +483,7 @@ repos:
         files: "setup.py|^airflow/providers/.*.py"
         pass_filenames: false
         require_serial: true
+        additional_dependencies: ['rich']
       - id: markdownlint
         name: Run markdownlint
         description: "Checks the style of Markdown files."
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index b5ea216..10ab03f 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -618,17 +618,17 @@ This is the full list of those extras:
 
   .. START EXTRAS HERE
 
-all_dbs, amazon, apache.atlas, apache.beam, apache.cassandra, apache.druid, apache.hdfs,
+all, all_dbs, amazon, apache.atlas, apache.beam, apache.cassandra, apache.druid, apache.hdfs,
 apache.hive, apache.kylin, apache.livy, apache.pig, apache.pinot, apache.spark, apache.sqoop,
 apache.webhdfs, async, atlas, aws, azure, cassandra, celery, cgroups, cloudant, cncf.kubernetes,
-crypto, dask, databricks, datadog, dingding, discord, docker, druid, elasticsearch, exasol,
-facebook, ftp, gcp, gcp_api, github_enterprise, google, google_auth, grpc, hashicorp, hdfs, hive,
-http, imap, jdbc, jenkins, jira, kerberos, kubernetes, ldap, microsoft.azure, microsoft.mssql,
-microsoft.winrm, mongo, mssql, mysql, odbc, openfaas, opsgenie, oracle, pagerduty, papermill,
-password, pinot, plexus, postgres, presto, qds, qubole, rabbitmq, redis, s3, salesforce, samba,
-segment, sendgrid, sentry, sftp, singularity, slack, snowflake, spark, sqlite, ssh, statsd, tableau,
-telegram, vertica, virtualenv, webhdfs, winrm, yandex, zendesk, all, devel, devel_hadoop, doc,
-devel_all, devel_ci
+crypto, dask, databricks, datadog, devel, devel_all, devel_ci, devel_hadoop, dingding, discord, doc,
+docker, druid, elasticsearch, exasol, facebook, ftp, gcp, gcp_api, github_enterprise, google,
+google_auth, grpc, hashicorp, hdfs, hive, http, imap, jdbc, jenkins, jira, kerberos, kubernetes,
+ldap, microsoft.azure, microsoft.mssql, microsoft.winrm, mongo, mssql, mysql, odbc, openfaas,
+opsgenie, oracle, pagerduty, papermill, password, pinot, plexus, postgres, presto, qds, qubole,
+rabbitmq, redis, s3, salesforce, samba, segment, sendgrid, sentry, sftp, singularity, slack,
+snowflake, spark, sqlite, ssh, statsd, tableau, telegram, vertica, virtualenv, webhdfs, winrm,
+yandex, zendesk
 
   .. END EXTRAS HERE
 
diff --git a/INSTALL b/INSTALL
index 128cf97..d87173b 100644
--- a/INSTALL
+++ b/INSTALL
@@ -71,17 +71,17 @@ pip install -e . \
 # You can also install Airflow with extras specified. The list of available extras:
 # START EXTRAS HERE
 
-all_dbs, amazon, apache.atlas, apache.beam, apache.cassandra, apache.druid, apache.hdfs,
+all, all_dbs, amazon, apache.atlas, apache.beam, apache.cassandra, apache.druid, apache.hdfs,
 apache.hive, apache.kylin, apache.livy, apache.pig, apache.pinot, apache.spark, apache.sqoop,
 apache.webhdfs, async, atlas, aws, azure, cassandra, celery, cgroups, cloudant, cncf.kubernetes,
-crypto, dask, databricks, datadog, dingding, discord, docker, druid, elasticsearch, exasol,
-facebook, ftp, gcp, gcp_api, github_enterprise, google, google_auth, grpc, hashicorp, hdfs, hive,
-http, imap, jdbc, jenkins, jira, kerberos, kubernetes, ldap, microsoft.azure, microsoft.mssql,
-microsoft.winrm, mongo, mssql, mysql, odbc, openfaas, opsgenie, oracle, pagerduty, papermill,
-password, pinot, plexus, postgres, presto, qds, qubole, rabbitmq, redis, s3, salesforce, samba,
-segment, sendgrid, sentry, sftp, singularity, slack, snowflake, spark, sqlite, ssh, statsd, tableau,
-telegram, vertica, virtualenv, webhdfs, winrm, yandex, zendesk, all, devel, devel_hadoop, doc,
-devel_all, devel_ci
+crypto, dask, databricks, datadog, devel, devel_all, devel_ci, devel_hadoop, dingding, discord, doc,
+docker, druid, elasticsearch, exasol, facebook, ftp, gcp, gcp_api, github_enterprise, google,
+google_auth, grpc, hashicorp, hdfs, hive, http, imap, jdbc, jenkins, jira, kerberos, kubernetes,
+ldap, microsoft.azure, microsoft.mssql, microsoft.winrm, mongo, mssql, mysql, odbc, openfaas,
+opsgenie, oracle, pagerduty, papermill, password, pinot, plexus, postgres, presto, qds, qubole,
+rabbitmq, redis, s3, salesforce, samba, segment, sendgrid, sentry, sftp, singularity, slack,
+snowflake, spark, sqlite, ssh, statsd, tableau, telegram, vertica, virtualenv, webhdfs, winrm,
+yandex, zendesk
 
 # END EXTRAS HERE
 
diff --git a/airflow/providers/mysql/ADDITIONAL_INFO.md b/airflow/providers/mysql/ADDITIONAL_INFO.md
new file mode 100644
index 0000000..1c96198
--- /dev/null
+++ b/airflow/providers/mysql/ADDITIONAL_INFO.md
@@ -0,0 +1,24 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied.  See the License for the
+ specific language governing permissions and limitations
+ under the License.
+ -->
+
+# MySQL client requirements
+
+The version of MySQL server has to be 5.6.4+. The exact version upper bound depends
+on the version of the ``mysqlclient`` package. For example, ``mysqlclient`` 1.3.12 can only be
+used with MySQL server 5.6.4 through 5.7.
diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py
index 297f090..2459a58 100644
--- a/dev/provider_packages/prepare_provider_packages.py
+++ b/dev/provider_packages/prepare_provider_packages.py
@@ -57,7 +57,7 @@ sys.path.insert(0, SOURCE_DIR_PATH)
 # running the script
 import tests.deprecated_classes  # noqa # isort:skip
 from dev.import_all_classes import import_all_classes  # noqa # isort:skip
-from setup import PROVIDERS_REQUIREMENTS  # noqa # isort:skip
+from setup import PROVIDERS_REQUIREMENTS, PREINSTALLED_PROVIDERS  # noqa # isort:skip
 
 # Note - we do not test protocols as they are not really part of the official API of
 # Apache Airflow
@@ -287,8 +287,9 @@ def get_install_requirements(provider_package_id: str, backport_packages: bool)
             else 'apache-airflow>=1.10.12, <2.0.0'
         )
     else:
-        airflow_dependency = 'apache-airflow>=2.0.0a0'
-    install_requires = [airflow_dependency]
+        airflow_dependency = 'apache-airflow>=2.0.0'
+    # Avoid circular dependency for the preinstalled packages
+    install_requires = [airflow_dependency] if provider_package_id not in PREINSTALLED_PROVIDERS else []
     install_requires.extend(dependencies)
     return install_requires
 
@@ -324,12 +325,12 @@ def get_package_extras(provider_package_id: str, backport_packages: bool) -> Dic
     return extras_dict
 
 
-def get_provider_packages():
+def get_provider_packages() -> List[str]:
     """
     Returns all provider packages.
 
     """
-    return list(PROVIDERS_REQUIREMENTS)
+    return list(PROVIDERS_REQUIREMENTS.keys())
 
 
 def usage() -> None:
@@ -795,7 +796,7 @@ def convert_cross_package_dependencies_to_table(
     """
     Converts cross-package dependencies to a markdown table
     :param cross_package_dependencies: list of cross-package dependencies
-    :param base_url: base url to use for links
+    :param backport_packages: whether we are preparing backport packages
     :return: markdown-formatted table
     """
     from tabulate import tabulate
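
To illustrate how the dependency direction flips in get_install_requirements
above, a minimal standalone sketch with assumed example data - the real
PREINSTALLED_PROVIDERS and PROVIDERS_REQUIREMENTS values are defined in
setup.py, and the version pins below are placeholders, not the actual ones:

    # Illustrative stand-ins; the real values live in setup.py.
    PREINSTALLED_PROVIDERS = ["ftp", "http", "imap", "sqlite"]  # assumed example
    PROVIDERS_REQUIREMENTS = {
        "http": ["requests>=2.20.0"],     # assumed example pin
        "mysql": ["mysqlclient>=1.3.6"],  # assumed example pin
    }


    def get_install_requirements(provider_package_id: str) -> list:
        airflow_dependency = "apache-airflow>=2.0.0"
        # Preinstalled providers must not depend back on apache-airflow,
        # because apache-airflow itself depends on them (see the diff above).
        install_requires = (
            [] if provider_package_id in PREINSTALLED_PROVIDERS else [airflow_dependency]
        )
        install_requires.extend(PROVIDERS_REQUIREMENTS.get(provider_package_id, []))
        return install_requires


    print(get_install_requirements("http"))   # ['requests>=2.20.0'] - no airflow dependency
    print(get_install_requirements("mysql"))  # ['apache-airflow>=2.0.0', 'mysqlclient>=1.3.6']
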
diff --git a/docs/apache-airflow/extra-packages-ref.rst b/docs/apache-airflow/extra-packages-ref.rst
index 37ec2fd..44158fb 100644
--- a/docs/apache-airflow/extra-packages-ref.rst
+++ b/docs/apache-airflow/extra-packages-ref.rst
@@ -18,227 +18,237 @@
 Extra Packages Reference
 ''''''''''''''''''''''''
 
-Here's the list of the :ref:`subpackages <installation:extra_packages>` and what they enable:
+Here's the list of the :ref:`subpackages <installation:extra_packages>` and what they enable.
 
+The entries with ``*`` in the ``Providers`` column indicate that one or more providers will be installed
+automatically when those extras are installed. In those cases, there is a dependency between the
+corresponding provider packages and the ``apache-airflow`` package (the provider package depends on
+``apache-airflow>=2.0.0``). Provider extras usually install a single provider package, but extras that are
+groups of other extras (for example ``all``, ``devel_all`` or ``all_dbs``) may install more than one
+provider together with the extra.
+
+The entries with ``*`` in the ``Preinstalled`` column indicate that those extras (with providers) are always
+pre-installed when Airflow is installed. In this case the dependency is reversed - the ``apache-airflow``
+package depends on the corresponding provider packages. This avoids a circular dependency that some
+tools might otherwise report (even though it is harmless).
+
+.. note::
+  You can disable automated installation of the providers with extras when installing Airflow. To do so,
+  set the ``INSTALL_PROVIDERS_FROM_SOURCES`` environment variable to ``true`` before running the
+  ``pip install`` command. Contributors need to set it if they are installing Airflow locally and want to
+  develop providers directly from the Airflow sources. This variable is set automatically in the ``Breeze``
+  development environment.
 
 **Fundamentals:**
 
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| subpackage          | install command                                     | enables                                                              |
-+=====================+=====================================================+======================================================================+
-| all                 | ``pip install 'apache-airflow[all]'``               | All Airflow user facing features (no devel and doc requirements)     |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| all_dbs             | ``pip install 'apache-airflow[all_dbs]'``           | All databases integrations                                           |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| devel               | ``pip install 'apache-airflow[devel]'``             | Minimum dev tools requirements (without ``all``)                     |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| devel_hadoop        | ``pip install 'apache-airflow[devel_hadoop]'``      | Same as ``devel`` + dependencies for developing the Hadoop stack     |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| devel_all           | ``pip install 'apache-airflow[devel_all]'``         | Everything needed for development (``devel_hadoop`` + ``all``)       |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| devel_ci            | ``pip install 'apache-airflow[devel_ci]'``          | All dependencies required for CI build.                              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| doc                 | ``pip install 'apache-airflow[doc]'``               | Packages needed to build docs (included in ``devel``)                |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| password            | ``pip install 'apache-airflow[password]'``          | Password authentication for users                                    |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| extra               | install command                                     | enables                                                              | Providers |
++=====================+=====================================================+======================================================================+===========+
+| all                 | ``pip install 'apache-airflow[all]'``               | All Airflow user facing features (no devel and doc requirements)     |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| all_dbs             | ``pip install 'apache-airflow[all_dbs]'``           | All databases integrations                                           |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| devel               | ``pip install 'apache-airflow[devel]'``             | Minimum dev tools requirements (without providers)                   |           |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| devel_hadoop        | ``pip install 'apache-airflow[devel_hadoop]'``      | Same as ``devel`` + dependencies for developing the Hadoop stack     |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| devel_all           | ``pip install 'apache-airflow[devel_all]'``         | Everything needed for development (``devel_hadoop`` +  providers)    |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| devel_ci            | ``pip install 'apache-airflow[devel_ci]'``          | All dependencies required for CI build.                              |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| doc                 | ``pip install 'apache-airflow[doc]'``               | Packages needed to build docs (included in ``devel``)                |           |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| password            | ``pip install 'apache-airflow[password]'``          | Password authentication for users                                    |           |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
 
 
 **Apache Software:**
 
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| subpackage          | install command                                     | enables                                                              |
-+=====================+=====================================================+======================================================================+
-| atlas               | ``pip install 'apache-airflow[apache.atlas]'``      | Apache Atlas to use Data Lineage feature                             |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| beam                | ``pip install 'apache-airflow[apache.beam]'``       | Apache Beam operators & hooks                                        |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| cassandra           | ``pip install 'apache-airflow[apache.cassandra]'``  | Cassandra related operators & hooks                                  |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| druid               | ``pip install 'apache-airflow[apache.druid]'``      | Druid related operators & hooks                                      |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| hdfs                | ``pip install 'apache-airflow[apache.hdfs]'``       | HDFS hooks and operators                                             |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| hive                | ``pip install 'apache-airflow[apache.hive]'``       | All Hive related operators                                           |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| kylin               | ``pip install 'apache-airflow[apache.kylin]'``      | All Kylin related operators & hooks                                  |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| livy                | ``pip install 'apache-airflow[apache.livy]'``       | All Livy related operators, hooks & sensors                          |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| pig                 | ``pip install 'apache-airflow[apache.pig]'``        | All Pig related operators & hooks                                    |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| pinot               | ``pip install 'apache-airflow[apache.pinot]'``      | All Pinot related hooks                                              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| spark               | ``pip install 'apache-airflow[apache.spark]'``      | All Spark related operators & hooks                                  |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| sqoop               | ``pip install 'apache-airflow[apache.sqoop]'``      | All Sqoop related operators & hooks                                  |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| webhdfs             | ``pip install 'apache-airflow[apache.webhdfs]'``    | HDFS hooks and operators                                             |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| extra               | install command                                     | enables                                                              | Providers |
++=====================+=====================================================+======================================================================+===========+
+| apache.atlas        | ``pip install 'apache-airflow[apache.atlas]'``      | Apache Atlas to use Data Lineage feature                             |           |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| apache.beam         | ``pip install 'apache-airflow[apache.beam]'``       | Apache Beam operators & hooks                                        |           |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| apache.cassandra    | ``pip install 'apache-airflow[apache.cassandra]'``  | Cassandra related operators & hooks                                  |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| apache.druid        | ``pip install 'apache-airflow[apache.druid]'``      | Druid related operators & hooks                                      |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| apache.hdfs         | ``pip install 'apache-airflow[apache.hdfs]'``       | HDFS hooks and operators                                             |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| apache.hive         | ``pip install 'apache-airflow[apache.hive]'``       | All Hive related operators                                           |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| apache.kylin        | ``pip install 'apache-airflow[apache.kylin]'``      | All Kylin related operators & hooks                                  |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| apache.livy         | ``pip install 'apache-airflow[apache.livy]'``       | All Livy related operators, hooks & sensors                          |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| apache.pig          | ``pip install 'apache-airflow[apache.pig]'``        | All Pig related operators & hooks                                    |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| apache.pinot        | ``pip install 'apache-airflow[apache.pinot]'``      | All Pinot related hooks                                              |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| apache.spark        | ``pip install 'apache-airflow[apache.spark]'``      | All Spark related operators & hooks                                  |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| apache.sqoop        | ``pip install 'apache-airflow[apache.sqoop]'``      | All Sqoop related operators & hooks                                  |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
+| apache.webhdfs      | ``pip install 'apache-airflow[apache.webhdfs]'``    | HDFS hooks and operators                                             |           |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+
 
 
 **Services:**
 
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| subpackage          | install command                                     | enables                                                                    |
-+=====================+=====================================================+============================================================================+
-| aws                 | ``pip install 'apache-airflow[amazon]'``            | Amazon Web Services                                                        |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| azure               | ``pip install 'apache-airflow[microsoft.azure]'``   | Microsoft Azure                                                            |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| cloudant            | ``pip install 'apache-airflow[cloudant]'``          | Cloudant hook                                                              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| databricks          | ``pip install 'apache-airflow[databricks]'``        | Databricks hooks and operators                                             |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| datadog             | ``pip install 'apache-airflow[datadog]'``           | Datadog hooks and sensors                                                  |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| dingding            | ``pip install 'apache-airflow[dingding]'``          | Dingding hooks and sensors                                                 |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| discord             | ``pip install 'apache-airflow[discord]'``           | Discord hooks and sensors                                                  |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| facebook            | ``pip install 'apache-airflow[facebook]'``          | Facebook Social                                                            |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| gcp                 | ``pip install 'apache-airflow[google]'``            | Google Cloud                                                               |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| github_enterprise   | ``pip install 'apache-airflow[github_enterprise]'`` | GitHub Enterprise auth backend                                             |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| google_auth         | ``pip install 'apache-airflow[google_auth]'``       | Google auth backend                                                        |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| hashicorp           | ``pip install 'apache-airflow[hashicorp]'``         | Hashicorp Services (Vault)                                                 |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| jira                | ``pip install 'apache-airflow[jira]'``              | Jira hooks and operators                                                   |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| opsgenie            | ``pip install 'apache-airflow[opsgenie]'``          | OpsGenie hooks and operators                                               |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| pagerduty           | ``pip install 'apache-airflow[pagerduty]'``         | Pagerduty hook                                                             |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| plexus              | ``pip install 'apache-airflow[plexus]'``            | Plexus service of CoreScientific.com AI platform                           |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| qds                 | ``pip install 'apache-airflow[qds]'``               | Enable QDS (Qubole Data Service) support                                   |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| salesforce          | ``pip install 'apache-airflow[salesforce]'``        | Salesforce hook                                                            |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| sendgrid            | ``pip install 'apache-airflow[sendgrid]'``          | Send email using sendgrid                                                  |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| segment             | ``pip install 'apache-airflow[segment]'``           | Segment hooks and sensors                                                  |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| sentry              | ``pip install 'apache-airflow[sentry]'``            | Sentry service for application logging and monitoring                      |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| slack               | ``pip install 'apache-airflow[slack]'``             | :class:`airflow.providers.slack.operators.slack.SlackAPIOperator`          |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| snowflake           | ``pip install 'apache-airflow[snowflake]'``         | Snowflake hooks and operators                                              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| telegram            | ``pip install 'apache-airflow[telegram]'``          | :class:`airflow.providers.telegram.operators.telegram.TelegramOperator`    |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| vertica             | ``pip install 'apache-airflow[vertica]'``           | Vertica hook support as an Airflow backend                                 |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| yandex              | ``pip install 'apache-airflow[yandex]'``            | Yandex.cloud hooks and operators                                           |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
-| zendesk             | ``pip install 'apache-airflow[zendesk]'``           | Zendesk hooks                                                              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| extra               | install command                                     | enables                                                                    | Providers |
++=====================+=====================================================+============================================================================+===========+
+| amazon              | ``pip install 'apache-airflow[amazon]'``            | Amazon Web Services                                                        |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| azure               | ``pip install 'apache-airflow[microsoft.azure]'``   | Microsoft Azure                                                            |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| cloudant            | ``pip install 'apache-airflow[cloudant]'``          | Cloudant hook                                                              |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| databricks          | ``pip install 'apache-airflow[databricks]'``        | Databricks hooks and operators                                             |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| datadog             | ``pip install 'apache-airflow[datadog]'``           | Datadog hooks and sensors                                                  |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| dingding            | ``pip install 'apache-airflow[dingding]'``          | Dingding hooks and sensors                                                 |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| discord             | ``pip install 'apache-airflow[discord]'``           | Discord hooks and sensors                                                  |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| facebook            | ``pip install 'apache-airflow[facebook]'``          | Facebook Social                                                            |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| google              | ``pip install 'apache-airflow[google]'``            | Google Cloud                                                               |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| github_enterprise   | ``pip install 'apache-airflow[github_enterprise]'`` | GitHub Enterprise auth backend                                             |           |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| google_auth         | ``pip install 'apache-airflow[google_auth]'``       | Google auth backend                                                        |           |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| hashicorp           | ``pip install 'apache-airflow[hashicorp]'``         | Hashicorp Services (Vault)                                                 |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| jira                | ``pip install 'apache-airflow[jira]'``              | Jira hooks and operators                                                   |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| opsgenie            | ``pip install 'apache-airflow[opsgenie]'``          | OpsGenie hooks and operators                                               |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| pagerduty           | ``pip install 'apache-airflow[pagerduty]'``         | Pagerduty hook                                                             |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| plexus              | ``pip install 'apache-airflow[plexus]'``            | Plexus service of CoreScientific.com AI platform                           |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| qubole              | ``pip install 'apache-airflow[qubole]'``            | Enable QDS (Qubole Data Service) support                                   |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| salesforce          | ``pip install 'apache-airflow[salesforce]'``        | Salesforce hook                                                            |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| sendgrid            | ``pip install 'apache-airflow[sendgrid]'``          | Send email using sendgrid                                                  |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| segment             | ``pip install 'apache-airflow[segment]'``           | Segment hooks and sensors                                                  |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| sentry              | ``pip install 'apache-airflow[sentry]'``            | Sentry service for application logging and monitoring                      |           |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| slack               | ``pip install 'apache-airflow[slack]'``             | Slack hooks and operators                                                  |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| snowflake           | ``pip install 'apache-airflow[snowflake]'``         | Snowflake hooks and operators                                              |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| telegram            | ``pip install 'apache-airflow[telegram]'``          | Telegram hooks and operators                                               |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| vertica             | ``pip install 'apache-airflow[vertica]'``           | Vertica hook support as an Airflow backend                                 |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| yandex              | ``pip install 'apache-airflow[yandex]'``            | Yandex.cloud hooks and operators                                           |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
+| zendesk             | ``pip install 'apache-airflow[zendesk]'``           | Zendesk hooks                                                              |     *     |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+
 
 
 **Software:**
 
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| subpackage          | install command                                     | enables                                                                            |
-+=====================+=====================================================+====================================================================================+
-| async               | ``pip install 'apache-airflow[async]'``             | Async worker classes for Gunicorn                                                  |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| celery              | ``pip install 'apache-airflow[celery]'``            | CeleryExecutor                                                                     |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| dask                | ``pip install 'apache-airflow[dask]'``              | DaskExecutor                                                                       |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| docker              | ``pip install 'apache-airflow[docker]'``            | Docker hooks and operators                                                         |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| elasticsearch       | ``pip install 'apache-airflow[elasticsearch]'``     | Elasticsearch hooks and Log Handler                                                |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| exasol              | ``pip install 'apache-airflow[exasol]'``            | Exasol hooks and operators                                                         |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| jenkins             | ``pip install 'apache-airflow[jenkins]'``           | Jenkins hooks and operators                                                        |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| kubernetes          | ``pip install 'apache-airflow[cncf.kubernetes]'``   | Kubernetes Executor and operator                                                   |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| mongo               | ``pip install 'apache-airflow[mongo]'``             | Mongo hooks and operators                                                          |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| mssql (deprecated)  | ``pip install 'apache-airflow[microsoft.mssql]'``   | Microsoft SQL Server operators and hook,                                           |
-|                     |                                                     | support as an Airflow backend.  Uses pymssql.                                      |
-|                     |                                                     | Will be replaced by subpackage ``odbc``.                                           |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| mysql               | ``pip install 'apache-airflow[mysql]'``             | MySQL operators and hook, support as an Airflow                                    |
-|                     |                                                     | backend. The version of MySQL server has to be                                     |
-|                     |                                                     | 5.6.4+. The exact version upper bound depends                                      |
-|                     |                                                     | on version of ``mysqlclient`` package. For                                         |
-|                     |                                                     | example, ``mysqlclient`` 1.3.12 can only be                                        |
-|                     |                                                     | used with MySQL server 5.6.4 through 5.7.                                          |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| odbc                | ``pip install 'apache-airflow[odbc]'``              | ODBC data sources including MS SQL Server.  Can use MsSqlOperator,                 |
-|                     |                                                     | or as metastore database backend.  Uses pyodbc.                                    |
-|                     |                                                     | See :doc:`apache-airflow-providers-odbc:index` for more info.                      |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| openfaas            | ``pip install 'apache-airflow[openfaas]'``          | OpenFaaS hooks                                                                     |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| oracle              | ``pip install 'apache-airflow[oracle]'``            | Oracle hooks and operators                                                         |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| postgres            | ``pip install 'apache-airflow[postgres]'``          | PostgreSQL operators and hook, support as an                                       |
-|                     |                                                     | Airflow backend                                                                    |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| presto              | ``pip install 'apache-airflow[presto]'``            | All Presto related operators & hooks                                               |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| rabbitmq            | ``pip install 'apache-airflow[rabbitmq]'``          | RabbitMQ support as a Celery backend                                               |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| redis               | ``pip install 'apache-airflow[redis]'``             | Redis hooks and sensors                                                            |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| samba               | ``pip install 'apache-airflow[samba]'``             | :class:`airflow.providers.apache.hive.transfers.hive_to_samba.HiveToSambaOperator` |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| singularity         | ``pip install 'apache-airflow[singularity]'``       | Singularity container operator                                                     |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| statsd              | ``pip install 'apache-airflow[statsd]'``            | Needed by StatsD metrics                                                           |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| tableau             | ``pip install 'apache-airflow[tableau]'``           | Tableau visualization integration                                                  |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
-| virtualenv          | ``pip install 'apache-airflow[virtualenv]'``        | Running python tasks in local virtualenv                                           |
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| extra               | install command                                     | enables                                                                            | Providers |
++=====================+=====================================================+====================================================================================+===========+
+| async               | ``pip install 'apache-airflow[async]'``             | Async worker classes for Gunicorn                                                  |           |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| celery              | ``pip install 'apache-airflow[celery]'``            | CeleryExecutor                                                                     |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| dask                | ``pip install 'apache-airflow[dask]'``              | DaskExecutor                                                                       |           |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| docker              | ``pip install 'apache-airflow[docker]'``            | Docker hooks and operators                                                         |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| elasticsearch       | ``pip install 'apache-airflow[elasticsearch]'``     | Elasticsearch hooks and Log Handler                                                |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| exasol              | ``pip install 'apache-airflow[exasol]'``            | Exasol hooks and operators                                                         |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| jenkins             | ``pip install 'apache-airflow[jenkins]'``           | Jenkins hooks and operators                                                        |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| cncf.kubernetes     | ``pip install 'apache-airflow[cncf.kubernetes]'``   | Kubernetes Executor and operator                                                   |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| mongo               | ``pip install 'apache-airflow[mongo]'``             | Mongo hooks and operators                                                          |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| microsoft.mssql     | ``pip install 'apache-airflow[microsoft.mssql]'``   | Microsoft SQL Server operators and hook.                                           |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| mysql               | ``pip install 'apache-airflow[mysql]'``             | MySQL operators and hook                                                           |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| odbc                | ``pip install 'apache-airflow[odbc]'``              | ODBC data sources including MS SQL Server                                          |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| openfaas            | ``pip install 'apache-airflow[openfaas]'``          | OpenFaaS hooks                                                                     |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| oracle              | ``pip install 'apache-airflow[oracle]'``            | Oracle hooks and operators                                                         |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| postgres            | ``pip install 'apache-airflow[postgres]'``          | PostgreSQL operators and hook                                                      |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| presto              | ``pip install 'apache-airflow[presto]'``            | All Presto related operators & hooks                                               |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| rabbitmq            | ``pip install 'apache-airflow[rabbitmq]'``          | RabbitMQ support as a Celery backend                                               |           |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| redis               | ``pip install 'apache-airflow[redis]'``             | Redis hooks and sensors                                                            |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| samba               | ``pip install 'apache-airflow[samba]'``             | Samba hooks and operators                                                          |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| singularity         | ``pip install 'apache-airflow[singularity]'``       | Singularity container operator                                                     |     *     |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| statsd              | ``pip install 'apache-airflow[statsd]'``            | Needed by StatsD metrics                                                           |           |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| tableau             | ``pip install 'apache-airflow[tableau]'``           | Tableau visualization integration                                                  |           |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
+| virtualenv          | ``pip install 'apache-airflow[virtualenv]'``        | Running python tasks in local virtualenv                                           |           |
++---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+
 
 
 **Other:**
 
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| subpackage          | install command                                     | enables                                                              |
-+=====================+=====================================================+======================================================================+
-| cgroups             | ``pip install 'apache-airflow[cgroups]'``           | Needed To use CgroupTaskRunner                                       |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| ftp                 | ``pip install 'apache-airflow[ftp]'``               | FTP hooks and operators                                              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| grpc                | ``pip install 'apache-airflow[grpc]'``              | Grpc hooks and operators                                             |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| http                | ``pip install 'apache-airflow[http]'``              | HTTP hooks, operators and sensors                                    |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| imap                | ``pip install 'apache-airflow[imap]'``              | IMAP hooks and sensors                                               |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| jdbc                | ``pip install 'apache-airflow[jdbc]'``              | JDBC hooks and operators                                             |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| kerberos            | ``pip install 'apache-airflow[kerberos]'``          | Kerberos integration for Kerberized Hadoop                           |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| ldap                | ``pip install 'apache-airflow[ldap]'``              | LDAP authentication for users                                        |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| papermill           | ``pip install 'apache-airflow[papermill]'``         | Papermill hooks and operators                                        |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| sftp                | ``pip install 'apache-airflow[sftp]'``              | SFTP hooks, operators and sensors                                    |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| sqlite              | ``pip install 'apache-airflow[sqlite]'``            | SQLite hooks and operators                                           |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| ssh                 | ``pip install 'apache-airflow[ssh]'``               | SSH hooks and operators                                              |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
-| winrm               | ``pip install 'apache-airflow[microsoft.winrm]'``   | WinRM hooks and operators                                            |
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+
+| extra               | install command                                     | enables                                                              | Providers | Preinstalled |
++=====================+=====================================================+======================================================================+===========+==============+
+| cgroups             | ``pip install 'apache-airflow[cgroups]'``           | Needed to use CgroupTaskRunner                                       |           |              |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+
+| ftp                 | ``pip install 'apache-airflow[ftp]'``               | FTP hooks and operators                                              |     *     |      *       |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+
+| grpc                | ``pip install 'apache-airflow[grpc]'``              | Grpc hooks and operators                                             |     *     |              |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+
+| http                | ``pip install 'apache-airflow[http]'``              | HTTP hooks, operators and sensors                                    |     *     |      *       |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+
+| imap                | ``pip install 'apache-airflow[imap]'``              | IMAP hooks and sensors                                               |     *     |      *       |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+
+| jdbc                | ``pip install 'apache-airflow[jdbc]'``              | JDBC hooks and operators                                             |     *     |              |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+
+| kerberos            | ``pip install 'apache-airflow[kerberos]'``          | Kerberos integration for Kerberized Hadoop                           |           |              |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+
+| ldap                | ``pip install 'apache-airflow[ldap]'``              | LDAP authentication for users                                        |           |              |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+
+| papermill           | ``pip install 'apache-airflow[papermill]'``         | Papermill hooks and operators                                        |     *     |              |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+
+| sftp                | ``pip install 'apache-airflow[sftp]'``              | SFTP hooks, operators and sensors                                    |     *     |              |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+
+| sqlite              | ``pip install 'apache-airflow[sqlite]'``            | SQLite hooks and operators                                           |     *     |      *       |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+
+| ssh                 | ``pip install 'apache-airflow[ssh]'``               | SSH hooks and operators                                              |     *     |              |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+
+| microsoft.winrm     | ``pip install 'apache-airflow[microsoft.winrm]'``   | WinRM hooks and operators                                            |     *     |              |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+
 
 
 **Deprecated 1.10 Extras**
 
-Those are the extras that have been deprecated in 2.0 and will be removed
-(current plan is to remove them in 2.1):
+Those are the extras that have been deprecated in 2.0 and will be removed in Airflow 3.0.0. They have
+all been replaced by new extras whose names are consistent with the names of the provider packages.
 
+The ``crypto`` extra is no longer needed, because all crypto dependencies are part of the airflow package,
+so there is no replacement for the ``crypto`` extra.
 
 +---------------------+-----------------------------+
 | Deprecated extra    | Extra to be used instead    |
@@ -251,7 +261,7 @@ Those are the extras that have been deprecated in 2.0 and will be removed
 +---------------------+-----------------------------+
 | cassandra           | apache.cassandra            |
 +---------------------+-----------------------------+
-| crypto              | \- \*                       |
+| crypto              |                             |
 +---------------------+-----------------------------+
 | druid               | apache.druid                |
 +---------------------+-----------------------------+
@@ -273,10 +283,9 @@ Those are the extras that have been deprecated in 2.0 and will be removed
 +---------------------+-----------------------------+
 | s3                  | amazon                      |
 +---------------------+-----------------------------+
+| spark               | apache.spark                |
++---------------------+-----------------------------+
 | webhdfs             | apache.webhdfs              |
 +---------------------+-----------------------------+
 | winrm               | microsoft.winrm             |
 +---------------------+-----------------------------+
-
-
-\* crypto extra is not needed as cryptography is installed by default in Airflow 2.0
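
To make the deprecation mapping concrete: during the 2.* series a command using an old alias still works,
but it should be migrated to the provider-named extra. Taking the spark row of the table above as an
example (the commands follow the same convention as in the reference tables):

    pip install 'apache-airflow[spark]'          # deprecated 1.10 alias
    pip install 'apache-airflow[apache.spark]'   # replacement extra, named after the provider package
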
diff --git a/scripts/ci/pre_commit/pre_commit_check_extras_have_providers.py b/scripts/ci/pre_commit/pre_commit_check_extras_have_providers.py
index a117d1a9..55ad3d6 100755
--- a/scripts/ci/pre_commit/pre_commit_check_extras_have_providers.py
+++ b/scripts/ci/pre_commit/pre_commit_check_extras_have_providers.py
@@ -20,48 +20,67 @@
 import os
 import sys
 from os.path import dirname
+from pathlib import Path
 from typing import List
 
+from rich import print
+
 AIRFLOW_SOURCES_DIR = os.path.abspath(os.path.join(dirname(__file__), os.pardir, os.pardir, os.pardir))
 
 sys.path.insert(0, AIRFLOW_SOURCES_DIR)
 # flake8: noqa: F401
 # pylint: disable=wrong-import-position
-from setup import EXTRAS_PROVIDERS_PACKAGES  # noqa
+from setup import ALL_PROVIDERS  # noqa
 
 sys.path.append(AIRFLOW_SOURCES_DIR)
 
+errors: List[str] = []
+
+PROVIDERS_DIR = os.path.join(AIRFLOW_SOURCES_DIR, "airflow", "providers")
+
+
+def get_provider_directory(provider: str) -> str:
+    """Returns provider directory derived from name"""
+    return os.path.join(PROVIDERS_DIR, *provider.split('.'))
+
 
-def get_provider_directory(provider: str):
-    return os.path.join(AIRFLOW_SOURCES_DIR, "airflow", "providers", *provider.split('.'))
+def check_all_providers_listed_have_directory() -> None:
+    for provider in ALL_PROVIDERS:
+        provider_directory = get_provider_directory(provider)
+        if not os.path.isdir(provider_directory):
+            errors.append(
+                f"The provider {provider} is defined in setup.py: [bold]PROVIDERS_REQUIREMENTS[/] but it "
+                + f"has missing {provider_directory} directory: [red]NOK[/]"
+            )
+            continue
+        if not os.path.exists(os.path.join(provider_directory, "__init__.py")):
+            errors.append(
+                f"The {provider} does not have the __init__.py "
+                + f"file in the {provider_directory} directory [red]NOK[/]"
+            )
+        if not os.path.exists(os.path.join(provider_directory, "provider.yaml")):
+            errors.append(
+                f"The provider {provider} does not have the provider.yaml "
+                + f"in the {provider_directory} directory: [red]NOK[/]"
+            )
 
 
-def check_all_providers() -> List[str]:
-    errors: List[str] = []
-    for extra, providers in EXTRAS_PROVIDERS_PACKAGES.items():
-        for provider in providers:
-            provider_directory = get_provider_directory(provider)
-            if not os.path.isdir(provider_directory):
-                errors.append(
-                    f"The {extra} has provider {provider} that has missing {provider_directory} directory"
-                )
-                continue
-            if not os.path.exists(os.path.join(provider_directory, "__init__.py")):
-                errors.append(
-                    f"The {extra} has provider {provider} that has"
-                    f" missing __init__.py in the {provider_directory} directory"
-                )
-            if not os.path.exists(os.path.join(provider_directory, "README.md")):
-                errors.append(
-                    f"The {extra} has provider {provider} that has"
-                    f" missing README.md in the {provider_directory} directory"
-                )
-    return errors
+def check_all_providers_are_listed_in_setup_py() -> None:
+    for path in Path(PROVIDERS_DIR).rglob('provider.yaml'):
+        provider_name = str(path.parent.relative_to(PROVIDERS_DIR)).replace(os.sep, ".")
+        if provider_name not in ALL_PROVIDERS:
+            errors.append(
+                f"The provider {provider_name} is missing in setup.py "
+                + "[bold]PROVIDERS_REQUIREMENTS[/]: [red]NOK[/]"
+            )
 
 
 if __name__ == '__main__':
-    errors = check_all_providers()
+    check_all_providers_listed_have_directory()
+    check_all_providers_are_listed_in_setup_py()
     if errors:
         for message in errors:
             print(message, file=sys.stderr)
         sys.exit(1)
+    else:
+        print("All providers are correctly defined in setup.py [green]OK[/]")
diff --git a/scripts/ci/pre_commit/pre_commit_check_order_setup.py b/scripts/ci/pre_commit/pre_commit_check_order_setup.py
index ddbf83c..169482e 100755
--- a/scripts/ci/pre_commit/pre_commit_check_order_setup.py
+++ b/scripts/ci/pre_commit/pre_commit_check_order_setup.py
@@ -25,6 +25,8 @@ import sys
 from os.path import abspath, dirname
 from typing import List
 
+from rich import print
+
 errors = []
 
 MY_DIR_PATH = os.path.dirname(__file__)
@@ -33,14 +35,17 @@ sys.path.insert(0, SOURCE_DIR_PATH)
 
 
 def _check_list_sorted(the_list: List[str], message: str) -> None:
+    print(the_list)
     sorted_list = sorted(the_list)
     if the_list == sorted_list:
-        print(f"{message} is ok")
+        print(f"{message} is [green]ok[/]")
+        print()
         return
     i = 0
     while sorted_list[i] == the_list[i]:
         i += 1
-    print(f"{message} NOK")
+    print(f"{message} [red]NOK[/]")
+    print()
     errors.append(
         f"ERROR in {message}. First wrongly sorted element" f" {the_list[i]}. Should be {sorted_list[i]}"
     )
@@ -58,12 +63,13 @@ def check_main_dependent_group(setup_context: str) -> None:
     Test for an order of dependencies groups between mark
     '# Start dependencies group' and '# End dependencies group' in setup.py
     """
+    print("[blue]Checking main dependency group[/]")
     pattern_main_dependent_group = re.compile(
         '# Start dependencies group\n(.*)# End dependencies group', re.DOTALL
     )
     main_dependent_group = pattern_main_dependent_group.findall(setup_context)[0]
 
-    pattern_sub_dependent = re.compile(' = \\[.*?\\]\n', re.DOTALL)
+    pattern_sub_dependent = re.compile(r' = \[.*?]\n', re.DOTALL)
     main_dependent = pattern_sub_dependent.sub(',', main_dependent_group)
 
     src = main_dependent.strip(',').split(',')
@@ -75,19 +81,20 @@ def check_sub_dependent_group(setup_context: str) -> None:
     Test for an order of each dependencies groups declare like
     `^dependent_group_name = [.*?]\n` in setup.py
     """
-    pattern_dependent_group_name = re.compile('^(\\w+) = \\[', re.MULTILINE)
+    pattern_dependent_group_name = re.compile(r'^(\w+) = \[', re.MULTILINE)
     dependent_group_names = pattern_dependent_group_name.findall(setup_context)
 
-    pattern_dependent_version = re.compile('[~|><=;].*')
+    pattern_dependent_version = re.compile(r'[~|><=;].*')
 
     for group_name in dependent_group_names:
-        pattern_sub_dependent = re.compile(f'{group_name} = \\[(.*?)\\]', re.DOTALL)
+        print(f"[blue]Checking dependency group {group_name}[/]")
+        pattern_sub_dependent = re.compile(fr'{group_name} = \[(.*?)]\n', re.DOTALL)
         sub_dependent = pattern_sub_dependent.findall(setup_context)[0]
-        pattern_dependent = re.compile('\'(.*?)\'')
+        pattern_dependent = re.compile(r"'(.*?)'")
         dependent = pattern_dependent.findall(sub_dependent)
 
         src = [pattern_dependent_version.sub('', p) for p in dependent]
-        _check_list_sorted(src, f"Order of sub-dependencies group: {group_name}")
+        _check_list_sorted(src, f"Order of dependency group: {group_name}")
 
 
 def check_alias_dependent_group(setup_context: str) -> None:
@@ -99,89 +106,77 @@ def check_alias_dependent_group(setup_context: str) -> None:
     dependents = pattern.findall(setup_context)
 
     for dependent in dependents:
+        print(f"[blue]Checking alias-dependent group {dependent}[/]")
         src = dependent.split(' + ')
         _check_list_sorted(src, f"Order of alias dependencies group: {dependent}")
 
 
-def check_install_and_setup_requires() -> None:
+def check_provider_requirements(setup_context: str) -> None:
     """
-    Test for an order of dependencies in function do_setup section
-    install_requires and setup_requires in setup.py
+    Test for an order of dependencies in PROVIDERS_REQUIREMENTS in setup.py
     """
-
-    from setuptools.config import read_configuration
-
-    path = abspath(os.path.join(dirname(__file__), os.pardir, os.pardir, os.pardir, 'setup.cfg'))
-    config = read_configuration(path)
-
-    pattern_dependent_version = re.compile('[~|><=;].*')
-
-    for key in ('install_requires', 'setup_requires'):
-        deps = config['options'][key]
-        dists = [pattern_dependent_version.sub('', p) for p in deps]
-        _check_list_sorted(dists, f"Order of dependencies in do_setup section: {key}")
+    print("[blue]Checking providers_requirements[/]")
+    pattern_providers_requirements = re.compile(r'PROVIDERS_REQUIREMENTS: [^{]*\{(.*?)}\n', re.DOTALL)
+    providers_requirements = pattern_providers_requirements.findall(setup_context)[0]
+    pattern_dependent = re.compile("'(.*?)'")
+    src = pattern_dependent.findall(providers_requirements)
+    _check_list_sorted(src, "Order of dependencies in: providers_require")
 
 
 def check_extras_require(setup_context: str) -> None:
     """
-    Test for an order of dependencies in function do_setup section
-    extras_require in setup.py
+    Test for an order of dependencies in EXTRAS_REQUIREMENTS in setup.py
     """
-    pattern_extras_requires = re.compile(r'EXTRAS_REQUIREMENTS: Dict\[str, List\[str\]] = {(.*?)}', re.DOTALL)
+    print("[blue]Checking extras_requirements[/]")
+    pattern_extras_requires = re.compile(r'EXTRAS_REQUIREMENTS: [^{]*{(.*?)}\n', re.DOTALL)
     extras_requires = pattern_extras_requires.findall(setup_context)[0]
-
-    pattern_dependent = re.compile('\'(.*?)\'')
+    pattern_dependent = re.compile(r"'(.*?)'")
     src = pattern_dependent.findall(extras_requires)
     _check_list_sorted(src, "Order of dependencies in: extras_require")
 
 
-def check_provider_requirements(setup_context: str) -> None:
+def check_extras_deprecated_aliases(setup_context: str) -> None:
     """
-    Test for an order of dependencies in function do_setup section
-    providers_require in setup.py
+    Test for an order of dependencies in EXTRAS_DEPRECATED_ALIASES in setup.py
     """
-    pattern_extras_providers_packages = re.compile(
-        r'PROVIDERS_REQUIREMENTS: Dict\[str, Iterable\[str\]\] = {(.*?)}', re.DOTALL
-    )
-    extras_requires = pattern_extras_providers_packages.findall(setup_context)[0]
+    print("[blue]Checking extras deprecated aliases[/]")
+    pattern_extras_deprecated_aliases = re.compile(r'EXTRAS_DEPRECATED_ALIASES: [^{]*{(.*?)}\n', re.DOTALL)
+    extras_deprecated_aliases = pattern_extras_deprecated_aliases.findall(setup_context)[0]
+    pattern_dependent = re.compile("'(.*?)',")
+    src = pattern_dependent.findall(extras_deprecated_aliases)
+    _check_list_sorted(src, "Order of dependencies in: extras_deprecated_aliases")
 
-    pattern_dependent = re.compile('"(.*?)"')
-    src = pattern_dependent.findall(extras_requires)
-    _check_list_sorted(src, "Order of dependencies in: providers_require")
+
+def check_preinstalled_providers(setup_context: str) -> None:
+    """
+    Test for an order of providers in PREINSTALLED_PROVIDERS in setup.py
+    """
+    print("[blue]Checking preinstalled providers[/]")
+    pattern_preinstalled_providers = re.compile(r'PREINSTALLED_PROVIDERS = \[(.*?)]\n', re.DOTALL)
+    preinstalled_providers = pattern_preinstalled_providers.findall(setup_context)[0]
+    pattern_dependent = re.compile("'(.*?)',")
+    src = pattern_dependent.findall(preinstalled_providers)
+    _check_list_sorted(src, "Order of dependencies in: preinstalled_providers")
 
 
-def check_extras_provider_packages(setup_context: str) -> None:
+def check_install_and_setup_requires() -> None:
     """
     Test for an order of dependencies in function do_setup section
-    providers_require in setup.py
+    install_requires and setup_requires in setup.cfg
     """
-    pattern_extras_requires = re.compile(
-        r'EXTRAS_PROVIDERS_PACKAGES: Dict\[str, Iterable\[str\]\] = {(.*?)}', re.DOTALL
-    )
-    extras_requires = pattern_extras_requires.findall(setup_context)[0]
-
-    pattern_dependent = re.compile('"(.*?)":')
-    src = pattern_dependent.findall(extras_requires)
-    _check_list_sorted(src, "Order of dependencies in: extras_provider_packages")
 
+    from setuptools.config import read_configuration
 
-def checks_extra_with_providers_exist() -> None:
+    path = abspath(os.path.join(dirname(__file__), os.pardir, os.pardir, os.pardir, 'setup.cfg'))
+    config = read_configuration(path)
 
-    from setup import EXTRAS_REQUIREMENTS, EXTRAS_PROVIDERS_PACKAGES  # noqa # isort:skip
+    pattern_dependent_version = re.compile('[~|><=;].*')
 
-    message = 'Check if all extras have providers defined in: EXTRAS_PROVIDERS_PACKAGES'
-    local_error = False
-    for key in EXTRAS_REQUIREMENTS.keys():  # noqa
-        if key not in EXTRAS_PROVIDERS_PACKAGES.keys():  # noqa
-            if not local_error:
-                local_error = True
-                print(f"Extra {key} NOK")
-            errors.append(
-                f"ERROR in {message}. The {key} extras is missing there."
-                " If you do not want to install any providers with this extra set it to []"
-            )
-    if not local_error:
-        print(f"{message} is ok")
+    for key in ('install_requires', 'setup_requires'):
+        print(f"[blue]Checking setup.cfg group {key}[/]")
+        deps = config['options'][key]
+        dists = [pattern_dependent_version.sub('', p) for p in deps]
+        _check_list_sorted(dists, f"Order of dependencies in do_setup section: {key}")
 
 
 if __name__ == '__main__':
@@ -189,11 +184,11 @@ if __name__ == '__main__':
     check_main_dependent_group(setup_context_main)
     check_alias_dependent_group(setup_context_main)
     check_sub_dependent_group(setup_context_main)
-    check_install_and_setup_requires()
-    check_extras_require(setup_context_main)
     check_provider_requirements(setup_context_main)
-    check_extras_provider_packages(setup_context_main)
-    checks_extra_with_providers_exist()
+    check_extras_require(setup_context_main)
+    check_extras_deprecated_aliases(setup_context_main)
+    check_preinstalled_providers(setup_context_main)
+    check_install_and_setup_requires()
 
     print()
     print()
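
To illustrate what the new PROVIDERS_REQUIREMENTS ordering check actually matches, here is a small
self-contained sketch (not part of the commit) that applies the same two regular expressions used by
check_provider_requirements() to a hand-written snippet and verifies the key order:

    import re

    # Hand-written sample mimicking the shape of PROVIDERS_REQUIREMENTS in setup.py.
    sample = (
        "PROVIDERS_REQUIREMENTS: Dict[str, List[str]] = {\n"
        "    'amazon': amazon,\n"
        "    'apache.cassandra': cassandra,\n"
        "}\n"
    )
    # First isolate the dict body, then collect the quoted keys, then compare against the sorted order.
    body = re.compile(r'PROVIDERS_REQUIREMENTS: [^{]*\{(.*?)}\n', re.DOTALL).findall(sample)[0]
    keys = re.compile(r"'(.*?)'").findall(body)
    assert keys == sorted(keys), f"wrongly sorted: {keys}"
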
diff --git a/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py b/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py
index 1878bf1..e9a9410 100755
--- a/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py
+++ b/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py
@@ -26,15 +26,26 @@ import sys
 from os.path import dirname
 from typing import Dict, List
 
-from rich import print as rprint
+from rich import print
 from rich.console import Console
 from rich.table import Table
 
 AIRFLOW_SOURCES_DIR = os.path.join(dirname(__file__), os.pardir, os.pardir, os.pardir)
 SETUP_PY_FILE = 'setup.py'
-DOCS_FILE = 'extra-packages-ref.rst'
+DOCS_FILE = os.path.join('docs', 'apache-airflow', 'extra-packages-ref.rst')
 PY_IDENTIFIER = r'[a-zA-Z_][a-zA-Z0-9_\.]*'
 
+sys.path.insert(0, AIRFLOW_SOURCES_DIR)
+
+from setup import (  # noqa # isort:skip
+    add_all_provider_packages,
+    EXTRAS_DEPRECATED_ALIASES,
+    EXTRAS_REQUIREMENTS,
+    EXTRAS_WITH_PROVIDERS,
+    PROVIDERS_REQUIREMENTS,
+    PREINSTALLED_PROVIDERS,
+)
+
 
 def get_file_content(*path_elements: str) -> str:
     file_path = os.path.join(AIRFLOW_SOURCES_DIR, *path_elements)
@@ -42,92 +53,220 @@ def get_file_content(*path_elements: str) -> str:
         return file_to_read.read()
 
 
-def get_extras_from_setup() -> Dict[str, List[str]]:
+def get_extras_from_setup() -> Dict[str, str]:
     """
-    Returns an array EXTRAS_REQUIREMENTS with aliases from setup.py file in format:
-    {'package name': ['alias1', 'alias2'], ...}
+    Returns a dict of regular extras from setup.py (value is '' for a non-provider extra and '*' for a
+    provider extra).
     """
-    setup_content = get_file_content(SETUP_PY_FILE)
+    all_regular_extras = set(EXTRAS_REQUIREMENTS.keys()) - set(EXTRAS_DEPRECATED_ALIASES.keys())
+    setup_extra_dict = {}
+    for setup_regular_extra in all_regular_extras:
+        setup_extra_dict[setup_regular_extra] = '*' if setup_regular_extra in EXTRAS_WITH_PROVIDERS else ''
+    return setup_extra_dict
 
-    extras_section_regex = re.compile(r'^EXTRAS_REQUIREMENTS: Dict[^{]+{([^}]+)}', re.MULTILINE)
-    extras_section = extras_section_regex.findall(setup_content)[0]
 
-    extras_regex = re.compile(
-        rf'^\s*[\"\']({PY_IDENTIFIER})[\"\']:\s*({PY_IDENTIFIER}|\[\])[^#\n]*(#\s*.*)?$', re.MULTILINE
+def get_regular_extras_from_docs() -> Dict[str, str]:
+    """
+    Returns a dict of regular extras from the docs (value is '' for a non-provider extra and '*' for a
+    provider extra).
+    """
+    docs_content = get_file_content(DOCS_FILE)
+    extras_section_regex = re.compile(
+        rf'\|[^|]+\|.*pip install .apache-airflow\[({PY_IDENTIFIER})][^|]+\|[^|]+\|\s+(\*?)\s+\|',
+        re.MULTILINE,
     )
+    doc_extra_dict = {}
+    for doc_regular_extra in extras_section_regex.findall(docs_content):
+        doc_extra_dict[doc_regular_extra[0]] = doc_regular_extra[1]
+    return doc_extra_dict
+
 
-    extras_dict: Dict[str, List[str]] = {}
-    for extras in extras_regex.findall(extras_section):
-        package = extras[1]
-        alias = extras[0]
-        if alias == 'crypto':
-            # Skip crypto package - it is there just for backwards compatibility
-            continue
-        # if there are no packages, use the extras alias itself
-        if package == '[]':
-            package = alias
-        if not extras_dict.get(package):
-            extras_dict[package] = []
-        extras_dict[package].append(alias)
-
-    updates_sections_regex = re.compile(r"^EXTRAS_REQUIREMENTS\.update[^{]+{([^}]+)}", re.MULTILINE)
-    updates_sections = updates_sections_regex.findall(setup_content)
-    for update in updates_sections:
-        for extra in extras_regex.findall(update):
-            package = extra[0]
-            if not extras_dict.get(package):
-                extras_dict[package] = [extra[0]]
-
-    return extras_dict
-
-
-def get_extras_from_docs() -> List[str]:
+def get_preinstalled_providers_from_docs() -> List[str]:
     """
-    Returns an array of install packages names from installation.rst.
+    Returns list of pre-installed providers from the doc.
     """
-    docs_content = get_file_content('docs', 'apache-airflow', DOCS_FILE)
+    docs_content = get_file_content(DOCS_FILE)
+    preinstalled_section_regex = re.compile(
+        rf'\|\s*({PY_IDENTIFIER})\s*\|[^|]+pip install[^|]+\|[^|]+\|[^|]+\|\s+\*\s+\|$',
+        re.MULTILINE,
+    )
+    return preinstalled_section_regex.findall(docs_content)
 
-    extras_section_regex = re.compile(
-        rf'^\|[^|]+\|.*pip install .apache-airflow\[({PY_IDENTIFIER})\].', re.MULTILINE
+
+def get_deprecated_extras_from_docs() -> Dict[str, str]:
+    """
+    Returns dict of deprecated extras from docs (alias -> target extra)
+    """
+    deprecated_extras = {}
+    docs_content = get_file_content(DOCS_FILE)
+
+    deprecated_extras_section_regex = re.compile(
+        r'\| Deprecated extra    \| Extra to be used instead    \|\n(.*)\n', re.DOTALL  # noqa
     )
-    extras = extras_section_regex.findall(docs_content)
+    deprecated_extras_content = deprecated_extras_section_regex.findall(docs_content)[0]
 
-    return extras
+    deprecated_extras_regexp = re.compile(r'\|\s(\S+)\s+\|\s(\S*)\s+\|$', re.MULTILINE)
+    for extras in deprecated_extras_regexp.findall(deprecated_extras_content):
+        deprecated_extras[extras[0]] = extras[1]
+    return deprecated_extras
 
 
-if __name__ == '__main__':
-    setup_packages = get_extras_from_setup()
-    docs_packages = get_extras_from_docs()
+def check_regular_extras(console: Console) -> bool:
+    """
+    Checks if the regular extras in setup.py match those in the docs.
+    :param console: console used to print the error table
+    :return: True if all ok, False otherwise
+    """
+    regular_extras_table = Table()
+    regular_extras_table.add_column("NAME", justify="right", style="cyan")
+    regular_extras_table.add_column("SETUP", justify="center", style="magenta")
+    regular_extras_table.add_column("SETUP_PROVIDER", justify="center", style="magenta")
+    regular_extras_table.add_column("DOCS", justify="center", style="yellow")
+    regular_extras_table.add_column("DOCS_PROVIDER", justify="center", style="yellow")
+    regular_setup_extras = get_extras_from_setup()
+    regular_docs_extras = get_regular_extras_from_docs()
+    for extra in regular_setup_extras.keys():
+        if extra not in regular_docs_extras:
+            regular_extras_table.add_row(extra, "V", regular_setup_extras[extra], "", "")
+        elif regular_docs_extras[extra] != regular_setup_extras[extra]:
+            regular_extras_table.add_row(
+                extra, "V", regular_setup_extras[extra], "V", regular_docs_extras[extra]
+            )
+    for extra in regular_docs_extras.keys():
+        if extra not in regular_setup_extras:
+            regular_extras_table.add_row(extra, "", "", "V", regular_docs_extras[extra])
+    if regular_extras_table.row_count != 0:
+        print(
+            f"""\
+[red bold]ERROR!![/red bold]
 
-    table = Table()
-    table.add_column("NAME", justify="right", style="cyan")
-    table.add_column("SETUP", justify="center", style="magenta")
-    table.add_column("INSTALLATION", justify="center", style="green")
+The "[bold]EXTRAS_REQUIREMENTS[/bold]" and "[bold]PROVIDERS_REQUIREMENTS[/bold]"
+sections in the setup file: [bold yellow]{SETUP_PY_FILE}[/bold yellow]
+should be synchronized with the "Extra Packages Reference"
+in the documentation file: [bold yellow]{DOCS_FILE}[/bold yellow].
 
-    for extras in sorted(setup_packages.keys()):
-        if not set(setup_packages[extras]).intersection(docs_packages):
-            table.add_row(extras, "V", "")
+Below is the list of extras that:
 
-    setup_packages_str = str(setup_packages)
-    for extras in sorted(docs_packages):
-        if f"'{extras}'" not in setup_packages_str:
-            table.add_row(extras, "", "V")
+  * are used but are not documented,
+  * are documented but not used,
+  * or have a different provider flag in the documentation/setup file.
 
-    if table.row_count == 0:
-        sys.exit(0)
+[bold]Please synchronize setup/documentation files![/bold]
+
+"""
+        )
+        console.print(regular_extras_table)
+        return False
+    return True
+
+
+def check_deprecated_extras(console: Console) -> bool:
+    """
+    Checks if the deprecated extras in setup.py match those in the docs.
+    :param console: console used to print the error table
+    :return: True if all ok, False otherwise
+    """
+    deprecated_setup_extras = EXTRAS_DEPRECATED_ALIASES
+    deprecated_docs_extras = get_deprecated_extras_from_docs()
+
+    deprecated_extras_table = Table()
+    deprecated_extras_table.add_column("DEPRECATED_IN_SETUP", justify="right", style="cyan")
+    deprecated_extras_table.add_column("TARGET_IN_SETUP", justify="center", style="magenta")
+    deprecated_extras_table.add_column("DEPRECATED_IN_DOCS", justify="right", style="cyan")
+    deprecated_extras_table.add_column("TARGET_IN_DOCS", justify="center", style="magenta")
+
+    for extra in deprecated_setup_extras.keys():
+        if extra not in deprecated_docs_extras:
+            deprecated_extras_table.add_row(extra, deprecated_setup_extras[extra], "", "")
+        elif deprecated_docs_extras[extra] != deprecated_setup_extras[extra]:
+            deprecated_extras_table.add_row(
+                extra, deprecated_setup_extras[extra], extra, deprecated_docs_extras[extra]
+            )
 
-    rprint(
-        f"""\
+    for extra in deprecated_docs_extras.keys():
+        if extra not in deprecated_setup_extras:
+            deprecated_extras_table.add_row("", "", extra, deprecated_docs_extras[extra])
+
+    if deprecated_extras_table.row_count != 0:
+        print(
+            f"""\
 [red bold]ERROR!![/red bold]
 
-"EXTRAS_REQUIREMENTS" section in [bold yellow]{SETUP_PY_FILE}[/bold yellow] should be synchronized
-with "Extra Packages" section in documentation file [bold yellow]doc/{DOCS_FILE}[/bold yellow].
+The "[bold]EXTRAS_DEPRECATED_ALIASES[/bold]" section in the setup file:\
+[bold yellow]{SETUP_PY_FILE}[/bold yellow]
+should be synchronized with the "Extra Packages Reference"
+in the documentation file: [bold yellow]{DOCS_FILE}[/bold yellow].
+
+Below is the list of deprecated extras that:
+
+  * are used but are not documented,
+  * are documented but not used,
+  * or have a different target extra specified in the documentation or setup.
+
+[bold]Please synchronize setup/documentation files![/bold]
+
+"""
+        )
+        console.print(deprecated_extras_table)
+        return False
+    return True
+
 
-Here is a list of packages that are used but are not documented, or
-documented although not used.
+def check_preinstalled_extras(console: Console) -> bool:
     """
-    )
-    console = Console()
-    console.print(table)
+    Checks if the preinstalled providers in setup.py match those in the docs.
+    :param console: console used to print the error table
+    :return: True if all ok, False otherwise
+    """
+    preinstalled_providers_from_docs = get_preinstalled_providers_from_docs()
+    preinstalled_providers_from_setup = PREINSTALLED_PROVIDERS
+
+    preinstalled_providers_table = Table()
+    preinstalled_providers_table.add_column("PREINSTALLED_IN_SETUP", justify="right", style="cyan")
+    preinstalled_providers_table.add_column("PREINSTALLED_IN_DOCS", justify="center", style="magenta")
+
+    for provider in preinstalled_providers_from_setup:
+        if provider not in preinstalled_providers_from_docs:
+            preinstalled_providers_table.add_row(provider, "")
+
+    for provider in preinstalled_providers_from_docs:
+        if provider not in preinstalled_providers_from_setup:
+            preinstalled_providers_table.add_row("", provider)
 
+    if preinstalled_providers_table.row_count != 0:
+        print(
+            f"""\
+[red bold]ERROR!![/red bold]
+
+The "[bold]PREINSTALLED_PROVIDERS[/bold]" section in the setup file:\
+[bold yellow]{SETUP_PY_FILE}[/bold yellow]
+should be synchronized with the "Extra Packages Reference"
+in the documentation file: [bold yellow]{DOCS_FILE}[/bold yellow].
+
+Below is the list of preinstalled providers that:
+  * are used but are not documented,
+  * or are documented but not used.
+
+[bold]Please synchronize setup/documentation files![/bold]
+
+"""
+        )
+        console.print(preinstalled_providers_table)
+        return False
+    return True
+
+
+if __name__ == '__main__':
+    status: List[bool] = []
+    # force adding all provider package dependencies, to check providers status
+    add_all_provider_packages()
+
+    main_console = Console()
+    status.append(check_regular_extras(main_console))
+    status.append(check_deprecated_extras(main_console))
+    status.append(check_preinstalled_extras(main_console))
+
+    if all(status):
+        print("All extras are synchronized: [green]OK[/]")
+        sys.exit(0)
     sys.exit(1)
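
As a quick sanity check of the new table-driven comparison, the documentation-parsing pattern used by
get_regular_extras_from_docs() can be exercised on a single table row; a minimal standalone sketch (the
sample row mirrors the celery row of the reference table) prints the captured extra name and provider flag:

    import re

    PY_IDENTIFIER = r'[a-zA-Z_][a-zA-Z0-9_\.]*'
    row = (
        "| celery          | ``pip install 'apache-airflow[celery]'``  "
        "| CeleryExecutor          |     *     |"
    )
    pattern = re.compile(
        rf'\|[^|]+\|.*pip install .apache-airflow\[({PY_IDENTIFIER})][^|]+\|[^|]+\|\s+(\*?)\s+\|',
        re.MULTILINE,
    )
    print(pattern.findall(row))  # [('celery', '*')]
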
diff --git a/setup.cfg b/setup.cfg
index c6905d1..5db33a4 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -80,10 +80,6 @@ setup_requires =
 #####################################################################################################
 install_requires =
     alembic>=1.2, <2.0
-    apache-airflow-providers-ftp
-    apache-airflow-providers-http
-    apache-airflow-providers-imap
-    apache-airflow-providers-sqlite
     argcomplete~=1.10
     attrs>=20.0, <21.0
     cached_property~=1.5
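
The four provider distributions dropped from install_requires above are exactly the ones flagged as
"Preinstalled" in the documentation table earlier in this diff; they are presumably now derived from
PREINSTALLED_PROVIDERS in setup.py rather than hard-coded in setup.cfg. A rough sketch of the idea
(illustrative only, not the commit's actual code; the dot-to-dash name mangling is an assumption):

    # Map short provider names to distribution names, e.g. 'ftp' -> 'apache-airflow-providers-ftp'.
    PREINSTALLED_PROVIDERS = ['ftp', 'http', 'imap', 'sqlite']

    def provider_distribution(provider: str) -> str:
        # A dotted name such as 'cncf.kubernetes' would become 'cncf-kubernetes' (assumed convention).
        return 'apache-airflow-providers-' + provider.replace('.', '-')

    print([provider_distribution(p) for p in PREINSTALLED_PROVIDERS])
    # ['apache-airflow-providers-ftp', 'apache-airflow-providers-http',
    #  'apache-airflow-providers-imap', 'apache-airflow-providers-sqlite']
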
diff --git a/setup.py b/setup.py
index e6cad74..d5fcdb9 100644
--- a/setup.py
+++ b/setup.py
@@ -20,11 +20,10 @@
 import logging
 import os
 import subprocess
-import sys
 import unittest
 from os.path import dirname
 from textwrap import wrap
-from typing import Dict, Iterable, List
+from typing import Dict, List, Set, Tuple
 
 from setuptools import Command, Distribution, find_namespace_packages, setup
 
@@ -32,8 +31,6 @@ logger = logging.getLogger(__name__)
 
 version = '2.0.0'
 
-PY3 = sys.version_info[0] == 3
-
 my_dir = dirname(__file__)
 
 
@@ -215,15 +212,15 @@ datadog = [
 ]
 doc = [
     'sphinx>=2.1.2',
+    f'sphinx-airflow-theme{_SPHINX_AIRFLOW_THEME_URL}',
     'sphinx-argparse>=0.1.13',
     'sphinx-autoapi==1.0.0',
     'sphinx-copybutton',
     'sphinx-jinja~=1.1',
     'sphinx-rtd-theme>=0.1.6',
     'sphinxcontrib-httpdomain>=1.7.0',
-    "sphinxcontrib-redoc>=1.6.0",
-    "sphinxcontrib-spelling==5.2.1",
-    f"sphinx-airflow-theme{_SPHINX_AIRFLOW_THEME_URL}",
+    'sphinxcontrib-redoc>=1.6.0',
+    'sphinxcontrib-spelling==5.2.1',
 ]
 docker = [
     'docker~=3.0',
@@ -340,8 +337,8 @@ pagerduty = [
     'pdpyras>=4.1.2,<5',
 ]
 papermill = [
-    'papermill[all]>=1.2.1',
     'nteract-scrapbook[all]>=0.3.1',
+    'papermill[all]>=1.2.1',
 ]
 password = [
     'bcrypt>=2.0.0',
@@ -440,22 +437,6 @@ zendesk = [
 ]
 # End dependencies group
 
-all_dbs = (
-    cassandra
-    + cloudant
-    + druid
-    + exasol
-    + hdfs
-    + hive
-    + mongo
-    + mssql
-    + mysql
-    + pinot
-    + postgres
-    + presto
-    + vertica
-)
-
 ############################################################################################################
 # IMPORTANT NOTE!!!!!!!!!!!!!!!
 # IF you are removing dependencies from this list, please make sure that you also increase
@@ -481,6 +462,7 @@ devel = [
     'jira',
     'mongomock',
     'moto',
+    'mypy==0.770',
     'parameterized',
     'paramiko',
     'pipdeptree',
@@ -507,15 +489,9 @@ devel = [
 # DEPENDENCIES_EPOCH_NUMBER in the Dockerfile.ci
 ############################################################################################################
 
-if PY3:
-    devel += ['mypy==0.770']
-else:
-    devel += ['unittest2']
-
 devel_minreq = cgroups + devel + doc + kubernetes + mysql + password
 devel_hadoop = devel_minreq + hdfs + hive + kerberos + presto + webhdfs
 
-
 ############################################################################################################
 # IMPORTANT NOTE!!!!!!!!!!!!!!!
 # If you have a 'pip check' problem with dependencies, it might be because some dependency has been
@@ -526,133 +502,44 @@ devel_hadoop = devel_minreq + hdfs + hive + kerberos + presto + webhdfs
 # This should be done with appropriate comment explaining why the requirement was added.
 ############################################################################################################
 
-# Those are requirements that each provider package has
-PROVIDERS_REQUIREMENTS: Dict[str, Iterable[str]] = {
-    "amazon": amazon,
-    "apache.cassandra": cassandra,
-    "apache.druid": druid,
-    "apache.hdfs": hdfs,
-    "apache.hive": hive,
-    "apache.kylin": kylin,
-    "apache.livy": [],
-    "apache.pig": [],
-    "apache.pinot": pinot,
-    "apache.spark": spark,
-    "apache.sqoop": [],
-    "celery": celery,
-    "cloudant": cloudant,
-    "cncf.kubernetes": kubernetes,
-    "databricks": databricks,
-    "datadog": datadog,
-    "dingding": [],
-    "discord": [],
-    "docker": docker,
-    "elasticsearch": elasticsearch,
-    "exasol": exasol,
-    "facebook": facebook,
-    "ftp": [],
-    "google": google,
-    "grpc": grpc,
-    "hashicorp": hashicorp,
-    "http": [],
-    "imap": [],
-    "jdbc": jdbc,
-    "jenkins": jenkins,
-    "jira": jira,
-    "microsoft.azure": azure,
-    "microsoft.mssql": mssql,
-    "microsoft.winrm": winrm,
-    "mongo": mongo,
-    "mysql": mysql,
-    "odbc": odbc,
-    "openfaas": [],
-    "opsgenie": [],
-    "oracle": oracle,
-    "pagerduty": pagerduty,
-    "papermill": papermill,
-    "plexus": plexus,
-    "postgres": postgres,
-    "presto": presto,
-    "qubole": qubole,
-    "redis": redis,
-    "salesforce": salesforce,
-    "samba": samba,
-    "segment": segment,
-    "sendgrid": sendgrid,
-    "sftp": ssh,
-    "singularity": singularity,
-    "slack": slack,
-    "snowflake": snowflake,
-    "sqlite": [],
-    "ssh": ssh,
-    "telegram": telegram,
-    "vertica": vertica,
-    "yandex": yandex,
-    "zendesk": zendesk,
-}
 
-# Those are requirements that each extra has. For extras that match the providers
-# the requirements are identical as in the list above, but we have still a few aliases
-# that have different set of requirements.
-EXTRAS_REQUIREMENTS: Dict[str, List[str]] = {
-    'all_dbs': all_dbs,
+# Dict of all providers that are part of the Apache Airflow repository, together with their requirements
+PROVIDERS_REQUIREMENTS: Dict[str, List[str]] = {
     'amazon': amazon,
-    'apache.atlas': atlas,
-    'apache.beam': apache_beam,
-    "apache.cassandra": cassandra,
-    "apache.druid": druid,
-    "apache.hdfs": hdfs,
-    "apache.hive": hive,
-    "apache.kylin": kylin,
-    "apache.livy": [],
-    "apache.pig": [],
-    "apache.pinot": pinot,
-    "apache.spark": spark,
-    "apache.sqoop": [],
-    "apache.webhdfs": webhdfs,
-    'async': async_packages,
-    'atlas': atlas,  # TODO: remove this in Airflow 3.0
-    'aws': amazon,  # TODO: remove this in Airflow 3.0
-    'azure': azure,  # TODO: remove this in Airflow 3.0
-    'cassandra': cassandra,  # TODO: remove this in Airflow 3.0
+    'apache.cassandra': cassandra,
+    'apache.druid': druid,
+    'apache.hdfs': hdfs,
+    'apache.hive': hive,
+    'apache.kylin': kylin,
+    'apache.livy': [],
+    'apache.pig': [],
+    'apache.pinot': pinot,
+    'apache.spark': spark,
+    'apache.sqoop': [],
     'celery': celery,
-    'cgroups': cgroups,
     'cloudant': cloudant,
     'cncf.kubernetes': kubernetes,
-    'crypto': [],  # TODO: remove this in Airflow 3.0
-    'dask': dask,
     'databricks': databricks,
     'datadog': datadog,
     'dingding': [],
     'discord': [],
     'docker': docker,
-    'druid': druid,  # TODO: remove this in Airflow 3.0
     'elasticsearch': elasticsearch,
     'exasol': exasol,
     'facebook': facebook,
     'ftp': [],
-    'gcp': google,  # TODO: remove this in Airflow 3.0
-    'gcp_api': google,  # TODO: remove this in Airflow 3.0
-    'github_enterprise': flask_oauth,
     'google': google,
-    'google_auth': flask_oauth,
     'grpc': grpc,
     'hashicorp': hashicorp,
-    'hdfs': hdfs,  # TODO: remove this in Airflow 3.0
-    'hive': hive,  # TODO: remove this in Airflow 3.0
     'http': [],
     'imap': [],
     'jdbc': jdbc,
-    'jenkins': [],
+    'jenkins': jenkins,
     'jira': jira,
-    'kerberos': kerberos,
-    'kubernetes': kubernetes,  # TODO: remove this in Airflow 3.0
-    'ldap': ldap,
-    "microsoft.azure": azure,
-    "microsoft.mssql": mssql,
-    "microsoft.winrm": winrm,
+    'microsoft.azure': azure,
+    'microsoft.mssql': mssql,
+    'microsoft.winrm': winrm,
     'mongo': mongo,
-    'mssql': mssql,  # TODO: remove this in Airflow 3.0
     'mysql': mysql,
     'odbc': odbc,
     'openfaas': [],
@@ -660,196 +547,157 @@ EXTRAS_REQUIREMENTS: Dict[str, List[str]] = {
     'oracle': oracle,
     'pagerduty': pagerduty,
     'papermill': papermill,
-    'password': password,
-    'pinot': pinot,  # TODO: remove this in Airflow 3.0
     'plexus': plexus,
     'postgres': postgres,
     'presto': presto,
-    'qds': qubole,  # TODO: remove this in Airflow 3.0
     'qubole': qubole,
-    'rabbitmq': rabbitmq,
     'redis': redis,
-    's3': amazon,  # TODO: remove this in Airflow 3.0
     'salesforce': salesforce,
     'samba': samba,
     'segment': segment,
     'sendgrid': sendgrid,
-    'sentry': sentry,
-    'sftp': [],
+    'sftp': ssh,
     'singularity': singularity,
     'slack': slack,
     'snowflake': snowflake,
-    'spark': spark,
     'sqlite': [],
     'ssh': ssh,
-    'statsd': statsd,
-    'tableau': tableau,
     'telegram': telegram,
     'vertica': vertica,
-    'virtualenv': virtualenv,
-    'webhdfs': webhdfs,  # TODO: remove this in Airflow 3.0
-    'winrm': winrm,  # TODO: remove this in Airflow 3.0
     'yandex': yandex,
-    'zendesk': [],
+    'zendesk': zendesk,
 }
 
-# Those are airflow providers added for the extras in many cases extra = provider
-# But for aliases and some special aliases (like all_dbs) the list might be longer.
-EXTRAS_PROVIDERS_PACKAGES: Dict[str, Iterable[str]] = {
-    'all': list(PROVIDERS_REQUIREMENTS.keys()),
-    # this is not 100% accurate with devel_ci and devel_all definition, but we really want
-    # to have all providers when devel_ci extra is installed!
-    'devel_ci': list(PROVIDERS_REQUIREMENTS.keys()),
-    'devel_all': list(PROVIDERS_REQUIREMENTS.keys()),
-    'all_dbs': [
-        "apache.cassandra",
-        "apache.druid",
-        "apache.hdfs",
-        "apache.hive",
-        "apache.pinot",
-        "cloudant",
-        "exasol",
-        "mongo",
-        "microsoft.mssql",
-        "mysql",
-        "postgres",
-        "presto",
-        "vertica",
-    ],
-    'amazon': ["amazon"],
-    'apache.atlas': [],
-    'apache.beam': [],
-    "apache.cassandra": ["apache.cassandra"],
-    "apache.druid": ["apache.druid"],
-    "apache.hdfs": ["apache.hdfs"],
-    "apache.hive": ["apache.hive"],
-    "apache.kylin": ["apache.kylin"],
-    "apache.livy": ["apache.livy"],
-    "apache.pig": ["apache.pig"],
-    "apache.pinot": ["apache.pinot"],
-    "apache.spark": ["apache.spark"],
-    "apache.sqoop": ["apache.sqoop"],
-    "apache.webhdfs": ["apache.hdfs"],
-    'async': [],
-    'atlas': [],  # TODO: remove this in Airflow 3.0
-    'aws': ["amazon"],  # TODO: remove this in Airflow 3.0
-    'azure': ["microsoft.azure"],  # TODO: remove this in Airflow 3.0
-    'cassandra': ["apache.cassandra"],  # TODO: remove this in Airflow 3.0
-    'celery': ["celery"],
-    'cgroups': [],
-    'cloudant': ["cloudant"],
-    'cncf.kubernetes': ["cncf.kubernetes"],
-    'crypto': [],  # TODO: remove this in Airflow 3.0
-    'dask': [],
-    'databricks': ["databricks"],
-    'datadog': ["datadog"],
-    'devel': ["cncf.kubernetes", "mysql"],
-    'devel_hadoop': ["apache.hdfs", "apache.hive", "presto"],
-    'dingding': ["dingding"],
-    'discord': ["discord"],
-    'doc': [],
-    'docker': ["docker"],
-    'druid': ["apache.druid"],  # TODO: remove this in Airflow 3.0
-    'elasticsearch': ["elasticsearch"],
-    'exasol': ["exasol"],
-    'facebook': ["facebook"],
-    'ftp': ["ftp"],
-    'gcp': ["google"],  # TODO: remove this in Airflow 3.0
-    'gcp_api': ["google"],  # TODO: remove this in Airflow 3.0
-    'github_enterprise': [],
-    'google': ["google"],
-    'google_auth': [],
-    'grpc': ["grpc"],
-    'hashicorp': ["hashicorp"],
-    'hdfs': ["apache.hdfs"],  # TODO: remove this in Airflow 3.0
-    'hive': ["apache.hive"],  # TODO: remove this in Airflow 3.0
-    'http': ["http"],
-    'imap': ["imap"],
-    'jdbc': ["jdbc"],
-    'jenkins': ["jenkins"],
-    'jira': ["jira"],
-    'kerberos': [],
-    'kubernetes': ["cncf.kubernetes"],  # TODO: remove this in Airflow 3.0
-    'ldap': [],
-    "microsoft.azure": ["microsoft.azure"],
-    "microsoft.mssql": ["microsoft.mssql"],
-    "microsoft.winrm": ["microsoft.winrm"],
-    'mongo': ["mongo"],
-    'mssql': ["microsoft.mssql"],  # TODO: remove this in Airflow 3.0
-    'mysql': ["mysql"],
-    'odbc': ["odbc"],
-    'openfaas': ["openfaas"],
-    'opsgenie': ["opsgenie"],
-    'oracle': ["oracle"],
-    'pagerduty': ["pagerduty"],
-    'papermill': ["papermill"],
-    'password': [],
-    'pinot': ["apache.pinot"],  # TODO: remove this in Airflow 3.0
-    'plexus': ["plexus"],
-    'postgres': ["postgres"],
-    'presto': ["presto"],
-    'qds': ["qubole"],  # TODO: remove this in Airflow 3.0
-    'qubole': ["qubole"],
-    'rabbitmq': [],
-    'redis': ["redis"],
-    's3': ["amazon"],  # TODO: remove this in Airflow 3.0
-    'salesforce': ["salesforce"],
-    'samba': ["samba"],
-    'segment': ["segment"],
-    'sendgrid': ["sendgrid"],
-    'sentry': [],
-    'sftp': ["sftp"],
-    'singularity': ["singularity"],
-    'slack': ["slack"],
-    'snowflake': ["snowflake"],
-    'spark': ["apache.spark"],
-    'sqlite': ["sqlite"],
-    'ssh': ["ssh"],
-    'statsd': [],
-    'tableau': [],
-    'telegram': ["telegram"],
-    'vertica': ["vertica"],
-    'virtualenv': [],
-    'webhdfs': ["apache.hdfs"],  # TODO: remove this in Airflow 3.0
-    'winrm': ["microsoft.winrm"],  # TODO: remove this in Airflow 3.0
-    'yandex': ["yandex"],
-    'zendesk': ["zendesk"],
+
+# Those are all extras which do not have their own 'providers'
+EXTRAS_REQUIREMENTS: Dict[str, List[str]] = {
+    'apache.atlas': atlas,
+    'apache.beam': apache_beam,
+    'apache.webhdfs': webhdfs,
+    'async': async_packages,
+    'cgroups': cgroups,
+    'dask': dask,
+    'github_enterprise': flask_oauth,
+    'google_auth': flask_oauth,
+    'kerberos': kerberos,
+    'ldap': ldap,
+    'password': password,
+    'rabbitmq': rabbitmq,
+    'sentry': sentry,
+    'statsd': statsd,
+    'tableau': tableau,
+    'virtualenv': virtualenv,
 }
 
+# Add extras for all providers. For each provider, the extra name equals the provider name
+for provider_name, provider_requirement in PROVIDERS_REQUIREMENTS.items():
+    EXTRAS_REQUIREMENTS[provider_name] = provider_requirement
+
+#############################################################################################################
+#  The whole section can be removed in Airflow 3.0 as those old aliases are deprecated in the 2.* series
+#############################################################################################################
+
+# Dictionary of aliases from 1.10 - deprecated in Airflow 2.*
+EXTRAS_DEPRECATED_ALIASES: Dict[str, str] = {
+    'atlas': 'apache.atlas',
+    'aws': 'amazon',
+    'azure': 'microsoft.azure',
+    'cassandra': 'apache.cassandra',
+    'crypto': '',  # All crypto requirements are installation requirements of core Airflow
+    'druid': 'apache.druid',
+    'gcp': 'google',
+    'gcp_api': 'google',
+    'hdfs': 'apache.hdfs',
+    'hive': 'apache.hive',
+    'kubernetes': 'cncf.kubernetes',
+    'mssql': 'microsoft.mssql',
+    'pinot': 'apache.pinot',
+    'qds': 'qubole',
+    's3': 'amazon',
+    'spark': 'apache.spark',
+    'webhdfs': 'apache.webhdfs',
+    'winrm': 'microsoft.winrm',
+}
 
-# Those are all "users" extras (no devel extras)
-all_ = list(
-    set(
-        [req for req_list in EXTRAS_REQUIREMENTS.values() for req in req_list]
-        + [req for req_list in PROVIDERS_REQUIREMENTS.values() for req in req_list]
-    )
-)
 
-# Those are special extras
-EXTRAS_REQUIREMENTS.update(
-    {
-        'all': all_,
-        'devel': devel_minreq,  # includes doc
-        'devel_hadoop': devel_hadoop,  # includes devel_minreq
-        'doc': doc,
-    }
-)
-# This can be simplify to devel_hadoop + all_ due to inclusions
-# but we keep it for explicit sake
-devel_all = list(set(all_ + doc + devel_minreq + devel_hadoop))
+def find_requirements_for_alias(alias_to_look_for: Tuple[str, str]) -> List[str]:
+    """Finds requirements for an alias"""
+    deprecated_extra = alias_to_look_for[0]
+    new_extra = alias_to_look_for[1]
+    if new_extra == '':  # Handle case for crypto
+        return []
+    try:
+        return EXTRAS_REQUIREMENTS[new_extra]
+    except KeyError:  # noqa
+        raise Exception(f"The extra {new_extra} is missing for alias {deprecated_extra}")
+
+
+# Add extras for all deprecated aliases. Requirements for those deprecated aliases are the same
+# as for the extras they are replaced with
+for alias, extra in EXTRAS_DEPRECATED_ALIASES.items():
+    requirements = EXTRAS_REQUIREMENTS.get(extra) if extra != '' else []
+    if requirements is None:
+        raise Exception(f"The extra {extra} is missing for deprecated alias {alias}")
+    # Note the requirements are not copies - those are the same lists as for the new extras. This is intended.
+    # Thanks to that if the original extras are later extended with providers, aliases are extended as well.
+    EXTRAS_REQUIREMENTS[alias] = requirements
+
+#############################################################################################################
+#  End of deprecated section
+#############################################################################################################
+
+# This is the list of all providers. It's a shortcut for anyone who would like to easily get the
+# list of all providers. It is used by pre-commits.
+ALL_PROVIDERS = list(PROVIDERS_REQUIREMENTS.keys())
+
+ALL_DB_PROVIDERS = [
+    'apache.cassandra',
+    'apache.druid',
+    'apache.hdfs',
+    'apache.hive',
+    'apache.pinot',
+    'cloudant',
+    'exasol',
+    'microsoft.mssql',
+    'mongo',
+    'mysql',
+    'postgres',
+    'presto',
+    'vertica',
+]
+
+# Special requirements for all database-related providers. They are de-duplicated.
+all_dbs = list({req for db_provider in ALL_DB_PROVIDERS for req in PROVIDERS_REQUIREMENTS[db_provider]})
+
+# Requirements for all "user" extras (no devel). They are de-duplicated. Note that we do not need
+# to separately add provider requirements - they have already been added as provider extras above
+_all_requirements = list({req for extras_reqs in EXTRAS_REQUIREMENTS.values() for req in extras_reqs})
+
+# All user extras here
+EXTRAS_REQUIREMENTS["all"] = _all_requirements
+
+# All db user extras here
+EXTRAS_REQUIREMENTS["all_dbs"] = all_dbs
+
+# This can be simplified to devel_hadoop + _all_requirements due to inclusions
+# but we keep it for the sake of explicitness. We de-duplicate it anyway.
+devel_all = list(set(_all_requirements + doc + devel_minreq + devel_hadoop))
 
 # Those are packages excluded for "all" dependencies
 PACKAGES_EXCLUDED_FOR_ALL = []
-
-if PY3:
-    PACKAGES_EXCLUDED_FOR_ALL.extend(
-        [
-            'snakebite',
-        ]
-    )
+PACKAGES_EXCLUDED_FOR_ALL.extend(
+    [
+        'snakebite',
+    ]
+)
 
 # Those packages are excluded because they break tests (downgrading mock) and they are
-# not needed to run our test suite.
+# not needed to run our test suite. This can be removed as soon as we get non-conflicting
+# requirements for apache-beam as well. This waits for the Azure and Snowflake fixes:
+#
+# * Azure: https://github.com/apache/airflow/issues/11968
+# * Snowflake: https://github.com/apache/airflow/issues/12881
+#
 PACKAGES_EXCLUDED_FOR_CI = [
     'apache-beam',
 ]
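
As the comment in the alias loop above points out, the deprecated aliases deliberately reuse the very
same requirement lists as the extras they point to, so anything appended to an extra later (for example
a provider package) is visible through the alias as well. A minimal sketch of that sharing behaviour,
using illustrative requirement strings rather than the real ones:

    # Shared-list behaviour behind the deprecated aliases (illustrative requirement strings)
    extras = {'amazon': ['boto3>=1.15.0', 'watchtower~=0.7.3']}
    deprecated_aliases = {'aws': 'amazon'}

    for alias, extra in deprecated_aliases.items():
        extras[alias] = extras[extra]  # same list object, intentionally not a copy

    extras['amazon'].append('apache-airflow-providers-amazon')
    assert extras['aws'][-1] == 'apache-airflow-providers-amazon'  # the alias sees the later addition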
@@ -880,16 +728,50 @@ devel_ci = [
     )
 ]
 
-# Those are development requirements that install all useful devel tools
-EXTRAS_REQUIREMENTS.update(
-    {
-        'devel_all': devel_all,
-        'devel_ci': devel_ci,
-    }
-)
+
+# Those are extras that we have to add for development purposes.
+# They can be used to install some predefined sets of dependencies.
+EXTRAS_REQUIREMENTS["doc"] = doc
+EXTRAS_REQUIREMENTS["devel"] = devel_minreq  # devel_minreq already includes doc
+EXTRAS_REQUIREMENTS["devel_hadoop"] = devel_hadoop  # devel_hadoop already includes devel_minreq
+EXTRAS_REQUIREMENTS["devel_all"] = devel_all
+EXTRAS_REQUIREMENTS["devel_ci"] = devel_ci
+
+# For Python 3.6+ dictionaries keep insertion order when keys() are retrieved.
+# Sort both the extras and their lists of dependencies to make it easier to analyse problems:
+# external packages come first; if providers are added later, they are appended at the end of the lists.
+EXTRAS_REQUIREMENTS = dict(sorted(EXTRAS_REQUIREMENTS.items()))  # noqa
+for extra_list in EXTRAS_REQUIREMENTS.values():
+    extra_list.sort()
+
+# A set that keeps all extras that install some providers.
+# It is used by the pre-commit hook that verifies that the documentation in
+# docs/apache-airflow/extra-packages-ref.rst is synchronized.
+EXTRAS_WITH_PROVIDERS: Set[str] = set()
+
+# Those providers are always pre-installed when Airflow is installed.
+# Those providers do not have a dependency on Airflow 2.0 because that would lead to circular dependencies.
+# This is not a problem for pip, but some tools (e.g. pipdeptree) show it as a warning.
+PREINSTALLED_PROVIDERS = [
+    'ftp',
+    'http',
+    'imap',
+    'sqlite',
+]
+
+
+def get_provider_package_from_package_id(package_id: str):
+    """
+    Builds the name of the provider package out of the package id provided.
+
+    :param package_id: id of the package (like amazon or microsoft.azure)
+    :return: full name of package in PyPI
+    """
+    package_suffix = package_id.replace(".", "-")
+    return f"apache-airflow-providers-{package_suffix}"
 
 
-class AirflowDistribtuion(Distribution):
+class AirflowDistribution(Distribution):
     """setuptools.Distribution subclass with Airflow specific behaviour"""
 
     # https://github.com/PyCQA/pylint/issues/3737
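
The ordering applied above - dict(sorted(...)) over the extras plus an in-place sort of every dependency
list - gives a deterministic layout that is easier to compare and analyse. A small self-contained sketch
with illustrative entries:

    # Deterministic ordering of extras and their dependency lists (illustrative entries)
    extras = {'mysql': ['mysqlclient>=1.3.6'], 'amazon': ['watchtower~=0.7.3', 'boto3>=1.15.0']}
    extras = dict(sorted(extras.items()))
    for dependency_list in extras.values():
        dependency_list.sort()
    assert list(extras) == ['amazon', 'mysql']
    assert extras['amazon'] == ['boto3>=1.15.0', 'watchtower~=0.7.3']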
@@ -900,20 +782,42 @@ class AirflowDistribtuion(Distribution):
         """
         super().parse_config_files(*args, **kwargs)
         if os.getenv('INSTALL_PROVIDERS_FROM_SOURCES') == 'true':
-            self.install_requires = [  # pylint: disable=attribute-defined-outside-init
+            self.install_requires = [  # noqa  pylint: disable=attribute-defined-outside-init
                 req for req in self.install_requires if not req.startswith('apache-airflow-providers-')
             ]
+        else:
+            self.install_requires.extend(
+                [get_provider_package_from_package_id(package_id) for package_id in PREINSTALLED_PROVIDERS]
+            )
 
 
-def get_provider_package_from_package_id(package_id: str):
+def add_provider_packages_to_requirements(extra_with_providers: str, providers: List[str]):
     """
-    Builds the name of provider package out of the package id provided/
+    Adds provider packages to requirements
 
-    :param package_id: id of the package (like amazon or microsoft.azure)
-    :return: full name of package in PyPI
+    :param extra_with_providers: Name of the extra to add providers to
+    :param providers: list of provider names
     """
-    package_suffix = package_id.replace(".", "-")
-    return f"apache-airflow-providers-{package_suffix}"
+    EXTRAS_WITH_PROVIDERS.add(extra_with_providers)
+    EXTRAS_REQUIREMENTS[extra_with_providers].extend(
+        [get_provider_package_from_package_id(package_name) for package_name in providers]
+    )
+
+
+def add_all_provider_packages():
+    """
+    In case of a regular installation (when INSTALL_PROVIDERS_FROM_SOURCES is false), we should
+    add extra dependencies to Airflow so that the providers get automatically installed when
+    those extras are installed.
+
+    """
+    for provider in ALL_PROVIDERS:
+        add_provider_packages_to_requirements(provider, [provider])
+    add_provider_packages_to_requirements("all", ALL_PROVIDERS)
+    add_provider_packages_to_requirements("devel_ci", ALL_PROVIDERS)
+    add_provider_packages_to_requirements("devel_all", ALL_PROVIDERS)
+    add_provider_packages_to_requirements("all_dbs", ALL_DB_PROVIDERS)
+    add_provider_packages_to_requirements("devel_hadoop", ["apache.hdfs", "apache.hive", "presto"])
 
 
 def do_setup():
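
add_provider_packages_to_requirements turns each provider id into the corresponding PyPI distribution
via get_provider_package_from_package_id, which only replaces the dots in a package id with dashes and
prepends the provider package prefix. A standalone sketch of that transformation:

    # Package-id to PyPI distribution name, mirroring get_provider_package_from_package_id
    def provider_package_name(package_id: str) -> str:
        return "apache-airflow-providers-" + package_id.replace(".", "-")

    assert provider_package_name("google") == "apache-airflow-providers-google"
    assert provider_package_name("microsoft.azure") == "apache-airflow-providers-microsoft-azure"
    assert provider_package_name("cncf.kubernetes") == "apache-airflow-providers-cncf-kubernetes"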
@@ -925,14 +829,10 @@ def do_setup():
         # setup.cfg control this (kwargs in setup() call take priority)
         setup_kwargs['packages'] = find_namespace_packages(include=['airflow*'])
     else:
-        for key, value in EXTRAS_PROVIDERS_PACKAGES.items():
-            EXTRAS_REQUIREMENTS[key].extend(
-                [get_provider_package_from_package_id(package_name) for package_name in value]
-            )
-
+        add_all_provider_packages()
     write_version()
     setup(
-        distclass=AirflowDistribtuion,
+        distclass=AirflowDistribution,
         # Most values come from setup.cfg -- see
         # https://setuptools.readthedocs.io/en/latest/userguide/declarative_config.html
         version=version,
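
Taken together, AirflowDistribution.parse_config_files and add_all_provider_packages implement a single
switch: with INSTALL_PROVIDERS_FROM_SOURCES=true the provider distributions are stripped from
install_requires (the sources are used directly), otherwise the pre-installed providers are appended and
every provider extra pulls in its provider package. A condensed sketch of that switch, using a
hypothetical helper name rather than the Distribution subclass:

    # Condensed sketch of the provider handling (hypothetical helper, not the real class)
    import os
    from typing import List

    PREINSTALLED = ['ftp', 'http', 'imap', 'sqlite']

    def adjust_install_requires(install_requires: List[str]) -> List[str]:
        if os.getenv('INSTALL_PROVIDERS_FROM_SOURCES') == 'true':
            # providers come from the local sources, so provider distributions are dropped
            return [req for req in install_requires if not req.startswith('apache-airflow-providers-')]
        # regular installation: always-installed providers are added on top of the core requirements
        return install_requires + [
            'apache-airflow-providers-' + provider.replace('.', '-') for provider in PREINSTALLED
        ]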
diff --git a/tests/build_provider_packages_dependencies.py b/tests/build_provider_packages_dependencies.py
index a9584f8..1e7bdbc 100644
--- a/tests/build_provider_packages_dependencies.py
+++ b/tests/build_provider_packages_dependencies.py
@@ -45,7 +45,7 @@ dependencies: Dict[str, List[str]] = defaultdict(list)
 def find_provider(provider_elements: List[str]) -> Optional[str]:
     """
     Finds provider name from the list of elements provided. It looks the providers up
-    in PROVIDERS_DEPENDENCIES map taken from the provider's package setup.
+    in the PROVIDERS_REQUIREMENTS dict taken from setup.py.
 
     :param provider_elements: array of elements of the path (split)
     :return: provider name or None if no provider could be found
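
The updated docstring explains that find_provider resolves a path under airflow/providers to a key of
PROVIDERS_REQUIREMENTS. A hypothetical sketch of such a lookup (the real implementation lives in the
file above and is not reproduced here):

    # Hypothetical lookup: the longest dotted prefix of the path elements that is a known provider key
    from typing import List, Optional

    KNOWN_PROVIDERS = {'amazon', 'apache.hive', 'microsoft.azure'}  # stand-in for PROVIDERS_REQUIREMENTS keys

    def find_provider_sketch(provider_elements: List[str]) -> Optional[str]:
        candidate = ''
        found = None
        for element in provider_elements:
            candidate = f"{candidate}.{element}" if candidate else element
            if candidate in KNOWN_PROVIDERS:
                found = candidate
        return found

    assert find_provider_sketch(['apache', 'hive', 'hooks']) == 'apache.hive'
    assert find_provider_sketch(['unknown']) is None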
@@ -182,7 +182,7 @@ def parse_arguments():
         description='Checks if dependencies between packages are handled correctly.'
     )
     parser.add_argument(
-        "-f", "--provider-dependencies-file", help="Stores dependencies between providers in the file"
+        "-f", "--provider-dependencies-file", help="Stores dependencies between providers in the file(.json)"
     )
     parser.add_argument(
         "-d", "--documentation-file", help="Updates package documentation in the file specified (.rst)"