Posted to commits@airflow.apache.org by ka...@apache.org on 2021/02/27 12:28:44 UTC

[airflow] branch master updated: Fix spellings (#14483)

This is an automated email from the ASF dual-hosted git repository.

kaxilnaik pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/master by this push:
     new 50a1504  Fix spellings (#14483)
50a1504 is described below

commit 50a1504c524257e53eff06589b71973cfab5bf54
Author: John Bampton <jb...@users.noreply.github.com>
AuthorDate: Sat Feb 27 22:28:33 2021 +1000

    Fix spellings (#14483)
---
 BREEZE.rst                                             |  4 ++--
 CONTRIBUTING.rst                                       |  2 +-
 Dockerfile                                             |  2 +-
 IMAGES.rst                                             |  2 +-
 UPDATING.md                                            |  4 ++--
 airflow/cli/commands/webserver_command.py              |  2 +-
 airflow/executors/celery_executor.py                   |  2 +-
 airflow/models/dag.py                                  |  2 +-
 airflow/models/dagbag.py                               |  4 ++--
 airflow/models/dagrun.py                               |  4 ++--
 .../google/cloud/example_dags/example_life_sciences.py |  4 ++--
 airflow/providers/google/cloud/operators/compute.py    |  2 +-
 airflow/providers/google/cloud/transfers/sql_to_gcs.py |  2 +-
 .../google/common/utils/id_token_credentials.py        |  4 ++--
 airflow/settings.py                                    |  2 +-
 airflow/utils/db.py                                    |  2 +-
 airflow/www/static/js/datetime_utils.js                |  2 +-
 airflow/www/static/js/gantt_chart_d3v2.js              |  6 +++---
 breeze                                                 |  4 ++--
 ...ws-secrets-manaager.rst => aws-secrets-manager.rst} |  0
 docs/exts/docs_build/docs_builder.py                   |  4 ++--
 docs/exts/docs_build/github_action_utils.py            |  2 +-
 scripts/ci/libraries/_push_pull_remove_images.sh       |  2 +-
 tests/api/common/experimental/test_mark_tasks.py       |  2 +-
 tests/api_connexion/test_error_handling.py             |  2 +-
 tests/cli/commands/test_task_command.py                |  8 ++++----
 tests/jobs/test_local_task_job.py                      |  6 +++---
 tests/jobs/test_scheduler_job.py                       |  2 +-
 tests/kubernetes/test_pod_generator.py                 |  2 +-
 tests/models/test_dag.py                               |  4 ++--
 tests/models/test_dagcode.py                           |  2 +-
 tests/operators/test_python.py                         |  2 +-
 tests/plugins/test_plugin.py                           |  2 +-
 .../providers/amazon/aws/hooks/test_lambda_function.py |  2 +-
 .../providers/apache/spark/hooks/test_spark_submit.py  |  4 ++--
 .../apache/spark/operators/test_spark_submit.py        |  6 +++---
 tests/providers/google/cloud/hooks/test_compute_ssh.py |  2 +-
 tests/providers/google/cloud/hooks/test_dataflow.py    | 14 +++++++-------
 tests/providers/google/cloud/hooks/test_gcs.py         |  8 ++++----
 .../google/cloud/hooks/test_speech_to_text.py          |  2 +-
 .../google/cloud/log/test_stackdriver_task_handler.py  |  8 ++++----
 .../providers/google/cloud/operators/test_dataflow.py  |  2 +-
 .../google/cloud/operators/test_datafusion.py          | 18 +++++++++---------
 .../google/cloud/operators/test_natural_language.py    |  4 ++--
 tests/providers/google/cloud/sensors/test_dataflow.py  |  2 +-
 .../marketing_platform/hooks/test_display_video.py     |  2 +-
 .../marketing_platform/operators/test_display_video.py |  2 +-
 tests/providers/google/suite/hooks/test_sheets.py      |  2 +-
 tests/providers/qubole/sensors/test_qubole.py          |  2 +-
 tests/providers/salesforce/hooks/test_salesforce.py    |  2 +-
 tests/providers/sendgrid/utils/test_emailer.py         |  8 ++++----
 .../snowflake/transfers/test_snowflake_to_slack.py     |  2 +-
 tests/sensors/test_smart_sensor_operator.py            |  8 ++++----
 tests/test_utils/perf/sql_queries.py                   |  2 +-
 tests/ti_deps/deps/test_runnable_exec_date_dep.py      |  2 +-
 55 files changed, 100 insertions(+), 100 deletions(-)

diff --git a/BREEZE.rst b/BREEZE.rst
index dc6f62e..5a5d762 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -333,7 +333,7 @@ Managing CI environment:
     * Generate constraints with ``breeze generate-constraints``
     * Execute arbitrary command in the test environment with ``breeze shell`` command
     * Execute arbitrary docker-compose command with ``breeze docker-compose`` command
-    * Push docker images with ``breeze push-image`` command (require committer's rights to push images)
+    * Push docker images with ``breeze push-image`` command (require committers rights to push images)
 
 You can optionally reset the Airflow metada database if specified as extra ``--db-reset`` flag and for CI image
 you can also start integrations (separate Docker images) if specified as extra ``--integration`` flags. You can also
@@ -356,7 +356,7 @@ Managing Prod environment (with ``--production-image`` flag):
     * Restart running interactive environment with ``breeze restart`` command
     * Execute arbitrary command in the test environment with ``breeze shell`` command
     * Execute arbitrary docker-compose command with ``breeze docker-compose`` command
-    * Push docker images with ``breeze push-image`` command (require committer's rights to push images)
+    * Push docker images with ``breeze push-image`` command (require committers rights to push images)
 
 You can optionally reset database if specified as extra ``--db-reset`` flag. You can also
 chose which backend database should be used with ``--backend`` flag and python version with ``--python`` flag.
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 9c3ade7..44b1eb5 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -983,7 +983,7 @@ If this function is designed to be called by "end-users" (i.e. DAG authors) then
       ...
       # You SHOULD not commit the session here. The wrapper will take care of commit()/rollback() if exception
 
-Don't use time() for duration calcuations
+Don't use time() for duration calculations
 -----------------------------------------
 
 If you wish to compute the time difference between two events with in the same process, use
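
For context on the CONTRIBUTING.rst hunk above (its recommendation is cut off by the diff context): the point is to prefer a monotonic clock over time.time() when measuring durations within one process. A minimal illustrative sketch, where do_work is a hypothetical stand-in for the code being timed:

    import time

    def do_work():
        # Hypothetical stand-in for the code being timed.
        time.sleep(0.1)

    start = time.monotonic()
    do_work()
    elapsed = time.monotonic() - start  # seconds; unaffected by wall-clock adjustments
    print(f"took {elapsed:.3f}s")
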
diff --git a/Dockerfile b/Dockerfile
index e8d50c6..807f4bf 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -256,7 +256,7 @@ ENV AIRFLOW_INSTALLATION_METHOD=${AIRFLOW_INSTALLATION_METHOD}
 ARG AIRFLOW_VERSION_SPECIFICATION=""
 ENV AIRFLOW_VERSION_SPECIFICATION=${AIRFLOW_VERSION_SPECIFICATION}
 
-# We can seet this value to true in case we want to install .whl .tar.gz packages placed in the
+# We can set this value to true in case we want to install .whl .tar.gz packages placed in the
 # docker-context-files folder. This can be done for both - additional packages you want to install
 # and for airflow as well (you have to set INSTALL_FROM_PYPI to false in this case)
 ARG INSTALL_FROM_DOCKER_CONTEXT_FILES=""
diff --git a/IMAGES.rst b/IMAGES.rst
index 3bdb787..4c3d35f 100644
--- a/IMAGES.rst
+++ b/IMAGES.rst
@@ -228,7 +228,7 @@ Choosing image registry
 =======================
 
 By default images are pulled and pushed from and to DockerHub registry when you use Breeze's push-image
-or build commands. But as described in `CI Documentaton <CI.rst>`_, you can choose different image
+or build commands. But as described in `CI Documentation <CI.rst>`_, you can choose different image
 registry by setting ``GITHUB_REGISTRY`` to ``docker.pkg.github.com`` for Github Package Registry or
 ``ghcr.io`` for GitHub Container Registry.
 
diff --git a/UPDATING.md b/UPDATING.md
index 1219f03..77bad87 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -780,7 +780,7 @@ In previous versions, the `LatestOnlyOperator` forcefully skipped all (direct an
 
 No change is needed if only the default trigger rule `all_success` is being used.
 
-If the DAG relies on tasks with other trigger rules (i.e. `all_done`) being skipped by the `LatestOnlyOperator`, adjustments to the DAG need to be made to commodate the change in behaviour, i.e. with additional edges from the `LatestOnlyOperator`.
+If the DAG relies on tasks with other trigger rules (i.e. `all_done`) being skipped by the `LatestOnlyOperator`, adjustments to the DAG need to be made to accommodate the change in behaviour, i.e. with additional edges from the `LatestOnlyOperator`.
 
 The goal of this change is to achieve a more consistent and configurale cascading behaviour based on the `BaseBranchOperator` (see [AIRFLOW-2923](https://jira.apache.org/jira/browse/AIRFLOW-2923) and [AIRFLOW-1784](https://jira.apache.org/jira/browse/AIRFLOW-1784)).
 
@@ -1662,7 +1662,7 @@ ImapHook:
 #### `airflow.providers.http.hooks.http.HttpHook`
 
 The HTTPHook is now secured by default: `verify=True` (before: `verify=False`)
-This can be overwriten by using the extra_options param as `{'verify': False}`.
+This can be overwritten by using the extra_options param as `{'verify': False}`.
 
 #### `airflow.providers.cloudant.hooks.cloudant.CloudantHook`
 
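To illustrate the extra_options note in the UPDATING.md hunk above, a minimal sketch of opting out of the new verify=True default (the connection id and endpoint are hypothetical):

    from airflow.providers.http.hooks.http import HttpHook

    # Connection id and endpoint are hypothetical; verify=True is the new default.
    hook = HttpHook(method="GET", http_conn_id="my_http_conn")
    response = hook.run(endpoint="api/v1/health", extra_options={"verify": False})
    print(response.status_code)
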
diff --git a/airflow/cli/commands/webserver_command.py b/airflow/cli/commands/webserver_command.py
index bf60c1a..c7a9c8b 100644
--- a/airflow/cli/commands/webserver_command.py
+++ b/airflow/cli/commands/webserver_command.py
@@ -188,7 +188,7 @@ class GunicornMonitor(LoggingMixin):
 
     def _reload_gunicorn(self) -> None:
         """
-        Send signal to reload the gunciron configuration. When gunciorn receive signals, it reload the
+        Send signal to reload the gunicorn configuration. When gunicorn receive signals, it reload the
         configuration, start the new worker processes with a new configuration and gracefully
         shutdown older workers.
         """
diff --git a/airflow/executors/celery_executor.py b/airflow/executors/celery_executor.py
index 51cac04..7927dbd 100644
--- a/airflow/executors/celery_executor.py
+++ b/airflow/executors/celery_executor.py
@@ -363,7 +363,7 @@ class CeleryExecutor(BaseExecutor):
             # If the task gets updated to STARTED (which Celery does) or has
             # already finished, then it will be removed from this list -- so
             # the only time it's still in this list is when it a) never made it
-            # to celery in the first place (i.e. race condition somehwere in
+            # to celery in the first place (i.e. race condition somewhere in
             # the dying executor) or b) a really long celery queue and it just
             # hasn't started yet -- better cancel it and let the scheduler
             # re-queue rather than have this task risk stalling for ever
diff --git a/airflow/models/dag.py b/airflow/models/dag.py
index 15332f3..4af6ec7 100644
--- a/airflow/models/dag.py
+++ b/airflow/models/dag.py
@@ -1400,7 +1400,7 @@ class DAG(LoggingMixin):
         return count
 
     def __deepcopy__(self, memo):
-        # Swiwtcharoo to go around deepcopying objects coming through the
+        # Switcharoo to go around deepcopying objects coming through the
         # backdoor
         cls = self.__class__
         result = cls.__new__(cls)
diff --git a/airflow/models/dagbag.py b/airflow/models/dagbag.py
index b8028de..e5f986a 100644
--- a/airflow/models/dagbag.py
+++ b/airflow/models/dagbag.py
@@ -525,7 +525,7 @@ class DagBag(LoggingMixin):
         from airflow.models.dag import DAG
         from airflow.models.serialized_dag import SerializedDagModel
 
-        def _serialze_dag_capturing_errors(dag, session):
+        def _serialize_dag_capturing_errors(dag, session):
             """
             Try to serialize the dag to the DB, but make a note of any errors.
 
@@ -561,7 +561,7 @@ class DagBag(LoggingMixin):
                 try:
                     # Write Serialized DAGs to DB, capturing errors
                     for dag in self.dags.values():
-                        serialize_errors.extend(_serialze_dag_capturing_errors(dag, session))
+                        serialize_errors.extend(_serialize_dag_capturing_errors(dag, session))
 
                     DAG.bulk_write_to_db(self.dags.values(), session=session)
                 except OperationalError:
diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py
index fae58e1..514a0ce 100644
--- a/airflow/models/dagrun.py
+++ b/airflow/models/dagrun.py
@@ -440,7 +440,7 @@ class DagRun(Base, LoggingMixin):
                     msg='task_failure',
                 )
 
-        # if all leafs succeeded and no unfinished tasks, the run succeeded
+        # if all leaves succeeded and no unfinished tasks, the run succeeded
         elif not unfinished_tasks and all(leaf_ti.state in State.success_states for leaf_ti in leaf_tis):
             self.log.info('Marking run %s successful', self)
             self.set_state(State.SUCCESS)
@@ -592,7 +592,7 @@ class DagRun(Base, LoggingMixin):
             dag = self.get_dag()
 
             if not self.dag.schedule_interval or self.dag.schedule_interval == "@once":
-                # We can't emit this metric if there is no following schedule to cacluate from!
+                # We can't emit this metric if there is no following schedule to calculate from!
                 return
 
             ordered_tis_by_start_date = [ti for ti in finished_tis if ti.start_date]
diff --git a/airflow/providers/google/cloud/example_dags/example_life_sciences.py b/airflow/providers/google/cloud/example_dags/example_life_sciences.py
index f17fa4a..c05e6c4 100644
--- a/airflow/providers/google/cloud/example_dags/example_life_sciences.py
+++ b/airflow/providers/google/cloud/example_dags/example_life_sciences.py
@@ -29,7 +29,7 @@ LOCATION = os.environ.get("GCP_LIFE_SCIENCES_LOCATION", 'us-central1')
 
 
 # [START howto_configure_simple_action_pipeline]
-SIMPLE_ACTION_PIEPELINE = {
+SIMPLE_ACTION_PIPELINE = {
     "pipeline": {
         "actions": [
             {"imageUri": "bash", "commands": ["-c", "echo Hello, world"]},
@@ -83,7 +83,7 @@ with models.DAG(
     # [START howto_run_pipeline]
     simple_life_science_action_pipeline = LifeSciencesRunPipelineOperator(
         task_id='simple-action-pipeline',
-        body=SIMPLE_ACTION_PIEPELINE,
+        body=SIMPLE_ACTION_PIPELINE,
         project_id=PROJECT_ID,
         location=LOCATION,
     )
diff --git a/airflow/providers/google/cloud/operators/compute.py b/airflow/providers/google/cloud/operators/compute.py
index 3610837..acfb2ad 100644
--- a/airflow/providers/google/cloud/operators/compute.py
+++ b/airflow/providers/google/cloud/operators/compute.py
@@ -355,7 +355,7 @@ GCE_INSTANCE_TEMPLATE_VALIDATION_PATCH_SPECIFICATION = [
                     dict(name="onHostMaintenance", optional=True),
                     dict(name="automaticRestart", optional=True),
                     dict(name="preemptible", optional=True),
-                    dict(name="nodeAffinitites", optional=True),  # not validating deeper
+                    dict(name="nodeAffinities", optional=True),  # not validating deeper
                 ],
             ),
             dict(name="labels", optional=True),
diff --git a/airflow/providers/google/cloud/transfers/sql_to_gcs.py b/airflow/providers/google/cloud/transfers/sql_to_gcs.py
index 2abda71..ec77112 100644
--- a/airflow/providers/google/cloud/transfers/sql_to_gcs.py
+++ b/airflow/providers/google/cloud/transfers/sql_to_gcs.py
@@ -263,7 +263,7 @@ class BaseSQLToGCSOperator(BaseOperator):
 
     def _convert_parquet_schema(self, cursor):
         type_map = {
-            'INTERGER': pa.int64(),
+            'INTEGER': pa.int64(),
             'FLOAT': pa.float64(),
             'NUMERIC': pa.float64(),
             'BIGNUMERIC': pa.float64(),
diff --git a/airflow/providers/google/common/utils/id_token_credentials.py b/airflow/providers/google/common/utils/id_token_credentials.py
index d31fe2c..c14509e 100644
--- a/airflow/providers/google/common/utils/id_token_credentials.py
+++ b/airflow/providers/google/common/utils/id_token_credentials.py
@@ -205,8 +205,8 @@ def get_default_id_token_credentials(
 if __name__ == "__main__":
     from google.auth.transport import requests
 
-    request_adaapter = requests.Request()
+    request_adapter = requests.Request()
 
     creds = get_default_id_token_credentials(target_audience=None)
-    creds.refresh(request=request_adaapter)
+    creds.refresh(request=request_adapter)
     print(creds.token)
diff --git a/airflow/settings.py b/airflow/settings.py
index f4e67e5..bf68020 100644
--- a/airflow/settings.py
+++ b/airflow/settings.py
@@ -435,7 +435,7 @@ def initialize():
     configure_orm()
     configure_action_logging()
 
-    # Ensure we close DB connections at scheduler and gunicon worker terminations
+    # Ensure we close DB connections at scheduler and gunicorn worker terminations
     atexit.register(dispose_orm)
 
 
diff --git a/airflow/utils/db.py b/airflow/utils/db.py
index 20b4b0b..b3da06f 100644
--- a/airflow/utils/db.py
+++ b/airflow/utils/db.py
@@ -256,7 +256,7 @@ def create_default_connections(session=None):
             conn_id="facebook_default",
             conn_type="facebook_social",
             extra="""
-                {   "account_id": "<AD_ACCOUNNT_ID>",
+                {   "account_id": "<AD_ACCOUNT_ID>",
                     "app_id": "<FACEBOOK_APP_ID>",
                     "app_secret": "<FACEBOOK_APP_SECRET>",
                     "access_token": "<FACEBOOK_AD_ACCESS_TOKEN>"
diff --git a/airflow/www/static/js/datetime_utils.js b/airflow/www/static/js/datetime_utils.js
index 3d460bf..e63c7ae 100644
--- a/airflow/www/static/js/datetime_utils.js
+++ b/airflow/www/static/js/datetime_utils.js
@@ -76,7 +76,7 @@ export function updateAllDateTimes() {
     const dt = moment($el.attr('datetime'));
     $el.text(dt.format(defaultFormat));
     if ($el.attr('title') !== undefined) {
-      // If displayed date is not UTC, have the UTC date in a title attriubte
+      // If displayed date is not UTC, have the UTC date in a title attribute
       $el.attr('title', dt.isUTC() ? '' : `UTC: ${dt.clone().utc().format()}`);
     }
   });
diff --git a/airflow/www/static/js/gantt_chart_d3v2.js b/airflow/www/static/js/gantt_chart_d3v2.js
index 9e7839a..8dc191f 100644
--- a/airflow/www/static/js/gantt_chart_d3v2.js
+++ b/airflow/www/static/js/gantt_chart_d3v2.js
@@ -119,7 +119,7 @@ d3.gantt = function() {
     return "translate(" + (x(d.start_date.valueOf()) + yAxisLeftOffset) + "," + y(d.task_id) + ")";
   };
 
-  function tickFormater(d) {
+  function tickFormatter(d) {
     // We can't use d3.time.format as that uses local time, so instead we use
     // moment as that handles our "global" timezone.
     return moment(d).strftime(tickFormat);
@@ -129,7 +129,7 @@ d3.gantt = function() {
 
   var y = d3.scale.ordinal().domain(taskTypes).rangeRoundBands([ 0, height - margin.top - margin.bottom ], .1);
 
-  var xAxis = d3.svg.axis().scale(x).orient("bottom").tickFormat(tickFormater).tickSubdivide(true)
+  var xAxis = d3.svg.axis().scale(x).orient("bottom").tickFormat(tickFormatter).tickSubdivide(true)
   .tickSize(8).tickPadding(8);
 
   var yAxis = d3.svg.axis().scale(y).orient("left").tickSize(0);
@@ -157,7 +157,7 @@ d3.gantt = function() {
   var initAxis = function() {
     x = d3.time.scale().domain([ timeDomainStart, timeDomainEnd ]).range([ 0, width-yAxisLeftOffset ]).clamp(true);
     y = d3.scale.ordinal().domain(taskTypes).rangeRoundBands([ 0, height - margin.top - margin.bottom ], .1);
-    xAxis = d3.svg.axis().scale(x).orient("bottom").tickFormat(tickFormater).tickSubdivide(true)
+    xAxis = d3.svg.axis().scale(x).orient("bottom").tickFormat(tickFormatter).tickSubdivide(true)
     .tickSize(8).tickPadding(8);
 
     yAxis = d3.svg.axis().scale(y).orient("left").tickSize(0);
diff --git a/breeze b/breeze
index 98b30e4..040b0a4 100755
--- a/breeze
+++ b/breeze
@@ -1024,7 +1024,7 @@ function breeze::parse_arguments() {
             echo "Additional apt dev dependencies: ${ADDITIONAL_DEV_APT_DEPS}"
             shift 2
             ;;
-        --dev-apt-commad)
+        --dev-apt-command)
             export DEV_APT_COMMAND="${2}"
             echo "Apt dev command: ${DEV_APT_COMMAND}"
             shift 2
@@ -1049,7 +1049,7 @@ function breeze::parse_arguments() {
             echo "Additional apt runtime dependencies: ${ADDITIONAL_RUNTIME_APT_DEPS}"
             shift 2
             ;;
-        --runtime-apt-commad)
+        --runtime-apt-command)
             export RUNTIME_APT_COMMAND="${2}"
             echo "Apt runtime command: ${RUNTIME_APT_COMMAND}"
             shift 2
diff --git a/docs/apache-airflow-providers-amazon/secrets-backends/aws-secrets-manaager.rst b/docs/apache-airflow-providers-amazon/secrets-backends/aws-secrets-manager.rst
similarity index 100%
rename from docs/apache-airflow-providers-amazon/secrets-backends/aws-secrets-manaager.rst
rename to docs/apache-airflow-providers-amazon/secrets-backends/aws-secrets-manager.rst
diff --git a/docs/exts/docs_build/docs_builder.py b/docs/exts/docs_build/docs_builder.py
index 2c5d9a2..b2867ce 100644
--- a/docs/exts/docs_build/docs_builder.py
+++ b/docs/exts/docs_build/docs_builder.py
@@ -150,8 +150,8 @@ class AirflowDocsBuilder:
                 )
                 warning_text = ""
                 for filepath in glob(f"{tmp_dir}/**/*.spelling", recursive=True):
-                    with open(filepath) as speeling_file:
-                        warning_text += speeling_file.read()
+                    with open(filepath) as spelling_file:
+                        warning_text += spelling_file.read()
 
                 spelling_errors.extend(parse_spelling_warnings(warning_text, self._src_dir))
         return spelling_errors
diff --git a/docs/exts/docs_build/github_action_utils.py b/docs/exts/docs_build/github_action_utils.py
index 4b21b03..104ff71 100644
--- a/docs/exts/docs_build/github_action_utils.py
+++ b/docs/exts/docs_build/github_action_utils.py
@@ -23,7 +23,7 @@ from contextlib import contextmanager
 def with_group(title):
     """
     If used in GitHub Action, creates an expandable group in the GitHub Action log.
-    Otherwise, dispaly simple text groups.
+    Otherwise, display simple text groups.
 
     For more information, see:
     https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#grouping-log-lines
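
As background for the hunk above: expandable groups in GitHub Actions logs use the ::group:: / ::endgroup:: workflow commands linked in the docstring. A minimal, hypothetical sketch of such a helper (not the function from this commit):

    import os
    from contextlib import contextmanager

    @contextmanager
    def log_group(title):
        """Wrap a block of log output in a GitHub Actions expandable group."""
        if os.environ.get("GITHUB_ACTIONS") == "true":
            print(f"::group::{title}")
            try:
                yield
            finally:
                print("::endgroup::")
        else:
            # Outside GitHub Actions, fall back to a plain text marker.
            print(f"# {title}")
            yield

    # Usage: with log_group("Build docs"): ...
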
diff --git a/scripts/ci/libraries/_push_pull_remove_images.sh b/scripts/ci/libraries/_push_pull_remove_images.sh
index 10c1503..07149fc 100644
--- a/scripts/ci/libraries/_push_pull_remove_images.sh
+++ b/scripts/ci/libraries/_push_pull_remove_images.sh
@@ -104,7 +104,7 @@ function push_pull_remove_images::pull_image_github_dockerhub() {
     set -e
 }
 
-# Pulls the base Python image. This image is used as base for CI and PROD imaages, depending on the parameters used:
+# Pulls the base Python image. This image is used as base for CI and PROD images, depending on the parameters used:
 #
 # * if UPGRADE_TO_NEWER_DEPENDENCIES is noy false, then it pulls the latest Python image available first and
 #     adds `org.opencontainers.image.source` label to it, so that it is linked to Airflow repository when
diff --git a/tests/api/common/experimental/test_mark_tasks.py b/tests/api/common/experimental/test_mark_tasks.py
index db0caf7..5d2e754 100644
--- a/tests/api/common/experimental/test_mark_tasks.py
+++ b/tests/api/common/experimental/test_mark_tasks.py
@@ -592,7 +592,7 @@ class TestMarkDAGRun(unittest.TestCase):
 
         will_be_altered = set_dag_run_state_to_failed(self.dag1, date, commit=False)
 
-        # Only the running task shouldbe altered.
+        # Only the running task should be altered.
         expected = self._get_num_tasks_with_starting_state(State.RUNNING, inclusion=True)
         assert len(will_be_altered) == expected
         self._verify_dag_run_state(self.dag1, date, State.RUNNING)
diff --git a/tests/api_connexion/test_error_handling.py b/tests/api_connexion/test_error_handling.py
index cfd33da..44203c1 100644
--- a/tests/api_connexion/test_error_handling.py
+++ b/tests/api_connexion/test_error_handling.py
@@ -39,7 +39,7 @@ class TestErrorHandling(unittest.TestCase):
 
         assert 404 == resp_json["status"]
 
-        # When we are hitting non-api incorrect enpoint
+        # When we are hitting non-api incorrect endpoint
 
         resp_json = self.client.get("/incorrect_endpoint").json
 
diff --git a/tests/cli/commands/test_task_command.py b/tests/cli/commands/test_task_command.py
index a011ee6..85d4711 100644
--- a/tests/cli/commands/test_task_command.py
+++ b/tests/cli/commands/test_task_command.py
@@ -237,10 +237,10 @@ class TestCliTasks(unittest.TestCase):
 
         dag2 = DagBag().dags['example_python_operator']
         task2 = dag2.get_task(task_id='print_the_context')
-        defaut_date2 = timezone.make_aware(datetime(2016, 1, 9))
+        default_date2 = timezone.make_aware(datetime(2016, 1, 9))
         dag2.clear()
 
-        ti2 = TaskInstance(task2, defaut_date2)
+        ti2 = TaskInstance(task2, default_date2)
 
         ti2.set_state(State.SUCCESS)
         ti_start = ti2.start_date
@@ -253,7 +253,7 @@ class TestCliTasks(unittest.TestCase):
                         'tasks',
                         'states-for-dag-run',
                         'example_python_operator',
-                        defaut_date2.isoformat(),
+                        default_date2.isoformat(),
                         '--output',
                         "json",
                     ]
@@ -352,7 +352,7 @@ class TestLogsfromTaskRunCommand(unittest.TestCase):
     def assert_log_line(self, text, logs_list, expect_from_logging_mixin=False):
         """
         Get Log Line and assert only 1 Entry exists with the given text. Also check that
-        "logging_mixin" line does not appear in that log line to avoid duplicate loggigng as below:
+        "logging_mixin" line does not appear in that log line to avoid duplicate logging as below:
 
         [2020-06-24 16:47:23,537] {logging_mixin.py:91} INFO - [2020-06-24 16:47:23,536] {python.py:135}
         """
diff --git a/tests/jobs/test_local_task_job.py b/tests/jobs/test_local_task_job.py
index 537a242..a4cbff6 100644
--- a/tests/jobs/test_local_task_job.py
+++ b/tests/jobs/test_local_task_job.py
@@ -355,7 +355,7 @@ class TestLocalTaskJob(unittest.TestCase):
         job1 = LocalTaskJob(task_instance=ti, ignore_ti_state=True, executor=SequentialExecutor())
         with timeout(30):
             # This should be _much_ shorter to run.
-            # If you change this limit, make the timeout in the callbable above bigger
+            # If you change this limit, make the timeout in the callable above bigger
             job1.run()
 
         ti.refresh_from_db()
@@ -422,7 +422,7 @@ class TestLocalTaskJob(unittest.TestCase):
 
         with timeout(10):
             # This should be _much_ shorter to run.
-            # If you change this limit, make the timeout in the callbable above bigger
+            # If you change this limit, make the timeout in the callable above bigger
             job1.run()
 
         ti.refresh_from_db()
@@ -431,7 +431,7 @@ class TestLocalTaskJob(unittest.TestCase):
 
     def test_mark_success_on_success_callback(self):
         """
-        Test that ensures that where a task is marked suceess in the UI
+        Test that ensures that where a task is marked success in the UI
         on_success_callback gets executed
         """
         # use shared memory value so we can properly track value change even if
diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py
index d63a534..ac5a837 100644
--- a/tests/jobs/test_scheduler_job.py
+++ b/tests/jobs/test_scheduler_job.py
@@ -2359,7 +2359,7 @@ class TestSchedulerJob(unittest.TestCase):
             dag.run(start_date=dr.execution_date, end_date=dr.execution_date, executor=self.null_exec)
 
         # Mark the successful task as never having run since we want to see if the
-        # dagrun will be in a running state despite haveing an unfinished task.
+        # dagrun will be in a running state despite having an unfinished task.
         with create_session() as session:
             ti = dr.get_task_instance('test_dagrun_unfinished', session=session)
             ti.state = State.NONE
diff --git a/tests/kubernetes/test_pod_generator.py b/tests/kubernetes/test_pod_generator.py
index 17a942a..53b7b15 100644
--- a/tests/kubernetes/test_pod_generator.py
+++ b/tests/kubernetes/test_pod_generator.py
@@ -293,7 +293,7 @@ class TestPodGenerator(unittest.TestCase):
         }
         assert (
             result_from_pod == expected_from_pod
-        ), "There was a discrepency between KubernetesExecutor and pod_override"
+        ), "There was a discrepancy between KubernetesExecutor and pod_override"
 
         assert {
             'apiVersion': 'v1',
diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py
index 123c119..2e3b4d1 100644
--- a/tests/models/test_dag.py
+++ b/tests/models/test_dag.py
@@ -717,7 +717,7 @@ class TestDag(unittest.TestCase):
 
         model = session.query(DagModel).get((dag.dag_id,))
         assert model.next_dagrun == period_end
-        # We signle "at max active runs" by saying this run is never eligible to be created
+        # We signal "at max active runs" by saying this run is never eligible to be created
         assert model.next_dagrun_create_after is None
 
     def test_sync_to_db(self):
@@ -1042,7 +1042,7 @@ class TestDag(unittest.TestCase):
         dag.add_task(BaseOperator(task_id="faketastic", owner='Also fake', start_date=when))
 
         dag_run = dag.create_dagrun(State.RUNNING, when, run_type=DagRunType.MANUAL)
-        # should not rause any exception
+        # should not raise any exception
         dag.handle_callback(dag_run, success=False)
         dag.handle_callback(dag_run, success=True)
 
diff --git a/tests/models/test_dagcode.py b/tests/models/test_dagcode.py
index e1dcfe6..da8cec0 100644
--- a/tests/models/test_dagcode.py
+++ b/tests/models/test_dagcode.py
@@ -128,7 +128,7 @@ class TestDagCode(unittest.TestCase):
     def test_code_can_be_read_when_no_access_to_file(self):
         """
         Test that code can be retrieved from DB when you do not have access to Code file.
-        Source Code should atleast exist in one of DB or File.
+        Source Code should at least exist in one of DB or File.
         """
         example_dag = make_example_dags(example_dags_module).get('example_bash_operator')
         example_dag.sync_to_db()
diff --git a/tests/operators/test_python.py b/tests/operators/test_python.py
index 14ebdf0..4c4a93a 100644
--- a/tests/operators/test_python.py
+++ b/tests/operators/test_python.py
@@ -531,7 +531,7 @@ class TestAirflowTaskDecorator(TestPythonBase):
         )
 
     def test_manual_task_id(self):
-        """Test manually seting task_id"""
+        """Test manually setting task_id"""
 
         @task_decorator(task_id='some_name')
         def do_run():
diff --git a/tests/plugins/test_plugin.py b/tests/plugins/test_plugin.py
index d52d8e5..e233ee9 100644
--- a/tests/plugins/test_plugin.py
+++ b/tests/plugins/test_plugin.py
@@ -89,7 +89,7 @@ appbuilder_mitem_toplevel = {
     "label": "The Apache Software Foundation",
 }
 
-# Creating a flask blueprint to intergrate the templates and static folder
+# Creating a flask blueprint to integrate the templates and static folder
 bp = Blueprint(
     "test_plugin",
     __name__,
diff --git a/tests/providers/amazon/aws/hooks/test_lambda_function.py b/tests/providers/amazon/aws/hooks/test_lambda_function.py
index 64ffe7d..54bc655 100644
--- a/tests/providers/amazon/aws/hooks/test_lambda_function.py
+++ b/tests/providers/amazon/aws/hooks/test_lambda_function.py
@@ -46,7 +46,7 @@ class TestAwsLambdaHook:
             hook.invoke_lambda(payload=payload)
 
         mock_invoke.asset_called_once_with(
-            FunctionName="test_functin",
+            FunctionName="test_function",
             InvocationType="RequestResponse",
             LogType="None",
             Payload=payload,
diff --git a/tests/providers/apache/spark/hooks/test_spark_submit.py b/tests/providers/apache/spark/hooks/test_spark_submit.py
index eb0a3bf..a4aa7ec 100644
--- a/tests/providers/apache/spark/hooks/test_spark_submit.py
+++ b/tests/providers/apache/spark/hooks/test_spark_submit.py
@@ -60,7 +60,7 @@ class TestSparkSubmitHook(unittest.TestCase):
             '--bar',
             'bar',
             '--with-spaces',
-            'args should keep embdedded spaces',
+            'args should keep embedded spaces',
             'baz',
         ],
     }
@@ -195,7 +195,7 @@ class TestSparkSubmitHook(unittest.TestCase):
             '--bar',
             'bar',
             '--with-spaces',
-            'args should keep embdedded spaces',
+            'args should keep embedded spaces',
             'baz',
         ]
         assert expected_build_cmd == cmd
diff --git a/tests/providers/apache/spark/operators/test_spark_submit.py b/tests/providers/apache/spark/operators/test_spark_submit.py
index fdbf3e4..dba7a22 100644
--- a/tests/providers/apache/spark/operators/test_spark_submit.py
+++ b/tests/providers/apache/spark/operators/test_spark_submit.py
@@ -63,7 +63,7 @@ class TestSparkSubmitOperator(unittest.TestCase):
             '--end',
             '{{ ds }}',
             '--with-spaces',
-            'args should keep embdedded spaces',
+            'args should keep embedded spaces',
         ],
     }
 
@@ -113,7 +113,7 @@ class TestSparkSubmitOperator(unittest.TestCase):
                 '--end',
                 '{{ ds }}',
                 '--with-spaces',
-                'args should keep embdedded spaces',
+                'args should keep embedded spaces',
             ],
             'spark_binary': 'sparky',
         }
@@ -163,7 +163,7 @@ class TestSparkSubmitOperator(unittest.TestCase):
             '--end',
             DEFAULT_DATE.strftime("%Y-%m-%d"),
             '--with-spaces',
-            'args should keep embdedded spaces',
+            'args should keep embedded spaces',
         ]
         expected_name = 'spark_submit_job'
         assert expected_application_args == getattr(operator, '_application_args')
diff --git a/tests/providers/google/cloud/hooks/test_compute_ssh.py b/tests/providers/google/cloud/hooks/test_compute_ssh.py
index 785aece..a035940 100644
--- a/tests/providers/google/cloud/hooks/test_compute_ssh.py
+++ b/tests/providers/google/cloud/hooks/test_compute_ssh.py
@@ -23,7 +23,7 @@ from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHoo
 
 TEST_PROJECT_ID = "test-project-id"
 
-TEST_INSTANCE_NAME = "test-instnace"
+TEST_INSTANCE_NAME = "test-instance"
 TEST_ZONE = "test-zone-42"
 INTERNAL_IP = "192.9.9.9"
 EXTERNAL_IP = "192.3.3.3"
diff --git a/tests/providers/google/cloud/hooks/test_dataflow.py b/tests/providers/google/cloud/hooks/test_dataflow.py
index c0da030..7ceef1f 100644
--- a/tests/providers/google/cloud/hooks/test_dataflow.py
+++ b/tests/providers/google/cloud/hooks/test_dataflow.py
@@ -93,7 +93,7 @@ TEST_FLEX_PARAMETERS = {
     "containerSpecGcsPath": "gs://test-bucket/test-file",
     "jobName": 'test-job-name',
     "parameters": {
-        "inputSubscription": 'test-subsription',
+        "inputSubscription": 'test-subscription',
         "outputTable": "test-project:test-dataset.streaming_beam_sql",
     },
 }
@@ -163,7 +163,7 @@ class TestFallbackToVariables(unittest.TestCase):
     def test_raise_exception_on_positional_argument(self):
         mock_instance = mock.MagicMock()
 
-        class FixutureFallback:
+        class FixtureFallback:
             @_fallback_to_project_id_from_variables
             def test_fn(self, *args, **kwargs):
                 mock_instance(*args, **kwargs)
@@ -171,7 +171,7 @@ class TestFallbackToVariables(unittest.TestCase):
         with pytest.raises(
             AirflowException, match="You must use keyword arguments in this methods rather than positional"
         ):
-            FixutureFallback().test_fn({'project': "TEST"}, "TEST2")
+            FixtureFallback().test_fn({'project': "TEST"}, "TEST2")
 
 
 def mock_init(
@@ -205,7 +205,7 @@ class TestDataflowHook(unittest.TestCase):
         mock_beam_start_python_pipeline = self.dataflow_hook.beam_hook.start_python_pipeline
         mock_uuid.return_value = MOCK_UUID
         on_new_job_id_callback = MagicMock()
-        py_requirements = ["pands", "numpy"]
+        py_requirements = ["pandas", "numpy"]
         job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}"
 
         with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"):
@@ -247,7 +247,7 @@ class TestDataflowHook(unittest.TestCase):
         mock_beam_start_python_pipeline = self.dataflow_hook.beam_hook.start_python_pipeline
         mock_uuid.return_value = MOCK_UUID
         on_new_job_id_callback = MagicMock()
-        py_requirements = ["pands", "numpy"]
+        py_requirements = ["pandas", "numpy"]
         job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}"
 
         passed_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY)
@@ -292,7 +292,7 @@ class TestDataflowHook(unittest.TestCase):
         mock_beam_start_python_pipeline = self.dataflow_hook.beam_hook.start_python_pipeline
         mock_uuid.return_value = MOCK_UUID
         on_new_job_id_callback = MagicMock()
-        py_requirements = ["pands", "numpy"]
+        py_requirements = ["pandas", "numpy"]
         job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}"
 
         passed_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY)
@@ -337,7 +337,7 @@ class TestDataflowHook(unittest.TestCase):
         mock_beam_start_python_pipeline = self.dataflow_hook.beam_hook.start_python_pipeline
         mock_uuid.return_value = MOCK_UUID
         on_new_job_id_callback = MagicMock()
-        py_requirements = ["pands", "numpy"]
+        py_requirements = ["pandas", "numpy"]
         job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}"
 
         passed_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY)
diff --git a/tests/providers/google/cloud/hooks/test_gcs.py b/tests/providers/google/cloud/hooks/test_gcs.py
index 5fe96ed..8b4f704 100644
--- a/tests/providers/google/cloud/hooks/test_gcs.py
+++ b/tests/providers/google/cloud/hooks/test_gcs.py
@@ -840,22 +840,22 @@ class TestGCSHookUpload(unittest.TestCase):
     def test_upload_exceptions(self, mock_service):
         test_bucket = 'test_bucket'
         test_object = 'test_object'
-        both_params_excep = (
+        both_params_except = (
             "'filename' and 'data' parameter provided. Please "
             "specify a single parameter, either 'filename' for "
             "local file uploads or 'data' for file content uploads."
         )
-        no_params_excep = "'filename' and 'data' parameter missing. One is required to upload to gcs."
+        no_params_except = "'filename' and 'data' parameter missing. One is required to upload to gcs."
 
         with pytest.raises(ValueError) as ctx:
             self.gcs_hook.upload(test_bucket, test_object)
-        assert no_params_excep == str(ctx.value)
+        assert no_params_except == str(ctx.value)
 
         with pytest.raises(ValueError) as ctx:
             self.gcs_hook.upload(
                 test_bucket, test_object, filename=self.testfile.name, data=self.testdata_str
             )
-        assert both_params_excep == str(ctx.value)
+        assert both_params_except == str(ctx.value)
 
 
 class TestSyncGcsHook(unittest.TestCase):
diff --git a/tests/providers/google/cloud/hooks/test_speech_to_text.py b/tests/providers/google/cloud/hooks/test_speech_to_text.py
index 924d73d..59ce7b5 100644
--- a/tests/providers/google/cloud/hooks/test_speech_to_text.py
+++ b/tests/providers/google/cloud/hooks/test_speech_to_text.py
@@ -24,7 +24,7 @@ from airflow.providers.google.cloud.hooks.speech_to_text import CloudSpeechToTex
 from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
 
 PROJECT_ID = "project-id"
-CONFIG = {"ecryption": "LINEAR16"}
+CONFIG = {"encryption": "LINEAR16"}
 AUDIO = {"uri": "gs://bucket/object"}
 
 
diff --git a/tests/providers/google/cloud/log/test_stackdriver_task_handler.py b/tests/providers/google/cloud/log/test_stackdriver_task_handler.py
index b4dbf69..4cd6180 100644
--- a/tests/providers/google/cloud/log/test_stackdriver_task_handler.py
+++ b/tests/providers/google/cloud/log/test_stackdriver_task_handler.py
@@ -284,8 +284,8 @@ labels.try_number="3"'''
         resource = Resource(
             type="cloud_composer_environment",
             labels={
-                "environment.name": 'test-instancce',
-                "location": 'europpe-west-3',
+                "environment.name": 'test-instance',
+                "location": 'europe-west-3',
                 "project_id": "project_id",
             },
         )
@@ -304,8 +304,8 @@ labels.try_number="3"'''
                 filter=(
                     'resource.type="cloud_composer_environment"\n'
                     'logName="projects/project_id/logs/airflow"\n'
-                    'resource.labels."environment.name"="test-instancce"\n'
-                    'resource.labels.location="europpe-west-3"\n'
+                    'resource.labels."environment.name"="test-instance"\n'
+                    'resource.labels.location="europe-west-3"\n'
                     'resource.labels.project_id="project_id"\n'
                     'labels.task_id="task_for_testing_file_log_handler"\n'
                     'labels.dag_id="dag_for_testing_file_task_handler"\n'
diff --git a/tests/providers/google/cloud/operators/test_dataflow.py b/tests/providers/google/cloud/operators/test_dataflow.py
index 5d65dcb..5e7079f 100644
--- a/tests/providers/google/cloud/operators/test_dataflow.py
+++ b/tests/providers/google/cloud/operators/test_dataflow.py
@@ -66,7 +66,7 @@ TEST_FLEX_PARAMETERS = {
     "containerSpecGcsPath": "gs://test-bucket/test-file",
     "jobName": 'test-job-name',
     "parameters": {
-        "inputSubscription": 'test-subsription',
+        "inputSubscription": 'test-subscription',
         "outputTable": "test-project:test-dataset.streaming_beam_sql",
     },
 }
diff --git a/tests/providers/google/cloud/operators/test_datafusion.py b/tests/providers/google/cloud/operators/test_datafusion.py
index dd47167..81ceaec 100644
--- a/tests/providers/google/cloud/operators/test_datafusion.py
+++ b/tests/providers/google/cloud/operators/test_datafusion.py
@@ -49,7 +49,7 @@ class TestCloudDataFusionUpdateInstanceOperator:
     def test_execute(self, mock_hook):
         update_maks = "instance.name"
         op = CloudDataFusionUpdateInstanceOperator(
-            task_id="test_taks",
+            task_id="test_tasks",
             instance_name=INSTANCE_NAME,
             instance=INSTANCE,
             update_mask=update_maks,
@@ -72,7 +72,7 @@ class TestCloudDataFusionRestartInstanceOperator:
     @mock.patch(HOOK_STR)
     def test_execute(self, mock_hook):
         op = CloudDataFusionRestartInstanceOperator(
-            task_id="test_taks",
+            task_id="test_tasks",
             instance_name=INSTANCE_NAME,
             location=LOCATION,
             project_id=PROJECT_ID,
@@ -89,7 +89,7 @@ class TestCloudDataFusionCreateInstanceOperator:
     @mock.patch(HOOK_STR)
     def test_execute(self, mock_hook):
         op = CloudDataFusionCreateInstanceOperator(
-            task_id="test_taks",
+            task_id="test_tasks",
             instance_name=INSTANCE_NAME,
             instance=INSTANCE,
             location=LOCATION,
@@ -110,7 +110,7 @@ class TestCloudDataFusionDeleteInstanceOperator:
     @mock.patch(HOOK_STR)
     def test_execute(self, mock_hook):
         op = CloudDataFusionDeleteInstanceOperator(
-            task_id="test_taks",
+            task_id="test_tasks",
             instance_name=INSTANCE_NAME,
             location=LOCATION,
             project_id=PROJECT_ID,
@@ -127,7 +127,7 @@ class TestCloudDataFusionGetInstanceOperator:
     @mock.patch(HOOK_STR)
     def test_execute(self, mock_hook):
         op = CloudDataFusionGetInstanceOperator(
-            task_id="test_taks",
+            task_id="test_tasks",
             instance_name=INSTANCE_NAME,
             location=LOCATION,
             project_id=PROJECT_ID,
@@ -144,7 +144,7 @@ class TestCloudDataFusionCreatePipelineOperator:
     def test_execute(self, mock_hook):
         mock_hook.return_value.get_instance.return_value = {"apiEndpoint": INSTANCE_URL}
         op = CloudDataFusionCreatePipelineOperator(
-            task_id="test_taks",
+            task_id="test_tasks",
             pipeline_name=PIPELINE_NAME,
             pipeline=PIPELINE,
             instance_name=INSTANCE_NAME,
@@ -170,7 +170,7 @@ class TestCloudDataFusionDeletePipelineOperator:
     def test_execute(self, mock_hook):
         mock_hook.return_value.get_instance.return_value = {"apiEndpoint": INSTANCE_URL}
         op = CloudDataFusionDeletePipelineOperator(
-            task_id="test_taks",
+            task_id="test_tasks",
             pipeline_name=PIPELINE_NAME,
             version_id="1.12",
             instance_name=INSTANCE_NAME,
@@ -225,7 +225,7 @@ class TestCloudDataFusionStopPipelineOperator:
     def test_execute(self, mock_hook):
         mock_hook.return_value.get_instance.return_value = {"apiEndpoint": INSTANCE_URL}
         op = CloudDataFusionStopPipelineOperator(
-            task_id="test_taks",
+            task_id="test_tasks",
             pipeline_name=PIPELINE_NAME,
             instance_name=INSTANCE_NAME,
             namespace=NAMESPACE,
@@ -249,7 +249,7 @@ class TestCloudDataFusionListPipelinesOperator:
         artifact_name = "artifact_name"
         mock_hook.return_value.get_instance.return_value = {"apiEndpoint": INSTANCE_URL}
         op = CloudDataFusionListPipelinesOperator(
-            task_id="test_taks",
+            task_id="test_tasks",
             instance_name=INSTANCE_NAME,
             artifact_version=artifact_version,
             artifact_name=artifact_name,
diff --git a/tests/providers/google/cloud/operators/test_natural_language.py b/tests/providers/google/cloud/operators/test_natural_language.py
index 7cf0473..945b453 100644
--- a/tests/providers/google/cloud/operators/test_natural_language.py
+++ b/tests/providers/google/cloud/operators/test_natural_language.py
@@ -38,7 +38,7 @@ DOCUMENT = Document(
     content="Airflow is a platform to programmatically author, schedule and monitor workflows."
 )
 
-CLASSIFY_TEXT_RRESPONSE = ClassifyTextResponse()
+CLASSIFY_TEXT_RESPONSE = ClassifyTextResponse()
 ANALYZE_ENTITIES_RESPONSE = AnalyzeEntitiesResponse()
 ANALYZE_ENTITY_SENTIMENT_RESPONSE = AnalyzeEntitySentimentResponse()
 ANALYZE_SENTIMENT_RESPONSE = AnalyzeSentimentResponse()
@@ -76,7 +76,7 @@ class TestCloudLanguageAnalyzeSentimentOperator(unittest.TestCase):
 class TestCloudLanguageClassifyTextOperator(unittest.TestCase):
     @patch("airflow.providers.google.cloud.operators.natural_language.CloudNaturalLanguageHook")
     def test_minimal_green_path(self, hook_mock):
-        hook_mock.return_value.classify_text.return_value = CLASSIFY_TEXT_RRESPONSE
+        hook_mock.return_value.classify_text.return_value = CLASSIFY_TEXT_RESPONSE
         op = CloudNaturalLanguageClassifyTextOperator(task_id="task-id", document=DOCUMENT)
         resp = op.execute({})
         assert resp == {}
diff --git a/tests/providers/google/cloud/sensors/test_dataflow.py b/tests/providers/google/cloud/sensors/test_dataflow.py
index 9c8b158..b2cecfd 100644
--- a/tests/providers/google/cloud/sensors/test_dataflow.py
+++ b/tests/providers/google/cloud/sensors/test_dataflow.py
@@ -31,7 +31,7 @@ from airflow.providers.google.cloud.sensors.dataflow import (
     DataflowJobStatusSensor,
 )
 
-TEST_TASK_ID = "tesk-id"
+TEST_TASK_ID = "task_id"
 TEST_JOB_ID = "test_job_id"
 TEST_PROJECT_ID = "test_project"
 TEST_LOCATION = "us-central1"
diff --git a/tests/providers/google/marketing_platform/hooks/test_display_video.py b/tests/providers/google/marketing_platform/hooks/test_display_video.py
index e0c18d5..777d79c 100644
--- a/tests/providers/google/marketing_platform/hooks/test_display_video.py
+++ b/tests/providers/google/marketing_platform/hooks/test_display_video.py
@@ -338,7 +338,7 @@ class TestGoogleDisplayVideo360Hook(TestCase):
     )
     def get_sdf_download_tasks_return_equal_values(self, get_conn_to_display_video):
         operation_name = "operation"
-        response = "reposonse"
+        response = "response"
 
         get_conn_to_display_video.return_value.sdfdownloadtasks.return_value.operations.return_value.get = (
             response
diff --git a/tests/providers/google/marketing_platform/operators/test_display_video.py b/tests/providers/google/marketing_platform/operators/test_display_video.py
index c7e2c74..98ee1e7 100644
--- a/tests/providers/google/marketing_platform/operators/test_display_video.py
+++ b/tests/providers/google/marketing_platform/operators/test_display_video.py
@@ -117,7 +117,7 @@ class TestGoogleDisplayVideo360GetReportOperator(TestCase):
         mock_gcs_hook,
         mock_xcom,
         mock_temp,
-        mock_reuqest,
+        mock_request,
         mock_shutil,
     ):
         report_id = "REPORT_ID"
diff --git a/tests/providers/google/suite/hooks/test_sheets.py b/tests/providers/google/suite/hooks/test_sheets.py
index 2647ff5..977185b 100644
--- a/tests/providers/google/suite/hooks/test_sheets.py
+++ b/tests/providers/google/suite/hooks/test_sheets.py
@@ -42,7 +42,7 @@ INCLUDE_VALUES_IN_RESPONSE = True
 VALUE_INPUT_OPTION = 'RAW'
 INSERT_DATA_OPTION = 'OVERWRITE'
 NUM_RETRIES = 5
-API_RESPONSE = {'test': 'repsonse'}
+API_RESPONSE = {'test': 'response'}
 
 
 class TestGSheetsHook(unittest.TestCase):
diff --git a/tests/providers/qubole/sensors/test_qubole.py b/tests/providers/qubole/sensors/test_qubole.py
index 470af51..5d3258d 100644
--- a/tests/providers/qubole/sensors/test_qubole.py
+++ b/tests/providers/qubole/sensors/test_qubole.py
@@ -40,7 +40,7 @@ class TestQuboleSensor(unittest.TestCase):
         db.merge_conn(Connection(conn_id=DEFAULT_CONN, conn_type='HTTP'))
 
     @patch('airflow.providers.qubole.sensors.qubole.QuboleFileSensor.poke')
-    def test_file_sensore(self, patched_poke):
+    def test_file_sensor(self, patched_poke):
         patched_poke.return_value = True
         sensor = QuboleFileSensor(
             task_id='test_qubole_file_sensor', data={"files": ["s3://some_bucket/some_file"]}
diff --git a/tests/providers/salesforce/hooks/test_salesforce.py b/tests/providers/salesforce/hooks/test_salesforce.py
index c821057..bf6041b 100644
--- a/tests/providers/salesforce/hooks/test_salesforce.py
+++ b/tests/providers/salesforce/hooks/test_salesforce.py
@@ -190,7 +190,7 @@ class TestSalesforceHook(unittest.TestCase):
         "airflow.providers.salesforce.hooks.salesforce.pd.DataFrame.from_records",
         return_value=pd.DataFrame({"test": [1, 2, 3], "field_1": ["2019-01-01", "2019-01-02", "2019-01-03"]}),
     )
-    def test_obect_to_df_with_timestamp_conversion(self, mock_data_frame, mock_describe_object):
+    def test_object_to_df_with_timestamp_conversion(self, mock_data_frame, mock_describe_object):
         obj_name = "obj_name"
 
         data_frame = self.salesforce_hook.object_to_df(
diff --git a/tests/providers/sendgrid/utils/test_emailer.py b/tests/providers/sendgrid/utils/test_emailer.py
index cb6232c..a2b0f28 100644
--- a/tests/providers/sendgrid/utils/test_emailer.py
+++ b/tests/providers/sendgrid/utils/test_emailer.py
@@ -29,7 +29,7 @@ from airflow.providers.sendgrid.utils.emailer import send_email
 class TestSendEmailSendGrid(unittest.TestCase):
     # Unit test for sendgrid.send_email()
     def setUp(self):
-        self.recepients = ['foo@foo.com', 'bar@bar.com']
+        self.recipients = ['foo@foo.com', 'bar@bar.com']
         self.subject = 'sendgrid-send-email unit test'
         self.html_content = '<b>Foo</b> bar'
         self.carbon_copy = ['foo-cc@foo.com', 'bar-cc@bar.com']
@@ -88,7 +88,7 @@ class TestSendEmailSendGrid(unittest.TestCase):
             )
 
             send_email(
-                self.recepients,
+                self.recipients,
                 self.subject,
                 self.html_content,
                 cc=self.carbon_copy,
@@ -102,7 +102,7 @@ class TestSendEmailSendGrid(unittest.TestCase):
     @mock.patch('airflow.providers.sendgrid.utils.emailer._post_sendgrid_mail')
     def test_send_email_sendgrid_correct_email_extras(self, mock_post):
         send_email(
-            self.recepients,
+            self.recipients,
             self.subject,
             self.html_content,
             cc=self.carbon_copy,
@@ -116,7 +116,7 @@ class TestSendEmailSendGrid(unittest.TestCase):
     @mock.patch('airflow.providers.sendgrid.utils.emailer._post_sendgrid_mail')
     def test_send_email_sendgrid_sender(self, mock_post):
         send_email(
-            self.recepients,
+            self.recipients,
             self.subject,
             self.html_content,
             cc=self.carbon_copy,
diff --git a/tests/providers/snowflake/transfers/test_snowflake_to_slack.py b/tests/providers/snowflake/transfers/test_snowflake_to_slack.py
index dea8530..e1aecf1 100644
--- a/tests/providers/snowflake/transfers/test_snowflake_to_slack.py
+++ b/tests/providers/snowflake/transfers/test_snowflake_to_slack.py
@@ -69,7 +69,7 @@ class TestSnowflakeToSlackOperator(unittest.TestCase):
             warehouse='test_warehouse',
         )
 
-        # Test that the get_pandas_df method is executed on the Snowflake hook with the prendered sql and
+        # Test that the get_pandas_df method is executed on the Snowflake hook with the pre-rendered sql and
         # correct params
         snowflake_hook.get_pandas_df.assert_called_once_with('sql 2017-01-01', parameters=['1', '2', '3'])
 
diff --git a/tests/sensors/test_smart_sensor_operator.py b/tests/sensors/test_smart_sensor_operator.py
index bec165b..9ea0c00 100644
--- a/tests/sensors/test_smart_sensor_operator.py
+++ b/tests/sensors/test_smart_sensor_operator.py
@@ -67,8 +67,8 @@ class DummySensor(BaseSensorOperator):
 
 class SmartSensorTest(unittest.TestCase):
     def setUp(self):
-        os.environ['AIRFLOW__SMART_SENSER__USE_SMART_SENSOR'] = 'true'
-        os.environ['AIRFLOW__SMART_SENSER__SENSORS_ENABLED'] = 'DummySmartSensor'
+        os.environ['AIRFLOW__SMART_SENSOR__USE_SMART_SENSOR'] = 'true'
+        os.environ['AIRFLOW__SMART_SENSOR__SENSORS_ENABLED'] = 'DummySmartSensor'
 
         args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
         self.dag = DAG(TEST_DAG_ID, default_args=args)
@@ -88,8 +88,8 @@ class SmartSensorTest(unittest.TestCase):
         session.query(SensorInstance).delete()
         session.commit()
 
-        os.environ.pop('AIRFLOW__SMART_SENSER__USE_SMART_SENSOR')
-        os.environ.pop('AIRFLOW__SMART_SENSER__SENSORS_ENABLED')
+        os.environ.pop('AIRFLOW__SMART_SENSOR__USE_SMART_SENSOR')
+        os.environ.pop('AIRFLOW__SMART_SENSOR__SENSORS_ENABLED')
 
     def _make_dag_run(self):
         return self.dag.create_dagrun(
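As background for the hunk above: Airflow turns environment variables of the form AIRFLOW__{SECTION}__{KEY} into config overrides, so the misspelled SMART_SENSER variables targeted a section Airflow never reads, which is why this spelling fix matters in the tests. A minimal sketch of the corrected mapping:

    import os

    # Set before importing airflow so the override is picked up.
    os.environ["AIRFLOW__SMART_SENSOR__USE_SMART_SENSOR"] = "true"

    from airflow.configuration import conf

    # The env var above maps to section "smart_sensor", key "use_smart_sensor".
    print(conf.getboolean("smart_sensor", "use_smart_sensor"))  # True
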
diff --git a/tests/test_utils/perf/sql_queries.py b/tests/test_utils/perf/sql_queries.py
index a8e57aa..a142b09 100644
--- a/tests/test_utils/perf/sql_queries.py
+++ b/tests/test_utils/perf/sql_queries.py
@@ -106,7 +106,7 @@ class Query(NamedTuple):
 
 def reset_db():
     """
-    Wrapper function that calls the airflows resetdb function.
+    Wrapper function that calls the airflow resetdb function.
     """
     from airflow.utils.db import resetdb
 
diff --git a/tests/ti_deps/deps/test_runnable_exec_date_dep.py b/tests/ti_deps/deps/test_runnable_exec_date_dep.py
index b4f6c4b..d81e0aa 100644
--- a/tests/ti_deps/deps/test_runnable_exec_date_dep.py
+++ b/tests/ti_deps/deps/test_runnable_exec_date_dep.py
@@ -89,7 +89,7 @@ class TestRunnableExecDateDep(unittest.TestCase):
 
     def test_exec_date_after_task_end_date(self):
         """
-        If the task instance execution date is after the tasks's end date
+        If the task instance execution date is after the tasks end date
         this dep should fail
         """
         ti = self._get_task_instance(