Posted to commits@airflow.apache.org by po...@apache.org on 2021/05/10 09:42:06 UTC

[airflow] branch master updated: Fix spelling (#15699)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/master by this push:
     new 9c8391a  Fix spelling (#15699)
9c8391a is described below

commit 9c8391a13f6ba29749675cf23f2f874f96b0cc8c
Author: John Bampton <jb...@users.noreply.github.com>
AuthorDate: Mon May 10 19:40:59 2021 +1000

    Fix spelling (#15699)
    
    Fix spelling of directory and PNG file name
---
 BREEZE.rst                                          |   6 +++---
 CI.rst                                              |   2 +-
 INSTALL                                             |   2 +-
 TESTING.rst                                         |   2 +-
 UPDATING.md                                         |   2 +-
 airflow/api/common/experimental/mark_tasks.py       |   2 +-
 airflow/api_connexion/openapi/v1.yaml               |   2 +-
 airflow/jobs/local_task_job.py                      |   2 +-
 airflow/kubernetes/pod_generator.py                 |   2 +-
 airflow/models/dagbag.py                            |   2 +-
 airflow/models/taskinstance.py                      |   2 +-
 airflow/providers/amazon/aws/hooks/glue.py          |   2 +-
 airflow/providers/google/cloud/hooks/compute_ssh.py |   2 +-
 .../google/cloud/transfers/gcs_to_bigquery.py       |   2 +-
 airflow/providers/postgres/provider.yaml            |   2 +-
 airflow/ui/docs/CONTRIBUTING.md                     |   4 ++--
 airflow/ui/tsconfig.json                            |   2 +-
 airflow/utils/dag_processing.py                     |   2 +-
 airflow/utils/decorators.py                         |   2 +-
 airflow/utils/log/secrets_masker.py                 |   4 ++--
 airflow/www/static/js/calendar.js                   |   2 +-
 airflow/www/utils.py                                |   2 +-
 breeze                                              |   2 +-
 dev/provider_packages/README.md                     |   2 +-
 dev/provider_packages/prepare_provider_packages.py  |   2 +-
 .../connections/hiveserver2.rst                     |   2 +-
 .../api-auth-backend/google-openid.rst              |   2 +-
 .../connections/imap.rst                            |   2 +-
 .../howto/create-update-providers.rst               |   4 ++--
 docs/exts/docs_build/lint_checks.py                 |   2 +-
 docs/exts/substitution_extensions.py                |   2 +-
 .../Postgress.png => postgres/Postgres.png}         | Bin
 pylintrc                                            |   2 +-
 pylintrc-tests                                      |   2 +-
 scripts/ci/images/ci_build_dockerhub.sh             |   2 +-
 scripts/ci/libraries/_build_images.sh               |   2 +-
 scripts/ci/libraries/_md5sum.sh                     |   4 ++--
 scripts/ci/libraries/_parameters.sh                 |   2 +-
 scripts/ci/libraries/_push_pull_remove_images.sh    |   2 +-
 scripts/ci/libraries/_testing.sh                    |   2 +-
 scripts/ci/selective_ci_checks.sh                   |   2 +-
 tests/jobs/test_scheduler_job.py                    |   8 ++++----
 tests/providers/amazon/aws/hooks/test_datasync.py   |   2 +-
 .../apache/hive/transfers/test_s3_to_hive.py        |  10 +++++-----
 .../google/cloud/operators/test_dataproc.py         |  20 ++++++++++----------
 tests/providers/http/sensors/test_http.py           |  14 +++++++-------
 .../perf/scheduler_dag_execution_timing.py          |   2 +-
 tests/utils/log/test_secrets_masker.py              |   4 ++--
 tests/utils/test_process_utils.py                   |   4 ++--
 49 files changed, 78 insertions(+), 78 deletions(-)

diff --git a/BREEZE.rst b/BREEZE.rst
index eaf3cf9..2d6a507 100644
--- a/BREEZE.rst
+++ b/BREEZE.rst
@@ -390,7 +390,7 @@ tmux session with four panes:
 
    - one to monitor the scheduler,
    - one for the webserver,
-   - one monitors and compiles Javascript files,
+   - one monitors and compiles JavaScript files,
    - one with a shell for additional commands.
 
 Managing Prod environment (with ``--production-image`` flag):
@@ -1273,7 +1273,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
                  3.6 3.7 3.8
 
   -a, --install-airflow-version INSTALL_AIRFLOW_VERSION
-          Uses differen version of Airflow when building PROD image.
+          Uses different version of Airflow when building PROD image.
 
                  2.0.2 2.0.1 2.0.0 wheel sdist
 
@@ -2503,7 +2503,7 @@ This is the current syntax for  `./breeze <./breeze>`_:
    Install different Airflow version during PROD image build
 
   -a, --install-airflow-version INSTALL_AIRFLOW_VERSION
-          Uses differen version of Airflow when building PROD image.
+          Uses different version of Airflow when building PROD image.
 
                  2.0.2 2.0.1 2.0.0 wheel sdist
 
diff --git a/CI.rst b/CI.rst
index 3d57885..9bee4c3 100644
--- a/CI.rst
+++ b/CI.rst
@@ -106,7 +106,7 @@ Default is the GitHub Package Registry one. The Pull Request forks have no acces
 auto-detect the registry used when they wait for the images.
 
 You can interact with the GitHub Registry images (pull/push) via `Breeze <BREEZE.rst>`_  - you can
-pass ``--github-registry`` flag wih  either ``docker.pkg.github.com`` for GitHub Package Registry or
+pass ``--github-registry`` flag with either ``docker.pkg.github.com`` for GitHub Package Registry or
 ``ghcr.io`` for GitHub Container Registry and pull/push operations will be performed using the chosen
 registry, using appropriate naming convention. This allows building and pushing the images locally by
 committers who have access to push/pull those images.
diff --git a/INSTALL b/INSTALL
index 919c4f5..0137554 100644
--- a/INSTALL
+++ b/INSTALL
@@ -71,7 +71,7 @@ This is useful if you want to develop providers:
 pip install -e . \
   --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt"
 
-You can als skip installing provider packages from PyPI by setting INSTALL_PROVIDERS_FROM_SOURCE to "true".
+You can also skip installing provider packages from PyPI by setting INSTALL_PROVIDERS_FROM_SOURCE to "true".
 In this case Airflow will be installed in non-editable mode with all providers installed from the sources.
 Additionally `provider.yaml` files will also be copied to providers folders which will make the providers
 discoverable by Airflow even if they are not installed from packages in this case.
diff --git a/TESTING.rst b/TESTING.rst
index e306b8d..3e8d752 100644
--- a/TESTING.rst
+++ b/TESTING.rst
@@ -446,7 +446,7 @@ test in parallel. This way we can decrease the time of running all tests in self
 .. note::
 
   We need to split tests manually into separate suites rather than utilise
-  ``pytest-xdist`` or ``pytest-parallel`` which could ba a simpler and much more "native" parallelization
+  ``pytest-xdist`` or ``pytest-parallel`` which could be a simpler and much more "native" parallelization
   mechanism. Unfortunately, we cannot utilise those tools because our tests are not truly ``unit`` tests that
   can run in parallel. A lot of our tests rely on shared databases - and they update/reset/cleanup the
   databases while they are executing. They are also exercising features of the Database such as locking which
diff --git a/UPDATING.md b/UPDATING.md
index 5604977..59aa772 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -1712,7 +1712,7 @@ Rename `sign_in` function to `get_conn`.
 
 #### `airflow.providers.apache.pinot.hooks.pinot.PinotAdminHook.create_segment`
 
-Rename parameter name from ``format`` to ``segment_format`` in PinotAdminHook function create_segment fro pylint compatible
+Rename parameter name from ``format`` to ``segment_format`` in PinotAdminHook function create_segment for pylint compatible
 
 #### `airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_partitions`
 
diff --git a/airflow/api/common/experimental/mark_tasks.py b/airflow/api/common/experimental/mark_tasks.py
index 04215dc..7612270 100644
--- a/airflow/api/common/experimental/mark_tasks.py
+++ b/airflow/api/common/experimental/mark_tasks.py
@@ -161,7 +161,7 @@ def get_all_dag_task_query(dag, session, state, task_ids, confirmed_dates):
 
 def get_subdag_runs(dag, session, state, task_ids, commit, confirmed_dates):
     """Go through subdag operators and create dag runs. We will only work
-    within the scope of the subdag. We wont propagate to the parent dag,
+    within the scope of the subdag. We won't propagate to the parent dag,
     but we will propagate from parent to subdag.
     """
     dags = [dag]
diff --git a/airflow/api_connexion/openapi/v1.yaml b/airflow/api_connexion/openapi/v1.yaml
index 720411c..72c46d6 100644
--- a/airflow/api_connexion/openapi/v1.yaml
+++ b/airflow/api_connexion/openapi/v1.yaml
@@ -1881,7 +1881,7 @@ components:
       description: Log of user operations via CLI or Web UI.
       properties:
         event_log_id:
-          description: The evnet log ID
+          description: The event log ID
           type: integer
           readOnly: true
         when:
diff --git a/airflow/jobs/local_task_job.py b/airflow/jobs/local_task_job.py
index 085ef99..9e68450 100644
--- a/airflow/jobs/local_task_job.py
+++ b/airflow/jobs/local_task_job.py
@@ -151,7 +151,7 @@ class LocalTaskJob(BaseJob):
         self.log.info("Task exited with return code %s", return_code)
         self.task_instance.refresh_from_db()
         # task exited by itself, so we need to check for error file
-        # incase it failed due to runtime exception/error
+        # in case it failed due to runtime exception/error
         error = None
         if self.task_instance.state == State.RUNNING:
             # This is for a case where the task received a sigkill
diff --git a/airflow/kubernetes/pod_generator.py b/airflow/kubernetes/pod_generator.py
index 9c534e8..80602e3 100644
--- a/airflow/kubernetes/pod_generator.py
+++ b/airflow/kubernetes/pod_generator.py
@@ -451,7 +451,7 @@ class PodGenerator:
             return None
 
         safe_uuid = uuid.uuid4().hex  # safe uuid will always be less than 63 chars
-        # Strip trailing '-' and '.' as they cant be followed by '.'
+        # Strip trailing '-' and '.' as they can't be followed by '.'
         trimmed_pod_id = pod_id[:MAX_LABEL_LEN].rstrip('-.')
 
         safe_pod_id = f"{trimmed_pod_id}.{safe_uuid}"
diff --git a/airflow/models/dagbag.py b/airflow/models/dagbag.py
index dbbd429..b78463b 100644
--- a/airflow/models/dagbag.py
+++ b/airflow/models/dagbag.py
@@ -574,7 +574,7 @@ class DagBag(LoggingMixin):
             if dag.is_subdag:
                 return []
             try:
-                # We cant use bulk_write_to_db as we want to capture each error individually
+                # We can't use bulk_write_to_db as we want to capture each error individually
                 dag_was_updated = SerializedDagModel.write_dag(
                     dag,
                     min_update_interval=settings.MIN_SERIALIZED_DAG_UPDATE_INTERVAL,
diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py
index 400e06f..a377964 100644
--- a/airflow/models/taskinstance.py
+++ b/airflow/models/taskinstance.py
@@ -283,7 +283,7 @@ class TaskInstance(Base, LoggingMixin):  # pylint: disable=R0902,R0904
 
     external_executor_id = Column(String(ID_LEN, **COLLATION_ARGS))
     # If adding new fields here then remember to add them to
-    # refresh_from_db() or they wont display in the UI correctly
+    # refresh_from_db() or they won't display in the UI correctly
 
     __table_args__ = (
         Index('ti_dag_state', dag_id, state),
diff --git a/airflow/providers/amazon/aws/hooks/glue.py b/airflow/providers/amazon/aws/hooks/glue.py
index 305acaf..8d1cba2 100644
--- a/airflow/providers/amazon/aws/hooks/glue.py
+++ b/airflow/providers/amazon/aws/hooks/glue.py
@@ -168,7 +168,7 @@ class AwsGlueJobHook(AwsBaseHook):
             return get_job_response['Job']['Name']
 
         except glue_client.exceptions.EntityNotFoundException:
-            self.log.info("Job doesnt exist. Now creating and running AWS Glue Job")
+            self.log.info("Job doesn't exist. Now creating and running AWS Glue Job")
             if self.s3_bucket is None:
                 raise AirflowException('Could not initialize glue job, error: Specify Parameter `s3_bucket`')
             s3_log_path = f's3://{self.s3_bucket}/{self.s3_glue_logs}{self.job_name}'
diff --git a/airflow/providers/google/cloud/hooks/compute_ssh.py b/airflow/providers/google/cloud/hooks/compute_ssh.py
index ccc5388..0458931 100644
--- a/airflow/providers/google/cloud/hooks/compute_ssh.py
+++ b/airflow/providers/google/cloud/hooks/compute_ssh.py
@@ -202,7 +202,7 @@ class ComputeEngineSSHHook(SSHHook):
         if not self.instance_name or not self.zone or not self.project_id:
             raise AirflowException(
                 f"Required parameters are missing: {missing_fields}. These parameters be passed either as "
-                "keyword parameter or as extra field in Airfow connection definition. Both are not set!"
+                "keyword parameter or as extra field in Airflow connection definition. Both are not set!"
             )
 
         self.log.info(
diff --git a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
index 519fc75..b925110 100644
--- a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
+++ b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
@@ -134,7 +134,7 @@ class GCSToBigQueryOperator(BaseOperator):
     :type cluster_fields: list[str]
     :param autodetect: [Optional] Indicates if we should automatically infer the
         options and schema for CSV and JSON sources. (Default: ``True``).
-        Parameter must be setted to True if 'schema_fields' and 'schema_object' are undefined.
+        Parameter must be set to True if 'schema_fields' and 'schema_object' are undefined.
         It is suggested to set to True if table are create outside of Airflow.
     :type autodetect: bool
     :param encryption_configuration: [Optional] Custom encryption configuration (e.g., Cloud KMS keys).
diff --git a/airflow/providers/postgres/provider.yaml b/airflow/providers/postgres/provider.yaml
index 0610662..ff0ba9e 100644
--- a/airflow/providers/postgres/provider.yaml
+++ b/airflow/providers/postgres/provider.yaml
@@ -31,7 +31,7 @@ integrations:
     external-doc-url: https://www.postgresql.org/
     how-to-guide:
       - /docs/apache-airflow-providers-postgres/operators/postgres_operator_howto_guide.rst
-    logo: /integration-logos/postgress/Postgress.png
+    logo: /integration-logos/postgres/Postgres.png
     tags: [software]
 
 operators:
diff --git a/airflow/ui/docs/CONTRIBUTING.md b/airflow/ui/docs/CONTRIBUTING.md
index 549d33a..2146992 100644
--- a/airflow/ui/docs/CONTRIBUTING.md
+++ b/airflow/ui/docs/CONTRIBUTING.md
@@ -23,7 +23,7 @@
 
 If you're new to modern frontend development or parts of our stack, you may want to check out these resources to understand our codebase:
 
-- Typescript is an extension of javascript to add type-checking to our app. Files ending in `.ts` or `.tsx` will be type-checked. Check out the [handbook](https://www.typescriptlang.org/docs/handbook/typescript-in-5-minutes-func.html) for an introduction or feel free to keep this [cheatsheet](https://github.com/typescript-cheatsheets/react) open while developing.
+- TypeScript is an extension of javascript to add type-checking to our app. Files ending in `.ts` or `.tsx` will be type-checked. Check out the [handbook](https://www.typescriptlang.org/docs/handbook/typescript-in-5-minutes-func.html) for an introduction or feel free to keep this [cheatsheet](https://github.com/typescript-cheatsheets/react) open while developing.
 
 - React powers our entire app so it would be valuable to learn JSX, the html-in-js templates React utilizes. Files that contain JSX will end in `.tsx` instead of `.ts`. Check out their official [tutorial](https://reactjs.org/tutorial/tutorial.html#overview) for a basic overview.
 
@@ -42,7 +42,7 @@ the more confidence they can give you." Keep their [cheatsheet](https://testing-
 - `.neutrinorc.js` is the main config file. Although some custom typescript or linting may need to be changed in `tsconfig.json` or `.eslintrc.js`, respectively
 - `src/components` are React components that will be shared across the app
 - `src/views` are React components that are specific to a certain url route
-- `src/interfaces` are custom-defined Typescript types/interfaces
+- `src/interfaces` are custom-defined TypeScript types/interfaces
 - `src/utils` contains various helper functions that are shared throughout the app
 - `src/auth` has the Context for authentication
 - `src/api` contains all of the actual API requests as custom hooks around react-query
diff --git a/airflow/ui/tsconfig.json b/airflow/ui/tsconfig.json
index 85085f2..eaa2d90 100644
--- a/airflow/ui/tsconfig.json
+++ b/airflow/ui/tsconfig.json
@@ -1,5 +1,5 @@
 /*
-*  Typescript config
+*  TypeScript config
 */
 {
   "compilerOptions": {
diff --git a/airflow/utils/dag_processing.py b/airflow/utils/dag_processing.py
index 8c5e8b5..4b85234 100644
--- a/airflow/utils/dag_processing.py
+++ b/airflow/utils/dag_processing.py
@@ -719,7 +719,7 @@ class DagFileProcessorManager(LoggingMixin):  # pylint: disable=too-many-instanc
                 # "almost never happen" since the DagParsingStat object is
                 # small, and in async mode this stat is not actually _required_
                 # for normal operation (It only drives "max runs")
-                self.log.debug("BlockingIOError recived trying to send DagParsingStat, ignoring")
+                self.log.debug("BlockingIOError received trying to send DagParsingStat, ignoring")
 
             if max_runs_reached:
                 self.log.info(
diff --git a/airflow/utils/decorators.py b/airflow/utils/decorators.py
index 439ff49..60f033c 100644
--- a/airflow/utils/decorators.py
+++ b/airflow/utils/decorators.py
@@ -44,7 +44,7 @@ def apply_defaults(func: T) -> T:
         stacklevel=3,
     )
 
-    # Make it still be a wraper to keep the previous behaviour of an extra stack frame
+    # Make it still be a wrapper to keep the previous behaviour of an extra stack frame
     @wraps(func)
     def wrapper(*args, **kwargs):
         return func(*args, **kwargs)
diff --git a/airflow/utils/log/secrets_masker.py b/airflow/utils/log/secrets_masker.py
index 5d27969..42e0e55 100644
--- a/airflow/utils/log/secrets_masker.py
+++ b/airflow/utils/log/secrets_masker.py
@@ -181,7 +181,7 @@ class SecretsMasker(logging.Filter):
         """
         Redact an any secrets found in ``item``, if it is a string.
 
-        If ``name`` is given, and it's a "sensitve" name (see
+        If ``name`` is given, and it's a "sensitive" name (see
         :func:`should_hide_value_for_key`) then all string values in the item
         is redacted.
 
@@ -195,7 +195,7 @@ class SecretsMasker(logging.Filter):
             if self.replacer:
                 # We can't replace specific values, but the key-based redacting
                 # can still happen, so we can't short-circuit, we need to walk
-                # the strucutre.
+                # the structure.
                 return self.replacer.sub('***', item)
             return item
         elif isinstance(item, (tuple, set)):
diff --git a/airflow/www/static/js/calendar.js b/airflow/www/static/js/calendar.js
index 259db89..cac968a 100644
--- a/airflow/www/static/js/calendar.js
+++ b/airflow/www/static/js/calendar.js
@@ -294,7 +294,7 @@ document.addEventListener('DOMContentLoaded', () => {
       })
       .on('click', (data) => {
         window.location.href = getTreeViewURL(
-          // add 1 day and substract 1 ms to not show any run from the next day.
+          // add 1 day and subtract 1 ms to not show any run from the next day.
           toMoment(data.year, data.month, data.day).add(1, 'day').subtract(1, 'ms'),
         );
       })
diff --git a/airflow/www/utils.py b/airflow/www/utils.py
index d4fa8cd..b0c93ba 100644
--- a/airflow/www/utils.py
+++ b/airflow/www/utils.py
@@ -461,7 +461,7 @@ class CustomSQLAInterface(SQLAInterface):
     filter_converter_class = UtcAwareFilterConverter
 
 
-# This class is used directly (i.e. we cant tell Fab to use a different
+# This class is used directly (i.e. we can't tell Fab to use a different
 # subclass) so we have no other option than to edit the conversion table in
 # place
 FieldConverter.conversion_table = (
diff --git a/breeze b/breeze
index 705677e..55a92c9 100755
--- a/breeze
+++ b/breeze
@@ -2431,7 +2431,7 @@ function breeze::flag_local_file_mounting() {
 function breeze::flag_build_different_airflow_version() {
     echo "
 -a, --install-airflow-version INSTALL_AIRFLOW_VERSION
-        Uses differen version of Airflow when building PROD image.
+        Uses different version of Airflow when building PROD image.
 
 ${FORMATTED_INSTALL_AIRFLOW_VERSIONS}
 
diff --git a/dev/provider_packages/README.md b/dev/provider_packages/README.md
index a8da8ed..61b9b4e 100644
--- a/dev/provider_packages/README.md
+++ b/dev/provider_packages/README.md
@@ -209,7 +209,7 @@ of those  steps automatically, but you can manually run the scripts as follows t
 The commands are best to execute in the Breeze environment as it has all the dependencies installed,
 Examples below describe that. However, for development you might run them in your local development
 environment as it makes it easier to debug. Just make sure you install your development environment
-with 'devel_all' extra (make sure to ue the right python version).
+with 'devel_all' extra (make sure to use the right python version).
 
 Note that it is best to use `INSTALL_PROVIDERS_FROM_SOURCES` set to`true`, to make sure
 that any new added providers are not added as packages (in case they are not yet available in PyPI.
diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py
index 8ff8a06..cd18b2c 100755
--- a/dev/provider_packages/prepare_provider_packages.py
+++ b/dev/provider_packages/prepare_provider_packages.py
@@ -1639,7 +1639,7 @@ def update_setup_files(
 
     :param provider_package_id: id of the package
     :param version_suffix: version suffix corresponding to the version in the code
-    :returns False if the package should be skipped, Tre if everything generated properly
+    :returns False if the package should be skipped, True if everything generated properly
     """
     verify_provider_package(provider_package_id)
     provider_details = get_provider_details(provider_package_id)
diff --git a/docs/apache-airflow-providers-apache-hive/connections/hiveserver2.rst b/docs/apache-airflow-providers-apache-hive/connections/hiveserver2.rst
index e690e9e..c5ae2bf 100644
--- a/docs/apache-airflow-providers-apache-hive/connections/hiveserver2.rst
+++ b/docs/apache-airflow-providers-apache-hive/connections/hiveserver2.rst
@@ -52,7 +52,7 @@ Port (optional)
     Specify your Hive Server2 port number.
 
 Schema (optional)
-    Specify the name fo the database you would like to connect to with Hive Server2.
+    Specify the name for the database you would like to connect to with Hive Server2.
 
 Extra (optional)
     Specify the extra parameters (as json dictionary) that can be used in Hive Server2 connection.
diff --git a/docs/apache-airflow-providers-google/api-auth-backend/google-openid.rst b/docs/apache-airflow-providers-google/api-auth-backend/google-openid.rst
index f1046cc..95f3a4c 100644
--- a/docs/apache-airflow-providers-google/api-auth-backend/google-openid.rst
+++ b/docs/apache-airflow-providers-google/api-auth-backend/google-openid.rst
@@ -57,7 +57,7 @@ look like the following.
 
   .. code-block:: bash
 
-      ENDPOINT_URL="http://locahost:8080/"
+      ENDPOINT_URL="http://localhost:8080/"
 
       AUDIENCE="project-id-random-value.apps.googleusercontent.com"
       ID_TOKEN="$(gcloud auth print-identity-token "--audience=${AUDIENCE}")"
diff --git a/docs/apache-airflow-providers-imap/connections/imap.rst b/docs/apache-airflow-providers-imap/connections/imap.rst
index ad53ee0..bf702a3 100644
--- a/docs/apache-airflow-providers-imap/connections/imap.rst
+++ b/docs/apache-airflow-providers-imap/connections/imap.rst
@@ -43,7 +43,7 @@ Login
     Specify the username used for the IMAP client.
 
 Password
-    Specify the password used fot the IMAP client.
+    Specify the password used for the IMAP client.
 
 Host
     Specify the the IMAP host url.
diff --git a/docs/apache-airflow-providers/howto/create-update-providers.rst b/docs/apache-airflow-providers/howto/create-update-providers.rst
index 47ebb77..91fb74e 100644
--- a/docs/apache-airflow-providers/howto/create-update-providers.rst
+++ b/docs/apache-airflow-providers/howto/create-update-providers.rst
@@ -34,7 +34,7 @@ help you to set up tests and other dependencies.
 
 First, you need to set up your local development environment. See `Contribution Quick Start <https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst>`_
 if you did not set up your local environment yet. We recommend using ``breeze`` to develop locally. This way you
-easily be able to have an environment more similar to the one executed by Github CI workflow.
+easily be able to have an environment more similar to the one executed by GitHub CI workflow.
 
   .. code-block:: bash
 
@@ -55,7 +55,7 @@ Most likely you have developed a version of the provider using some local custom
 transfer this code to the Airflow project. Below is described all the initial code structure that
 the provider may need. Understand that not all providers will need all the components described in this structure.
 If you still have doubts about building your provider, we recommend that you read the initial provider guide and
-open a issue on Github so the community can help you.
+open a issue on GitHub so the community can help you.
 
   .. code-block:: bash
 
diff --git a/docs/exts/docs_build/lint_checks.py b/docs/exts/docs_build/lint_checks.py
index 155b8f5..5fd996d 100644
--- a/docs/exts/docs_build/lint_checks.py
+++ b/docs/exts/docs_build/lint_checks.py
@@ -195,7 +195,7 @@ def _extract_file_content(file_path: str, message: Optional[str], pattern: str,
 
 def filter_file_list_by_pattern(file_paths: Iterable[str], pattern: str) -> List[str]:
     """
-    Filters file list to those tha content matches the pattern
+    Filters file list to those that content matches the pattern
     :param file_paths: file paths to check
     :param pattern: pattern to match
     :return: list of files matching the pattern
diff --git a/docs/exts/substitution_extensions.py b/docs/exts/substitution_extensions.py
index bbfe653..1cf3c6d 100644
--- a/docs/exts/substitution_extensions.py
+++ b/docs/exts/substitution_extensions.py
@@ -51,7 +51,7 @@ class SubstitutionCodeBlock(OriginalCodeBlock):  # type: ignore
 
 
 class SubstitutionCodeBlockTransform(SphinxTransform):
-    """Substitue ``|variables|`` in code and code-block nodes"""
+    """Substitute ``|variables|`` in code and code-block nodes"""
 
     # Run before we highlight the code!
     default_priority = HighlightLanguageTransform.default_priority - 1
diff --git a/docs/integration-logos/postgress/Postgress.png b/docs/integration-logos/postgres/Postgres.png
similarity index 100%
rename from docs/integration-logos/postgress/Postgress.png
rename to docs/integration-logos/postgres/Postgres.png
diff --git a/pylintrc b/pylintrc
index b3be01d..a44a5f9 100644
--- a/pylintrc
+++ b/pylintrc
@@ -67,7 +67,7 @@ confidence=
 # can either give multiple identifiers separated by comma (,) or put this
 # option multiple times (only on the command line, not in the configuration
 # file where it should appear only once). You can also use "--disable=all" to
-# disable everything first and then reenable specific checks. For example, if
+# disable everything first and then re-enable specific checks. For example, if
 # you want to run only the similarities checker, you can use "--disable=all
 # --enable=similarities". If you want to run only the classes checker, but have
 # no Warning level messages displayed, use "--disable=all --enable=classes
diff --git a/pylintrc-tests b/pylintrc-tests
index da6c76b..841fe0b 100644
--- a/pylintrc-tests
+++ b/pylintrc-tests
@@ -67,7 +67,7 @@ confidence=
 # can either give multiple identifiers separated by comma (,) or put this
 # option multiple times (only on the command line, not in the configuration
 # file where it should appear only once). You can also use "--disable=all" to
-# disable everything first and then reenable specific checks. For example, if
+# disable everything first and then re-enable specific checks. For example, if
 # you want to run only the similarities checker, you can use "--disable=all
 # --enable=similarities". If you want to run only the classes checker, but have
 # no Warning level messages displayed, use "--disable=all --enable=classes
diff --git a/scripts/ci/images/ci_build_dockerhub.sh b/scripts/ci/images/ci_build_dockerhub.sh
index 73277a6..12e158b 100755
--- a/scripts/ci/images/ci_build_dockerhub.sh
+++ b/scripts/ci/images/ci_build_dockerhub.sh
@@ -54,7 +54,7 @@ if [[ ! "${DOCKER_TAG}" =~ ^[0-9].* ]]; then
     echo
     # All the packages: Airflow and providers will have a "dev" version suffix in the imaage that
     # is built from non-release tag. If this is not set, then building images from locally build
-    # packages fails, because the packages with non-dev version are skipped (as they are alredy released)
+    # packages fails, because the packages with non-dev version are skipped (as they are already released)
     export VERSION_SUFFIX_FOR_PYPI=".dev0"
     export VERSION_SUFFIX_FOR_SVN=".dev0"
     # Only build and push CI image for the nightly-master, v2-0-test branches
diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh
index ef9de00..51763df 100644
--- a/scripts/ci/libraries/_build_images.sh
+++ b/scripts/ci/libraries/_build_images.sh
@@ -114,7 +114,7 @@ function build_images::confirm_via_terminal() {
     RES=$?
 }
 
-# Confirms if hte image should be rebuild and interactively checks it with the user.
+# Confirms if the image should be rebuild and interactively checks it with the user.
 # In case iit needs to be rebuild. It only ask the user if it determines that the rebuild
 # is needed and that the rebuild is not already forced. It asks the user using available terminals
 # So that the script works also from within pre-commit run via git hooks - where stdin is not
diff --git a/scripts/ci/libraries/_md5sum.sh b/scripts/ci/libraries/_md5sum.sh
index 052e98d..4d4a1e7 100644
--- a/scripts/ci/libraries/_md5sum.sh
+++ b/scripts/ci/libraries/_md5sum.sh
@@ -17,7 +17,7 @@
 # under the License.
 
 #
-# Verifies if stored md5sum of the file changed since the last tme ot was checked
+# Verifies if stored md5sum of the file changed since the last time it was checked
 # The md5sum files are stored in .build directory - you can delete this directory
 # If you want to rebuild everything from the scratch
 #
@@ -55,7 +55,7 @@ function md5sum::calculate_file_md5sum {
 
 #
 # Moves md5sum file from it's temporary location in CACHE_TMP_FILE_DIR to
-# BUILD_CACHE_DIR - thus updating stored MD5 sum fo the file
+# BUILD_CACHE_DIR - thus updating stored MD5 sum for the file
 #
 function md5sum::move_file_md5sum {
     local FILE="${1}"
diff --git a/scripts/ci/libraries/_parameters.sh b/scripts/ci/libraries/_parameters.sh
index 6b43d56..c1ee8ee 100644
--- a/scripts/ci/libraries/_parameters.sh
+++ b/scripts/ci/libraries/_parameters.sh
@@ -31,7 +31,7 @@ function parameters::save_to_file() {
 # parameters:
 # $1 - name of the variable
 # $2 - descriptive name of the parameter
-# $3 - flag used to set te parameter
+# $3 - flag used to set the parameter
 function parameters::check_allowed_param() {
     _VARIABLE_NAME="${1}"
     _VARIABLE_DESCRIPTIVE_NAME="${2}"
diff --git a/scripts/ci/libraries/_push_pull_remove_images.sh b/scripts/ci/libraries/_push_pull_remove_images.sh
index 5b16240..6dea2f7 100644
--- a/scripts/ci/libraries/_push_pull_remove_images.sh
+++ b/scripts/ci/libraries/_push_pull_remove_images.sh
@@ -292,7 +292,7 @@ function push_pull_remove_images::push_prod_images() {
 }
 
 # waits for an image to be available in GitHub Packages. Should be run with `set +e`
-# the buid automatically determines which registry to use based one the images available
+# the build automatically determines which registry to use based one the images available
 function push_pull_remove_images::check_for_image_in_github_packages() {
     local github_repository_lowercase
     github_repository_lowercase="$(echo "${GITHUB_REPOSITORY}" |tr '[:upper:]' '[:lower:]')"
diff --git a/scripts/ci/libraries/_testing.sh b/scripts/ci/libraries/_testing.sh
index 1a05093..11220a8 100644
--- a/scripts/ci/libraries/_testing.sh
+++ b/scripts/ci/libraries/_testing.sh
@@ -54,7 +54,7 @@ function testing::get_maximum_parallel_test_jobs() {
     docker_engine_resources::get_available_cpus_in_docker
     if [[ -n ${RUNS_ON=} && ${RUNS_ON} != *"self-hosted"* ]]; then
         echo
-        echo "${COLOR_YELLOW}This is a Github Public runner - for now we are forcing max parallel Quarantined tests jobs to 1 for those${COLOR_RESET}"
+        echo "${COLOR_YELLOW}This is a GitHub Public runner - for now we are forcing max parallel Quarantined tests jobs to 1 for those${COLOR_RESET}"
         echo
         export MAX_PARALLEL_QUARANTINED_TEST_JOBS="1"
     else
diff --git a/scripts/ci/selective_ci_checks.sh b/scripts/ci/selective_ci_checks.sh
index 00954e8..7e77edd 100755
--- a/scripts/ci/selective_ci_checks.sh
+++ b/scripts/ci/selective_ci_checks.sh
@@ -345,7 +345,7 @@ function check_if_setup_files_changed() {
 
 
 function check_if_javascript_security_scans_should_be_run() {
-    start_end::group_start "Check Javascript security scans"
+    start_end::group_start "Check JavaScript security scans"
     local pattern_array=(
         "^airflow/.*\.[jt]sx?"
         "^airflow/.*\.lock"
diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py
index 016905d..954b395 100644
--- a/tests/jobs/test_scheduler_job.py
+++ b/tests/jobs/test_scheduler_job.py
@@ -2031,7 +2031,7 @@ class TestSchedulerJob(unittest.TestCase):
         self.scheduler_job.executor = mock.MagicMock(slots_available=8)
         self.scheduler_job._run_scheduler_loop = mock.MagicMock(side_effect=Exception("oops"))
         mock_processor_agent.return_value.end.side_effect = Exception("double oops")
-        self.scheduler_job.executor.end = mock.MagicMock(side_effect=Exception("tripple oops"))
+        self.scheduler_job.executor.end = mock.MagicMock(side_effect=Exception("triple oops"))
 
         with self.assertRaises(Exception):
             self.scheduler_job.run()
@@ -3921,7 +3921,7 @@ class TestSchedulerJob(unittest.TestCase):
             schedule_interval='@once',
             max_active_runs=1,
         ) as dag:
-            # Cant use DummyOperator as that goes straight to success
+            # Can't use DummyOperator as that goes straight to success
             task1 = BashOperator(task_id='dummy1', bash_command='true')
 
         session = settings.Session()
@@ -4034,7 +4034,7 @@ class TestSchedulerJob(unittest.TestCase):
             schedule_interval='@once',
             max_active_runs=1,
         ) as dag:
-            # Cant use DummyOperator as that goes straight to success
+            # Can't use DummyOperator as that goes straight to success
             task1 = BashOperator(task_id='dummy1', bash_command='true')
 
         session = settings.Session()
@@ -4081,7 +4081,7 @@ class TestSchedulerJob(unittest.TestCase):
             schedule_interval='@once',
             max_active_runs=1,
         ) as dag:
-            # Cant use DummyOperator as that goes straight to success
+            # Can't use DummyOperator as that goes straight to success
             task1 = BashOperator(task_id='dummy1', bash_command='true')
             task2 = BashOperator(task_id='dummy2', bash_command='true')
 
diff --git a/tests/providers/amazon/aws/hooks/test_datasync.py b/tests/providers/amazon/aws/hooks/test_datasync.py
index eccdd5f..5862844 100644
--- a/tests/providers/amazon/aws/hooks/test_datasync.py
+++ b/tests/providers/amazon/aws/hooks/test_datasync.py
@@ -43,7 +43,7 @@ class TestAwsDataSyncHook(unittest.TestCase):
 # mock_get_conn. We then override it to just return the locally created self.client instead of
 # the one created by the AWS self.hook.
 
-# Unfortunately this means we cant test the get_conn method - which is why we have it in a
+# Unfortunately this means we can't test the get_conn method - which is why we have it in a
 # separate class above
 
 
diff --git a/tests/providers/apache/hive/transfers/test_s3_to_hive.py b/tests/providers/apache/hive/transfers/test_s3_to_hive.py
index f22d186..4ca6f39 100644
--- a/tests/providers/apache/hive/transfers/test_s3_to_hive.py
+++ b/tests/providers/apache/hive/transfers/test_s3_to_hive.py
@@ -162,25 +162,25 @@ class TestS3ToHiveTransfer(unittest.TestCase):
         self.kwargs['delimiter'] = '\t'
         fn_txt = self._get_fn('.txt', True)
         header_list = S3ToHiveOperator(**self.kwargs)._get_top_row_as_list(fn_txt)
-        assert header_list == ['Sno', 'Some,Text'], "Top row from file doesnt matched expected value"
+        assert header_list == ['Sno', 'Some,Text'], "Top row from file doesn't matched expected value"
 
         self.kwargs['delimiter'] = ','
         header_list = S3ToHiveOperator(**self.kwargs)._get_top_row_as_list(fn_txt)
-        assert header_list == ['Sno\tSome', 'Text'], "Top row from file doesnt matched expected value"
+        assert header_list == ['Sno\tSome', 'Text'], "Top row from file doesn't matched expected value"
 
     def test__match_headers(self):
         self.kwargs['field_dict'] = OrderedDict([('Sno', 'BIGINT'), ('Some,Text', 'STRING')])
         assert S3ToHiveOperator(**self.kwargs)._match_headers(
             ['Sno', 'Some,Text']
-        ), "Header row doesnt match expected value"
+        ), "Header row doesn't match expected value"
         # Testing with different column order
         assert not S3ToHiveOperator(**self.kwargs)._match_headers(
             ['Some,Text', 'Sno']
-        ), "Header row doesnt match expected value"
+        ), "Header row doesn't match expected value"
         # Testing with extra column in header
         assert not S3ToHiveOperator(**self.kwargs)._match_headers(
             ['Sno', 'Some,Text', 'ExtraColumn']
-        ), "Header row doesnt match expected value"
+        ), "Header row doesn't match expected value"
 
     def test__delete_top_row_and_compress(self):
         s32hive = S3ToHiveOperator(**self.kwargs)
diff --git a/tests/providers/google/cloud/operators/test_dataproc.py b/tests/providers/google/cloud/operators/test_dataproc.py
index 9a0ef21..764c225 100644
--- a/tests/providers/google/cloud/operators/test_dataproc.py
+++ b/tests/providers/google/cloud/operators/test_dataproc.py
@@ -596,10 +596,10 @@ class TestDataprocClusterCreateOperator(DataprocClusterTestBase):
 
         ti = TaskInstance(task=op, execution_date=DEFAULT_DATE)
 
-        # Assert operator link is empty when no XCom push occured
+        # Assert operator link is empty when no XCom push occurred
         self.assertEqual(op.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name), "")
 
-        # Assert operator link is empty for deserialized task when no XCom push occured
+        # Assert operator link is empty for deserialized task when no XCom push occurred
         self.assertEqual(
             deserialized_task.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name),
             "",
@@ -700,10 +700,10 @@ class TestDataprocClusterScaleOperator(DataprocClusterTestBase):
 
         ti = TaskInstance(task=op, execution_date=DEFAULT_DATE)
 
-        # Assert operator link is empty when no XCom push occured
+        # Assert operator link is empty when no XCom push occurred
         self.assertEqual(op.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name), "")
 
-        # Assert operator link is empty for deserialized task when no XCom push occured
+        # Assert operator link is empty for deserialized task when no XCom push occurred
         self.assertEqual(
             deserialized_task.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name),
             "",
@@ -909,10 +909,10 @@ class TestDataprocSubmitJobOperator(DataprocJobTestBase):
 
         ti = TaskInstance(task=op, execution_date=DEFAULT_DATE)
 
-        # Assert operator link is empty when no XCom push occured
+        # Assert operator link is empty when no XCom push occurred
         self.assertEqual(op.get_extra_links(DEFAULT_DATE, DataprocJobLink.name), "")
 
-        # Assert operator link is empty for deserialized task when no XCom push occured
+        # Assert operator link is empty for deserialized task when no XCom push occurred
         self.assertEqual(deserialized_task.get_extra_links(DEFAULT_DATE, DataprocJobLink.name), "")
 
         ti.xcom_push(key="job_conf", value=DATAPROC_JOB_CONF_EXPECTED)
@@ -1009,10 +1009,10 @@ class TestDataprocUpdateClusterOperator(DataprocClusterTestBase):
 
         ti = TaskInstance(task=op, execution_date=DEFAULT_DATE)
 
-        # Assert operator link is empty when no XCom push occured
+        # Assert operator link is empty when no XCom push occurred
         self.assertEqual(op.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name), "")
 
-        # Assert operator link is empty for deserialized task when no XCom push occured
+        # Assert operator link is empty for deserialized task when no XCom push occurred
         self.assertEqual(
             deserialized_task.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name),
             "",
@@ -1385,10 +1385,10 @@ class TestDataProcSparkOperator(DataprocJobTestBase):
 
         ti = TaskInstance(task=op, execution_date=DEFAULT_DATE)
 
-        # Assert operator link is empty when no XCom push occured
+        # Assert operator link is empty when no XCom push occurred
         self.assertEqual(op.get_extra_links(DEFAULT_DATE, DataprocJobLink.name), "")
 
-        # Assert operator link is empty for deserialized task when no XCom push occured
+        # Assert operator link is empty for deserialized task when no XCom push occurred
         self.assertEqual(deserialized_task.get_extra_links(DEFAULT_DATE, DataprocJobLink.name), "")
 
         ti.xcom_push(key="job_conf", value=DATAPROC_JOB_CONF_EXPECTED)
diff --git a/tests/providers/http/sensors/test_http.py b/tests/providers/http/sensors/test_http.py
index 811a0eb..23ac2fd 100644
--- a/tests/providers/http/sensors/test_http.py
+++ b/tests/providers/http/sensors/test_http.py
@@ -150,7 +150,7 @@ class TestHttpSensor(unittest.TestCase):
         response = requests.Response()
         response.status_code = 404
         response.reason = 'Not Found'
-        response._content = b'This endpoint doesnt exist'
+        response._content = b"This endpoint doesn't exist"
         mock_session_send.return_value = response
 
         task = HttpSensor(
@@ -172,17 +172,17 @@ class TestHttpSensor(unittest.TestCase):
             assert mock_errors.called
             calls = [
                 mock.call('HTTP error: %s', 'Not Found'),
-                mock.call('This endpoint doesnt exist'),
+                mock.call("This endpoint doesn't exist"),
                 mock.call('HTTP error: %s', 'Not Found'),
-                mock.call('This endpoint doesnt exist'),
+                mock.call("This endpoint doesn't exist"),
                 mock.call('HTTP error: %s', 'Not Found'),
-                mock.call('This endpoint doesnt exist'),
+                mock.call("This endpoint doesn't exist"),
                 mock.call('HTTP error: %s', 'Not Found'),
-                mock.call('This endpoint doesnt exist'),
+                mock.call("This endpoint doesn't exist"),
                 mock.call('HTTP error: %s', 'Not Found'),
-                mock.call('This endpoint doesnt exist'),
+                mock.call("This endpoint doesn't exist"),
                 mock.call('HTTP error: %s', 'Not Found'),
-                mock.call('This endpoint doesnt exist'),
+                mock.call("This endpoint doesn't exist"),
             ]
             mock_errors.assert_has_calls(calls)
 
diff --git a/tests/test_utils/perf/scheduler_dag_execution_timing.py b/tests/test_utils/perf/scheduler_dag_execution_timing.py
index 1dea281..680b2ea 100755
--- a/tests/test_utils/perf/scheduler_dag_execution_timing.py
+++ b/tests/test_utils/perf/scheduler_dag_execution_timing.py
@@ -285,7 +285,7 @@ def main(num_runs, repeat, pre_create_dag_runs, executor_class, dag_ids):
 
     times = []
 
-    # Need a lambda to refer to the _latest_ value fo scheduler_job, not just
+    # Need a lambda to refer to the _latest_ value for scheduler_job, not just
     # the initial one
     code_to_test = lambda: scheduler_job.run()  # pylint: disable=unnecessary-lambda
 
diff --git a/tests/utils/log/test_secrets_masker.py b/tests/utils/log/test_secrets_masker.py
index f2a7dec..ba88b87 100644
--- a/tests/utils/log/test_secrets_masker.py
+++ b/tests/utils/log/test_secrets_masker.py
@@ -110,7 +110,7 @@ class TestSecretsMasker:
         (It would likely need to construct a custom traceback that changed the
         source. I have no idead if that is even possible)
 
-        This test illustrates that, but ix marked xfail incase someone wants to
+        This test illustrates that, but ix marked xfail in case someone wants to
         fix this later.
         """
         try:
@@ -163,7 +163,7 @@ class TestSecretsMasker:
             # We don't mask dict _keys_.
             ({"secret", "other"}, None, {"data": {"secret": "secret"}}, {"data": {"secret": "***"}}),
             (
-                # Since this is a sensitve name, all the values should be redacted!
+                # Since this is a sensitive name, all the values should be redacted!
                 {"secret"},
                 "api_key",
                 {"other": "innoent", "nested": ["x", "y"]},
diff --git a/tests/utils/test_process_utils.py b/tests/utils/test_process_utils.py
index 21d6cdd..eabf94f 100644
--- a/tests/utils/test_process_utils.py
+++ b/tests/utils/test_process_utils.py
@@ -90,8 +90,8 @@ class TestReapProcessGroup(unittest.TestCase):
             assert not psutil.pid_exists(child_pid.value)
         finally:
             try:
-                os.kill(parent_pid.value, signal.SIGKILL)  # terminate doesnt work here
-                os.kill(child_pid.value, signal.SIGKILL)  # terminate doesnt work here
+                os.kill(parent_pid.value, signal.SIGKILL)  # terminate doesn't work here
+                os.kill(child_pid.value, signal.SIGKILL)  # terminate doesn't work here
             except OSError:
                 pass