Posted to commits@airflow.apache.org by GitBox <gi...@apache.org> on 2018/12/22 18:07:31 UTC

[GitHub] kaxil closed pull request #4357: [AIRFLOW-3557] Fix various typos

URL: https://github.com/apache/incubator-airflow/pull/4357

This is a pull request merged from a forked repository. Because GitHub
hides the original diff of a foreign (forked) pull request once it is
merged, the diff is reproduced below for the sake of provenance:

diff --git a/CHANGELOG.txt b/CHANGELOG.txt
index abb0563d71..98a1103792 100644
--- a/CHANGELOG.txt
+++ b/CHANGELOG.txt
@@ -24,7 +24,7 @@ Improvements:
 [AIRFLOW-2622] Add "confirm=False" option to SFTPOperator
 [AIRFLOW-2662] support affinity & nodeSelector policies for kubernetes executor/operator
 [AIRFLOW-2709] Improve error handling in Databricks hook
-[AIRFLOW-2723] Update lxml dependancy to >= 4.0.
+[AIRFLOW-2723] Update lxml dependency to >= 4.0.
 [AIRFLOW-2763] No precheck mechanism in place during worker initialisation for the connection to metadata database
 [AIRFLOW-2789] Add ability to create single node cluster to DataprocClusterCreateOperator
 [AIRFLOW-2797] Add ability to create Google Dataproc cluster with custom image
@@ -269,7 +269,7 @@ AIRFLOW 1.10.0, 2018-08-03
 [AIRFLOW-2429] Make Airflow flake8 compliant
 [AIRFLOW-2491] Resolve flask version conflict
 [AIRFLOW-2484] Remove duplicate key in MySQL to GCS Op
-[ARIFLOW-2458] Add cassandra-to-gcs operator
+[AIRFLOW-2458] Add cassandra-to-gcs operator
 [AIRFLOW-2477] Improve time units for task duration and landing times charts for RBAC UI
 [AIRFLOW-2474] Only import snakebite if using py2
 [AIRFLOW-48] Parse connection uri querystring
@@ -1504,7 +1504,7 @@ AIRFLOW 1.8.0, 2017-03-12
 [AIRFLOW-784] Pin funcsigs to 1.0.0
 [AIRFLOW-624] Fix setup.py to not import airflow.version as version
 [AIRFLOW-779] Task should fail with specific message when deleted
-[AIRFLOW-778] Fix completey broken MetastorePartitionSensor
+[AIRFLOW-778] Fix completely broken MetastorePartitionSensor
 [AIRFLOW-739] Set pickle_info log to debug
 [AIRFLOW-771] Make S3 logs append instead of clobber
 [AIRFLOW-773] Fix flaky datetime addition in api test
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 556a5d847b..2a60f1dc3c 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -166,10 +166,10 @@ There are three ways to setup an Apache Airflow development environment.
   tox -e py35-backend_mysql
   ```
 
-  If you wish to run individual tests inside of docker enviroment you can do as follows:
+  If you wish to run individual tests inside of Docker environment you can do as follows:
 
   ```bash
-    # From the container (with your desired enviroment) with druid hook
+    # From the container (with your desired environment) with druid hook
     tox -e py35-backend_mysql -- tests/hooks/test_druid_hook.py
  ```
 
diff --git a/airflow/contrib/hooks/bigquery_hook.py b/airflow/contrib/hooks/bigquery_hook.py
index 5cab013b28..30a16305db 100644
--- a/airflow/contrib/hooks/bigquery_hook.py
+++ b/airflow/contrib/hooks/bigquery_hook.py
@@ -1594,7 +1594,7 @@ def insert_all(self, project_id, dataset_id, table_id,
                 self.log.info('All row(s) inserted successfully: {}:{}.{}'.format(
                     dataset_project_id, dataset_id, table_id))
             else:
-                error_msg = '{} insert error(s) occured: {}:{}.{}. Details: {}'.format(
+                error_msg = '{} insert error(s) occurred: {}:{}.{}. Details: {}'.format(
                     len(resp['insertErrors']),
                     dataset_project_id, dataset_id, table_id, resp['insertErrors'])
                 if fail_on_error:
diff --git a/airflow/contrib/hooks/emr_hook.py b/airflow/contrib/hooks/emr_hook.py
index f9fd3f04de..fcdf4ac848 100644
--- a/airflow/contrib/hooks/emr_hook.py
+++ b/airflow/contrib/hooks/emr_hook.py
@@ -23,7 +23,7 @@
 
 class EmrHook(AwsHook):
     """
-    Interact with AWS EMR. emr_conn_id is only neccessary for using the
+    Interact with AWS EMR. emr_conn_id is only necessary for using the
     create_job_flow method.
     """
 
diff --git a/airflow/executors/celery_executor.py b/airflow/executors/celery_executor.py
index 98ce6efba7..10694ea4b7 100644
--- a/airflow/executors/celery_executor.py
+++ b/airflow/executors/celery_executor.py
@@ -74,7 +74,7 @@ def execute_command(command_to_exec):
 
 class ExceptionWithTraceback(object):
     """
-    Wrapper class used to propogate exceptions to parent processes from subprocesses.
+    Wrapper class used to propagate exceptions to parent processes from subprocesses.
     :param exception: The exception to wrap
     :type exception: Exception
     :param traceback: The stacktrace to wrap
diff --git a/airflow/sensors/base_sensor_operator.py b/airflow/sensors/base_sensor_operator.py
index 1dc59dd230..eaa6306770 100644
--- a/airflow/sensors/base_sensor_operator.py
+++ b/airflow/sensors/base_sensor_operator.py
@@ -48,7 +48,7 @@ class BaseSensorOperator(BaseOperator, SkipMixin):
         When set to ``poke`` the sensor is taking up a worker slot for its
         whole execution time and sleeps between pokes. Use this mode if the
         expected runtime of the sensor is short or if a short poke interval
-        is requried.
+        is required.
         When set to ``reschedule`` the sensor task frees the worker slot when
         the criteria is not yet met and it's rescheduled at a later time. Use
         this mode if the expected time until the criteria is met is. The poke
diff --git a/tests/dags/.gitignore b/tests/dags/.gitignore
index beddda487f..d42b544171 100644
--- a/tests/dags/.gitignore
+++ b/tests/dags/.gitignore
@@ -1,5 +1,5 @@
 # This line is to avoid accidental commits of example dags for integration testing
 # In order to test example dags easily we often create symbolic links in this directory
 # and run the Airflow with AIRFLOW__CORE__UNIT_TEST_MODE=True
-# this line prevents accidental commiting of such symbolic links
+# this line prevents accidental committing of such symbolic links.
 example_*
diff --git a/tests/models.py b/tests/models.py
index 921f8a52cf..2e8a139eb0 100644
--- a/tests/models.py
+++ b/tests/models.py
@@ -1364,7 +1364,7 @@ def tearDownClass(cls):
 
     def test_get_existing_dag(self):
         """
-        test that were're able to parse some example DAGs and retrieve them
+        Test that we're able to parse some example DAGs and retrieve them
         """
         dagbag = models.DagBag(dag_folder=self.empty_dir, include_examples=True)
 
diff --git a/tests/test_jobs.py b/tests/test_jobs.py
index 7ae056512b..75deba44e9 100644
--- a/tests/test_jobs.py
+++ b/tests/test_jobs.py
@@ -196,7 +196,7 @@ def test_backfill_examples(self):
         """
         Test backfilling example dags
 
-        Try to backfill some of the example dags. Be carefull, not all dags are suitable
+        Try to backfill some of the example dags. Be careful, not all dags are suitable
         for doing this. For example, a dag that sleeps forever, or does not have a
         schedule won't work here since you simply can't backfill them.
         """
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services