Posted to commits@airflow.apache.org by GitBox <gi...@apache.org> on 2018/08/12 04:11:21 UTC

[GitHub] feng-tao closed pull request #3732: [AIRFLOW-2889] Fix typos detected by github.com/client9/misspell

URL: https://github.com/apache/incubator-airflow/pull/3732
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:


diff --git a/airflow/contrib/example_dags/example_databricks_operator.py b/airflow/contrib/example_dags/example_databricks_operator.py
index bc827d465b..79f947ba1c 100644
--- a/airflow/contrib/example_dags/example_databricks_operator.py
+++ b/airflow/contrib/example_dags/example_databricks_operator.py
@@ -32,7 +32,7 @@
 # the spark jar task will NOT run until the notebook task completes
 # successfully.
 #
-# The definition of a succesful run is if the run has a result_state of "SUCCESS".
+# The definition of a successful run is if the run has a result_state of "SUCCESS".
 # For more information about the state of a run refer to
 # https://docs.databricks.com/api/latest/jobs.html#runstate
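For context: the comment fixed here documents run ordering in the example DAG. The
spark jar task is downstream of the notebook task, and with Airflow's default
trigger rule (all_success) it only runs once the notebook task succeeds. A minimal
sketch of that dependency shape, assuming an Airflow 1.x environment and using
stand-in dummy tasks rather than the example's Databricks operators:

    from datetime import datetime
    from airflow import DAG
    from airflow.operators.dummy_operator import DummyOperator

    # Stand-ins for the notebook and spark-jar tasks; with the default
    # trigger rule (all_success), downstream runs only after upstream succeeds.
    dag = DAG('ordering_sketch', start_date=datetime(2018, 1, 1),
              schedule_interval=None)
    notebook_task = DummyOperator(task_id='notebook_task', dag=dag)
    spark_jar_task = DummyOperator(task_id='spark_jar_task', dag=dag)
    notebook_task >> spark_jar_task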
 
diff --git a/airflow/contrib/hooks/azure_fileshare_hook.py b/airflow/contrib/hooks/azure_fileshare_hook.py
index edabc17293..d4066ee549 100644
--- a/airflow/contrib/hooks/azure_fileshare_hook.py
+++ b/airflow/contrib/hooks/azure_fileshare_hook.py
@@ -100,7 +100,7 @@ def list_directories_and_files(self, share_name, directory_name=None, **kwargs):
 
     def create_directory(self, share_name, directory_name, **kwargs):
         """
-        Create a new direcotry on a Azure File Share.
+        Create a new directory on a Azure File Share.
 
         :param share_name: Name of the share.
         :type share_name: str
diff --git a/airflow/contrib/hooks/bigquery_hook.py b/airflow/contrib/hooks/bigquery_hook.py
index 2a94580f50..e4c0653bfe 100644
--- a/airflow/contrib/hooks/bigquery_hook.py
+++ b/airflow/contrib/hooks/bigquery_hook.py
@@ -627,7 +627,7 @@ def run_query(self,
 
         if query_params:
             if self.use_legacy_sql:
-                raise ValueError("Query paramaters are not allowed when using "
+                raise ValueError("Query parameters are not allowed when using "
                                  "legacy SQL")
             else:
                 configuration['query']['queryParameters'] = query_params
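For context: the guard around the fixed message exists because BigQuery only
supports query parameters with standard SQL, so run_query refuses them under
legacy SQL. A self-contained sketch of the same check (the function name is
illustrative, not the hook's API):

    # Illustrative guard: BigQuery query parameters require standard SQL.
    def build_query_config(sql, query_params=None, use_legacy_sql=True):
        configuration = {'query': {'query': sql, 'useLegacySql': use_legacy_sql}}
        if query_params:
            if use_legacy_sql:
                raise ValueError("Query parameters are not allowed when using "
                                 "legacy SQL")
            configuration['query']['queryParameters'] = query_params
        return configuration

    # A queryParameters entry follows the BigQuery REST shape, e.g.:
    # [{"name": "ds", "parameterType": {"type": "STRING"},
    #   "parameterValue": {"value": "2018-08-12"}}]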
diff --git a/airflow/contrib/hooks/emr_hook.py b/airflow/contrib/hooks/emr_hook.py
index 6cd92c6d85..d116f2275f 100644
--- a/airflow/contrib/hooks/emr_hook.py
+++ b/airflow/contrib/hooks/emr_hook.py
@@ -23,7 +23,7 @@
 
 class EmrHook(AwsHook):
     """
-    Interact with AWS EMR. emr_conn_id is only neccessary for using the
+    Interact with AWS EMR. emr_conn_id is only necessary for using the
     create_job_flow method.
     """
 
diff --git a/airflow/contrib/hooks/gcp_dataproc_hook.py b/airflow/contrib/hooks/gcp_dataproc_hook.py
index 8e4f32b137..57c48bde59 100644
--- a/airflow/contrib/hooks/gcp_dataproc_hook.py
+++ b/airflow/contrib/hooks/gcp_dataproc_hook.py
@@ -235,6 +235,6 @@ def wait(self, operation):
     DataProcHook,
     "await",
     deprecation.deprecated(
-        DataProcHook.wait, "renamed to 'wait' for Python3.7 compatability"
+        DataProcHook.wait, "renamed to 'wait' for Python3.7 compatibility"
     ),
 )
diff --git a/airflow/contrib/hooks/qubole_hook.py b/airflow/contrib/hooks/qubole_hook.py
index 5be5923e7b..3df77d3a1f 100755
--- a/airflow/contrib/hooks/qubole_hook.py
+++ b/airflow/contrib/hooks/qubole_hook.py
@@ -125,7 +125,7 @@ def execute(self, context):
 
     def kill(self, ti):
         """
-        Kill (cancel) a Qubole commmand
+        Kill (cancel) a Qubole command
         :param ti: Task Instance of the dag, used to determine the Quboles command id
         :return: response from Qubole
         """
diff --git a/airflow/contrib/hooks/salesforce_hook.py b/airflow/contrib/hooks/salesforce_hook.py
index 24b67f49fc..efc819e543 100644
--- a/airflow/contrib/hooks/salesforce_hook.py
+++ b/airflow/contrib/hooks/salesforce_hook.py
@@ -53,14 +53,14 @@ def __init__(
 
         :param conn_id:     the name of the connection that has the parameters
                             we need to connect to Salesforce.
-                            The conenction shoud be type `http` and include a
+                            The connection shoud be type `http` and include a
                             user's security token in the `Extras` field.
         .. note::
             For the HTTP connection type, you can include a
             JSON structure in the `Extras` field.
             We need a user's security token to connect to Salesforce.
             So we define it in the `Extras` field as:
-                `{"security_token":"YOUR_SECRUITY_TOKEN"}`
+                `{"security_token":"YOUR_SECURITY_TOKEN"}`
         """
         self.conn_id = conn_id
         self._args = args
diff --git a/airflow/contrib/operators/gcs_to_bq.py b/airflow/contrib/operators/gcs_to_bq.py
index 533cf01de5..fb27e8f205 100644
--- a/airflow/contrib/operators/gcs_to_bq.py
+++ b/airflow/contrib/operators/gcs_to_bq.py
@@ -86,7 +86,7 @@ class GoogleCloudStorageToBigQueryOperator(BaseOperator):
         for other formats.
     :type allow_jagged_rows: bool
     :param max_id_key: If set, the name of a column in the BigQuery table
-        that's to be loaded. Thsi will be used to select the MAX value from
+        that's to be loaded. This will be used to select the MAX value from
         BigQuery after the load occurs. The results will be returned by the
         execute() command, which in turn gets stored in XCom for future
         operators to use. This can be helpful with incremental loads--during
diff --git a/airflow/contrib/operators/mlengine_operator_utils.py b/airflow/contrib/operators/mlengine_operator_utils.py
index 7ce784ebb4..60001daea4 100644
--- a/airflow/contrib/operators/mlengine_operator_utils.py
+++ b/airflow/contrib/operators/mlengine_operator_utils.py
@@ -160,7 +160,7 @@ def validate_err_and_count(summary):
         then the `dag`'s `default_args['model_name']` will be used.
     :type model_name: string
 
-    :param version_name: Used to indicate a model version to use for prediciton,
+    :param version_name: Used to indicate a model version to use for prediction,
         in combination with model_name. Cannot be used together with model_uri.
         See MLEngineBatchPredictionOperator for more detail. If None, then the
         `dag`'s `default_args['version_name']` will be used.
diff --git a/airflow/contrib/operators/qubole_check_operator.py b/airflow/contrib/operators/qubole_check_operator.py
index 0e8d75e167..235af08ca7 100644
--- a/airflow/contrib/operators/qubole_check_operator.py
+++ b/airflow/contrib/operators/qubole_check_operator.py
@@ -28,7 +28,7 @@ class QuboleCheckOperator(CheckOperator, QuboleOperator):
     """
     Performs checks against Qubole Commands. ``QuboleCheckOperator`` expects
     a command that will be executed on QDS.
-    By default, each value on first row of the result of this Qubole Commmand
+    By default, each value on first row of the result of this Qubole Command
     is evaluated using python ``bool`` casting. If any of the
     values return ``False``, the check is failed and errors out.
 
diff --git a/airflow/contrib/plugins/metastore_browser/templates/metastore_browser/table.html b/airflow/contrib/plugins/metastore_browser/templates/metastore_browser/table.html
index 3a9d7ca671..cbb7acff6d 100644
--- a/airflow/contrib/plugins/metastore_browser/templates/metastore_browser/table.html
+++ b/airflow/contrib/plugins/metastore_browser/templates/metastore_browser/table.html
@@ -28,7 +28,7 @@ <h4>
     <li role="presentation" class="active"><a href="#home" aria-controls="fields" role="tab" data-toggle="tab">Fields</a></li>
     <li role="presentation"><a href="#data" aria-controls="data" role="tab" data-toggle="tab">Sample Data</a></li>
     <li role="presentation"><a href="#partitions" aria-controls="partitions" role="tab" data-toggle="tab">Partitions</a></li>
-    <li role="presentation"><a href="#attributes" aria-controls="attributes" role="tab" data-toggle="tab">Atributes</a></li>
+    <li role="presentation"><a href="#attributes" aria-controls="attributes" role="tab" data-toggle="tab">Attributes</a></li>
     <li role="presentation"><a href="#parameters" aria-controls="parameters" role="tab" data-toggle="tab">Parameters</a></li>
     <li role="presentation"><a href="#ddl" aria-controls="ddl" role="tab" data-toggle="tab">DDL</a></li>
 </ul>
diff --git a/airflow/hooks/mysql_hook.py b/airflow/hooks/mysql_hook.py
index c02c0f43b5..51b53c1f8f 100644
--- a/airflow/hooks/mysql_hook.py
+++ b/airflow/hooks/mysql_hook.py
@@ -121,7 +121,7 @@ def bulk_dump(self, table, tmp_file):
     def _serialize_cell(cell, conn):
         """
         MySQLdb converts an argument to a literal
-        when passing those seperately to execute. Hence, this method does nothing.
+        when passing those separately to execute. Hence, this method does nothing.
 
         :param cell: The cell to insert into the table
         :type cell: object
diff --git a/airflow/models.py b/airflow/models.py
index c52353b1fc..55badf4828 100755
--- a/airflow/models.py
+++ b/airflow/models.py
@@ -942,7 +942,7 @@ def init_on_load(self):
     @property
     def try_number(self):
         """
-        Return the try number that this task number will be when it is acutally
+        Return the try number that this task number will be when it is actually
         run.
 
         If the TI is currently running, this will match the column in the
diff --git a/airflow/operators/hive_to_druid.py b/airflow/operators/hive_to_druid.py
index 859515db51..8d5ed0361a 100644
--- a/airflow/operators/hive_to_druid.py
+++ b/airflow/operators/hive_to_druid.py
@@ -164,7 +164,7 @@ def construct_ingest_query(self, static_path, columns):
         :type columns: list
         """
 
-        # backward compatibilty for num_shards,
+        # backward compatibility for num_shards,
         # but target_partition_size is the default setting
         # and overwrites the num_shards
         num_shards = self.num_shards
diff --git a/airflow/sensors/hdfs_sensor.py b/airflow/sensors/hdfs_sensor.py
index d05adef71c..4d95556f47 100644
--- a/airflow/sensors/hdfs_sensor.py
+++ b/airflow/sensors/hdfs_sensor.py
@@ -88,12 +88,12 @@ def filter_for_ignored_ext(result, ignored_ext, ignore_copying):
         if ignore_copying:
             log = LoggingMixin().log
             regex_builder = "^.*\.(%s$)$" % '$|'.join(ignored_ext)
-            ignored_extentions_regex = re.compile(regex_builder)
+            ignored_extensions_regex = re.compile(regex_builder)
             log.debug(
                 'Filtering result for ignored extensions: %s in files %s',
-                ignored_extentions_regex.pattern, map(lambda x: x['path'], result)
+                ignored_extensions_regex.pattern, map(lambda x: x['path'], result)
             )
-            result = [x for x in result if not ignored_extentions_regex.match(x['path'])]
+            result = [x for x in result if not ignored_extensions_regex.match(x['path'])]
             log.debug('HdfsSensor.poke: after ext filter result is %s', result)
         return result
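For context: the renamed variable builds a regex from the list of ignored
extensions and drops matching paths from the HDFS listing. The same technique in
isolation (the file list and extensions are made-up sample data):

    import re

    ignored_ext = ['_COPYING_', 'tmp']
    result = [{'path': '/data/part-0000'},
              {'path': '/data/part-0000._COPYING_'}]

    # Builds a pattern like ^.*\.(_COPYING_$|tmp$)$ and keeps only the
    # paths whose extension is not in the ignore list.
    regex_builder = r"^.*\.(%s$)$" % '$|'.join(ignored_ext)
    ignored_extensions_regex = re.compile(regex_builder)
    result = [x for x in result if not ignored_extensions_regex.match(x['path'])]
    # result -> [{'path': '/data/part-0000'}]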
 
diff --git a/dev/airflow-pr b/dev/airflow-pr
index 28fc300939..c7da677a05 100755
--- a/dev/airflow-pr
+++ b/dev/airflow-pr
@@ -714,8 +714,8 @@ def standardize_jira_ref(text, only_jira=False):
     '[AIRFLOW-5954][MLLIB] Top by key'
     >>> standardize_jira_ref("[AIRFLOW-979] a LRU scheduler for load balancing in TaskSchedulerImpl")
     '[AIRFLOW-979] a LRU scheduler for load balancing in TaskSchedulerImpl'
-    >>> standardize_jira_ref("AIRFLOW-1094 Support MiMa for reporting binary compatibility accross versions.")
-    '[AIRFLOW-1094] Support MiMa for reporting binary compatibility accross versions.'
+    >>> standardize_jira_ref("AIRFLOW-1094 Support MiMa for reporting binary compatibility across versions.")
+    '[AIRFLOW-1094] Support MiMa for reporting binary compatibility across versions.'
     >>> standardize_jira_ref("[WIP]  [AIRFLOW-1146] Vagrant support for Spark")
     '[AIRFLOW-1146][WIP] Vagrant support for Spark'
     >>> standardize_jira_ref("AIRFLOW-1032. If Yarn app fails before registering, app master stays aroun...")
@@ -942,7 +942,7 @@ def cli():
     status = run_cmd('git status --porcelain', echo_cmd=False)
     if status:
         msg = (
-            'You have uncomitted changes in this branch. Running this tool\n'
+            'You have uncommitted changes in this branch. Running this tool\n'
             'will delete them permanently. Continue?')
         if click.confirm(click.style(msg, fg='red', bold=True)):
             run_cmd('git reset --hard', echo_cmd=False)
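For context: `git status --porcelain` prints one line per modified or untracked
file, so any output at all means the working tree has uncommitted changes, which
is what the fixed prompt warns about before the hard reset. A standalone sketch of
the check, without the script's run_cmd/click helpers:

    import subprocess

    # Any porcelain output means uncommitted or untracked work is present.
    status = subprocess.check_output(
        ['git', 'status', '--porcelain']).decode().strip()
    if status:
        print('You have uncommitted changes in this branch. Running this tool\n'
              'will delete them permanently. Continue?')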
diff --git a/docs/howto/write-logs.rst b/docs/howto/write-logs.rst
index a29f6069ca..adf146ab85 100644
--- a/docs/howto/write-logs.rst
+++ b/docs/howto/write-logs.rst
@@ -11,7 +11,7 @@ directory.
 In addition, users can supply a remote location for storing logs and log
 backups in cloud storage.
 
-In the Airflow Web UI, local logs take precedance over remote logs. If local logs
+In the Airflow Web UI, local logs take precedence over remote logs. If local logs
 can not be found or accessed, the remote logs will be displayed. Note that logs
 are only sent to remote storage once a task completes (including failure). In other
 words, remote logs for running tasks are unavailable. Logs are stored in the log
diff --git a/scripts/ci/kubernetes/kube/secrets.yaml b/scripts/ci/kubernetes/kube/secrets.yaml
index 4d533b3663..a93a0103e0 100644
--- a/scripts/ci/kubernetes/kube/secrets.yaml
+++ b/scripts/ci/kubernetes/kube/secrets.yaml
@@ -20,6 +20,6 @@ metadata:
   name: airflow-secrets
 type: Opaque
 data:
-  # The sql_alchemy_conn value is a base64 encoded represenation of this connection string:
+  # The sql_alchemy_conn value is a base64 encoded representation of this connection string:
   # postgresql+psycopg2://root:root@postgres-airflow:5432/airflow
   sql_alchemy_conn: cG9zdGdyZXNxbCtwc3ljb3BnMjovL3Jvb3Q6cm9vdEBwb3N0Z3Jlcy1haXJmbG93OjU0MzIvYWlyZmxvdwo=
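For context: Kubernetes Secret values under `data` are base64-encoded, and the
comment fixed here records what this one encodes. A quick round-trip to verify
(the stored value decodes with a trailing newline, as produced by a plain
`echo | base64`):

    import base64

    stored = ('cG9zdGdyZXNxbCtwc3ljb3BnMjovL3Jvb3Q6cm9vdEBwb3N0Z3Jlcy1haXJmbG93'
              'OjU0MzIvYWlyZmxvdwo=')
    # Prints: postgresql+psycopg2://root:root@postgres-airflow:5432/airflow
    print(base64.b64decode(stored).decode())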
diff --git a/tests/contrib/hooks/test_bigquery_hook.py b/tests/contrib/hooks/test_bigquery_hook.py
index 39435f0c4e..d7e9491b8b 100644
--- a/tests/contrib/hooks/test_bigquery_hook.py
+++ b/tests/contrib/hooks/test_bigquery_hook.py
@@ -52,12 +52,12 @@ def test_throws_exception_with_invalid_query(self):
         self.assertIn('Reason: ', str(context.exception), "")
 
     @unittest.skipIf(not bq_available, 'BQ is not available to run tests')
-    def test_suceeds_with_explicit_legacy_query(self):
+    def test_succeeds_with_explicit_legacy_query(self):
         df = self.instance.get_pandas_df('select 1', dialect='legacy')
         self.assertEqual(df.iloc(0)[0][0], 1)
 
     @unittest.skipIf(not bq_available, 'BQ is not available to run tests')
-    def test_suceeds_with_explicit_std_query(self):
+    def test_succeeds_with_explicit_std_query(self):
         df = self.instance.get_pandas_df(
             'select * except(b) from (select 1 a, 2 b)', dialect='standard')
         self.assertEqual(df.iloc(0)[0][0], 1)
diff --git a/tests/contrib/operators/test_ecs_operator.py b/tests/contrib/operators/test_ecs_operator.py
index b5d3141ec8..43a816da4a 100644
--- a/tests/contrib/operators/test_ecs_operator.py
+++ b/tests/contrib/operators/test_ecs_operator.py
@@ -181,7 +181,7 @@ def test_check_success_tasks_raises_pending(self):
         self.assertIn("'lastStatus': 'PENDING'", str(e.exception))
         client_mock.describe_tasks.assert_called_once_with(cluster='c', tasks=['arn'])
 
-    def test_check_success_tasks_raises_mutliple(self):
+    def test_check_success_tasks_raises_multiple(self):
         client_mock = mock.Mock()
         self.ecs.client = client_mock
         self.ecs.arn = 'arn'
diff --git a/tests/core.py b/tests/core.py
index f5e01b9fef..bef47b6e1d 100644
--- a/tests/core.py
+++ b/tests/core.py
@@ -831,7 +831,7 @@ def test_bad_trigger_rule(self):
         with self.assertRaises(AirflowException):
             DummyOperator(
                 task_id='test_bad_trigger',
-                trigger_rule="non_existant",
+                trigger_rule="non_existent",
                 dag=self.dag)
 
     def test_terminate_task(self):
diff --git a/tests/models.py b/tests/models.py
index 27415c643f..a1fd1e9912 100644
--- a/tests/models.py
+++ b/tests/models.py
@@ -97,7 +97,7 @@ def test_dag_as_context_manager(self):
         """
         Test DAG as a context manager.
         When used as a context manager, Operators are automatically added to
-        the DAG (unless they specifiy a different DAG)
+        the DAG (unless they specify a different DAG)
         """
         dag = DAG(
             'dag',


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services