Posted to commits@airflow.apache.org by bo...@apache.org on 2017/03/28 23:55:13 UTC

incubator-airflow git commit: [AIRFLOW-1043] Fix doc strings of operators

Repository: incubator-airflow
Updated Branches:
  refs/heads/master 6393366a7 -> b55f41f2c


[AIRFLOW-1043] Fix doc strings of operators

Closes #2188 from gtoonstra/feature/AIRFLOW-1043


Project: http://git-wip-us.apache.org/repos/asf/incubator-airflow/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-airflow/commit/b55f41f2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-airflow/tree/b55f41f2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-airflow/diff/b55f41f2

Branch: refs/heads/master
Commit: b55f41f2c22e210d130a0b42586f0385bd5515a7
Parents: 6393366
Author: gtoonstra <gt...@gmail.com>
Authored: Tue Mar 28 16:55:03 2017 -0700
Committer: Bolke de Bruin <bo...@xs4all.nl>
Committed: Tue Mar 28 16:55:03 2017 -0700

----------------------------------------------------------------------
 airflow/contrib/operators/bigquery_operator.py  | 45 +++++-----
 .../operators/bigquery_table_delete_operator.py | 31 ++++---
 .../contrib/operators/bigquery_to_bigquery.py   | 70 ++++++++--------
 airflow/contrib/operators/bigquery_to_gcs.py    | 87 ++++++++++----------
 airflow/contrib/operators/ecs_operator.py       | 10 ++-
 airflow/contrib/operators/file_to_gcs.py        | 27 +++---
 .../contrib/operators/gcs_download_operator.py  | 66 +++++++--------
 airflow/contrib/operators/hipchat_operator.py   |  2 +
 .../contrib/operators/ssh_execute_operator.py   |  2 +-
 airflow/operators/mssql_operator.py             |  4 +-
 airflow/operators/mssql_to_hive.py              |  4 +-
 11 files changed, 167 insertions(+), 181 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/b55f41f2/airflow/contrib/operators/bigquery_operator.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/operators/bigquery_operator.py b/airflow/contrib/operators/bigquery_operator.py
index 9faad8c..2f3abe7 100644
--- a/airflow/contrib/operators/bigquery_operator.py
+++ b/airflow/contrib/operators/bigquery_operator.py
@@ -22,6 +22,26 @@ from airflow.utils.decorators import apply_defaults
 class BigQueryOperator(BaseOperator):
     """
     Executes BigQuery SQL queries in a specific BigQuery database
+
+    :param bql: the SQL code to be executed
+    :type bql: Can receive a str representing a SQL statement,
+        a list of str (SQL statements), or a reference to a template file.
+        Template references are recognized by str ending in '.sql'
+    :param destination_dataset_table: A dotted
+        (<project>.|<project>:)<dataset>.<table> that, if set, will store the results
+        of the query.
+    :type destination_dataset_table: string
+    :param bigquery_conn_id: reference to a specific BigQuery hook.
+    :type bigquery_conn_id: string
+    :param delegate_to: The account to impersonate, if any.
+        For this to work, the service account making the request must have domain-wide
+        delegation enabled.
+    :type delegate_to: string
+    :param udf_config: The User Defined Function configuration for the query.
+        See https://cloud.google.com/bigquery/user-defined-functions for details.
+    :type udf_config: list
+    :param use_legacy_sql: Whether to use legacy SQL (true) or standard SQL (false).
+    :type use_legacy_sql: boolean
     """
     template_fields = ('bql', 'destination_dataset_table')
     template_ext = ('.sql',)
@@ -30,7 +50,7 @@ class BigQueryOperator(BaseOperator):
     @apply_defaults
     def __init__(self,
                  bql,
-                 destination_dataset_table = False,
+                 destination_dataset_table=False,
                  write_disposition='WRITE_EMPTY',
                  allow_large_results=False,
                  bigquery_conn_id='bigquery_default',
@@ -39,29 +59,6 @@ class BigQueryOperator(BaseOperator):
                  use_legacy_sql=True,
                  *args,
                  **kwargs):
-        """
-        Create a new BigQueryOperator.
-
-        :param bql: the sql code to be executed
-        :type bql: Can receive a str representing a sql statement,
-            a list of str (sql statements), or reference to a template file.
-            Template reference are recognized by str ending in '.sql'
-        :param destination_dataset_table: A dotted
-            (<project>.|<project>:)<dataset>.<table> that, if set, will store the results
-            of the query.
-        :type destination_dataset_table: string
-        :param bigquery_conn_id: reference to a specific BigQuery hook.
-        :type bigquery_conn_id: string
-        :param delegate_to: The account to impersonate, if any.
-            For this to work, the service account making the request must have domain-wide
-            delegation enabled.
-        :type delegate_to: string
-        :param udf_config: The User Defined Function configuration for the query.
-            See https://cloud.google.com/bigquery/user-defined-functions for details.
-        :type udf_config: list
-        :param use_legacy_sql: Whether to use legacy SQL (true) or standard SQL (false).
-        :type use_legacy_sql: boolean
-        """
         super(BigQueryOperator, self).__init__(*args, **kwargs)
         self.bql = bql
         self.destination_dataset_table = destination_dataset_table

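For reference, a minimal usage sketch of the BigQueryOperator documented above.
The DAG scaffolding, project/dataset/table names and query are illustrative only;
the 'bigquery_default' connection is assumed to be configured in Airflow.

    from datetime import datetime

    from airflow import DAG
    from airflow.contrib.operators.bigquery_operator import BigQueryOperator

    # Illustrative DAG reused by the sketches further down in this message.
    dag = DAG('bq_docstring_examples',
              start_date=datetime(2017, 3, 1),
              schedule_interval=None)

    run_query = BigQueryOperator(
        task_id='run_query',
        # Legacy SQL table syntax, since use_legacy_sql defaults to True.
        bql='SELECT COUNT(*) FROM [my_project:my_dataset.my_table]',
        destination_dataset_table='my_project:my_dataset.query_results',
        write_disposition='WRITE_TRUNCATE',
        bigquery_conn_id='bigquery_default',
        dag=dag)
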
http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/b55f41f2/airflow/contrib/operators/bigquery_table_delete_operator.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/operators/bigquery_table_delete_operator.py b/airflow/contrib/operators/bigquery_table_delete_operator.py
index b879939..cd0c9dc 100644
--- a/airflow/contrib/operators/bigquery_table_delete_operator.py
+++ b/airflow/contrib/operators/bigquery_table_delete_operator.py
@@ -22,6 +22,20 @@ from airflow.utils.decorators import apply_defaults
 class BigQueryTableDeleteOperator(BaseOperator):
     """
     Deletes BigQuery tables
+
+    :param deletion_dataset_table: A dotted
+        (<project>.|<project>:)<dataset>.<table> that indicates which table
+        will be deleted.
+    :type deletion_dataset_table: string
+    :param bigquery_conn_id: reference to a specific BigQuery hook.
+    :type bigquery_conn_id: string
+    :param delegate_to: The account to impersonate, if any.
+        For this to work, the service account making the request must have domain-wide
+        delegation enabled.
+    :type delegate_to: string
+    :param ignore_if_missing: if True, then return success even if the
+        requested table does not exist.
+    :type ignore_if_missing: boolean
     """
     ui_color = '#ffd1dc'
 
@@ -33,23 +47,6 @@ class BigQueryTableDeleteOperator(BaseOperator):
                  ignore_if_missing=False,
                  *args,
                  **kwargs):
-        """
-        Create a new BigQueryTableDeleteOperator.
-
-        :param deletion_dataset_table: A dotted
-            (<project>.|<project>:)<dataset>.<table> that indicates which table
-            will be deleted.
-        :type deletion_dataset_table: string
-        :param bigquery_conn_id: reference to a specific BigQuery hook.
-        :type bigquery_conn_id: string
-        :param delegate_to: The account to impersonate, if any.
-            For this to work, the service account making the request must have domain-wide
-            delegation enabled.
-        :type delegate_to: string
-        :param ignore_if_missing: if True, then return success even if the
-            requested table does not exist.
-        :type ignore_if_missing: boolean
-        """
         super(BigQueryTableDeleteOperator, self).__init__(*args, **kwargs)
         self.deletion_dataset_table = deletion_dataset_table
         self.bigquery_conn_id = bigquery_conn_id

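A minimal sketch of the BigQueryTableDeleteOperator documented above; the table
name is illustrative and the dag object is the one from the first sketch in this
message.

    from airflow.contrib.operators.bigquery_table_delete_operator import \
        BigQueryTableDeleteOperator

    delete_staging = BigQueryTableDeleteOperator(
        task_id='delete_staging',
        deletion_dataset_table='my_project.my_dataset.staging_table',
        ignore_if_missing=True,  # succeed even if the table is already gone
        bigquery_conn_id='bigquery_default',
        dag=dag)
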
http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/b55f41f2/airflow/contrib/operators/bigquery_to_bigquery.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/operators/bigquery_to_bigquery.py b/airflow/contrib/operators/bigquery_to_bigquery.py
index 01db1aa..6f4843c 100644
--- a/airflow/contrib/operators/bigquery_to_bigquery.py
+++ b/airflow/contrib/operators/bigquery_to_bigquery.py
@@ -21,7 +21,30 @@ from airflow.utils.decorators import apply_defaults
 
 class BigQueryToBigQueryOperator(BaseOperator):
     """
-    Copy a BigQuery table to another BigQuery table.
+    Copies data from one BigQuery table to another. See:
+
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy
+
+    for more details about these parameters.
+
+    :param source_project_dataset_tables: One or more
+        dotted (project:|project.)<dataset>.<table> BigQuery tables to use as the
+        source data. If <project> is not included, project will be the project defined
+        in the connection json. Use a list if there are multiple source tables.
+    :type source_project_dataset_tables: list|string
+    :param destination_project_dataset_table: The destination BigQuery
+        table. Format is: (project:|project.)<dataset>.<table>
+    :type destination_project_dataset_table: string
+    :param write_disposition: The write disposition if the table already exists.
+    :type write_disposition: string
+    :param create_disposition: The create disposition if the table doesn't exist.
+    :type create_disposition: string
+    :param bigquery_conn_id: reference to a specific BigQuery hook.
+    :type bigquery_conn_id: string
+    :param delegate_to: The account to impersonate, if any.
+        For this to work, the service account making the request must have domain-wide
+        delegation enabled.
+    :type delegate_to: string
     """
     template_fields = ('source_project_dataset_tables',
                        'destination_project_dataset_table')
@@ -29,42 +52,15 @@ class BigQueryToBigQueryOperator(BaseOperator):
     ui_color = '#e6f0e4'
 
     @apply_defaults
-    def __init__(
-        self,
-        source_project_dataset_tables,
-        destination_project_dataset_table,
-        write_disposition='WRITE_EMPTY',
-        create_disposition='CREATE_IF_NEEDED',
-        bigquery_conn_id='bigquery_default',
-        delegate_to=None,
-        *args,
-        **kwargs):
-        """
-        Copies data from one BigQuery table to another. See here:
-
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy
-
-        For more details about these parameters.
-
-        :param source_project_dataset_tables: One or more
-            dotted (project:|project.)<dataset>.<table> BigQuery tables to use as the
-            source data. If <project> is not included, project will be the project defined
-            in the connection json. Use a list if there are multiple source tables.
-        :type source_project_dataset_tables: list|string
-        :param destination_project_dataset_table: The destination BigQuery
-            table. Format is: (project:|project.)<dataset>.<table>
-        :type destination_project_dataset_table: string
-        :param write_disposition: The write disposition if the table already exists.
-        :type write_disposition: string
-        :param create_disposition: The create disposition if the table doesn't exist.
-        :type create_disposition: string
-        :param bigquery_conn_id: reference to a specific BigQuery hook.
-        :type bigquery_conn_id: string
-        :param delegate_to: The account to impersonate, if any.
-            For this to work, the service account making the request must have domain-wide
-            delegation enabled.
-        :type delegate_to: string
-        """
+    def __init__(self,
+                 source_project_dataset_tables,
+                 destination_project_dataset_table,
+                 write_disposition='WRITE_EMPTY',
+                 create_disposition='CREATE_IF_NEEDED',
+                 bigquery_conn_id='bigquery_default',
+                 delegate_to=None,
+                 *args,
+                 **kwargs):
         super(BigQueryToBigQueryOperator, self).__init__(*args, **kwargs)
         self.source_project_dataset_tables = source_project_dataset_tables
         self.destination_project_dataset_table = destination_project_dataset_table

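A minimal sketch of the BigQueryToBigQueryOperator documented above; the table
names are illustrative and the dag object is the one from the first sketch.

    from airflow.contrib.operators.bigquery_to_bigquery import \
        BigQueryToBigQueryOperator

    copy_tables = BigQueryToBigQueryOperator(
        task_id='copy_tables',
        # A list is used here because there is more than one source table.
        source_project_dataset_tables=['my_project.my_dataset.table_a',
                                       'my_project.my_dataset.table_b'],
        destination_project_dataset_table='my_project.my_dataset.table_merged',
        write_disposition='WRITE_TRUNCATE',
        create_disposition='CREATE_IF_NEEDED',
        dag=dag)
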
http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/b55f41f2/airflow/contrib/operators/bigquery_to_gcs.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/operators/bigquery_to_gcs.py b/airflow/contrib/operators/bigquery_to_gcs.py
index 15d23d9..aaff462 100644
--- a/airflow/contrib/operators/bigquery_to_gcs.py
+++ b/airflow/contrib/operators/bigquery_to_gcs.py
@@ -22,57 +22,54 @@ from airflow.utils.decorators import apply_defaults
 class BigQueryToCloudStorageOperator(BaseOperator):
     """
     Transfers a BigQuery table to a Google Cloud Storage bucket.
+
+    See:
+
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs
+
+    for more details about these parameters.
+
+    :param source_project_dataset_table: The dotted
+        (<project>.|<project>:)<dataset>.<table> BigQuery table to use as the source
+        data. If <project> is not included, project will be the project defined in
+        the connection json.
+    :type source_project_dataset_table: string
+    :param destination_cloud_storage_uris: The destination Google Cloud
+        Storage URI (e.g. gs://some-bucket/some-file.txt). Follows
+        convention defined here:
+        https://cloud.google.com/bigquery/exporting-data-from-bigquery#exportingmultiple
+    :type destination_cloud_storage_uris: list
+    :param compression: Type of compression to use.
+    :type compression: string
+    :param export_format: File format to export.
+    :type export_format: string
+    :param field_delimiter: The delimiter to use when extracting to a CSV.
+    :type field_delimiter: string
+    :param print_header: Whether to print a header for a CSV file extract.
+    :type print_header: boolean
+    :param bigquery_conn_id: reference to a specific BigQuery hook.
+    :type bigquery_conn_id: string
+    :param delegate_to: The account to impersonate, if any.
+        For this to work, the service account making the request must have domain-wide
+        delegation enabled.
+    :type delegate_to: string
     """
     template_fields = ('source_project_dataset_table', 'destination_cloud_storage_uris')
     template_ext = ('.sql',)
     ui_color = '#e4e6f0'
 
     @apply_defaults
-    def __init__(
-        self,
-        source_project_dataset_table,
-        destination_cloud_storage_uris,
-        compression='NONE',
-        export_format='CSV',
-        field_delimiter=',',
-        print_header=True,
-        bigquery_conn_id='bigquery_default',
-        delegate_to=None,
-        *args,
-        **kwargs):
-        """
-        Create a new BigQueryToCloudStorage to move data from BigQuery to
-        Google Cloud Storage.  See here:
-
-        https://cloud.google.com/bigquery/docs/reference/v2/jobs
-
-        For more details about these parameters.
-
-        :param source_project_dataset_table: The dotted
-            (<project>.|<project>:)<dataset>.<table> BigQuery table to use as the source
-            data. If <project> is not included, project will be the project defined in
-            the connection json.
-        :type source_project_dataset_table: string
-        :param destination_cloud_storage_uris: The destination Google Cloud
-            Storage URI (e.g. gs://some-bucket/some-file.txt). Follows
-            convention defined here:
-            https://cloud.google.com/bigquery/exporting-data-from-bigquery#exportingmultiple
-        :type destination_cloud_storage_uris: list
-        :param compression: Type of compression to use.
-        :type compression: string
-        :param export_format: File format to export.
-        :type field_delimiter: string
-        :param field_delimiter: The delimiter to use when extracting to a CSV.
-        :type field_delimiter: string
-        :param print_header: Whether to print a header for a CSV file extract.
-        :type print_header: boolean
-        :param bigquery_conn_id: reference to a specific BigQuery hook.
-        :type bigquery_conn_id: string
-        :param delegate_to: The account to impersonate, if any.
-            For this to work, the service account making the request must have domain-wide
-            delegation enabled.
-        :type delegate_to: string
-        """
+    def __init__(self,
+                 source_project_dataset_table,
+                 destination_cloud_storage_uris,
+                 compression='NONE',
+                 export_format='CSV',
+                 field_delimiter=',',
+                 print_header=True,
+                 bigquery_conn_id='bigquery_default',
+                 delegate_to=None,
+                 *args,
+                 **kwargs):
         super(BigQueryToCloudStorageOperator, self).__init__(*args, **kwargs)
         self.source_project_dataset_table = source_project_dataset_table
         self.destination_cloud_storage_uris = destination_cloud_storage_uris

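A minimal sketch of the BigQueryToCloudStorageOperator documented above; the
table name and bucket are illustrative and the dag object is the one from the
first sketch.

    from airflow.contrib.operators.bigquery_to_gcs import \
        BigQueryToCloudStorageOperator

    export_to_gcs = BigQueryToCloudStorageOperator(
        task_id='export_to_gcs',
        source_project_dataset_table='my_project.my_dataset.my_table',
        # A wildcard URI lets BigQuery shard a large export into multiple files.
        destination_cloud_storage_uris=['gs://my-bucket/exports/my_table_*.csv'],
        export_format='CSV',
        field_delimiter=',',
        print_header=True,
        bigquery_conn_id='bigquery_default',
        dag=dag)
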
http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/b55f41f2/airflow/contrib/operators/ecs_operator.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/operators/ecs_operator.py b/airflow/contrib/operators/ecs_operator.py
index 7415d32..df02c4e 100644
--- a/airflow/contrib/operators/ecs_operator.py
+++ b/airflow/contrib/operators/ecs_operator.py
@@ -22,7 +22,6 @@ from airflow.contrib.hooks.aws_hook import AwsHook
 
 
 class ECSOperator(BaseOperator):
-
     """
     Execute a task on AWS EC2 Container Service
 
@@ -30,9 +29,11 @@ class ECSOperator(BaseOperator):
     :type task_definition: str
     :param cluster: the cluster name on EC2 Container Service
     :type cluster: str
-    :param: overrides: the same parameter that boto3 will receive: http://boto3.readthedocs.org/en/latest/reference/services/ecs.html#ECS.Client.run_task
+    :param overrides: the same parameter that boto3 will receive:
+            http://boto3.readthedocs.org/en/latest/reference/services/ecs.html#ECS.Client.run_task
     :type overrides: dict
-    :param aws_conn_id: connection id of AWS credentials / region name. If None, credential boto3 strategy will be used (http://boto3.readthedocs.io/en/latest/guide/configuration.html).
+    :param aws_conn_id: connection id of AWS credentials / region name. If None,
+            the default boto3 credential strategy will be used (http://boto3.readthedocs.io/en/latest/guide/configuration.html).
     :type aws_conn_id: str
     :param region_name: region name to use in AWS Hook. Override the region_name in connection (if provided)
     """
@@ -112,7 +113,8 @@ class ECSOperator(BaseOperator):
                 elif container.get('lastStatus') == 'PENDING':
                     raise AirflowException('This task is still pending {}'.format(task))
                 elif 'error' in container.get('reason', '').lower():
-                    raise AirflowException('This containers encounter an error during launching : {}'.format(container.get('reason', '').lower()))
+                    raise AirflowException('This container encountered an error during launch: {}'.
+                                           format(container.get('reason', '').lower()))
 
     def get_hook(self):
         return AwsHook(

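A minimal sketch of the ECSOperator parameters documented above; the task
definition, cluster and region are illustrative, and the dag object is the one
from the first sketch in this message.

    from airflow.contrib.operators.ecs_operator import ECSOperator

    run_ecs_task = ECSOperator(
        task_id='run_ecs_task',
        task_definition='my-task-definition',
        cluster='my-ecs-cluster',
        overrides={'containerOverrides': []},  # passed through to boto3 run_task
        aws_conn_id=None,        # fall back to the default boto3 credential chain
        region_name='eu-west-1',
        dag=dag)
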
http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/b55f41f2/airflow/contrib/operators/file_to_gcs.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/operators/file_to_gcs.py b/airflow/contrib/operators/file_to_gcs.py
index c4ab1ad..61497a5 100644
--- a/airflow/contrib/operators/file_to_gcs.py
+++ b/airflow/contrib/operators/file_to_gcs.py
@@ -21,6 +21,19 @@ from airflow.utils.decorators import apply_defaults
 class FileToGoogleCloudStorageOperator(BaseOperator):
     """
     Uploads a file to Google Cloud Storage
+
+    :param src: Path to the local file
+    :type src: string
+    :param dst: Destination path within the specified bucket
+    :type dst: string
+    :param bucket: The bucket to upload to
+    :type bucket: string
+    :param google_cloud_storage_conn_id: The Airflow connection ID to upload with
+    :type google_cloud_storage_conn_id: string
+    :param mime_type: The mime-type string
+    :type mime_type: string
+    :param delegate_to: The account to impersonate, if any
+    :type delegate_to: string
     """
 
     @apply_defaults
@@ -33,20 +46,6 @@ class FileToGoogleCloudStorageOperator(BaseOperator):
                  delegate_to=None,
                  *args,
                  **kwargs):
-        """
-        :param src: Path to the local file
-        :type src: string
-        :param dst: Destination path within the specified bucket
-        :type dst: string
-        :param bucket: The bucket to upload to
-        :type bucket: string
-        :param google_cloud_storage_conn_id: The Airflow connection ID to upload with
-        :type google_cloud_storage_conn_id: string
-        :param mime_type: The mime-type string
-        :type mime_type: string
-        :param delegate_to: The account to impersonate, if any
-        :type delegate_to: string
-        """
         super(FileToGoogleCloudStorageOperator, self).__init__(*args, **kwargs)
         self.src = src
         self.dst = dst

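A minimal sketch of the FileToGoogleCloudStorageOperator documented above; the
local path, bucket and object name are illustrative, and the dag object is the
one from the first sketch.

    from airflow.contrib.operators.file_to_gcs import \
        FileToGoogleCloudStorageOperator

    upload_report = FileToGoogleCloudStorageOperator(
        task_id='upload_report',
        src='/tmp/report.csv',       # illustrative local path
        dst='reports/report.csv',    # destination path within the bucket
        bucket='my-bucket',
        mime_type='text/csv',
        google_cloud_storage_conn_id='google_cloud_storage_default',
        dag=dag)
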
http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/b55f41f2/airflow/contrib/operators/gcs_download_operator.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/operators/gcs_download_operator.py b/airflow/contrib/operators/gcs_download_operator.py
index f24ad85..c17f774 100644
--- a/airflow/contrib/operators/gcs_download_operator.py
+++ b/airflow/contrib/operators/gcs_download_operator.py
@@ -23,45 +23,41 @@ from airflow.utils.decorators import apply_defaults
 class GoogleCloudStorageDownloadOperator(BaseOperator):
     """
     Downloads a file from Google Cloud Storage.
+
+    :param bucket: The Google cloud storage bucket where the object is.
+    :type bucket: string
+    :param object: The name of the object to download in the Google cloud
+        storage bucket.
+    :type object: string
+    :param filename: The file path on the local file system (where the
+        operator is being executed) that the file should be downloaded to.
+        If false, the downloaded data will not be stored on the local file
+        system.
+    :type filename: string
+    :param store_to_xcom_key: If this param is set, the operator will push
+        the contents of the downloaded file to XCom with the key set in this
+        parameter. If false, the downloaded data will not be pushed to XCom.
+    :type store_to_xcom_key: string
+    :param google_cloud_storage_conn_id: The connection ID to use when
+        connecting to Google cloud storage.
+    :type google_cloud_storage_conn_id: string
+    :param delegate_to: The account to impersonate, if any.
+        For this to work, the service account making the request must have domain-wide delegation enabled.
+    :type delegate_to: string
     """
-    template_fields = ('bucket','object','filename','store_to_xcom_key',)
+    template_fields = ('bucket', 'object', 'filename', 'store_to_xcom_key',)
     ui_color = '#f0eee4'
 
     @apply_defaults
-    def __init__(
-        self,
-        bucket,
-        object,
-        filename=False,
-        store_to_xcom_key=False,
-        google_cloud_storage_conn_id='google_cloud_storage_default',
-        delegate_to=None,
-        *args,
-        **kwargs):
-        """
-        Create a new GoogleCloudStorageDownloadOperator.
-
-        :param bucket: The Google cloud storage bucket where the object is.
-        :type bucket: string
-        :param object: The name of the object to download in the Google cloud
-            storage bucket.
-        :type object: string
-        :param filename: The file path on the local file system (where the
-            operator is being executed) that the file should be downloaded to.
-            If false, the downloaded data will not be stored on the local file
-            system.
-        :type filename: string
-        :param store_to_xcom_key: If this param is set, the operator will push
-            the contents of the downloaded file to XCom with the key set in this
-            parameter. If false, the downloaded data will not be pushed to XCom.
-        :type store_to_xcom_key: string
-        :param google_cloud_storage_conn_id: The connection ID to use when
-            connecting to Google cloud storage.
-        :type google_cloud_storage_conn_id: string
-        :param delegate_to: The account to impersonate, if any.
-            For this to work, the service account making the request must have domain-wide delegation enabled.
-        :type delegate_to: string
-        """
+    def __init__(self,
+                 bucket,
+                 object,
+                 filename=False,
+                 store_to_xcom_key=False,
+                 google_cloud_storage_conn_id='google_cloud_storage_default',
+                 delegate_to=None,
+                 *args,
+                 **kwargs):
         super(GoogleCloudStorageDownloadOperator, self).__init__(*args, **kwargs)
         self.bucket = bucket
         self.object = object

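A minimal sketch of the GoogleCloudStorageDownloadOperator documented above;
the bucket, object and local path are illustrative, and the dag object is the
one from the first sketch.

    from airflow.contrib.operators.gcs_download_operator import \
        GoogleCloudStorageDownloadOperator

    download_report = GoogleCloudStorageDownloadOperator(
        task_id='download_report',
        bucket='my-bucket',
        object='reports/report.csv',
        filename='/tmp/report.csv',  # or set store_to_xcom_key to push to XCom instead
        google_cloud_storage_conn_id='google_cloud_storage_default',
        dag=dag)
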
http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/b55f41f2/airflow/contrib/operators/hipchat_operator.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/operators/hipchat_operator.py b/airflow/contrib/operators/hipchat_operator.py
index 0bc7cbe..aeb37d9 100644
--- a/airflow/contrib/operators/hipchat_operator.py
+++ b/airflow/contrib/operators/hipchat_operator.py
@@ -29,6 +29,7 @@ class HipChatAPIOperator(BaseOperator):
     at https://www.hipchat.com/docs/apiv2. Before using any HipChat API operators you need
     to get an authentication token at https://www.hipchat.com/docs/apiv2/auth.
     In the future additional HipChat operators will be derived from this class as well.
+
     :param token: HipChat REST API authentication token
     :type token: str
     :param base_url: HipChat REST API base url.
@@ -76,6 +77,7 @@ class HipChatAPISendRoomNotificationOperator(HipChatAPIOperator):
     """
     Send notification to a specific HipChat room.
     More info: https://www.hipchat.com/docs/apiv2/method/send_room_notification
+
     :param room_id: Room in which to send notification on HipChat
     :type room_id: str
     :param message: The message body

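A minimal sketch of the HipChatAPISendRoomNotificationOperator touched above;
the token and room id are placeholders (see the auth docs referenced in the
docstring), and the dag object is the one from the first sketch.

    from airflow.contrib.operators.hipchat_operator import \
        HipChatAPISendRoomNotificationOperator

    notify_room = HipChatAPISendRoomNotificationOperator(
        task_id='notify_room',
        token='MY_HIPCHAT_TOKEN',    # placeholder authentication token
        room_id='1234567',           # placeholder room id
        message='Airflow task finished',
        dag=dag)
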
http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/b55f41f2/airflow/contrib/operators/ssh_execute_operator.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/operators/ssh_execute_operator.py b/airflow/contrib/operators/ssh_execute_operator.py
index dd9e197..3bd8f09 100644
--- a/airflow/contrib/operators/ssh_execute_operator.py
+++ b/airflow/contrib/operators/ssh_execute_operator.py
@@ -91,7 +91,7 @@ class SSHExecuteOperator(BaseOperator):
 
     :param ssh_hook: A SSHHook that indicates the remote host
                      you want to run the script
-    :param ssh_hook: SSHHook
+    :type ssh_hook: SSHHook
     :param bash_command: The command, set of commands or reference to a
         bash script (must be '.sh') to be executed.
     :type bash_command: string

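A minimal sketch of the SSHExecuteOperator touched above. The hook construction
assumes an 'ssh_default' connection and follows the contrib SSHHook of this era,
so treat it as illustrative rather than canonical; the dag object is the one from
the first sketch.

    from airflow.contrib.hooks.ssh_hook import SSHHook
    from airflow.contrib.operators.ssh_execute_operator import SSHExecuteOperator

    ssh_hook = SSHHook(conn_id='ssh_default')  # assumed connection id

    run_remote = SSHExecuteOperator(
        task_id='run_remote',
        ssh_hook=ssh_hook,
        bash_command='echo hello from the remote host',
        dag=dag)
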
http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/b55f41f2/airflow/operators/mssql_operator.py
----------------------------------------------------------------------
diff --git a/airflow/operators/mssql_operator.py b/airflow/operators/mssql_operator.py
index 0f0cd63..9ae2fff 100644
--- a/airflow/operators/mssql_operator.py
+++ b/airflow/operators/mssql_operator.py
@@ -22,11 +22,11 @@ from airflow.utils.decorators import apply_defaults
 class MsSqlOperator(BaseOperator):
     """
     Executes sql code in a specific Microsoft SQL database
+
     :param mssql_conn_id: reference to a specific mssql database
     :type mssql_conn_id: string
     :param sql: the sql code to be executed
-    :type sql: string or string pointing to a template file.
-    File must have a '.sql' extensions.
+    :type sql: string or string pointing to a template file with .sql extension
     :param database: name of database which overwrite defined one in connection
     :type database: string
     """

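A minimal sketch of the MsSqlOperator documented above; the connection id, SQL
statement and database name are illustrative, and the dag object is the one from
the first sketch.

    from airflow.operators.mssql_operator import MsSqlOperator

    purge_staging = MsSqlOperator(
        task_id='purge_staging',
        mssql_conn_id='mssql_default',
        sql="DELETE FROM staging_events WHERE event_date < '2017-01-01'",
        database='analytics',   # overrides the database defined in the connection
        dag=dag)
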
http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/b55f41f2/airflow/operators/mssql_to_hive.py
----------------------------------------------------------------------
diff --git a/airflow/operators/mssql_to_hive.py b/airflow/operators/mssql_to_hive.py
index 6db0cba..6d7521e 100644
--- a/airflow/operators/mssql_to_hive.py
+++ b/airflow/operators/mssql_to_hive.py
@@ -40,10 +40,10 @@ class MsSqlToHiveTransfer(BaseOperator):
     queried considerably, you may want to use this operator only to
     stage the data into a temporary table before loading it into its
     final destination using a ``HiveOperator``.
+
     :param sql: SQL query to execute against the Microsoft SQL Server database
     :type sql: str
-    :param hive_table: target Hive table, use dot notation to target a
-    specific database
+    :param hive_table: target Hive table, use dot notation to target a specific database
     :type hive_table: str
     :param create: whether to create the table if it doesn't exist
     :type create: bool