Posted to commits@airflow.apache.org by GitBox <gi...@apache.org> on 2018/12/18 06:02:44 UTC

[GitHub] stale[bot] closed pull request #3891: [AIRFLOW-3029] New Operator - SqlOperator

URL: https://github.com/apache/incubator-airflow/pull/3891

As this is a pull request from a fork, and GitHub hides the original
diff once it is closed, the diff is reproduced below for provenance:

diff --git a/airflow/contrib/operators/sql_operator.py b/airflow/contrib/operators/sql_operator.py
new file mode 100644
index 0000000000..19a0c857b7
--- /dev/null
+++ b/airflow/contrib/operators/sql_operator.py
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from airflow.hooks.base_hook import BaseHook
+from airflow.models import BaseOperator
+from airflow.utils.decorators import apply_defaults
+
+
+class SqlOperator(BaseOperator):
+    """
+    Executes SQL code in a database.
+
+    This generic operator can be instantiated directly and does not
+    need to be subclassed for each DbApiHook implementation. It
+    automatically uses the correct DbApiHook subclass by reflecting
+    on the Connection's assigned `conn_type`.
+
+    :param conn_id: reference to a predefined sql database connection
+    :type conn_id: str
+    :param sql: the sql code to be executed. (templated)
+    :type sql: Can receive a str representing a sql statement,
+        a list of str (sql statements), or a reference to a template file.
+        Template references are recognized by str ending in '.sql'
+    """
+
+    template_fields = ('sql',)
+    template_ext = ('.sql',)
+    ui_color = '#ededed'
+
+    @apply_defaults
+    def __init__(
+            self,
+            sql,
+            conn_id,
+            autocommit=False,
+            parameters=None,
+            *args, **kwargs):
+        super(SqlOperator, self).__init__(*args, **kwargs)
+        self.parameters = parameters
+        self.sql = sql
+        self.conn_id = conn_id
+        self.autocommit = autocommit
+
+    def execute(self, context):
+        self.log.info('Executing: %s', self.sql)
+        hook = BaseHook.get_hook(conn_id=self.conn_id)
+        hook.run(sql=self.sql,
+                 autocommit=self.autocommit,
+                 parameters=self.parameters)
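
For readers skimming the diff: BaseHook.get_hook resolves the concrete
DbApiHook subclass from the Connection's conn_type. A simplified sketch
of that dispatch, for illustration only (the real mapping lives in
Connection.get_hook in airflow/models.py and covers many more
connection types):

    # Illustrative only -- not the actual Airflow source.
    def get_hook_for(connection):
        if connection.conn_type == 'postgres':
            from airflow.hooks.postgres_hook import PostgresHook
            return PostgresHook(postgres_conn_id=connection.conn_id)
        elif connection.conn_type == 'mysql':
            from airflow.hooks.mysql_hook import MySqlHook
            return MySqlHook(mysql_conn_id=connection.conn_id)
        # ... one branch per supported conn_type ('sqlite', 'mssql', etc.)
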
diff --git a/docs/concepts.rst b/docs/concepts.rst
index 50c18c9b98..100b070096 100644
--- a/docs/concepts.rst
+++ b/docs/concepts.rst
@@ -116,7 +116,7 @@ Airflow provides operators for many common tasks, including:
 - ``PythonOperator`` - calls an arbitrary Python function
 - ``EmailOperator`` - sends an email
 - ``SimpleHttpOperator`` - sends an HTTP request
-- ``MySqlOperator``, ``SqliteOperator``, ``PostgresOperator``, ``MsSqlOperator``, ``OracleOperator``, ``JdbcOperator``, etc. - executes a SQL command
+- ``SqlOperator``, ``MySqlOperator``, ``SqliteOperator``, ``PostgresOperator``, ``MsSqlOperator``, ``OracleOperator``, ``JdbcOperator``, etc. - executes a SQL command
 - ``Sensor`` - waits for a certain time, file, database row, S3 key, etc...
 
 In addition to these basic building blocks, there are many more specific
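
As a usage sketch of the new operator (assuming this PR's module path;
the connection id 'my_postgres' and the DAG arguments are hypothetical):

    from datetime import datetime

    from airflow import DAG
    from airflow.contrib.operators.sql_operator import SqlOperator

    # 'my_postgres' must name a Connection whose conn_type maps to a
    # DbApiHook subclass (e.g. 'postgres' -> PostgresHook).
    dag = DAG('sql_operator_example',
              start_date=datetime(2018, 1, 1),
              schedule_interval=None)

    create_table = SqlOperator(
        task_id='create_table',
        conn_id='my_postgres',
        sql='CREATE TABLE IF NOT EXISTS example (dummy VARCHAR(50));',
        dag=dag,
    )
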
diff --git a/tests/contrib/operators/test_sql_operator.py b/tests/contrib/operators/test_sql_operator.py
new file mode 100644
index 0000000000..dc849270e1
--- /dev/null
+++ b/tests/contrib/operators/test_sql_operator.py
@@ -0,0 +1,105 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import unittest
+from airflow import DAG
+from airflow import configuration
+from airflow.contrib.operators.sql_operator import SqlOperator
+from airflow.utils import timezone
+from airflow.utils.tests import skipUnlessImported
+
+
+class SqlOperatorTest(unittest.TestCase):
+
+    DEFAULT_DATE = timezone.datetime(2015, 1, 1)
+
+    def setUp(self):
+        configuration.load_test_config()
+        args = {'owner': 'airflow', 'start_date': self.DEFAULT_DATE}
+        dag = DAG('unit_test_dag', default_args=args)
+        self.dag = dag
+
+    @skipUnlessImported('airflow.operators.postgres_operator', 'PostgresOperator')
+    def test_sql_operator_postgres_test(self):
+        sql = """
+        CREATE TABLE IF NOT EXISTS test_airflow (
+            dummy VARCHAR(50)
+        );
+        """
+        task_basic = SqlOperator(task_id='sql_operator_postgres_test',
+                                 sql=sql,
+                                 conn_id='postgres_default',
+                                 dag=self.dag)
+        task_basic.run(start_date=self.DEFAULT_DATE,
+                       end_date=self.DEFAULT_DATE,
+                       ignore_ti_state=True)
+
+        task_autocommit = SqlOperator(task_id='sql_operator_postgres_test_autocommit',
+                                      sql=sql,
+                                      conn_id='postgres_default',
+                                      dag=self.dag,
+                                      autocommit=True)
+        task_autocommit.run(start_date=self.DEFAULT_DATE,
+                            end_date=self.DEFAULT_DATE,
+                            ignore_ti_state=True)
+        sql = [
+            "TRUNCATE TABLE test_airflow",
+            "INSERT INTO test_airflow VALUES ('X')",
+        ]
+        task_multi = SqlOperator(task_id='sql_operator_postgres_test_multi',
+                                 sql=sql,
+                                 conn_id='postgres_default',
+                                 dag=self.dag)
+        task_multi.run(start_date=self.DEFAULT_DATE,
+                       end_date=self.DEFAULT_DATE,
+                       ignore_ti_state=True)
+
+    @skipUnlessImported('airflow.operators.mysql_operator', 'MySqlOperator')
+    def test_sql_operator_mysql_test(self):
+        sql = """
+        CREATE TABLE IF NOT EXISTS test_airflow (
+            dummy VARCHAR(50)
+        );
+        """
+        task_basic = SqlOperator(task_id='sql_operator_mysql_test',
+                                 sql=sql,
+                                 conn_id='airflow_db',
+                                 dag=self.dag)
+        task_basic.run(start_date=self.DEFAULT_DATE,
+                       end_date=self.DEFAULT_DATE,
+                       ignore_ti_state=True)
+
+        task_autocommit = SqlOperator(task_id='sql_operator_mysql_test_autocommit',
+                                      sql=sql,
+                                      conn_id='airflow_db',
+                                      dag=self.dag,
+                                      autocommit=True)
+        task_autocommit.run(start_date=self.DEFAULT_DATE,
+                            end_date=self.DEFAULT_DATE,
+                            ignore_ti_state=True)
+        sql = [
+            "TRUNCATE TABLE test_airflow",
+            "INSERT INTO test_airflow VALUES ('X')",
+        ]
+        task_multi = SqlOperator(task_id='sql_operator_mysql_test_multi',
+                                 sql=sql,
+                                 conn_id='airflow_db',
+                                 dag=self.dag)
+        task_multi.run(start_date=self.DEFAULT_DATE,
+                       end_date=self.DEFAULT_DATE,
+                       ignore_ti_state=True)
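
The tests above pass SQL strings inline; because the operator declares
template_ext = ('.sql',), a string ending in '.sql' is treated as a
template file and rendered with Jinja first. A sketch, with a
hypothetical file and connection id:

    from datetime import datetime

    from airflow import DAG
    from airflow.contrib.operators.sql_operator import SqlOperator

    dag = DAG('sql_templated_example',
              start_date=datetime(2018, 1, 1),
              schedule_interval=None)

    # 'queries/insert.sql' (hypothetical) is resolved against the DAG's
    # template search path; a %(value)s placeholder inside the file is
    # bound as a DB-API parameter by hook.run at execution time.
    insert = SqlOperator(
        task_id='insert_templated',
        conn_id='my_postgres',
        sql='queries/insert.sql',
        parameters={'value': 'X'},
        dag=dag,
    )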

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services