You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@airflow.apache.org by po...@apache.org on 2022/06/03 10:45:07 UTC
[airflow] branch main updated: Migrate SQLite example DAGs to new design #22461 (#24150)
This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 32a158a95b Migrate SQLite example DAGs to new design #22461 (#24150)
32a158a95b is described below
commit 32a158a95bc8ded8329a3cb17dcf9e9e85c3de46
Author: chethanuk-plutoflume <ch...@tessian.com>
AuthorDate: Fri Jun 3 11:44:59 2022 +0100
Migrate SQLite example DAGs to new design #22461 (#24150)
---
airflow/providers/sqlite/example_dags/__init__.py | 17 ----
.../sqlite/example_dags/example_sqlite.py | 86 -----------------
docs/apache-airflow-providers-sqlite/index.rst | 2 +-
docs/apache-airflow-providers-sqlite/operators.rst | 4 +-
{airflow/providers/sqlite/example_dags => tests/system/providers/sqlite}/create_table.sql | 0
tests/system/providers/sqlite/example_sqlite.py | 102 +++++++++++++++++++++
6 files changed, 105 insertions(+), 106 deletions(-)
diff --git a/airflow/providers/sqlite/example_dags/__init__.py b/airflow/providers/sqlite/example_dags/__init__.py
deleted file mode 100644
index 217e5db960..0000000000
--- a/airflow/providers/sqlite/example_dags/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
diff --git a/airflow/providers/sqlite/example_dags/example_sqlite.py b/airflow/providers/sqlite/example_dags/example_sqlite.py
deleted file mode 100644
index b1755996e2..0000000000
--- a/airflow/providers/sqlite/example_dags/example_sqlite.py
+++ /dev/null
@@ -1,86 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-"""
-This is an example DAG for the use of the SqliteOperator.
-In this example, we create two tasks that execute in sequence.
-The first task calls an sql command, defined in the SQLite operator,
-which when triggered, is performed on the connected sqlite database.
-The second task is similar but instead calls the SQL command from an external file.
-"""
-
-from datetime import datetime
-
-from airflow import DAG
-from airflow.providers.sqlite.hooks.sqlite import SqliteHook
-from airflow.providers.sqlite.operators.sqlite import SqliteOperator
-
-dag = DAG(
- dag_id='example_sqlite',
- schedule_interval='@daily',
- start_date=datetime(2021, 1, 1),
- tags=['example'],
- catchup=False,
-)
-
-# [START howto_operator_sqlite]
-
-# Example of creating a task that calls a common CREATE TABLE sql command.
-create_table_sqlite_task = SqliteOperator(
- task_id='create_table_sqlite',
- sql=r"""
- CREATE TABLE Customers (
- customer_id INT PRIMARY KEY,
- first_name TEXT,
- last_name TEXT
- );
- """,
- dag=dag,
-)
-
-# [END howto_operator_sqlite]
-
-
-@dag.task(task_id="insert_sqlite_task")
-def insert_sqlite_hook():
- sqlite_hook = SqliteHook()
-
- rows = [('James', '11'), ('James', '22'), ('James', '33')]
- target_fields = ['first_name', 'last_name']
- sqlite_hook.insert_rows(table='Customers', rows=rows, target_fields=target_fields)
-
-
-@dag.task(task_id="replace_sqlite_task")
-def replace_sqlite_hook():
- sqlite_hook = SqliteHook()
-
- rows = [('James', '11'), ('James', '22'), ('James', '33')]
- target_fields = ['first_name', 'last_name']
- sqlite_hook.insert_rows(table='Customers', rows=rows, target_fields=target_fields, replace=True)
-
-
-# [START howto_operator_sqlite_external_file]
-
-# Example of creating a task that calls an sql command from an external file.
-external_create_table_sqlite_task = SqliteOperator(
- task_id='create_table_sqlite_external_file',
- sql='create_table.sql',
-)
-
-# [END howto_operator_sqlite_external_file]
-
-create_table_sqlite_task >> external_create_table_sqlite_task >> insert_sqlite_hook() >> replace_sqlite_hook()
diff --git a/docs/apache-airflow-providers-sqlite/index.rst b/docs/apache-airflow-providers-sqlite/index.rst
index f7e8841dd1..933dd52995 100644
--- a/docs/apache-airflow-providers-sqlite/index.rst
+++ b/docs/apache-airflow-providers-sqlite/index.rst
@@ -39,7 +39,7 @@ Content
:maxdepth: 1
:caption: Resources
- Example DAGs <https://github.com/apache/airflow/tree/main/airflow/providers/sqlite/example_dags>
+ Example DAGs <https://github.com/apache/airflow/tree/main/tests/system/providers/sqlite>
.. toctree::
:maxdepth: 1
diff --git a/docs/apache-airflow-providers-sqlite/operators.rst b/docs/apache-airflow-providers-sqlite/operators.rst
index 6cbe5f6d30..604a2062bc 100644
--- a/docs/apache-airflow-providers-sqlite/operators.rst
+++ b/docs/apache-airflow-providers-sqlite/operators.rst
@@ -51,14 +51,14 @@ the connection metadata is structured as follows:
An example usage of the SqliteOperator is as follows:
-.. exampleinclude:: /../../airflow/providers/sqlite/example_dags/example_sqlite.py
+.. exampleinclude:: /../../tests/system/providers/sqlite/example_sqlite.py
:language: python
:start-after: [START howto_operator_sqlite]
:end-before: [END howto_operator_sqlite]
Furthermore, you can use an external file to execute the SQL commands. Script folder must be at the same level as DAG.py file.
-.. exampleinclude:: /../../airflow/providers/sqlite/example_dags/example_sqlite.py
+.. exampleinclude:: /../../tests/system/providers/sqlite/example_sqlite.py
:language: python
:start-after: [START howto_operator_sqlite_external_file]
:end-before: [END howto_operator_sqlite_external_file]
diff --git a/airflow/providers/sqlite/example_dags/create_table.sql b/tests/system/providers/sqlite/create_table.sql
similarity index 100%
rename from airflow/providers/sqlite/example_dags/create_table.sql
rename to tests/system/providers/sqlite/create_table.sql
diff --git a/tests/system/providers/sqlite/example_sqlite.py b/tests/system/providers/sqlite/example_sqlite.py
new file mode 100644
index 0000000000..cf0c74c193
--- /dev/null
+++ b/tests/system/providers/sqlite/example_sqlite.py
@@ -0,0 +1,102 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+This is an example DAG for the use of the SqliteOperator.
+In this example, we create two tasks that execute in sequence.
+The first task calls an sql command, defined in the SQLite operator,
+which when triggered, is performed on the connected sqlite database.
+The second task is similar but instead calls the SQL command from an external file.
+"""
+
+import os
+from datetime import datetime
+
+from airflow import DAG
+from airflow.providers.sqlite.hooks.sqlite import SqliteHook
+from airflow.providers.sqlite.operators.sqlite import SqliteOperator
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_sqlite"
+
+with DAG(
+ dag_id=DAG_ID,
+ schedule_interval='@daily',
+ start_date=datetime(2021, 1, 1),
+ tags=['example'],
+ catchup=False,
+) as dag:
+
+ # [START howto_operator_sqlite]
+
+ # Example of creating a task that calls a common CREATE TABLE sql command.
+ create_table_sqlite_task = SqliteOperator(
+ task_id='create_table_sqlite',
+ sql=r"""
+ CREATE TABLE Customers (
+ customer_id INT PRIMARY KEY,
+ first_name TEXT,
+ last_name TEXT
+ );
+ """,
+ )
+
+ # [END howto_operator_sqlite]
+
+ @dag.task(task_id="insert_sqlite_task")
+ def insert_sqlite_hook():
+ sqlite_hook = SqliteHook()
+
+ rows = [('James', '11'), ('James', '22'), ('James', '33')]
+ target_fields = ['first_name', 'last_name']
+ sqlite_hook.insert_rows(table='Customers', rows=rows, target_fields=target_fields)
+
+ @dag.task(task_id="replace_sqlite_task")
+ def replace_sqlite_hook():
+ sqlite_hook = SqliteHook()
+
+ rows = [('James', '11'), ('James', '22'), ('James', '33')]
+ target_fields = ['first_name', 'last_name']
+ sqlite_hook.insert_rows(table='Customers', rows=rows, target_fields=target_fields, replace=True)
+
+ # [START howto_operator_sqlite_external_file]
+
+ # Example of creating a task that calls an sql command from an external file.
+ external_create_table_sqlite_task = SqliteOperator(
+ task_id='create_table_sqlite_external_file',
+ sql='create_table.sql',
+ )
+
+ # [END howto_operator_sqlite_external_file]
+
+ (
+ create_table_sqlite_task
+ >> external_create_table_sqlite_task
+ >> insert_sqlite_hook()
+ >> replace_sqlite_hook()
+ )
+
+ from tests.system.utils.watcher import watcher
+
+ # This test needs watcher in order to properly mark success/failure
+ # when "tearDown" task with trigger rule is part of the DAG
+ list(dag.tasks) >> watcher()
+
+from tests.system.utils import get_test_run # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)