You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@airflow.apache.org by po...@apache.org on 2022/07/26 12:04:16 UTC

[airflow] branch main updated: Migrate Google example life_sciences to new design AIP-47 (#25264)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 28db8c10b2 Migrate Google example life_sciences to new design AIP-47 (#25264)
28db8c10b2 is described below

commit 28db8c10b2422d99217658a039cc6dc45a38ff51
Author: Chenglong Yan <al...@gmail.com>
AuthorDate: Tue Jul 26 20:04:04 2022 +0800

    Migrate Google example life_sciences to new design AIP-47 (#25264)
    
    related: #22447, #22430
---
 .../operators/cloud/life_sciences.rst              |  6 +--
 .../cloud/operators/test_life_sciences_system.py   | 43 ----------------------
 .../cloud/transfers/test_gdrive_to_gcs_system.py   |  2 +-
 .../google/cloud/life_sciences/__init__.py         | 16 ++++++++
 .../cloud/life_sciences}/example_life_sciences.py  | 11 +++++-
 5 files changed, 30 insertions(+), 48 deletions(-)

diff --git a/docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst b/docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst
index 070f6e3882..4844c64277 100644
--- a/docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst
+++ b/docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst
@@ -34,7 +34,7 @@ Pipeline Configuration
 In order to run the pipeline, it is necessary to configure the request body.
 Here is an example of the pipeline configuration with a single action.
 
-.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_life_sciences.py
+.. exampleinclude:: /../../tests/system/providers/google/cloud/life_sciences/example_life_sciences.py
     :language: python
     :dedent: 0
     :start-after: [START howto_configure_simple_action_pipeline]
@@ -42,7 +42,7 @@ Here is an example of the pipeline configuration with a single action.
 
 The pipeline can also be configured with multiple actions.
 
-.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_life_sciences.py
+.. exampleinclude:: /../../tests/system/providers/google/cloud/life_sciences/example_life_sciences.py
     :language: python
     :dedent: 0
     :start-after: [START howto_configure_multiple_action_pipeline]
@@ -59,7 +59,7 @@ Use the
 :class:`~airflow.providers.google.cloud.operators.life_sciences.LifeSciencesRunPipelineOperator`
 to execute pipelines.
 
-.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_life_sciences.py
+.. exampleinclude:: /../../tests/system/providers/google/cloud/life_sciences/example_life_sciences.py
     :language: python
     :dedent: 0
     :start-after: [START howto_run_pipeline]
diff --git a/tests/providers/google/cloud/operators/test_life_sciences_system.py b/tests/providers/google/cloud/operators/test_life_sciences_system.py
deleted file mode 100644
index bf01794bbe..0000000000
--- a/tests/providers/google/cloud/operators/test_life_sciences_system.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-import os
-
-import pytest
-
-from airflow.providers.google.cloud.example_dags.example_life_sciences import BUCKET, FILENAME, LOCATION
-from tests.providers.google.cloud.utils.gcp_authenticator import GCP_LIFE_SCIENCES_KEY
-from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context
-
-
-@pytest.mark.backend("mysql", "postgres")
-@pytest.mark.credential_file(GCP_LIFE_SCIENCES_KEY)
-class CloudLifeSciencesExampleDagsSystemTest(GoogleSystemTest):
-    @provide_gcp_context(GCP_LIFE_SCIENCES_KEY)
-    def setUp(self):
-        super().setUp()
-        self.create_gcs_bucket(BUCKET, LOCATION)
-        self.upload_content_to_gcs(lines=f"{os.urandom(1 * 1024 * 1024)}", bucket=BUCKET, filename=FILENAME)
-
-    @provide_gcp_context(GCP_LIFE_SCIENCES_KEY)
-    def test_run_example_dag_function(self):
-        self.run_dag('example_gcp_life_sciences', CLOUD_DAG_FOLDER)
-
-    @provide_gcp_context(GCP_LIFE_SCIENCES_KEY)
-    def tearDown(self):
-        self.delete_gcs_bucket(BUCKET)
-        super().tearDown()
diff --git a/tests/providers/google/cloud/transfers/test_gdrive_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_gdrive_to_gcs_system.py
index 9b278526d2..505508ce84 100644
--- a/tests/providers/google/cloud/transfers/test_gdrive_to_gcs_system.py
+++ b/tests/providers/google/cloud/transfers/test_gdrive_to_gcs_system.py
@@ -17,8 +17,8 @@
 # under the License.
 import pytest
 
-from airflow.providers.google.cloud.example_dags.example_life_sciences import BUCKET
 from tests.providers.google.cloud.utils.gcp_authenticator import GCP_GCS_KEY
+from tests.system.providers.google.cloud.life_sciences.example_life_sciences import BUCKET
 from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context
 
 
diff --git a/tests/system/providers/google/cloud/life_sciences/__init__.py b/tests/system/providers/google/cloud/life_sciences/__init__.py
new file mode 100644
index 0000000000..13a83393a9
--- /dev/null
+++ b/tests/system/providers/google/cloud/life_sciences/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/google/cloud/example_dags/example_life_sciences.py b/tests/system/providers/google/cloud/life_sciences/example_life_sciences.py
similarity index 92%
rename from airflow/providers/google/cloud/example_dags/example_life_sciences.py
rename to tests/system/providers/google/cloud/life_sciences/example_life_sciences.py
index 0503a8e1e3..6297def813 100644
--- a/airflow/providers/google/cloud/example_dags/example_life_sciences.py
+++ b/tests/system/providers/google/cloud/life_sciences/example_life_sciences.py
@@ -22,6 +22,9 @@ from datetime import datetime
 from airflow import models
 from airflow.providers.google.cloud.operators.life_sciences import LifeSciencesRunPipelineOperator
 
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = "example_gcp_life_sciences"
+
 PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project-id")
 BUCKET = os.environ.get("GCP_GCS_LIFE_SCIENCES_BUCKET", "INVALID BUCKET NAME")
 FILENAME = os.environ.get("GCP_GCS_LIFE_SCIENCES_FILENAME", 'input.in')
@@ -74,7 +77,7 @@ MULTI_ACTION_PIPELINE = {
 # [END howto_configure_multiple_action_pipeline]
 
 with models.DAG(
-    "example_gcp_life_sciences",
+    DAG_ID,
     schedule_interval='@once',
     start_date=datetime(2021, 1, 1),
     catchup=False,
@@ -95,3 +98,9 @@ with models.DAG(
     )
 
     simple_life_science_action_pipeline >> multiple_life_science_action_pipeline
+
+
+from tests.system.utils import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)