Posted to commits@airflow.apache.org by po...@apache.org on 2022/06/01 19:25:02 UTC

[airflow] branch main updated: Migrate Yandex example DAGs to new design AIP-47 (#24082)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 65ad2aed26 Migrate Yandex example DAGs to new design AIP-47 (#24082)
65ad2aed26 is described below

commit 65ad2aed26f7572ba0d3b04a33f9144989ac7117
Author: Chenglong Yan <al...@gmail.com>
AuthorDate: Thu Jun 2 03:24:50 2022 +0800

    Migrate Yandex example DAGs to new design AIP-47 (#24082)
    
    closes: #22470
---
 airflow/providers/yandex/example_dags/__init__.py        | 16 ----------------
 docs/apache-airflow-providers-yandex/operators.rst       |  2 +-
 .../providers/yandex}/example_yandexcloud_dataproc.py    | 10 +++++++++-
 3 files changed, 10 insertions(+), 18 deletions(-)
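
For orientation: the AIP-47 design named in the commit title moves provider example DAGs out of airflow/providers/<provider>/example_dags/ and into tests/system/providers/<provider>/, where each file serves both as documentation and as a runnable system test. Below is a minimal sketch of that layout, assembled from the diff that follows; the DAG id and the empty body are placeholders, not part of this commit:

    import os
    from datetime import datetime

    from airflow import DAG

    # Unique suffix for cloud resources created by a test run (AIP-47 convention).
    ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
    DAG_ID = "example_hypothetical_provider"

    with DAG(
        DAG_ID,
        schedule_interval=None,
        start_date=datetime(2021, 1, 1),
        tags=["example"],
    ) as dag:
        ...  # provider operators and their ordering go here

    # Expose the DAG to pytest (see: tests/system/README.md#run_via_pytest).
    from tests.system.utils import get_test_run  # noqa: E402

    test_run = get_test_run(dag)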

diff --git a/airflow/providers/yandex/example_dags/__init__.py b/airflow/providers/yandex/example_dags/__init__.py
deleted file mode 100644
index 13a83393a9..0000000000
--- a/airflow/providers/yandex/example_dags/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
diff --git a/docs/apache-airflow-providers-yandex/operators.rst b/docs/apache-airflow-providers-yandex/operators.rst
index bf5c6819a6..552fd4febd 100644
--- a/docs/apache-airflow-providers-yandex/operators.rst
+++ b/docs/apache-airflow-providers-yandex/operators.rst
@@ -37,4 +37,4 @@ Prerequisite Tasks
 
 Using the operators
 ^^^^^^^^^^^^^^^^^^^^^
-See the usage examples in `example DAGs <https://github.com/apache/airflow/blob/main/airflow/providers/yandex/example_dags/example_yandexcloud_dataproc.py>`_
+See the usage examples in `example DAGs <https://github.com/apache/airflow/blob/main/tests/system/providers/yandex/example_yandexcloud_dataproc.py>`_
diff --git a/airflow/providers/yandex/example_dags/example_yandexcloud_dataproc.py b/tests/system/providers/yandex/example_yandexcloud_dataproc.py
similarity index 95%
rename from airflow/providers/yandex/example_dags/example_yandexcloud_dataproc.py
rename to tests/system/providers/yandex/example_yandexcloud_dataproc.py
index 7d42946380..29636d363f 100644
--- a/airflow/providers/yandex/example_dags/example_yandexcloud_dataproc.py
+++ b/tests/system/providers/yandex/example_yandexcloud_dataproc.py
@@ -14,6 +14,7 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+import os
 import uuid
 from datetime import datetime
 
@@ -35,9 +36,11 @@ AVAILABILITY_ZONE_ID = 'ru-central1-c'
 # Dataproc cluster jobs will produce logs in specified s3 bucket
 S3_BUCKET_NAME_FOR_JOB_LOGS = ''
 
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+DAG_ID = 'example_yandexcloud_dataproc_operator'
 
 with DAG(
-    'example_yandexcloud_dataproc_operator',
+    DAG_ID,
     schedule_interval=None,
     start_date=datetime(2021, 1, 1),
     tags=['example'],
@@ -153,3 +156,8 @@ with DAG(
 
     create_cluster >> create_mapreduce_job >> create_hive_query >> create_hive_query_from_file
     create_hive_query_from_file >> create_spark_job >> create_pyspark_job >> delete_cluster
+
+from tests.system.utils import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
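
A note on the trailing hook added above: get_test_run wraps the module-level dag so that pytest can collect this file and execute the whole DAG, e.g. with "pytest tests/system/providers/yandex/example_yandexcloud_dataproc.py". A rough sketch of the shape of that helper follows; this is an assumption for illustration only, not the actual tests/system/utils source:

    # Hypothetical sketch of get_test_run -- not the real implementation.
    # It returns a zero-argument callable whose name starts with "test_",
    # so pytest collects it; calling it executes every task in the DAG once.
    from airflow.models.dag import DAG

    def get_test_run(dag: DAG):
        def test_run():
            # DAG.test() (available in newer Airflow releases) runs all tasks
            # in-process without a scheduler; the real helper reaches the same
            # end through lower-level APIs and reports per-task status.
            dag.test()

        return test_run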