Posted to commits@airflow.apache.org by jo...@apache.org on 2022/08/16 13:47:47 UTC

[airflow] branch main updated: Delete Old-style System Tests (#25655)

This is an automated email from the ASF dual-hosted git repository.

joshfell pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new bc46477d20 Delete Old-style System Tests (#25655)
bc46477d20 is described below

commit bc46477d20802242ec9596279933742c1743b2f1
Author: D. Ferruzzi <fe...@amazon.com>
AuthorDate: Tue Aug 16 06:47:34 2022 -0700

    Delete Old-style System Tests (#25655)
---
 .../amazon/aws/hooks/test_base_aws_system.py       | 55 ------------------
 .../amazon/aws/operators/test_eks_system.py        | 58 -------------------
 .../amazon/aws/operators/test_emr_system.py        | 34 -----------
 .../amazon/aws/operators/test_glacier_system.py    | 37 ------------
 .../aws/operators/test_s3_bucket_tagging_system.py | 38 -------------
 .../aws/transfers/test_google_api_to_s3_system.py  | 66 ----------------------
 .../transfers/test_imap_attachment_to_s3_system.py | 41 --------------
 .../aws/transfers/test_s3_to_redshift_system.py    | 50 ----------------
 8 files changed, 379 deletions(-)

diff --git a/tests/providers/amazon/aws/hooks/test_base_aws_system.py b/tests/providers/amazon/aws/hooks/test_base_aws_system.py
deleted file mode 100644
index cb9b674eee..0000000000
--- a/tests/providers/amazon/aws/hooks/test_base_aws_system.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import json
-import os
-from unittest import mock
-
-import pytest
-
-from airflow.models import Connection
-from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
-from tests.providers.google.cloud.utils.gcp_authenticator import GCP_AWS_KEY
-from tests.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context
-
-ROLE_ANR = os.environ.get('GCP_AWS_ROLE_ANR', "arn:aws:iam::123456:role/role_arn")
-AUDIENCE = os.environ.get('GCP_AWS_AUDIENCE', 'aws-federation.airflow.apache.org')
-
-
-@pytest.mark.system("google.cloud")
-@pytest.mark.credential_file(GCP_AWS_KEY)
-class AwsBaseHookSystemTest(GoogleSystemTest):
-    @provide_gcp_context(GCP_AWS_KEY)
-    def test_run_example_gcp_vision_autogenerated_id_dag(self):
-        mock_connection = Connection(
-            conn_type="aws",
-            extra=json.dumps(
-                {
-                    "role_arn": ROLE_ANR,
-                    "assume_role_method": "assume_role_with_web_identity",
-                    "assume_role_with_web_identity_federation": 'google',
-                    "assume_role_with_web_identity_federation_audience": AUDIENCE,
-                }
-            ),
-        )
-
-        with mock.patch.dict('os.environ', AIRFLOW_CONN_AWS_DEFAULT=mock_connection.get_uri()):
-            hook = AwsBaseHook(client_type='s3')
-
-            client = hook.get_conn()
-            response = client.list_buckets()
-            assert 'Buckets' in response
diff --git a/tests/providers/amazon/aws/operators/test_eks_system.py b/tests/providers/amazon/aws/operators/test_eks_system.py
deleted file mode 100644
index 29bbe160fb..0000000000
--- a/tests/providers/amazon/aws/operators/test_eks_system.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-import pytest
-
-from tests.test_utils.amazon_system_helpers import (
-    AWS_DAG_FOLDER,
-    AWS_EKS_KEY,
-    AmazonSystemTest,
-    provide_aws_context,
-)
-
-
-@pytest.mark.system("amazon.aws")
-@pytest.mark.backend("mysql", "postgres")
-@pytest.mark.credential_file(AWS_EKS_KEY)
-class ExampleDagsSystemTest(AmazonSystemTest):
-    @provide_aws_context(AWS_EKS_KEY)
-    def setUp(self):
-        super().setUp()
-
-    @provide_aws_context(AWS_EKS_KEY)
-    def tearDown(self):
-        super().tearDown()
-
-    @pytest.mark.long_running
-    @provide_aws_context(AWS_EKS_KEY)
-    def test_run_example_dag_eks_create_cluster(self):
-        self.run_dag('create_eks_cluster_dag', AWS_DAG_FOLDER)
-
-    @pytest.mark.long_running
-    @provide_aws_context(AWS_EKS_KEY)
-    def test_run_example_dag_eks_create_nodegroup(self):
-        self.run_dag('create_eks_nodegroup_dag', AWS_DAG_FOLDER)
-
-    @pytest.mark.long_running
-    @provide_aws_context(AWS_EKS_KEY)
-    def test_run_example_dag_create_eks_cluster_and_nodegroup(self):
-        self.run_dag('create_eks_cluster_and_nodegroup_dag', AWS_DAG_FOLDER)
-
-    @pytest.mark.long_running
-    @provide_aws_context(AWS_EKS_KEY)
-    def test_run_example_dag_eks_run_pod(self):
-        self.run_dag('eks_run_pod_dag', AWS_DAG_FOLDER)
diff --git a/tests/providers/amazon/aws/operators/test_emr_system.py b/tests/providers/amazon/aws/operators/test_emr_system.py
deleted file mode 100644
index c943343898..0000000000
--- a/tests/providers/amazon/aws/operators/test_emr_system.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from tests.test_utils.amazon_system_helpers import AWS_DAG_FOLDER, AmazonSystemTest
-
-
-class EmrSystemTest(AmazonSystemTest):
-    """
-    System tests for AWS EMR operators
-    """
-
-    @classmethod
-    def setup_class(cls):
-        cls.create_emr_default_roles()
-
-    def test_run_example_dag_emr_automatic_steps(self):
-        self.run_dag('emr_job_flow_automatic_steps_dag', AWS_DAG_FOLDER)
-
-    def test_run_example_dag_emr_manual_steps(self):
-        self.run_dag('emr_job_flow_manual_steps_dag', AWS_DAG_FOLDER)
diff --git a/tests/providers/amazon/aws/operators/test_glacier_system.py b/tests/providers/amazon/aws/operators/test_glacier_system.py
deleted file mode 100644
index c9a237ee97..0000000000
--- a/tests/providers/amazon/aws/operators/test_glacier_system.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from tests.test_utils.amazon_system_helpers import AWS_DAG_FOLDER, AmazonSystemTest
-from tests.test_utils.gcp_system_helpers import GoogleSystemTest
-
-BUCKET = "data_from_glacier"
-
-
-class GlacierSystemTest(AmazonSystemTest):
-    """
-    System test for AWS Glacier operators
-    """
-
-    def setUp(self):
-        super().setUp()
-        GoogleSystemTest.create_gcs_bucket(BUCKET)
-
-    def tearDown(self):
-        GoogleSystemTest.delete_gcs_bucket(BUCKET)
-
-    def test_run_example_dag(self):
-        self.run_dag(dag_id="example_glacier_to_gcs", dag_folder=AWS_DAG_FOLDER)
diff --git a/tests/providers/amazon/aws/operators/test_s3_bucket_tagging_system.py b/tests/providers/amazon/aws/operators/test_s3_bucket_tagging_system.py
deleted file mode 100644
index 4b6cbf66e5..0000000000
--- a/tests/providers/amazon/aws/operators/test_s3_bucket_tagging_system.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-import pytest
-
-from tests.test_utils.amazon_system_helpers import AWS_DAG_FOLDER, AmazonSystemTest, provide_aws_context
-
-BUCKET_NAME = "AmazonSystemTestBucket"
-
-
-@pytest.mark.backend("mysql", "postgres")
-@pytest.mark.system("amazon.aws")
-class ExampleDagsSystemTest(AmazonSystemTest):
-    @provide_aws_context()
-    def setUp(self):
-        super().setUp()
-
-    @provide_aws_context()
-    def tearDown(self):
-        super().tearDown()
-
-    @provide_aws_context()
-    def test_run_example_dag_bucket_tagging(self):
-        self.run_dag('s3_bucket_tagging_dag', AWS_DAG_FOLDER)
diff --git a/tests/providers/amazon/aws/transfers/test_google_api_to_s3_system.py b/tests/providers/amazon/aws/transfers/test_google_api_to_s3_system.py
deleted file mode 100644
index 9f06d72275..0000000000
--- a/tests/providers/amazon/aws/transfers/test_google_api_to_s3_system.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-import pytest
-
-from airflow.providers.amazon.aws.example_dags.example_google_api_sheets_to_s3 import (
-    S3_DESTINATION_KEY as SHEETS_S3_DESTINATION_KEY,
-)
-from airflow.providers.amazon.aws.example_dags.example_google_api_youtube_to_s3 import (
-    S3_BUCKET_NAME as YOUTUBE_S3_BUCKET_NAME,
-)
-from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from tests.providers.google.cloud.utils.gcp_authenticator import GMP_KEY
-from tests.test_utils.amazon_system_helpers import (
-    AWS_DAG_FOLDER,
-    AmazonSystemTest,
-    provide_aws_context,
-    provide_aws_s3_bucket,
-)
-from tests.test_utils.gcp_system_helpers import GoogleSystemTest, provide_gcp_context
-
-SHEETS_BUCKET, _ = S3Hook.parse_s3_url(SHEETS_S3_DESTINATION_KEY)
-
-
-@pytest.fixture
-def provide_s3_bucket_sheets():
-    with provide_aws_s3_bucket(SHEETS_BUCKET):
-        yield
-
-
-@pytest.fixture
-def provide_s3_bucket_youtube():
-    with provide_aws_s3_bucket(YOUTUBE_S3_BUCKET_NAME):
-        yield
-
-
-@pytest.mark.backend("mysql", "postgres")
-@pytest.mark.credential_file(GMP_KEY)
-class GoogleApiToS3OperatorExampleDagsSystemTest(GoogleSystemTest, AmazonSystemTest):
-    @pytest.mark.usefixtures("provide_s3_bucket_sheets")
-    @provide_aws_context()
-    @provide_gcp_context(GMP_KEY, scopes=['https://www.googleapis.com/auth/spreadsheets.readonly'])
-    def test_run_example_dag_google_api_to_s3_transfer_basic(self):
-        self.run_dag('example_google_api_to_s3_transfer_basic', AWS_DAG_FOLDER)
-
-    @pytest.mark.usefixtures("provide_s3_bucket_youtube")
-    @provide_aws_context()
-    @provide_gcp_context(GMP_KEY, scopes=['https://www.googleapis.com/auth/youtube.readonly'])
-    def test_run_example_dag_google_api_to_s3_transfer_advanced(self):
-        self.run_dag('example_google_api_to_s3_transfer_advanced', AWS_DAG_FOLDER)
diff --git a/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3_system.py b/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3_system.py
deleted file mode 100644
index 5c6b35a49f..0000000000
--- a/tests/providers/amazon/aws/transfers/test_imap_attachment_to_s3_system.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-import pytest
-
-from airflow.providers.amazon.aws.example_dags.example_imap_attachment_to_s3 import S3_BUCKET
-from tests.test_utils.amazon_system_helpers import (
-    AWS_DAG_FOLDER,
-    AmazonSystemTest,
-    provide_aws_context,
-    provide_aws_s3_bucket,
-)
-
-
-@pytest.fixture
-def provide_s3_bucket():
-    with provide_aws_s3_bucket(S3_BUCKET):
-        yield
-
-
-@pytest.mark.backend("mysql", "postgres")
-@pytest.mark.system("imap")
-class TestImapAttachmentToS3ExampleDags(AmazonSystemTest):
-    @pytest.mark.usefixtures("provide_s3_bucket")
-    @provide_aws_context()
-    def test_run_example_dag_imap_attachment_to_s3(self):
-        self.run_dag('example_imap_attachment_to_s3', AWS_DAG_FOLDER)
diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_redshift_system.py b/tests/providers/amazon/aws/transfers/test_s3_to_redshift_system.py
deleted file mode 100644
index ae731675d7..0000000000
--- a/tests/providers/amazon/aws/transfers/test_s3_to_redshift_system.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-import os
-
-import pytest
-
-from airflow.models import Connection
-from airflow.utils import db
-from airflow.utils.session import create_session
-from tests.test_utils import AIRFLOW_MAIN_FOLDER
-from tests.test_utils.amazon_system_helpers import AWS_DAG_FOLDER, AmazonSystemTest
-from tests.test_utils.terraform import Terraform
-
-
-@pytest.mark.backend("mysql", "postgres")
-class TestS3ToRedshiftExampleDags(AmazonSystemTest, Terraform):
-    TERRAFORM_DIR = os.path.join(
-        AIRFLOW_MAIN_FOLDER, "tests", "providers", "amazon", "aws", "infrastructure", "example_s3_to_redshift"
-    )
-
-    def setUp(self) -> None:
-        super().setUp()
-        host, port = self.get_tf_output("redshift_endpoint").split(':')
-        schema = self.get_tf_output("redshift_database_name")
-        login = self.get_tf_output("redshift_master_username")
-        password = self.get_tf_output("redshift_master_password")
-        db.merge_conn(Connection("redshift_default", "postgres", host, login, password, schema, port))
-
-    def test_run_example_dag_s3_to_redshift(self):
-        self.run_dag('example_s3_to_redshift', AWS_DAG_FOLDER)
-
-    def tearDown(self) -> None:
-        super().tearDown()
-        with create_session() as session:
-            session.query(Connection).filter(Connection.conn_id == "redshift_default").delete()