Posted to commits@airflow.apache.org by po...@apache.org on 2023/02/25 08:24:11 UTC

[airflow] branch main updated: migrated tests/providers/google/cloud/transfers to pytest (#29757)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 9de71187ed migrated tests/providers/google/cloud/transfers to pytest (#29757)
9de71187ed is described below

commit 9de71187ed53dd4e9f91d2b148c8db6bf69221a8
Author: Abhishek-kumar-samsung <bi...@gmail.com>
AuthorDate: Sat Feb 25 13:54:02 2023 +0530

    migrated tests/providers/google/cloud/transfers to pytest (#29757)
---
 .../google/cloud/transfers/test_adls_to_gcs.py     |  3 +-
 .../cloud/transfers/test_azure_fileshare_to_gcs.py |  3 +-
 .../cloud/transfers/test_bigquery_to_bigquery.py   |  3 +-
 .../google/cloud/transfers/test_bigquery_to_gcs.py |  3 +-
 .../cloud/transfers/test_bigquery_to_mssql.py      |  3 +-
 .../cloud/transfers/test_bigquery_to_mysql.py      |  3 +-
 .../cloud/transfers/test_cassandra_to_gcs.py       |  3 +-
 .../google/cloud/transfers/test_gcs_to_bigquery.py |  5 ++-
 .../google/cloud/transfers/test_gcs_to_gcs.py      |  3 +-
 .../google/cloud/transfers/test_gcs_to_local.py    |  3 +-
 .../google/cloud/transfers/test_gcs_to_sftp.py     | 36 ++++++++++++----------
 .../google/cloud/transfers/test_gdrive_to_local.py |  4 +--
 .../google/cloud/transfers/test_local_to_gcs.py    |  7 ++---
 .../google/cloud/transfers/test_mssql_to_gcs.py    |  9 +++---
 .../google/cloud/transfers/test_mysql_to_gcs.py    |  9 +++---
 .../google/cloud/transfers/test_oracle_to_gcs.py   |  3 +-
 .../google/cloud/transfers/test_postgres_to_gcs.py | 13 ++++----
 .../google/cloud/transfers/test_s3_to_gcs.py       |  3 +-
 .../cloud/transfers/test_salesforce_to_gcs.py      |  3 +-
 .../google/cloud/transfers/test_sftp_to_gcs.py     |  3 +-
 .../google/cloud/transfers/test_sql_to_gcs.py      |  3 +-
 21 files changed, 54 insertions(+), 71 deletions(-)
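
The pattern applied throughout this diff: drop the unittest.TestCase base class, swap parameterized.expand for pytest.mark.parametrize (which names the argument list explicitly), rename setUp/tearDown to setup_method/teardown_method and setUpClass/tearDownClass to setup_class/teardown_class, and list the mock.patch-injected arguments right after self, ahead of the parametrized ones. A minimal before/after sketch of that pattern, using hypothetical names (convert, TestExampleOperator) rather than code taken from any file below:

    # Before (unittest + parameterized), parametrized args preceded the mocks:
    #
    # class TestExampleOperator(unittest.TestCase):
    #     @parameterized.expand([("a", 1), ("b", 2)])
    #     @mock.patch("json.dumps")
    #     def test_convert(self, value, expected, dumps_mock):
    #         assert convert(value) == expected

    # After (plain pytest):
    from unittest import mock

    import pytest


    def convert(value):
        # hypothetical helper standing in for the operator logic under test
        return {"a": 1, "b": 2}[value]


    class TestExampleOperator:
        @pytest.mark.parametrize("value, expected", [("a", 1), ("b", 2)])
        @mock.patch("json.dumps")  # injected mocks now come directly after self
        def test_convert(self, dumps_mock, value, expected):
            assert convert(value) == expected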

diff --git a/tests/providers/google/cloud/transfers/test_adls_to_gcs.py b/tests/providers/google/cloud/transfers/test_adls_to_gcs.py
index e13b3ddd35..8575fc353f 100644
--- a/tests/providers/google/cloud/transfers/test_adls_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_adls_to_gcs.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
 from airflow.providers.google.cloud.transfers.adls_to_gcs import ADLSToGCSOperator
@@ -37,7 +36,7 @@ GCS_CONN_ID = "google_cloud_default"
 IMPERSONATION_CHAIN = ["ACCOUNT_1", "ACCOUNT_2", "ACCOUNT_3"]
 
 
-class TestAdlsToGoogleCloudStorageOperator(unittest.TestCase):
+class TestAdlsToGoogleCloudStorageOperator:
     def test_init(self):
         """Test AdlsToGoogleCloudStorageOperator instance is properly initialized."""
 
diff --git a/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs.py b/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs.py
index 386d04e87f..49bae9dfdf 100644
--- a/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs.py
@@ -16,7 +16,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
 from airflow.providers.google.cloud.transfers.azure_fileshare_to_gcs import AzureFileShareToGCSOperator
@@ -31,7 +30,7 @@ GCS_CONN_ID = "google_cloud_default"
 IMPERSONATION_CHAIN = ["ACCOUNT_1", "ACCOUNT_2", "ACCOUNT_3"]
 
 
-class TestAzureFileShareToGCSOperator(unittest.TestCase):
+class TestAzureFileShareToGCSOperator:
     def test_init(self):
         """Test AzureFileShareToGCSOperator instance is properly initialized."""
 
diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py b/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py
index 13eb4eda5c..5c0f8f560c 100644
--- a/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py
+++ b/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
 from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import BigQueryToBigQueryOperator
@@ -28,7 +27,7 @@ TEST_DATASET = "test-dataset"
 TEST_TABLE_ID = "test-table-id"
 
 
-class TestBigQueryToBigQueryOperator(unittest.TestCase):
+class TestBigQueryToBigQueryOperator:
     @mock.patch(BQ_HOOK_PATH)
     def test_execute_without_location_should_execute_successfully(self, mock_hook):
         source_project_dataset_tables = f"{TEST_DATASET}.{TEST_TABLE_ID}"
diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py b/tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py
index 014a86405f..3360084642 100644
--- a/tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 from unittest.mock import MagicMock
 
@@ -34,7 +33,7 @@ TEST_TABLE_ID = "test-table-id"
 PROJECT_ID = "test-project-id"
 
 
-class TestBigQueryToGCSOperator(unittest.TestCase):
+class TestBigQueryToGCSOperator:
     @mock.patch("airflow.providers.google.cloud.transfers.bigquery_to_gcs.BigQueryHook")
     def test_execute(self, mock_hook):
         source_project_dataset_table = f"{PROJECT_ID}:{TEST_DATASET}.{TEST_TABLE_ID}"
diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_mssql.py b/tests/providers/google/cloud/transfers/test_bigquery_to_mssql.py
index 3c4e8f0ecc..1fd5105767 100644
--- a/tests/providers/google/cloud/transfers/test_bigquery_to_mssql.py
+++ b/tests/providers/google/cloud/transfers/test_bigquery_to_mssql.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
 import pytest
@@ -36,7 +35,7 @@ TEST_DAG_ID = "test-bigquery-operators"
 
 
 @pytest.mark.backend("mssql")
-class TestBigQueryToMsSqlOperator(unittest.TestCase):
+class TestBigQueryToMsSqlOperator:
     @mock.patch("airflow.providers.google.cloud.transfers.bigquery_to_mssql.BigQueryHook")
     def test_execute_good_request_to_bq(self, mock_hook):
         destination_table = "table"
diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py b/tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py
index 18c37ec076..720961d3c4 100644
--- a/tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py
+++ b/tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
 from airflow.providers.google.cloud.transfers.bigquery_to_mysql import BigQueryToMySqlOperator
@@ -28,7 +27,7 @@ TEST_TABLE_ID = "test-table-id"
 TEST_DAG_ID = "test-bigquery-operators"
 
 
-class TestBigQueryToMySqlOperator(unittest.TestCase):
+class TestBigQueryToMySqlOperator:
     @mock.patch("airflow.providers.google.cloud.transfers.bigquery_to_mysql.BigQueryHook")
     def test_execute_good_request_to_bq(self, mock_hook):
         destination_table = "table"
diff --git a/tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py b/tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py
index d448875147..56b47f34b2 100644
--- a/tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_cassandra_to_gcs.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 from unittest.mock import call
 
@@ -31,7 +30,7 @@ CQL = "select * from keyspace1.table1"
 TASK_ID = "test-cas-to-gcs"
 
 
-class TestCassandraToGCS(unittest.TestCase):
+class TestCassandraToGCS:
     @mock.patch("airflow.providers.google.cloud.transfers.cassandra_to_gcs.NamedTemporaryFile")
     @mock.patch("airflow.providers.google.cloud.transfers.cassandra_to_gcs.GCSHook.upload")
     @mock.patch("airflow.providers.google.cloud.transfers.cassandra_to_gcs.CassandraHook")
diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py b/tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py
index 57dc7e0f59..609f9c33a1 100644
--- a/tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py
+++ b/tests/providers/google/cloud/transfers/test_gcs_to_bigquery.py
@@ -18,7 +18,6 @@
 from __future__ import annotations
 
 import json
-import unittest
 from unittest import mock
 from unittest.mock import MagicMock, call
 
@@ -67,7 +66,7 @@ pytest.real_job_id = f"{job_id}_{hash_}"
 GCS_TO_BQ_PATH = "airflow.providers.google.cloud.transfers.gcs_to_bigquery.{}"
 
 
-class TestGCSToBigQueryOperator(unittest.TestCase):
+class TestGCSToBigQueryOperator:
     @mock.patch(GCS_TO_BQ_PATH.format("BigQueryHook"))
     def test_max_value_external_table_should_execute_successfully(self, hook):
         hook.return_value.insert_job.side_effect = [
@@ -1187,7 +1186,7 @@ class TestGCSToBigQueryOperator(unittest.TestCase):
         )
 
 
-class TestAsyncGCSToBigQueryOperator(unittest.TestCase):
+class TestAsyncGCSToBigQueryOperator:
     @mock.patch(GCS_TO_BQ_PATH.format("BigQueryHook"))
     def test_execute_without_external_table_async_should_execute_successfully(self, hook):
         """
diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_gcs.py b/tests/providers/google/cloud/transfers/test_gcs_to_gcs.py
index 1c2328d4e5..d8fa94c0c6 100644
--- a/tests/providers/google/cloud/transfers/test_gcs_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_gcs_to_gcs.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from datetime import datetime
 from unittest import mock
 
@@ -55,7 +54,7 @@ MOD_TIME_1 = datetime(2016, 1, 1)
 MOD_TIME_2 = datetime(2019, 1, 1)
 
 
-class TestGoogleCloudStorageToCloudStorageOperator(unittest.TestCase):
+class TestGoogleCloudStorageToCloudStorageOperator:
     """
     Tests the three use-cases for the wildcard operator. These are
     no_prefix: *test_object
diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_local.py b/tests/providers/google/cloud/transfers/test_gcs_to_local.py
index 20efa8afef..4c08645003 100644
--- a/tests/providers/google/cloud/transfers/test_gcs_to_local.py
+++ b/tests/providers/google/cloud/transfers/test_gcs_to_local.py
@@ -16,7 +16,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 from unittest.mock import MagicMock
 
@@ -42,7 +41,7 @@ FILE_CONTENT_BYTES_UTF16 = (
 )
 
 
-class TestGoogleCloudStorageDownloadOperator(unittest.TestCase):
+class TestGoogleCloudStorageDownloadOperator:
     @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_local.GCSHook")
     def test_execute(self, mock_hook):
         operator = GCSToLocalFilesystemOperator(
diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_sftp.py b/tests/providers/google/cloud/transfers/test_gcs_to_sftp.py
index 445bec1167..f5c9b5b987 100644
--- a/tests/providers/google/cloud/transfers/test_gcs_to_sftp.py
+++ b/tests/providers/google/cloud/transfers/test_gcs_to_sftp.py
@@ -19,11 +19,9 @@
 from __future__ import annotations
 
 import os
-import unittest
 from unittest import mock
 
 import pytest
-from parameterized import parameterized
 
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.transfers.gcs_to_sftp import GCSToSFTPOperator
@@ -37,19 +35,20 @@ TEST_BUCKET = "test-bucket"
 DESTINATION_SFTP = "destination_path"
 
 
-class TestGoogleCloudStorageToSFTPOperator(unittest.TestCase):
-    @parameterized.expand(
+class TestGoogleCloudStorageToSFTPOperator:
+    @pytest.mark.parametrize(
+        "source_object, target_object, keep_directory_structure",
         [
             ("folder/test_object.txt", "folder/test_object.txt", True),
             ("folder/subfolder/test_object.txt", "folder/subfolder/test_object.txt", True),
             ("folder/test_object.txt", "test_object.txt", False),
             ("folder/subfolder/test_object.txt", "test_object.txt", False),
-        ]
+        ],
     )
     @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSHook")
     @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.SFTPHook")
     def test_execute_copy_single_file(
-        self, source_object, target_object, keep_directory_structure, sftp_hook_mock, gcs_hook_mock
+        self, sftp_hook_mock, gcs_hook_mock, source_object, target_object, keep_directory_structure
     ):
         task = GCSToSFTPOperator(
             task_id=TASK_ID,
@@ -81,18 +80,19 @@ class TestGoogleCloudStorageToSFTPOperator(unittest.TestCase):
 
         gcs_hook_mock.return_value.delete.assert_not_called()
 
-    @parameterized.expand(
+    @pytest.mark.parametrize(
+        "source_object, target_object, keep_directory_structure",
         [
             ("folder/test_object.txt", "folder/test_object.txt", True),
             ("folder/subfolder/test_object.txt", "folder/subfolder/test_object.txt", True),
             ("folder/test_object.txt", "test_object.txt", False),
             ("folder/subfolder/test_object.txt", "test_object.txt", False),
-        ]
+        ],
     )
     @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSHook")
     @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.SFTPHook")
     def test_execute_move_single_file(
-        self, source_object, target_object, keep_directory_structure, sftp_hook_mock, gcs_hook_mock
+        self, sftp_hook_mock, gcs_hook_mock, source_object, target_object, keep_directory_structure
     ):
         task = GCSToSFTPOperator(
             task_id=TASK_ID,
@@ -124,7 +124,8 @@ class TestGoogleCloudStorageToSFTPOperator(unittest.TestCase):
 
         gcs_hook_mock.return_value.delete.assert_called_once_with(TEST_BUCKET, source_object)
 
-    @parameterized.expand(
+    @pytest.mark.parametrize(
+        "source_object, prefix, delimiter, gcs_files_list, target_objects, keep_directory_structure",
         [
             (
                 "folder/test_object*.txt",
@@ -170,20 +171,20 @@ class TestGoogleCloudStorageToSFTPOperator(unittest.TestCase):
                 ["folder/test_object/file1.txt", "folder/test_object/file2.txt"],
                 True,
             ),
-        ]
+        ],
     )
     @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSHook")
     @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.SFTPHook")
     def test_execute_copy_with_wildcard(
         self,
+        sftp_hook_mock,
+        gcs_hook_mock,
         source_object,
         prefix,
         delimiter,
         gcs_files_list,
         target_objects,
         keep_directory_structure,
-        sftp_hook_mock,
-        gcs_hook_mock,
     ):
         gcs_hook_mock.return_value.list.return_value = gcs_files_list
         operator = GCSToSFTPOperator(
@@ -216,7 +217,8 @@ class TestGoogleCloudStorageToSFTPOperator(unittest.TestCase):
 
         gcs_hook_mock.return_value.delete.assert_not_called()
 
-    @parameterized.expand(
+    @pytest.mark.parametrize(
+        "source_object, prefix, delimiter, gcs_files_list, target_objects, keep_directory_structure",
         [
             (
                 "folder/test_object*.txt",
@@ -262,20 +264,20 @@ class TestGoogleCloudStorageToSFTPOperator(unittest.TestCase):
                 ["folder/test_object/file1.txt", "folder/test_object/file2.txt"],
                 True,
             ),
-        ]
+        ],
     )
     @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.GCSHook")
     @mock.patch("airflow.providers.google.cloud.transfers.gcs_to_sftp.SFTPHook")
     def test_execute_move_with_wildcard(
         self,
+        sftp_hook_mock,
+        gcs_hook_mock,
         source_object,
         prefix,
         delimiter,
         gcs_files_list,
         target_objects,
         keep_directory_structure,
-        sftp_hook_mock,
-        gcs_hook_mock,
     ):
         gcs_hook_mock.return_value.list.return_value = gcs_files_list
         operator = GCSToSFTPOperator(
diff --git a/tests/providers/google/cloud/transfers/test_gdrive_to_local.py b/tests/providers/google/cloud/transfers/test_gdrive_to_local.py
index def50026f8..3118f1bec1 100644
--- a/tests/providers/google/cloud/transfers/test_gdrive_to_local.py
+++ b/tests/providers/google/cloud/transfers/test_gdrive_to_local.py
@@ -18,7 +18,7 @@
 from __future__ import annotations
 
 from tempfile import NamedTemporaryFile
-from unittest import TestCase, mock
+from unittest import mock
 
 from airflow.providers.google.cloud.transfers.gdrive_to_local import GoogleDriveToLocalOperator
 
@@ -28,7 +28,7 @@ FILE_NAME = "file.pdf"
 GCP_CONN_ID = "google_cloud_default"
 
 
-class TestGoogleDriveToLocalOperator(TestCase):
+class TestGoogleDriveToLocalOperator:
     @mock.patch("airflow.providers.google.cloud.transfers.gdrive_to_local.GoogleDriveHook")
     def test_execute(self, hook_mock):
         with NamedTemporaryFile("wb") as temp_file:
diff --git a/tests/providers/google/cloud/transfers/test_local_to_gcs.py b/tests/providers/google/cloud/transfers/test_local_to_gcs.py
index b763c46336..1bf0757afe 100644
--- a/tests/providers/google/cloud/transfers/test_local_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_local_to_gcs.py
@@ -19,7 +19,6 @@ from __future__ import annotations
 
 import datetime
 import os
-import unittest
 from glob import glob
 from unittest import mock
 
@@ -29,11 +28,11 @@ from airflow.models.dag import DAG
 from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
 
 
-class TestFileToGcsOperator(unittest.TestCase):
+class TestFileToGcsOperator:
 
     _config = {"bucket": "dummy", "mime_type": "application/octet-stream", "gzip": False}
 
-    def setUp(self):
+    def setup_method(self):
         args = {"owner": "airflow", "start_date": datetime.datetime(2017, 1, 1)}
         self.dag = DAG("test_dag_id", default_args=args)
         self.testfile1 = "/tmp/fake1.csv"
@@ -44,7 +43,7 @@ class TestFileToGcsOperator(unittest.TestCase):
             f.write(b"x" * 393216)
         self.testfiles = [self.testfile1, self.testfile2]
 
-    def tearDown(self):
+    def teardown_method(self):
         os.remove(self.testfile1)
         os.remove(self.testfile2)
 
diff --git a/tests/providers/google/cloud/transfers/test_mssql_to_gcs.py b/tests/providers/google/cloud/transfers/test_mssql_to_gcs.py
index f2aeb218e3..71b626b60f 100644
--- a/tests/providers/google/cloud/transfers/test_mssql_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_mssql_to_gcs.py
@@ -18,11 +18,9 @@
 from __future__ import annotations
 
 import datetime
-import unittest
 from unittest import mock
 
 import pytest
-from parameterized import parameterized
 
 try:
     from airflow.providers.google.cloud.transfers.mssql_to_gcs import MSSQLToGCSOperator
@@ -55,8 +53,9 @@ SCHEMA_JSON = [
 
 
 @pytest.mark.backend("mssql")
-class TestMsSqlToGoogleCloudStorageOperator(unittest.TestCase):
-    @parameterized.expand(
+class TestMsSqlToGoogleCloudStorageOperator:
+    @pytest.mark.parametrize(
+        "value, expected",
         [
             ("string", "string"),
             (32.9, 32.9),
@@ -66,7 +65,7 @@ class TestMsSqlToGoogleCloudStorageOperator(unittest.TestCase):
             (datetime.datetime(1970, 1, 1, 1, 0), "1970-01-01T01:00:00"),
             (datetime.time(hour=0, minute=0, second=0), "00:00:00"),
             (datetime.time(hour=23, minute=59, second=59), "23:59:59"),
-        ]
+        ],
     )
     def test_convert_type(self, value, expected):
         op = MSSQLToGCSOperator(
diff --git a/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py b/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py
index 4688b9bf77..515014dbda 100644
--- a/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py
@@ -19,11 +19,9 @@ from __future__ import annotations
 
 import datetime
 import decimal
-import unittest
 from unittest import mock
 
 import pytest
-from parameterized import parameterized
 
 TASK_ID = "test-mysql-to-gcs"
 MYSQL_CONN_ID = "mysql_conn_test"
@@ -75,7 +73,7 @@ except ImportError:
 
 
 @pytest.mark.backend("mysql")
-class TestMySqlToGoogleCloudStorageOperator(unittest.TestCase):
+class TestMySqlToGoogleCloudStorageOperator:
     def test_init(self):
         """Test MySqlToGoogleCloudStorageOperator instance is properly initialized."""
         op = MySQLToGCSOperator(
@@ -93,7 +91,8 @@ class TestMySqlToGoogleCloudStorageOperator(unittest.TestCase):
         assert op.export_format == "csv"
         assert op.field_delimiter == "|"
 
-    @parameterized.expand(
+    @pytest.mark.parametrize(
+        "value, schema_type, expected",
         [
             ("string", None, "string"),
             (datetime.date(1970, 1, 2), None, "1970-01-02 00:00:00"),
@@ -108,7 +107,7 @@ class TestMySqlToGoogleCloudStorageOperator(unittest.TestCase):
             (b"bytes", "BYTES", "Ynl0ZXM="),
             (b"\x00\x01", "INTEGER", 1),
             (None, "BYTES", None),
-        ]
+        ],
     )
     def test_convert_type(self, value, schema_type, expected):
         op = MySQLToGCSOperator(
diff --git a/tests/providers/google/cloud/transfers/test_oracle_to_gcs.py b/tests/providers/google/cloud/transfers/test_oracle_to_gcs.py
index f2d02f6a29..ce160efdd4 100644
--- a/tests/providers/google/cloud/transfers/test_oracle_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_oracle_to_gcs.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
 import oracledb
@@ -48,7 +47,7 @@ SCHEMA_JSON = [
 ]
 
 
-class TestOracleToGoogleCloudStorageOperator(unittest.TestCase):
+class TestOracleToGoogleCloudStorageOperator:
     def test_init(self):
         """Test OracleToGoogleCloudStorageOperator instance is properly initialized."""
         op = OracleToGCSOperator(task_id=TASK_ID, sql=SQL, bucket=BUCKET, filename=JSON_FILENAME)
diff --git a/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py b/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py
index 0a0b9e8cb0..4d9934f188 100644
--- a/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py
@@ -18,12 +18,10 @@
 from __future__ import annotations
 
 import datetime
-import unittest
 from unittest.mock import patch
 
 import pytest
 import pytz
-from parameterized import parameterized
 
 from airflow.providers.google.cloud.transfers.postgres_to_gcs import PostgresToGCSOperator
 from airflow.providers.postgres.hooks.postgres import PostgresHook
@@ -50,9 +48,9 @@ SCHEMA_JSON = (
 
 
 @pytest.mark.backend("postgres")
-class TestPostgresToGoogleCloudStorageOperator(unittest.TestCase):
+class TestPostgresToGoogleCloudStorageOperator:
     @classmethod
-    def setUpClass(cls):
+    def setup_class(cls):
         postgres = PostgresHook()
         with postgres.get_conn() as conn:
             with conn.cursor() as cur:
@@ -79,7 +77,7 @@ class TestPostgresToGoogleCloudStorageOperator(unittest.TestCase):
                 )
 
     @classmethod
-    def tearDownClass(cls):
+    def teardown_class(cls):
         postgres = PostgresHook()
         with postgres.get_conn() as conn:
             with conn.cursor() as cur:
@@ -102,7 +100,8 @@ class TestPostgresToGoogleCloudStorageOperator(unittest.TestCase):
         with open(tmp_filename, "rb") as file:
             assert b"".join(NDJSON_LINES) == file.read()
 
-    @parameterized.expand(
+    @pytest.mark.parametrize(
+        "value, expected",
         [
             ("string", "string"),
             (32.9, 32.9),
@@ -116,7 +115,7 @@ class TestPostgresToGoogleCloudStorageOperator(unittest.TestCase):
             ),
             (datetime.time(hour=0, minute=0, second=0), "0:00:00"),
             (datetime.time(hour=23, minute=59, second=59), "23:59:59"),
-        ]
+        ],
     )
     def test_convert_type(self, value, expected):
         op = PostgresToGCSOperator(
diff --git a/tests/providers/google/cloud/transfers/test_s3_to_gcs.py b/tests/providers/google/cloud/transfers/test_s3_to_gcs.py
index 6cf252f3ea..5bcf44e7ba 100644
--- a/tests/providers/google/cloud/transfers/test_s3_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_s3_to_gcs.py
@@ -17,7 +17,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from unittest import mock
 
 from airflow.providers.google.cloud.transfers.s3_to_gcs import S3ToGCSOperator
@@ -33,7 +32,7 @@ GCS_CONN_ID = "google_cloud_default"
 IMPERSONATION_CHAIN = ["ACCOUNT_1", "ACCOUNT_2", "ACCOUNT_3"]
 
 
-class TestS3ToGoogleCloudStorageOperator(unittest.TestCase):
+class TestS3ToGoogleCloudStorageOperator:
     def test_init(self):
         """Test S3ToGCSOperator instance is properly initialized."""
 
diff --git a/tests/providers/google/cloud/transfers/test_salesforce_to_gcs.py b/tests/providers/google/cloud/transfers/test_salesforce_to_gcs.py
index cafb0af968..3001376ca0 100644
--- a/tests/providers/google/cloud/transfers/test_salesforce_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_salesforce_to_gcs.py
@@ -16,7 +16,6 @@
 # under the License.
 from __future__ import annotations
 
-import unittest
 from collections import OrderedDict
 from unittest import mock
 
@@ -53,7 +52,7 @@ INCLUDE_DELETED = True
 QUERY_PARAMS = {"DEFAULT_SETTING": "ENABLED"}
 
 
-class TestSalesforceToGcsOperator(unittest.TestCase):
+class TestSalesforceToGcsOperator:
     @mock.patch.object(GCSHook, "upload")
     @mock.patch.object(SalesforceHook, "write_object_to_file")
     @mock.patch.object(SalesforceHook, "make_query")
diff --git a/tests/providers/google/cloud/transfers/test_sftp_to_gcs.py b/tests/providers/google/cloud/transfers/test_sftp_to_gcs.py
index 313a0e2e45..c22f1b5906 100644
--- a/tests/providers/google/cloud/transfers/test_sftp_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_sftp_to_gcs.py
@@ -19,7 +19,6 @@
 from __future__ import annotations
 
 import os
-import unittest
 from unittest import mock
 
 import pytest
@@ -53,7 +52,7 @@ DESTINATION_PATH_DIR = "destination_dir"
 DESTINATION_PATH_FILE = "destination_dir/copy.txt"
 
 
-class TestSFTPToGCSOperator(unittest.TestCase):
+class TestSFTPToGCSOperator:
     @mock.patch("airflow.providers.google.cloud.transfers.sftp_to_gcs.GCSHook")
     @mock.patch("airflow.providers.google.cloud.transfers.sftp_to_gcs.SFTPHook")
     def test_execute_copy_single_file(self, sftp_hook, gcs_hook):
diff --git a/tests/providers/google/cloud/transfers/test_sql_to_gcs.py b/tests/providers/google/cloud/transfers/test_sql_to_gcs.py
index d2c9594df7..c82e9d118e 100644
--- a/tests/providers/google/cloud/transfers/test_sql_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_sql_to_gcs.py
@@ -17,7 +17,6 @@
 from __future__ import annotations
 
 import json
-import unittest
 from unittest import mock
 from unittest.mock import MagicMock, Mock
 
@@ -86,7 +85,7 @@ class DummySQLToGCSOperator(BaseSQLToGCSOperator):
         pass
 
 
-class TestBaseSQLToGCSOperator(unittest.TestCase):
+class TestBaseSQLToGCSOperator:
     @mock.patch("airflow.providers.google.cloud.transfers.sql_to_gcs.NamedTemporaryFile")
     @mock.patch.object(csv.writer, "writerow")
     @mock.patch.object(GCSHook, "upload")