Posted to commits@airflow.apache.org by as...@apache.org on 2022/01/11 18:21:21 UTC
[airflow] branch main updated: Reduce test_utils.mock_operators to only that which is reused (#20812)
This is an automated email from the ASF dual-hosted git repository.
ash pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 0afe3d1 Reduce test_utils.mock_operators to only that which is reused (#20812)
0afe3d1 is described below
commit 0afe3d129db2792f58f6fd8cc5e278aba9cf166c
Author: Ash Berlin-Taylor <as...@firemirror.com>
AuthorDate: Tue Jan 11 18:20:46 2022 +0000
Reduce test_utils.mock_operators to only that which is reused (#20812)
There was a lot in tests.test_utils.mock_* that was only used by the
Hive provider -- all of that has been removed and put in
tests.providers.apache.hive
Anything else that was only used by a single test has been moved into
that specific test.
(The main driver for this was to remove the import of HiveOperator from
mock_operators to make it easier to run "core" tests without needing
_all_ of the extras installed.)
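In practice, the import change for a test that uses the shared mocks looks
like this (a minimal sketch; the assertion is illustrative and assumes the
Hive provider is importable in the test environment):

    # old location, deleted by this commit:
    #   from tests.test_utils.mock_hooks import MockHiveServer2Hook
    # new location, the Hive provider test package:
    from tests.providers.apache.hive import MockHiveServer2Hook

    hook = MockHiveServer2Hook()
    assert hook.get_conn() is hook.mock_cursor  # get_conn is a MagicMock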
---
tests/models/test_baseoperator.py | 9 +-
tests/providers/apache/hive/__init__.py | 122 +++++++++++++++++++++
tests/providers/apache/hive/hooks/test_hive.py | 14 ++-
tests/providers/apache/hive/operators/test_hive.py | 16 +--
.../apache/hive/operators/test_hive_stats.py | 27 ++++-
.../apache/hive/sensors/test_hive_partition.py | 3 +-
.../hive/sensors/test_metastore_partition.py | 3 +-
.../hive/sensors/test_named_hive_partition.py | 13 +--
.../apache/hive/transfers/test_hive_to_mysql.py | 3 +-
.../apache/hive/transfers/test_hive_to_samba.py | 20 +++-
tests/test_utils/mock_hooks.py | 94 ----------------
tests/test_utils/mock_operators.py | 18 +--
tests/test_utils/mock_process.py | 98 -----------------
tests/www/views/test_views_extra_links.py | 9 +-
14 files changed, 196 insertions(+), 253 deletions(-)
diff --git a/tests/models/test_baseoperator.py b/tests/models/test_baseoperator.py
index c0a4593..a3edde0 100644
--- a/tests/models/test_baseoperator.py
+++ b/tests/models/test_baseoperator.py
@@ -18,7 +18,7 @@
import logging
import uuid
from datetime import date, datetime
-from typing import Any
+from typing import Any, NamedTuple
from unittest import mock
import jinja2
@@ -42,7 +42,7 @@ from airflow.utils.task_group import TaskGroup
from airflow.utils.trigger_rule import TriggerRule
from airflow.utils.weight_rule import WeightRule
from tests.models import DEFAULT_DATE
-from tests.test_utils.mock_operators import DeprecatedOperator, MockNamedTuple, MockOperator
+from tests.test_utils.mock_operators import DeprecatedOperator, MockOperator
class ClassWithCustomAttributes:
@@ -86,6 +86,11 @@ class DummySubClass(DummyClass):
self.test_sub_param = test_sub_param
+class MockNamedTuple(NamedTuple):
+ var1: str
+ var2: str
+
+
class TestBaseOperator:
def test_apply(self):
dummy = DummyClass(test_param=True)
diff --git a/tests/providers/apache/hive/__init__.py b/tests/providers/apache/hive/__init__.py
index 08245b0..25212cc 100644
--- a/tests/providers/apache/hive/__init__.py
+++ b/tests/providers/apache/hive/__init__.py
@@ -17,9 +17,13 @@
# under the License.
from datetime import datetime
+from typing import Optional
from unittest import TestCase
+from unittest.mock import MagicMock
from airflow.models.dag import DAG
+from airflow.providers.apache.hive.hooks.hive import HiveCliHook, HiveMetastoreHook, HiveServer2Hook
+from airflow.providers.mysql.hooks.mysql import MySqlHook
DEFAULT_DATE = datetime(2015, 1, 1)
DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
@@ -45,3 +49,121 @@ class TestHiveEnvironment(TestCase):
PARTITION(ds='{{ ds }}')
SELECT state, year, name, gender, num FROM static_babynames;
"""
+
+
+class MockHiveMetastoreHook(HiveMetastoreHook):
+ def __init__(self, *args, **kwargs):
+ self._find_valid_server = MagicMock(return_value={})
+ self.get_metastore_client = MagicMock(return_value=MagicMock())
+ super().__init__()
+
+
+class MockHiveCliHook(HiveCliHook):
+ def __init__(self, *args, **kwargs):
+ super().__init__()
+ self.conn = MockConnectionCursor()
+ self.conn.schema = 'default'
+ self.conn.host = 'localhost'
+ self.conn.port = 10000
+ self.conn.login = None
+ self.conn.password = None
+
+ self.conn.execute = MagicMock()
+ self.get_conn = MagicMock(return_value=self.conn)
+ self.get_connection = MagicMock(return_value=MockDBConnection({}))
+
+
+class MockHiveServer2Hook(HiveServer2Hook):
+ def __init__(self, *args, **kwargs):
+ super().__init__()
+ self.mock_cursor = kwargs.get('connection_cursor', MockConnectionCursor())
+ self.mock_cursor.execute = MagicMock()
+ self.get_conn = MagicMock(return_value=self.mock_cursor)
+ self.get_connection = MagicMock(return_value=MockDBConnection({}))
+
+
+class MockMySqlHook(MySqlHook):
+ def __init__(self, *args, **kwargs):
+ self.conn = MockConnectionCursor()
+
+ self.conn.execute = MagicMock()
+ self.get_conn = MagicMock(return_value=self.conn)
+ self.get_records = MagicMock(return_value=[])
+ self.insert_rows = MagicMock(return_value=True)
+ super().__init__(*args, **kwargs)
+
+ def get_connection(self, *args, **kwargs):
+ return self.conn
+
+
+class MockDBConnection:
+ def __init__(self, extra_dejson=None, *args, **kwargs):
+ self.extra_dejson = extra_dejson
+ self.get_records = MagicMock(return_value=[['test_record']])
+
+ output = kwargs.get('output', ['' for _ in range(10)])
+ self.readline = MagicMock(side_effect=[line.encode() for line in output])
+
+ def status(self, *args, **kwargs):
+ return True
+
+
+class BaseMockConnectionCursor:
+ def __init__(self, **kwargs):
+ self.arraysize = None
+ self.description = [
+ ('hive_server_hook.a', 'INT_TYPE', None, None, None, None, True),
+ ('hive_server_hook.b', 'INT_TYPE', None, None, None, None, True),
+ ]
+ self.conn_exists = kwargs.get('exists', True)
+
+ def close(self):
+ pass
+
+ def cursor(self):
+ return self
+
+ def execute(self, values=None):
+ pass
+
+ def exists(self):
+ return self.conn_exists
+
+ def isfile(self):
+ return True
+
+ def remove(self):
+ pass
+
+ def upload(self, local_filepath, destination_filepath):
+ pass
+
+ def __next__(self):
+ return self.iterable
+
+ def __iter__(self):
+ yield from self.iterable
+
+
+class MockConnectionCursor(BaseMockConnectionCursor):
+ def __init__(self):
+ super().__init__()
+ self.iterable = [(1, 1), (2, 2)]
+
+
+class MockStdOut:
+ def __init__(self, *args, **kwargs):
+ output = kwargs.get('output', ['' for _ in range(10)])
+ self.readline = MagicMock(side_effect=[line.encode() for line in output])
+
+
+class MockSubProcess:
+ PIPE = -1
+ STDOUT = -2
+ returncode: Optional[int] = None
+
+ def __init__(self, *args, **kwargs):
+ self.stdout = MockStdOut(*args, **kwargs)
+
+ def wait(self):
+ return
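A usage sketch for the relocated process mocks (not part of the commit; the
mock.patch target below is an assumption about where HiveCliHook resolves
subprocess.Popen):

    from unittest import mock
    from tests.providers.apache.hive import MockHiveCliHook, MockSubProcess

    # MockSubProcess mimics just enough of Popen for the hook: the PIPE and
    # STDOUT constants, a .stdout whose readline() yields the given output
    # lines (encoded), and a no-op wait().
    fake_popen = mock.MagicMock(return_value=MockSubProcess(output=['row1', '']))
    with mock.patch('airflow.providers.apache.hive.hooks.hive.subprocess.Popen', fake_popen):
        hook = MockHiveCliHook()
        # hook.run_cli('SELECT 1;') would now consume MockSubProcess.stdout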
diff --git a/tests/providers/apache/hive/hooks/test_hive.py b/tests/providers/apache/hive/hooks/test_hive.py
index 7a0acfb..3171f20 100644
--- a/tests/providers/apache/hive/hooks/test_hive.py
+++ b/tests/providers/apache/hive/hooks/test_hive.py
@@ -36,15 +36,25 @@ from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook, HiveServ
from airflow.secrets.environment_variables import CONN_ENV_PREFIX
from airflow.utils import timezone
from airflow.utils.operator_helpers import AIRFLOW_VAR_NAME_FORMAT_MAPPING
+from tests.providers.apache.hive import (
+ BaseMockConnectionCursor,
+ MockHiveCliHook,
+ MockHiveServer2Hook,
+ MockSubProcess,
+)
from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces
-from tests.test_utils.mock_hooks import MockHiveCliHook, MockHiveServer2Hook
-from tests.test_utils.mock_process import EmptyMockConnectionCursor, MockSubProcess
DEFAULT_DATE = timezone.datetime(2015, 1, 1)
DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]
+class EmptyMockConnectionCursor(BaseMockConnectionCursor):
+ def __init__(self):
+ super().__init__()
+ self.iterable = []
+
+
@pytest.mark.skipif(
PY39,
reason="Hive does not run on Python 3.9 because it brings SASL via thrift-sasl."
diff --git a/tests/providers/apache/hive/operators/test_hive.py b/tests/providers/apache/hive/operators/test_hive.py
index 4b0f01f..8a167da 100644
--- a/tests/providers/apache/hive/operators/test_hive.py
+++ b/tests/providers/apache/hive/operators/test_hive.py
@@ -24,14 +24,12 @@ from airflow.configuration import conf
from airflow.models import DagRun, TaskInstance
from airflow.providers.apache.hive.operators.hive import HiveOperator
from airflow.utils import timezone
-from tests.providers.apache.hive import DEFAULT_DATE, TestHiveEnvironment
-from tests.test_utils.mock_operators import MockHiveOperator
-from tests.test_utils.mock_process import MockSubProcess
+from tests.providers.apache.hive import DEFAULT_DATE, MockSubProcess, TestHiveEnvironment
class HiveOperatorConfigTest(TestHiveEnvironment):
def test_hive_airflow_default_config_queue(self):
- op = MockHiveOperator(
+ op = HiveOperator(
task_id='test_default_config_queue',
hql=self.hql,
mapred_queue_priority='HIGH',
@@ -45,7 +43,7 @@ class HiveOperatorConfigTest(TestHiveEnvironment):
def test_hive_airflow_default_config_queue_override(self):
specific_mapred_queue = 'default'
- op = MockHiveOperator(
+ op = HiveOperator(
task_id='test_default_config_queue',
hql=self.hql,
mapred_queue=specific_mapred_queue,
@@ -60,15 +58,13 @@ class HiveOperatorConfigTest(TestHiveEnvironment):
class HiveOperatorTest(TestHiveEnvironment):
def test_hiveconf_jinja_translate(self):
hql = "SELECT ${num_col} FROM ${hiveconf:table};"
- op = MockHiveOperator(
- hiveconf_jinja_translate=True, task_id='dry_run_basic_hql', hql=hql, dag=self.dag
- )
+ op = HiveOperator(hiveconf_jinja_translate=True, task_id='dry_run_basic_hql', hql=hql, dag=self.dag)
op.prepare_template()
assert op.hql == "SELECT {{ num_col }} FROM {{ table }};"
def test_hiveconf(self):
hql = "SELECT * FROM ${hiveconf:table} PARTITION (${hiveconf:day});"
- op = MockHiveOperator(
+ op = HiveOperator(
hiveconfs={'table': 'static_babynames', 'day': '{{ ds }}'},
task_id='dry_run_basic_hql',
hql=hql,
@@ -81,7 +77,7 @@ class HiveOperatorTest(TestHiveEnvironment):
def test_mapred_job_name(self, mock_get_hook):
mock_hook = mock.MagicMock()
mock_get_hook.return_value = mock_hook
- op = MockHiveOperator(task_id='test_mapred_job_name', hql=self.hql, dag=self.dag)
+ op = HiveOperator(task_id='test_mapred_job_name', hql=self.hql, dag=self.dag)
fake_dagrun_id = "test_mapred_job_name"
fake_execution_date = timezone.datetime(2018, 6, 19)
diff --git a/tests/providers/apache/hive/operators/test_hive_stats.py b/tests/providers/apache/hive/operators/test_hive_stats.py
index b51420b..3c2260b 100644
--- a/tests/providers/apache/hive/operators/test_hive_stats.py
+++ b/tests/providers/apache/hive/operators/test_hive_stats.py
@@ -20,14 +20,21 @@ import os
import re
import unittest
from collections import OrderedDict
-from unittest.mock import patch
+from unittest.mock import MagicMock, patch
import pytest
from airflow.exceptions import AirflowException
from airflow.providers.apache.hive.operators.hive_stats import HiveStatsCollectionOperator
-from tests.providers.apache.hive import DEFAULT_DATE, DEFAULT_DATE_DS, TestHiveEnvironment
-from tests.test_utils.mock_hooks import MockHiveMetastoreHook, MockMySqlHook, MockPrestoHook
+from airflow.providers.presto.hooks.presto import PrestoHook
+from tests.providers.apache.hive import (
+ DEFAULT_DATE,
+ DEFAULT_DATE_DS,
+ MockConnectionCursor,
+ MockHiveMetastoreHook,
+ MockMySqlHook,
+ TestHiveEnvironment,
+)
class _FakeCol:
@@ -39,6 +46,20 @@ class _FakeCol:
fake_col = _FakeCol('col', 'string')
+class MockPrestoHook(PrestoHook):
+ def __init__(self, *args, **kwargs):
+ self.conn = MockConnectionCursor()
+
+ self.conn.execute = MagicMock()
+ self.get_conn = MagicMock(return_value=self.conn)
+ self.get_first = MagicMock(return_value=[['val_0', 'val_1'], 'val_2'])
+
+ super().__init__(*args, **kwargs)
+
+ def get_connection(self, *args):
+ return self.conn
+
+
class TestHiveStatsCollectionOperator(TestHiveEnvironment):
def setUp(self):
self.kwargs = dict(
diff --git a/tests/providers/apache/hive/sensors/test_hive_partition.py b/tests/providers/apache/hive/sensors/test_hive_partition.py
index e992ec0..f28c0e0 100644
--- a/tests/providers/apache/hive/sensors/test_hive_partition.py
+++ b/tests/providers/apache/hive/sensors/test_hive_partition.py
@@ -21,8 +21,7 @@ import unittest
from unittest.mock import patch
from airflow.providers.apache.hive.sensors.hive_partition import HivePartitionSensor
-from tests.providers.apache.hive import DEFAULT_DATE, TestHiveEnvironment
-from tests.test_utils.mock_hooks import MockHiveMetastoreHook
+from tests.providers.apache.hive import DEFAULT_DATE, MockHiveMetastoreHook, TestHiveEnvironment
@unittest.skipIf('AIRFLOW_RUNALL_TESTS' not in os.environ, "Skipped because AIRFLOW_RUNALL_TESTS is not set")
diff --git a/tests/providers/apache/hive/sensors/test_metastore_partition.py b/tests/providers/apache/hive/sensors/test_metastore_partition.py
index 36f8f12..3e6a044 100644
--- a/tests/providers/apache/hive/sensors/test_metastore_partition.py
+++ b/tests/providers/apache/hive/sensors/test_metastore_partition.py
@@ -21,8 +21,7 @@ import unittest
from unittest import mock
from airflow.providers.apache.hive.sensors.metastore_partition import MetastorePartitionSensor
-from tests.providers.apache.hive import DEFAULT_DATE, DEFAULT_DATE_DS, TestHiveEnvironment
-from tests.test_utils.mock_process import MockDBConnection
+from tests.providers.apache.hive import DEFAULT_DATE, DEFAULT_DATE_DS, MockDBConnection, TestHiveEnvironment
@unittest.skipIf('AIRFLOW_RUNALL_TESTS' not in os.environ, "Skipped because AIRFLOW_RUNALL_TESTS is not set")
diff --git a/tests/providers/apache/hive/sensors/test_named_hive_partition.py b/tests/providers/apache/hive/sensors/test_named_hive_partition.py
index a9eea72..bd579ab 100644
--- a/tests/providers/apache/hive/sensors/test_named_hive_partition.py
+++ b/tests/providers/apache/hive/sensors/test_named_hive_partition.py
@@ -16,7 +16,6 @@
# specific language governing permissions and limitations
# under the License.
import os
-import random
import unittest
from datetime import timedelta
from unittest import mock
@@ -27,9 +26,7 @@ from airflow.exceptions import AirflowSensorTimeout
from airflow.models.dag import DAG
from airflow.providers.apache.hive.sensors.named_hive_partition import NamedHivePartitionSensor
from airflow.utils.timezone import datetime
-from tests.providers.apache.hive import TestHiveEnvironment
-from tests.test_utils.mock_hooks import MockHiveMetastoreHook
-from tests.test_utils.mock_operators import MockHiveOperator
+from tests.providers.apache.hive import MockHiveMetastoreHook, TestHiveEnvironment
DEFAULT_DATE = datetime(2015, 1, 1)
DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
@@ -59,14 +56,6 @@ class TestNamedHivePartitionSensor(unittest.TestCase):
ADD PARTITION({{ params.partition_by }}='{{ ds }}');
"""
self.hook = MockHiveMetastoreHook()
- op = MockHiveOperator(
- task_id='HiveHook_' + str(random.randint(1, 10000)),
- params={'database': self.database, 'table': self.table, 'partition_by': self.partition_by},
- hive_cli_conn_id='hive_cli_default',
- hql=self.hql,
- dag=self.dag,
- )
- op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_parse_partition_name_correct(self):
schema = 'default'
diff --git a/tests/providers/apache/hive/transfers/test_hive_to_mysql.py b/tests/providers/apache/hive/transfers/test_hive_to_mysql.py
index e85595f..9cd8381 100644
--- a/tests/providers/apache/hive/transfers/test_hive_to_mysql.py
+++ b/tests/providers/apache/hive/transfers/test_hive_to_mysql.py
@@ -26,8 +26,7 @@ from airflow import PY39
from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator
from airflow.utils import timezone
from airflow.utils.operator_helpers import context_to_airflow_vars
-from tests.providers.apache.hive import TestHiveEnvironment
-from tests.test_utils.mock_hooks import MockHiveServer2Hook, MockMySqlHook
+from tests.providers.apache.hive import MockHiveServer2Hook, MockMySqlHook, TestHiveEnvironment
DEFAULT_DATE = timezone.datetime(2015, 1, 1)
diff --git a/tests/providers/apache/hive/transfers/test_hive_to_samba.py b/tests/providers/apache/hive/transfers/test_hive_to_samba.py
index c2a7cde..ce8a00c 100644
--- a/tests/providers/apache/hive/transfers/test_hive_to_samba.py
+++ b/tests/providers/apache/hive/transfers/test_hive_to_samba.py
@@ -23,9 +23,25 @@ import pytest
from airflow import PY39
from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator
+from airflow.providers.samba.hooks.samba import SambaHook
from airflow.utils.operator_helpers import context_to_airflow_vars
-from tests.providers.apache.hive import DEFAULT_DATE, TestHiveEnvironment
-from tests.test_utils.mock_hooks import MockHiveServer2Hook, MockSambaHook
+from tests.providers.apache.hive import (
+ DEFAULT_DATE,
+ MockConnectionCursor,
+ MockHiveServer2Hook,
+ TestHiveEnvironment,
+)
+
+
+class MockSambaHook(SambaHook):
+ def __init__(self, *args, **kwargs):
+ self.conn = MockConnectionCursor()
+ self.conn.execute = MagicMock()
+ self.get_conn = MagicMock(return_value=self.conn)
+ super().__init__(*args, **kwargs)
+
+ def get_connection(self, *args):
+ return self.conn
@pytest.mark.skipif(
diff --git a/tests/test_utils/mock_hooks.py b/tests/test_utils/mock_hooks.py
deleted file mode 100644
index 2c6e289..0000000
--- a/tests/test_utils/mock_hooks.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from unittest import mock
-
-from airflow.providers.apache.hive.hooks.hive import HiveCliHook, HiveMetastoreHook, HiveServer2Hook
-from airflow.providers.mysql.hooks.mysql import MySqlHook
-from airflow.providers.presto.hooks.presto import PrestoHook
-from airflow.providers.samba.hooks.samba import SambaHook
-from tests.test_utils.mock_process import MockConnectionCursor, MockDBConnection
-
-
-class MockHiveMetastoreHook(HiveMetastoreHook):
- def __init__(self, *args, **kwargs):
- self._find_valid_server = mock.MagicMock(return_value={})
- self.get_metastore_client = mock.MagicMock(return_value=mock.MagicMock())
- super().__init__()
-
-
-class MockHiveCliHook(HiveCliHook):
- def __init__(self, *args, **kwargs):
- super().__init__()
- self.conn = MockConnectionCursor()
- self.conn.schema = 'default'
- self.conn.host = 'localhost'
- self.conn.port = 10000
- self.conn.login = None
- self.conn.password = None
-
- self.conn.execute = mock.MagicMock()
- self.get_conn = mock.MagicMock(return_value=self.conn)
- self.get_connection = mock.MagicMock(return_value=MockDBConnection({}))
-
-
-class MockSambaHook(SambaHook):
- def __init__(self, *args, **kwargs):
- self.conn = MockConnectionCursor()
- self.conn.execute = mock.MagicMock()
- self.get_conn = mock.MagicMock(return_value=self.conn)
- super().__init__(*args, **kwargs)
-
- def get_connection(self, *args):
- return self.conn
-
-
-class MockPrestoHook(PrestoHook):
- def __init__(self, *args, **kwargs):
- self.conn = MockConnectionCursor()
-
- self.conn.execute = mock.MagicMock()
- self.get_conn = mock.MagicMock(return_value=self.conn)
- self.get_first = mock.MagicMock(return_value=[['val_0', 'val_1'], 'val_2'])
-
- super().__init__(*args, **kwargs)
-
- def get_connection(self, *args):
- return self.conn
-
-
-class MockMySqlHook(MySqlHook):
- def __init__(self, *args, **kwargs):
- self.conn = MockConnectionCursor()
-
- self.conn.execute = mock.MagicMock()
- self.get_conn = mock.MagicMock(return_value=self.conn)
- self.get_records = mock.MagicMock(return_value=[])
- self.insert_rows = mock.MagicMock(return_value=True)
- super().__init__(*args, **kwargs)
-
- def get_connection(self, *args, **kwargs):
- return self.conn
-
-
-class MockHiveServer2Hook(HiveServer2Hook):
- def __init__(self, *args, **kwargs):
- super().__init__()
- self.mock_cursor = kwargs.get('connection_cursor', MockConnectionCursor())
- self.mock_cursor.execute = mock.MagicMock()
- self.get_conn = mock.MagicMock(return_value=self.mock_cursor)
- self.get_connection = mock.MagicMock(return_value=MockDBConnection({}))
diff --git a/tests/test_utils/mock_operators.py b/tests/test_utils/mock_operators.py
index aa6a4fd..416424c 100644
--- a/tests/test_utils/mock_operators.py
+++ b/tests/test_utils/mock_operators.py
@@ -15,23 +15,15 @@
# specific language governing permissions and limitations
# under the License.
import warnings
-from typing import NamedTuple, Sequence
-from unittest import mock
+from typing import Sequence
import attr
from airflow.models.baseoperator import BaseOperator, BaseOperatorLink
from airflow.models.xcom import XCom
-from airflow.providers.apache.hive.operators.hive import HiveOperator
from airflow.utils.context import Context
-# Namedtuple for testing purposes
-class MockNamedTuple(NamedTuple):
- var1: str
- var2: str
-
-
class MockOperator(BaseOperator):
"""Operator for testing purposes."""
@@ -54,7 +46,7 @@ class AirflowLink(BaseOperatorLink):
name = 'airflow'
def get_link(self, operator, dttm):
- return 'should_be_overridden'
+ return 'https://airflow.apache.org'
class Dummy2TestOperator(BaseOperator):
@@ -162,12 +154,6 @@ class GithubLink(BaseOperatorLink):
return 'https://github.com/apache/airflow'
-class MockHiveOperator(HiveOperator):
- def __init__(self, *args, **kwargs):
- self.run = mock.MagicMock()
- super().__init__(*args, **kwargs)
-
-
class DeprecatedOperator(BaseOperator):
def __init__(self, **kwargs):
warnings.warn("This operator is deprecated.", DeprecationWarning, stacklevel=2)
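The AirflowLink change above pairs with the last diff below: with get_link()
returning the real URL, tests/www/views/test_views_extra_links.py can import
AirflowLink instead of keeping a private copy. For orientation, a minimal
sketch of the BaseOperatorLink contract these mocks implement (the names here
are illustrative, not from the commit):

    from airflow.models.baseoperator import BaseOperator, BaseOperatorLink

    class ExampleLink(BaseOperatorLink):
        name = 'example'  # label rendered on the extra-link button in the UI

        def get_link(self, operator, dttm):
            # URL for this operator instance at execution date `dttm`
            return f'https://example.com/{operator.task_id}'

    class ExampleOperator(BaseOperator):
        operator_extra_links = (ExampleLink(),)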
diff --git a/tests/test_utils/mock_process.py b/tests/test_utils/mock_process.py
deleted file mode 100644
index af33bd8..0000000
--- a/tests/test_utils/mock_process.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from typing import Optional
-from unittest import mock
-
-
-class MockDBConnection:
- def __init__(self, extra_dejson=None, *args, **kwargs):
- self.extra_dejson = extra_dejson
- self.get_records = mock.MagicMock(return_value=[['test_record']])
-
- output = kwargs.get('output', ['' for _ in range(10)])
- self.readline = mock.MagicMock(side_effect=[line.encode() for line in output])
-
- def status(self, *args, **kwargs):
- return True
-
-
-class MockStdOut:
- def __init__(self, *args, **kwargs):
- output = kwargs.get('output', ['' for _ in range(10)])
- self.readline = mock.MagicMock(side_effect=[line.encode() for line in output])
-
-
-class MockSubProcess:
- PIPE = -1
- STDOUT = -2
- returncode: Optional[int] = None
-
- def __init__(self, *args, **kwargs):
- self.stdout = MockStdOut(*args, **kwargs)
-
- def wait(self):
- return
-
-
-class BaseMockConnectionCursor:
- def __init__(self, **kwargs):
- self.arraysize = None
- self.description = [
- ('hive_server_hook.a', 'INT_TYPE', None, None, None, None, True),
- ('hive_server_hook.b', 'INT_TYPE', None, None, None, None, True),
- ]
- self.conn_exists = kwargs.get('exists', True)
-
- def close(self):
- pass
-
- def cursor(self):
- return self
-
- def execute(self, values=None):
- pass
-
- def exists(self):
- return self.conn_exists
-
- def isfile(self):
- return True
-
- def remove(self):
- pass
-
- def upload(self, local_filepath, destination_filepath):
- pass
-
- def __next__(self):
- return self.iterable
-
- def __iter__(self):
- yield from self.iterable
-
-
-class MockConnectionCursor(BaseMockConnectionCursor):
- def __init__(self):
- super().__init__()
- self.iterable = [(1, 1), (2, 2)]
-
-
-class EmptyMockConnectionCursor(BaseMockConnectionCursor):
- def __init__(self):
- super().__init__()
- self.iterable = []
diff --git a/tests/www/views/test_views_extra_links.py b/tests/www/views/test_views_extra_links.py
index fef430a..e3a5147 100644
--- a/tests/www/views/test_views_extra_links.py
+++ b/tests/www/views/test_views_extra_links.py
@@ -29,7 +29,7 @@ from airflow.utils.session import create_session
from airflow.utils.state import DagRunState, TaskInstanceState
from airflow.utils.types import DagRunType
from tests.test_utils.db import clear_db_runs
-from tests.test_utils.mock_operators import Dummy2TestOperator, Dummy3TestOperator
+from tests.test_utils.mock_operators import AirflowLink, Dummy2TestOperator, Dummy3TestOperator
from tests.test_utils.www import check_content_in_response
DEFAULT_DATE = timezone.datetime(2017, 1, 1)
@@ -58,13 +58,6 @@ class FooBarLink(BaseOperatorLink):
return f"http://www.example.com/{operator.task_id}/foo-bar/{dttm}"
-class AirflowLink(BaseOperatorLink):
- name = 'airflow'
-
- def get_link(self, operator, dttm):
- return 'https://airflow.apache.org'
-
-
class DummyTestOperator(BaseOperator):
operator_extra_links = (
RaiseErrorLink(),