You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@ariatosca.apache.org by mx...@apache.org on 2016/12/22 09:46:42 UTC
[2/3] incubator-ariatosca git commit: ARIA-39-Genericize-storage-models
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/35a837f7/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index baded7f..a6b55ba 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -23,7 +23,7 @@ from aria.orchestrator import (
workflow,
operation,
)
-from aria.storage import models
+from aria.storage import model
from aria.orchestrator.workflows import (
api,
exceptions,
@@ -146,7 +146,7 @@ class TestEngine(BaseTest):
execution = workflow_context.execution
assert execution.started_at <= execution.ended_at <= datetime.utcnow()
assert execution.error is None
- assert execution.status == models.Execution.TERMINATED
+ assert execution.status == model.Execution.TERMINATED
def test_single_task_successful_execution(self, workflow_context, executor):
@workflow
@@ -175,7 +175,7 @@ class TestEngine(BaseTest):
execution = workflow_context.execution
assert execution.started_at <= execution.ended_at <= datetime.utcnow()
assert execution.error is not None
- assert execution.status == models.Execution.FAILED
+ assert execution.status == model.Execution.FAILED
def test_two_tasks_execution_order(self, workflow_context, executor):
@workflow
@@ -236,7 +236,7 @@ class TestCancel(BaseTest):
execution = workflow_context.execution
assert execution.started_at <= execution.ended_at <= datetime.utcnow()
assert execution.error is None
- assert execution.status == models.Execution.CANCELLED
+ assert execution.status == model.Execution.CANCELLED
def test_cancel_pending_execution(self, workflow_context, executor):
@workflow
@@ -247,7 +247,7 @@ class TestCancel(BaseTest):
executor=executor)
eng.cancel_execution()
execution = workflow_context.execution
- assert execution.status == models.Execution.CANCELLED
+ assert execution.status == model.Execution.CANCELLED
class TestRetries(BaseTest):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/35a837f7/tests/orchestrator/workflows/core/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task.py b/tests/orchestrator/workflows/core/test_task.py
index fc11548..5381f5d 100644
--- a/tests/orchestrator/workflows/core/test_task.py
+++ b/tests/orchestrator/workflows/core/test_task.py
@@ -60,7 +60,7 @@ class TestOperationTask(object):
node.operations['aria.interfaces.lifecycle.create'] = {'plugin': 'plugin1'}
api_task, core_task = self._create_operation_task(ctx, node_instance)
storage_task = ctx.model.task.get_by_name(core_task.name)
- assert storage_task.execution_id == ctx.execution.id
+ assert storage_task.execution_name == ctx.execution.name
assert core_task.model_task == storage_task
assert core_task.name == api_task.name
assert core_task.operation_mapping == api_task.operation_mapping
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/35a837f7/tests/orchestrator/workflows/executor/test_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_executor.py b/tests/orchestrator/workflows/executor/test_executor.py
index 5ded4fb..d78d75c 100644
--- a/tests/orchestrator/workflows/executor/test_executor.py
+++ b/tests/orchestrator/workflows/executor/test_executor.py
@@ -28,7 +28,8 @@ except ImportError:
_celery = None
app = None
-from aria.storage import models
+import aria
+from aria.storage import model
from aria.orchestrator import events
from aria.orchestrator.workflows.executor import (
thread,
@@ -91,7 +92,7 @@ class MockContext(object):
class MockTask(object):
- INFINITE_RETRIES = models.Task.INFINITE_RETRIES
+ INFINITE_RETRIES = model.Task.INFINITE_RETRIES
def __init__(self, func, inputs=None):
self.states = []
@@ -108,7 +109,7 @@ class MockTask(object):
self.max_attempts = 1
self.plugin_id = None
- for state in models.Task.STATES:
+ for state in model.Task.STATES:
setattr(self, state.upper(), state)
@contextmanager
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/35a837f7/tests/orchestrator/workflows/executor/test_process_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor.py b/tests/orchestrator/workflows/executor/test_process_executor.py
index 0098f30..b1bb3f5 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor.py
@@ -22,7 +22,7 @@ from contextlib import contextmanager
import pytest
from aria import application_model_storage
-from aria.storage import models
+from aria.storage import model as aria_model
from aria.utils.plugin import create as create_plugin
from aria.storage.sql_mapi import SQLAlchemyModelAPI
from aria.orchestrator import events
@@ -117,7 +117,7 @@ class MockContext(object):
class MockTask(object):
- INFINITE_RETRIES = models.Task.INFINITE_RETRIES
+ INFINITE_RETRIES = aria_model.Task.INFINITE_RETRIES
def __init__(self, plugin, operation):
self.id = str(uuid.uuid4())
@@ -131,7 +131,7 @@ class MockTask(object):
self.plugin_id = plugin.id
self.plugin = plugin
- for state in models.Task.STATES:
+ for state in aria_model.Task.STATES:
setattr(self, state.upper(), state)
@contextmanager
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/35a837f7/tests/storage/__init__.py
----------------------------------------------------------------------
diff --git a/tests/storage/__init__.py b/tests/storage/__init__.py
index edff982..9101fd0 100644
--- a/tests/storage/__init__.py
+++ b/tests/storage/__init__.py
@@ -17,14 +17,13 @@ import platform
from tempfile import mkdtemp
from shutil import rmtree
+from aria.storage import model
from sqlalchemy import (
create_engine,
orm)
from sqlalchemy.orm import scoped_session
from sqlalchemy.pool import StaticPool
-from aria.storage import structures
-
class TestFileSystem(object):
@@ -60,7 +59,7 @@ def get_sqlite_api_kwargs(base_dir=None, filename='db.sqlite'):
session_factory = orm.sessionmaker(bind=engine)
session = scoped_session(session_factory=session_factory) if base_dir else session_factory()
- structures.Model.metadata.create_all(engine)
+ model.DeclarativeBase.metadata.create_all(bind=engine)
return dict(engine=engine, session=session)
@@ -77,4 +76,4 @@ def release_sqlite_storage(storage):
session.rollback()
session.close()
for engine in set(mapi._engine for mapi in mapis):
- structures.Model.metadata.drop_all(engine)
+ model.DeclarativeBase.metadata.drop_all(engine)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/35a837f7/tests/storage/test_model_storage.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_model_storage.py b/tests/storage/test_model_storage.py
index 48cd02c..0555a4e 100644
--- a/tests/storage/test_model_storage.py
+++ b/tests/storage/test_model_storage.py
@@ -15,64 +15,101 @@
import pytest
+from sqlalchemy import Column, Text, Integer
+
from aria.storage import (
ModelStorage,
- models,
+ model,
exceptions,
sql_mapi,
+ structure,
+ type as aria_type,
)
from aria import application_model_storage
-from tests.storage import get_sqlite_api_kwargs, release_sqlite_storage
+from ..storage import get_sqlite_api_kwargs, release_sqlite_storage
+from ..mock import context as mock_context
+
+class MockModel(model.DeclarativeBase, structure.ModelMixin): #pylint: disable=abstract-method
+ __tablename__ = 'mock_models'
+ some_dict = Column(aria_type.Dict)
+ value = Column(Integer)
+ name = Column(Text)
@pytest.fixture
def storage():
base_storage = ModelStorage(sql_mapi.SQLAlchemyModelAPI, api_kwargs=get_sqlite_api_kwargs())
+ base_storage.register(MockModel)
yield base_storage
release_sqlite_storage(base_storage)
+@pytest.fixture(scope='module', autouse=True)
+def module_cleanup():
+ model.DeclarativeBase.metadata.remove(MockModel.__table__) #pylint: disable=no-member
+
+
def test_storage_base(storage):
with pytest.raises(AttributeError):
storage.non_existent_attribute()
def test_model_storage(storage):
- storage.register(models.ProviderContext)
+ mock_model = MockModel(value=0, name='model_name')
+ storage.mock_model.put(mock_model)
- pc = models.ProviderContext(context={}, name='context_name')
- storage.provider_context.put(pc)
+ assert storage.mock_model.get_by_name('model_name') == mock_model
- assert storage.provider_context.get_by_name('context_name') == pc
+ assert [mm_from_storage for mm_from_storage in storage.mock_model.iter()] == [mock_model]
+ assert [mm_from_storage for mm_from_storage in storage.mock_model] == [mock_model]
- assert [pc_from_storage for pc_from_storage in storage.provider_context.iter()] == [pc]
- assert [pc_from_storage for pc_from_storage in storage.provider_context] == [pc]
+ storage.mock_model.delete(mock_model)
+ with pytest.raises(exceptions.StorageError):
+ storage.mock_model.get(mock_model.id)
- new_context = {'update_key': 0}
- pc.context = new_context
- storage.provider_context.update(pc)
- assert storage.provider_context.get(pc.id).context == new_context
- storage.provider_context.delete(pc)
- with pytest.raises(exceptions.StorageError):
- storage.provider_context.get(pc.id)
+def test_inner_dict_update(storage):
+ inner_dict = {'inner_value': 1}
+ mock_model = MockModel(some_dict={'inner_dict': inner_dict, 'value': 0})
+ storage.mock_model.put(mock_model)
-def test_storage_driver(storage):
- storage.register(models.ProviderContext)
+ storage_mm = storage.mock_model.get(mock_model.id)
+ assert storage_mm == mock_model
- pc = models.ProviderContext(context={}, name='context_name')
- storage.registered['provider_context'].put(entry=pc)
+ storage_mm.some_dict['inner_dict']['inner_value'] = 2
+ storage_mm.some_dict['value'] = -1
+ storage.mock_model.update(storage_mm)
+ storage_mm = storage.mock_model.get(storage_mm.id)
- assert storage.registered['provider_context'].get_by_name('context_name') == pc
+ assert storage_mm.some_dict['inner_dict']['inner_value'] == 2
+ assert storage_mm.some_dict['value'] == -1
- assert next(i for i in storage.registered['provider_context'].iter()) == pc
- assert [i for i in storage.provider_context] == [pc]
- storage.registered['provider_context'].delete(pc)
+def test_model_to_dict():
+ context = mock_context.simple(get_sqlite_api_kwargs())
+ deployment = context.deployment
+ deployment_dict = deployment.to_dict()
- with pytest.raises(exceptions.StorageError):
- storage.registered['provider_context'].get(pc.id)
+ expected_keys = [
+ 'created_at',
+ 'description',
+ 'inputs',
+ 'groups',
+ 'permalink',
+ 'policy_triggers',
+ 'policy_types',
+ 'outputs',
+ 'scaling_groups',
+ 'updated_at',
+ 'workflows',
+ 'blueprint_name',
+ ]
+
+ for expected_key in expected_keys:
+ assert expected_key in deployment_dict
+
+ assert 'blueprint_fk' not in deployment_dict
def test_application_storage_factory():
@@ -87,6 +124,5 @@ def test_application_storage_factory():
assert storage.deployment_update_step
assert storage.deployment_modification
assert storage.execution
- assert storage.provider_context
release_sqlite_storage(storage)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/35a837f7/tests/storage/test_models.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_models.py b/tests/storage/test_models.py
index 0651957..7f107ed 100644
--- a/tests/storage/test_models.py
+++ b/tests/storage/test_models.py
@@ -12,19 +12,21 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from contextlib import contextmanager
from datetime import datetime
+from contextlib import contextmanager
+
import pytest
from aria import application_model_storage
-from aria.storage import exceptions
-from aria.storage import sql_mapi
-from aria.storage.models import (
+from aria.storage import (
+ exceptions,
+ sql_mapi,
+)
+from aria.storage.model import (
DeploymentUpdateStep,
Blueprint,
Execution,
Task,
- ProviderContext,
Plugin,
Deployment,
Node,
@@ -73,7 +75,7 @@ def _deployment_storage():
def _deployment_update_storage():
storage = _deployment_storage()
deployment_update = DeploymentUpdate(
- deployment_id=storage.deployment.list()[0].id,
+ deployment=storage.deployment.list()[0],
created_at=now,
deployment_plan={},
)
@@ -194,15 +196,15 @@ class TestBlueprint(object):
)
def test_blueprint_model_creation(self, empty_storage, is_valid, plan, description, created_at,
updated_at, main_file_name):
- if not is_valid:
- with pytest.raises(exceptions.StorageError):
- empty_storage.blueprint.put(Blueprint(plan=plan, description=description,
- created_at=created_at, updated_at=updated_at,
- main_file_name=main_file_name))
- else:
- empty_storage.blueprint.put(Blueprint(plan=plan, description=description,
- created_at=created_at, updated_at=updated_at,
- main_file_name=main_file_name))
+ _test_model(is_valid=is_valid,
+ storage=empty_storage,
+ model_name='blueprint',
+ model_cls=Blueprint,
+ model_kwargs=dict(plan=plan,
+ description=description,
+ created_at=created_at,
+ updated_at=updated_at,
+ main_file_name=main_file_name))
class TestDeployment(object):
@@ -211,32 +213,32 @@ class TestDeployment(object):
'is_valid, name, created_at, description, inputs, groups, permalink, policy_triggers, '
'policy_types, outputs, scaling_groups, updated_at, workflows',
[
- (False, m_cls, now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (False, 'name', m_cls, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (False, 'name', now, m_cls, {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (False, 'name', now, 'desc', m_cls, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (False, 'name', now, 'desc', {}, m_cls, 'perlnk', {}, {}, {}, {}, now, {}),
- (False, 'name', now, 'desc', {}, {}, m_cls, {}, {}, {}, {}, now, {}),
- (False, 'name', now, 'desc', {}, {}, 'perlnk', m_cls, {}, {}, {}, now, {}),
- (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, m_cls, {}, {}, now, {}),
- (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, m_cls, {}, now, {}),
+ (False, m_cls, now, 'desc', {}, {}, 'perlnk', {}, {}, {}, [], now, {}),
+ (False, 'name', m_cls, 'desc', {}, {}, 'perlnk', {}, {}, {}, [], now, {}),
+ (False, 'name', now, m_cls, {}, {}, 'perlnk', {}, {}, {}, [], now, {}),
+ (False, 'name', now, 'desc', m_cls, {}, 'perlnk', {}, {}, {}, [], now, {}),
+ (False, 'name', now, 'desc', {}, m_cls, 'perlnk', {}, {}, {}, [], now, {}),
+ (False, 'name', now, 'desc', {}, {}, m_cls, {}, {}, {}, [], now, {}),
+ (False, 'name', now, 'desc', {}, {}, 'perlnk', m_cls, {}, {}, [], now, {}),
+ (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, m_cls, {}, [], now, {}),
+ (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, m_cls, [], now, {}),
(False, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, m_cls, now, {}),
- (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, m_cls, {}),
- (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, m_cls),
-
- (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (True, None, now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (True, 'name', now, None, {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (True, 'name', now, 'desc', None, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (True, 'name', now, 'desc', {}, None, 'perlnk', {}, {}, {}, {}, now, {}),
- (True, 'name', now, 'desc', {}, {}, None, {}, {}, {}, {}, now, {}),
- (True, 'name', now, 'desc', {}, {}, 'perlnk', None, {}, {}, {}, now, {}),
- (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, None, {}, {}, now, {}),
- (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, None, {}, now, {}),
+ (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, [], m_cls, {}),
+ (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, [], now, m_cls),
+
+ (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, [], now, {}),
+ (True, None, now, 'desc', {}, {}, 'perlnk', {}, {}, {}, [], now, {}),
+ (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, [], now, {}),
+ (True, 'name', now, None, {}, {}, 'perlnk', {}, {}, {}, [], now, {}),
+ (True, 'name', now, 'desc', None, {}, 'perlnk', {}, {}, {}, [], now, {}),
+ (True, 'name', now, 'desc', {}, None, 'perlnk', {}, {}, {}, [], now, {}),
+ (True, 'name', now, 'desc', {}, {}, None, {}, {}, {}, [], now, {}),
+ (True, 'name', now, 'desc', {}, {}, 'perlnk', None, {}, {}, [], now, {}),
+ (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, None, {}, [], now, {}),
+ (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, None, [], now, {}),
(True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, None, now, {}),
- (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, None, {}),
- (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, None),
+ (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, [], None, {}),
+ (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, [], now, None),
]
)
def test_deployment_model_creation(self, deployment_storage, is_valid, name, created_at,
@@ -249,7 +251,7 @@ class TestDeployment(object):
model_cls=Deployment,
model_kwargs=dict(
name=name,
- blueprint_id=deployment_storage.blueprint.list()[0].id,
+ blueprint=deployment_storage.blueprint.list()[0],
created_at=created_at,
description=description,
inputs=inputs,
@@ -295,8 +297,7 @@ class TestExecution(object):
model_name='execution',
model_cls=Execution,
model_kwargs=dict(
- deployment_id=deployment_storage.deployment.list()[0].id,
- blueprint_id=deployment_storage.blueprint.list()[0].id,
+ deployment=deployment_storage.deployment.list()[0],
created_at=created_at,
started_at=started_at,
ended_at=ended_at,
@@ -406,7 +407,7 @@ class TestDeploymentUpdate(object):
model_name='deployment_update',
model_cls=DeploymentUpdate,
model_kwargs=dict(
- deployment_id=deployment_storage.deployment.list()[0].id,
+ deployment=deployment_storage.deployment.list()[0],
created_at=created_at,
deployment_plan=deployment_plan,
deployment_update_node_instances=deployment_update_node_instances,
@@ -441,7 +442,7 @@ class TestDeploymentUpdateStep(object):
model_name='deployment_update_step',
model_cls=DeploymentUpdateStep,
model_kwargs=dict(
- deployment_update_id=deployment_update_storage.deployment_update.list()[0].id,
+ deployment_update=deployment_update_storage.deployment_update.list()[0],
action=action,
entity_id=entity_id,
entity_type=entity_type
@@ -517,7 +518,7 @@ class TestDeploymentModification(object):
model_name='deployment_modification',
model_cls=DeploymentModification,
model_kwargs=dict(
- deployment_id=deployment_storage.deployment.list()[0].id,
+ deployment=deployment_storage.deployment.list()[0],
context=context,
created_at=created_at,
ended_at=ended_at,
@@ -576,7 +577,7 @@ class TestNode(object):
operations=operations,
type=type,
type_hierarchy=type_hierarchy,
- deployment_id=deployment_storage.deployment.list()[0].id
+ deployment=deployment_storage.deployment.list()[0]
))
if is_valid:
assert node.deployment == deployment_storage.deployment.list()[0]
@@ -611,8 +612,8 @@ class TestRelationship(object):
model_name='relationship',
model_cls=Relationship,
model_kwargs=dict(
- source_node_id=nodes_storage.node.list()[1].id,
- target_node_id=nodes_storage.node.list()[0].id,
+ source_node=nodes_storage.node.list()[1],
+ target_node=nodes_storage.node.list()[0],
source_interfaces=source_interfaces,
source_operations=source_operations,
target_interfaces=target_interfaces,
@@ -651,8 +652,7 @@ class TestNodeInstance(object):
model_name='node_instance',
model_cls=NodeInstance,
model_kwargs=dict(
- node_id=node_storage.node.list()[0].id,
- deployment_id=node_storage.deployment.list()[0].id,
+ node=node_storage.node.list()[0],
name=name,
runtime_properties=runtime_properties,
scaling_groups=scaling_groups,
@@ -681,33 +681,15 @@ class TestRelationshipInstance(object):
model_name='relationship_instance',
model_cls=RelationshipInstance,
model_kwargs=dict(
- relationship_id=relationship.id,
- source_node_instance_id=source_node_instance.id,
- target_node_instance_id=target_node_instance.id
+ relationship=relationship,
+ source_node_instance=source_node_instance,
+ target_node_instance=target_node_instance
))
assert relationship_instance.relationship == relationship
assert relationship_instance.source_node_instance == source_node_instance
assert relationship_instance.target_node_instance == target_node_instance
-class TestProviderContext(object):
- @pytest.mark.parametrize(
- 'is_valid, name, context',
- [
- (False, None, {}),
- (False, 'name', None),
- (True, 'name', {}),
- ]
- )
- def test_provider_context_model_creation(self, empty_storage, is_valid, name, context):
- _test_model(is_valid=is_valid,
- storage=empty_storage,
- model_name='provider_context',
- model_cls=ProviderContext,
- model_kwargs=dict(name=name, context=context)
- )
-
-
class TestPlugin(object):
@pytest.mark.parametrize(
'is_valid, archive_name, distribution, distribution_release, '
@@ -715,48 +697,48 @@ class TestPlugin(object):
'package_version, supported_platform, supported_py_versions, uploaded_at, wheels',
[
(False, m_cls, 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
- 'sup_pla', [], now, []),
+ 'sup_plat', [], now, []),
(False, 'arc_name', m_cls, 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
- 'sup_pla', [], now, []),
+ 'sup_plat', [], now, []),
(False, 'arc_name', 'dis_name', m_cls, 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
- 'sup_pla', [], now, []),
+ 'sup_plat', [], now, []),
(False, 'arc_name', 'dis_name', 'dis_rel', m_cls, 'pak_name', 'pak_src', 'pak_ver',
- 'sup_pla', [], now, []),
+ 'sup_plat', [], now, []),
(False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', m_cls, 'pak_src', 'pak_ver',
- 'sup_pla', [], now, []),
+ 'sup_plat', [], now, []),
(False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', m_cls, 'pak_ver',
- 'sup_pla', [], now, []),
+ 'sup_plat', [], now, []),
(False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', m_cls,
- 'sup_pla', [], now, []),
+ 'sup_plat', [], now, []),
(False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
'pak_ver', m_cls, [], now, []),
(False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
- 'pak_ver', 'sup_pla', m_cls, now, []),
+ 'pak_ver', 'sup_plat', m_cls, now, []),
(False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
- 'pak_ver', 'sup_pla', [], m_cls, []),
+ 'pak_ver', 'sup_plat', [], m_cls, []),
(False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
- 'pak_ver', 'sup_pla', [], now, m_cls),
+ 'pak_ver', 'sup_plat', [], now, m_cls),
- (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
- 'pak_ver', 'sup_pla', [], now, []),
+ (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
+ 'sup_plat', [], now, []),
(True, 'arc_name', None, 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
- 'sup_pla', [], now, []),
+ 'sup_plat', [], now, []),
(True, 'arc_name', 'dis_name', None, 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
- 'sup_pla', [], now, []),
+ 'sup_plat', [], now, []),
(True, 'arc_name', 'dis_name', 'dis_rel', None, 'pak_name', 'pak_src', 'pak_ver',
- 'sup_pla', [], now, []),
+ 'sup_plat', [], now, []),
(True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
- 'pak_ver', 'sup_pla', [], now, []),
+ 'pak_ver', 'sup_plat', {}, now, []),
(True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', None, 'pak_ver',
- 'sup_pla', [], now, []),
+ 'sup_plat', [], now, []),
(True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', None,
- 'sup_pla', [], now, []),
+ 'sup_plat', [], now, []),
(True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
- 'pak_ver', None, [], now, []),
+ 'pak_ver', None, {}, now, []),
(True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
- 'pak_ver', 'sup_pla', None, now, []),
+ 'pak_ver', 'sup_plat', None, now, []),
(True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
- 'pak_ver', 'sup_pla', [], now, []),
+ 'pak_ver', 'sup_plat', [], now, []),
]
)
def test_plugin_model_creation(self, empty_storage, is_valid, archive_name, distribution,
@@ -823,7 +805,7 @@ class TestTask(object):
model_cls=Task,
model_kwargs=dict(
status=status,
- execution_id=execution_storage.execution.list()[0].id,
+ execution=execution_storage.execution.list()[0],
due_at=due_at,
started_at=started_at,
ended_at=ended_at,
@@ -834,16 +816,16 @@ class TestTask(object):
name=name,
operation_mapping=operation_mapping,
inputs=inputs,
- plugin_id=plugin_id,
+ plugin_fk=plugin_id,
))
if is_valid:
assert task.execution == execution_storage.execution.list()[0]
- if task.plugin_id:
+ if task.plugin:
assert task.plugin == execution_storage.plugin.list()[0]
def test_task_max_attempts_validation(self):
def create_task(max_attempts):
- Task(execution_id='eid',
+ Task(execution_fk='eid',
name='name',
operation_mapping='',
inputs={},
@@ -855,23 +837,3 @@ class TestTask(object):
create_task(max_attempts=0)
with pytest.raises(ValueError):
create_task(max_attempts=-2)
-
-
-def test_inner_dict_update(empty_storage):
- inner_dict = {'inner_value': 1}
- pc = ProviderContext(name='name', context={
- 'inner_dict': {'inner_value': inner_dict},
- 'value': 0
- })
- empty_storage.provider_context.put(pc)
-
- storage_pc = empty_storage.provider_context.get(pc.id)
- assert storage_pc == pc
-
- storage_pc.context['inner_dict']['inner_value'] = 2
- storage_pc.context['value'] = -1
- empty_storage.provider_context.update(storage_pc)
- storage_pc = empty_storage.provider_context.get(pc.id)
-
- assert storage_pc.context['inner_dict']['inner_value'] == 2
- assert storage_pc.context['value'] == -1