You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@ariatosca.apache.org by mx...@apache.org on 2017/02/16 14:33:17 UTC
[05/13] incubator-ariatosca git commit: ARIA-44 Merge parser and
storage model
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b6193359/tests/orchestrator/workflows/core/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task.py b/tests/orchestrator/workflows/core/test_task.py
index b39a81f..26564c5 100644
--- a/tests/orchestrator/workflows/core/test_task.py
+++ b/tests/orchestrator/workflows/core/test_task.py
@@ -28,30 +28,43 @@ from aria.orchestrator.workflows import (
from tests import mock, storage
+OP_NAME = 'tosca.interfaces.node.lifecycle.Standard.create'
+RELATIONSHIP_OP_NAME = 'tosca.interfaces.relationship.Configure.pre_configure'
+
@pytest.fixture
def ctx(tmpdir):
context = mock.context.simple(str(tmpdir))
+
+ relationship = context.model.relationship.list()[0]
+ relationship.interfaces = [
+ mock.models.get_interface(RELATIONSHIP_OP_NAME, edge='source'),
+ mock.models.get_interface(RELATIONSHIP_OP_NAME, edge='target')
+ ]
+ context.model.relationship.update(relationship)
+
+ dependent_node = context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+ dependent_node.interfaces = [mock.models.get_interface(OP_NAME)]
+ context.model.node.update(dependent_node)
+
yield context
storage.release_sqlite_storage(context.model)
class TestOperationTask(object):
- def _create_node_operation_task(self, ctx, node_instance):
+ def _create_node_operation_task(self, ctx, node):
with workflow_context.current.push(ctx):
- api_task = api.task.OperationTask.node_instance(
- instance=node_instance,
+ api_task = api.task.OperationTask.node(
+ instance=node,
name='tosca.interfaces.node.lifecycle.Standard.create')
core_task = core.task.OperationTask(api_task=api_task)
return api_task, core_task
- def _create_relationship_operation_task(self, ctx, relationship_instance, operation_end):
+ def _create_relationship_operation_task(self, ctx, relationship, operation_name, edge):
with workflow_context.current.push(ctx):
- api_task = api.task.OperationTask.relationship_instance(
- instance=relationship_instance,
- name='tosca.interfaces.relationship.Configure.pre_configure_source',
- operation_end=operation_end)
+ api_task = api.task.OperationTask.relationship(
+ instance=relationship, name=operation_name, edge=edge)
core_task = core.task.OperationTask(api_task=api_task)
return api_task, core_task
@@ -60,45 +73,47 @@ class TestOperationTask(object):
storage_plugin_other = mock.models.get_plugin(package_name='p0', package_version='0.0')
ctx.model.plugin.put(storage_plugin_other)
ctx.model.plugin.put(storage_plugin)
- node_instance = ctx.model.node_instance.get_by_name(
- mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
- node = node_instance.node
+ node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+ node_template = node.node_template
plugin_name = 'plugin1'
- node.plugins = [{'name': plugin_name,
- 'package_name': 'p1',
- 'package_version': '0.1'}]
- node.operations['tosca.interfaces.node.lifecycle.Standard.create'] = {'plugin': plugin_name}
- api_task, core_task = self._create_node_operation_task(ctx, node_instance)
+ node_template.plugins = [{'name': 'plugin1',
+ 'package_name': 'p1',
+ 'package_version': '0.1'}]
+ node.interfaces = [mock.models.get_interface(
+ 'tosca.interfaces.node.lifecycle.Standard.create',
+ operation_kwargs=dict(plugin='plugin1')
+ )]
+ ctx.model.node_template.update(node_template)
+ ctx.model.node.update(node)
+ api_task, core_task = self._create_node_operation_task(ctx, node)
storage_task = ctx.model.task.get_by_name(core_task.name)
assert storage_task.plugin_name == plugin_name
assert storage_task.execution_name == ctx.execution.name
- assert storage_task.runs_on.id == core_task.context.node_instance.id
+ assert storage_task.runs_on == core_task.context.node
assert core_task.model_task == storage_task
assert core_task.name == api_task.name
- assert core_task.operation_mapping == api_task.operation_mapping
- assert core_task.actor == api_task.actor == node_instance
+ assert core_task.implementation == api_task.implementation
+ assert core_task.actor == api_task.actor == node
assert core_task.inputs == api_task.inputs == storage_task.inputs
assert core_task.plugin == storage_plugin
def test_source_relationship_operation_task_creation(self, ctx):
- relationship_instance = ctx.model.relationship_instance.list()[0]
+ relationship = ctx.model.relationship.list()[0]
+ ctx.model.relationship.update(relationship)
_, core_task = self._create_relationship_operation_task(
- ctx, relationship_instance,
- api.task.OperationTask.SOURCE_OPERATION)
- assert core_task.model_task.runs_on.id == relationship_instance.source_node_instance.id
+ ctx, relationship, RELATIONSHIP_OP_NAME, 'source')
+ assert core_task.model_task.runs_on == relationship.source_node
def test_target_relationship_operation_task_creation(self, ctx):
- relationship_instance = ctx.model.relationship_instance.list()[0]
+ relationship = ctx.model.relationship.list()[0]
_, core_task = self._create_relationship_operation_task(
- ctx, relationship_instance,
- api.task.OperationTask.TARGET_OPERATION)
- assert core_task.model_task.runs_on.id == relationship_instance.target_node_instance.id
+ ctx, relationship, RELATIONSHIP_OP_NAME, 'target')
+ assert core_task.model_task.runs_on == relationship.target_node
def test_operation_task_edit_locked_attribute(self, ctx):
- node_instance = \
- ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+ node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
- _, core_task = self._create_node_operation_task(ctx, node_instance)
+ _, core_task = self._create_node_operation_task(ctx, node)
now = datetime.utcnow()
with pytest.raises(exceptions.TaskException):
core_task.status = core_task.STARTED
@@ -112,10 +127,9 @@ class TestOperationTask(object):
core_task.due_at = now
def test_operation_task_edit_attributes(self, ctx):
- node_instance = \
- ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+ node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
- _, core_task = self._create_node_operation_task(ctx, node_instance)
+ _, core_task = self._create_node_operation_task(ctx, node)
future_time = datetime.utcnow() + timedelta(seconds=3)
with core_task._update():
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b6193359/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
index 57be075..167004e 100644
--- a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
+++ b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
@@ -25,21 +25,23 @@ from tests import storage
def test_task_graph_into_execution_graph(tmpdir):
operation_name = 'tosca.interfaces.node.lifecycle.Standard.create'
task_context = mock.context.simple(str(tmpdir))
- node_instance = \
- task_context.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+ node = task_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+ node.interfaces = [mock.models.get_interface(operation_name)]
+ task_context.model.node.update(node)
+
def sub_workflow(name, **_):
return api.task_graph.TaskGraph(name)
with context.workflow.current.push(task_context):
test_task_graph = api.task.WorkflowTask(sub_workflow, name='test_task_graph')
- simple_before_task = api.task.OperationTask.node_instance(instance=node_instance,
- name=operation_name)
- simple_after_task = api.task.OperationTask.node_instance(instance=node_instance,
- name=operation_name)
+ simple_before_task = api.task.OperationTask.node(instance=node,
+ name=operation_name)
+ simple_after_task = api.task.OperationTask.node(instance=node,
+ name=operation_name)
inner_task_graph = api.task.WorkflowTask(sub_workflow, name='test_inner_task_graph')
- inner_task = api.task.OperationTask.node_instance(instance=node_instance,
- name=operation_name)
+ inner_task = api.task.OperationTask.node(instance=node,
+ name=operation_name)
inner_task_graph.add_tasks(inner_task)
test_task_graph.add_tasks(simple_before_task)
@@ -91,7 +93,7 @@ def test_task_graph_into_execution_graph(tmpdir):
def _assert_execution_is_api_task(execution_task, api_task):
assert execution_task.id == api_task.id
assert execution_task.name == api_task.name
- assert execution_task.operation_mapping == api_task.operation_mapping
+ assert execution_task.implementation == api_task.implementation
assert execution_task.actor == api_task.actor
assert execution_task.inputs == api_task.inputs
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b6193359/tests/orchestrator/workflows/executor/test_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_executor.py b/tests/orchestrator/workflows/executor/test_executor.py
index d983fe9..580bf8b 100644
--- a/tests/orchestrator/workflows/executor/test_executor.py
+++ b/tests/orchestrator/workflows/executor/test_executor.py
@@ -28,7 +28,7 @@ except ImportError:
_celery = None
app = None
-from aria.storage import model
+from aria.storage.modeling import model
from aria.orchestrator import events
from aria.orchestrator.workflows.executor import (
thread,
@@ -43,7 +43,7 @@ def test_execute(executor):
expected_value = 'value'
successful_task = MockTask(mock_successful_task)
failing_task = MockTask(mock_failing_task)
- task_with_inputs = MockTask(mock_task_with_input, inputs={'input': expected_value})
+ task_with_inputs = MockTask(mock_task_with_input, inputs=dict(input='value'))
for task in [successful_task, failing_task, task_with_inputs]:
executor.execute(task)
@@ -105,8 +105,9 @@ class MockTask(object):
self.exception = None
self.id = str(uuid.uuid4())
name = func.__name__
- operation = 'tests.orchestrator.workflows.executor.test_executor.{name}'.format(name=name)
- self.operation_mapping = operation
+ implementation = 'tests.orchestrator.workflows.executor.test_executor.{name}'.format(
+ name=name)
+ self.implementation = implementation
self.logger = logging.getLogger()
self.name = name
self.inputs = inputs or {}
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b6193359/tests/orchestrator/workflows/executor/test_process_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor.py b/tests/orchestrator/workflows/executor/test_process_executor.py
index ff5dce6..e904eb3 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor.py
@@ -42,7 +42,7 @@ class TestProcessExecutor(object):
def test_plugin_execution(self, executor, mock_plugin):
task = MockTask(plugin=mock_plugin,
- operation='mock_plugin1.operation')
+ implementation='mock_plugin1.operation')
queue = Queue.Queue()
@@ -131,11 +131,11 @@ class MockTask(object):
INFINITE_RETRIES = aria_model.Task.INFINITE_RETRIES
- def __init__(self, plugin, operation):
+ def __init__(self, plugin, implementation):
self.id = str(uuid.uuid4())
- self.operation_mapping = operation
+ self.implementation = implementation
self.logger = logging.getLogger()
- self.name = operation
+ self.name = implementation
self.inputs = {}
self.context = MockContext()
self.retry_count = 0
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b6193359/tests/orchestrator/workflows/executor/test_process_executor_extension.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_extension.py b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
index 18957f1..ac95554 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_extension.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
@@ -30,15 +30,18 @@ def test_decorate_extension(context, executor):
inputs = {'input1': 1, 'input2': 2}
def get_node_instance(ctx):
- return ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+ return ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
@workflow
def mock_workflow(ctx, graph):
node_instance = get_node_instance(ctx)
op = 'test.op'
- op_dict = {'operation': '{0}.{1}'.format(__name__, _mock_operation.__name__)}
- node_instance.node.operations['test.op'] = op_dict
- task = api.task.OperationTask.node_instance(instance=node_instance, name=op, inputs=inputs)
+ node_instance.interfaces = [mock.models.get_interface(
+ op,
+ operation_kwargs=dict(implementation='{0}.{1}'.format(__name__,
+ _mock_operation.__name__))
+ )]
+ task = api.task.OperationTask.node(instance=node_instance, name=op, inputs=inputs)
graph.add_tasks(task)
return graph
graph = mock_workflow(ctx=context) # pylint: disable=no-value-for-parameter
@@ -55,7 +58,7 @@ class MockProcessExecutorExtension(object):
def decorate(self):
def decorator(function):
def wrapper(ctx, **operation_inputs):
- ctx.node_instance.runtime_properties['out'] = {'wrapper_inputs': operation_inputs}
+ ctx.node.runtime_properties['out'] = {'wrapper_inputs': operation_inputs}
function(ctx=ctx, **operation_inputs)
return wrapper
return decorator
@@ -63,7 +66,7 @@ class MockProcessExecutorExtension(object):
@operation
def _mock_operation(ctx, **operation_inputs):
- ctx.node_instance.runtime_properties['out']['function_inputs'] = operation_inputs
+ ctx.node.runtime_properties['out']['function_inputs'] = operation_inputs
@pytest.fixture
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b6193359/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
index e383859..d3b3300 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
@@ -45,13 +45,13 @@ def test_track_changes_of_failed_operation(context, executor):
def _assert_tracked_changes_are_applied(context):
- instance = context.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+ instance = context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
assert instance.runtime_properties == _TEST_RUNTIME_PROPERTIES
def _update_runtime_properties(context):
- context.node_instance.runtime_properties.clear()
- context.node_instance.runtime_properties.update(_TEST_RUNTIME_PROPERTIES)
+ context.node.runtime_properties.clear()
+ context.node.runtime_properties.update(_TEST_RUNTIME_PROPERTIES)
def test_refresh_state_of_tracked_attributes(context, executor):
@@ -66,7 +66,7 @@ def test_apply_tracked_changes_during_an_operation(context, executor):
'changed_but_refreshed': {'some': 'newer', 'properties': 'right there'}
}
- expected_initial = context.model.node_instance.get_by_name(
+ expected_initial = context.model.node.get_by_name(
mock.models.DEPENDENCY_NODE_INSTANCE_NAME).runtime_properties
out = _run_workflow(context=context, executor=executor, op_func=_mock_updating_operation,
@@ -87,17 +87,18 @@ def test_apply_tracked_changes_during_an_operation(context, executor):
def _run_workflow(context, executor, op_func, inputs=None):
@workflow
def mock_workflow(ctx, graph):
- node_instance = ctx.model.node_instance.get_by_name(
- mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
- node_instance.node.operations['test.op'] = {'operation': _operation_mapping(op_func)}
- task = api.task.OperationTask.node_instance(instance=node_instance, name='test.op',
- inputs=inputs or {})
+ node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+ node.interfaces = [mock.models.get_interface(
+ 'test.op', operation_kwargs=dict(implementation=_operation_mapping(op_func)))]
+ task = api.task.OperationTask.node(instance=node,
+ name='test.op',
+ inputs=inputs or {})
graph.add_tasks(task)
return graph
graph = mock_workflow(ctx=context) # pylint: disable=no-value-for-parameter
eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
eng.execute()
- return context.model.node_instance.get_by_name(
+ return context.model.node.get_by_name(
mock.models.DEPENDENCY_NODE_INSTANCE_NAME).runtime_properties.get('out')
@@ -114,25 +115,25 @@ def _mock_fail_operation(ctx):
@operation
def _mock_refreshing_operation(ctx):
- out = {'initial': copy.deepcopy(ctx.node_instance.runtime_properties)}
- ctx.node_instance.runtime_properties.update({'some': 'new', 'properties': 'right here'})
- out['after_change'] = copy.deepcopy(ctx.node_instance.runtime_properties)
- ctx.model.node_instance.refresh(ctx.node_instance)
- out['after_refresh'] = copy.deepcopy(ctx.node_instance.runtime_properties)
- ctx.node_instance.runtime_properties['out'] = out
+ out = {'initial': copy.deepcopy(ctx.node.runtime_properties)}
+ ctx.node.runtime_properties.update({'some': 'new', 'properties': 'right here'})
+ out['after_change'] = copy.deepcopy(ctx.node.runtime_properties)
+ ctx.model.node.refresh(ctx.node)
+ out['after_refresh'] = copy.deepcopy(ctx.node.runtime_properties)
+ ctx.node.runtime_properties['out'] = out
@operation
def _mock_updating_operation(ctx, committed, changed_but_refreshed):
- out = {'initial': copy.deepcopy(ctx.node_instance.runtime_properties)}
- ctx.node_instance.runtime_properties.update(committed)
- ctx.model.node_instance.update(ctx.node_instance)
- out['after_update'] = copy.deepcopy(ctx.node_instance.runtime_properties)
- ctx.node_instance.runtime_properties.update(changed_but_refreshed)
- out['after_change'] = copy.deepcopy(ctx.node_instance.runtime_properties)
- ctx.model.node_instance.refresh(ctx.node_instance)
- out['after_refresh'] = copy.deepcopy(ctx.node_instance.runtime_properties)
- ctx.node_instance.runtime_properties['out'] = out
+ out = {'initial': copy.deepcopy(ctx.node.runtime_properties)}
+ ctx.node.runtime_properties.update(committed)
+ ctx.model.node.update(ctx.node)
+ out['after_update'] = copy.deepcopy(ctx.node.runtime_properties)
+ ctx.node.runtime_properties.update(changed_but_refreshed)
+ out['after_change'] = copy.deepcopy(ctx.node.runtime_properties)
+ ctx.model.node.refresh(ctx.node)
+ out['after_refresh'] = copy.deepcopy(ctx.node.runtime_properties)
+ ctx.node.runtime_properties['out'] = out
def _operation_mapping(func):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b6193359/tests/resources/scripts/test_ssh.sh
----------------------------------------------------------------------
diff --git a/tests/resources/scripts/test_ssh.sh b/tests/resources/scripts/test_ssh.sh
index 6f18278..90202c7 100644
--- a/tests/resources/scripts/test_ssh.sh
+++ b/tests/resources/scripts/test_ssh.sh
@@ -4,7 +4,7 @@ set -u
set -e
test_run_script_basic() {
- ctx node-instance runtime-properties test_value $test_value
+ ctx node runtime-properties test_value $test_value
}
test_run_script_as_sudo() {
@@ -12,7 +12,7 @@ test_run_script_as_sudo() {
}
test_run_script_default_base_dir() {
- ctx node-instance runtime-properties work_dir $PWD
+ ctx node runtime-properties work_dir $PWD
}
test_run_script_with_hide() {
@@ -20,44 +20,44 @@ test_run_script_with_hide() {
}
test_run_script_process_config() {
- ctx node-instance runtime-properties env_value $test_value_env
- ctx node-instance runtime-properties bash_version $BASH_VERSION
- ctx node-instance runtime-properties arg1_value $1
- ctx node-instance runtime-properties arg2_value $2
- ctx node-instance runtime-properties cwd $PWD
- ctx node-instance runtime-properties ctx_path $(which ctx)
+ ctx node runtime-properties env_value $test_value_env
+ ctx node runtime-properties bash_version $BASH_VERSION
+ ctx node runtime-properties arg1_value $1
+ ctx node runtime-properties arg2_value $2
+ ctx node runtime-properties cwd $PWD
+ ctx node runtime-properties ctx_path $(which ctx)
}
test_run_script_command_prefix() {
- ctx node-instance runtime-properties dollar_dash $-
+ ctx node runtime-properties dollar_dash $-
}
test_run_script_reuse_existing_ctx_1() {
- ctx node-instance runtime-properties test_value1 $test_value1
+ ctx node runtime-properties test_value1 $test_value1
}
test_run_script_reuse_existing_ctx_2() {
- ctx node-instance runtime-properties test_value2 $test_value2
+ ctx node runtime-properties test_value2 $test_value2
}
test_run_script_download_resource_plain() {
local destination=$(mktemp)
ctx download-resource ${destination} test_resource
- ctx node-instance runtime-properties test_value "$(cat ${destination})"
+ ctx node runtime-properties test_value "$(cat ${destination})"
}
test_run_script_download_resource_and_render() {
local destination=$(mktemp)
ctx download-resource-and-render ${destination} test_resource
- ctx node-instance runtime-properties test_value "$(cat ${destination})"
+ ctx node runtime-properties test_value "$(cat ${destination})"
}
test_run_script_inputs_as_env_variables_no_override() {
- ctx node-instance runtime-properties test_value "$custom_env_var"
+ ctx node runtime-properties test_value "$custom_env_var"
}
test_run_script_inputs_as_env_variables_process_env_override() {
- ctx node-instance runtime-properties test_value "$custom_env_var"
+ ctx node runtime-properties test_value "$custom_env_var"
}
test_run_script_error_in_script() {
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b6193359/tests/resources/service_templates/node-cellar/workflows.py
----------------------------------------------------------------------
diff --git a/tests/resources/service_templates/node-cellar/workflows.py b/tests/resources/service_templates/node-cellar/workflows.py
index b3546bb..fff78bf 100644
--- a/tests/resources/service_templates/node-cellar/workflows.py
+++ b/tests/resources/service_templates/node-cellar/workflows.py
@@ -11,9 +11,6 @@ def maintenance(ctx, graph, enabled):
operation = 'Maintenance.enable' if enabled else 'Maintenance.disable'
- for node_instance in ctx.model.node_instance.iter():
- if operation in node_instance.node.operations:
- task = OperationTask.node_instance(
- instance=node_instance,
- name=operation)
- graph.add_tasks(task)
+ for node in ctx.model.node.iter():
+ for interface in node.interfaces.filter_by(name='Maintenance', type_name='Maintenance'):
+ graph.add_tasks(OperationTask.node(instance=node, name=operation))
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b6193359/tests/storage/__init__.py
----------------------------------------------------------------------
diff --git a/tests/storage/__init__.py b/tests/storage/__init__.py
index b798e01..4278831 100644
--- a/tests/storage/__init__.py
+++ b/tests/storage/__init__.py
@@ -14,8 +14,8 @@
# limitations under the License.
import os
import platform
-from tempfile import mkdtemp
from shutil import rmtree
+from tempfile import mkdtemp
from sqlalchemy import (
create_engine,
@@ -29,13 +29,14 @@ from sqlalchemy import (
from aria.storage import (
model,
- structure,
type as aria_type,
+ structure,
+ modeling
)
-class MockModel(model.DeclarativeBase, structure.ModelMixin): #pylint: disable=abstract-method
- __tablename__ = 'mock_models'
+class MockModel(model.aria_declarative_base, structure.ModelMixin): #pylint: disable=abstract-method
+ __tablename__ = 'mock_model'
model_dict = Column(aria_type.Dict)
model_list = Column(aria_type.List)
value = Column(Integer)
@@ -64,7 +65,7 @@ def release_sqlite_storage(storage):
session.rollback()
session.close()
for engine in set(mapi._engine for mapi in mapis):
- model.DeclarativeBase.metadata.drop_all(engine)
+ model.aria_declarative_base.metadata.drop_all(engine)
def init_inmemory_model_storage():
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b6193359/tests/storage/test_instrumentation.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_instrumentation.py b/tests/storage/test_instrumentation.py
index 771342c..08d5ae0 100644
--- a/tests/storage/test_instrumentation.py
+++ b/tests/storage/test_instrumentation.py
@@ -17,17 +17,16 @@ import pytest
from sqlalchemy import Column, Text, Integer, event
from aria.storage import (
- model,
structure,
- type as aria_type,
ModelStorage,
sql_mapi,
instrumentation,
- exceptions
+ exceptions,
+ type as aria_type,
+ model
)
from ..storage import release_sqlite_storage, init_inmemory_model_storage
-
STUB = instrumentation._STUB
Value = instrumentation._Value
instruments_holder = []
@@ -346,15 +345,15 @@ class _MockModel(structure.ModelMixin):
string2 = Column(Text)
-class MockModel1(model.DeclarativeBase, _MockModel):
- __tablename__ = 'mock_model1'
+class MockModel1(_MockModel, model.aria_declarative_base):
+ __tablename__ = 'mock_model_1'
-class MockModel2(model.DeclarativeBase, _MockModel):
- __tablename__ = 'mock_model2'
+class MockModel2(_MockModel, model.aria_declarative_base):
+ __tablename__ = 'mock_model_2'
-class StrictMockModel(model.DeclarativeBase):
+class StrictMockModel(structure.ModelMixin, model.aria_declarative_base):
__tablename__ = 'strict_mock_model'
strict_dict = Column(aria_type.StrictDict(basestring, basestring))
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b6193359/tests/storage/test_model_storage.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_model_storage.py b/tests/storage/test_model_storage.py
index 34cc5df..f88080a 100644
--- a/tests/storage/test_model_storage.py
+++ b/tests/storage/test_model_storage.py
@@ -17,9 +17,9 @@ import pytest
from aria.storage import (
ModelStorage,
- model,
exceptions,
sql_mapi,
+ modeling,
)
from aria import application_model_storage
from ..storage import release_sqlite_storage, init_inmemory_model_storage
@@ -38,7 +38,7 @@ def storage():
@pytest.fixture(scope='module', autouse=True)
def module_cleanup():
- model.DeclarativeBase.metadata.remove(MockModel.__table__) #pylint: disable=no-member
+ modeling.model.aria_declarative_base.metadata.remove(MockModel.__table__) #pylint: disable=no-member
def test_storage_base(storage):
@@ -63,14 +63,41 @@ def test_model_storage(storage):
def test_application_storage_factory():
storage = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
initiator=init_inmemory_model_storage)
+
+ assert storage.parameter
+ assert storage.mapping_template
+ assert storage.substitution_template
+ assert storage.service_template
+ assert storage.node_template
+ assert storage.group_template
+ assert storage.interface_template
+ assert storage.operation_template
+ assert storage.artifact_template
+ assert storage.policy_template
+ assert storage.group_policy_template
+ assert storage.group_policy_trigger_template
+ assert storage.requirement_template
+ assert storage.capability_template
+
+ assert storage.mapping
+ assert storage.substitution
+ assert storage.service_instance
assert storage.node
- assert storage.node_instance
- assert storage.plugin
- assert storage.blueprint
- assert storage.deployment
- assert storage.deployment_update
- assert storage.deployment_update_step
- assert storage.deployment_modification
+ assert storage.group
+ assert storage.interface
+ assert storage.operation
+ assert storage.capability
+ assert storage.artifact
+ assert storage.policy
+ assert storage.group_policy
+ assert storage.group_policy_trigger
+ assert storage.relationship
+
assert storage.execution
+ assert storage.service_instance_update
+ assert storage.service_instance_update_step
+ assert storage.service_instance_modification
+ assert storage.plugin
+ assert storage.task
release_sqlite_storage(storage)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b6193359/tests/storage/test_models.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_models.py b/tests/storage/test_models.py
index 6450152..1e8d6b5 100644
--- a/tests/storage/test_models.py
+++ b/tests/storage/test_models.py
@@ -22,22 +22,21 @@ from aria.storage import (
exceptions,
sql_mapi,
)
-from aria.storage.model import (
- DeploymentUpdateStep,
- Blueprint,
+from aria.storage.modeling.model import (
+ ServiceTemplate,
+ ServiceInstance,
+ ServiceInstanceUpdate,
+ ServiceInstanceUpdateStep,
+ ServiceInstanceModification,
Execution,
Task,
Plugin,
- Deployment,
- Node,
- NodeInstance,
Relationship,
- RelationshipInstance,
- DeploymentUpdate,
- DeploymentModification,
+ NodeTemplate,
+ Node,
+ Parameter
)
-
from tests import mock
from ..storage import release_sqlite_storage, init_inmemory_model_storage
@@ -58,61 +57,56 @@ def _empty_storage():
initiator=init_inmemory_model_storage)
-def _blueprint_storage():
+def _service_template_storage():
storage = _empty_storage()
- blueprint = mock.models.get_blueprint()
- storage.blueprint.put(blueprint)
+ service_template = mock.models.get_blueprint()
+ storage.service_template.put(service_template)
return storage
-def _deployment_storage():
- storage = _blueprint_storage()
- deployment = mock.models.get_deployment(storage.blueprint.list()[0])
- storage.deployment.put(deployment)
+def _service_instance_storage():
+ storage = _service_template_storage()
+ service_instance = mock.models.get_deployment(
+ storage.service_template.get_by_name(mock.models.BLUEPRINT_NAME))
+ storage.service_instance.put(service_instance)
return storage
-def _deployment_update_storage():
- storage = _deployment_storage()
- deployment_update = DeploymentUpdate(
- deployment=storage.deployment.list()[0],
+def _service_instance_update_storage():
+ storage = _service_instance_storage()
+ service_instance_update = ServiceInstanceUpdate(
+ service_instance=storage.service_instance.list()[0],
created_at=now,
- deployment_plan={},
+ service_instance_plan={},
)
- storage.deployment_update.put(deployment_update)
+ storage.service_instance_update.put(service_instance_update)
return storage
-def _node_storage():
- storage = _deployment_storage()
- node = mock.models.get_dependency_node(storage.deployment.list()[0])
- storage.node.put(node)
+def _node_template_storage():
+ storage = _service_instance_storage()
+ node_template = mock.models.get_dependency_node(storage.service_instance.list()[0])
+ storage.node_template.put(node_template)
return storage
def _nodes_storage():
- storage = _deployment_storage()
- dependent_node = mock.models.get_dependent_node(storage.deployment.list()[0])
- dependency_node = mock.models.get_dependency_node(storage.deployment.list()[0])
- storage.node.put(dependent_node)
- storage.node.put(dependency_node)
- return storage
-
-
-def _node_instances_storage():
storage = _nodes_storage()
- dependent_node = storage.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
- dependency_node = storage.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
- dependency_node_instance = mock.models.get_dependency_node_instance(dependency_node)
- dependent_node_instance = mock.models.get_dependent_node_instance(dependent_node)
- storage.node_instance.put(dependency_node_instance)
- storage.node_instance.put(dependent_node_instance)
+ service_instance = storage.service_instance.get_by_name(mock.models.DEPLOYMENT_NAME)
+ dependent_node_template = storage.node_template.get_by_name(mock.models.DEPENDENT_NODE_NAME)
+ dependency_node_template = storage.node_template.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+ dependency_node = mock.models.get_dependency_node_instance(dependency_node_template,
+ service_instance)
+ dependent_node = mock.models.get_dependent_node_instance(dependent_node_template,
+ service_instance)
+ storage.node.put(dependency_node)
+ storage.node.put(dependent_node)
return storage
def _execution_storage():
- storage = _deployment_storage()
- execution = mock.models.get_execution(storage.deployment.list()[0])
+ storage = _service_instance_storage()
+ execution = mock.models.get_execution(storage.service_instance.list()[0])
plugin = mock.models.get_plugin()
storage.execution.put(execution)
storage.plugin.put(plugin)
@@ -126,26 +120,26 @@ def empty_storage():
@pytest.fixture
-def blueprint_storage():
- with sql_storage(_blueprint_storage) as storage:
+def service_template_storage():
+ with sql_storage(_service_template_storage) as storage:
yield storage
@pytest.fixture
-def deployment_storage():
- with sql_storage(_deployment_storage) as storage:
+def service_instance_storage():
+ with sql_storage(_service_instance_storage) as storage:
yield storage
@pytest.fixture
-def deployment_update_storage():
- with sql_storage(_deployment_update_storage) as storage:
+def service_instance_update_storage():
+ with sql_storage(_service_instance_update_storage) as storage:
yield storage
@pytest.fixture
-def node_storage():
- with sql_storage(_node_storage) as storage:
+def node_template_storage():
+ with sql_storage(_node_template_storage) as storage:
yield storage
@@ -156,12 +150,6 @@ def nodes_storage():
@pytest.fixture
-def node_instances_storage():
- with sql_storage(_node_instances_storage) as storage:
- yield storage
-
-
-@pytest.fixture
def execution_storage():
with sql_storage(_execution_storage) as storage:
yield storage
@@ -171,17 +159,17 @@ m_cls = type('MockClass')
now = datetime.utcnow()
-def _test_model(is_valid, storage, model_name, model_cls, model_kwargs):
+def _test_model(is_valid, storage, model_cls, model_kwargs):
if is_valid:
model = model_cls(**model_kwargs)
- getattr(storage, model_name).put(model)
+ getattr(storage, model_cls.__modelname__).put(model)
return model
else:
- with pytest.raises(exceptions.StorageError):
- getattr(storage, model_name).put(model_cls(**model_kwargs))
+ with pytest.raises((exceptions.StorageError, TypeError),):
+ getattr(storage, model_cls.__modelname__).put(model_cls(**model_kwargs))
-class TestBlueprint(object):
+class TestServiceTemplate(object):
@pytest.mark.parametrize(
'is_valid, plan, description, created_at, updated_at, main_file_name',
@@ -198,74 +186,71 @@ class TestBlueprint(object):
updated_at, main_file_name):
_test_model(is_valid=is_valid,
storage=empty_storage,
- model_name='blueprint',
- model_cls=Blueprint,
- model_kwargs=dict(plan=plan,
- description=description,
- created_at=created_at,
- updated_at=updated_at,
- main_file_name=main_file_name))
+ model_cls=ServiceTemplate,
+ model_kwargs=dict(
+ plan=plan,
+ description=description,
+ created_at=created_at,
+ updated_at=updated_at,
+ main_file_name=main_file_name)
+ )
-class TestDeployment(object):
+class TestServiceInstance(object):
@pytest.mark.parametrize(
- 'is_valid, name, created_at, description, inputs, groups, permalink, policy_triggers, '
+ 'is_valid, name, created_at, description, inputs, permalink, policy_triggers, '
'policy_types, outputs, scaling_groups, updated_at, workflows',
[
- (False, m_cls, now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (False, 'name', m_cls, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (False, 'name', now, m_cls, {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (False, 'name', now, 'desc', m_cls, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (False, 'name', now, 'desc', {}, m_cls, 'perlnk', {}, {}, {}, {}, now, {}),
- (False, 'name', now, 'desc', {}, {}, m_cls, {}, {}, {}, {}, now, {}),
- (False, 'name', now, 'desc', {}, {}, 'perlnk', m_cls, {}, {}, {}, now, {}),
- (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, m_cls, {}, {}, now, {}),
- (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, m_cls, {}, now, {}),
- (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, m_cls, now, {}),
- (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, m_cls, {}),
- (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, m_cls),
-
- (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (True, None, now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (True, 'name', now, None, {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (True, 'name', now, 'desc', None, {}, 'perlnk', {}, {}, {}, {}, now, {}),
- (True, 'name', now, 'desc', {}, None, 'perlnk', {}, {}, {}, {}, now, {}),
- (True, 'name', now, 'desc', {}, {}, None, {}, {}, {}, {}, now, {}),
- (True, 'name', now, 'desc', {}, {}, 'perlnk', None, {}, {}, {}, now, {}),
- (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, None, {}, {}, now, {}),
- (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, None, {}, now, {}),
- (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, None, now, {}),
- (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, None, {}),
- (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, None),
+ (False, m_cls, now, 'desc', [], 'perlnk', {}, {}, [], {}, now, {}),
+ (False, 'name', m_cls, 'desc', [], 'perlnk', {}, {}, [], {}, now, {}),
+ (False, 'name', now, m_cls, [], 'perlnk', {}, {}, [], {}, now, {}),
+ (False, 'name', now, 'desc', [], m_cls, {}, {}, [], {}, now, {}),
+ (False, 'name', now, 'desc', [], 'perlnk', m_cls, {}, [], {}, now, {}),
+ (False, 'name', now, 'desc', [], 'perlnk', {}, m_cls, [], {}, now, {}),
+ (False, 'name', now, 'desc', [], 'perlnk', {}, {}, m_cls, {}, now, {}),
+ (False, 'name', now, 'desc', [], 'perlnk', {}, {}, [], m_cls, now, {}),
+ (False, 'name', now, 'desc', [], 'perlnk', {}, {}, [], {}, m_cls, {}),
+ (False, 'name', now, 'desc', [], 'perlnk', {}, {}, [], {}, now, m_cls),
+
+ (True, 'name', now, 'desc', [], 'perlnk', {}, {}, [], {}, now, {}),
+ (True, None, now, 'desc', [], 'perlnk', {}, {}, [], {}, now, {}),
+ (True, 'name', now, 'desc', [], 'perlnk', {}, {}, [], {}, now, {}),
+ (True, 'name', now, None, [], 'perlnk', {}, {}, [], {}, now, {}),
+ (True, 'name', now, 'desc', [], 'perlnk', {}, {}, [], {}, now, {}),
+ (True, 'name', now, 'desc', [], None, {}, {}, [], {}, now, {}),
+ (True, 'name', now, 'desc', [], 'perlnk', None, {}, [], {}, now, {}),
+ (True, 'name', now, 'desc', [], 'perlnk', {}, None, [], {}, now, {}),
+ (True, 'name', now, 'desc', [], 'perlnk', {}, {}, [], None, now, {}),
+ (True, 'name', now, 'desc', [], 'perlnk', {}, {}, [], {}, None, {}),
+ (True, 'name', now, 'desc', [], 'perlnk', {}, {}, [], {}, now, None),
]
)
- def test_deployment_model_creation(self, deployment_storage, is_valid, name, created_at,
- description, inputs, groups, permalink, policy_triggers,
+ def test_deployment_model_creation(self, service_instance_storage, is_valid, name, created_at,
+ description, inputs, permalink, policy_triggers,
policy_types, outputs, scaling_groups, updated_at,
workflows):
- deployment = _test_model(is_valid=is_valid,
- storage=deployment_storage,
- model_name='deployment',
- model_cls=Deployment,
- model_kwargs=dict(
- name=name,
- blueprint=deployment_storage.blueprint.list()[0],
- created_at=created_at,
- description=description,
- inputs=inputs,
- groups=groups,
- permalink=permalink,
- policy_triggers=policy_triggers,
- policy_types=policy_types,
- outputs=outputs,
- scaling_groups=scaling_groups,
- updated_at=updated_at,
- workflows=workflows
- ))
+ service_instance = _test_model(
+ is_valid=is_valid,
+ storage=service_instance_storage,
+ model_cls=ServiceInstance,
+ model_kwargs=dict(
+ name=name,
+ service_template=service_instance_storage.service_template.list()[0],
+ created_at=created_at,
+ description=description,
+ inputs=inputs,
+ permalink=permalink,
+ policy_triggers=policy_triggers,
+ policy_types=policy_types,
+ outputs=outputs,
+ scaling_groups=scaling_groups,
+ updated_at=updated_at,
+ workflows=workflows
+ ))
if is_valid:
- assert deployment.blueprint == deployment_storage.blueprint.list()[0]
+ assert service_instance.service_template == \
+ service_instance_storage.service_template.list()[0]
class TestExecution(object):
@@ -289,27 +274,27 @@ class TestExecution(object):
(True, now, now, now, 'error', False, None, Execution.STARTED, 'wf_name'),
]
)
- def test_execution_model_creation(self, deployment_storage, is_valid, created_at, started_at,
- ended_at, error, is_system_workflow, parameters, status,
- workflow_name):
- execution = _test_model(is_valid=is_valid,
- storage=deployment_storage,
- model_name='execution',
- model_cls=Execution,
- model_kwargs=dict(
- deployment=deployment_storage.deployment.list()[0],
- created_at=created_at,
- started_at=started_at,
- ended_at=ended_at,
- error=error,
- is_system_workflow=is_system_workflow,
- parameters=parameters,
- status=status,
- workflow_name=workflow_name,
- ))
+ def test_execution_model_creation(self, service_instance_storage, is_valid, created_at,
+ started_at, ended_at, error, is_system_workflow, parameters,
+ status, workflow_name):
+ execution = _test_model(
+ is_valid=is_valid,
+ storage=service_instance_storage,
+ model_cls=Execution,
+ model_kwargs=dict(
+ service_instance=service_instance_storage.service_instance.list()[0],
+ created_at=created_at,
+ started_at=started_at,
+ ended_at=ended_at,
+ error=error,
+ is_system_workflow=is_system_workflow,
+ parameters=parameters,
+ status=status,
+ workflow_name=workflow_name,
+ ))
if is_valid:
- assert execution.deployment == deployment_storage.deployment.list()[0]
- assert execution.blueprint == deployment_storage.blueprint.list()[0]
+ assert execution.service_instance == service_instance_storage.service_instance.list()[0]
+ assert execution.service_template == service_instance_storage.service_template.list()[0]
def test_execution_status_transition(self):
def create_execution(status):
@@ -376,10 +361,11 @@ class TestExecution(object):
execution.status = transitioned_status
-class TestDeploymentUpdate(object):
+class TestServiceInstanceUpdate(object):
@pytest.mark.parametrize(
- 'is_valid, created_at, deployment_plan, deployment_update_node_instances, '
- 'deployment_update_deployment, deployment_update_nodes, modified_entity_ids, state',
+ 'is_valid, created_at, deployment_plan, service_instance_update_node_instances, '
+ 'service_instance_update_service_instance, service_instance_update_nodes, '
+ 'modified_entity_ids, state',
[
(False, m_cls, {}, {}, {}, [], {}, 'state'),
(False, now, m_cls, {}, {}, [], {}, 'state'),
@@ -397,74 +383,74 @@ class TestDeploymentUpdate(object):
(True, now, {}, {}, {}, [], {}, None),
]
)
- def test_deployment_update_model_creation(self, deployment_storage, is_valid, created_at,
- deployment_plan, deployment_update_node_instances,
- deployment_update_deployment, deployment_update_nodes,
- modified_entity_ids, state):
- deployment_update = _test_model(
+ def test_service_instance_update_model_creation(
+ self, service_instance_storage, is_valid, created_at, deployment_plan,
+ service_instance_update_node_instances, service_instance_update_service_instance,
+ service_instance_update_nodes, modified_entity_ids, state):
+ service_instance_update = _test_model(
is_valid=is_valid,
- storage=deployment_storage,
- model_name='deployment_update',
- model_cls=DeploymentUpdate,
+ storage=service_instance_storage,
+ model_cls=ServiceInstanceUpdate,
model_kwargs=dict(
- deployment=deployment_storage.deployment.list()[0],
+ service_instance=service_instance_storage.service_instance.list()[0],
created_at=created_at,
- deployment_plan=deployment_plan,
- deployment_update_node_instances=deployment_update_node_instances,
- deployment_update_deployment=deployment_update_deployment,
- deployment_update_nodes=deployment_update_nodes,
+ service_instance_plan=deployment_plan,
+ service_instance_update_node_instances=service_instance_update_node_instances,
+ service_instance_update_service_instance=service_instance_update_service_instance,
+ service_instance_update_nodes=service_instance_update_nodes,
modified_entity_ids=modified_entity_ids,
state=state,
))
if is_valid:
- assert deployment_update.deployment == deployment_storage.deployment.list()[0]
+ assert service_instance_update.service_instance == \
+ service_instance_storage.service_instance.list()[0]
-class TestDeploymentUpdateStep(object):
+class TestServiceInstanceUpdateStep(object):
@pytest.mark.parametrize(
'is_valid, action, entity_id, entity_type',
[
- (False, m_cls, 'id', DeploymentUpdateStep.ENTITY_TYPES.NODE),
- (False, DeploymentUpdateStep.ACTION_TYPES.ADD, m_cls,
- DeploymentUpdateStep.ENTITY_TYPES.NODE),
- (False, DeploymentUpdateStep.ACTION_TYPES.ADD, 'id', m_cls),
+ (False, m_cls, 'id', ServiceInstanceUpdateStep.ENTITY_TYPES.NODE),
+ (False, ServiceInstanceUpdateStep.ACTION_TYPES.ADD, m_cls,
+ ServiceInstanceUpdateStep.ENTITY_TYPES.NODE),
+ (False, ServiceInstanceUpdateStep.ACTION_TYPES.ADD, 'id', m_cls),
- (True, DeploymentUpdateStep.ACTION_TYPES.ADD, 'id',
- DeploymentUpdateStep.ENTITY_TYPES.NODE)
+ (True, ServiceInstanceUpdateStep.ACTION_TYPES.ADD, 'id',
+ ServiceInstanceUpdateStep.ENTITY_TYPES.NODE)
]
)
- def test_deployment_update_step_model_creation(self, deployment_update_storage, is_valid,
+ def test_deployment_update_step_model_creation(self, service_instance_update_storage, is_valid,
action, entity_id, entity_type):
- deployment_update_step = _test_model(
+ service_instance_update_step = _test_model(
is_valid=is_valid,
- storage=deployment_update_storage,
- model_name='deployment_update_step',
- model_cls=DeploymentUpdateStep,
+ storage=service_instance_update_storage,
+ model_cls=ServiceInstanceUpdateStep,
model_kwargs=dict(
- deployment_update=deployment_update_storage.deployment_update.list()[0],
+ service_instance_update=
+ service_instance_update_storage.service_instance_update.list()[0],
action=action,
entity_id=entity_id,
entity_type=entity_type
))
if is_valid:
- assert deployment_update_step.deployment_update == \
- deployment_update_storage.deployment_update.list()[0]
+ assert service_instance_update_step.service_instance_update == \
+ service_instance_update_storage.service_instance_update.list()[0]
def test_deployment_update_step_order(self):
- add_node = DeploymentUpdateStep(
+ add_node = ServiceInstanceUpdateStep(
id='add_step',
action='add',
entity_type='node',
entity_id='node_id')
- modify_node = DeploymentUpdateStep(
+ modify_node = ServiceInstanceUpdateStep(
id='modify_step',
action='modify',
entity_type='node',
entity_id='node_id')
- remove_node = DeploymentUpdateStep(
+ remove_node = ServiceInstanceUpdateStep(
id='remove_step',
action='remove',
entity_type='node',
@@ -476,13 +462,13 @@ class TestDeploymentUpdateStep(object):
assert remove_node < modify_node < add_node
assert not remove_node > modify_node > add_node
- add_rel = DeploymentUpdateStep(
+ add_rel = ServiceInstanceUpdateStep(
id='add_step',
action='add',
entity_type='relationship',
entity_id='relationship_id')
- remove_rel = DeploymentUpdateStep(
+ remove_rel = ServiceInstanceUpdateStep(
id='remove_step',
action='remove',
entity_type='relationship',
@@ -494,140 +480,84 @@ class TestDeploymentUpdateStep(object):
class TestDeploymentModification(object):
@pytest.mark.parametrize(
- 'is_valid, context, created_at, ended_at, modified_nodes, node_instances, status',
+ 'is_valid, context, created_at, ended_at, modified_node_templates, nodes, status',
[
- (False, m_cls, now, now, {}, {}, DeploymentModification.STARTED),
- (False, {}, m_cls, now, {}, {}, DeploymentModification.STARTED),
- (False, {}, now, m_cls, {}, {}, DeploymentModification.STARTED),
- (False, {}, now, now, m_cls, {}, DeploymentModification.STARTED),
- (False, {}, now, now, {}, m_cls, DeploymentModification.STARTED),
+ (False, m_cls, now, now, {}, {}, ServiceInstanceModification.STARTED),
+ (False, {}, m_cls, now, {}, {}, ServiceInstanceModification.STARTED),
+ (False, {}, now, m_cls, {}, {}, ServiceInstanceModification.STARTED),
+ (False, {}, now, now, m_cls, {}, ServiceInstanceModification.STARTED),
+ (False, {}, now, now, {}, m_cls, ServiceInstanceModification.STARTED),
(False, {}, now, now, {}, {}, m_cls),
- (True, {}, now, now, {}, {}, DeploymentModification.STARTED),
- (True, {}, now, None, {}, {}, DeploymentModification.STARTED),
- (True, {}, now, now, None, {}, DeploymentModification.STARTED),
- (True, {}, now, now, {}, None, DeploymentModification.STARTED),
+ (True, {}, now, now, {}, {}, ServiceInstanceModification.STARTED),
+ (True, {}, now, None, {}, {}, ServiceInstanceModification.STARTED),
+ (True, {}, now, now, None, {}, ServiceInstanceModification.STARTED),
+ (True, {}, now, now, {}, None, ServiceInstanceModification.STARTED),
]
)
- def test_deployment_modification_model_creation(self, deployment_storage, is_valid, context,
- created_at, ended_at, modified_nodes,
- node_instances, status):
+ def test_deployment_modification_model_creation(
+ self, service_instance_storage, is_valid, context, created_at, ended_at,
+ modified_node_templates, nodes, status):
deployment_modification = _test_model(
is_valid=is_valid,
- storage=deployment_storage,
- model_name='deployment_modification',
- model_cls=DeploymentModification,
+ storage=service_instance_storage,
+ model_cls=ServiceInstanceModification,
model_kwargs=dict(
- deployment=deployment_storage.deployment.list()[0],
+ service_instance=service_instance_storage.service_instance.list()[0],
context=context,
created_at=created_at,
ended_at=ended_at,
- modified_nodes=modified_nodes,
- node_instances=node_instances,
+ modified_nodes=modified_node_templates,
+ node_instances=nodes,
status=status,
))
if is_valid:
- assert deployment_modification.deployment == deployment_storage.deployment.list()[0]
+ assert deployment_modification.service_instance == \
+ service_instance_storage.service_instance.list()[0]
-class TestNode(object):
+class TestNodeTemplate(object):
@pytest.mark.parametrize(
- 'is_valid, name, deploy_number_of_instances, max_number_of_instances, '
- 'min_number_of_instances, number_of_instances, planned_number_of_instances, plugins, '
- 'properties, operations, type, type_hierarchy',
+ 'is_valid, name, default_instances, max_instances, min_instances, plugins, properties, '
+ 'type_name, type_hierarchy',
[
- (False, m_cls, 1, 1, 1, 1, 1, [], {}, {}, 'type', []),
- (False, 'name', m_cls, 1, 1, 1, 1, [], {}, {}, 'type', []),
- (False, 'name', 1, m_cls, 1, 1, 1, [], {}, {}, 'type', []),
- (False, 'name', 1, 1, m_cls, 1, 1, [], {}, {}, 'type', []),
- (False, 'name', 1, 1, 1, m_cls, 1, [], {}, {}, 'type', []),
- (False, 'name', 1, 1, 1, 1, m_cls, [], {}, {}, 'type', []),
- (False, 'name', 1, 1, 1, 1, 1, m_cls, {}, {}, 'type', []),
- (False, 'name', 1, 1, 1, 1, 1, [], m_cls, {}, 'type', []),
- (False, 'name', 1, 1, 1, 1, 1, [], {}, m_cls, 'type', []),
- (False, 'name', 1, 1, 1, 1, 1, [], {}, {}, m_cls, []),
- (False, 'name', 1, 1, 1, 1, 1, [], {}, {}, 'type', m_cls),
-
- (True, 'name', 1, 1, 1, 1, 1, [], {}, {}, 'type', []),
- (True, 'name', 1, 1, 1, 1, 1, None, {}, {}, 'type', []),
- (True, 'name', 1, 1, 1, 1, 1, [], None, {}, 'type', []),
- (True, 'name', 1, 1, 1, 1, 1, [], {}, None, 'type', []),
- (True, 'name', 1, 1, 1, 1, 1, [], {}, {}, 'type', None),
+ (False, m_cls, 1, 1, 1, [], [], 'type', []),
+ (False, 'name', m_cls, 1, 1, [], [], 'type', []),
+ (False, 'name', 1, m_cls, 1, [], [], 'type', []),
+ (False, 'name', 1, 1, m_cls, [], [], 'type', []),
+ (False, 'name', 1, 1, 1, m_cls, [], 'type', []),
+ (False, 'name', 1, 1, 1, [], [], m_cls, []),
+ (False, 'name', 1, 1, 1, [], [], 'type', m_cls),
+ #
+ (True, 'name', 1, 1, 1, [], [], 'type', []),
+ (True, 'name', 1, 1, 1, None, [], 'type', []),
+ (True, 'name', 1, 1, 1, [], [], 'type', None),
]
)
- def test_node_model_creation(self, deployment_storage, is_valid, name,
- deploy_number_of_instances, max_number_of_instances,
- min_number_of_instances, number_of_instances,
- planned_number_of_instances, plugins,
- properties, operations, type, type_hierarchy):
- node = _test_model(
+ def test_node_model_creation(self, service_instance_storage, is_valid, name,
+ default_instances, max_instances, min_instances, plugins,
+ properties, type_name, type_hierarchy):
+ node_template = _test_model(
is_valid=is_valid,
- storage=deployment_storage,
- model_name='node',
- model_cls=Node,
+ storage=service_instance_storage,
+ model_cls=NodeTemplate,
model_kwargs=dict(
name=name,
- deploy_number_of_instances=deploy_number_of_instances,
- max_number_of_instances=max_number_of_instances,
- min_number_of_instances=min_number_of_instances,
- number_of_instances=number_of_instances,
- planned_number_of_instances=planned_number_of_instances,
+ default_instances=default_instances,
+ max_instances=max_instances,
+ min_instances=min_instances,
plugins=plugins,
properties=properties,
- operations=operations,
- type=type,
+ type_name=type_name,
type_hierarchy=type_hierarchy,
- deployment=deployment_storage.deployment.list()[0]
+ service_template=service_instance_storage.service_template.list()[0]
))
if is_valid:
- assert node.deployment == deployment_storage.deployment.list()[0]
+ assert node_template.service_template == \
+ service_instance_storage.service_template.list()[0]
-class TestRelationship(object):
- @pytest.mark.parametrize(
- 'is_valid, source_interfaces, source_operations, target_interfaces, target_operations, '
- 'type, type_hierarchy, properties',
- [
- (False, m_cls, {}, {}, {}, 'type', [], {}),
- (False, {}, m_cls, {}, {}, 'type', [], {}),
- (False, {}, {}, m_cls, {}, 'type', [], {}),
- (False, {}, {}, {}, m_cls, 'type', [], {}),
- (False, {}, {}, {}, {}, m_cls, [], {}),
- (False, {}, {}, {}, {}, 'type', m_cls, {}),
- (False, {}, {}, {}, {}, 'type', [], m_cls),
-
- (True, {}, {}, {}, {}, 'type', [], {}),
- (True, None, {}, {}, {}, 'type', [], {}),
- (True, {}, {}, None, {}, 'type', [], {}),
- (True, {}, {}, {}, {}, 'type', None, {}),
- (True, {}, {}, {}, {}, 'type', [], None),
- ]
- )
- def test_relationship_model_ceration(self, nodes_storage, is_valid, source_interfaces,
- source_operations, target_interfaces, target_operations,
- type, type_hierarchy, properties):
- relationship = _test_model(
- is_valid=is_valid,
- storage=nodes_storage,
- model_name='relationship',
- model_cls=Relationship,
- model_kwargs=dict(
- source_node=nodes_storage.node.list()[1],
- target_node=nodes_storage.node.list()[0],
- source_interfaces=source_interfaces,
- source_operations=source_operations,
- target_interfaces=target_interfaces,
- target_operations=target_operations,
- type=type,
- type_hierarchy=type_hierarchy,
- properties=properties,
- ))
- if is_valid:
- assert relationship.source_node == nodes_storage.node.list()[1]
- assert relationship.target_node == nodes_storage.node.list()[0]
-
-
-class TestNodeInstance(object):
+class TestNode(object):
@pytest.mark.parametrize(
'is_valid, name, runtime_properties, scaling_groups, state, version',
[
@@ -644,94 +574,90 @@ class TestNodeInstance(object):
(True, 'name', {}, [], 'state', None),
]
)
- def test_node_instance_model_creation(self, node_storage, is_valid, name, runtime_properties,
- scaling_groups, state, version):
+ def test_node_instance_model_creation(self, node_template_storage, is_valid, name,
+ runtime_properties, scaling_groups, state, version):
node_instance = _test_model(
is_valid=is_valid,
- storage=node_storage,
- model_name='node_instance',
- model_cls=NodeInstance,
+ storage=node_template_storage,
+ model_cls=Node,
model_kwargs=dict(
- node=node_storage.node.list()[0],
+ node_template=node_template_storage.node_template.list()[0],
name=name,
runtime_properties=runtime_properties,
scaling_groups=scaling_groups,
state=state,
version=version,
+ service_instance=node_template_storage.service_instance.list()[0]
))
if is_valid:
- assert node_instance.node == node_storage.node.list()[0]
- assert node_instance.deployment == node_storage.deployment.list()[0]
+ assert node_instance.node_template == node_template_storage.node_template.list()[0]
+ assert node_instance.service_instance == \
+ node_template_storage.service_instance.list()[0]
class TestNodeInstanceIP(object):
ip = '1.1.1.1'
- def test_ip_on_none_hosted_node_instance(self, deployment_storage):
- node = self._node(deployment_storage, ip='not considered')
- node_instance = self._node_instance(deployment_storage, node,
- is_host=False,
- ip='not considered')
- assert node_instance.ip is None
-
- def test_property_ip_on_host_node_instance(self, deployment_storage):
- node = self._node(deployment_storage, ip=self.ip)
- node_instance = self._node_instance(deployment_storage, node,
- is_host=True,
- ip=None)
- assert node_instance.ip == self.ip
-
- def test_runtime_property_ip_on_host_node_instance(self, deployment_storage):
- node = self._node(deployment_storage, ip='not considered')
- node_instance = self._node_instance(deployment_storage, node,
- is_host=True,
- ip=self.ip)
- assert node_instance.ip == self.ip
-
- def test_no_ip_configured_on_host_node_instance(self, deployment_storage):
- node = self._node(deployment_storage, ip=None)
- node_instance = self._node_instance(deployment_storage, node,
- is_host=True,
- ip=None)
- assert node_instance.ip is None
-
- def test_runtime_property_on_hosted_node_instance(self, deployment_storage):
- host_node = self._node(deployment_storage, ip=None)
- host_node_instance = self._node_instance(deployment_storage, host_node,
- is_host=True,
- ip=self.ip)
- node = self._node(deployment_storage, ip=None)
- node_instance = self._node_instance(deployment_storage, node,
- is_host=False,
- ip=None,
- host_fk=host_node_instance.id)
- assert node_instance.ip == self.ip
-
- def _node(self, storage, ip):
+ def test_ip_on_none_hosted_node_instance(self, service_instance_storage):
+ node_template = self._node_template(service_instance_storage, ip='not considered')
+ node = self._node(service_instance_storage,
+ node_template,
+ is_host=False,
+ ip='not considered')
+ assert node.ip is None
+
+ def test_property_ip_on_host_node_instance(self, service_instance_storage):
+ node_template = self._node_template(service_instance_storage, ip=self.ip)
+ node = self._node(service_instance_storage, node_template, is_host=True, ip=None)
+ assert node.ip == self.ip
+
+ def test_runtime_property_ip_on_host_node_instance(self, service_instance_storage):
+ node_template = self._node_template(service_instance_storage, ip='not considered')
+ node = self._node(service_instance_storage, node_template, is_host=True, ip=self.ip)
+ assert node.ip == self.ip
+
+ def test_no_ip_configured_on_host_node_instance(self, service_instance_storage):
+ node_template = self._node_template(service_instance_storage, ip=None)
+ node = self._node(service_instance_storage, node_template, is_host=True, ip=None)
+ assert node.ip is None
+
+ def test_runtime_property_on_hosted_node_instance(self, service_instance_storage):
+ host_node_template = self._node_template(service_instance_storage, ip=None)
+ host_node = self._node(service_instance_storage,
+ host_node_template,
+ is_host=True,
+ ip=self.ip)
+ node_template = self._node_template(service_instance_storage, ip=None)
+ node = self._node(service_instance_storage,
+ node_template,
+ is_host=False,
+ ip=None,
+ host_fk=host_node.id)
+ assert node.ip == self.ip
+
+ def _node_template(self, storage, ip):
kwargs = dict(
- name='node',
- deploy_number_of_instances=1,
- max_number_of_instances=1,
- min_number_of_instances=1,
- number_of_instances=1,
- planned_number_of_instances=1,
- properties={},
- type='',
- deployment=storage.deployment.list()[0]
+ name='node_template',
+ default_instances=1,
+ max_instances=1,
+ min_instances=1,
+ type_name='',
+ service_template=storage.service_template.list()[0]
)
if ip:
- kwargs['properties']['ip'] = ip
- node = Node(**kwargs)
- storage.node.put(node)
+ kwargs['properties'] = [Parameter(name='ip', type='str', str_value=str(ip))]
+ node = NodeTemplate(**kwargs)
+ storage.node_template.put(node)
return node
- def _node_instance(self, storage, node, is_host, ip, host_fk=None):
+ def _node(self, storage, node, is_host, ip, host_fk=None):
kwargs = dict(
- name='node_instance',
- node=node,
+ name='node',
+ node_template=node,
runtime_properties={},
- state=''
+ state='',
+ service_instance=storage.service_instance.list()[0]
)
if ip:
kwargs['runtime_properties']['ip'] = ip
@@ -739,27 +665,26 @@ class TestNodeInstanceIP(object):
kwargs['host_fk'] = 1
elif host_fk:
kwargs['host_fk'] = host_fk
- node_instance = NodeInstance(**kwargs)
- storage.node_instance.put(node_instance)
+ node_instance = Node(**kwargs)
+ storage.node.put(node_instance)
return node_instance
+@pytest.mark.skip('Should be reworked into relationship')
class TestRelationshipInstance(object):
- def test_relatiship_instance_model_creation(self, node_instances_storage):
- def test_relatiship_instance_model_creation(self, node_instances_storage):
+ def test_relationship_instance_model_creation(self, nodes_storage):
relationship = mock.models.get_relationship(
- source=node_instances_storage.node.get_by_name(mock.models.DEPENDENT_NODE_NAME),
- target=node_instances_storage.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+ target=nodes_storage.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
)
- node_instances_storage.relationship.put(relationship)
- node_instances = node_instances_storage.node_instance
+ nodes_storage.relationship.put(relationship)
+ node_instances = nodes_storage.node
source_node_instance = node_instances.get_by_name(mock.models.DEPENDENT_NODE_INSTANCE_NAME)
target_node_instance = node_instances.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
relationship_instance = _test_model(
is_valid=True,
- storage=node_instances_storage,
- model_name='relationship_instance',
- model_cls=RelationshipInstance,
+ storage=nodes_storage,
+ model_cls=Relationship,
model_kwargs=dict(
relationship=relationship,
source_node_instance=source_node_instance,
@@ -827,7 +752,6 @@ class TestPlugin(object):
supported_platform, supported_py_versions, uploaded_at, wheels):
_test_model(is_valid=is_valid,
storage=empty_storage,
- model_name='plugin',
model_cls=Plugin,
model_kwargs=dict(
archive_name=archive_name,
@@ -881,7 +805,6 @@ class TestTask(object):
task = _test_model(
is_valid=is_valid,
storage=execution_storage,
- model_name='task',
model_cls=Task,
model_kwargs=dict(
status=status,
@@ -894,7 +817,7 @@ class TestTask(object):
retry_interval=retry_interval,
ignore_failure=ignore_failure,
name=name,
- operation_mapping=operation_mapping,
+ implementation=operation_mapping,
inputs=inputs,
plugin_fk=plugin_id,
))
@@ -907,7 +830,7 @@ class TestTask(object):
def create_task(max_attempts):
Task(execution_fk='eid',
name='name',
- operation_mapping='',
+ implementation='',
inputs={},
max_attempts=max_attempts)
create_task(max_attempts=1)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b6193359/tests/storage/test_structures.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_structures.py b/tests/storage/test_structures.py
index 4127905..30f0064 100644
--- a/tests/storage/test_structures.py
+++ b/tests/storage/test_structures.py
@@ -20,23 +20,23 @@ import sqlalchemy
from aria.storage import (
ModelStorage,
sql_mapi,
- model,
+ exceptions,
type,
- exceptions
+ modeling,
)
from ..storage import release_sqlite_storage, structure, init_inmemory_model_storage
from . import MockModel
from ..mock import (
models,
- operations,
context as mock_context
)
@pytest.fixture
def storage():
- base_storage = ModelStorage(sql_mapi.SQLAlchemyModelAPI, initiator=init_inmemory_model_storage)
+ base_storage = ModelStorage(sql_mapi.SQLAlchemyModelAPI,
+ initiator=init_inmemory_model_storage)
base_storage.register(MockModel)
yield base_storage
release_sqlite_storage(base_storage)
@@ -44,7 +44,7 @@ def storage():
@pytest.fixture(scope='module', autouse=True)
def module_cleanup():
- model.DeclarativeBase.metadata.remove(MockModel.__table__) #pylint: disable=no-member
+ modeling.model.aria_declarative_base.metadata.remove(MockModel.__table__) #pylint: disable=no-member
@pytest.fixture
@@ -89,144 +89,87 @@ def test_inner_list_update(storage):
def test_model_to_dict(context):
- deployment = context.deployment
- deployment_dict = deployment.to_dict()
+ service_instance = context.service_instance
+ service_instance = service_instance.to_dict()
expected_keys = [
- 'created_at',
'description',
- 'inputs',
- 'groups',
+ '_metadata',
+ 'created_at',
'permalink',
'policy_triggers',
'policy_types',
- 'outputs',
'scaling_groups',
'updated_at',
'workflows',
- 'blueprint_name',
]
for expected_key in expected_keys:
- assert expected_key in deployment_dict
-
- assert 'blueprint_fk' not in deployment_dict
+ assert expected_key in service_instance
def test_relationship_model_ordering(context):
- deployment = context.model.deployment.get_by_name(models.DEPLOYMENT_NAME)
- source_node = context.model.node.get_by_name(models.DEPENDENT_NODE_NAME)
- source_node_instance = context.model.node_instance.get_by_name(
- models.DEPENDENT_NODE_INSTANCE_NAME)
- target_node = context.model.node.get_by_name(models.DEPENDENCY_NODE_NAME)
- target_node_instance = context.model.node_instance.get_by_name(
- models.DEPENDENCY_NODE_INSTANCE_NAME)
- new_node = model.Node(
+ service_instance = context.model.service_instance.get_by_name(models.DEPLOYMENT_NAME)
+ source_node = context.model.node.get_by_name(models.DEPENDENT_NODE_INSTANCE_NAME)
+ target_node = context.model.node.get_by_name(models.DEPENDENCY_NODE_INSTANCE_NAME)
+ new_node_template = modeling.model.NodeTemplate(
name='new_node',
- type='test_node_type',
- type_hierarchy=[],
- number_of_instances=1,
- planned_number_of_instances=1,
- deploy_number_of_instances=1,
- properties={},
- operations=dict((key, {}) for key in operations.NODE_OPERATIONS),
- min_number_of_instances=1,
- max_number_of_instances=1,
- deployment=deployment
- )
- source_to_new_relationship = model.Relationship(
- source_node=source_node,
- target_node=new_node,
- source_interfaces={},
- source_operations=dict((key, {}) for key in operations.RELATIONSHIP_OPERATIONS),
- target_interfaces={},
- target_operations=dict((key, {}) for key in operations.RELATIONSHIP_OPERATIONS),
- type='rel_type',
+ type_name='test_node_type',
type_hierarchy=[],
- properties={},
+ default_instances=1,
+ min_instances=1,
+ max_instances=1,
+ service_template=service_instance.service_template
)
- new_node_instance = model.NodeInstance(
+ new_node = modeling.model.Node(
name='new_node_instance',
runtime_properties={},
+ service_instance=service_instance,
version=None,
- node=new_node,
+ node_template=new_node_template,
state='',
scaling_groups=[]
)
- source_to_new_relationship_instance = model.RelationshipInstance(
- relationship=source_to_new_relationship,
- source_node_instance=source_node_instance,
- target_node_instance=new_node_instance,
+
+ source_to_new_relationship = modeling.model.Relationship(
+ target_node=new_node,
+ source_node=source_node,
)
- new_to_target_relationship = model.Relationship(
+ new_to_target_relationship = modeling.model.Relationship(
source_node=new_node,
target_node=target_node,
- source_interfaces={},
- source_operations=dict((key, {}) for key in operations.RELATIONSHIP_OPERATIONS),
- target_interfaces={},
- target_operations=dict((key, {}) for key in operations.RELATIONSHIP_OPERATIONS),
- type='rel_type',
- type_hierarchy=[],
- properties={},
- )
- new_to_target_relationship_instance = model.RelationshipInstance(
- relationship=new_to_target_relationship,
- source_node_instance=new_node_instance,
- target_node_instance=target_node_instance,
)
+ context.model.node_template.put(new_node_template)
context.model.node.put(new_node)
- context.model.node_instance.put(new_node_instance)
context.model.relationship.put(source_to_new_relationship)
context.model.relationship.put(new_to_target_relationship)
- context.model.relationship_instance.put(source_to_new_relationship_instance)
- context.model.relationship_instance.put(new_to_target_relationship_instance)
- def flip_and_assert(node_instance, direction):
+ def flip_and_assert(node, direction):
"""
Reversed the order of relationships and assert effects took place.
- :param node_instance: the node instance to operatate on
+ :param node: the node instance to operate on
:param direction: the type of relationships to flip (inbound/outbount)
:return:
"""
assert direction in ('inbound', 'outbound')
- relationships = getattr(node_instance.node, direction + '_relationships')
- relationship_instances = getattr(node_instance, direction + '_relationship_instances')
+ relationships = getattr(node, direction + '_relationships').all()
assert len(relationships) == 2
- assert len(relationship_instances) == 2
-
- first_rel, second_rel = relationships
- first_rel_instance, second_rel_instance = relationship_instances
- assert getattr(first_rel, relationships.ordering_attr) == 0
- assert getattr(second_rel, relationships.ordering_attr) == 1
- assert getattr(first_rel_instance, relationship_instances.ordering_attr) == 0
- assert getattr(second_rel_instance, relationship_instances.ordering_attr) == 1
-
- reversed_relationships = list(reversed(relationships))
- reversed_relationship_instances = list(reversed(relationship_instances))
-
- assert relationships != reversed_relationships
- assert relationship_instances != reversed_relationship_instances
-
- relationships[:] = reversed_relationships
- relationship_instances[:] = reversed_relationship_instances
- context.model.node_instance.update(node_instance)
- assert relationships == reversed_relationships
- assert relationship_instances == reversed_relationship_instances
+ reversed_relationship_instances = list(reversed(relationships))
+ assert relationships != reversed_relationship_instances
- assert getattr(first_rel, relationships.ordering_attr) == 1
- assert getattr(second_rel, relationships.ordering_attr) == 0
- assert getattr(first_rel_instance, relationship_instances.ordering_attr) == 1
- assert getattr(second_rel_instance, relationship_instances.ordering_attr) == 0
+ relationships[:] = reversed_relationship_instances
+ context.model.node.update(node)
+ assert relationships == reversed_relationship_instances
- flip_and_assert(source_node_instance, 'outbound')
- flip_and_assert(target_node_instance, 'inbound')
+ flip_and_assert(source_node, 'outbound')
+ flip_and_assert(target_node, 'inbound')
-class StrictClass(model.DeclarativeBase, structure.ModelMixin):
+class StrictClass(modeling.model.aria_declarative_base, structure.ModelMixin):
__tablename__ = 'strict_class'
strict_dict = sqlalchemy.Column(type.StrictDict(basestring, basestring))