You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@ariatosca.apache.org by av...@apache.org on 2017/05/09 13:48:00 UTC
[3/4] incubator-ariatosca git commit: ARIA-160 Tests fail sporadically
over parameter binding
ARIA-160 Tests fail sporadically over parameter binding
Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/16fcca45
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/16fcca45
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/16fcca45
Branch: refs/heads/ARIA-210-handle-relative-paths-in-cli-service-templates
Commit: 16fcca45f60f81261757c9fee8f61782c479e30e
Parents: 1febf80
Author: max-orlov <ma...@gigaspaces.com>
Authored: Mon May 8 14:46:58 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Tue May 9 14:07:24 2017 +0300
----------------------------------------------------------------------
aria/orchestrator/workflows/core/engine.py | 3 +-
tests/helpers.py | 23 +++--
tests/orchestrator/context/test_operation.py | 107 ++++++++++++----------
tests/orchestrator/context/test_toolbelt.py | 40 ++++----
tests/storage/__init__.py | 3 +-
5 files changed, 92 insertions(+), 84 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16fcca45/aria/orchestrator/workflows/core/engine.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/engine.py b/aria/orchestrator/workflows/core/engine.py
index fd0dd6d..561265c 100644
--- a/aria/orchestrator/workflows/core/engine.py
+++ b/aria/orchestrator/workflows/core/engine.py
@@ -42,10 +42,9 @@ class Engine(logger.LoggerMixin):
super(Engine, self).__init__(**kwargs)
self._workflow_context = workflow_context
self._execution_graph = networkx.DiGraph()
- self._executor = executor
translation.build_execution_graph(task_graph=tasks_graph,
execution_graph=self._execution_graph,
- default_executor=self._executor)
+ default_executor=executor)
def execute(self):
"""
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16fcca45/tests/helpers.py
----------------------------------------------------------------------
diff --git a/tests/helpers.py b/tests/helpers.py
index 423e63f..3c3efc9 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -14,7 +14,6 @@
# limitations under the License.
import os
-import tempfile
import json
from . import ROOT_DIR
@@ -34,16 +33,19 @@ def get_service_template_uri(*args):
class FilesystemDataHolder(object):
- _tmpfile = tempfile.NamedTemporaryFile('w')
+
+ def __init__(self, path, reset=False):
+ self._path = path
+ if reset or not os.path.exists(self._path) or open(self._path).read() == '':
+ self._dump({})
def _load(self):
- return json.load(open(self._tmpfile.name))
+ with open(self._path) as f:
+ return json.load(f)
def _dump(self, value):
- return json.dump(value, open(self._tmpfile.name, 'w'))
-
- def __init__(self):
- self.clear()
+ with open(self._path, 'w') as f:
+ return json.dump(value, f)
def __setitem__(self, key, value):
dict_ = self._load()
@@ -56,9 +58,6 @@ class FilesystemDataHolder(object):
def __iter__(self):
return iter(self._load())
- def clear(self):
- self._dump({})
-
def get(self, item, default=None):
return self._load().get(item, default)
@@ -67,3 +66,7 @@ class FilesystemDataHolder(object):
return_value = dict_.setdefault(key, value)
self._dump(dict_)
return return_value
+
+ @property
+ def path(self):
+ return self._path
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16fcca45/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index 3180d89..cdeb5fa 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -38,7 +38,6 @@ from . import (
execute,
)
-global_test_holder = helpers.FilesystemDataHolder()
@pytest.fixture
def ctx(tmpdir):
@@ -68,11 +67,18 @@ def thread_executor():
ex.close()
-def test_node_operation_task_execution(ctx, thread_executor):
+@pytest.fixture
+def dataholder(tmpdir):
+ dataholder_path = str(tmpdir.join('dataholder'))
+ holder = helpers.FilesystemDataHolder(dataholder_path)
+ return holder
+
+
+def test_node_operation_task_execution(ctx, thread_executor, dataholder):
interface_name = 'Standard'
operation_name = 'create'
- inputs = {'putput': True}
+ inputs = {'putput': True, 'holder_path': dataholder.path}
node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
interface = mock.models.create_interface(
node.service,
@@ -97,11 +103,11 @@ def test_node_operation_task_execution(ctx, thread_executor):
execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
- assert global_test_holder['ctx_name'] == context.operation.NodeOperationContext.__name__
+ assert dataholder['ctx_name'] == context.operation.NodeOperationContext.__name__
# Task bases assertions
- assert global_test_holder['actor_name'] == node.name
- assert global_test_holder['task_name'] == api.task.OperationTask.NAME_FORMAT.format(
+ assert dataholder['actor_name'] == node.name
+ assert dataholder['task_name'] == api.task.OperationTask.NAME_FORMAT.format(
type='node',
name=node.name,
interface=interface_name,
@@ -109,19 +115,19 @@ def test_node_operation_task_execution(ctx, thread_executor):
)
operations = interface.operations
assert len(operations) == 1
- assert global_test_holder['implementation'] == operations.values()[0].implementation # pylint: disable=no-member
- assert global_test_holder['inputs']['putput'] is True
+ assert dataholder['implementation'] == operations.values()[0].implementation # pylint: disable=no-member
+ assert dataholder['inputs']['putput'] is True
# Context based attributes (sugaring)
- assert global_test_holder['template_name'] == node.node_template.name
- assert global_test_holder['node_name'] == node.name
+ assert dataholder['template_name'] == node.node_template.name
+ assert dataholder['node_name'] == node.name
-def test_relationship_operation_task_execution(ctx, thread_executor):
+def test_relationship_operation_task_execution(ctx, thread_executor, dataholder):
interface_name = 'Configure'
operation_name = 'post_configure'
- inputs = {'putput': True}
+ inputs = {'putput': True, 'holder_path': dataholder.path}
relationship = ctx.model.relationship.list()[0]
interface = mock.models.create_interface(
relationship.source_node.service,
@@ -148,14 +154,14 @@ def test_relationship_operation_task_execution(ctx, thread_executor):
execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
- assert global_test_holder['ctx_name'] == context.operation.RelationshipOperationContext.__name__
+ assert dataholder['ctx_name'] == context.operation.RelationshipOperationContext.__name__
# Task bases assertions
- assert global_test_holder['actor_name'] == relationship.name
- assert interface_name in global_test_holder['task_name']
+ assert dataholder['actor_name'] == relationship.name
+ assert interface_name in dataholder['task_name']
operations = interface.operations
- assert global_test_holder['implementation'] == operations.values()[0].implementation # pylint: disable=no-member
- assert global_test_holder['inputs']['putput'] is True
+ assert dataholder['implementation'] == operations.values()[0].implementation # pylint: disable=no-member
+ assert dataholder['inputs']['putput'] is True
# Context based attributes (sugaring)
dependency_node_template = ctx.model.node_template.get_by_name(
@@ -165,14 +171,14 @@ def test_relationship_operation_task_execution(ctx, thread_executor):
mock.models.DEPENDENT_NODE_TEMPLATE_NAME)
dependent_node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
- assert global_test_holder['target_node_template_name'] == dependency_node_template.name
- assert global_test_holder['target_node_name'] == dependency_node.name
- assert global_test_holder['relationship_name'] == relationship.name
- assert global_test_holder['source_node_template_name'] == dependent_node_template.name
- assert global_test_holder['source_node_name'] == dependent_node.name
+ assert dataholder['target_node_template_name'] == dependency_node_template.name
+ assert dataholder['target_node_name'] == dependency_node.name
+ assert dataholder['relationship_name'] == relationship.name
+ assert dataholder['source_node_template_name'] == dependent_node_template.name
+ assert dataholder['source_node_name'] == dependent_node.name
-def test_invalid_task_operation_id(ctx, thread_executor):
+def test_invalid_task_operation_id(ctx, thread_executor, dataholder):
"""
Checks that the right id is used. The task created with id == 1, thus running the task on
node with id == 2. will check that indeed the node uses the correct id.
@@ -191,7 +197,8 @@ def test_invalid_task_operation_id(ctx, thread_executor):
node.service,
interface_name=interface_name,
operation_name=operation_name,
- operation_kwargs=dict(implementation=op_path(get_node_id, module_path=__name__))
+ operation_kwargs=dict(implementation=op_path(get_node_id, module_path=__name__),
+ inputs={'holder_path': dataholder.path})
)
node.interfaces[interface.name] = interface
ctx.model.node.update(node)
@@ -202,12 +209,13 @@ def test_invalid_task_operation_id(ctx, thread_executor):
api.task.OperationTask(
node,
interface_name=interface_name,
- operation_name=operation_name)
+ operation_name=operation_name,
+ )
)
execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
- op_node_id = global_test_holder[api.task.OperationTask.NAME_FORMAT.format(
+ op_node_id = dataholder[api.task.OperationTask.NAME_FORMAT.format(
type='node',
name=node.name,
interface=interface_name,
@@ -376,42 +384,41 @@ def logged_operation(ctx, **_):
@operation
-def basic_node_operation(ctx, **_):
- operation_common(ctx)
- global_test_holder['template_name'] = ctx.node_template.name
- global_test_holder['node_name'] = ctx.node.name
+def basic_node_operation(ctx, holder_path, **_):
+ holder = helpers.FilesystemDataHolder(holder_path)
+
+ operation_common(ctx, holder)
+ holder['template_name'] = ctx.node_template.name
+ holder['node_name'] = ctx.node.name
@operation
-def basic_relationship_operation(ctx, **_):
- operation_common(ctx)
- global_test_holder['target_node_template_name'] = ctx.target_node_template.name
- global_test_holder['target_node_name'] = ctx.target_node.name
- global_test_holder['relationship_name'] = ctx.relationship.name
- global_test_holder['source_node_template_name'] = ctx.source_node_template.name
- global_test_holder['source_node_name'] = ctx.source_node.name
+def basic_relationship_operation(ctx, holder_path, **_):
+ holder = helpers.FilesystemDataHolder(holder_path)
+
+ operation_common(ctx, holder)
+ holder['target_node_template_name'] = ctx.target_node_template.name
+ holder['target_node_name'] = ctx.target_node.name
+ holder['relationship_name'] = ctx.relationship.name
+ holder['source_node_template_name'] = ctx.source_node_template.name
+ holder['source_node_name'] = ctx.source_node.name
-def operation_common(ctx):
- global_test_holder['ctx_name'] = ctx.__class__.__name__
+def operation_common(ctx, holder):
+ holder['ctx_name'] = ctx.__class__.__name__
- global_test_holder['actor_name'] = ctx.task.actor.name
- global_test_holder['task_name'] = ctx.task.name
- global_test_holder['implementation'] = ctx.task.implementation
- global_test_holder['inputs'] = dict(i.unwrap() for i in ctx.task.inputs.values())
+ holder['actor_name'] = ctx.task.actor.name
+ holder['task_name'] = ctx.task.name
+ holder['implementation'] = ctx.task.implementation
+ holder['inputs'] = dict(i.unwrap() for i in ctx.task.inputs.values())
@operation
-def get_node_id(ctx, **_):
- global_test_holder[ctx.name] = ctx.node.id
+def get_node_id(ctx, holder_path, **_):
+ helpers.FilesystemDataHolder(holder_path)[ctx.name] = ctx.node.id
@operation
def _test_plugin_workdir(ctx, filename, content):
with open(os.path.join(ctx.plugin_workdir, filename), 'w') as f:
f.write(content)
-
-
-@pytest.fixture(autouse=True)
-def cleanup():
- global_test_holder.clear()
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16fcca45/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index 822ac5a..d199954 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -30,12 +30,10 @@ from . import (
execute,
)
-global_test_holder = helpers.FilesystemDataHolder()
-
@pytest.fixture
def workflow_context(tmpdir):
- context = mock.context.simple(str(tmpdir), inmemory=True)
+ context = mock.context.simple(str(tmpdir))
yield context
storage.release_sqlite_storage(context.model)
@@ -49,6 +47,13 @@ def executor():
result.close()
+@pytest.fixture
+def dataholder(tmpdir):
+ dataholder_path = str(tmpdir.join('dataholder'))
+ holder = helpers.FilesystemDataHolder(dataholder_path)
+ return holder
+
+
def _get_elements(workflow_context):
dependency_node_template = workflow_context.model.node_template.get_by_name(
mock.models.DEPENDENCY_NODE_TEMPLATE_NAME)
@@ -75,17 +80,17 @@ def _get_elements(workflow_context):
relationship
-def test_host_ip(workflow_context, executor):
+def test_host_ip(workflow_context, executor, dataholder):
+
interface_name = 'Standard'
operation_name = 'create'
_, dependency_node, _, _, _ = _get_elements(workflow_context)
- inputs = {'putput': True}
+ inputs = {'putput': True, 'holder_path': dataholder.path}
interface = mock.models.create_interface(
dependency_node.service,
interface_name=interface_name,
operation_name=operation_name,
- operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__),
- inputs=inputs)
+ operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__), inputs=inputs)
)
dependency_node.interfaces[interface.name] = interface
dependency_node.runtime_properties['ip'] = '1.1.1.1'
@@ -105,14 +110,14 @@ def test_host_ip(workflow_context, executor):
execute(workflow_func=basic_workflow, workflow_context=workflow_context, executor=executor)
- assert global_test_holder.get('host_ip') == dependency_node.runtime_properties.get('ip')
+ assert dataholder.get('host_ip') == dependency_node.runtime_properties.get('ip')
-def test_relationship_tool_belt(workflow_context, executor):
+def test_relationship_tool_belt(workflow_context, executor, dataholder):
interface_name = 'Configure'
operation_name = 'post_configure'
_, _, _, _, relationship = _get_elements(workflow_context)
- inputs = {'putput': True}
+ inputs = {'putput': True, 'holder_path': dataholder.path}
interface = mock.models.create_interface(
relationship.source_node.service,
interface_name=interface_name,
@@ -136,7 +141,7 @@ def test_relationship_tool_belt(workflow_context, executor):
execute(workflow_func=basic_workflow, workflow_context=workflow_context, executor=executor)
- assert global_test_holder.get(api.task.OperationTask.NAME_FORMAT.format(
+ assert dataholder.get(api.task.OperationTask.NAME_FORMAT.format(
type='relationship',
name=relationship.name,
interface=interface_name,
@@ -149,15 +154,10 @@ def test_wrong_model_toolbelt():
@operation(toolbelt=True)
-def host_ip(toolbelt, **_):
- global_test_holder['host_ip'] = toolbelt.host_ip
+def host_ip(toolbelt, holder_path, **_):
+ helpers.FilesystemDataHolder(holder_path)['host_ip'] = toolbelt.host_ip
@operation(toolbelt=True)
-def relationship_operation(ctx, toolbelt, **_):
- global_test_holder[ctx.name] = toolbelt._op_context.source_node.name
-
-
-@pytest.fixture(autouse=True)
-def cleanup():
- global_test_holder.clear()
+def relationship_operation(ctx, toolbelt, holder_path, **_):
+ helpers.FilesystemDataHolder(holder_path)[ctx.name] = toolbelt._op_context.source_node.name
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16fcca45/tests/storage/__init__.py
----------------------------------------------------------------------
diff --git a/tests/storage/__init__.py b/tests/storage/__init__.py
index 66424db..8ca1480 100644
--- a/tests/storage/__init__.py
+++ b/tests/storage/__init__.py
@@ -51,6 +51,5 @@ def init_inmemory_model_storage():
engine = create_engine(uri, **engine_kwargs)
session_factory = orm.sessionmaker(bind=engine)
- session = orm.scoped_session(session_factory=session_factory)
- return dict(engine=engine, session=session)
+ return dict(engine=engine, session=session_factory())