Posted to dev@ariatosca.apache.org by em...@apache.org on 2017/05/09 20:37:50 UTC

[1/9] incubator-ariatosca git commit: ARIA-156 Better handle exceptions in the process executor [Forced Update!]

Repository: incubator-ariatosca
Updated Branches:
  refs/heads/ARIA-140-version-utils 658da4eb7 -> eae44d0b0 (forced update)


ARIA-156 Better handle exceptions in the process executor

Previously, if an exception was raised while a task was being started,
the task's process would block forever waiting to receive a message.

The exception prevented the 'listener thread' from sending a response to
the task's process, because it was not handled inside the listener
thread's 'with' block.

The first change wraps the yielding of the message and the response in a
try-except-finally block, so that the exception is handled within the
'with' scope and a response is always sent to the task's process.

The second change moves the sending of the 'task started' message in the
task's process into the scope where an exception is handled by sending a
'task failed' message back to the listener thread.
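
The resulting pattern is easiest to see in isolation. The following is a
minimal sketch (not the ARIA code itself) of a listener-side context manager
that always replies to its peer, assuming a hypothetical connection object
with recv()/send() methods:

    import contextlib

    @contextlib.contextmanager
    def handle_request(connection):
        # Receive the request and hand it to the caller together with an
        # empty response dict that the caller is expected to fill in.
        message = connection.recv()   # hypothetical transport call
        response = {}
        try:
            yield message, response
        except BaseException as e:
            # Record the failure for the peer, then let the caller see it too.
            response['exception'] = repr(e)
            raise
        finally:
            # The peer is never left blocked on a missing reply.
            connection.send(response)  # hypothetical transport call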


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/1cb3086f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/1cb3086f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/1cb3086f

Branch: refs/heads/ARIA-140-version-utils
Commit: 1cb3086f38bc35a6a5e588aa5e340cb4fa5cacf5
Parents: 8553977
Author: Avia Efrat <av...@gigaspaces.com>
Authored: Sun May 7 11:42:58 2017 +0300
Committer: Avia Efrat <av...@gigaspaces.com>
Committed: Sun May 7 14:44:01 2017 +0300

----------------------------------------------------------------------
 aria/orchestrator/workflows/executor/process.py | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1cb3086f/aria/orchestrator/workflows/executor/process.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py
index 2378e0a..8481406 100644
--- a/aria/orchestrator/workflows/executor/process.py
+++ b/aria/orchestrator/workflows/executor/process.py
@@ -213,8 +213,13 @@ class ProcessExecutor(base.BaseExecutor):
         with contextlib.closing(self._server_socket.accept()[0]) as connection:
             message = _recv_message(connection)
             response = {}
-            yield message, response
-            _send_message(connection, response)
+            try:
+                yield message, response
+            except BaseException as e:
+                response['exception'] = exceptions.wrap_if_needed(e)
+                raise
+            finally:
+                _send_message(connection, response)
 
     def _handle_task_started_request(self, task_id, **kwargs):
         self._task_started(self._tasks[task_id])
@@ -378,7 +383,6 @@ def _main():
     task_id = arguments['task_id']
     port = arguments['port']
     messenger = _Messenger(task_id=task_id, port=port)
-    messenger.started()
 
     implementation = arguments['implementation']
     operation_inputs = arguments['operation_inputs']
@@ -390,6 +394,7 @@ def _main():
 
     with instrumentation.track_changes() as instrument:
         try:
+            messenger.started()
             ctx = context_dict['context_cls'].deserialize_from_dict(**context_dict['context'])
             _patch_session(ctx=ctx, messenger=messenger, instrument=instrument)
             task_func = imports.load_attribute(implementation)


[7/9] incubator-ariatosca git commit: ARIA-160 Tests fail sporadically over parameter binding

Posted by em...@apache.org.
ARIA-160 Tests fail sporadically over parameter binding


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/16fcca45
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/16fcca45
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/16fcca45

Branch: refs/heads/ARIA-140-version-utils
Commit: 16fcca45f60f81261757c9fee8f61782c479e30e
Parents: 1febf80
Author: max-orlov <ma...@gigaspaces.com>
Authored: Mon May 8 14:46:58 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Tue May 9 14:07:24 2017 +0300

----------------------------------------------------------------------
 aria/orchestrator/workflows/core/engine.py   |   3 +-
 tests/helpers.py                             |  23 +++--
 tests/orchestrator/context/test_operation.py | 107 ++++++++++++----------
 tests/orchestrator/context/test_toolbelt.py  |  40 ++++----
 tests/storage/__init__.py                    |   3 +-
 5 files changed, 92 insertions(+), 84 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16fcca45/aria/orchestrator/workflows/core/engine.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/engine.py b/aria/orchestrator/workflows/core/engine.py
index fd0dd6d..561265c 100644
--- a/aria/orchestrator/workflows/core/engine.py
+++ b/aria/orchestrator/workflows/core/engine.py
@@ -42,10 +42,9 @@ class Engine(logger.LoggerMixin):
         super(Engine, self).__init__(**kwargs)
         self._workflow_context = workflow_context
         self._execution_graph = networkx.DiGraph()
-        self._executor = executor
         translation.build_execution_graph(task_graph=tasks_graph,
                                           execution_graph=self._execution_graph,
-                                          default_executor=self._executor)
+                                          default_executor=executor)
 
     def execute(self):
         """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16fcca45/tests/helpers.py
----------------------------------------------------------------------
diff --git a/tests/helpers.py b/tests/helpers.py
index 423e63f..3c3efc9 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -14,7 +14,6 @@
 # limitations under the License.
 
 import os
-import tempfile
 import json
 
 from . import ROOT_DIR
@@ -34,16 +33,19 @@ def get_service_template_uri(*args):
 
 
 class FilesystemDataHolder(object):
-    _tmpfile = tempfile.NamedTemporaryFile('w')
+
+    def __init__(self, path, reset=False):
+        self._path = path
+        if reset or not os.path.exists(self._path) or open(self._path).read() == '':
+            self._dump({})
 
     def _load(self):
-        return json.load(open(self._tmpfile.name))
+        with open(self._path) as f:
+            return json.load(f)
 
     def _dump(self, value):
-        return json.dump(value, open(self._tmpfile.name, 'w'))
-
-    def __init__(self):
-        self.clear()
+        with open(self._path, 'w') as f:
+            return json.dump(value, f)
 
     def __setitem__(self, key, value):
         dict_ = self._load()
@@ -56,9 +58,6 @@ class FilesystemDataHolder(object):
     def __iter__(self):
         return iter(self._load())
 
-    def clear(self):
-        self._dump({})
-
     def get(self, item, default=None):
         return self._load().get(item, default)
 
@@ -67,3 +66,7 @@ class FilesystemDataHolder(object):
         return_value = dict_.setdefault(key, value)
         self._dump(dict_)
         return return_value
+
+    @property
+    def path(self):
+        return self._path

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16fcca45/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index 3180d89..cdeb5fa 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -38,7 +38,6 @@ from . import (
     execute,
 )
 
-global_test_holder = helpers.FilesystemDataHolder()
 
 @pytest.fixture
 def ctx(tmpdir):
@@ -68,11 +67,18 @@ def thread_executor():
         ex.close()
 
 
-def test_node_operation_task_execution(ctx, thread_executor):
+@pytest.fixture
+def dataholder(tmpdir):
+    dataholder_path = str(tmpdir.join('dataholder'))
+    holder = helpers.FilesystemDataHolder(dataholder_path)
+    return holder
+
+
+def test_node_operation_task_execution(ctx, thread_executor, dataholder):
     interface_name = 'Standard'
     operation_name = 'create'
 
-    inputs = {'putput': True}
+    inputs = {'putput': True, 'holder_path': dataholder.path}
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
     interface = mock.models.create_interface(
         node.service,
@@ -97,11 +103,11 @@ def test_node_operation_task_execution(ctx, thread_executor):
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
 
-    assert global_test_holder['ctx_name'] == context.operation.NodeOperationContext.__name__
+    assert dataholder['ctx_name'] == context.operation.NodeOperationContext.__name__
 
     # Task bases assertions
-    assert global_test_holder['actor_name'] == node.name
-    assert global_test_holder['task_name'] == api.task.OperationTask.NAME_FORMAT.format(
+    assert dataholder['actor_name'] == node.name
+    assert dataholder['task_name'] == api.task.OperationTask.NAME_FORMAT.format(
         type='node',
         name=node.name,
         interface=interface_name,
@@ -109,19 +115,19 @@ def test_node_operation_task_execution(ctx, thread_executor):
     )
     operations = interface.operations
     assert len(operations) == 1
-    assert global_test_holder['implementation'] == operations.values()[0].implementation             # pylint: disable=no-member
-    assert global_test_holder['inputs']['putput'] is True
+    assert dataholder['implementation'] == operations.values()[0].implementation             # pylint: disable=no-member
+    assert dataholder['inputs']['putput'] is True
 
     # Context based attributes (sugaring)
-    assert global_test_holder['template_name'] == node.node_template.name
-    assert global_test_holder['node_name'] == node.name
+    assert dataholder['template_name'] == node.node_template.name
+    assert dataholder['node_name'] == node.name
 
 
-def test_relationship_operation_task_execution(ctx, thread_executor):
+def test_relationship_operation_task_execution(ctx, thread_executor, dataholder):
     interface_name = 'Configure'
     operation_name = 'post_configure'
 
-    inputs = {'putput': True}
+    inputs = {'putput': True, 'holder_path': dataholder.path}
     relationship = ctx.model.relationship.list()[0]
     interface = mock.models.create_interface(
         relationship.source_node.service,
@@ -148,14 +154,14 @@ def test_relationship_operation_task_execution(ctx, thread_executor):
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
 
-    assert global_test_holder['ctx_name'] == context.operation.RelationshipOperationContext.__name__
+    assert dataholder['ctx_name'] == context.operation.RelationshipOperationContext.__name__
 
     # Task bases assertions
-    assert global_test_holder['actor_name'] == relationship.name
-    assert interface_name in global_test_holder['task_name']
+    assert dataholder['actor_name'] == relationship.name
+    assert interface_name in dataholder['task_name']
     operations = interface.operations
-    assert global_test_holder['implementation'] == operations.values()[0].implementation           # pylint: disable=no-member
-    assert global_test_holder['inputs']['putput'] is True
+    assert dataholder['implementation'] == operations.values()[0].implementation           # pylint: disable=no-member
+    assert dataholder['inputs']['putput'] is True
 
     # Context based attributes (sugaring)
     dependency_node_template = ctx.model.node_template.get_by_name(
@@ -165,14 +171,14 @@ def test_relationship_operation_task_execution(ctx, thread_executor):
         mock.models.DEPENDENT_NODE_TEMPLATE_NAME)
     dependent_node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
 
-    assert global_test_holder['target_node_template_name'] == dependency_node_template.name
-    assert global_test_holder['target_node_name'] == dependency_node.name
-    assert global_test_holder['relationship_name'] == relationship.name
-    assert global_test_holder['source_node_template_name'] == dependent_node_template.name
-    assert global_test_holder['source_node_name'] == dependent_node.name
+    assert dataholder['target_node_template_name'] == dependency_node_template.name
+    assert dataholder['target_node_name'] == dependency_node.name
+    assert dataholder['relationship_name'] == relationship.name
+    assert dataholder['source_node_template_name'] == dependent_node_template.name
+    assert dataholder['source_node_name'] == dependent_node.name
 
 
-def test_invalid_task_operation_id(ctx, thread_executor):
+def test_invalid_task_operation_id(ctx, thread_executor, dataholder):
     """
     Checks that the right id is used. The task created with id == 1, thus running the task on
     node with id == 2. will check that indeed the node uses the correct id.
@@ -191,7 +197,8 @@ def test_invalid_task_operation_id(ctx, thread_executor):
         node.service,
         interface_name=interface_name,
         operation_name=operation_name,
-        operation_kwargs=dict(implementation=op_path(get_node_id, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(get_node_id, module_path=__name__),
+                              inputs={'holder_path': dataholder.path})
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
@@ -202,12 +209,13 @@ def test_invalid_task_operation_id(ctx, thread_executor):
             api.task.OperationTask(
                 node,
                 interface_name=interface_name,
-                operation_name=operation_name)
+                operation_name=operation_name,
+            )
         )
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
 
-    op_node_id = global_test_holder[api.task.OperationTask.NAME_FORMAT.format(
+    op_node_id = dataholder[api.task.OperationTask.NAME_FORMAT.format(
         type='node',
         name=node.name,
         interface=interface_name,
@@ -376,42 +384,41 @@ def logged_operation(ctx, **_):
 
 
 @operation
-def basic_node_operation(ctx, **_):
-    operation_common(ctx)
-    global_test_holder['template_name'] = ctx.node_template.name
-    global_test_holder['node_name'] = ctx.node.name
+def basic_node_operation(ctx, holder_path, **_):
+    holder = helpers.FilesystemDataHolder(holder_path)
+
+    operation_common(ctx, holder)
+    holder['template_name'] = ctx.node_template.name
+    holder['node_name'] = ctx.node.name
 
 
 @operation
-def basic_relationship_operation(ctx, **_):
-    operation_common(ctx)
-    global_test_holder['target_node_template_name'] = ctx.target_node_template.name
-    global_test_holder['target_node_name'] = ctx.target_node.name
-    global_test_holder['relationship_name'] = ctx.relationship.name
-    global_test_holder['source_node_template_name'] = ctx.source_node_template.name
-    global_test_holder['source_node_name'] = ctx.source_node.name
+def basic_relationship_operation(ctx, holder_path, **_):
+    holder = helpers.FilesystemDataHolder(holder_path)
+
+    operation_common(ctx, holder)
+    holder['target_node_template_name'] = ctx.target_node_template.name
+    holder['target_node_name'] = ctx.target_node.name
+    holder['relationship_name'] = ctx.relationship.name
+    holder['source_node_template_name'] = ctx.source_node_template.name
+    holder['source_node_name'] = ctx.source_node.name
 
 
-def operation_common(ctx):
-    global_test_holder['ctx_name'] = ctx.__class__.__name__
+def operation_common(ctx, holder):
+    holder['ctx_name'] = ctx.__class__.__name__
 
-    global_test_holder['actor_name'] = ctx.task.actor.name
-    global_test_holder['task_name'] = ctx.task.name
-    global_test_holder['implementation'] = ctx.task.implementation
-    global_test_holder['inputs'] = dict(i.unwrap() for i in ctx.task.inputs.values())
+    holder['actor_name'] = ctx.task.actor.name
+    holder['task_name'] = ctx.task.name
+    holder['implementation'] = ctx.task.implementation
+    holder['inputs'] = dict(i.unwrap() for i in ctx.task.inputs.values())
 
 
 @operation
-def get_node_id(ctx, **_):
-    global_test_holder[ctx.name] = ctx.node.id
+def get_node_id(ctx, holder_path, **_):
+    helpers.FilesystemDataHolder(holder_path)[ctx.name] = ctx.node.id
 
 
 @operation
 def _test_plugin_workdir(ctx, filename, content):
     with open(os.path.join(ctx.plugin_workdir, filename), 'w') as f:
         f.write(content)
-
-
-@pytest.fixture(autouse=True)
-def cleanup():
-    global_test_holder.clear()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16fcca45/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index 822ac5a..d199954 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -30,12 +30,10 @@ from . import (
     execute,
 )
 
-global_test_holder = helpers.FilesystemDataHolder()
-
 
 @pytest.fixture
 def workflow_context(tmpdir):
-    context = mock.context.simple(str(tmpdir), inmemory=True)
+    context = mock.context.simple(str(tmpdir))
     yield context
     storage.release_sqlite_storage(context.model)
 
@@ -49,6 +47,13 @@ def executor():
         result.close()
 
 
+@pytest.fixture
+def dataholder(tmpdir):
+    dataholder_path = str(tmpdir.join('dataholder'))
+    holder = helpers.FilesystemDataHolder(dataholder_path)
+    return holder
+
+
 def _get_elements(workflow_context):
     dependency_node_template = workflow_context.model.node_template.get_by_name(
         mock.models.DEPENDENCY_NODE_TEMPLATE_NAME)
@@ -75,17 +80,17 @@ def _get_elements(workflow_context):
         relationship
 
 
-def test_host_ip(workflow_context, executor):
+def test_host_ip(workflow_context, executor, dataholder):
+
     interface_name = 'Standard'
     operation_name = 'create'
     _, dependency_node, _, _, _ = _get_elements(workflow_context)
-    inputs = {'putput': True}
+    inputs = {'putput': True, 'holder_path': dataholder.path}
     interface = mock.models.create_interface(
         dependency_node.service,
         interface_name=interface_name,
         operation_name=operation_name,
-        operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__),
-                              inputs=inputs)
+        operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__), inputs=inputs)
     )
     dependency_node.interfaces[interface.name] = interface
     dependency_node.runtime_properties['ip'] = '1.1.1.1'
@@ -105,14 +110,14 @@ def test_host_ip(workflow_context, executor):
 
     execute(workflow_func=basic_workflow, workflow_context=workflow_context, executor=executor)
 
-    assert global_test_holder.get('host_ip') == dependency_node.runtime_properties.get('ip')
+    assert dataholder.get('host_ip') == dependency_node.runtime_properties.get('ip')
 
 
-def test_relationship_tool_belt(workflow_context, executor):
+def test_relationship_tool_belt(workflow_context, executor, dataholder):
     interface_name = 'Configure'
     operation_name = 'post_configure'
     _, _, _, _, relationship = _get_elements(workflow_context)
-    inputs = {'putput': True}
+    inputs = {'putput': True, 'holder_path': dataholder.path}
     interface = mock.models.create_interface(
         relationship.source_node.service,
         interface_name=interface_name,
@@ -136,7 +141,7 @@ def test_relationship_tool_belt(workflow_context, executor):
 
     execute(workflow_func=basic_workflow, workflow_context=workflow_context, executor=executor)
 
-    assert global_test_holder.get(api.task.OperationTask.NAME_FORMAT.format(
+    assert dataholder.get(api.task.OperationTask.NAME_FORMAT.format(
         type='relationship',
         name=relationship.name,
         interface=interface_name,
@@ -149,15 +154,10 @@ def test_wrong_model_toolbelt():
 
 
 @operation(toolbelt=True)
-def host_ip(toolbelt, **_):
-    global_test_holder['host_ip'] = toolbelt.host_ip
+def host_ip(toolbelt, holder_path, **_):
+    helpers.FilesystemDataHolder(holder_path)['host_ip'] = toolbelt.host_ip
 
 
 @operation(toolbelt=True)
-def relationship_operation(ctx, toolbelt, **_):
-    global_test_holder[ctx.name] = toolbelt._op_context.source_node.name
-
-
-@pytest.fixture(autouse=True)
-def cleanup():
-    global_test_holder.clear()
+def relationship_operation(ctx, toolbelt, holder_path, **_):
+    helpers.FilesystemDataHolder(holder_path)[ctx.name] = toolbelt._op_context.source_node.name

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16fcca45/tests/storage/__init__.py
----------------------------------------------------------------------
diff --git a/tests/storage/__init__.py b/tests/storage/__init__.py
index 66424db..8ca1480 100644
--- a/tests/storage/__init__.py
+++ b/tests/storage/__init__.py
@@ -51,6 +51,5 @@ def init_inmemory_model_storage():
 
     engine = create_engine(uri, **engine_kwargs)
     session_factory = orm.sessionmaker(bind=engine)
-    session = orm.scoped_session(session_factory=session_factory)
 
-    return dict(engine=engine, session=session)
+    return dict(engine=engine, session=session_factory())


[5/9] incubator-ariatosca git commit: ARIA-230 Dry execution doesn't log empty operations

Posted by em...@apache.org.
ARIA-230 Dry execution doesn't log empty operations


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/b11fbc94
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/b11fbc94
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/b11fbc94

Branch: refs/heads/ARIA-140-version-utils
Commit: b11fbc946ae6d26ad6c70a283cc6d6501c320273
Parents: 0ec2370
Author: max-orlov <ma...@gigaspaces.com>
Authored: Mon May 8 17:29:29 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Tue May 9 01:02:46 2017 +0300

----------------------------------------------------------------------
 aria/orchestrator/workflows/executor/dry.py | 26 ++++++++++++------------
 1 file changed, 13 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b11fbc94/aria/orchestrator/workflows/executor/dry.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/dry.py b/aria/orchestrator/workflows/executor/dry.py
index f6fb7a6..63ec392 100644
--- a/aria/orchestrator/workflows/executor/dry.py
+++ b/aria/orchestrator/workflows/executor/dry.py
@@ -32,20 +32,20 @@ class DryExecutor(BaseExecutor):
             task.started_at = datetime.utcnow()
             task.status = task.STARTED
 
+        dry_msg = '<dry> {name} {task.interface_name}.{task.operation_name} {suffix}'
+        logger = task.context.logger.info if task.implementation else task.context.logger.debug
+
+        if hasattr(task.actor, 'source_node'):
+            name = '{source_node.name}->{target_node.name}'.format(
+                source_node=task.actor.source_node, target_node=task.actor.target_node)
+        else:
+            name = task.actor.name
+
         if task.implementation:
-            if hasattr(task.actor, 'source_node'):
-                name = '{source_node.name}->{target_node.name}'.format(
-                    source_node=task.actor.source_node, target_node=task.actor.target_node)
-            else:
-                name = task.actor.name
-
-            task.context.logger.info(
-                '<dry> {name} {task.interface_name}.{task.operation_name} started...'
-                .format(name=name, task=task))
-
-            task.context.logger.info(
-                '<dry> {name} {task.interface_name}.{task.operation_name} successful'
-                .format(name=name, task=task))
+            logger(dry_msg.format(name=name, task=task, suffix='started...'))
+            logger(dry_msg.format(name=name, task=task, suffix='successful'))
+        else:
+            logger(dry_msg.format(name=name, task=task, suffix='has no implementation'))
 
         # updating the task manually instead of calling self._task_succeeded(task),
         # to avoid any side effects raising that event might cause


[2/9] incubator-ariatosca git commit: ARIA-160 Operation toolbelt unit tests fail sporadically

Posted by em...@apache.org.
ARIA-160 Operation toolbelt unit tests fail sporadically

Introduced a filesystem-based data holder
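
A rough usage sketch of the holder introduced here (the names below are
illustrative, not the actual test code): the test module creates one holder
backed by a temporary JSON file, operations write their observations into it,
and the test reads them back after execution:

    from tests import helpers

    # Module-level holder backed by a temporary JSON file, shared by the
    # test and the operations it triggers (as in this commit).
    global_test_holder = helpers.FilesystemDataHolder()

    def record_observation():
        # Called from an operation running in another thread.
        global_test_holder['ctx_name'] = 'NodeOperationContext'

    def check_observation():
        # Called from the test after the workflow has executed.
        assert global_test_holder.get('ctx_name') == 'NodeOperationContext'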


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/45c158ef
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/45c158ef
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/45c158ef

Branch: refs/heads/ARIA-140-version-utils
Commit: 45c158eff0d7ea97be0937cba9009522733948ae
Parents: 1cb3086
Author: max-orlov <ma...@gigaspaces.com>
Authored: Wed May 3 12:42:27 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Sun May 7 14:59:29 2017 +0300

----------------------------------------------------------------------
 aria/orchestrator/workflows/core/task.py     |  5 +-
 tests/helpers.py                             | 38 ++++++++++
 tests/orchestrator/context/test_operation.py | 87 +++++++++++++----------
 tests/orchestrator/context/test_toolbelt.py  | 18 +++--
 4 files changed, 100 insertions(+), 48 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/45c158ef/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py
index 0e081c2..78159c4 100644
--- a/aria/orchestrator/workflows/core/task.py
+++ b/aria/orchestrator/workflows/core/task.py
@@ -175,10 +175,9 @@ class OperationTask(BaseTask):
         self._update_fields = {}
         try:
             yield
-            task = self.model_task
             for key, value in self._update_fields.items():
-                setattr(task, key, value)
-            self.model_task = task
+                setattr(self.model_task, key, value)
+            self.model_task = self.model_task
         finally:
             self._update_fields = None
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/45c158ef/tests/helpers.py
----------------------------------------------------------------------
diff --git a/tests/helpers.py b/tests/helpers.py
index 472d696..423e63f 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -14,6 +14,8 @@
 # limitations under the License.
 
 import os
+import tempfile
+import json
 
 from . import ROOT_DIR
 from .resources import DIR as RESOURCES_DIR
@@ -29,3 +31,39 @@ def get_resource_uri(*args):
 
 def get_service_template_uri(*args):
     return os.path.join(RESOURCES_DIR, 'service-templates', *args)
+
+
+class FilesystemDataHolder(object):
+    _tmpfile = tempfile.NamedTemporaryFile('w')
+
+    def _load(self):
+        return json.load(open(self._tmpfile.name))
+
+    def _dump(self, value):
+        return json.dump(value, open(self._tmpfile.name, 'w'))
+
+    def __init__(self):
+        self.clear()
+
+    def __setitem__(self, key, value):
+        dict_ = self._load()
+        dict_[key] = value
+        self._dump(dict_)
+
+    def __getitem__(self, item):
+        return self._load()[item]
+
+    def __iter__(self):
+        return iter(self._load())
+
+    def clear(self):
+        self._dump({})
+
+    def get(self, item, default=None):
+        return self._load().get(item, default)
+
+    def setdefault(self, key, value):
+        dict_ = self._load()
+        return_value = dict_.setdefault(key, value)
+        self._dump(dict_)
+        return return_value

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/45c158ef/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index 971e0db..3180d89 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -28,14 +28,17 @@ from aria.orchestrator import context
 from aria.orchestrator.workflows import api
 
 import tests
-from tests import mock, storage
+from tests import (
+    mock,
+    storage,
+    helpers
+)
 from . import (
     op_path,
     execute,
 )
 
-global_test_holder = {}
-
+global_test_holder = helpers.FilesystemDataHolder()
 
 @pytest.fixture
 def ctx(tmpdir):
@@ -75,7 +78,7 @@ def test_node_operation_task_execution(ctx, thread_executor):
         node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__),
+        operation_kwargs=dict(implementation=op_path(basic_node_operation, module_path=__name__),
                               inputs=inputs)
     )
     node.interfaces[interface.name] = interface
@@ -94,18 +97,11 @@ def test_node_operation_task_execution(ctx, thread_executor):
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
 
-    operation_context = global_test_holder[api.task.OperationTask.NAME_FORMAT.format(
-        type='node',
-        name=node.name,
-        interface=interface_name,
-        operation=operation_name
-    )]
-
-    assert isinstance(operation_context, context.operation.NodeOperationContext)
+    assert global_test_holder['ctx_name'] == context.operation.NodeOperationContext.__name__
 
     # Task bases assertions
-    assert operation_context.task.actor == node
-    assert operation_context.task.name == api.task.OperationTask.NAME_FORMAT.format(
+    assert global_test_holder['actor_name'] == node.name
+    assert global_test_holder['task_name'] == api.task.OperationTask.NAME_FORMAT.format(
         type='node',
         name=node.name,
         interface=interface_name,
@@ -113,12 +109,12 @@ def test_node_operation_task_execution(ctx, thread_executor):
     )
     operations = interface.operations
     assert len(operations) == 1
-    assert operation_context.task.implementation == operations.values()[0].implementation           # pylint: disable=no-member
-    assert operation_context.task.inputs['putput'].value is True
+    assert global_test_holder['implementation'] == operations.values()[0].implementation             # pylint: disable=no-member
+    assert global_test_holder['inputs']['putput'] is True
 
     # Context based attributes (sugaring)
-    assert operation_context.node_template == node.node_template
-    assert operation_context.node == node
+    assert global_test_holder['template_name'] == node.node_template.name
+    assert global_test_holder['node_name'] == node.name
 
 
 def test_relationship_operation_task_execution(ctx, thread_executor):
@@ -131,7 +127,8 @@ def test_relationship_operation_task_execution(ctx, thread_executor):
         relationship.source_node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__),
+        operation_kwargs=dict(implementation=op_path(basic_relationship_operation,
+                                                     module_path=__name__),
                               inputs=inputs),
     )
 
@@ -151,21 +148,14 @@ def test_relationship_operation_task_execution(ctx, thread_executor):
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
 
-    operation_context = global_test_holder[api.task.OperationTask.NAME_FORMAT.format(
-        type='relationship',
-        name=relationship.name,
-        interface=interface_name,
-        operation=operation_name
-    )]
-
-    assert isinstance(operation_context, context.operation.RelationshipOperationContext)
+    assert global_test_holder['ctx_name'] == context.operation.RelationshipOperationContext.__name__
 
     # Task bases assertions
-    assert operation_context.task.actor == relationship
-    assert interface_name in operation_context.task.name
+    assert global_test_holder['actor_name'] == relationship.name
+    assert interface_name in global_test_holder['task_name']
     operations = interface.operations
-    assert operation_context.task.implementation == operations.values()[0].implementation           # pylint: disable=no-member
-    assert operation_context.task.inputs['putput'].value is True
+    assert global_test_holder['implementation'] == operations.values()[0].implementation           # pylint: disable=no-member
+    assert global_test_holder['inputs']['putput'] is True
 
     # Context based attributes (sugaring)
     dependency_node_template = ctx.model.node_template.get_by_name(
@@ -175,11 +165,11 @@ def test_relationship_operation_task_execution(ctx, thread_executor):
         mock.models.DEPENDENT_NODE_TEMPLATE_NAME)
     dependent_node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
 
-    assert operation_context.target_node_template == dependency_node_template
-    assert operation_context.target_node == dependency_node
-    assert operation_context.relationship == relationship
-    assert operation_context.source_node_template == dependent_node_template
-    assert operation_context.source_node == dependent_node
+    assert global_test_holder['target_node_template_name'] == dependency_node_template.name
+    assert global_test_holder['target_node_name'] == dependency_node.name
+    assert global_test_holder['relationship_name'] == relationship.name
+    assert global_test_holder['source_node_template_name'] == dependent_node_template.name
+    assert global_test_holder['source_node_name'] == dependent_node.name
 
 
 def test_invalid_task_operation_id(ctx, thread_executor):
@@ -386,8 +376,29 @@ def logged_operation(ctx, **_):
 
 
 @operation
-def basic_operation(ctx, **_):
-    global_test_holder[ctx.name] = ctx
+def basic_node_operation(ctx, **_):
+    operation_common(ctx)
+    global_test_holder['template_name'] = ctx.node_template.name
+    global_test_holder['node_name'] = ctx.node.name
+
+
+@operation
+def basic_relationship_operation(ctx, **_):
+    operation_common(ctx)
+    global_test_holder['target_node_template_name'] = ctx.target_node_template.name
+    global_test_holder['target_node_name'] = ctx.target_node.name
+    global_test_holder['relationship_name'] = ctx.relationship.name
+    global_test_holder['source_node_template_name'] = ctx.source_node_template.name
+    global_test_holder['source_node_name'] = ctx.source_node.name
+
+
+def operation_common(ctx):
+    global_test_holder['ctx_name'] = ctx.__class__.__name__
+
+    global_test_holder['actor_name'] = ctx.task.actor.name
+    global_test_holder['task_name'] = ctx.task.name
+    global_test_holder['implementation'] = ctx.task.implementation
+    global_test_holder['inputs'] = dict(i.unwrap() for i in ctx.task.inputs.values())
 
 
 @operation

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/45c158ef/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index ecc3ac2..822ac5a 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -19,15 +19,18 @@ from aria import workflow, operation
 from aria.orchestrator import context
 from aria.orchestrator.workflows import api
 from aria.orchestrator.workflows.executor import thread
-from aria.orchestrator.context.toolbelt import RelationshipToolBelt
 
-from tests import mock, storage
+from tests import (
+    mock,
+    storage,
+    helpers
+)
 from . import (
     op_path,
     execute,
 )
 
-global_test_holder = {}
+global_test_holder = helpers.FilesystemDataHolder()
 
 
 @pytest.fixture
@@ -85,6 +88,8 @@ def test_host_ip(workflow_context, executor):
                               inputs=inputs)
     )
     dependency_node.interfaces[interface.name] = interface
+    dependency_node.runtime_properties['ip'] = '1.1.1.1'
+
     workflow_context.model.node.update(dependency_node)
 
     @workflow
@@ -131,12 +136,11 @@ def test_relationship_tool_belt(workflow_context, executor):
 
     execute(workflow_func=basic_workflow, workflow_context=workflow_context, executor=executor)
 
-    assert isinstance(global_test_holder.get(api.task.OperationTask.NAME_FORMAT.format(
+    assert global_test_holder.get(api.task.OperationTask.NAME_FORMAT.format(
         type='relationship',
         name=relationship.name,
         interface=interface_name,
-        operation=operation_name
-    )), RelationshipToolBelt)
+        operation=operation_name)) == relationship.source_node.name
 
 
 def test_wrong_model_toolbelt():
@@ -151,7 +155,7 @@ def host_ip(toolbelt, **_):
 
 @operation(toolbelt=True)
 def relationship_operation(ctx, toolbelt, **_):
-    global_test_holder[ctx.name] = toolbelt
+    global_test_holder[ctx.name] = toolbelt._op_context.source_node.name
 
 
 @pytest.fixture(autouse=True)


[9/9] incubator-ariatosca git commit: ARIA-140 Version utils

Posted by em...@apache.org.
ARIA-140 Version utils

Provided mainly to support version comparisons for plugins, so that the
highest version among the available plugins matching a plugin
specification can be selected. In the future they may be useful for
other version comparisons.
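
As a hedged sketch of how these utilities support that selection (the
attribute names mirror the diff below, but the helper itself is
illustrative rather than part of the commit):

    from aria.utils.versions import VersionString

    def select_highest_matching(plugins, name, min_version=None):
        # Keep plugins with the requested name whose version satisfies the
        # (optional) minimum, mirroring the specification-matching logic.
        matching = [
            plugin for plugin in plugins
            if plugin.name == name and (
                min_version is None or
                VersionString(plugin.package_version) >= min_version)
        ]
        if not matching:
            return None
        # Sort by the precomputed comparison key, as VersionString recommends
        # for efficient sorts, and take the highest version.
        return sorted(
            matching,
            key=lambda plugin: VersionString(plugin.package_version).key)[-1]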


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/eae44d0b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/eae44d0b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/eae44d0b

Branch: refs/heads/ARIA-140-version-utils
Commit: eae44d0b0d051fd50cadf94d734bdf7a3b8d3171
Parents: d0411d3
Author: Tal Liron <ta...@gmail.com>
Authored: Fri Apr 14 13:39:02 2017 -0500
Committer: Tal Liron <ta...@gmail.com>
Committed: Tue May 9 15:37:46 2017 -0500

----------------------------------------------------------------------
 aria/modeling/service_template.py |  17 ++--
 aria/utils/versions.py            | 162 +++++++++++++++++++++++++++++++++
 tests/utils/test_versions.py      |  85 +++++++++++++++++
 3 files changed, 257 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/eae44d0b/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
index f721b64..e3320fa 100644
--- a/aria/modeling/service_template.py
+++ b/aria/modeling/service_template.py
@@ -33,7 +33,8 @@ from sqlalchemy.ext.associationproxy import association_proxy
 from ..parser import validation
 from ..parser.consumption import ConsumptionContext
 from ..parser.reading import deepcopy_with_locators
-from ..utils import collections, formatting, console
+from ..utils import (collections, formatting, console)
+from ..utils.versions import VersionString
 from .mixins import TemplateModelMixin
 from . import (
     relationship,
@@ -2135,13 +2136,15 @@ class PluginSpecificationBase(TemplateModelMixin):
         # moved to.
         plugins = model_storage.plugin.list()
         matching_plugins = []
-        for plugin in plugins:
-            # TODO: we need to use a version comparator
-            if (plugin.name == self.name) and \
-                    ((self.version is None) or (plugin.package_version >= self.version)):
-                matching_plugins.append(plugin)
+        if plugins:
+            for plugin in plugins:
+                if (plugin.name == self.name) and \
+                    ((self.version is None) or \
+                     (VersionString(plugin.package_version) >= self.version)):
+                    matching_plugins.append(plugin)
         self.plugin = None
         if matching_plugins:
             # Return highest version of plugin
-            self.plugin = sorted(matching_plugins, key=lambda plugin: plugin.package_version)[-1]
+            key = lambda plugin: VersionString(plugin.package_version).key
+            self.plugin = sorted(matching_plugins, key=key)[-1]
         return self.plugin is not None

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/eae44d0b/aria/utils/versions.py
----------------------------------------------------------------------
diff --git a/aria/utils/versions.py b/aria/utils/versions.py
new file mode 100644
index 0000000..925f59e
--- /dev/null
+++ b/aria/utils/versions.py
@@ -0,0 +1,162 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+General-purpose version string handling
+"""
+
+import re
+
+
+_INF = float('inf')
+
+_NULL = (), _INF
+
+_DIGITS_RE = re.compile(r'^\d+$')
+
+_PREFIXES = {
+    'dev':   0.0001,
+    'alpha': 0.001,
+    'beta':  0.01,
+    'rc':    0.1
+}
+
+
+class VersionString(unicode):
+    """
+    Version string that can be compared, sorted, made unique in a set, and used as a unique dict
+    key.
+
+    The primary part of the string is one or more dot-separated natural numbers. Trailing zeroes
+    are treated as redundant, e.g. "1.0.0" == "1.0" == "1".
+
+    An optional qualifier can be added after a "-". The qualifier can be a natural number or a
+    specially treated prefixed natural number, e.g. "1.1-beta1" > "1.1-alpha2". The case of the
+    prefix is ignored.
+
+    Numeric qualifiers will always be greater than prefixed integer qualifiers, e.g. "1.1-1" >
+    "1.1-beta1".
+
+    Versions without a qualifier will always be greater than their equivalents with a qualifier,
+    e.g. "1.1" > "1.1-1".
+
+    Any value that does not conform to this format will be treated as a zero version, which would
+    be lesser than any non-zero version.
+
+    For efficient list sorts use the ``key`` property, e.g.:
+    ``sorted(versions, key=lambda x: x.key)``
+    """
+
+    NULL = None # initialized below
+
+    def __init__(self, value=None):
+        if value is not None:
+            super(VersionString, self).__init__(value)
+        self.key = parse_version_string(self)
+
+    def __eq__(self, version):
+        if not isinstance(version, VersionString):
+            version = VersionString(version)
+        return self.key == version.key
+
+    def __lt__(self, version):
+        if not isinstance(version, VersionString):
+            version = VersionString(version)
+        return self.key < version.key
+
+    def __hash__(self):
+        return self.key.__hash__()
+
+
+def parse_version_string(version): # pylint: disable=too-many-branches
+    """
+    Parses a version string.
+
+    :param version: The version string
+    :returns: The primary tuple and qualifier float
+    :rtype: ((int), float)
+    """
+
+    if version is None:
+        return _NULL
+    version = unicode(version)
+
+    # Split to primary and qualifier on '-'
+    split = version.split('-', 1)
+    if len(split) == 2:
+        primary, qualifier = split
+    else:
+        primary = split[0]
+        qualifier = None
+
+    # Parse primary
+    split = primary.split('.')
+    primary = []
+    for element in split:
+        if _DIGITS_RE.match(element) is None:
+            # Invalid version string
+            return _NULL
+        try:
+            element = int(element)
+        except ValueError:
+            # Invalid version string
+            return _NULL
+        primary.append(element)
+
+    # Remove redundant zeros
+    for element in reversed(primary):
+        if element == 0:
+            primary.pop()
+        else:
+            break
+    primary = tuple(primary)
+
+    # Parse qualifier
+    if qualifier is not None:
+        if _DIGITS_RE.match(qualifier) is not None:
+            # Integer qualifier
+            try:
+                qualifier = float(int(qualifier))
+            except ValueError:
+                # Invalid version string
+                return _NULL
+        else:
+            # Prefixed integer qualifier
+            value = None
+            qualifier = qualifier.lower()
+            for prefix, factor in _PREFIXES.iteritems():
+                if qualifier.startswith(prefix):
+                    value = qualifier[len(prefix):]
+                    if _DIGITS_RE.match(value) is None:
+                        # Invalid version string
+                        return _NULL
+                    try:
+                        value = float(int(value)) * factor
+                    except ValueError:
+                        # Invalid version string
+                        return _NULL
+                    break
+            if value is None:
+                # Invalid version string
+                return _NULL
+            qualifier = value
+    else:
+        # Version strings with no qualifiers are higher
+        qualifier = _INF
+
+    return primary, qualifier
+
+
+VersionString.NULL = VersionString()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/eae44d0b/tests/utils/test_versions.py
----------------------------------------------------------------------
diff --git a/tests/utils/test_versions.py b/tests/utils/test_versions.py
new file mode 100644
index 0000000..222949c
--- /dev/null
+++ b/tests/utils/test_versions.py
@@ -0,0 +1,85 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from aria.utils.versions import (VersionString, parse_version_string)
+
+
+def test_version_string():
+    # No qualifiers
+    assert VersionString('20') == VersionString('20')
+    assert VersionString('20') == VersionString('20.0')
+    assert VersionString('20') == VersionString('20.0.0')
+    assert VersionString('20') < VersionString('20.0.1')
+
+    # With numeric qualifiers
+    assert VersionString('20.0.1-1') < VersionString('20.0.1-2')
+    assert VersionString('20.0.1-0') < VersionString('20.0.1')
+    assert VersionString('20.0.1-1') < VersionString('20.0.1')
+
+    # With prefixed qualifiers
+    assert VersionString('20.0.1-beta1') < VersionString('20.0.1-beta2')
+    assert VersionString('20.0.1-beta1') < VersionString('20.0.1-1')
+    assert VersionString('20.0.1-beta1') < VersionString('20.0.1')
+    assert VersionString('20.0.1-beta2') < VersionString('20.0.1-rc2')
+    assert VersionString('20.0.1-alpha2') < VersionString('20.0.1-beta1')
+    assert VersionString('20.0.1-dev2') < VersionString('20.0.1-alpha1')
+    assert VersionString('20.0.1-DEV2') < VersionString('20.0.1-ALPHA1')
+
+    # Coercive comparisons
+    assert VersionString('20.0.0') == VersionString(10 * 2)
+    assert VersionString('20.0.0') == VersionString(20.0)
+
+    # Non-VersionString comparisons
+    assert VersionString('20.0.0') == 20
+    assert VersionString('20.0.0') < '20.0.1'
+
+    # Nulls
+    assert VersionString() == VersionString()
+    assert VersionString() == VersionString.NULL
+    assert VersionString(None) == VersionString.NULL
+    assert VersionString.NULL == None # pylint: disable=singleton-comparison
+    assert VersionString.NULL == 0
+
+    # Invalid version strings
+    assert VersionString('maxim is maxim') == VersionString.NULL
+    assert VersionString('20.maxim.0') == VersionString.NULL
+    assert VersionString('20.0.0-maxim1') == VersionString.NULL
+    assert VersionString('20.0.1-1.1') == VersionString.NULL
+
+    # Sorts
+    v1 = VersionString('20.0.0')
+    v2 = VersionString('20.0.1-beta1')
+    v3 = VersionString('20.0.1')
+    v4 = VersionString('20.0.2')
+    assert [v1, v2, v3, v4] == sorted([v4, v3, v2, v1], key=lambda v: v.key)
+
+    # Sets
+    v1 = VersionString('20.0.0')
+    v2 = VersionString('20.0')
+    v3 = VersionString('20')
+    assert set([v1]) == set([v1, v2, v3])
+
+    # Dicts
+    the_dict = {v1: 'test'}
+    assert the_dict.get(v2) == 'test'
+
+def test_parse_version_string():
+    # One test of each type from the groups above should be enough
+    assert parse_version_string('20') < parse_version_string('20.0.1')
+    assert parse_version_string('20.0.1-1') < parse_version_string('20.0.1-2')
+    assert parse_version_string('20.0.1-beta1') < parse_version_string('20.0.1-beta2')
+    assert parse_version_string('20.0.0') == parse_version_string(10 * 2)
+    assert parse_version_string(None) == parse_version_string(0)
+    assert parse_version_string(None) == parse_version_string('maxim is maxim')


[6/9] incubator-ariatosca git commit: ARIA-165 Make node name suffix UUIDs become more readable

Posted by em...@apache.org.
ARIA-165 Make node name suffix UUIDs become more readable


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/1febf80d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/1febf80d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/1febf80d

Branch: refs/heads/ARIA-140-version-utils
Commit: 1febf80dee57b837d2ed937bcdaa080bdc3bd822
Parents: b11fbc9
Author: max-orlov <ma...@gigaspaces.com>
Authored: Mon May 8 15:25:37 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Tue May 9 11:24:07 2017 +0300

----------------------------------------------------------------------
 aria/modeling/service_template.py | 8 ++++++--
 aria/parser/modeling/context.py   | 5 -----
 2 files changed, 6 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1febf80d/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
index 7fab4fc..f721b64 100644
--- a/aria/modeling/service_template.py
+++ b/aria/modeling/service_template.py
@@ -549,9 +549,13 @@ class NodeTemplateBase(TemplateModelMixin):
             ('requirement_templates', formatting.as_raw_list(self.requirement_templates))))
 
     def instantiate(self, container):
-        context = ConsumptionContext.get_thread_local()
         from . import models
-        name = context.modeling.generate_node_id(self.name)
+        if self.nodes:
+            highest_name_suffix = max(int(n.name.rsplit('_', 1)[-1]) for n in self.nodes)
+            suffix = highest_name_suffix + 1
+        else:
+            suffix = 1
+        name = '{name}_{index}'.format(name=self.name, index=suffix)
         node = models.Node(name=name,
                            type=self.type,
                            description=deepcopy_with_locators(self.description),

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1febf80d/aria/parser/modeling/context.py
----------------------------------------------------------------------
diff --git a/aria/parser/modeling/context.py b/aria/parser/modeling/context.py
index dff5991..4a53641 100644
--- a/aria/parser/modeling/context.py
+++ b/aria/parser/modeling/context.py
@@ -67,11 +67,6 @@ class ModelingContext(object):
         if self.instance is not None:
             model_storage.service.put(self.instance)
 
-    def generate_node_id(self, template_name):
-        return self.node_id_format.format(
-            template=template_name,
-            id=self.generate_id())
-
     def generate_id(self):
         if self.id_type == IdType.LOCAL_SERIAL:
             return self._serial_id_counter.next()


[3/9] incubator-ariatosca git commit: ARIA-215 Refactor plugin-related code into PluginManager

Posted by em...@apache.org.
ARIA-215 Refactor plugin-related code into PluginManager

Refactored plugin-related code from ProcessExecutor into PluginManager.
Additionally, renamed plugin_prefix to plugin_dir in PluginManager.
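
A sketch of the intended call pattern after the refactoring, assuming the
environment construction mirrors the removed _update_env logic (the
plugin_manager and task objects here are placeholders, and the helper name
is illustrative):

    import os

    def build_subprocess_env(plugin_manager, task):
        # Start from the parent environment and, for plugin operations, let
        # the PluginManager append the plugin's bin/ and site-packages
        # directories to PATH and PYTHONPATH (see load_plugin in the diff).
        env = os.environ.copy()
        if task.plugin_fk and plugin_manager:
            plugin_manager.load_plugin(task.plugin, env=env)
        return env

The executor then passes the resulting environment to the subprocess that
runs the operation.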


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/3e1ed14c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/3e1ed14c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/3e1ed14c

Branch: refs/heads/ARIA-140-version-utils
Commit: 3e1ed14c00ea2c83fafdca8ec0e37817bea1d5e8
Parents: 45c158e
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Sun May 7 16:36:39 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Sun May 7 17:27:36 2017 +0300

----------------------------------------------------------------------
 aria/orchestrator/plugin.py                     | 36 +++++++++++-
 aria/orchestrator/workflows/executor/process.py | 60 ++++++--------------
 aria/utils/process.py                           | 47 +++++++++++++++
 tests/utils/test_plugin.py                      |  6 +-
 4 files changed, 100 insertions(+), 49 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3e1ed14c/aria/orchestrator/plugin.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/plugin.py b/aria/orchestrator/plugin.py
index f99666c..8fbcf5a 100644
--- a/aria/orchestrator/plugin.py
+++ b/aria/orchestrator/plugin.py
@@ -23,6 +23,9 @@ from datetime import datetime
 import wagon
 
 from . import exceptions
+from ..utils import process as process_utils
+
+_IS_WIN = os.name == 'nt'
 
 
 class PluginManager(object):
@@ -62,11 +65,40 @@ class PluginManager(object):
             raise exceptions.PluginAlreadyExistsError(
                 'Plugin {0}, version {1} already exists'.format(plugin.package_name,
                                                                 plugin.package_version))
-        self._install_wagon(source=source, prefix=self.get_plugin_prefix(plugin))
+        self._install_wagon(source=source, prefix=self.get_plugin_dir(plugin))
         self._model.plugin.put(plugin)
         return plugin
 
-    def get_plugin_prefix(self, plugin):
+    def load_plugin(self, plugin, env=None):
+        """
+        Load the plugin into an environment.
+        Loading the plugin means the plugin's code and binaries paths will be appended to the
+        environment's PATH and PYTHONPATH, thereby allowing usage of the plugin.
+        :param plugin: The plugin to load
+        :param env: The environment to load the plugin into; If `None`, os.environ will be used.
+        """
+        env = env or os.environ
+        plugin_dir = self.get_plugin_dir(plugin)
+
+        # Update PATH environment variable to include plugin's bin dir
+        bin_dir = 'Scripts' if _IS_WIN else 'bin'
+        process_utils.append_to_path(os.path.join(plugin_dir, bin_dir), env=env)
+
+        # Update PYTHONPATH environment variable to include plugin's site-packages
+        # directories
+        if _IS_WIN:
+            pythonpath_dirs = [os.path.join(plugin_dir, 'Lib', 'site-packages')]
+        else:
+            # In some Linux environments, there will be both a lib and a lib64 directory,
+            # with the latter containing compiled packages.
+            pythonpath_dirs = [os.path.join(
+                plugin_dir, 'lib{0}'.format(b),
+                'python{0}.{1}'.format(sys.version_info[0], sys.version_info[1]),
+                'site-packages') for b in ('', '64')]
+
+        process_utils.append_to_pythonpath(*pythonpath_dirs, env=env)
+
+    def get_plugin_dir(self, plugin):
         return os.path.join(
             self._plugins_dir,
             '{0}-{1}'.format(plugin.package_name, plugin.package_version))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3e1ed14c/aria/orchestrator/workflows/executor/process.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py
index 8481406..f3daf04 100644
--- a/aria/orchestrator/workflows/executor/process.py
+++ b/aria/orchestrator/workflows/executor/process.py
@@ -47,13 +47,12 @@ from aria.storage import instrumentation
 from aria.extension import process_executor
 from aria.utils import (
     imports,
-    exceptions
+    exceptions,
+    process as process_utils
 )
 from aria.modeling import types as modeling_types
 
 
-_IS_WIN = os.name == 'nt'
-
 _INT_FMT = 'I'
 _INT_SIZE = struct.calcsize(_INT_FMT)
 UPDATE_TRACKED_CHANGES_FAILED_STR = \
@@ -127,13 +126,7 @@ class ProcessExecutor(base.BaseExecutor):
         with open(arguments_json_path, 'wb') as f:
             f.write(pickle.dumps(self._create_arguments_dict(task)))
 
-        env = os.environ.copy()
-        # See _update_env for plugin_prefix usage
-        if task.plugin_fk and self._plugin_manager:
-            plugin_prefix = self._plugin_manager.get_plugin_prefix(task.plugin)
-        else:
-            plugin_prefix = None
-        self._update_env(env=env, plugin_prefix=plugin_prefix)
+        env = self._construct_subprocess_env(task=task)
         # Asynchronously start the operation in a subprocess
         subprocess.Popen(
             '{0} {1} {2}'.format(sys.executable, __file__, arguments_json_path),
@@ -156,40 +149,19 @@ class ProcessExecutor(base.BaseExecutor):
             'context': task.context.serialization_dict,
         }
 
-    def _update_env(self, env, plugin_prefix):
-        pythonpath_dirs = []
-        # If this is a plugin operation, plugin prefix will point to where
-        # This plugin is installed.
-        # We update the environment variables that the subprocess will be started with based on it
-        if plugin_prefix:
-
-            # Update PATH environment variable to include plugin's bin dir
-            bin_dir = 'Scripts' if _IS_WIN else 'bin'
-            env['PATH'] = '{0}{1}{2}'.format(
-                os.path.join(plugin_prefix, bin_dir),
-                os.pathsep,
-                env.get('PATH', ''))
-
-            # Update PYTHONPATH environment variable to include plugin's site-packages
-            # directories
-            if _IS_WIN:
-                pythonpath_dirs = [os.path.join(plugin_prefix, 'Lib', 'site-packages')]
-            else:
-                # In some linux environments, there will be both a lib and a lib64 directory
-                # with the latter, containing compiled packages.
-                pythonpath_dirs = [os.path.join(
-                    plugin_prefix, 'lib{0}'.format(b),
-                    'python{0}.{1}'.format(sys.version_info[0], sys.version_info[1]),
-                    'site-packages') for b in ('', '64')]
-
-        # Add used supplied directories to injected PYTHONPATH
-        pythonpath_dirs.extend(self._python_path)
-
-        if pythonpath_dirs:
-            env['PYTHONPATH'] = '{0}{1}{2}'.format(
-                os.pathsep.join(pythonpath_dirs),
-                os.pathsep,
-                env.get('PYTHONPATH', ''))
+    def _construct_subprocess_env(self, task):
+        env = os.environ.copy()
+
+        if task.plugin_fk and self._plugin_manager:
+            # If this is a plugin operation,
+            # load the plugin on the subprocess env we're constructing
+            self._plugin_manager.load_plugin(task.plugin, env=env)
+
+        # Add user supplied directories to injected PYTHONPATH
+        if self._python_path:
+            process_utils.append_to_pythonpath(*self._python_path, env=env)
+
+        return env
 
     def _listener(self):
         # Notify __init__ method this thread has actually started

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3e1ed14c/aria/utils/process.py
----------------------------------------------------------------------
diff --git a/aria/utils/process.py b/aria/utils/process.py
new file mode 100644
index 0000000..9aeae67
--- /dev/null
+++ b/aria/utils/process.py
@@ -0,0 +1,47 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+
+def append_to_path(*args, **kwargs):
+    """
+    Appends one or more paths to the system path of an environment.
+    The environment will be that of the current process unless another is passed using the
+    'env' keyword argument.
+    :param args: paths to append
+    :param kwargs: 'env' may be used to pass a custom environment to use
+    """
+    _append_to_path('PATH', *args, **kwargs)
+
+
+def append_to_pythonpath(*args, **kwargs):
+    """
+    Appends one or more paths to the python path of an environment.
+    The environment will be that of the current process unless another is passed using the
+    'env' keyword argument.
+    :param args: paths to append
+    :param kwargs: 'env' may be used to pass a custom environment to use
+    """
+    _append_to_path('PYTHONPATH', *args, **kwargs)
+
+
+def _append_to_path(path, *args, **kwargs):
+    env = kwargs.get('env') or os.environ
+    env[path] = '{0}{1}{2}'.format(
+        os.pathsep.join(args),
+        os.pathsep,
+        env.get(path, '')
+    )
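
A short usage sketch of the new helpers (the directories below are placeholders). Note that, despite the name, the new entries are joined in front of any existing value, separated by os.pathsep:

    import os
    from aria.utils import process as process_utils

    env = os.environ.copy()
    process_utils.append_to_path('/opt/plugins/foo/bin', env=env)
    process_utils.append_to_pythonpath('/opt/plugins/foo/lib/python2.7/site-packages', env=env)

    # The plugin's bin dir now resolves first on PATH in this env copy.
    assert env['PATH'].split(os.pathsep)[0] == '/opt/plugins/foo/bin'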

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3e1ed14c/tests/utils/test_plugin.py
----------------------------------------------------------------------
diff --git a/tests/utils/test_plugin.py b/tests/utils/test_plugin.py
index 3350247..c91d0c9 100644
--- a/tests/utils/test_plugin.py
+++ b/tests/utils/test_plugin.py
@@ -38,9 +38,9 @@ class TestPluginManager(object):
         assert plugin.package_name == PACKAGE_NAME
         assert plugin.package_version == PACKAGE_VERSION
         assert plugin == model.plugin.get(plugin.id)
-        plugin_prefix = os.path.join(plugins_dir, '{0}-{1}'.format(PACKAGE_NAME, PACKAGE_VERSION))
-        assert os.path.isdir(plugin_prefix)
-        assert plugin_prefix == plugin_manager.get_plugin_prefix(plugin)
+        plugin_dir = os.path.join(plugins_dir, '{0}-{1}'.format(PACKAGE_NAME, PACKAGE_VERSION))
+        assert os.path.isdir(plugin_dir)
+        assert plugin_dir == plugin_manager.get_plugin_dir(plugin)
 
     def test_install_already_exits(self, plugin_manager, mock_plugin):
         plugin_manager.install(mock_plugin)


[4/9] incubator-ariatosca git commit: ARIA-214 Dry execution changes the state of non-implemented operations

Posted by em...@apache.org.
ARIA-214 Dry execution changes the state of non-implemented operations


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/0ec23707
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/0ec23707
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/0ec23707

Branch: refs/heads/ARIA-140-version-utils
Commit: 0ec237071ebdeb28cd2feabbc1b51854543d398d
Parents: 3e1ed14
Author: max-orlov <ma...@gigaspaces.com>
Authored: Sun May 7 16:12:56 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Sun May 7 22:29:53 2017 +0300

----------------------------------------------------------------------
 aria/orchestrator/workflows/core/task.py        |  3 ---
 aria/orchestrator/workflows/core/translation.py |  6 +----
 aria/orchestrator/workflows/executor/base.py    | 19 ++++++++------
 aria/orchestrator/workflows/executor/celery.py  |  2 +-
 aria/orchestrator/workflows/executor/dry.py     | 26 ++++++++++----------
 aria/orchestrator/workflows/executor/process.py |  2 +-
 aria/orchestrator/workflows/executor/thread.py  |  2 +-
 tests/orchestrator/workflows/core/test_task.py  |  7 ++----
 .../workflows/executor/test_process_executor.py | 18 +-------------
 9 files changed, 31 insertions(+), 54 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0ec23707/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py
index 78159c4..b3dfb3c 100644
--- a/aria/orchestrator/workflows/core/task.py
+++ b/aria/orchestrator/workflows/core/task.py
@@ -163,9 +163,6 @@ class OperationTask(BaseTask):
         self._task_id = task_model.id
         self._update_fields = None
 
-    def execute(self):
-        super(OperationTask, self).execute()
-
     @contextmanager
     def _update(self):
         """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0ec23707/aria/orchestrator/workflows/core/translation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/translation.py b/aria/orchestrator/workflows/core/translation.py
index 0bbce90..fec108b 100644
--- a/aria/orchestrator/workflows/core/translation.py
+++ b/aria/orchestrator/workflows/core/translation.py
@@ -48,11 +48,7 @@ def build_execution_graph(
             execution_graph, dependencies, default=[start_task])
 
         if isinstance(api_task, api.task.OperationTask):
-            if api_task.implementation:
-                operation_task = core_task.OperationTask(api_task, executor=default_executor)
-            else:
-                operation_task = core_task.OperationTask(api_task,
-                                                         executor=base.EmptyOperationExecutor())
+            operation_task = core_task.OperationTask(api_task, executor=default_executor)
             _add_task_and_dependencies(execution_graph, operation_task, operation_dependencies)
         elif isinstance(api_task, api.task.WorkflowTask):
             # Build the graph recursively while adding start and end markers

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0ec23707/aria/orchestrator/workflows/executor/base.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/base.py b/aria/orchestrator/workflows/executor/base.py
index a225837..c543278 100644
--- a/aria/orchestrator/workflows/executor/base.py
+++ b/aria/orchestrator/workflows/executor/base.py
@@ -25,13 +25,22 @@ class BaseExecutor(logger.LoggerMixin):
     """
     Base class for executors for running tasks
     """
+    def _execute(self, task):
+        raise NotImplementedError
 
     def execute(self, task):
         """
         Execute a task
         :param task: task to execute
         """
-        raise NotImplementedError
+        if task.implementation:
+            self._execute(task)
+        else:
+            # In this case the task is missing an implementation. This task still gets to an
+            # executor, but since there is nothing to run, we by default simply skip the execution
+            # itself.
+            self._task_started(task)
+            self._task_succeeded(task)
 
     def close(self):
         """
@@ -52,12 +61,6 @@ class BaseExecutor(logger.LoggerMixin):
         events.on_success_task_signal.send(task)
 
 
-class StubTaskExecutor(BaseExecutor):
+class StubTaskExecutor(BaseExecutor):                                                               # pylint: disable=abstract-method
     def execute(self, task):
         task.status = task.SUCCESS
-
-
-class EmptyOperationExecutor(BaseExecutor):
-    def execute(self, task):
-        events.start_task_signal.send(task)
-        events.on_success_task_signal.send(task)
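
Under the new contract, concrete executors override _execute() and only ever see tasks that have an implementation; tasks without one are marked started and succeeded by BaseExecutor.execute itself. A minimal sketch with a hypothetical executor subclass:

    from aria.orchestrator.workflows.executor.base import BaseExecutor

    class LoggingExecutor(BaseExecutor):
        # Hypothetical executor, shown only to illustrate the _execute hook.
        def _execute(self, task):
            # task.implementation is guaranteed to be set here; otherwise
            # BaseExecutor.execute would have short-circuited the task.
            self._task_started(task)
            task.context.logger.info('executing {0}'.format(task.implementation))
            self._task_succeeded(task)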

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0ec23707/aria/orchestrator/workflows/executor/celery.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/celery.py b/aria/orchestrator/workflows/executor/celery.py
index 7bd9b7c..bbddc25 100644
--- a/aria/orchestrator/workflows/executor/celery.py
+++ b/aria/orchestrator/workflows/executor/celery.py
@@ -42,7 +42,7 @@ class CeleryExecutor(BaseExecutor):
         self._receiver_thread.start()
         self._started_queue.get(timeout=30)
 
-    def execute(self, task):
+    def _execute(self, task):
         self._tasks[task.id] = task
         inputs = dict(inp.unwrap() for inp in task.inputs.values())
         inputs['ctx'] = task.context

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0ec23707/aria/orchestrator/workflows/executor/dry.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/dry.py b/aria/orchestrator/workflows/executor/dry.py
index eb70a41..f6fb7a6 100644
--- a/aria/orchestrator/workflows/executor/dry.py
+++ b/aria/orchestrator/workflows/executor/dry.py
@@ -21,11 +21,10 @@ from datetime import datetime
 from .base import BaseExecutor
 
 
-class DryExecutor(BaseExecutor):
+class DryExecutor(BaseExecutor):                                                                    # pylint: disable=abstract-method
     """
     Executor which dry runs tasks - prints task information without causing any side effects
     """
-
     def execute(self, task):
         # updating the task manually instead of calling self._task_started(task),
         # to avoid any side effects raising that event might cause
@@ -33,19 +32,20 @@ class DryExecutor(BaseExecutor):
             task.started_at = datetime.utcnow()
             task.status = task.STARTED
 
-        if hasattr(task.actor, 'source_node'):
-            name = '{source_node.name}->{target_node.name}'.format(
-                source_node=task.actor.source_node, target_node=task.actor.target_node)
-        else:
-            name = task.actor.name
+        if task.implementation:
+            if hasattr(task.actor, 'source_node'):
+                name = '{source_node.name}->{target_node.name}'.format(
+                    source_node=task.actor.source_node, target_node=task.actor.target_node)
+            else:
+                name = task.actor.name
 
-        task.context.logger.info(
-            '<dry> {name} {task.interface_name}.{task.operation_name} started...'
-            .format(name=name, task=task))
+            task.context.logger.info(
+                '<dry> {name} {task.interface_name}.{task.operation_name} started...'
+                .format(name=name, task=task))
 
-        task.context.logger.info(
-            '<dry> {name} {task.interface_name}.{task.operation_name} successful'
-            .format(name=name, task=task))
+            task.context.logger.info(
+                '<dry> {name} {task.interface_name}.{task.operation_name} successful'
+                .format(name=name, task=task))
 
         # updating the task manually instead of calling self._task_succeeded(task),
         # to avoid any side effects raising that event might cause

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0ec23707/aria/orchestrator/workflows/executor/process.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py
index f3daf04..e464f7d 100644
--- a/aria/orchestrator/workflows/executor/process.py
+++ b/aria/orchestrator/workflows/executor/process.py
@@ -116,7 +116,7 @@ class ProcessExecutor(base.BaseExecutor):
         self._server_socket.close()
         self._listener_thread.join(timeout=60)
 
-    def execute(self, task):
+    def _execute(self, task):
         self._check_closed()
         self._tasks[task.id] = task
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0ec23707/aria/orchestrator/workflows/executor/thread.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/thread.py b/aria/orchestrator/workflows/executor/thread.py
index 836b2bf..f53362a 100644
--- a/aria/orchestrator/workflows/executor/thread.py
+++ b/aria/orchestrator/workflows/executor/thread.py
@@ -46,7 +46,7 @@ class ThreadExecutor(BaseExecutor):
             thread.start()
             self._pool.append(thread)
 
-    def execute(self, task):
+    def _execute(self, task):
         self._queue.put(task)
 
     def close(self):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0ec23707/tests/orchestrator/workflows/core/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task.py b/tests/orchestrator/workflows/core/test_task.py
index 748ee20..50ca7f5 100644
--- a/tests/orchestrator/workflows/core/test_task.py
+++ b/tests/orchestrator/workflows/core/test_task.py
@@ -24,7 +24,6 @@ from aria.orchestrator.workflows import (
     api,
     core,
     exceptions,
-    executor
 )
 
 from tests import mock, storage
@@ -71,8 +70,7 @@ class TestOperationTask(object):
                 node,
                 interface_name=NODE_INTERFACE_NAME,
                 operation_name=NODE_OPERATION_NAME)
-            core_task = core.task.OperationTask(api_task=api_task,
-                                                executor=executor.base.EmptyOperationExecutor())
+            core_task = core.task.OperationTask(api_task=api_task, executor=None)
         return api_task, core_task
 
     def _create_relationship_operation_task(self, ctx, relationship):
@@ -81,8 +79,7 @@ class TestOperationTask(object):
                 relationship,
                 interface_name=RELATIONSHIP_INTERFACE_NAME,
                 operation_name=RELATIONSHIP_OPERATION_NAME)
-            core_task = core.task.OperationTask(api_task=api_task,
-                                                executor=executor.base.EmptyOperationExecutor())
+            core_task = core.task.OperationTask(api_task=api_task, executor=None)
         return api_task, core_task
 
     def test_node_operation_task_creation(self, ctx):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0ec23707/tests/orchestrator/workflows/executor/test_process_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor.py b/tests/orchestrator/workflows/executor/test_process_executor.py
index b353518..5f240b2 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor.py
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging
 import os
 import Queue
 
@@ -66,7 +65,7 @@ class TestProcessExecutor(object):
     def test_closed(self, executor):
         executor.close()
         with pytest.raises(RuntimeError) as exc_info:
-            executor.execute(task=None)
+            executor.execute(task=MockTask(implementation='some.implementation'))
         assert 'closed' in exc_info.value.message
 
 
@@ -82,18 +81,3 @@ def mock_plugin(plugin_manager, tmpdir):
     source = os.path.join(tests.resources.DIR, 'plugins', 'mock-plugin1')
     plugin_path = create_plugin(source=source, destination_dir=str(tmpdir))
     return plugin_manager.install(source=plugin_path)
-
-
-class MockContext(object):
-
-    def __init__(self, *args, **kwargs):
-        self.logger = logging.getLogger('mock_logger')
-        self.task = type('SubprocessMockTask', (object, ), {'plugin': None})
-        self.serialization_dict = {'context_cls': self.__class__, 'context': {}}
-
-    def __getattr__(self, item):
-        return None
-
-    @classmethod
-    def deserialize_from_dict(cls, **kwargs):
-        return cls()


[8/9] incubator-ariatosca git commit: ARIA-210 Handle relative paths in CLI service-templates

Posted by em...@apache.org.
ARIA-210 Handle relative paths in CLI service-templates

This was a rather simple change, mainly involving converting user-supplied
relative paths to absolute ones.

The problems were only in `service-templates store` and in
`service-templates create-archive`.
`service-templates validate` was not affected.
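
The fix amounts to normalizing the user-supplied path once, close to where it is read, and deriving the CSAR source directory and entry file name from the absolute path. A sketch of the pattern (the split_service_template_path helper name is hypothetical), mirroring the csar.write change below:

    import os

    def split_service_template_path(service_template_path):
        # Resolve '~' and make the path absolute relative to the current
        # working directory, then split it into CSAR source dir and entry file.
        service_template_path = os.path.abspath(os.path.expanduser(service_template_path))
        return os.path.dirname(service_template_path), os.path.basename(service_template_path)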


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/d0411d3d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/d0411d3d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/d0411d3d

Branch: refs/heads/ARIA-140-version-utils
Commit: d0411d3de37bb31073fda605cd9b73431b685d92
Parents: 16fcca4
Author: Avia Efrat <av...@gigaspaces.com>
Authored: Mon May 8 17:45:23 2017 +0300
Committer: Avia Efrat <av...@gigaspaces.com>
Committed: Tue May 9 17:30:22 2017 +0300

----------------------------------------------------------------------
 aria/cli/commands/service_templates.py |  4 +++-
 aria/cli/csar.py                       | 18 ++++++++++--------
 aria/cli/service_template_utils.py     |  2 +-
 tests/cli/test_service_templates.py    | 22 ++++++++++++++++++++++
 4 files changed, 36 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d0411d3d/aria/cli/commands/service_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/service_templates.py b/aria/cli/commands/service_templates.py
index 2537012..e459871 100644
--- a/aria/cli/commands/service_templates.py
+++ b/aria/cli/commands/service_templates.py
@@ -195,7 +195,9 @@ def create_archive(service_template_path, destination, logger):
     `destination` is the path of the output CSAR archive file
     """
     logger.info('Creating a CSAR archive')
-    csar.write(os.path.dirname(service_template_path), service_template_path, destination, logger)
+    if not destination.endswith(csar.CSAR_FILE_EXTENSION):
+        destination += csar.CSAR_FILE_EXTENSION
+    csar.write(service_template_path, destination, logger)
     logger.info('CSAR archive created at {0}'.format(destination))
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d0411d3d/aria/cli/csar.py
----------------------------------------------------------------------
diff --git a/aria/cli/csar.py b/aria/cli/csar.py
index 5bc35ac..8f44557 100644
--- a/aria/cli/csar.py
+++ b/aria/cli/csar.py
@@ -22,7 +22,7 @@ import zipfile
 import requests
 from ruamel import yaml
 
-
+CSAR_FILE_EXTENSION = '.csar'
 META_FILE = 'TOSCA-Metadata/TOSCA.meta'
 META_FILE_VERSION_KEY = 'TOSCA-Meta-File-Version'
 META_FILE_VERSION_VALUE = '1.0'
@@ -38,17 +38,19 @@ BASE_METADATA = {
 }
 
 
-def write(source, entry, destination, logger):
-    source = os.path.expanduser(source)
-    destination = os.path.expanduser(destination)
-    entry_definitions = os.path.join(source, entry)
+def write(service_template_path, destination, logger):
+
+    service_template_path = os.path.abspath(os.path.expanduser(service_template_path))
+    source = os.path.dirname(service_template_path)
+    entry = os.path.basename(service_template_path)
+
     meta_file = os.path.join(source, META_FILE)
     if not os.path.isdir(source):
         raise ValueError('{0} is not a directory. Please specify the service template '
                          'directory.'.format(source))
-    if not os.path.isfile(entry_definitions):
+    if not os.path.isfile(service_template_path):
         raise ValueError('{0} does not exists. Please specify a valid entry point.'
-                         .format(entry_definitions))
+                         .format(service_template_path))
     if os.path.exists(destination):
         raise ValueError('{0} already exists. Please provide a path to where the CSAR should be '
                          'created.'.format(destination))
@@ -175,4 +177,4 @@ def read(source, destination=None, logger=None):
 
 
 def is_csar_archive(source):
-    return source.endswith('.csar')
+    return source.endswith(CSAR_FILE_EXTENSION)
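
With the new signature, callers hand csar.write the service template path directly, and the CLI command takes care of the archive extension. A hedged usage sketch (the paths and logger name are placeholders; write raises ValueError if the destination already exists):

    import logging
    from aria.cli import csar

    logger = logging.getLogger('cli.csar')
    destination = 'my-template'
    if not destination.endswith(csar.CSAR_FILE_EXTENSION):
        destination += csar.CSAR_FILE_EXTENSION   # -> 'my-template.csar'
    csar.write('service-template/service_template.yaml', destination, logger)
    assert csar.is_csar_archive(destination)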

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d0411d3d/aria/cli/service_template_utils.py
----------------------------------------------------------------------
diff --git a/aria/cli/service_template_utils.py b/aria/cli/service_template_utils.py
index 382cce1..c953c02 100644
--- a/aria/cli/service_template_utils.py
+++ b/aria/cli/service_template_utils.py
@@ -53,7 +53,7 @@ def get(source, service_template_filename):
             return _get_service_template_file_from_archive(source, service_template_filename)
         else:
             # Maybe check if yaml.
-            return source
+            return os.path.abspath(source)
     elif len(source.split('/')) == 2:
         url = _map_to_github_url(source)
         downloaded_file = utils.download_file(url)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d0411d3d/tests/cli/test_service_templates.py
----------------------------------------------------------------------
diff --git a/tests/cli/test_service_templates.py b/tests/cli/test_service_templates.py
index dd9eedd..22a8fc8 100644
--- a/tests/cli/test_service_templates.py
+++ b/tests/cli/test_service_templates.py
@@ -12,6 +12,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import os
+import zipfile
 
 import pytest
 import mock
@@ -131,6 +133,18 @@ class TestServiceTemplatesStore(TestCliBase):
         assert 'Service template {name} stored'.format(
             name=mock_models.SERVICE_TEMPLATE_NAME) in self.logger_output_string
 
+    def test_store_relative_path_single_yaml_file(self, monkeypatch, mock_object):
+        monkeypatch.setattr(Core, 'create_service_template', mock_object)
+        monkeypatch.setattr(os.path, 'isfile', lambda x: True)
+        monkeypatch.setattr(service_template_utils, '_is_archive', lambda x: False)
+
+        self.invoke('service_templates store service_template.yaml {name}'.format(
+            name=mock_models.SERVICE_TEMPLATE_NAME))
+
+        mock_object.assert_called_with(os.path.join(os.getcwd(), 'service_template.yaml'),
+                                       mock.ANY,
+                                       mock.ANY)
+
     def test_store_raises_exception_resulting_from_name_uniqueness(self, monkeypatch, mock_object):
 
         monkeypatch.setattr(service_template_utils, 'get', mock_object)
@@ -244,3 +258,11 @@ class TestServiceTemplatesCreateArchive(TestCliBase):
         monkeypatch.setattr(csar, 'write', mock_object)
         self.invoke('service_templates create_archive stubpath stubdest')
         assert 'CSAR archive created at stubdest' in self.logger_output_string
+
+    def test_create_archive_from_relative_path(self, monkeypatch, mock_object):
+
+        monkeypatch.setattr(os.path, 'isfile', mock_object)
+        monkeypatch.setattr(zipfile, 'ZipFile', mock.MagicMock)
+
+        self.invoke('service_templates create_archive archive stubdest')
+        mock_object.assert_called_with(os.path.join(os.getcwd(), 'archive'))