Posted to dev@ariatosca.apache.org by ra...@apache.org on 2017/04/15 14:11:31 UTC

[1/9] incubator-ariatosca git commit: ARIA-128 Make test_engine more robust [Forced Update!]

Repository: incubator-ariatosca
Updated Branches:
  refs/heads/ARIA-48-aria-cli 5c52065d9 -> aaf66420d (forced update)


ARIA-128 Make test_engine more robust


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/3dadc9f6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/3dadc9f6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/3dadc9f6

Branch: refs/heads/ARIA-48-aria-cli
Commit: 3dadc9f648a37119c8eb196e829abf53f2aa5609
Parents: 3a56f12
Author: Tal Liron <ta...@gmail.com>
Authored: Tue Apr 11 15:26:43 2017 -0500
Committer: Tal Liron <ta...@gmail.com>
Committed: Tue Apr 11 18:21:43 2017 -0500

----------------------------------------------------------------------
 tests/orchestrator/workflows/core/test_engine.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3dadc9f6/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index 6f97952..0b48870 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -237,7 +237,8 @@ class TestCancel(BaseTest):
         t.start()
         time.sleep(10)
         eng.cancel_execution()
-        t.join(timeout=30)
+        t.join(timeout=60)  # give this a *lot* of time, because Travis can be *very* slow
+        assert not t.is_alive()  # join() does not raise an exception when it times out
         assert workflow_context.states == ['start', 'cancel']
         assert workflow_context.exception is None
         invocations = global_test_holder.get('invocations', [])
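
A note on this fix: Thread.join(timeout=...) returns silently when the timeout
elapses rather than raising, so a join on its own can "pass" while the worker is
still running; the added assertion is what actually detects the timeout. A minimal
self-contained sketch of the pattern, independent of the ARIA test harness:

    import threading
    import time

    def worker():
        time.sleep(0.1)  # stands in for the engine's work

    t = threading.Thread(target=worker)
    t.start()
    t.join(timeout=1)        # returns silently whether or not the thread finished
    assert not t.is_alive()  # only this check detects a join that timed out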


[5/9] incubator-ariatosca git commit: ARIA-48 cli

Posted by ra...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/execution_plugin/test_local.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_local.py b/tests/orchestrator/execution_plugin/test_local.py
index e3612cf..58506ba 100644
--- a/tests/orchestrator/execution_plugin/test_local.py
+++ b/tests/orchestrator/execution_plugin/test_local.py
@@ -460,14 +460,15 @@ if __name__ == '__main__':
              env_var='value',
              inputs=None):
         local_script_path = script_path
-        script_path = os.path.basename(local_script_path) if local_script_path else None
+        script_path = os.path.basename(local_script_path) if local_script_path else ''
+        inputs = inputs or {}
+        process = process or {}
         if script_path:
-            workflow_context.resource.deployment.upload(
+            workflow_context.resource.service.upload(
                 entry_id=str(workflow_context.service.id),
                 source=local_script_path,
                 path=script_path)
 
-        inputs = inputs or {}
         inputs.update({
             'script_path': script_path,
             'process': process,
@@ -481,9 +482,11 @@ if __name__ == '__main__':
                 node.service,
                 'test',
                 'op',
-                operation_kwargs=dict(implementation='{0}.{1}'.format(
-                    operations.__name__,
-                    operations.run_script_locally.__name__))
+                operation_kwargs=dict(
+                    implementation='{0}.{1}'.format(
+                        operations.__name__,
+                        operations.run_script_locally.__name__),
+                    inputs=inputs)
             )
             node.interfaces[interface.name] = interface
             graph.add_tasks(api.task.OperationTask.for_node(
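
The substantive change in this hunk, repeated throughout the commit, is that
operation inputs are now declared on the interface (via operation_kwargs) rather
than only passed at task creation, and optional dict arguments are normalized with
the 'x = x or {}' idiom before being updated. A small runnable sketch of that
normalization, using a hypothetical run() helper purely for illustration:

    def run(script_path=None, process=None, inputs=None):
        # normalize optional dict arguments so the .update() below is always safe
        inputs = inputs or {}
        process = process or {}
        inputs.update({
            'script_path': script_path or '',
            'process': process,
        })
        return inputs

    assert run()['script_path'] == ''
    assert run('x.sh')['script_path'] == 'x.sh'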

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/execution_plugin/test_ssh.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_ssh.py b/tests/orchestrator/execution_plugin/test_ssh.py
index d86b6d2..dcfd88e 100644
--- a/tests/orchestrator/execution_plugin/test_ssh.py
+++ b/tests/orchestrator/execution_plugin/test_ssh.py
@@ -217,29 +217,39 @@ class TestWithActualSSHServer(object):
         @workflow
         def mock_workflow(ctx, graph):
             node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+            inputs = {
+                'script_path': script_path,
+                'fabric_env': _FABRIC_ENV,
+                'process': process,
+                'use_sudo': use_sudo,
+                'hide_output': hide_output,
+                'custom_env_var': custom_input,
+                'test_operation': '',
+                'commands': commands
+            }
             interface = mock.models.create_interface(
                 node.service,
                 'test',
                 'op',
-                operation_kwargs=dict(implementation='{0}.{1}'.format(
-                    operations.__name__,
-                    operation.__name__))
+                operation_kwargs=dict(
+                    implementation='{0}.{1}'.format(
+                        operations.__name__,
+                        operation.__name__),
+                    inputs=inputs)
             )
             node.interfaces[interface.name] = interface
-            graph.sequence(*[api.task.OperationTask.for_node(
-                node=node,
-                interface_name='test',
-                operation_name='op',
-                inputs={
-                    'script_path': script_path,
-                    'fabric_env': _FABRIC_ENV,
-                    'process': process,
-                    'use_sudo': use_sudo,
-                    'hide_output': hide_output,
-                    'custom_env_var': custom_input,
-                    'test_operation': test_operation,
-                    'commands': commands
-                }) for test_operation in test_operations])
+
+            ops = []
+            for test_operation in test_operations:
+                op_inputs = inputs.copy()
+                op_inputs['test_operation'] = test_operation
+                ops.append(api.task.OperationTask.for_node(
+                    node=node,
+                    interface_name='test',
+                    operation_name='op',
+                    inputs=op_inputs))
+
+            graph.sequence(*ops)
             return graph
         tasks_graph = mock_workflow(ctx=self._workflow_context)  # pylint: disable=no-value-for-parameter
         eng = engine.Engine(
@@ -258,7 +268,7 @@ class TestWithActualSSHServer(object):
         return collected[signal][0]['kwargs']['exception']
 
     def _upload(self, source, path):
-        self._workflow_context.resource.deployment.upload(
+        self._workflow_context.resource.service.upload(
             entry_id=str(self._workflow_context.service.id),
             source=source,
             path=path)
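
The refactoring above replaces a list comprehension that built each task's inputs
inline with an explicit loop that copies a shared base dict and overrides a single
key per task. The copy is the important part: without it every task would hold a
reference to the same mutable dict. A minimal sketch of the pattern (all names
hypothetical):

    base_inputs = {'script_path': 'script.sh', 'use_sudo': False, 'test_operation': ''}

    per_task_inputs = []
    for test_operation in ('op_one', 'op_two'):
        op_inputs = base_inputs.copy()             # shallow copy, one dict per task
        op_inputs['test_operation'] = test_operation
        per_task_inputs.append(op_inputs)

    assert base_inputs['test_operation'] == ''     # the shared base is never mutated
    assert [i['test_operation'] for i in per_task_inputs] == ['op_one', 'op_two']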

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/test_runner.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/test_runner.py b/tests/orchestrator/test_runner.py
deleted file mode 100644
index 74e98ad..0000000
--- a/tests/orchestrator/test_runner.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria import workflow
-from aria.orchestrator import operation
-from aria.orchestrator.workflows.api.task import OperationTask
-from aria.orchestrator.runner import Runner
-
-from tests import mock
-
-import pytest
-
-
-OPERATION_RESULTS = {}
-
-
-@operation
-def mock_create_operation(ctx, key, value, **kwargs): # pylint: disable=unused-argument
-    OPERATION_RESULTS[key] = value
-
-
-@pytest.fixture(autouse=True)
-def cleanup():
-    OPERATION_RESULTS.clear()
-
-
-def test_runner_no_tasks():
-    @workflow
-    def workflow_fn(ctx, graph): # pylint: disable=unused-argument
-        pass
-
-    _test_runner(workflow_fn)
-
-
-def test_runner_tasks():
-    @workflow
-    def workflow_fn(ctx, graph):
-        for node in ctx.model.node:
-            graph.add_tasks(
-                OperationTask.for_node(node=node,
-                                       interface_name='Standard',
-                                       operation_name='create'))
-
-    _test_runner(workflow_fn)
-
-    assert OPERATION_RESULTS.get('create') is True
-
-
-def _initialize_model_storage_fn(model_storage):
-    mock.topology.create_simple_topology_single_node(
-        model_storage,
-        '{0}.{1}'.format(__name__, mock_create_operation.__name__)
-    )
-
-
-def _test_runner(workflow_fn):
-    runner = Runner(workflow_name='runner workflow',
-                    workflow_fn=workflow_fn,
-                    inputs={},
-                    initialize_model_storage_fn=_initialize_model_storage_fn,
-                    service_id_fn=lambda: 1)
-    runner.run()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/test_workflow_runner.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/test_workflow_runner.py b/tests/orchestrator/test_workflow_runner.py
new file mode 100644
index 0000000..54e940f
--- /dev/null
+++ b/tests/orchestrator/test_workflow_runner.py
@@ -0,0 +1,292 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+from datetime import datetime
+
+import pytest
+import mock
+
+from aria.modeling import exceptions as modeling_exceptions
+from aria.modeling import models
+from aria.orchestrator import exceptions
+from aria.orchestrator.workflow_runner import WorkflowRunner
+from aria.orchestrator.workflows.executor.process import ProcessExecutor
+
+from ..mock import (
+    topology,
+    workflow as workflow_mocks
+)
+from ..fixtures import (  # pylint: disable=unused-import
+    plugins_dir,
+    plugin_manager,
+    fs_model as model,
+    resource_storage as resource
+)
+
+
+def test_undeclared_workflow(request):
+    # validating a proper error is raised when the workflow is not declared in the service
+    with pytest.raises(exceptions.UndeclaredWorkflowError):
+        _create_workflow_runner(request, 'undeclared_workflow')
+
+
+def test_missing_workflow_implementation(service, request):
+    # validating a proper error is raised when the workflow code path does not exist
+    workflow = models.Operation(
+        name='test_workflow',
+        service=service,
+        implementation='nonexistent.workflow.implementation',
+        inputs={})
+    service.workflows['test_workflow'] = workflow
+
+    with pytest.raises(exceptions.WorkflowImplementationNotFoundError):
+        _create_workflow_runner(request, 'test_workflow')
+
+
+def test_builtin_workflow_instantiation(request):
+    # validates the workflow runner instantiates properly when provided with a builtin workflow
+    # (expecting no errors to be raised on undeclared workflow or missing workflow implementation)
+    workflow_runner = _create_workflow_runner(request, 'install')
+    tasks = list(workflow_runner._tasks_graph.tasks)
+    assert len(tasks) == 2  # expecting two WorkflowTasks
+
+
+def test_custom_workflow_instantiation(request):
+    # validates the workflow runner instantiates properly when provided with a custom workflow
+    # (expecting no errors to be raised on undeclared workflow or missing workflow implementation)
+    mock_workflow = _setup_mock_workflow_in_service(request)
+    workflow_runner = _create_workflow_runner(request, mock_workflow)
+    tasks = list(workflow_runner._tasks_graph.tasks)
+    assert len(tasks) == 0  # mock workflow creates no tasks
+
+
+def test_existing_active_executions(request, service, model):
+    existing_active_execution = models.Execution(
+        service=service,
+        status=models.Execution.STARTED,
+        workflow_name='uninstall')
+    model.execution.put(existing_active_execution)
+    with pytest.raises(exceptions.ActiveExecutionsError):
+        _create_workflow_runner(request, 'install')
+
+
+def test_existing_executions_but_no_active_ones(request, service, model):
+    existing_terminated_execution = models.Execution(
+        service=service,
+        status=models.Execution.TERMINATED,
+        workflow_name='uninstall')
+    model.execution.put(existing_terminated_execution)
+    # no active executions exist, so no error should be raised
+    _create_workflow_runner(request, 'install')
+
+
+def test_default_executor(request):
+    # validates the ProcessExecutor is used by the workflow runner by default
+    mock_workflow = _setup_mock_workflow_in_service(request)
+
+    with mock.patch('aria.orchestrator.workflow_runner.Engine') as mock_engine_cls:
+        _create_workflow_runner(request, mock_workflow)
+        _, engine_kwargs = mock_engine_cls.call_args
+        assert isinstance(engine_kwargs.get('executor'), ProcessExecutor)
+
+
+def test_custom_executor(request):
+    mock_workflow = _setup_mock_workflow_in_service(request)
+
+    custom_executor = mock.MagicMock()
+    with mock.patch('aria.orchestrator.workflow_runner.Engine') as mock_engine_cls:
+        _create_workflow_runner(request, mock_workflow, executor=custom_executor)
+        _, engine_kwargs = mock_engine_cls.call_args
+        assert engine_kwargs.get('executor') == custom_executor
+
+
+def test_task_configuration_parameters(request):
+    mock_workflow = _setup_mock_workflow_in_service(request)
+
+    task_max_attempts = 5
+    task_retry_interval = 7
+    with mock.patch('aria.orchestrator.workflow_runner.Engine') as mock_engine_cls:
+        _create_workflow_runner(request, mock_workflow, task_max_attempts=task_max_attempts,
+                                task_retry_interval=task_retry_interval)
+        _, engine_kwargs = mock_engine_cls.call_args
+        assert engine_kwargs['workflow_context']._task_max_attempts == task_max_attempts
+        assert engine_kwargs['workflow_context']._task_retry_interval == task_retry_interval
+
+
+def test_execute(request, service):
+    mock_workflow = _setup_mock_workflow_in_service(request)
+
+    mock_engine = mock.MagicMock()
+    with mock.patch('aria.orchestrator.workflow_runner.Engine', return_value=mock_engine) \
+            as mock_engine_cls:
+        workflow_runner = _create_workflow_runner(request, mock_workflow)
+
+        _, engine_kwargs = mock_engine_cls.call_args
+        assert engine_kwargs['workflow_context'].service.id == service.id
+        assert engine_kwargs['workflow_context'].execution.workflow_name == 'test_workflow'
+
+        workflow_runner.execute()
+        mock_engine.execute.assert_called_once_with()
+
+
+def test_cancel_execution(request):
+    mock_workflow = _setup_mock_workflow_in_service(request)
+
+    mock_engine = mock.MagicMock()
+    with mock.patch('aria.orchestrator.workflow_runner.Engine', return_value=mock_engine):
+        workflow_runner = _create_workflow_runner(request, mock_workflow)
+        workflow_runner.cancel()
+        mock_engine.cancel_execution.assert_called_once_with()
+
+
+def test_execution_model_creation(request, service, model):
+    mock_workflow = _setup_mock_workflow_in_service(request)
+
+    with mock.patch('aria.orchestrator.workflow_runner.Engine') as mock_engine_cls:
+        workflow_runner = _create_workflow_runner(request, mock_workflow)
+
+        _, engine_kwargs = mock_engine_cls.call_args
+        assert engine_kwargs['workflow_context'].execution == workflow_runner.execution
+        assert model.execution.get(workflow_runner.execution.id) == workflow_runner.execution
+        assert workflow_runner.execution.service.id == service.id
+        assert workflow_runner.execution.workflow_name == mock_workflow
+        assert workflow_runner.execution.created_at <= datetime.utcnow()
+        assert workflow_runner.execution.inputs == dict()
+
+
+def test_execution_inputs_override_workflow_inputs(request):
+    wf_inputs = {'input1': 'value1', 'input2': 'value2', 'input3': 5}
+    mock_workflow = _setup_mock_workflow_in_service(
+        request,
+        inputs=dict((name, models.Parameter.wrap(name, val)) for name, val
+                    in wf_inputs.iteritems()))
+
+    with mock.patch('aria.orchestrator.workflow_runner.Engine'):
+        workflow_runner = _create_workflow_runner(
+            request, mock_workflow, inputs={'input2': 'overriding-value2', 'input3': 7})
+
+        assert len(workflow_runner.execution.inputs) == 3
+        # did not override input1 - expecting the default value from the workflow inputs
+        assert workflow_runner.execution.inputs['input1'].value == 'value1'
+        # overrode input2
+        assert workflow_runner.execution.inputs['input2'].value == 'overriding-value2'
+        # overrode input of integer type
+        assert workflow_runner.execution.inputs['input3'].value == 7
+
+
+def test_execution_inputs_undeclared_inputs(request):
+    mock_workflow = _setup_mock_workflow_in_service(request)
+
+    with pytest.raises(modeling_exceptions.UndeclaredInputsException):
+        _create_workflow_runner(request, mock_workflow, inputs={'undeclared_input': 'value'})
+
+
+def test_execution_inputs_missing_required_inputs(request):
+    mock_workflow = _setup_mock_workflow_in_service(
+        request, inputs={'required_input': models.Parameter.wrap('required_input', value=None)})
+
+    with pytest.raises(modeling_exceptions.MissingRequiredInputsException):
+        _create_workflow_runner(request, mock_workflow, inputs={})
+
+
+def test_execution_inputs_wrong_type_inputs(request):
+    mock_workflow = _setup_mock_workflow_in_service(
+        request, inputs={'input': models.Parameter.wrap('input', 'value')})
+
+    with pytest.raises(modeling_exceptions.InputsOfWrongTypeException):
+        _create_workflow_runner(request, mock_workflow, inputs={'input': 5})
+
+
+def test_execution_inputs_builtin_workflow_with_inputs(request):
+    # built-in workflows don't have inputs
+    with pytest.raises(modeling_exceptions.UndeclaredInputsException):
+        _create_workflow_runner(request, 'install', inputs={'undeclared_input': 'value'})
+
+
+def test_workflow_function_parameters(request, tmpdir):
+    # validating the workflow function is passed with the
+    # merged execution inputs, in dict form
+
+    # the workflow function parameters will be written to this file
+    output_path = str(tmpdir.join('output'))
+    wf_inputs = {'output_path': output_path, 'input1': 'value1', 'input2': 'value2', 'input3': 5}
+
+    mock_workflow = _setup_mock_workflow_in_service(
+        request, inputs=dict((name, models.Parameter.wrap(name, val)) for name, val
+                             in wf_inputs.iteritems()))
+
+    _create_workflow_runner(request, mock_workflow,
+                            inputs={'input2': 'overriding-value2', 'input3': 7})
+
+    with open(output_path) as f:
+        wf_call_kwargs = json.load(f)
+    assert len(wf_call_kwargs) == 3
+    assert wf_call_kwargs.get('input1') == 'value1'
+    assert wf_call_kwargs.get('input2') == 'overriding-value2'
+    assert wf_call_kwargs.get('input3') == 7
+
+
+@pytest.fixture
+def service(model):
+    # sets up a service in the storage
+    service_id = topology.create_simple_topology_two_nodes(model)
+    service = model.service.get(service_id)
+    return service
+
+
+def _setup_mock_workflow_in_service(request, inputs=None):
+    # sets up a mock workflow as part of the service, including uploading
+    # the workflow code to the service's dir on the resource storage
+    service = request.getfuncargvalue('service')
+    resource = request.getfuncargvalue('resource')
+
+    source = workflow_mocks.__file__
+    resource.service_template.upload(str(service.service_template.id), source)
+    mock_workflow_name = 'test_workflow'
+    workflow = models.Operation(
+        name=mock_workflow_name,
+        service=service,
+        implementation='workflow.mock_workflow',
+        inputs=inputs or {})
+    service.workflows[mock_workflow_name] = workflow
+    return mock_workflow_name
+
+
+def _create_workflow_runner(request, workflow_name, inputs=None, executor=None,
+                            task_max_attempts=None, task_retry_interval=None):
+    # helper method for instantiating a workflow runner
+    service_id = request.getfuncargvalue('service').id
+    model = request.getfuncargvalue('model')
+    resource = request.getfuncargvalue('resource')
+    plugin_manager = request.getfuncargvalue('plugin_manager')
+
+    # task configuration parameters can't be set to None, therefore only
+    # passing those if they've been set by the test
+    task_configuration_kwargs = dict()
+    if task_max_attempts is not None:
+        task_configuration_kwargs['task_max_attempts'] = task_max_attempts
+    if task_retry_interval is not None:
+        task_configuration_kwargs['task_retry_interval'] = task_retry_interval
+
+    return WorkflowRunner(
+        workflow_name=workflow_name,
+        service_id=service_id,
+        inputs=inputs or {},
+        executor=executor,
+        model_storage=model,
+        resource_storage=resource,
+        plugin_manager=plugin_manager,
+        **task_configuration_kwargs)
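
Taken together, the _create_workflow_runner helper above spells out the
WorkflowRunner constructor as exercised by these tests. A minimal usage sketch,
assuming that model, resource and plugin_manager are the storage and plugin-manager
objects provided by the fixtures imported at the top of the file, and that service 1
declares a workflow named 'my_workflow' (both illustrative, not part of the commit):

    from aria.orchestrator.workflow_runner import WorkflowRunner

    runner = WorkflowRunner(
        workflow_name='my_workflow',
        service_id=1,
        inputs={'input1': 'value1'},  # merged over the workflow's declared inputs
        executor=None,                # None falls back to the default ProcessExecutor
        model_storage=model,
        resource_storage=resource,
        plugin_manager=plugin_manager)

    runner.execute()    # run the workflow to completion
    # runner.cancel()   # or cancel a running execution instead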

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/workflows/api/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/api/test_task.py b/tests/orchestrator/workflows/api/test_task.py
index a705199..ab62361 100644
--- a/tests/orchestrator/workflows/api/test_task.py
+++ b/tests/orchestrator/workflows/api/test_task.py
@@ -44,17 +44,19 @@ class TestOperationTask(object):
         plugin = mock.models.create_plugin('test_plugin', '0.1')
         ctx.model.node.update(plugin)
 
+        inputs = {'test_input': True}
+
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
             operation_kwargs=dict(plugin=plugin,
-                                  implementation='op_path'))
+                                  implementation='op_path',
+                                  inputs=inputs),)
 
         node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
         node.interfaces[interface_name] = interface
         ctx.model.node.update(node)
-        inputs = {'test_input': True}
         max_attempts = 10
         retry_interval = 10
         ignore_failure = True
@@ -90,17 +92,19 @@ class TestOperationTask(object):
         plugin = mock.models.create_plugin('test_plugin', '0.1')
         ctx.model.plugin.update(plugin)
 
+        inputs = {'test_input': True}
+
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
             operation_kwargs=dict(plugin=plugin,
-                                  implementation='op_path')
+                                  implementation='op_path',
+                                  inputs=inputs)
         )
 
         relationship = ctx.model.relationship.list()[0]
         relationship.interfaces[interface.name] = interface
-        inputs = {'test_input': True}
         max_attempts = 10
         retry_interval = 10
 
@@ -133,17 +137,19 @@ class TestOperationTask(object):
         plugin = mock.models.create_plugin('test_plugin', '0.1')
         ctx.model.node.update(plugin)
 
+        inputs = {'test_input': True}
+
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
             operation_kwargs=dict(plugin=plugin,
-                                  implementation='op_path')
+                                  implementation='op_path',
+                                  inputs=inputs)
         )
 
         relationship = ctx.model.relationship.list()[0]
         relationship.interfaces[interface.name] = interface
-        inputs = {'test_input': True}
         max_attempts = 10
         retry_interval = 10
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index 0b48870..1a88f13 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -61,12 +61,18 @@ class BaseTest(object):
             retry_interval=None,
             ignore_failure=None):
         node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+
+        operation_kwargs = dict(implementation='{name}.{func.__name__}'.format(
+            name=__name__, func=func))
+        if inputs:
+            # the operation has to declare its inputs before they may be passed
+            operation_kwargs['inputs'] = inputs
+
         interface = mock.models.create_interface(
             node.service,
             'aria.interfaces.lifecycle',
             'create',
-            operation_kwargs=dict(implementation='{name}.{func.__name__}'.format(name=__name__,
-                                                                                 func=func))
+            operation_kwargs=operation_kwargs
         )
         node.interfaces[interface.name] = interface
         return api.task.OperationTask.for_node(
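
The hunk above builds operation_kwargs conditionally: inputs are attached only when
the test actually passes some, since an operation has to declare its inputs before a
task may pass them. A self-contained sketch of that construction (the
build_operation_kwargs helper is hypothetical):

    def build_operation_kwargs(module_name, func, inputs=None):
        operation_kwargs = dict(
            implementation='{name}.{func.__name__}'.format(name=module_name, func=func))
        if inputs:
            # only declare inputs when there are any to declare
            operation_kwargs['inputs'] = inputs
        return operation_kwargs

    assert 'inputs' not in build_operation_kwargs('tests', len)
    assert build_operation_kwargs('tests', len, {'x': 1})['inputs'] == {'x': 1}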

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
deleted file mode 100644
index 514bce9..0000000
--- a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from networkx import topological_sort, DiGraph
-
-from aria.orchestrator import context
-from aria.orchestrator.workflows import api, core
-
-from tests import mock
-from tests import storage
-
-
-def test_task_graph_into_execution_graph(tmpdir):
-    interface_name = 'Standard'
-    operation_name = 'create'
-    task_context = mock.context.simple(str(tmpdir))
-    node = task_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-    interface = mock.models.create_interface(
-        node.service,
-        interface_name,
-        operation_name,
-        operation_kwargs={'implementation': 'test'}
-    )
-    node.interfaces[interface.name] = interface
-    task_context.model.node.update(node)
-
-    def sub_workflow(name, **_):
-        return api.task_graph.TaskGraph(name)
-
-    with context.workflow.current.push(task_context):
-        test_task_graph = api.task.WorkflowTask(sub_workflow, name='test_task_graph')
-        simple_before_task = api.task.OperationTask.for_node(node=node,
-                                                             interface_name=interface_name,
-                                                             operation_name=operation_name)
-        simple_after_task = api.task.OperationTask.for_node(node=node,
-                                                            interface_name=interface_name,
-                                                            operation_name=operation_name)
-
-        inner_task_graph = api.task.WorkflowTask(sub_workflow, name='test_inner_task_graph')
-        inner_task = api.task.OperationTask.for_node(node=node,
-                                                     interface_name=interface_name,
-                                                     operation_name=operation_name)
-        inner_task_graph.add_tasks(inner_task)
-
-    test_task_graph.add_tasks(simple_before_task)
-    test_task_graph.add_tasks(simple_after_task)
-    test_task_graph.add_tasks(inner_task_graph)
-    test_task_graph.add_dependency(inner_task_graph, simple_before_task)
-    test_task_graph.add_dependency(simple_after_task, inner_task_graph)
-
-    # Direct check
-    execution_graph = DiGraph()
-    core.translation.build_execution_graph(task_graph=test_task_graph,
-                                           execution_graph=execution_graph)
-    execution_tasks = topological_sort(execution_graph)
-
-    assert len(execution_tasks) == 7
-
-    expected_tasks_names = [
-        '{0}-Start'.format(test_task_graph.id),
-        simple_before_task.id,
-        '{0}-Start'.format(inner_task_graph.id),
-        inner_task.id,
-        '{0}-End'.format(inner_task_graph.id),
-        simple_after_task.id,
-        '{0}-End'.format(test_task_graph.id)
-    ]
-
-    assert expected_tasks_names == execution_tasks
-
-    assert isinstance(_get_task_by_name(execution_tasks[0], execution_graph),
-                      core.task.StartWorkflowTask)
-
-    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[1], execution_graph),
-                                  simple_before_task)
-    assert isinstance(_get_task_by_name(execution_tasks[2], execution_graph),
-                      core.task.StartSubWorkflowTask)
-
-    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[3], execution_graph),
-                                  inner_task)
-    assert isinstance(_get_task_by_name(execution_tasks[4], execution_graph),
-                      core.task.EndSubWorkflowTask)
-
-    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[5], execution_graph),
-                                  simple_after_task)
-    assert isinstance(_get_task_by_name(execution_tasks[6], execution_graph),
-                      core.task.EndWorkflowTask)
-    storage.release_sqlite_storage(task_context.model)
-
-
-def _assert_execution_is_api_task(execution_task, api_task):
-    assert execution_task.id == api_task.id
-    assert execution_task.name == api_task.name
-    assert execution_task.implementation == api_task.implementation
-    assert execution_task.actor == api_task.actor
-    assert execution_task.inputs == api_task.inputs
-
-
-def _get_task_by_name(task_name, graph):
-    return graph.node[task_name]['task']

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
new file mode 100644
index 0000000..514bce9
--- /dev/null
+++ b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
@@ -0,0 +1,112 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from networkx import topological_sort, DiGraph
+
+from aria.orchestrator import context
+from aria.orchestrator.workflows import api, core
+
+from tests import mock
+from tests import storage
+
+
+def test_task_graph_into_execution_graph(tmpdir):
+    interface_name = 'Standard'
+    operation_name = 'create'
+    task_context = mock.context.simple(str(tmpdir))
+    node = task_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    interface = mock.models.create_interface(
+        node.service,
+        interface_name,
+        operation_name,
+        operation_kwargs={'implementation': 'test'}
+    )
+    node.interfaces[interface.name] = interface
+    task_context.model.node.update(node)
+
+    def sub_workflow(name, **_):
+        return api.task_graph.TaskGraph(name)
+
+    with context.workflow.current.push(task_context):
+        test_task_graph = api.task.WorkflowTask(sub_workflow, name='test_task_graph')
+        simple_before_task = api.task.OperationTask.for_node(node=node,
+                                                             interface_name=interface_name,
+                                                             operation_name=operation_name)
+        simple_after_task = api.task.OperationTask.for_node(node=node,
+                                                            interface_name=interface_name,
+                                                            operation_name=operation_name)
+
+        inner_task_graph = api.task.WorkflowTask(sub_workflow, name='test_inner_task_graph')
+        inner_task = api.task.OperationTask.for_node(node=node,
+                                                     interface_name=interface_name,
+                                                     operation_name=operation_name)
+        inner_task_graph.add_tasks(inner_task)
+
+    test_task_graph.add_tasks(simple_before_task)
+    test_task_graph.add_tasks(simple_after_task)
+    test_task_graph.add_tasks(inner_task_graph)
+    test_task_graph.add_dependency(inner_task_graph, simple_before_task)
+    test_task_graph.add_dependency(simple_after_task, inner_task_graph)
+
+    # Direct check
+    execution_graph = DiGraph()
+    core.translation.build_execution_graph(task_graph=test_task_graph,
+                                           execution_graph=execution_graph)
+    execution_tasks = topological_sort(execution_graph)
+
+    assert len(execution_tasks) == 7
+
+    expected_tasks_names = [
+        '{0}-Start'.format(test_task_graph.id),
+        simple_before_task.id,
+        '{0}-Start'.format(inner_task_graph.id),
+        inner_task.id,
+        '{0}-End'.format(inner_task_graph.id),
+        simple_after_task.id,
+        '{0}-End'.format(test_task_graph.id)
+    ]
+
+    assert expected_tasks_names == execution_tasks
+
+    assert isinstance(_get_task_by_name(execution_tasks[0], execution_graph),
+                      core.task.StartWorkflowTask)
+
+    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[1], execution_graph),
+                                  simple_before_task)
+    assert isinstance(_get_task_by_name(execution_tasks[2], execution_graph),
+                      core.task.StartSubWorkflowTask)
+
+    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[3], execution_graph),
+                                  inner_task)
+    assert isinstance(_get_task_by_name(execution_tasks[4], execution_graph),
+                      core.task.EndSubWorkflowTask)
+
+    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[5], execution_graph),
+                                  simple_after_task)
+    assert isinstance(_get_task_by_name(execution_tasks[6], execution_graph),
+                      core.task.EndWorkflowTask)
+    storage.release_sqlite_storage(task_context.model)
+
+
+def _assert_execution_is_api_task(execution_task, api_task):
+    assert execution_task.id == api_task.id
+    assert execution_task.name == api_task.name
+    assert execution_task.implementation == api_task.implementation
+    assert execution_task.actor == api_task.actor
+    assert execution_task.inputs == api_task.inputs
+
+
+def _get_task_by_name(task_name, graph):
+    return graph.node[task_name]['task']

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/workflows/executor/test_process_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor.py b/tests/orchestrator/workflows/executor/test_process_executor.py
index 502c9fd..839b9f1 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor.py
@@ -21,19 +21,18 @@ from contextlib import contextmanager
 
 import pytest
 
-from aria import application_model_storage
 from aria.modeling import models as aria_models
-from aria.storage import sql_mapi
-from aria.orchestrator import (
-    events,
-    plugin
-)
+from aria.orchestrator import events
 from aria.utils.plugin import create as create_plugin
 from aria.orchestrator.workflows.executor import process
 
-
 import tests.storage
 import tests.resources
+from tests.fixtures import (  # pylint: disable=unused-import
+    plugins_dir,
+    plugin_manager,
+    fs_model as model
+)
 
 
 class TestProcessExecutor(object):
@@ -75,27 +74,6 @@ class TestProcessExecutor(object):
 
 
 @pytest.fixture
-def model(tmpdir):
-    result = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
-                                       initiator_kwargs=dict(base_dir=str(tmpdir)),
-                                       initiator=sql_mapi.init_storage)
-    yield result
-    tests.storage.release_sqlite_storage(result)
-
-
-@pytest.fixture
-def plugins_dir(tmpdir):
-    result = tmpdir.join('plugins')
-    result.mkdir()
-    return str(result)
-
-
-@pytest.fixture
-def plugin_manager(model, plugins_dir):
-    return plugin.PluginManager(model=model, plugins_dir=plugins_dir)
-
-
-@pytest.fixture
 def executor(plugin_manager):
     result = process.ProcessExecutor(plugin_manager=plugin_manager)
     yield result

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
index 6d0eb5b..88e7ae0 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
@@ -90,7 +90,8 @@ def _test(context, executor, lock_files, func, expected_failure):
         node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation='{0}.{1}'.format(__name__, func.__name__))
+        operation_kwargs=dict(implementation='{0}.{1}'.format(__name__, func.__name__),
+                              inputs=inputs)
     )
     node.interfaces[interface.name] = interface
     context.model.node.update(node)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/workflows/executor/test_process_executor_extension.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_extension.py b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
index 0988fae..7ae337d 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_extension.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
@@ -42,7 +42,8 @@ def test_decorate_extension(context, executor):
             interface_name,
             operation_name,
             operation_kwargs=dict(implementation='{0}.{1}'.format(__name__,
-                                                                  _mock_operation.__name__))
+                                                                  _mock_operation.__name__),
+                                  inputs=inputs)
         )
         node.interfaces[interface.name] = interface
         task = api.task.OperationTask.for_node(node=node,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
index 5512189..3a8c54b 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
@@ -90,17 +90,19 @@ def _run_workflow(context, executor, op_func, inputs=None):
         node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
         interface_name = 'test_interface'
         operation_name = 'operation'
+        wf_inputs = inputs or {}
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
-            operation_kwargs=dict(implementation=_operation_mapping(op_func))
+            operation_kwargs=dict(implementation=_operation_mapping(op_func),
+                                  inputs=wf_inputs)
         )
         node.interfaces[interface.name] = interface
         task = api.task.OperationTask.for_node(node=node,
                                                interface_name=interface_name,
                                                operation_name=operation_name,
-                                               inputs=inputs or {})
+                                               inputs=wf_inputs)
         graph.add_tasks(task)
         return graph
     graph = mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/parser/service_templates.py
----------------------------------------------------------------------
diff --git a/tests/parser/service_templates.py b/tests/parser/service_templates.py
index a07fba8..a8fde14 100644
--- a/tests/parser/service_templates.py
+++ b/tests/parser/service_templates.py
@@ -13,6 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import os
+
 from aria.utils.caching import cachedmethod
 
 from .utils import (get_example_uri, get_test_uri, create_context, create_consumer)
@@ -23,7 +25,9 @@ def consume_use_case(use_case_name, consumer_class_name='instance', cache=True):
     uri = get_example_uri('tosca-simple-1.0', 'use-cases', use_case_name,
                           '{0}.yaml'.format(use_case_name))
     context = create_context(uri)
-    #context.args.append('--inputs=' + get_example_uri('node-cellar', 'inputs.yaml'))
+    inputs_file = get_example_uri('tosca-simple-1.0', 'use-cases', use_case_name, 'inputs.yaml')
+    if os.path.isfile(inputs_file):
+        context.args.append('--inputs={0}'.format(inputs_file))
     consumer, dumper = create_consumer(context, consumer_class_name)
     consumer.consume()
     context.validation.dump_issues()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/parser/test_tosca_simple_v1_0.py
----------------------------------------------------------------------
diff --git a/tests/parser/test_tosca_simple_v1_0.py b/tests/parser/test_tosca_simple_v1_0.py
new file mode 100644
index 0000000..a583db5
--- /dev/null
+++ b/tests/parser/test_tosca_simple_v1_0.py
@@ -0,0 +1,112 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .service_templates import (consume_use_case, consume_node_cellar)
+
+
+# Use Cases
+
+def test_use_case_compute_1():
+    consume_use_case('compute-1', 'instance')
+
+
+def test_use_case_software_component_1():
+    consume_use_case('software-component-1', 'instance')
+
+
+def test_use_case_block_storage_1():
+    consume_use_case('block-storage-1', 'instance')
+
+
+def test_use_case_block_storage_2():
+    consume_use_case('block-storage-2', 'instance')
+
+
+def test_use_case_block_storage_3():
+    consume_use_case('block-storage-3', 'instance')
+
+
+def test_use_case_block_storage_4():
+    consume_use_case('block-storage-4', 'instance')
+
+
+def test_use_case_block_storage_5():
+    consume_use_case('block-storage-5', 'instance')
+
+
+def test_use_case_block_storage_6():
+    consume_use_case('block-storage-6', 'instance')
+
+
+def test_use_case_object_storage_1():
+    consume_use_case('object-storage-1', 'instance')
+
+
+def test_use_case_network_1():
+    consume_use_case('network-1', 'instance')
+
+
+def test_use_case_network_2():
+    consume_use_case('network-2', 'instance')
+
+
+def test_use_case_network_3():
+    consume_use_case('network-3', 'instance')
+
+
+def test_use_case_network_4():
+    consume_use_case('network-4', 'instance')
+
+
+def test_use_case_webserver_dbms_1():
+    consume_use_case('webserver-dbms-1', 'template')
+
+
+def test_use_case_webserver_dbms_2():
+    consume_use_case('webserver-dbms-2', 'instance')
+
+
+def test_use_case_multi_tier_1():
+    consume_use_case('multi-tier-1', 'instance')
+
+
+def test_use_case_container_1():
+    consume_use_case('container-1', 'template')
+
+
+# NodeCellar
+
+def test_node_cellar_validation():
+    consume_node_cellar('validate')
+
+
+def test_node_cellar_validation_no_cache():
+    consume_node_cellar('validate', False)
+
+
+def test_node_cellar_presentation():
+    consume_node_cellar('presentation')
+
+
+def test_node_cellar_model():
+    consume_node_cellar('template')
+
+
+def test_node_cellar_types():
+    consume_node_cellar('types')
+
+
+def test_node_cellar_instance():
+    consume_node_cellar('instance')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/utils/test_plugin.py
----------------------------------------------------------------------
diff --git a/tests/utils/test_plugin.py b/tests/utils/test_plugin.py
index 09885ef..3350247 100644
--- a/tests/utils/test_plugin.py
+++ b/tests/utils/test_plugin.py
@@ -17,13 +17,14 @@ import os
 
 import pytest
 
-from aria import application_model_storage
 from aria.orchestrator import exceptions
-from aria.orchestrator import plugin
 from aria.utils.plugin import create as create_plugin
-from aria.storage import sql_mapi
 
-from .. import storage
+from ..fixtures import (  # pylint: disable=unused-import
+    plugins_dir,
+    plugin_manager,
+    inmemory_model as model
+)
 
 
 PACKAGE_NAME = 'mock-plugin'
@@ -48,26 +49,6 @@ class TestPluginManager(object):
 
 
 @pytest.fixture
-def model():
-    model = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
-                                      initiator=storage.init_inmemory_model_storage)
-    yield model
-    storage.release_sqlite_storage(model)
-
-
-@pytest.fixture
-def plugins_dir(tmpdir):
-    result = tmpdir.join('plugins')
-    result.mkdir()
-    return str(result)
-
-
-@pytest.fixture
-def plugin_manager(model, plugins_dir):
-    return plugin.PluginManager(model=model, plugins_dir=plugins_dir)
-
-
-@pytest.fixture
 def mock_plugin(tmpdir):
     source_dir = tmpdir.join('mock_plugin')
     source_dir.mkdir()
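
Both this file and test_process_executor.py above now share their fixtures through a
common tests.fixtures module instead of redefining them locally. Importing a fixture
function into a test module is enough for pytest to discover it there, which is why
the otherwise "unused" imports are kept (with pylint silenced). A minimal sketch of
the mechanism, with hypothetical module names:

    # fixtures.py -- shared fixture definitions
    import pytest

    @pytest.fixture
    def plugins_dir(tmpdir):
        result = tmpdir.join('plugins')
        result.mkdir()
        return str(result)

    # test_example.py -- importing the fixture makes pytest inject it here
    import os
    from fixtures import plugins_dir  # pylint: disable=unused-import

    def test_plugins_dir_exists(plugins_dir):
        assert os.path.isdir(plugins_dir)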

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/utils/test_threading.py
----------------------------------------------------------------------
diff --git a/tests/utils/test_threading.py b/tests/utils/test_threading.py
new file mode 100644
index 0000000..d24661f
--- /dev/null
+++ b/tests/utils/test_threading.py
@@ -0,0 +1,33 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import pytest
+
+from aria.utils import threading
+
+
+def test_exception_raised_from_thread():
+
+    def error_raising_func():
+        raise ValueError('This is an error')
+
+    thread = threading.ExceptionThread(target=error_raising_func)
+    thread.start()
+    thread.join()
+
+    assert thread.is_error()
+    with pytest.raises(ValueError):
+        thread.raise_error_if_exists()


[4/9] incubator-ariatosca git commit: ARIA-92 Automatic operation task configuration

Posted by ra...@apache.org.
ARIA-92 Automatic operation task configuration


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/a7e7826e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/a7e7826e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/a7e7826e

Branch: refs/heads/ARIA-48-aria-cli
Commit: a7e7826ed2d8b940b9e74ca15cb0284c39b01001
Parents: 8e1d059
Author: Tal Liron <ta...@gmail.com>
Authored: Fri Mar 24 16:33:11 2017 -0500
Committer: Tal Liron <ta...@gmail.com>
Committed: Fri Apr 14 13:54:21 2017 -0500

----------------------------------------------------------------------
 aria/cli/dry.py                                 |   9 +-
 aria/modeling/models.py                         |  11 +-
 aria/modeling/orchestration.py                  |  92 +++++----
 aria/modeling/service_common.py                 |  59 +-----
 aria/modeling/service_instance.py               | 199 +++++++++++++------
 aria/modeling/service_template.py               | 133 ++++++++++++-
 aria/orchestrator/execution_plugin/__init__.py  |   2 +
 aria/orchestrator/execution_plugin/common.py    |   2 +-
 .../execution_plugin/instantiation.py           | 191 ++++++++++++++++++
 .../execution_plugin/ssh/operations.py          |   4 +-
 aria/orchestrator/workflows/api/task.py         | 131 +++++-------
 aria/orchestrator/workflows/api/task_graph.py   |   2 +-
 aria/orchestrator/workflows/builtin/utils.py    |   6 +-
 aria/orchestrator/workflows/core/engine.py      |   4 +-
 .../workflows/core/events_handler.py            |   8 +-
 aria/orchestrator/workflows/core/task.py        |   9 +-
 aria/orchestrator/workflows/exceptions.py       |  10 +-
 aria/orchestrator/workflows/executor/process.py |   2 +-
 aria/parser/consumption/modeling.py             |  22 +-
 aria/storage/instrumentation.py                 |   4 +-
 .../custom_types/elasticsearch.yaml             |   2 +
 .../multi-tier-1/custom_types/kibana.yaml       |   2 +
 .../multi-tier-1/custom_types/logstash.yaml     |   2 +
 .../paypalpizzastore_nodejs_app.yaml            |   2 +-
 .../webserver-dbms-2/webserver-dbms-2.yaml      |   6 +-
 .../profiles/aria-1.0/aria-1.0.yaml             |  10 +
 .../profiles/tosca-simple-1.0/capabilities.yaml |   2 +
 .../profiles/tosca-simple-1.0/interfaces.yaml   |  16 ++
 .../profiles/tosca-simple-1.0/nodes.yaml        |   1 +
 .../simple_v1_0/assignments.py                  |  49 +++--
 .../simple_v1_0/modeling/__init__.py            | 104 +++++++---
 .../simple_v1_0/modeling/capabilities.py        |   5 +
 tests/modeling/test_models.py                   |  65 +++---
 tests/orchestrator/context/test_operation.py    |   3 +-
 tests/orchestrator/context/test_serialize.py    |   3 +-
 tests/orchestrator/execution_plugin/test_ssh.py |  17 +-
 tests/orchestrator/workflows/api/test_task.py   |  24 +--
 .../orchestrator/workflows/builtin/__init__.py  |   3 -
 .../workflows/builtin/test_execute_operation.py |   3 +-
 tests/orchestrator/workflows/core/test_task.py  |  29 +--
 .../test_task_graph_into_exececution_graph.py   |   3 +-
 .../workflows/executor/test_executor.py         |   1 +
 .../workflows/executor/test_process_executor.py |   1 +
 .../node-cellar/node-cellar.yaml                |  36 +++-
 .../node-cellar/types/nginx.yaml                |  15 +-
 45 files changed, 886 insertions(+), 418 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/cli/dry.py
----------------------------------------------------------------------
diff --git a/aria/cli/dry.py b/aria/cli/dry.py
index 098638f..fc6c0c5 100644
--- a/aria/cli/dry.py
+++ b/aria/cli/dry.py
@@ -43,14 +43,19 @@ def convert_to_dry(service):
                 for oper in interface.operations.itervalues():
                     convert_operation_to_dry(oper)
 
+    for group in service.groups.itervalues():
+        for interface in group.interfaces.itervalues():
+            for oper in interface.operations.itervalues():
+                convert_operation_to_dry(oper)
+
 
 def convert_operation_to_dry(oper):
     """
     Converts a single :class:`Operation` to run dryly.
     """
 
-    plugin = oper.plugin_specification.name \
-        if oper.plugin_specification is not None else None
+    plugin = oper.plugin.name \
+        if oper.plugin is not None else None
     if oper.inputs is None:
         oper.inputs = OrderedDict()
     oper.inputs['_implementation'] = models.Parameter(name='_implementation',
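
The change above extends the dry-run conversion from node interfaces to group
interfaces as well. A minimal, self-contained sketch of the traversal pattern
(plain dicts rather than the ARIA models, so all names here are illustrative
only):

    def visit_operations(service, visit):
        # Visit every operation on node *and* group interfaces
        holders = service['nodes'].values() + service['groups'].values()
        for holder in holders:
            for interface in holder['interfaces'].values():
                for name, oper in interface['operations'].items():
                    visit(name, oper)

    service = {
        'nodes': {'web': {'interfaces': {'Standard': {'operations': {'create': {}}}}}},
        'groups': {'db': {'interfaces': {'Standard': {'operations': {'scale': {}}}}}},
    }
    visit_operations(service, lambda name, oper: None)  # visits 'create' and 'scale'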

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/modeling/models.py
----------------------------------------------------------------------
diff --git a/aria/modeling/models.py b/aria/modeling/models.py
index a01783b..170efb2 100644
--- a/aria/modeling/models.py
+++ b/aria/modeling/models.py
@@ -48,6 +48,7 @@ __all__ = (
     'InterfaceTemplate',
     'OperationTemplate',
     'ArtifactTemplate',
+    'PluginSpecification',
 
     # Service instance models
     'Service',
@@ -71,7 +72,6 @@ __all__ = (
     'Parameter',
     'Type',
     'Metadata',
-    'PluginSpecification',
 
     # Orchestration models
     'Execution',
@@ -131,6 +131,9 @@ class OperationTemplate(aria_declarative_base, service_template.OperationTemplat
 class ArtifactTemplate(aria_declarative_base, service_template.ArtifactTemplateBase):
     pass
 
+
+class PluginSpecification(aria_declarative_base, service_template.PluginSpecificationBase):
+    pass
+
 # endregion
 
 
@@ -211,10 +214,6 @@ class Type(aria_declarative_base, service_common.TypeBase):
 class Metadata(aria_declarative_base, service_common.MetadataBase):
     pass
 
-
-class PluginSpecification(aria_declarative_base, service_common.PluginSpecificationBase):
-    pass
-
 # endregion
 
 
@@ -253,6 +252,7 @@ models_to_register = [
     InterfaceTemplate,
     OperationTemplate,
     ArtifactTemplate,
+    PluginSpecification,
 
     # Service instance models
     Service,
@@ -276,7 +276,6 @@ models_to_register = [
     Parameter,
     Type,
     Metadata,
-    PluginSpecification,
 
     # Orchestration models
     Execution,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/modeling/orchestration.py
----------------------------------------------------------------------
diff --git a/aria/modeling/orchestration.py b/aria/modeling/orchestration.py
index f0bd4b2..b32a8a1 100644
--- a/aria/modeling/orchestration.py
+++ b/aria/modeling/orchestration.py
@@ -67,13 +67,13 @@ class ExecutionBase(ModelMixin):
     CANCELLING = 'cancelling'
     FORCE_CANCELLING = 'force_cancelling'
 
-    STATES = [TERMINATED, FAILED, CANCELLED, PENDING, STARTED, CANCELLING, FORCE_CANCELLING]
-    END_STATES = [TERMINATED, FAILED, CANCELLED]
+    STATES = (TERMINATED, FAILED, CANCELLED, PENDING, STARTED, CANCELLING, FORCE_CANCELLING)
+    END_STATES = (TERMINATED, FAILED, CANCELLED)
 
     VALID_TRANSITIONS = {
-        PENDING: [STARTED, CANCELLED],
-        STARTED: END_STATES + [CANCELLING],
-        CANCELLING: END_STATES + [FORCE_CANCELLING]
+        PENDING: (STARTED, CANCELLED),
+        STARTED: END_STATES + (CANCELLING,),
+        CANCELLING: END_STATES + (FORCE_CANCELLING,)
     }
 
     @orm.validates('status')
@@ -219,7 +219,44 @@ class PluginBase(ModelMixin):
 
 class TaskBase(ModelMixin):
     """
-    A Model which represents an task
+    Represents the smallest unit of stateful execution in ARIA. The task state includes inputs
+    and outputs, as well as an atomic status, ensuring that the task can only be running once at
+    any given time.
+
+    Tasks may be "one shot" or may be configured to run repeatedly in the case of failure.
+
+    Tasks are often based on :class:`Operation`, and thus act on either a :class:`Node` or a
+    :class:`Relationship`; however, this is not required.
+
+    :ivar node: The node actor (optional)
+    :vartype node: :class:`Node`
+    :ivar relationship: The relationship actor (optional)
+    :vartype relationship: :class:`Relationship`
+    :ivar plugin: The implementing plugin (set to None for default execution plugin)
+    :vartype plugin: :class:`Plugin`
+    :ivar inputs: Parameters that can be used by this task
+    :vartype inputs: {basestring: :class:`Parameter`}
+    :ivar implementation: Python path to an ``@operation`` function
+    :vartype implementation: basestring
+    :ivar max_attempts: Maximum number of retries allowed in case of failure
+    :vartype max_attempts: int
+    :ivar retry_interval: Interval between retries (in seconds)
+    :vartype retry_interval: int
+    :ivar ignore_failure: Set to True to ignore failures
+    :vartype ignore_failure: bool
+    :ivar due_at: Timestamp to start the task
+    :vartype due_at: datetime
+    :ivar execution: Assigned execution
+    :vartype execution: :class:`Execution`
+    :ivar status: Current atomic status ('pending', 'retrying', 'sent', 'started', 'success',
+                  'failed')
+    :vartype status: basestring
+    :ivar started_at: Timestamp for when task started
+    :vartype started_at: datetime
+    :ivar ended_at: Timestamp for when task ended
+    :vartype ended_at: datetime
+    :ivar retry_count: How many retries occurred
+    :vartype retry_count: int
     """
 
     __tablename__ = 'task'
@@ -227,7 +264,7 @@ class TaskBase(ModelMixin):
     __private_fields__ = ['node_fk',
                           'relationship_fk',
                           'plugin_fk',
-                          'execution_fk',
+                          'execution_fk',
                           'node_name',
                           'relationship_name',
                           'execution_name']
@@ -247,11 +284,6 @@ class TaskBase(ModelMixin):
         FAILED,
     )
 
-    RUNS_ON_SOURCE = 'source'
-    RUNS_ON_TARGET = 'target'
-    RUNS_ON_NODE = 'node'
-    RUNS_ON = (RUNS_ON_NODE, RUNS_ON_SOURCE, RUNS_ON_TARGET)
-
     INFINITE_RETRIES = -1
 
     @declared_attr
@@ -278,37 +310,25 @@ class TaskBase(ModelMixin):
     def inputs(cls):
         return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
 
-    status = Column(Enum(*STATES, name='status'), default=PENDING)
+    implementation = Column(String)
+    max_attempts = Column(Integer, default=1)
+    retry_interval = Column(Float, default=0)
+    ignore_failure = Column(Boolean, default=False)
 
+    # State
+    status = Column(Enum(*STATES, name='status'), default=PENDING)
     due_at = Column(DateTime, nullable=False, index=True, default=datetime.utcnow())
     started_at = Column(DateTime, default=None)
     ended_at = Column(DateTime, default=None)
-    max_attempts = Column(Integer, default=1)
     retry_count = Column(Integer, default=0)
-    retry_interval = Column(Float, default=0)
-    ignore_failure = Column(Boolean, default=False)
-
-    # Operation specific fields
-    implementation = Column(String)
-    _runs_on = Column(Enum(*RUNS_ON, name='runs_on'), name='runs_on')
 
     @property
     def has_ended(self):
-        return self.status in [self.SUCCESS, self.FAILED]
+        return self.status in (self.SUCCESS, self.FAILED)
 
     @property
     def is_waiting(self):
-        return self.status in [self.PENDING, self.RETRYING]
-
-    @property
-    def runs_on(self):
-        if self._runs_on == self.RUNS_ON_NODE:
-            return self.node
-        elif self._runs_on == self.RUNS_ON_SOURCE:
-            return self.relationship.source_node  # pylint: disable=no-member
-        elif self._runs_on == self.RUNS_ON_TARGET:
-            return self.relationship.target_node  # pylint: disable=no-member
-        return None
+        return self.status in (self.PENDING, self.RETRYING)
 
     @property
     def actor(self):
@@ -366,12 +386,12 @@ class TaskBase(ModelMixin):
     # endregion
 
     @classmethod
-    def for_node(cls, instance, runs_on, **kwargs):
-        return cls(node=instance, _runs_on=runs_on, **kwargs)
+    def for_node(cls, actor, **kwargs):
+        return cls(node=actor, **kwargs)
 
     @classmethod
-    def for_relationship(cls, instance, runs_on, **kwargs):
-        return cls(relationship=instance, _runs_on=runs_on, **kwargs)
+    def for_relationship(cls, actor, **kwargs):
+        return cls(relationship=actor, **kwargs)
 
     @staticmethod
     def abort(message=None):
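
The execution status fields above enforce a transition table via the
``@orm.validates('status')`` hook. A standalone sketch of the idiom (mirroring
the model's names, but not the ARIA implementation itself):

    TERMINATED, FAILED, CANCELLED = 'terminated', 'failed', 'cancelled'
    PENDING, STARTED, CANCELLING, FORCE_CANCELLING = \
        'pending', 'started', 'cancelling', 'force_cancelling'
    END_STATES = (TERMINATED, FAILED, CANCELLED)
    VALID_TRANSITIONS = {
        PENDING: (STARTED, CANCELLED),
        STARTED: END_STATES + (CANCELLING,),
        CANCELLING: END_STATES + (FORCE_CANCELLING,),
    }

    def validate_transition(current, new):
        # Anything not listed for the current state is rejected
        if new != current and new not in VALID_TRANSITIONS.get(current, ()):
            raise ValueError('invalid transition: {0} -> {1}'.format(current, new))

    validate_transition(PENDING, STARTED)   # allowed
    # validate_transition(PENDING, FAILED)  # would raise ValueError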

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/modeling/service_common.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_common.py b/aria/modeling/service_common.py
index 48c3170..1fcbc5f 100644
--- a/aria/modeling/service_common.py
+++ b/aria/modeling/service_common.py
@@ -101,7 +101,8 @@ class ParameterBase(TemplateModelMixin):
 
         from . import models
         return models.Parameter(name=name,
-                                type_name=formatting.full_type_name(value),
+                                type_name=formatting.full_type_name(value)
+                                if value is not None else None,
                                 value=value,
                                 description=description)
 
@@ -248,59 +249,3 @@ class MetadataBase(TemplateModelMixin):
         console.puts('{0}: {1}'.format(
             context.style.property(self.name),
             context.style.literal(self.value)))
-
-
-class PluginSpecificationBase(TemplateModelMixin):
-    """
-    Plugin specification.
-
-    :ivar name: Required plugin name
-    :vartype name: basestring
-    :ivar version: Minimum plugin version
-    :vartype version: basestring
-    """
-
-    __tablename__ = 'plugin_specification'
-
-    __private_fields__ = ['service_template_fk']
-
-    version = Column(Text, nullable=True)
-
-    # region foreign keys
-
-    @declared_attr
-    def service_template_fk(cls):
-        """For ServiceTemplate one-to-many to PluginSpecification"""
-        return relationship.foreign_key('service_template', nullable=True)
-
-    # endregion
-
-    @declared_attr
-    def service_template(cls):
-        return relationship.many_to_one(cls, 'service_template')
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('version', self.version)))
-
-    def coerce_values(self, container, report_issues):
-        pass
-
-    def instantiate(self, container):
-        from . import models
-        return models.PluginSpecification(name=self.name,
-                                          version=self.version)
-
-    def find_plugin(self, plugins):
-        matching_plugins = []
-        for plugin in plugins:
-            # TODO: we need to use a version comparator
-            if (plugin.name == self.name) and \
-                ((self.version is None) or (plugin.package_version >= self.version)):
-                matching_plugins.append(plugin)
-        if matching_plugins:
-            # Return highest version of plugin
-            return sorted(matching_plugins, key=lambda plugin: plugin.package_version)[-1]
-        return None
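
The guard added to ``Parameter.wrap`` above avoids deriving a type name from
``None`` values. A plain-function sketch of the behavior (not the actual model
code):

    def full_type_name(value):
        return type(value).__module__ + '.' + type(value).__name__

    def wrap(name, value):
        type_name = full_type_name(value) if value is not None else None
        return {'name': name, 'type_name': type_name, 'value': value}

    assert wrap('x', 1)['type_name'].endswith('int')
    assert wrap('x', None)['type_name'] is None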

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/modeling/service_instance.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py
index d15aa7e..40d43fa 100644
--- a/aria/modeling/service_instance.py
+++ b/aria/modeling/service_instance.py
@@ -20,12 +20,14 @@ from sqlalchemy import (
     Text,
     Integer,
     Enum,
+    Boolean
 )
 from sqlalchemy import DateTime
 from sqlalchemy.ext.associationproxy import association_proxy
 from sqlalchemy.ext.declarative import declared_attr
 
 from .mixins import InstanceModelMixin
+from ..orchestrator import execution_plugin
 from ..parser import validation
 from ..parser.consumption import ConsumptionContext
 from ..utils import collections, formatting, console
@@ -65,8 +67,8 @@ class ServiceBase(InstanceModelMixin):
     :vartype outputs: {basestring: :class:`Parameter`}
     :ivar workflows: Custom workflows that can be performed on the service
     :vartype workflows: {basestring: :class:`Operation`}
-    :ivar plugin_specifications: Plugins used by the service
-    :vartype plugin_specifications: {basestring: :class:`PluginSpecification`}
+    :ivar plugins: Plugins used by the service
+    :vartype plugins: {basestring: :class:`Plugin`}
     :ivar created_at: Creation timestamp
     :vartype created_at: :class:`datetime.datetime`
     :ivar updated_at: Update timestamp
@@ -176,13 +178,12 @@ class ServiceBase(InstanceModelMixin):
         return relationship.many_to_many(cls, 'parameter', prefix='outputs', dict_key='name')
 
     @declared_attr
-    def plugin_specifications(cls):
-        return relationship.many_to_many(cls, 'plugin_specification', dict_key='name')
+    def plugins(cls):
+        return relationship.many_to_many(cls, 'plugin', dict_key='name')
 
     # endregion
 
     description = Column(Text)
-
     created_at = Column(DateTime, nullable=False, index=True)
     updated_at = Column(DateTime)
 
@@ -207,6 +208,18 @@ class ServiceBase(InstanceModelMixin):
                 satisfied = False
         return satisfied
 
+    def find_hosts(self):
+        for node in self.nodes.itervalues():
+            node.find_host()
+
+    def configure_operations(self):
+        for node in self.nodes.itervalues():
+            node.configure_operations()
+        for group in self.groups.itervalues():
+            group.configure_operations()
+        for operation in self.workflows.itervalues():
+            operation.configure()
+
     def is_node_a_target(self, target_node):
         for node in self.nodes.itervalues():
             if self._is_node_a_target(node, target_node):
@@ -215,11 +228,11 @@ class ServiceBase(InstanceModelMixin):
 
     def _is_node_a_target(self, source_node, target_node):
         if source_node.outbound_relationships:
-            for the_relationship in source_node.outbound_relationships:
-                if the_relationship.target_node.name == target_node.name:
+            for relationship_model in source_node.outbound_relationships:
+                if relationship_model.target_node.name == target_node.name:
                     return True
                 else:
-                    node = the_relationship.target_node
+                    node = relationship_model.target_node
                     if node is not None:
                         if self._is_node_a_target(node, target_node):
                             return True
@@ -282,38 +295,25 @@ class ServiceBase(InstanceModelMixin):
             if not self.is_node_a_target(node):
                 self._dump_graph_node(node)
 
-    def _dump_graph_node(self, node):
+    def _dump_graph_node(self, node, capability=None):
         context = ConsumptionContext.get_thread_local()
         console.puts(context.style.node(node.name))
+        if capability is not None:
+            console.puts('{0} ({1})'.format(context.style.property(capability.name),
+                                            context.style.type(capability.type.name)))
         if node.outbound_relationships:
             with context.style.indent:
-                for the_relationship in node.outbound_relationships:
-                    relationship_name = context.style.property(the_relationship.name)
-                    if the_relationship.type is not None:
-                        relationship_type = context.style.type(the_relationship.type.name)
+                for relationship_model in node.outbound_relationships:
+                    relationship_name = context.style.property(relationship_model.name)
+                    if relationship_model.type is not None:
+                        console.puts('-> {0} ({1})'.format(relationship_name,
+                                                           context.style.type(
+                                                               relationship_model.type.name)))
                     else:
-                        relationship_type = None
-                    if the_relationship.target_capability is not None:
-                        capability_name = \
-                            context.style.node(the_relationship.target_capability.name)
-                    else:
-                        capability_name = None
-                    if capability_name is not None:
-                        if relationship_type is not None:
-                            console.puts('-> {0} ({1}) {2}'.format(relationship_name,
-                                                                   relationship_type,
-                                                                   capability_name))
-                        else:
-                            console.puts('-> {0} {1}'.format(relationship_name, capability_name))
-                    else:
-                        if relationship_type is not None:
-                            console.puts('-> {0} ({1})'.format(relationship_name,
-                                                               relationship_type))
-                        else:
-                            console.puts('-> {0}'.format(relationship_name))
-                    target_node = the_relationship.target_node
+                        console.puts('-> {0}'.format(relationship_name))
                     with console.indent(3):
-                        self._dump_graph_node(target_node)
+                        self._dump_graph_node(relationship_model.target_node,
+                                              relationship_model.target_capability)
 
 
 class NodeBase(InstanceModelMixin):
@@ -360,8 +360,10 @@ class NodeBase(InstanceModelMixin):
     :vartype policies: [:class:`Policy`]
     :ivar substitution_mapping: Our contribution to service substitution
     :vartype substitution_mapping: :class:`SubstitutionMapping`
-    :ivar tasks: Tasks on this node
+    :ivar tasks: Tasks for this node
     :vartype tasks: [:class:`Task`]
+    :ivar hosted_tasks: Tasks on this node
+    :vartype hosted_tasks: [:class:`Task`]
     """
 
     __tablename__ = 'node'
@@ -417,7 +419,7 @@ class NodeBase(InstanceModelMixin):
 
     @property
     def is_available(self):
-        return self.state not in [self.INITIAL, self.DELETED, self.ERROR]
+        return self.state not in (self.INITIAL, self.DELETED, self.ERROR)
 
     # region foreign_keys
 
@@ -455,7 +457,7 @@ class NodeBase(InstanceModelMixin):
     # region one_to_one relationships
 
     @declared_attr
-    def host(cls):
+    def host(cls): # pylint: disable=method-hidden
         return relationship.one_to_one_self(cls, 'host_fk')
 
     # endregion
@@ -522,17 +524,9 @@ class NodeBase(InstanceModelMixin):
     __mapper_args__ = {'version_id_col': version} # Enable SQLAlchemy automatic version counting
 
     @property
-    def ip(self):
-        # TODO: totally broken
-        if not self.host_fk:
-            return None
-        host_node = self.host
-        if 'ip' in host_node.runtime_properties:  # pylint: disable=no-member
-            return host_node.runtime_properties['ip']  # pylint: disable=no-member
-        host_node = host_node.node_template  # pylint: disable=no-member
-        host_ip_property = host_node.properties.get('ip')
-        if host_ip_property:
-            return host_ip_property.value
+    def host_address(self):
+        if self.host and self.host.runtime_properties:
+            return self.host.runtime_properties.get('ip')
         return None
 
     def satisfy_requirements(self):
@@ -567,9 +561,10 @@ class NodeBase(InstanceModelMixin):
             if target_node_capability is not None:
                 # Relate to the first target node that has capacity
                 for node in target_nodes:
-                    target_capability = node.capabilities.get(target_node_capability.name)
-                    if target_capability.relate():
+                    a_target_capability = node.capabilities.get(target_node_capability.name)
+                    if a_target_capability.relate():
                         target_node = node
+                        target_capability = a_target_capability
                         break
             else:
                 # Use first target node
@@ -577,14 +572,15 @@ class NodeBase(InstanceModelMixin):
 
             if target_node is not None:
                 if requirement_template.relationship_template is not None:
-                    the_relationship = \
+                    relationship_model = \
                         requirement_template.relationship_template.instantiate(self)
                 else:
-                    the_relationship = models.Relationship(target_capability=target_capability)
-                the_relationship.name = requirement_template.name
-                the_relationship.requirement_template = requirement_template
-                the_relationship.target_node = target_node
-                self.outbound_relationships.append(the_relationship)
+                    relationship_model = models.Relationship()
+                relationship_model.name = requirement_template.name
+                relationship_model.requirement_template = requirement_template
+                relationship_model.target_node = target_node
+                relationship_model.target_capability = target_capability
+                self.outbound_relationships.append(relationship_model)
                 return True
             else:
                 context.validation.report('requirement "{0}" of node "{1}" targets node '
@@ -619,6 +615,32 @@ class NodeBase(InstanceModelMixin):
                 satisfied = False
         return satisfied
 
+    def find_host(self):
+        def _find_host(node):
+            if node.type.role == 'host':
+                return node
+            for the_relationship in node.outbound_relationships:
+                if (the_relationship.target_capability is not None) and \
+                    the_relationship.target_capability.type.role == 'host':
+                    host = _find_host(the_relationship.target_node)
+                    if host is not None:
+                        return host
+            for the_relationship in node.inbound_relationships:
+                if (the_relationship.target_capability is not None) and \
+                    the_relationship.target_capability.type.role == 'feature':
+                    host = _find_host(the_relationship.source_node)
+                    if host is not None:
+                        return host
+            return None
+
+        self.host = _find_host(self)
+
+    def configure_operations(self):
+        for interface in self.interfaces.itervalues():
+            interface.configure_operations()
+        for the_relationship in self.outbound_relationships:
+            the_relationship.configure_operations()
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -761,6 +783,10 @@ class GroupBase(InstanceModelMixin):
 
     description = Column(Text)
 
+    def configure_operations(self):
+        for interface in self.interfaces.itervalues():
+            interface.configure_operations()
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -1146,7 +1172,7 @@ class RelationshipBase(InstanceModelMixin):
     :vartype source_node: :class:`Node`
     :ivar target_node: Target node
     :vartype target_node: :class:`Node`
-    :ivar tasks: Tasks on this node
+    :ivar tasks: Tasks for this relationship
     :vartype tasks: [:class:`Task`]
     """
 
@@ -1266,6 +1292,10 @@ class RelationshipBase(InstanceModelMixin):
     source_position = Column(Integer) # ???
     target_position = Column(Integer) # ???
 
+    def configure_operations(self):
+        for interface in self.interfaces.itervalues():
+            interface.configure_operations()
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -1552,6 +1582,10 @@ class InterfaceBase(InstanceModelMixin):
 
     description = Column(Text)
 
+    def configure_operations(self):
+        for operation in self.operations.itervalues():
+            operation.configure()
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -1592,10 +1626,16 @@ class OperationBase(InstanceModelMixin):
     :vartype operation_template: :class:`OperationTemplate`
     :ivar description: Human-readable description
     :vartype description: string
-    :ivar plugin_specification: Associated plugin
-    :vartype plugin_specification: :class:`PluginSpecification`
-    :ivar implementation: Implementation string (interpreted by the plugin)
+    :ivar plugin: Associated plugin
+    :vartype plugin: :class:`Plugin`
+    :ivar relationship_edge: When True, specifies that the operation is on the relationship's
+                             target edge instead of its source (only used by relationship
+                             operations)
+    :vartype relationship_edge: bool
+    :ivar implementation: Implementation (interpreted by the plugin)
     :vartype implementation: basestring
+    :ivar configuration: Configuration (interpreted by the plugin)
+    :vartype configuration: {basestring: object}
     :ivar dependencies: Dependency strings (interpreted by the plugin)
     :vartype dependencies: [basestring]
     :ivar inputs: Parameters that can be used by this operation
@@ -1632,9 +1672,9 @@ class OperationBase(InstanceModelMixin):
         return relationship.foreign_key('interface', nullable=True)
 
     @declared_attr
-    def plugin_specification_fk(cls):
-        """For Operation one-to-one to PluginSpecification"""
-        return relationship.foreign_key('plugin_specification', nullable=True)
+    def plugin_fk(cls):
+        """For Operation one-to-one to Plugin"""
+        return relationship.foreign_key('plugin', nullable=True)
 
     @declared_attr
     def operation_template_fk(cls):
@@ -1650,9 +1690,8 @@ class OperationBase(InstanceModelMixin):
     # region one_to_one relationships
 
     @declared_attr
-    def plugin_specification(cls):
-        return relationship.one_to_one(
-            cls, 'plugin_specification', back_populates=relationship.NO_BACK_POP)
+    def plugin(cls):
+        return relationship.one_to_one(cls, 'plugin', back_populates=relationship.NO_BACK_POP)
 
     # endregion
 
@@ -1685,12 +1724,32 @@ class OperationBase(InstanceModelMixin):
     # endregion
 
     description = Column(Text)
+    relationship_edge = Column(Boolean)
     implementation = Column(Text)
+    configuration = Column(modeling_types.StrictDict(key_cls=basestring))
     dependencies = Column(modeling_types.StrictList(item_cls=basestring))
     executor = Column(Text)
     max_retries = Column(Integer)
     retry_interval = Column(Integer)
 
+    def configure(self):
+        from . import models
+        # Note: for workflows (operations attached directly to the service) "interface" will be None
+        if (self.implementation is None) or (self.interface is None):
+            return
+
+        if self.plugin is None:
+            arguments = execution_plugin.instantiation.configure_operation(self)
+        else:
+            # In the future plugins may be able to add their own "configure_operation" hook that
+            # can validate the configuration and return specially derived arguments
+            arguments = self.configuration
+
+        # Note: the arguments will *override* operation inputs of the same name
+        if arguments:
+            for k, v in arguments.iteritems():
+                self.inputs[k] = models.Parameter.wrap(k, v)
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -1716,9 +1775,17 @@ class OperationBase(InstanceModelMixin):
         if self.description:
             console.puts(context.style.meta(self.description))
         with context.style.indent:
+            if self.plugin is not None:
+                console.puts('Plugin: {0}'.format(
+                    context.style.literal(self.plugin.name)))
             if self.implementation is not None:
                 console.puts('Implementation: {0}'.format(
                     context.style.literal(self.implementation)))
+            if self.configuration:
+                with context.style.indent:
+                    for k, v in self.configuration.iteritems():
+                        console.puts('{0}: {1}'.format(context.style.property(k),
+                                                       context.style.literal(v)))
             if self.dependencies:
                 console.puts(
                     'Dependencies: {0}'.format(
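
``find_host`` above resolves a node's host by walking its relationships. A
self-contained sketch of the outbound walk only (the real method also follows
inbound relationships whose capability role is 'feature'); the graph encoding
here is illustrative:

    def find_host(name, graph):
        node = graph[name]
        if node.get('role') == 'host':
            return name
        for target, capability_role in node.get('out', ()):
            if capability_role == 'host':
                host = find_host(target, graph)
                if host is not None:
                    return host
        return None

    graph = {
        'app':    {'out': [('server', 'host')]},
        'server': {'role': 'host'},
    }
    assert find_host('app', graph) == 'server'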

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
index 8355521..51fea2f 100644
--- a/aria/modeling/service_template.py
+++ b/aria/modeling/service_template.py
@@ -24,6 +24,7 @@ from sqlalchemy import (
     Column,
     Text,
     Integer,
+    Boolean,
     DateTime
 )
 from sqlalchemy.ext.declarative import declared_attr
@@ -291,6 +292,16 @@ class ServiceTemplateBase(TemplateModelMixin):
 
         context.modeling.instance = service
 
+        for plugin_specification in self.plugin_specifications.itervalues():
+            if plugin_specification.enabled:
+                if plugin_specification.resolve():
+                    plugin = plugin_specification.plugin
+                    service.plugins[plugin.name] = plugin
+                else:
+                    context = ConsumptionContext.get_thread_local()
+                    context.validation.report('specified plugin not found: {0}'.format(
+                        plugin_specification.name), level=validation.Issue.EXTERNAL)
+
         utils.instantiate_dict(self, service.meta_data, self.meta_data)
 
         for node_template in self.node_templates.itervalues():
@@ -301,7 +312,6 @@ class ServiceTemplateBase(TemplateModelMixin):
         utils.instantiate_dict(self, service.groups, self.group_templates)
         utils.instantiate_dict(self, service.policies, self.policy_templates)
         utils.instantiate_dict(self, service.workflows, self.workflow_templates)
-        utils.instantiate_dict(self, service.plugin_specifications, self.plugin_specifications)
 
         if self.substitution_template is not None:
             service.substitution = self.substitution_template.instantiate(container)
@@ -1740,8 +1750,14 @@ class OperationTemplateBase(TemplateModelMixin):
     :vartype description: basestring
     :ivar plugin_specification: Associated plugin
     :vartype plugin_specification: :class:`PluginSpecification`
-    :ivar implementation: Implementation string (interpreted by the plugin)
+    :ivar relationship_edge: When True, specifies that the operation is on the relationship's
+                             target edge instead of its source (only used by relationship
+                             operations)
+    :vartype relationship_edge: bool
+    :ivar implementation: Implementation (interpreted by the plugin)
     :vartype implementation: basestring
+    :ivar configuration: Configuration (interpreted by the plugin)
+    :vartype configuration: {basestring: object}
     :ivar dependencies: Dependency strings (interpreted by the plugin)
     :vartype dependencies: [basestring]
     :ivar inputs: Parameters that can be used by this operation
@@ -1766,8 +1782,6 @@ class OperationTemplateBase(TemplateModelMixin):
                           'interface_template_fk',
                           'plugin_fk']
 
-    description = Column(Text)
-
     # region foreign keys
 
     @declared_attr
@@ -1828,7 +1842,10 @@ class OperationTemplateBase(TemplateModelMixin):
 
     # endregion
 
+    description = Column(Text)
+    relationship_edge = Column(Boolean)
     implementation = Column(Text)
+    configuration = Column(modeling_types.StrictDict(key_cls=basestring))
     dependencies = Column(modeling_types.StrictList(item_cls=basestring))
     executor = Column(Text)
     max_retries = Column(Integer)
@@ -1848,11 +1865,23 @@ class OperationTemplateBase(TemplateModelMixin):
 
     def instantiate(self, container):
         from . import models
+        if self.plugin_specification and self.plugin_specification.enabled:
+            plugin = self.plugin_specification.plugin
+            implementation = self.implementation if plugin is not None else None
+            # "plugin" would be none if a match was not found. In that case, a validation error
+            # should already have been reported in ServiceTemplateBase.instantiate, so we will
+            # continue silently here
+        else:
+            # If the plugin is disabled, the operation should be disabled, too
+            plugin = None
+            implementation = None
         operation = models.Operation(name=self.name,
                                      description=deepcopy_with_locators(self.description),
-                                     implementation=self.implementation,
+                                     relationship_edge=self.relationship_edge,
+                                     plugin=plugin,
+                                     implementation=implementation,
+                                     configuration=self.configuration,
                                      dependencies=self.dependencies,
-                                     plugin_specification=self.plugin_specification,
                                      executor=self.executor,
                                      max_retries=self.max_retries,
                                      retry_interval=self.retry_interval,
@@ -1872,9 +1901,17 @@ class OperationTemplateBase(TemplateModelMixin):
         if self.description:
             console.puts(context.style.meta(self.description))
         with context.style.indent:
+            if self.plugin_specification is not None:
+                console.puts('Plugin specification: {0}'.format(
+                    context.style.literal(self.plugin_specification.name)))
             if self.implementation is not None:
                 console.puts('Implementation: {0}'.format(
                     context.style.literal(self.implementation)))
+            if self.configuration:
+                with context.style.indent:
+                    for k, v in self.configuration.iteritems():
+                        console.puts('{0}: {1}'.format(context.style.property(k),
+                                                       context.style.literal(v)))
             if self.dependencies:
                 console.puts('Dependencies: {0}'.format(
                     ', '.join((str(context.style.literal(v)) for v in self.dependencies))))
@@ -2023,3 +2060,87 @@ class ArtifactTemplateBase(TemplateModelMixin):
                 console.puts('Repository credential: {0}'.format(
                     context.style.literal(self.repository_credential)))
             utils.dump_dict_values(self.properties, 'Properties')
+
+
+class PluginSpecificationBase(TemplateModelMixin):
+    """
+    Plugin specification.
+
+    :ivar name: Required plugin name
+    :vartype name: basestring
+    :ivar version: Minimum plugin version
+    :vartype version: basestring
+    :ivar enabled: Whether the plugin is enabled
+    :vartype enabled: bool
+    :ivar plugin: The matching plugin (or None if not matched)
+    :vartype plugin: :class:`Plugin`
+    """
+
+    __tablename__ = 'plugin_specification'
+
+    __private_fields__ = ['service_template_fk',
+                          'plugin_fk']
+
+    version = Column(Text)
+    enabled = Column(Boolean, nullable=False, default=True)
+
+    # region foreign keys
+
+    @declared_attr
+    def service_template_fk(cls):
+        """For ServiceTemplate one-to-many to PluginSpecification"""
+        return relationship.foreign_key('service_template', nullable=True)
+
+    @declared_attr
+    def plugin_fk(cls):
+        """For PluginSpecification many-to-one to Plugin"""
+        return relationship.foreign_key('plugin', nullable=True)
+
+    # endregion
+
+    # region many_to_one relationships
+
+    @declared_attr
+    def service_template(cls):
+        return relationship.many_to_one(cls, 'service_template')
+
+    @declared_attr
+    def plugin(cls): # pylint: disable=method-hidden
+        return relationship.many_to_one(cls, 'plugin', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('version', self.version),
+            ('enabled', self.enabled)))
+
+    def coerce_values(self, container, report_issues):
+        pass
+
+    def resolve(self):
+        # TODO: we are planning a separate "instantiation" module where this will be called or
+        # moved to. There, we will probably have a context with a storage manager. Until then,
+        # this is the only potentially available context, which of course will only be available
+        # if we're in a workflow.
+        from ..orchestrator import context
+        try:
+            workflow_context = context.workflow.current.get()
+            plugins = workflow_context.model.plugin.list()
+        except context.exceptions.ContextException:
+            plugins = None
+
+        matching_plugins = []
+        if plugins:
+            for plugin in plugins:
+                # TODO: we need to use a version comparator
+                if (plugin.name == self.name) and \
+                    ((self.version is None) or (plugin.package_version >= self.version)):
+                    matching_plugins.append(plugin)
+        self.plugin = None
+        if matching_plugins:
+            # Return highest version of plugin
+            self.plugin = sorted(matching_plugins, key=lambda plugin: plugin.package_version)[-1]
+        return self.plugin is not None
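
``resolve`` above selects the highest-versioned matching plugin. A plain-data
sketch of the matching rule (note the in-code TODO: versions are compared as
strings, so e.g. '10.0' sorts below '9.0' lexicographically):

    def resolve(name, min_version, plugins):
        matching = [p for p in plugins
                    if p['name'] == name and
                    (min_version is None or p['package_version'] >= min_version)]
        if not matching:
            return None
        # Highest version wins (string order, pending a real version comparator)
        return sorted(matching, key=lambda p: p['package_version'])[-1]

    plugins = [{'name': 'fabric', 'package_version': '1.0'},
               {'name': 'fabric', 'package_version': '1.2'}]
    assert resolve('fabric', '1.0', plugins)['package_version'] == '1.2'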

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/orchestrator/execution_plugin/__init__.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/__init__.py b/aria/orchestrator/execution_plugin/__init__.py
index 372022f..3624264 100644
--- a/aria/orchestrator/execution_plugin/__init__.py
+++ b/aria/orchestrator/execution_plugin/__init__.py
@@ -14,6 +14,8 @@
 # limitations under the License.
 
 from contextlib import contextmanager
+from . import instantiation
+
 
 # Populated during execution of python scripts
 ctx = None

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/orchestrator/execution_plugin/common.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/common.py b/aria/orchestrator/execution_plugin/common.py
index 7915c47..32e4575 100644
--- a/aria/orchestrator/execution_plugin/common.py
+++ b/aria/orchestrator/execution_plugin/common.py
@@ -34,7 +34,7 @@ def download_script(ctx, script_path):
     file_descriptor, dest_script_path = tempfile.mkstemp(suffix='-{0}'.format(suffix))
     os.close(file_descriptor)
     try:
-        if schema in ['http', 'https']:
+        if schema in ('http', 'https'):
             response = requests.get(script_path)
             if response.status_code == 404:
                 ctx.task.abort('Failed to download script: {0} (status code: {1})'

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/orchestrator/execution_plugin/instantiation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/instantiation.py b/aria/orchestrator/execution_plugin/instantiation.py
new file mode 100644
index 0000000..960835c
--- /dev/null
+++ b/aria/orchestrator/execution_plugin/instantiation.py
@@ -0,0 +1,191 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# TODO: this module will eventually be moved to a new "aria.instantiation" package
+
+from ...utils.formatting import full_type_name
+from ...utils.collections import OrderedDict
+from ...parser import validation
+from ...parser.consumption import ConsumptionContext
+
+
+def configure_operation(operation):
+    configuration = OrderedDict(operation.configuration) if operation.configuration else {}
+
+    arguments = OrderedDict()
+    arguments['script_path'] = operation.implementation
+    arguments['process'] = _get_process(configuration.pop('process')) \
+        if 'process' in configuration else None
+
+    host = None
+    interface = operation.interface
+    if interface.node is not None:
+        host = interface.node.host
+    elif interface.relationship is not None:
+        if operation.relationship_edge is True:
+            host = interface.relationship.target_node.host
+        else: # either False or None
+            host = interface.relationship.source_node.host
+
+    if host is None:
+        _configure_local(operation)
+    else:
+        _configure_remote(operation, configuration, arguments)
+
+    # Any remaining unhandled configuration values will become extra arguments, available as kwargs
+    # in either "run_script_locally" or "run_script_with_ssh"
+    arguments.update(configuration)
+
+    return arguments
+
+
+def _configure_local(operation):
+    """
+    Local operation.
+    """
+    from . import operations
+    operation.implementation = '{0}.{1}'.format(operations.__name__,
+                                                operations.run_script_locally.__name__)
+
+
+def _configure_remote(operation, configuration, arguments):
+    """
+    Remote SSH operation via Fabric.
+    """
+    # TODO: find a way to configure these generally in the service template
+    default_user = ''
+    default_password = ''
+
+    ssh = _get_ssh(configuration.pop('ssh')) if 'ssh' in configuration else {}
+    if 'user' not in ssh:
+        ssh['user'] = default_user
+    if ('password' not in ssh) and ('key' not in ssh) and ('key_filename' not in ssh):
+        ssh['password'] = default_password
+
+    arguments['use_sudo'] = ssh.get('use_sudo')
+    arguments['hide_output'] = ssh.get('hide_output')
+    arguments['fabric_env'] = {}
+    if 'warn_only' in ssh:
+        arguments['fabric_env']['warn_only'] = ssh['warn_only']
+    arguments['fabric_env']['user'] = ssh.get('user')
+    arguments['fabric_env']['password'] = ssh.get('password')
+    arguments['fabric_env']['key'] = ssh.get('key')
+    arguments['fabric_env']['key_filename'] = ssh.get('key_filename')
+    if 'address' in ssh:
+        arguments['fabric_env']['host_string'] = ssh['address']
+
+    if arguments['fabric_env'].get('user') is None:
+        context = ConsumptionContext.get_thread_local()
+        context.validation.report('must configure "ssh.user" for "{0}"'
+                                  .format(operation.implementation),
+                                  level=validation.Issue.BETWEEN_TYPES)
+    if (arguments['fabric_env'].get('password') is None) and \
+        (arguments['fabric_env'].get('key') is None) and \
+        (arguments['fabric_env'].get('key_filename') is None):
+        context = ConsumptionContext.get_thread_local()
+        context.validation.report('must configure "ssh.password", "ssh.key", or "ssh.key_filename" '
+                                  'for "{0}"'
+                                  .format(operation.implementation),
+                                  level=validation.Issue.BETWEEN_TYPES)
+
+    from . import operations
+    operation.implementation = '{0}.{1}'.format(operations.__name__,
+                                                operations.run_script_with_ssh.__name__)
+
+
+def _get_process(value):
+    if value is None:
+        return None
+    _validate_type(value, dict, 'process')
+    for k, v in value.iteritems():
+        if k == 'eval_python':
+            value[k] = _str_to_bool(v, 'process.eval_python')
+        elif k == 'cwd':
+            _validate_type(v, basestring, 'process.cwd')
+        elif k == 'command_prefix':
+            _validate_type(v, basestring, 'process.command_prefix')
+        elif k == 'args':
+            value[k] = _dict_to_list(v, 'process.args')
+        elif k == 'env':
+            _validate_type(v, dict, 'process.env')
+        else:
+            context = ConsumptionContext.get_thread_local()
+            context.validation.report('unsupported configuration: "process.{0}"'.format(k),
+                                      level=validation.Issue.BETWEEN_TYPES)
+    return value
+
+
+def _get_ssh(value):
+    if value is None:
+        return {}
+    _validate_type(value, dict, 'ssh')
+    for k, v in value.iteritems():
+        if k == 'use_sudo':
+            value[k] = _str_to_bool(v, 'ssh.use_sudo')
+        elif k == 'hide_output':
+            value[k] = _dict_to_list(v, 'ssh.hide_output')
+        elif k == 'warn_only':
+            value[k] = _str_to_bool(v, 'ssh.warn_only')
+        elif k == 'user':
+            _validate_type(v, basestring, 'ssh.user')
+        elif k == 'password':
+            _validate_type(v, basestring, 'ssh.password')
+        elif k == 'key':
+            _validate_type(v, basestring, 'ssh.key')
+        elif k == 'key_filename':
+            _validate_type(v, basestring, 'ssh.key_filename')
+        elif k == 'address':
+            _validate_type(v, basestring, 'ssh.address')
+        else:
+            context = ConsumptionContext.get_thread_local()
+            context.validation.report('unsupported configuration: "ssh.{0}"'.format(k),
+                                      level=validation.Issue.BETWEEN_TYPES)
+    return value
+
+
+def _validate_type(value, the_type, name):
+    if not isinstance(value, the_type):
+        context = ConsumptionContext.get_thread_local()
+        context.validation.report('"{0}" configuration is not a {1}'
+                                  .format(name, full_type_name(the_type)),
+                                  level=validation.Issue.BETWEEN_TYPES)
+
+
+def _str_to_bool(value, name):
+    if value is None:
+        return None
+    _validate_type(value, basestring, name)
+    if value == 'true':
+        return True
+    elif value == 'false':
+        return False
+    else:
+        context = ConsumptionContext.get_thread_local()
+        context.validation.report('"{0}" configuration is not "true" or "false": {1}'
+                                  .format(name, repr(value)),
+                                  level=validation.Issue.BETWEEN_TYPES)
+
+
+def _dict_to_list(the_dict, name):
+    _validate_type(the_dict, dict, name)
+    value = []
+    for k in sorted(the_dict):
+        v = the_dict[k]
+        if not isinstance(v, basestring):
+            context = ConsumptionContext.get_thread_local()
+            context.validation.report('"{0}.{1}" configuration is not a string: {2}'
+                                      .format(name, k, repr(v)),
+                                      level=validation.Issue.BETWEEN_TYPES)
+        value.append(v)
+    return value
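
Two of the helpers above normalize configuration values that arrive as strings
or dicts. A standalone sketch of their behavior (simplified: these raise
instead of reporting validation issues):

    def str_to_bool(value):
        # Only the exact strings 'true' and 'false' are accepted
        if value == 'true':
            return True
        if value == 'false':
            return False
        raise ValueError('not "true" or "false": {0!r}'.format(value))

    def dict_to_list(the_dict):
        # Keys exist only to impose an order; sorted keys yield the list
        return [the_dict[k] for k in sorted(the_dict)]

    assert str_to_bool('true') is True
    assert dict_to_list({'2': '--verbose', '1': 'install'}) == ['install', '--verbose']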

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/orchestrator/execution_plugin/ssh/operations.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/ssh/operations.py b/aria/orchestrator/execution_plugin/ssh/operations.py
index f240beb..7147a30 100644
--- a/aria/orchestrator/execution_plugin/ssh/operations.py
+++ b/aria/orchestrator/execution_plugin/ssh/operations.py
@@ -143,9 +143,9 @@ def _fabric_env(ctx, fabric_env, warn_only):
     env = constants.FABRIC_ENV_DEFAULTS.copy()
     env.update(fabric_env or {})
     env.setdefault('warn_only', warn_only)
-    if 'host_string' not in env:
-        env['host_string'] = ctx.task.runs_on.ip
     # validations
+    if (not env.get('host_string')) and (ctx.task) and (ctx.task.actor) and (ctx.task.actor.host):
+        env['host_string'] = ctx.task.actor.host.host_address
     if not env.get('host_string'):
         ctx.task.abort('`host_string` not supplied and ip cannot be deduced automatically')
     if not (env.get('password') or env.get('key_filename') or env.get('key')):
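
The fallback above now derives ``host_string`` from the task actor's host node
instead of the removed ``task.runs_on.ip``. A minimal sketch with stand-in
objects (not the ARIA task context):

    class Host(object):
        def __init__(self, address):
            self.host_address = address

    class Actor(object):
        def __init__(self, host):
            self.host = host

    def deduce_host_string(env, actor):
        if not env.get('host_string') and actor and actor.host:
            env['host_string'] = actor.host.host_address
        return env

    assert deduce_host_string({}, Actor(Host('10.0.0.5')))['host_string'] == '10.0.0.5'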

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/orchestrator/workflows/api/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task.py b/aria/orchestrator/workflows/api/task.py
index f49ec2e..49c584c 100644
--- a/aria/orchestrator/workflows/api/task.py
+++ b/aria/orchestrator/workflows/api/task.py
@@ -19,7 +19,7 @@ Provides the tasks to be entered into the task graph
 import copy
 
 from ....modeling import models
-from ....utils.collections import OrderedDict
+from ....utils.collections import (OrderedDict, FrozenDict)
 from ....utils.uuid import generate_uuid
 from ... import context
 from .. import exceptions
@@ -56,7 +56,7 @@ class BaseTask(object):
 
 class OperationTask(BaseTask):
     """
-    Represents an operation task in the task_graph
+    Represents an operation task in the task graph.
     """
 
     NAME_FORMAT = '{interface}:{operation}@{type}:{name}'
@@ -66,32 +66,50 @@ class OperationTask(BaseTask):
                  actor_type,
                  interface_name,
                  operation_name,
-                 runs_on=None,
+                 inputs=None,
                  max_attempts=None,
                  retry_interval=None,
-                 ignore_failure=None,
-                 inputs=None):
+                 ignore_failure=None):
         """
         Do not call this constructor directly. Instead, use :meth:`for_node` or
         :meth:`for_relationship`.
         """
 
-        assert isinstance(actor, (models.Node, models.Relationship))
-        assert actor_type in ('node', 'relationship')
         assert interface_name and operation_name
-        assert runs_on in models.Task.RUNS_ON
         super(OperationTask, self).__init__()
 
+        operation = None
+        interface = actor.interfaces.get(interface_name)
+        if interface is not None:
+            operation = interface.operations.get(operation_name)
+
+        if operation is None:
+            raise exceptions.OperationNotFoundException(
+                'Could not find operation "{0}" on interface "{1}" for {2} "{3}"'
+                .format(operation_name, interface_name, actor_type, actor.name))
+
+        if operation.implementation is None:
+            raise exceptions.OperationNotFoundException(
+                'Empty operation "{0}" on interface "{1}" for {2} "{3}"'
+                .format(operation_name, interface_name, actor_type, actor.name))
+
         self.actor = actor
+        self.actor_type = actor_type
+        self.interface_name = interface_name
+        self.operation_name = operation_name
+
+        self.name = OperationTask.NAME_FORMAT.format(type=actor_type,
+                                                     name=actor.name,
+                                                     interface=interface_name,
+                                                     operation=operation_name)
         self.max_attempts = (self.workflow_context._task_max_attempts
                              if max_attempts is None else max_attempts)
         self.retry_interval = (self.workflow_context._task_retry_interval
                                if retry_interval is None else retry_interval)
         self.ignore_failure = (self.workflow_context._task_ignore_failure
                                if ignore_failure is None else ignore_failure)
-        self.runs_on = runs_on
-        self.interface_name = interface_name
-        self.operation_name = operation_name
+        self.implementation = operation.implementation
+        self.plugin = operation.plugin
 
         # Wrap inputs
         inputs = copy.deepcopy(inputs) if inputs else {}
@@ -99,65 +117,33 @@ class OperationTask(BaseTask):
             if not isinstance(v, models.Parameter):
                 inputs[k] = models.Parameter.wrap(k, v)
 
-        # TODO: Suggestion: these extra inputs could be stored as a separate entry in the task
-        # model, because they are different from the operation inputs. If we do this, then the two
-        # kinds of inputs should *not* be merged here.
-
-        operation = self._get_operation()
-        if operation is None:
-            raise exceptions.OperationNotFoundException(
-                'Could not find operation "{0}" on interface "{1}" for {2} "{3}"'
-                .format(self.operation_name, self.interface_name, actor_type, actor.name))
-
-        self.plugin = None
-        if operation.plugin_specification:
-            self.plugin = OperationTask._find_plugin(operation.plugin_specification)
-            if self.plugin is None:
-                raise exceptions.PluginNotFoundException(
-                    'Could not find plugin of operation "{0}" on interface "{1}" for {2} "{3}"'
-                    .format(self.operation_name, self.interface_name, actor_type, actor.name))
-
-        self.implementation = operation.implementation
-        self.inputs = OperationTask._merge_inputs(operation.inputs, inputs)
-
-        self.name = OperationTask.NAME_FORMAT.format(type=actor_type,
-                                                     name=actor.name,
-                                                     interface=self.interface_name,
-                                                     operation=self.operation_name)
-
-    def __repr__(self):
-        return self.name
-
-    def _get_operation(self):
-        interface = self.actor.interfaces.get(self.interface_name)
-        if interface:
-            return interface.operations.get(self.operation_name)
-        return None
-
-
+        self.inputs = OrderedDict(operation.inputs)
+        if inputs:
+            self.inputs.update(inputs)
+        self.inputs = FrozenDict(self.inputs)
 
     @classmethod
     def for_node(cls,
                  node,
                  interface_name,
                  operation_name,
+                 inputs=None,
                  max_attempts=None,
                  retry_interval=None,
-                 ignore_failure=None,
-                 inputs=None):
+                 ignore_failure=None):
         """
         Creates an operation on a node.
 
         :param node: The node on which to run the operation
         :param interface_name: The interface name
         :param operation_name: The operation name within the interface
+        :param inputs: Override the operation's inputs
         :param max_attempts: The maximum number of attempts in case the operation fails
-                             (if not specified the defaults it taken from the workflow context)
+                             (if not specified, the default is taken from the workflow context)
         :param retry_interval: The interval in seconds between attempts when the operation fails
-                               (if not specified the defaults it taken from the workflow context)
+                               (if not specified, the default is taken from the workflow context)
         :param ignore_failure: Whether to ignore failures
-                               (if not specified the defaults it taken from the workflow context)
-        :param inputs: Additional operation inputs
+                               (if not specified, the default is taken from the workflow context)
         """
 
         assert isinstance(node, models.Node)
@@ -166,62 +152,45 @@ class OperationTask(BaseTask):
             actor_type='node',
             interface_name=interface_name,
             operation_name=operation_name,
+            inputs=inputs,
             max_attempts=max_attempts,
             retry_interval=retry_interval,
-            ignore_failure=ignore_failure,
-            inputs=inputs,
-            runs_on=models.Task.RUNS_ON_NODE)
+            ignore_failure=ignore_failure)
 
     @classmethod
     def for_relationship(cls,
                          relationship,
                          interface_name,
                          operation_name,
-                         runs_on=models.Task.RUNS_ON_SOURCE,
+                         inputs=None,
                          max_attempts=None,
                          retry_interval=None,
-                         ignore_failure=None,
-                         inputs=None):
+                         ignore_failure=None):
         """
-        Creates an operation on a relationship edge.
+        Creates an operation on a relationship.
 
         :param relationship: The relationship on which to run the operation
         :param interface_name: The interface name
         :param operation_name: The operation name within the interface
-        :param runs_on: where to run the operation ("source" or "target"); defaults to "source"
+        :param inputs: Override the operation's inputs
         :param max_attempts: The maximum number of attempts in case the operation fails
-                             (if not specified the defaults it taken from the workflow context)
+                             (if not specified the default is taken from the workflow context)
         :param retry_interval: The interval in seconds between attempts when the operation fails
-                               (if not specified the defaults it taken from the workflow context)
+                               (if not specified the default is taken from the workflow context)
         :param ignore_failure: Whether to ignore failures
-                               (if not specified the defaults it taken from the workflow context)
-        :param inputs: Additional operation inputs
+                               (if not specified the default is taken from the workflow context)
         """
 
         assert isinstance(relationship, models.Relationship)
-        assert runs_on in models.Task.RUNS_ON
         return cls(
             actor=relationship,
             actor_type='relationship',
             interface_name=interface_name,
             operation_name=operation_name,
-            runs_on=runs_on,
+            inputs=inputs,
             max_attempts=max_attempts,
             retry_interval=retry_interval,
-            ignore_failure=ignore_failure,
-            inputs=inputs)
-
-    @staticmethod
-    def _find_plugin(plugin_specification):
-        workflow_context = context.workflow.current.get()
-        return plugin_specification.find_plugin(workflow_context.model.plugin.list())
-
-    @staticmethod
-    def _merge_inputs(operation_inputs, override_inputs=None):
-        final_inputs = OrderedDict(operation_inputs)
-        if override_inputs:
-            final_inputs.update(override_inputs)
-        return final_inputs
+            ignore_failure=ignore_failure)
 
 
 class WorkflowTask(BaseTask):

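For readers tracking the API change above, here is a minimal usage sketch of the
revised signatures. It assumes an existing models.Node and models.Relationship
retrieved from model storage; the interface, operation, and input values are
illustrative, not taken from this commit:

    from aria.orchestrator.workflows.api.task import OperationTask

    # 'inputs' now overrides the operation's declared inputs (merged via
    # OrderedDict/FrozenDict, as shown in the constructor above).
    create_task = OperationTask.for_node(
        node=node,
        interface_name='tosca.interfaces.node.lifecycle.Standard',
        operation_name='create',
        inputs={'port': 8080},   # hypothetical override
        max_attempts=3)

    # for_relationship no longer takes runs_on; the edge is determined elsewhere
    # (see the relationship_edge extensions added to interfaces.yaml below).
    configure_task = OperationTask.for_relationship(
        relationship=relationship,
        interface_name='tosca.interfaces.relationship.Configure',
        operation_name='pre_configure_source')
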
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/orchestrator/workflows/api/task_graph.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task_graph.py b/aria/orchestrator/workflows/api/task_graph.py
index 92a39d2..9f0d13b 100644
--- a/aria/orchestrator/workflows/api/task_graph.py
+++ b/aria/orchestrator/workflows/api/task_graph.py
@@ -37,7 +37,7 @@ def _filter_out_empty_tasks(func=None):
         return lambda f: _filter_out_empty_tasks(func=f)
 
     def _wrapper(task, *tasks, **kwargs):
-        return func(*(t for t in [task] + list(tasks) if t), **kwargs)
+        return func(*(t for t in (task,) + tuple(tasks) if t), **kwargs)
     return _wrapper
 
 
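As a quick illustration of what the list-to-tuple change preserves: the decorator
still filters out falsy tasks before calling the wrapped function. A self-contained
sketch (the sequence function is hypothetical):

    def _filter_out_empty_tasks(func=None):
        if func is None:
            return lambda f: _filter_out_empty_tasks(func=f)

        def _wrapper(task, *tasks, **kwargs):
            return func(*(t for t in (task,) + tuple(tasks) if t), **kwargs)
        return _wrapper

    @_filter_out_empty_tasks
    def sequence(*tasks):
        return tasks

    # None entries are dropped; only truthy tasks reach the wrapped function.
    assert sequence(None, 'task1', None, 'task2') == ('task1', 'task2')
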

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/orchestrator/workflows/builtin/utils.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/utils.py b/aria/orchestrator/workflows/builtin/utils.py
index d79318f..752fe35 100644
--- a/aria/orchestrator/workflows/builtin/utils.py
+++ b/aria/orchestrator/workflows/builtin/utils.py
@@ -71,15 +71,13 @@ def relationship_tasks(
         operations.append(
             OperationTask.for_relationship(relationship=relationship,
                                            interface_name=interface_name,
-                                           operation_name=source_operation_name,
-                                           runs_on='source')
+                                           operation_name=source_operation_name)
         )
     if target_operation_name:
         operations.append(
             OperationTask.for_relationship(relationship=relationship,
                                            interface_name=interface_name,
-                                           operation_name=target_operation_name,
-                                           runs_on='target')
+                                           operation_name=target_operation_name)
         )
 
     return operations

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/orchestrator/workflows/core/engine.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/engine.py b/aria/orchestrator/workflows/core/engine.py
index d32abb8..f73cade 100644
--- a/aria/orchestrator/workflows/core/engine.py
+++ b/aria/orchestrator/workflows/core/engine.py
@@ -82,8 +82,8 @@ class Engine(logger.LoggerMixin):
         events.on_cancelling_workflow_signal.send(self._workflow_context)
 
     def _is_cancel(self):
-        return self._workflow_context.execution.status in [models.Execution.CANCELLING,
-                                                           models.Execution.CANCELLED]
+        return self._workflow_context.execution.status in (models.Execution.CANCELLING,
+                                                           models.Execution.CANCELLED)
 
     def _executable_tasks(self):
         now = datetime.utcnow()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/orchestrator/workflows/core/events_handler.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/events_handler.py b/aria/orchestrator/workflows/core/events_handler.py
index 8534aae..7f61bfa 100644
--- a/aria/orchestrator/workflows/core/events_handler.py
+++ b/aria/orchestrator/workflows/core/events_handler.py
@@ -125,8 +125,12 @@ def _workflow_cancelling(workflow_context, *args, **kwargs):
 
 
 def _update_node_state_if_necessary(task, is_transitional=False):
-    if task.interface_name in ['tosca.interfaces.node.lifecycle.Standard', 'Standard']:
-        node = task.runs_on
+    # TODO: this is not the right way to check! the interface name is arbitrary
+    # and also will *never* be the type name
+    model_task = task.model_task
+    node = model_task.node if model_task is not None else None
+    if (node is not None) and \
+        (task.interface_name in ('Standard', 'tosca.interfaces.node.lifecycle.Standard')):
         state = node.determine_state(op_name=task.operation_name, is_transitional=is_transitional)
         if state:
             node.state = state

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py
index aa8963f..ba93e21 100644
--- a/aria/orchestrator/workflows/core/task.py
+++ b/aria/orchestrator/workflows/core/task.py
@@ -71,11 +71,11 @@ class StubTask(BaseTask):
 
     @property
     def has_ended(self):
-        return self.status in [models.Task.SUCCESS, models.Task.FAILED]
+        return self.status in (models.Task.SUCCESS, models.Task.FAILED)
 
     @property
     def is_waiting(self):
-        return self.status in [models.Task.PENDING, models.Task.RETRYING]
+        return self.status in (models.Task.PENDING, models.Task.RETRYING)
 
 
 class StartWorkflowTask(StubTask):
@@ -133,15 +133,14 @@ class OperationTask(BaseTask):
         task_model = create_task_model(
             name=api_task.name,
             implementation=api_task.implementation,
-            instance=api_task.actor,
+            actor=api_task.actor,
             inputs=api_task.inputs,
             status=base_task_model.PENDING,
             max_attempts=api_task.max_attempts,
             retry_interval=api_task.retry_interval,
             ignore_failure=api_task.ignore_failure,
             plugin=plugin,
-            execution=self._workflow_context.execution,
-            runs_on=api_task.runs_on
+            execution=self._workflow_context.execution
         )
         self._workflow_context.model.task.put(task_model)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/orchestrator/workflows/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/exceptions.py b/aria/orchestrator/workflows/exceptions.py
index 4fb8dd7..0ca263f 100644
--- a/aria/orchestrator/workflows/exceptions.py
+++ b/aria/orchestrator/workflows/exceptions.py
@@ -70,13 +70,19 @@ class TaskException(exceptions.AriaError):
     """
 
 
-class OperationNotFoundException(TaskException):
+class TaskCreationException(TaskException):
+    """
+    Could not create the task.
+    """
+
+
+class OperationNotFoundException(TaskCreationException):
     """
     Could not find an operation on the node or relationship.
     """
 
 
-class PluginNotFoundException(TaskException):
+class PluginNotFoundException(TaskCreationException):
     """
     Could not find a plugin matching the plugin specification.
     """

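With the new intermediate TaskCreationException, callers can handle all
task-creation failures in one except clause. A sketch (the call site and handler
are hypothetical):

    from aria.orchestrator.workflows import exceptions

    try:
        task = OperationTask.for_node(node, 'Standard', 'create')
    except exceptions.TaskCreationException:
        # Covers both OperationNotFoundException and PluginNotFoundException,
        # since both now derive from TaskCreationException.
        skip_task()
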
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/orchestrator/workflows/executor/process.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py
index 6397e88..f814c4d 100644
--- a/aria/orchestrator/workflows/executor/process.py
+++ b/aria/orchestrator/workflows/executor/process.py
@@ -177,7 +177,7 @@ class ProcessExecutor(base.BaseExecutor):
                 pythonpath_dirs = [os.path.join(
                     plugin_prefix, 'lib{0}'.format(b),
                     'python{0}.{1}'.format(sys.version_info[0], sys.version_info[1]),
-                    'site-packages') for b in ['', '64']]
+                    'site-packages') for b in ('', '64')]
 
         # Add user-supplied directories to injected PYTHONPATH
         pythonpath_dirs.extend(self._python_path)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/parser/consumption/modeling.py
----------------------------------------------------------------------
diff --git a/aria/parser/consumption/modeling.py b/aria/parser/consumption/modeling.py
index 4847ba7..6c616b4 100644
--- a/aria/parser/consumption/modeling.py
+++ b/aria/parser/consumption/modeling.py
@@ -103,7 +103,7 @@ class InstantiateServiceInstance(Consumer):
     def consume(self):
         if self.context.modeling.template is None:
             self.context.validation.report('InstantiateServiceInstance consumer: missing service '
-                                           'model')
+                                           'template')
             return
 
         self.context.modeling.template.instantiate(None)
@@ -145,6 +145,24 @@ class ValidateCapabilities(Consumer):
         self.context.modeling.instance.validate_capabilities()
 
 
+class FindHosts(Consumer):
+    """
+    Find hosts for all nodes in the service instance.
+    """
+
+    def consume(self):
+        self.context.modeling.instance.find_hosts()
+
+
+class ConfigureOperations(Consumer):
+    """
+    Configures all operations in the service instance.
+    """
+
+    def consume(self):
+        self.context.modeling.instance.configure_operations()
+
+
 class ServiceInstance(ConsumerChain):
     """
     Generates the service instance by instantiating the service template.
@@ -158,6 +176,8 @@ class ServiceInstance(ConsumerChain):
                                                         SatisfyRequirements,
                                                         CoerceServiceInstanceValues,
                                                         ValidateCapabilities,
+                                                        FindHosts,
+                                                        ConfigureOperations,
                                                         CoerceServiceInstanceValues))
 
     def dump(self):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/aria/storage/instrumentation.py
----------------------------------------------------------------------
diff --git a/aria/storage/instrumentation.py b/aria/storage/instrumentation.py
index fb95fcf..138432a 100644
--- a/aria/storage/instrumentation.py
+++ b/aria/storage/instrumentation.py
@@ -110,9 +110,9 @@ class _Instrumentation(object):
                         current = copy.deepcopy(attribute_type(initial))
                     tracked_attributes[attribute_name] = _Value(initial, current)
                 target.__dict__[attribute_name] = tracked_attributes[attribute_name].current
-        for listener_args in [(instrumented_class, 'load', listener),
+        for listener_args in ((instrumented_class, 'load', listener),
                               (instrumented_class, 'refresh', listener),
-                              (instrumented_class, 'refresh_flush', listener)]:
+                              (instrumented_class, 'refresh_flush', listener)):
             sqlalchemy.event.listen(*listener_args)
             self.listeners.append(listener_args)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/elasticsearch.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/elasticsearch.yaml b/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/elasticsearch.yaml
index 32623d1..72b210a 100644
--- a/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/elasticsearch.yaml
+++ b/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/elasticsearch.yaml
@@ -4,3 +4,5 @@ node_types:
 
   tosca.nodes.SoftwareComponent.Elasticsearch:
     derived_from: tosca.nodes.SoftwareComponent
+    capabilities:
+      app: tosca.capabilities.Endpoint

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/kibana.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/kibana.yaml b/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/kibana.yaml
index 7af00d0..4ee8700 100644
--- a/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/kibana.yaml
+++ b/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/kibana.yaml
@@ -8,3 +8,5 @@ node_types:
       - search_endpoint:
           capability: tosca.capabilities.Endpoint
           relationship: tosca.relationships.ConnectsTo
+    capabilities:
+      app: tosca.capabilities.Endpoint

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/logstash.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/logstash.yaml b/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/logstash.yaml
index a3eebbe..ea74c7e 100644
--- a/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/logstash.yaml
+++ b/examples/tosca-simple-1.0/use-cases/multi-tier-1/custom_types/logstash.yaml
@@ -8,3 +8,5 @@ node_types:
       - search_endpoint:
           capability: tosca.capabilities.Endpoint
           relationship: tosca.relationships.ConnectsTo
+    capabilities:
+      app: tosca.capabilities.Endpoint

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/examples/tosca-simple-1.0/use-cases/webserver-dbms-2/custom_types/paypalpizzastore_nodejs_app.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/webserver-dbms-2/custom_types/paypalpizzastore_nodejs_app.yaml b/examples/tosca-simple-1.0/use-cases/webserver-dbms-2/custom_types/paypalpizzastore_nodejs_app.yaml
index 4723a3f..02bb399 100644
--- a/examples/tosca-simple-1.0/use-cases/webserver-dbms-2/custom_types/paypalpizzastore_nodejs_app.yaml
+++ b/examples/tosca-simple-1.0/use-cases/webserver-dbms-2/custom_types/paypalpizzastore_nodejs_app.yaml
@@ -9,7 +9,7 @@ node_types:
         type: string
     requirements:
       - database_connection:
-          capability: tosca.capabilities.Container
+          capability: tosca.capabilities.Node
 
   tosca.nodes.WebServer.Nodejs:
     derived_from: tosca.nodes.WebServer

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/examples/tosca-simple-1.0/use-cases/webserver-dbms-2/webserver-dbms-2.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/webserver-dbms-2/webserver-dbms-2.yaml b/examples/tosca-simple-1.0/use-cases/webserver-dbms-2/webserver-dbms-2.yaml
index 66eab8e..91f0b35 100644
--- a/examples/tosca-simple-1.0/use-cases/webserver-dbms-2/webserver-dbms-2.yaml
+++ b/examples/tosca-simple-1.0/use-cases/webserver-dbms-2/webserver-dbms-2.yaml
@@ -53,7 +53,7 @@ topology_template:
              implementation: scripts/nodejs/configure.sh
              inputs:
                github_url: { get_property: [ SELF, github_url ] }
-               mongodb_ip: { get_attribute: [mongo_server, private_address] }
+               mongodb_ip: { get_attribute: [ mongo_server, private_address ] }
            start: scripts/nodejs/start.sh
 
     nodejs:
@@ -86,7 +86,7 @@ topology_template:
           configure:
             implementation: mongodb/config.sh
             inputs:
-              mongodb_ip: { get_attribute: [mongo_server, private_address] }
+              mongodb_ip: { get_attribute: [ mongo_server, private_address ] }
           start: mongodb/start.sh
 
     mongo_server:
@@ -109,7 +109,7 @@ topology_template:
 
     nodejs_url:
       description: URL for the nodejs server, http://<IP>:3000
-      value: { get_attribute: [app_server, private_address] }
+      value: { get_attribute: [ app_server, private_address ] }
     mongodb_url:
       description: URL for the mongodb server.
       value: { get_attribute: [ mongo_server, private_address ] }

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml b/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
index 09cef57..0c5e77f 100644
--- a/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
+++ b/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
@@ -17,6 +17,8 @@ policy_types:
 
   aria.Plugin:
     _extensions:
+      shorthand_name: Plugin
+      type_qualified_name: aria:Plugin
       role: plugin
     description: >-
       Policy used to specify plugins used by services. For an operation to be able to use a plugin
@@ -29,9 +31,17 @@ policy_types:
           Minimum plugin version.
         type: version
         required: false
+      enabled:
+        description: >-
+          If set to false, the plugin will be ignored, and all operations and workflows
+          depending on it will also be disabled.
+        type: boolean
+        default: true
 
   aria.Workflow:
     _extensions:
+      shorthand_name: Workflow
+      type_qualified_name: aria:Workflow
       role: workflow
     description: >-
       Policy used to specify custom workflows. A workflow is usually a workload of interconnected

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/capabilities.yaml
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/capabilities.yaml b/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/capabilities.yaml
index 72f6f0e..0b81a16 100644
--- a/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/capabilities.yaml
+++ b/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/capabilities.yaml
@@ -32,6 +32,7 @@ capability_types:
       specification: tosca-simple-1.0
       specification_section: 5.4.2
       specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_CAPABILITIES_NODE'
+      role: feature
     description: >-
       The Node capability indicates the base capabilities of a TOSCA Node Type.
     derived_from: tosca.capabilities.Root
@@ -43,6 +44,7 @@ capability_types:
       specification: tosca-simple-1.0
       specification_section: 5.4.3
       specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_CAPABILITIES_CONTAINER'
+      role: host
     description: >-
       The Container capability, when included on a Node Type or Template definition, indicates that the node can act as a container
       for (or a host for) one or more other declared Node Types.

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/interfaces.yaml
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/interfaces.yaml b/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/interfaces.yaml
index de1d34f..ff6ba6c 100644
--- a/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/interfaces.yaml
+++ b/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/interfaces.yaml
@@ -63,24 +63,40 @@ interface_types:
     pre_configure_source:
       description: >-
         Operation to pre-configure the source endpoint.
+      _extensions:
+        relationship_edge: source
     pre_configure_target:
       description: >-
         Operation to pre-configure the target endpoint.
+      _extensions:
+        relationship_edge: target
     post_configure_source:
       description: >-
         Operation to post-configure the source endpoint.
+      _extensions:
+        relationship_edge: source
     post_configure_target:
       description: >-
         Operation to post-configure the target endpoint.
+      _extensions:
+        relationship_edge: target
     add_target:
       description: >-
         Operation to notify the source node of a target node being added via a relationship.
+      _extensions:
+        relationship_edge: source
     add_source:
       description: >-
         Operation to notify the target node of a source node which is now available via a relationship.
+      _extensions:
+        relationship_edge: target
     target_changed:
       description: >-
         Operation to notify source some property or attribute of the target changed
+      _extensions:
+        relationship_edge: source
     remove_target:
       description: >-
         Operation to remove a target node.
+      _extensions:
+        relationship_edge: source

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/nodes.yaml
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/nodes.yaml b/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/nodes.yaml
index 414a388..bb33b6f 100644
--- a/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/nodes.yaml
+++ b/extensions/aria_extension_tosca/profiles/tosca-simple-1.0/nodes.yaml
@@ -60,6 +60,7 @@ node_types:
       specification: tosca-simple-1.0
       specification_section: 5.8.2
       specification_url: 'http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01/TOSCA-Simple-Profile-YAML-v1.0-cos01.html#DEFN_TYPE_NODES_COMPUTE'
+      role: host
     description: >-
       The TOSCA Compute node represents one or more real or virtual processors of software applications or services along with
       other essential local resources. Collectively, the resources the compute node represents can logically be viewed as a (real

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/extensions/aria_extension_tosca/simple_v1_0/assignments.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/assignments.py b/extensions/aria_extension_tosca/simple_v1_0/assignments.py
index 2a39ed9..6e36ba8 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/assignments.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/assignments.py
@@ -84,6 +84,25 @@ class OperationAssignment(ExtensiblePresentation):
         :rtype: dict of str, :class:`PropertyAssignment`
         """
 
+    @cachedmethod
+    def _get_extensions(self, context):
+        def update_inherited_extensions(extensions, interface_type):
+            parent = interface_type._get_parent(context)
+            if parent is not None:
+                update_inherited_extensions(extensions, parent)
+            operation_definition = interface_type.operations.get(self._name)
+            if operation_definition is not None:
+                if operation_definition._extensions:
+                    extensions.update(operation_definition._extensions)
+
+        extensions = {}
+        update_inherited_extensions(extensions, self._container._get_type(context))
+        if self._container._extensions:
+            extensions.update(self._container._extensions)
+        if self._extensions:
+            extensions.update(self._extensions)
+        return extensions
+
 @allow_unknown_fields
 @has_fields
 @dsl_specification('3.5.14-2', 'tosca-simple-1.0')
@@ -247,15 +266,18 @@ class RequirementAssignment(ExtensiblePresentation):
 
     @cachedmethod
     def _get_node(self, context):
-        node_name = self.node
-        if node_name is not None:
-            node = context.presentation.get_from_dict('service_template', 'topology_template',
-                                                      'node_templates', node_name)
-            if node is not None:
-                return node, 'node_template'
-            node = context.presentation.get_from_dict('service_template', 'node_types', node_name)
-            if node is not None:
-                return node, 'node_type'
+        node = self.node
+
+        if node is not None:
+            node_template = context.presentation.get_from_dict('service_template',
+                                                               'topology_template',
+                                                               'node_templates', node)
+            if node_template is not None:
+                return node_template, 'node_template'
+            node_type = get_type_by_full_or_shorthand_name(context, node, 'node_types')
+            if node_type is not None:
+                return node_type, 'node_type'
+
         return None, None
 
     @cachedmethod
@@ -268,11 +290,10 @@ class RequirementAssignment(ExtensiblePresentation):
                 capabilities = node._get_capabilities(context)
                 if capability in capabilities:
                     return capabilities[capability], 'capability_assignment'
-            else:
-                capability_types = context.presentation.get_from_dict('service_template',
-                                                                      'capability_types')
-                if (capability_types is not None) and (capability in capability_types):
-                    return capability_types[capability], 'capability_type'
+            capability_type = get_type_by_full_or_shorthand_name(context, capability,
+                                                                 'capability_types')
+            if capability_type is not None:
+                return capability_type, 'capability_type'
 
         return None, None
 
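To summarize the precedence implemented by _get_extensions above: inherited
operation extensions are applied root-first down the interface type hierarchy,
then the containing interface assignment's extensions, then the operation
assignment's own extensions, with later updates winning. A toy illustration
(the dict values are invented):

    extensions = {}
    extensions.update({'relationship_edge': 'source'})  # e.g. from the interface type
    extensions.update({'relationship_edge': 'target'})  # overridden by the assignment
    assert extensions['relationship_edge'] == 'target'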



[2/9] incubator-ariatosca git commit: ARIA-127 Make use of in-memory sqlite more robust

Posted by ra...@apache.org.
ARIA-127 Make use of in-memory sqlite more robust


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/8e1d059f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/8e1d059f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/8e1d059f

Branch: refs/heads/ARIA-48-aria-cli
Commit: 8e1d059f9747327b4036b98c1f842d3f05c6c5f0
Parents: 3dadc9f
Author: Tal Liron <ta...@gmail.com>
Authored: Tue Apr 11 18:19:38 2017 -0500
Committer: Tal Liron <ta...@gmail.com>
Committed: Thu Apr 13 10:56:31 2017 -0500

----------------------------------------------------------------------
 tests/storage/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8e1d059f/tests/storage/__init__.py
----------------------------------------------------------------------
diff --git a/tests/storage/__init__.py b/tests/storage/__init__.py
index c5d7678..66424db 100644
--- a/tests/storage/__init__.py
+++ b/tests/storage/__init__.py
@@ -51,6 +51,6 @@ def init_inmemory_model_storage():
 
     engine = create_engine(uri, **engine_kwargs)
     session_factory = orm.sessionmaker(bind=engine)
-    session = session_factory()
+    session = orm.scoped_session(session_factory=session_factory)
 
     return dict(engine=engine, session=session)

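For context, a standalone sketch of the pattern this fix adopts: scoped_session
wraps the session factory in a thread-local registry, so each thread touching the
shared in-memory SQLite engine gets its own session. The engine URI below is
illustrative, not copied from the test suite:

    from sqlalchemy import create_engine, orm

    engine = create_engine('sqlite:///:memory:')
    session_factory = orm.sessionmaker(bind=engine)
    session = orm.scoped_session(session_factory=session_factory)

    session.execute('SELECT 1')  # proxies to the current thread's session
    session.remove()             # discards that thread's session when done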

[6/9] incubator-ariatosca git commit: ARIA-48 cli

Posted by ra...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml
new file mode 100644
index 0000000..d0b0854
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml
@@ -0,0 +1,3 @@
+storage_snapshot_id: "snapshot-id"
+storage_location: /mnt
+cpus: 4
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/block-storage-2/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-2/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-2/inputs.yaml
new file mode 100644
index 0000000..d0b0854
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/block-storage-2/inputs.yaml
@@ -0,0 +1,3 @@
+storage_snapshot_id: "snapshot-id"
+storage_location: /mnt
+cpus: 4
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/block-storage-3/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-3/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-3/inputs.yaml
new file mode 100644
index 0000000..daca041
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/block-storage-3/inputs.yaml
@@ -0,0 +1,2 @@
+storage_location: /mnt
+cpus: 4
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/block-storage-4/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-4/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-4/inputs.yaml
new file mode 100644
index 0000000..18e457d
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/block-storage-4/inputs.yaml
@@ -0,0 +1,2 @@
+storage_snapshot_id: "snapshot-id"
+cpus: 4
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/block-storage-5/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-5/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-5/inputs.yaml
new file mode 100644
index 0000000..d0b0854
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/block-storage-5/inputs.yaml
@@ -0,0 +1,3 @@
+storage_snapshot_id: "snapshot-id"
+storage_location: /mnt
+cpus: 4
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/block-storage-6/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-6/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-6/inputs.yaml
new file mode 100644
index 0000000..d0b0854
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/block-storage-6/inputs.yaml
@@ -0,0 +1,3 @@
+storage_snapshot_id: "snapshot-id"
+storage_location: /mnt
+cpus: 4
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/compute-1/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/compute-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/compute-1/inputs.yaml
new file mode 100644
index 0000000..c1ee88a
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/compute-1/inputs.yaml
@@ -0,0 +1 @@
+cpus: 4
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/multi-tier-1/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/multi-tier-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/multi-tier-1/inputs.yaml
new file mode 100644
index 0000000..5302bbf
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/multi-tier-1/inputs.yaml
@@ -0,0 +1 @@
+my_cpus: 8
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/network-1/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/network-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/network-1/inputs.yaml
new file mode 100644
index 0000000..9687bb0
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/network-1/inputs.yaml
@@ -0,0 +1 @@
+network_name: "network"
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/network-2/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/network-2/inputs.yaml b/examples/tosca-simple-1.0/use-cases/network-2/inputs.yaml
new file mode 100644
index 0000000..9687bb0
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/network-2/inputs.yaml
@@ -0,0 +1 @@
+network_name: "network"
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/network-3/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/network-3/inputs.yaml b/examples/tosca-simple-1.0/use-cases/network-3/inputs.yaml
new file mode 100644
index 0000000..9687bb0
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/network-3/inputs.yaml
@@ -0,0 +1 @@
+network_name: "network"
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/object-storage-1/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/object-storage-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/object-storage-1/inputs.yaml
new file mode 100644
index 0000000..57f99a3
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/object-storage-1/inputs.yaml
@@ -0,0 +1 @@
+objectstore_name: "objectstore"
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/software-component-1/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/software-component-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/software-component-1/inputs.yaml
new file mode 100644
index 0000000..c1ee88a
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/software-component-1/inputs.yaml
@@ -0,0 +1 @@
+cpus: 4
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
index 0e9177f..9576260 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
@@ -19,6 +19,7 @@ Creates ARIA service template models based on the TOSCA presentation.
 Relies on many helper methods in the presentation classes. 
 """
 
+import os
 import re
 from types import FunctionType
 from datetime import datetime
@@ -41,7 +42,7 @@ IMPLEMENTATION_PREFIX_REGEX = re.compile(r'(?<!\\)(?:\\\\)*>')
 
 def create_service_template_model(context): # pylint: disable=too-many-locals,too-many-branches
     model = ServiceTemplate(created_at=datetime.now(),
-                            main_file_name=str(context.presentation.location))
+                            main_file_name=os.path.basename(str(context.presentation.location)))
 
     model.description = context.presentation.get('service_template', 'description', 'value')
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/requirements.in
----------------------------------------------------------------------
diff --git a/requirements.in b/requirements.in
index bc27479..3950140 100644
--- a/requirements.in
+++ b/requirements.in
@@ -10,6 +10,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# In order to create the requirements.txt file, execute
+# pip-compile --output-file requirements.txt requirements.in (requires the pip-tools package).
+
 PyYAML<3.13
 requests>=2.3.0, <2.14.0
 networkx>=1.9, <1.10 # version 1.10 dropped support of python 2.6
@@ -25,6 +28,12 @@ SQLAlchemy>=1.1.0, <1.2  # version 1.2 dropped support of python 2.6
 wagon==0.6.0
 bottle>=0.12.0, <0.13
 Fabric>=1.13.0, <1.14
+click>=4.1, < 5.0
+colorama>=0.3.3, < 0.3.5
+PrettyTable>=0.7,<0.8
+click_didyoumean==0.0.3
+backports.shutil_get_terminal_size==1.0.0
+logutils==0.3.4.1
 
 # Since the tool we are using to generate our requirements.txt, `pip-tools`,
 # does not currently support conditional dependencies (;), we're adding our original

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/requirements.txt
----------------------------------------------------------------------
diff --git a/requirements.txt b/requirements.txt
index 901aa75..3accaa3 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,28 +4,30 @@
 #
 #    pip-compile --output-file requirements.txt requirements.in
 #
-
-# ----------------------------------------------------------------------------------
 # Since the tool we are using to generate our requirements.txt, `pip-tools`,
 # does not currently support conditional dependencies (;), we're adding our original
-# conditional dependencies here manually.
+# conditional dependencies here as comments, and manually adding them to our
+# generated requirements.txt file.
 # The relevant pip-tools issue: https://github.com/jazzband/pip-tools/issues/435
 
-importlib==1.0.4 ; python_version < '2.7'
-ordereddict==1.1 ; python_version < '2.7'
-total-ordering==0.1.0 ; python_version < '2.7'
+importlib ; python_version < '2.7'
+ordereddict ; python_version < '2.7'
+total-ordering ; python_version < '2.7'  # only one version on pypi
 # Fabric makes use of this library, but doesn't bring it :(
 pypiwin32==219 ; sys_platform == 'win32'
 # ----------------------------------------------------------------------------------
 
-appdirs==1.4.3            # via setuptools
 args==0.1.0               # via clint
 asn1crypto==0.22.0        # via cryptography
+backports.shutil_get_terminal_size==1.0.0
 blinker==1.4
 bottle==0.12.13
 cachecontrol[filecache]==0.12.1
 cffi==1.10.0              # via cryptography
+click==4.1
+click_didyoumean==0.0.3
 clint==0.5.1
+colorama==0.3.4
 cryptography==1.8.1       # via paramiko
 decorator==4.0.11         # via networkx
 enum34==1.1.6             # via cryptography
@@ -35,11 +37,13 @@ ipaddress==1.0.18         # via cryptography
 jinja2==2.8.1
 jsonpickle==0.9.4
 lockfile==0.12.2          # via cachecontrol
+logutils==0.3.4.1
 markupsafe==1.0           # via jinja2
 msgpack-python==0.4.8     # via cachecontrol
 networkx==1.9.1
-packaging==16.8           # via cryptography, setuptools
+packaging==16.8           # via cryptography
 paramiko==2.1.2           # via fabric
+prettytable==0.7.2
 pyasn1==0.2.3             # via paramiko
 pycparser==2.17           # via cffi
 pyparsing==2.2.0          # via packaging
@@ -49,7 +53,7 @@ retrying==1.3.3
 ruamel.ordereddict==0.4.9  # via ruamel.yaml
 ruamel.yaml==0.11.15
 shortuuid==0.5.0
-six==1.10.0               # via cryptography, packaging, retrying, setuptools
+six==1.10.0               # via cryptography, packaging, retrying
 sqlalchemy==1.1.6
 wagon==0.6.0
 wheel==0.29.0             # via wagon

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/setup.py
----------------------------------------------------------------------
diff --git a/setup.py b/setup.py
index 3d72ebc..b64453a 100644
--- a/setup.py
+++ b/setup.py
@@ -61,7 +61,7 @@ except IOError:
     extras_require = {}
 
 
-console_scripts = ['aria = aria.cli.cli:main']
+console_scripts = ['aria = aria.cli.main:main']
 
 
 def _generate_user_options(command):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/.pylintrc
----------------------------------------------------------------------
diff --git a/tests/.pylintrc b/tests/.pylintrc
index 06409e9..eead6e8 100644
--- a/tests/.pylintrc
+++ b/tests/.pylintrc
@@ -77,7 +77,7 @@ confidence=
 # --enable=similarities". If you want to run only the classes checker, but have
 # no Warning level messages displayed, use"--disable=all --enable=classes
 # --disable=W"
-disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,redefined-builtin,no-self-use,missing-docstring,attribute-defined-outside-init,redefined-outer-name,import-error,redefined-variable-type,broad-except,protected-access,global-statement,too-many-locals,abstract-method,no-member
+disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,redefined-builtin,no-self-use,missing-docstring,attribute-defined-outside-init,redefined-outer-name,import-error,redefined-variable-type,broad-except,protected-access,global-statement,too-many-locals,abstract-method,no-member,unused-argument
 
 [REPORTS]
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/cli/__init__.py
----------------------------------------------------------------------
diff --git a/tests/cli/__init__.py b/tests/cli/__init__.py
new file mode 100644
index 0000000..ae1e83e
--- /dev/null
+++ b/tests/cli/__init__.py
@@ -0,0 +1,14 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/cli/base_test.py
----------------------------------------------------------------------
diff --git a/tests/cli/base_test.py b/tests/cli/base_test.py
new file mode 100644
index 0000000..9268f71
--- /dev/null
+++ b/tests/cli/base_test.py
@@ -0,0 +1,62 @@
+from StringIO import StringIO
+import logging
+
+import pytest
+
+import tests.cli.runner as runner
+from tests.cli.utils import setup_logger, MockStorage
+
+
+@pytest.fixture
+def mock_storage():
+    return MockStorage()
+
+
+@pytest.mark.usefixtures("redirect_logger")
+class TestCliBase(object):
+
+    @staticmethod
+    @pytest.fixture(scope="class")
+    def redirect_logger():
+
+        setup_logger(logger_name='aria.cli.main',
+                     handlers=[logging.StreamHandler(TestCliBase._logger_output)],
+                     logger_format='%(message)s')
+        yield
+        setup_logger(logger_name='aria.cli.main',
+                     handlers=_default_logger_config['handlers'],
+                     level=_default_logger_config['level'])
+
+    _logger_output = StringIO()
+
+    def invoke(self, command):
+        self._logger_output.truncate(0)
+        return runner.invoke(command)
+
+    @property
+    def logger_output_string(self):
+        return self._logger_output.getvalue()
+
+
+def assert_exception_raised(outcome, expected_exception, expected_msg=''):
+    assert isinstance(outcome.exception, expected_exception)
+    assert expected_msg == str(outcome.exception)
+
+
+# This exists because we want monkeypatch to replace a function with one that raises an
+# exception. Doing that with an in-place lambda is not trivial (a lambda cannot contain a
+# raise statement), so this helper returns the raising function instead.
+def raise_exception(exception, msg=''):
+
+    def inner(*args, **kwargs):
+        raise exception(msg)
+
+    return inner
+
+
+def get_default_logger_config():
+    logger = logging.getLogger('aria.cli.main')
+    return {'handlers': logger.handlers,
+            'level': logger.level}
+
+_default_logger_config = get_default_logger_config()

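A usage sketch for the raise_exception helper above (the patched target,
some_module.some_function, is hypothetical; pytest is assumed imported):

    def test_failure_path(monkeypatch):
        # Replace some_module.some_function with a callable that raises.
        monkeypatch.setattr(some_module, 'some_function',
                            raise_exception(RuntimeError, msg='induced failure'))
        with pytest.raises(RuntimeError):
            some_module.some_function()
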
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/cli/runner.py
----------------------------------------------------------------------
diff --git a/tests/cli/runner.py b/tests/cli/runner.py
new file mode 100644
index 0000000..1682f95
--- /dev/null
+++ b/tests/cli/runner.py
@@ -0,0 +1,11 @@
+import aria.cli.commands as commands
+import click.testing
+
+
+def invoke(command_string):
+    command_list = command_string.split()
+    command, sub, args = command_list[0], command_list[1], command_list[2:]
+    runner = click.testing.CliRunner()
+    outcome = runner.invoke(getattr(
+        getattr(commands, command), sub), args)
+    return outcome

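Given the splitting above, the first token of the command string names an attribute
of aria.cli.commands and the second names the subcommand; the remaining tokens become
arguments. For example (command strings taken from the tests below; the exit_code
check uses the standard click.testing.Result API):

    outcome = invoke('node_templates show 1')   # -> commands.node_templates.show, args ['1']
    outcome = invoke('nodes list -s test_s')    # -> commands.nodes.list, args ['-s', 'test_s']
    assert outcome.exit_code == 0
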
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/cli/test_node_templates.py
----------------------------------------------------------------------
diff --git a/tests/cli/test_node_templates.py b/tests/cli/test_node_templates.py
new file mode 100644
index 0000000..f0ad539
--- /dev/null
+++ b/tests/cli/test_node_templates.py
@@ -0,0 +1,101 @@
+from mock import ANY
+import pytest
+
+from aria.cli.env import _Environment
+from tests.cli.base_test import TestCliBase, mock_storage  # pylint: disable=unused-import
+
+
+class TestNodeTemplatesShow(TestCliBase):
+
+    def test_no_properties_no_nodes(self, monkeypatch, mock_storage):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('node_templates show 1')
+        assert 'Showing node template 1' in self.logger_output_string
+        assert 'Node template properties:' in self.logger_output_string
+        assert 'No properties' in self.logger_output_string
+        assert 'prop1' not in self.logger_output_string
+        assert 'value1' not in self.logger_output_string
+        assert 'No nodes' in self.logger_output_string
+        assert 'node1' not in self.logger_output_string
+
+    def test_one_property_no_nodes(self, monkeypatch, mock_storage):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('node_templates show 2')
+        assert 'Showing node template 2' in self.logger_output_string
+        assert 'Node template properties:' in self.logger_output_string
+        assert 'No properties' not in self.logger_output_string
+        assert 'prop1' in self.logger_output_string and 'value1' in self.logger_output_string
+        assert 'No nodes' in self.logger_output_string
+        assert 'node1' not in self.logger_output_string
+
+    def test_no_properties_one_node(self, monkeypatch, mock_storage):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('node_templates show 3')
+        assert 'Showing node template 3' in self.logger_output_string
+        assert 'Node template properties:' in self.logger_output_string
+        assert 'No properties' in self.logger_output_string
+        assert 'prop1' not in self.logger_output_string
+        assert 'value1' not in self.logger_output_string
+        assert 'No nodes' not in self.logger_output_string
+        assert 'node1' in self.logger_output_string
+
+    def test_one_property_one_node(self, monkeypatch, mock_storage):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('node_templates show 4')
+        assert 'Showing node template 4' in self.logger_output_string
+        assert 'Node template properties:' in self.logger_output_string
+        assert 'No properties' not in self.logger_output_string
+        assert 'prop1' in self.logger_output_string and 'value1' in self.logger_output_string
+        assert 'No nodes' not in self.logger_output_string
+        assert 'node1' in self.logger_output_string
+
+
+class TestNodeTemplatesList(TestCliBase):
+
+    @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [
+        ('', '', 'service_template_name', 'asc'),
+        ('', ' --descending', 'service_template_name', 'desc'),
+        (' --sort-by name', '', 'name', 'asc'),
+        (' --sort-by name', ' --descending', 'name', 'desc')
+    ])
+    def test_list_specified_service_template(self, monkeypatch, mock_storage, sort_by, order,
+                                             sort_by_in_output, order_in_output):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('node_templates list -t test_st{sort_by}{order}'.format(sort_by=sort_by,
+                                                                            order=order))
+        assert 'Listing node templates for service template test_st...' in self.logger_output_string
+        assert 'Listing all node templates...' not in self.logger_output_string
+
+        node_templates_list = mock_storage.node_template.list
+        node_templates_list.assert_called_once_with(sort={sort_by_in_output: order_in_output},
+                                                    filters={'service_template': ANY})
+        assert 'Node templates:' in self.logger_output_string
+        assert 'test_st' in self.logger_output_string
+        assert 'test_nt' in self.logger_output_string
+
+    @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [
+        ('', '', 'service_template_name', 'asc'),
+        ('', ' --descending', 'service_template_name', 'desc'),
+        (' --sort-by name', '', 'name', 'asc'),
+        (' --sort-by name', ' --descending', 'name', 'desc')
+    ])
+    def test_list_no_specified_service_template(self, monkeypatch, mock_storage, sort_by, order,
+                                                sort_by_in_output, order_in_output):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('node_templates list{sort_by}{order}'.format(sort_by=sort_by, order=order))
+        assert 'Listing all node templates...' in self.logger_output_string
+        assert 'Listing node templates for service template test_st...' not in \
+               self.logger_output_string
+
+        node_templates_list = mock_storage.node_template.list
+        node_templates_list.assert_called_once_with(sort={sort_by_in_output: order_in_output},
+                                                    filters={})
+        assert 'Node templates:' in self.logger_output_string
+        assert 'test_st' in self.logger_output_string
+        assert 'test_nt' in self.logger_output_string

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/cli/test_nodes.py
----------------------------------------------------------------------
diff --git a/tests/cli/test_nodes.py b/tests/cli/test_nodes.py
new file mode 100644
index 0000000..9be97ca
--- /dev/null
+++ b/tests/cli/test_nodes.py
@@ -0,0 +1,76 @@
+import pytest
+from mock import ANY
+
+from aria.cli.env import _Environment
+from tests.cli.base_test import TestCliBase, mock_storage  # pylint: disable=unused-import
+
+
+class TestNodesShow(TestCliBase):
+
+    def test_no_attributes(self, monkeypatch, mock_storage):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('nodes show 1')
+        assert 'Showing node 1' in self.logger_output_string
+        assert 'Node:' in self.logger_output_string
+        assert 'Node attributes:' in self.logger_output_string
+        assert 'No attributes' in self.logger_output_string
+        assert 'attribute1' not in self.logger_output_string
+        assert 'value1' not in self.logger_output_string
+
+    def test_one_attribute(self, monkeypatch, mock_storage):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('nodes show 2')
+        assert 'Showing node 2' in self.logger_output_string
+        assert 'Node:' in self.logger_output_string
+        assert 'Node attributes:' in self.logger_output_string
+        assert 'No attributes' not in self.logger_output_string
+        assert 'attribute1' in self.logger_output_string and 'value1' in self.logger_output_string
+
+
+class TestNodesList(TestCliBase):
+
+    @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [
+        ('', '', 'service_name', 'asc'),
+        ('', ' --descending', 'service_name', 'desc'),
+        (' --sort-by name', '', 'name', 'asc'),
+        (' --sort-by name', ' --descending', 'name', 'desc')
+    ])
+    def test_list_specified_service(self, monkeypatch, mock_storage, sort_by, order,
+                                    sort_by_in_output, order_in_output):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('nodes list -s test_s{sort_by}{order}'.format(sort_by=sort_by,
+                                                                  order=order))
+        assert 'Listing nodes for service test_s...' in self.logger_output_string
+        assert 'Listing all nodes...' not in self.logger_output_string
+
+        nodes_list = mock_storage.node.list
+        nodes_list.assert_called_once_with(sort={sort_by_in_output: order_in_output},
+                                           filters={'service': ANY})
+        assert 'Nodes:' in self.logger_output_string
+        assert 'test_s' in self.logger_output_string
+        assert 'test_n' in self.logger_output_string
+
+    @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [
+        ('', '', 'service_name', 'asc'),
+        ('', ' --descending', 'service_name', 'desc'),
+        (' --sort-by name', '', 'name', 'asc'),
+        (' --sort-by name', ' --descending', 'name', 'desc')
+    ])
+    def test_list_no_specified_service(self, monkeypatch, mock_storage, sort_by, order,
+                                       sort_by_in_output, order_in_output):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('nodes list{sort_by}{order}'.format(sort_by=sort_by,
+                                                        order=order))
+        assert 'Listing nodes for service test_s...' not in self.logger_output_string
+        assert 'Listing all nodes...' in self.logger_output_string
+
+        nodes_list = mock_storage.node.list
+        nodes_list.assert_called_once_with(sort={sort_by_in_output: order_in_output},
+                                           filters={})
+        assert 'Nodes:' in self.logger_output_string
+        assert 'test_s' in self.logger_output_string
+        assert 'test_n' in self.logger_output_string

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/cli/test_service_templates.py
----------------------------------------------------------------------
diff --git a/tests/cli/test_service_templates.py b/tests/cli/test_service_templates.py
new file mode 100644
index 0000000..ef70c37
--- /dev/null
+++ b/tests/cli/test_service_templates.py
@@ -0,0 +1,174 @@
+import pytest
+
+from aria.cli import service_template_utils, csar
+from aria.cli.env import _Environment
+from aria.cli.exceptions import AriaCliError
+from aria.core import Core
+from aria.exceptions import AriaException
+from aria.storage import exceptions as storage_exceptions
+from tests.cli.base_test import TestCliBase, assert_exception_raised, raise_exception, mock_storage  # pylint: disable=unused-import
+
+
+class TestServiceTemplatesShow(TestCliBase):
+
+    def test_show_no_services_no_description(self, monkeypatch, mock_storage):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('service_templates show no_services_no_description')
+
+        assert 'Showing service template no_services_no_description...' in self.logger_output_string
+        assert 'Description:' not in self.logger_output_string
+        assert 'Existing services:\n[]' in self.logger_output_string
+
+    def test_show_no_services_yes_description(self, monkeypatch, mock_storage):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('service_templates show no_services_yes_description')
+
+        assert 'Showing service template no_services_yes_description...' in \
+               self.logger_output_string
+        assert 'Description:\ntest_description' in self.logger_output_string
+        assert 'Existing services:\n[]' in self.logger_output_string
+
+    def test_show_one_service_no_description(self, monkeypatch, mock_storage):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('service_templates show one_service_no_description')
+
+        assert 'Showing service template one_service_no_description...' in self.logger_output_string
+        assert 'Description:' not in self.logger_output_string
+        assert "Existing services:\n['test_s']" in self.logger_output_string
+
+    def test_show_one_service_yes_description(self, monkeypatch, mock_storage):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('service_templates show one_service_yes_description')
+
+        assert 'Showing service template one_service_yes_description...' in \
+               self.logger_output_string
+        assert 'Description:\ntest_description' in self.logger_output_string
+        assert "Existing services:\n['test_s']" in self.logger_output_string
+
+
+class TestServiceTemplatesList(TestCliBase):
+
+    @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [
+        ('', '', 'created_at', 'asc'),
+        ('', ' --descending', 'created_at', 'desc'),
+        (' --sort-by name', '', 'name', 'asc'),
+        (' --sort-by name', ' --descending', 'name', 'desc')
+    ])
+    def test_all_sorting_combinations(self, monkeypatch, mock_storage, sort_by, order,
+                                      sort_by_in_output, order_in_output):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('service_templates list{sort_by}{order}'.format(sort_by=sort_by, order=order))
+
+        mock_storage.service_template.list.assert_called_with(
+            sort={sort_by_in_output: order_in_output})
+        assert 'Listing all service templates...' in self.logger_output_string
+        assert 'test_st' in self.logger_output_string
+
+
+class TestServiceTemplatesStore(TestCliBase):
+
+    def test_store_no_exception(self, monkeypatch, mock_object):
+
+        monkeypatch.setattr(Core, 'create_service_template', mock_object)
+        monkeypatch.setattr(service_template_utils, 'get', mock_object)
+        self.invoke('service_templates store stubpath test_st')
+        assert 'Storing service template test_st...' in self.logger_output_string
+        assert 'Service template test_st stored' in self.logger_output_string
+
+    def test_store_raises_exception_resulting_from_name_uniqueness(self, monkeypatch, mock_object):
+
+        monkeypatch.setattr(service_template_utils, 'get', mock_object)
+        monkeypatch.setattr(Core,
+                            'create_service_template',
+                            raise_exception(storage_exceptions.NotFoundError,
+                                            msg='UNIQUE constraint failed'))
+
+        assert_exception_raised(
+            self.invoke('service_templates store stubpath test_st'),
+            expected_exception=AriaCliError,
+            expected_msg='Could not store service template `test_st`\n'
+                         'There already exists a service template with the same name')
+        assert 'Storing service template test_st...' in self.logger_output_string
+
+    def test_store_raises_exception(self, monkeypatch, mock_object):
+
+        monkeypatch.setattr(service_template_utils, 'get', mock_object)
+        monkeypatch.setattr(Core,
+                            'create_service_template',
+                            raise_exception(storage_exceptions.NotFoundError))
+
+        assert_exception_raised(
+            self.invoke('service_templates store stubpath test_st'),
+            expected_exception=AriaCliError)
+        assert 'Storing service template test_st...' in self.logger_output_string
+
+
+class TestServiceTemplatesDelete(TestCliBase):
+
+    def test_delete_no_exception(self, monkeypatch, mock_object):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_object)
+        monkeypatch.setattr(Core, 'delete_service_template', mock_object)
+        self.invoke('service_templates delete test_st')
+        assert 'Deleting service template test_st...' in self.logger_output_string
+        assert 'Service template test_st deleted' in self.logger_output_string
+
+    def test_delete_raises_exception(self, monkeypatch, mock_object):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_object)
+        monkeypatch.setattr(Core,
+                            'delete_service_template',
+                            raise_exception(storage_exceptions.NotFoundError))
+
+        assert_exception_raised(
+            self.invoke('service_templates delete test_st'),
+            expected_exception=AriaCliError,
+            expected_msg='')
+        assert 'Deleting service template test_st...' in self.logger_output_string
+
+
+class TestServiceTemplatesInputs(TestCliBase):
+
+    def test_inputs_existing_inputs(self, monkeypatch, mock_storage):
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('service_templates inputs with_inputs')
+        assert 'Showing inputs for service template with_inputs...' in self.logger_output_string
+        assert 'input1' in self.logger_output_string and 'value1' in self.logger_output_string
+
+    def test_inputs_no_inputs(self, monkeypatch, mock_storage):
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('service_templates inputs without_inputs')
+        assert 'Showing inputs for service template without_inputs...' in self.logger_output_string
+        assert 'No inputs' in self.logger_output_string
+
+
+class TestServiceTemplatesValidate(TestCliBase):
+
+    def test_validate_no_exception(self, monkeypatch, mock_object):
+        monkeypatch.setattr(Core, 'validate_service_template', mock_object)
+        monkeypatch.setattr(service_template_utils, 'get', mock_object)
+        self.invoke('service_templates validate stubpath')
+        assert 'Validating service template: stubpath' in self.logger_output_string
+        assert 'Service template validated successfully' in self.logger_output_string
+
+    def test_validate_raises_exception(self, monkeypatch, mock_object):
+        monkeypatch.setattr(Core, 'validate_service_template', raise_exception(AriaException))
+        monkeypatch.setattr(service_template_utils, 'get', mock_object)
+        assert_exception_raised(
+            self.invoke('service_templates validate stubpath'),
+            expected_exception=AriaCliError)
+        assert 'Validating service template: stubpath' in self.logger_output_string
+
+
+class TestServiceTemplatesCreateArchive(TestCliBase):
+
+    def test_create_archive_successful(self, monkeypatch, mock_object):
+        monkeypatch.setattr(csar, 'write', mock_object)
+        self.invoke('service_templates create_archive stubpath stubdest')
+        assert 'Creating a csar archive' in self.logger_output_string
+        assert 'Csar archive created at stubdest' in self.logger_output_string

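raise_exception is imported from tests/cli/base_test.py, which is not part of this diff. Judging by its call sites, it presumably returns a callable that raises the given exception when invoked, suitable for monkeypatch.setattr; a plausible sketch, not the actual implementation:

    def raise_exception(exception, msg=''):
        def inner(*args, **kwargs):
            raise exception(msg)
        return inner
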
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/cli/test_services.py
----------------------------------------------------------------------
diff --git a/tests/cli/test_services.py b/tests/cli/test_services.py
new file mode 100644
index 0000000..116e449
--- /dev/null
+++ b/tests/cli/test_services.py
@@ -0,0 +1,178 @@
+import pytest
+from mock import ANY
+
+from aria.cli.env import _Environment
+from aria.cli.exceptions import AriaCliError
+from aria.core import Core
+from aria.exceptions import (AriaException, DependentActiveExecutionsError,
+                             DependentAvailableNodesError)
+from aria.storage import exceptions as storage_exceptions
+from tests.cli.base_test import TestCliBase, raise_exception, assert_exception_raised, mock_storage  # pylint: disable=unused-import
+from tests.mock.models import create_service, create_service_template
+
+
+class TestServicesList(TestCliBase):
+
+    @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [
+        ('', '', 'created_at', 'asc'),
+        ('', ' --descending', 'created_at', 'desc'),
+        (' --sort-by name', '', 'name', 'asc'),
+        (' --sort-by name', ' --descending', 'name', 'desc')
+    ])
+    def test_list_no_specified_service_template(self, monkeypatch, mock_storage, sort_by, order,
+                                                sort_by_in_output, order_in_output):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('services list{sort_by}{order}'.format(sort_by=sort_by, order=order))
+        assert 'Listing all services...' in self.logger_output_string
+        assert 'Listing services for service template' not in self.logger_output_string
+
+        mock_storage.service.list.assert_called_once_with(sort={sort_by_in_output: order_in_output},
+                                                          filters={})
+        assert 'Services:' in self.logger_output_string
+        assert 'test_st' in self.logger_output_string
+        assert 'test_s' in self.logger_output_string
+
+    @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [
+        ('', '', 'created_at', 'asc'),
+        ('', ' --descending', 'created_at', 'desc'),
+        (' --sort-by name', '', 'name', 'asc'),
+        (' --sort-by name', ' --descending', 'name', 'desc')
+    ])
+    def test_list_specified_service_template(self, monkeypatch, mock_storage, sort_by, order,
+                                             sort_by_in_output, order_in_output):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('services list -t test_st{sort_by}{order}'.format(sort_by=sort_by, order=order))
+        assert 'Listing services for service template test_st...' in self.logger_output_string
+        assert 'Listing all services...' not in self.logger_output_string
+
+        mock_storage.service.list.assert_called_once_with(sort={sort_by_in_output: order_in_output},
+                                                          filters={'service_template': ANY})
+        assert 'Services:' in self.logger_output_string
+        assert 'test_st' in self.logger_output_string
+        assert 'test_s' in self.logger_output_string
+
+
+class TestServicesCreate(TestCliBase):
+
+    def test_create_no_exception(self, monkeypatch, mock_object):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_object)
+
+        test_st = create_service_template('test_st')
+        mock_object.return_value = create_service(test_st, 'test_s')
+        monkeypatch.setattr(Core, 'create_service', mock_object)
+        self.invoke('services create -t test_st test_s')
+
+        assert 'Creating new service from service template test_st...' in self.logger_output_string
+        assert "Service created. The service's name is test_s" in self.logger_output_string
+
+    def test_store_raises_storage_error_resulting_from_name_uniqueness(self, monkeypatch,
+                                                                       mock_object):
+        monkeypatch.setattr(_Environment, 'model_storage', mock_object)
+        monkeypatch.setattr(Core,
+                            'create_service',
+                            raise_exception(storage_exceptions.NotFoundError,
+                                            msg='UNIQUE constraint failed'))
+        assert_exception_raised(
+            self.invoke('services create -t test_st test_s'),
+            expected_exception=AriaCliError,
+            expected_msg='Could not store service `test_s`\n'
+                         'There already exists a service with the same name')
+
+        assert 'Creating new service from service template test_st...' in self.logger_output_string
+        assert "Service created. The service's name is test_s" not in self.logger_output_string
+
+    def test_store_raises_other_storage_error(self, monkeypatch, mock_object):
+        monkeypatch.setattr(_Environment, 'model_storage', mock_object)
+        monkeypatch.setattr(Core,
+                            'create_service',
+                            raise_exception(storage_exceptions.NotFoundError))
+
+        assert_exception_raised(
+            self.invoke('services create -t test_st test_s'),
+            expected_exception=AriaCliError)
+
+        assert 'Creating new service from service template test_st...' in self.logger_output_string
+        assert "Service created. The service's name is test_s" not in self.logger_output_string
+
+    def test_store_raises_aria_exception(self, monkeypatch, mock_storage):
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        monkeypatch.setattr(Core,
+                            'create_service',
+                            raise_exception(AriaException, msg='error creating service `test_s`'))
+
+        assert_exception_raised(
+            self.invoke('services create -t with_inputs test_s'),
+            expected_exception=AriaCliError,
+            expected_msg='error creating service `test_s`')
+
+        assert 'Creating new service from service template with_inputs...' in \
+               self.logger_output_string
+        assert 'error creating service `test_s`' in self.logger_output_string
+        assert 'input1' in self.logger_output_string and 'value1' in self.logger_output_string
+        assert "Service created. The service's name is test_s" not in self.logger_output_string
+
+
+class TestServicesDelete(TestCliBase):
+
+    def test_delete_no_exception(self, monkeypatch, mock_object):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_object)
+        monkeypatch.setattr(Core, 'delete_service', mock_object)
+        self.invoke('services delete test_s')
+        assert 'Deleting service test_s...' in self.logger_output_string
+        assert 'Service test_s deleted' in self.logger_output_string
+
+    def test_delete_active_execution_error(self, monkeypatch, mock_storage):
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        assert_exception_raised(
+            self.invoke('services delete service_with_active_executions'),
+            expected_exception=DependentActiveExecutionsError,
+            expected_msg="Can't delete service test_s - there is an active "
+                         "execution for this service. Active execution id: 1"
+        )
+        assert 'Deleting service service_with_active_executions...' in self.logger_output_string
+
+    def test_delete_available_nodes_error(self, monkeypatch, mock_storage):
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        assert_exception_raised(
+            self.invoke('services delete service_with_available_nodes'),
+            expected_exception=DependentAvailableNodesError,
+            expected_msg="Can't delete service test_s - "
+                         "there are available nodes for this service. Available node ids: 1"
+        )
+        assert 'Deleting service service_with_available_nodes...' in self.logger_output_string
+
+    def test_delete_available_nodes_error_with_force(self, monkeypatch, mock_storage):
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('services delete service_with_available_nodes --force')
+
+        assert mock_storage.service.delete.call_count == 1
+        assert 'Deleting service service_with_available_nodes...' in self.logger_output_string
+        assert 'Service service_with_available_nodes deleted' in self.logger_output_string
+
+
+class TestServicesOutputs(TestCliBase):
+    pass
+
+
+class TestServicesInputs(TestCliBase):
+
+    def test_inputs_no_inputs(self, monkeypatch, mock_storage):
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('services inputs service_with_no_inputs')
+
+        assert 'Showing inputs for service service_with_no_inputs...' in self.logger_output_string
+        assert 'No inputs' in self.logger_output_string
+        assert 'input1' not in self.logger_output_string
+        assert 'value1' not in self.logger_output_string
+
+    def test_inputs_one_input(self, monkeypatch, mock_storage):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('services inputs service_with_one_input')
+
+        assert 'Showing inputs for service service_with_one_input...' in self.logger_output_string
+        assert 'input1' in self.logger_output_string
+        assert 'value1' in self.logger_output_string
+        assert 'No inputs' not in self.logger_output_string

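assert_exception_raised likewise comes from base_test.py and is not shown in this commit. Assuming self.invoke() returns a click-style invocation result carrying the raised exception, a plausible sketch would be:

    def assert_exception_raised(outcome, expected_exception, expected_msg=''):
        assert isinstance(outcome.exception, expected_exception)
        assert expected_msg in str(outcome.exception)
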
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/cli/utils.py
----------------------------------------------------------------------
diff --git a/tests/cli/utils.py b/tests/cli/utils.py
new file mode 100644
index 0000000..20fdb90
--- /dev/null
+++ b/tests/cli/utils.py
@@ -0,0 +1,175 @@
+import logging
+import sys
+
+from mock import MagicMock
+
+from aria.modeling import models
+from tests.mock import models as mock_models
+
+
+def setup_logger(logger_name,
+                 level=logging.INFO,
+                 handlers=None,
+                 remove_existing_handlers=True,
+                 logger_format=None,
+                 propagate=True):
+    """
+    :param logger_name: Name of the logger.
+    :param level: Level for the logger (not for specific handler).
+    :param handlers: An optional list of handlers (formatter will be
+                     overridden); If None, only a StreamHandler for
+                     sys.stdout will be used.
+    :param remove_existing_handlers: Determines whether to remove existing
+                                     handlers before adding new ones.
+    :param logger_format: The format this logger will use.
+    :param propagate: Whether to propagate messages to the parent logger.
+    :return: A logger instance.
+    :rtype: logging.Logger
+    """
+
+    logger = logging.getLogger(logger_name)
+
+    if remove_existing_handlers:
+        for handler in logger.handlers:
+            logger.removeHandler(handler)
+
+    # as documented: default to a StreamHandler for sys.stdout when no
+    # handlers are supplied, instead of failing on iterating None
+    if handlers is None:
+        handlers = [logging.StreamHandler(sys.stdout)]
+
+    for handler in handlers:
+        if logger_format:
+            formatter = logging.Formatter(fmt=logger_format)
+            handler.setFormatter(formatter)
+        logger.addHandler(handler)
+
+    logger.setLevel(level)
+    if not propagate:
+        logger.propagate = False
+
+    return logger
+
+
+class MockStorage(object):
+
+    def __init__(self):
+        self.service_template = MockServiceTemplateStorage()
+        self.service = MockServiceStorage()
+        self.node_template = MockNodeTemplateStorage()
+        self.node = MockNodeStorage()
+
+
+class MockServiceTemplateStorage(object):
+
+    def __init__(self):
+        self.list = MagicMock(return_value=[mock_models.create_service_template('test_st')])
+
+    @staticmethod
+    def get_by_name(name):
+        st = mock_models.create_service_template('test_st')
+        if name == 'no_services_no_description':
+            pass
+        elif name == 'no_services_yes_description':
+            st.description = 'test_description'
+        elif name == 'one_service_no_description':
+            service = mock_models.create_service(st, 'test_s')
+            st.services = [service]
+        elif name == 'one_service_yes_description':
+            service = mock_models.create_service(st, 'test_s')
+            st.description = 'test_description'
+            st.services = [service]
+        elif name == 'with_inputs':
+            input_ = mock_models.create_parameter(name='input1', value='value1')
+            st.inputs = {'input1': input_}
+        elif name == 'without_inputs':
+            st.inputs = {}
+        elif name == 'one_service':
+            service = mock_models.create_service(st, 'test_s')
+            st.services = [service]
+        return st
+
+
+class MockServiceStorage(object):
+
+    def __init__(self):
+        self.st = mock_models.create_service_template('test_st')
+        self.list = MagicMock(return_value=[mock_models.create_service(self.st, 'test_s')])
+        self.delete = MagicMock()
+
+    @staticmethod
+    def get(id):
+        test_st = mock_models.create_service_template('test_st')
+        test_s = mock_models.create_service(test_st, 'test_s')
+        if id == '1':
+            execution = mock_models.create_execution(test_s, status=models.Execution.STARTED)
+            execution.id = '1'
+            test_s.executions = [execution]
+        elif id == '2':
+            node_template = mock_models.create_node_template(service_template=test_st)
+            node = mock_models.create_node(name='test_node',
+                                           dependency_node_template=node_template,
+                                           service=test_s,
+                                           state=models.Node.STARTED)
+            node.id = '1'
+        return test_s
+
+    @staticmethod
+    def get_by_name(name):
+        test_st = mock_models.create_service_template('test_st')
+        test_s = mock_models.create_service(test_st, 'test_s')
+        if name == 'service_with_active_executions':
+            m = MagicMock()
+            m.id = '1'
+            return m
+        elif name == 'service_with_available_nodes':
+            m = MagicMock()
+            m.id = '2'
+            return m
+        elif name == 'service_with_no_inputs':
+            pass
+        elif name == 'service_with_one_input':
+            input_ = mock_models.create_parameter(name='input1', value='value1')
+            test_s.inputs = {'input1': input_}
+
+        return test_s
+
+
+class MockNodeTemplateStorage(object):
+
+    def __init__(self):
+        self.st = mock_models.create_service_template('test_st')
+        self.list = MagicMock(return_value=[mock_models.create_node_template(self.st, 'test_nt')])
+
+    @staticmethod
+    def get(id):
+        st = mock_models.create_service_template('test_st')
+        s = mock_models.create_service(st, 'test_s')
+        nt = mock_models.create_node_template(service_template=st, name='test_nt')
+        if id == '1':
+            pass
+        elif id == '2':
+            prop1 = mock_models.create_parameter('prop1', 'value1')
+            nt.properties = {'prop1': prop1}
+        elif id == '3':
+            mock_models.create_node('node1', nt, s)
+        elif id == '4':
+            prop1 = mock_models.create_parameter('prop1', 'value1')
+            nt.properties = {'prop1': prop1}
+            mock_models.create_node('node1', nt, s)
+        return nt
+
+
+class MockNodeStorage(object):
+
+    def __init__(self):
+        self.st = mock_models.create_service_template('test_st')
+        self.s = mock_models.create_service(self.st, 'test_s')
+        self.nt = mock_models.create_node_template(service_template=self.st, name='test_nt')
+        self.list = MagicMock(return_value=[mock_models.create_node('test_n', self.nt, self.s)])
+
+    @staticmethod
+    def get(id):
+        st = mock_models.create_service_template('test_st')
+        s = mock_models.create_service(st, 'test_s')
+        nt = mock_models.create_node_template(service_template=st, name='test_nt')
+        n = mock_models.create_node('test_n', nt, s)
+        if id == '1':
+            pass
+        elif id == '2':
+            n.runtime_properties = {'attribute1': 'value1'}
+        return n

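These mock storages are meant to be patched into the CLI environment through the mock_storage fixture from base_test.py (not included in this commit). A minimal sketch of the expected wiring, assuming the fixture simply instantiates MockStorage:

    import pytest

    from aria.cli.env import _Environment
    from tests.cli.utils import MockStorage


    @pytest.fixture
    def mock_storage():
        return MockStorage()


    def test_wiring_sketch(monkeypatch, mock_storage):
        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
        # invoke a CLI command here, then assert on the recorded calls,
        # e.g. mock_storage.node.list.assert_called_once_with(...)
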
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/conftest.py
----------------------------------------------------------------------
diff --git a/tests/conftest.py b/tests/conftest.py
index c501eeb..8f2c273 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -18,6 +18,7 @@ import logging
 import pytest
 
 import aria
+from aria import logger
 
 
 @pytest.fixture(scope='session', autouse=True)
@@ -37,11 +38,10 @@ def logging_handler_cleanup(request):
     :return:
     """
     def clear_logging_handlers():
-        logged_ctx_names = [
-            aria.orchestrator.context.workflow.WorkflowContext.__name__,
-            aria.orchestrator.context.operation.NodeOperationContext.__name__,
-            aria.orchestrator.context.operation.RelationshipOperationContext.__name__
-        ]
-        for logger_name in logged_ctx_names:
-            logging.getLogger(logger_name).handlers = []
+        logging.getLogger(logger.TASK_LOGGER_NAME).handlers = []
     request.addfinalizer(clear_logging_handlers)
+
+
+@pytest.fixture
+def mock_object(mocker):
+    return mocker.MagicMock()

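The new mock_object fixture depends on the pytest-mock plugin, which provides the mocker fixture; the object it returns is a standard MagicMock. A minimal usage sketch:

    def test_mock_object_sketch(mock_object):
        mock_object.return_value = 42
        assert mock_object(1, key='value') == 42
        mock_object.assert_called_once_with(1, key='value')
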
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/end2end/test_orchestrator.py
----------------------------------------------------------------------
diff --git a/tests/end2end/test_orchestrator.py b/tests/end2end/test_orchestrator.py
deleted file mode 100644
index 4dfca44..0000000
--- a/tests/end2end/test_orchestrator.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-import os
-
-from aria.orchestrator.runner import Runner
-from aria.orchestrator.workflows.builtin import BUILTIN_WORKFLOWS
-from aria.utils.imports import import_fullname
-from aria.utils.collections import OrderedDict
-from aria.cli.dry import convert_to_dry
-
-from tests.parser.service_templates import consume_node_cellar
-
-
-WORKFLOW_POLICY_INTERNAL_PROPERTIES = ('implementation', 'dependencies')
-
-
-def test_install():
-    _workflow('install')
-
-
-def test_custom():
-    _workflow('maintenance_on')
-
-
-def _workflow(workflow_name):
-    context, _ = consume_node_cellar()
-
-    convert_to_dry(context.modeling.instance)
-
-    # TODO: this logic will eventually stabilize and be part of the ARIA API,
-    # likely somewhere in aria.orchestrator.workflows
-    if workflow_name in BUILTIN_WORKFLOWS:
-        workflow_fn = import_fullname('aria.orchestrator.workflows.builtin.' + workflow_name)
-        inputs = {}
-    else:
-        workflow = context.modeling.instance.policies[workflow_name]
-        sys.path.append(os.path.dirname(str(context.presentation.location)))
-        workflow_fn = import_fullname(workflow.properties['implementation'].value)
-        inputs = OrderedDict([
-            (k, v.value) for k, v in workflow.properties.iteritems()
-            if k not in WORKFLOW_POLICY_INTERNAL_PROPERTIES
-        ])
-
-    def _initialize_storage(model_storage):
-        context.modeling.store(model_storage)
-
-    runner = Runner(workflow_name, workflow_fn, inputs, _initialize_storage,
-                    lambda: context.modeling.instance.id)
-    runner.run()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/end2end/test_tosca_simple_v1_0.py
----------------------------------------------------------------------
diff --git a/tests/end2end/test_tosca_simple_v1_0.py b/tests/end2end/test_tosca_simple_v1_0.py
deleted file mode 100644
index 4658fc3..0000000
--- a/tests/end2end/test_tosca_simple_v1_0.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from tests.parser.service_templates import (consume_use_case, consume_node_cellar)
-
-
-# Use Cases
-
-def test_use_case_compute_1():
-    consume_use_case('compute-1', 'instance')
-
-
-def test_use_case_software_component_1():
-    consume_use_case('software-component-1', 'instance')
-
-
-def test_use_case_block_storage_1():
-    consume_use_case('block-storage-1', 'instance')
-
-
-def test_use_case_block_storage_2():
-    consume_use_case('block-storage-2', 'instance')
-
-
-def test_use_case_block_storage_3():
-    consume_use_case('block-storage-3', 'instance')
-
-
-def test_use_case_block_storage_4():
-    consume_use_case('block-storage-4', 'instance')
-
-
-def test_use_case_block_storage_5():
-    consume_use_case('block-storage-5', 'instance')
-
-
-def test_use_case_block_storage_6():
-    consume_use_case('block-storage-6', 'instance')
-
-
-def test_use_case_object_storage_1():
-    consume_use_case('object-storage-1', 'instance')
-
-
-def test_use_case_network_1():
-    consume_use_case('network-1', 'instance')
-
-
-def test_use_case_network_2():
-    consume_use_case('network-2', 'instance')
-
-
-def test_use_case_network_3():
-    consume_use_case('network-3', 'instance')
-
-
-def test_use_case_network_4():
-    consume_use_case('network-4', 'instance')
-
-
-def test_use_case_webserver_dbms_1():
-    consume_use_case('webserver-dbms-1', 'template')
-
-
-def test_use_case_webserver_dbms_2():
-    consume_use_case('webserver-dbms-2', 'instance')
-
-
-def test_use_case_multi_tier_1():
-    consume_use_case('multi-tier-1', 'instance')
-
-
-def test_use_case_container_1():
-    consume_use_case('container-1', 'template')
-
-
-# NodeCellar
-
-def test_node_cellar_validation():
-    consume_node_cellar('validate')
-
-
-def test_node_cellar_validation_no_cache():
-    consume_node_cellar('validate', False)
-
-
-def test_node_cellar_presentation():
-    consume_node_cellar('presentation')
-
-
-def test_node_cellar_model():
-    consume_node_cellar('template')
-
-
-def test_node_cellar_types():
-    consume_node_cellar('types')
-
-
-def test_node_cellar_instance():
-    consume_node_cellar('instance')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/fixtures.py
----------------------------------------------------------------------
diff --git a/tests/fixtures.py b/tests/fixtures.py
new file mode 100644
index 0000000..3b1b9b5
--- /dev/null
+++ b/tests/fixtures.py
@@ -0,0 +1,70 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import shutil
+
+import pytest
+
+from aria import (
+    application_model_storage,
+    application_resource_storage
+)
+from aria.orchestrator import plugin
+from aria.storage import (
+    sql_mapi,
+    filesystem_rapi
+)
+
+from . import storage
+
+
+@pytest.fixture
+def inmemory_model():
+    model = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
+                                      initiator=storage.init_inmemory_model_storage)
+    yield model
+    storage.release_sqlite_storage(model)
+
+
+@pytest.fixture
+def fs_model(tmpdir):
+    result = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
+                                       initiator_kwargs=dict(base_dir=str(tmpdir)),
+                                       initiator=sql_mapi.init_storage)
+    yield result
+    storage.release_sqlite_storage(result)
+
+
+@pytest.fixture
+def resource_storage(tmpdir):
+    directory = tmpdir.join('resources')
+    directory.mkdir()
+    result = application_resource_storage(
+        filesystem_rapi.FileSystemResourceAPI,
+        api_kwargs=dict(directory=str(directory)))
+    yield result
+    shutil.rmtree(str(directory))
+
+
+@pytest.fixture
+def plugins_dir(tmpdir):
+    result = tmpdir.join('plugins')
+    result.mkdir()
+    return str(result)
+
+
+@pytest.fixture
+def plugin_manager(model, plugins_dir):
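+    # the 'model' fixture is expected to be supplied by the consuming test module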
+    return plugin.PluginManager(model=model, plugins_dir=plugins_dir)

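A sketch of how a test would consume these fixtures; the storage object exposes the usual ARIA model APIs (put, get_by_name, list and so on), backed here by in-memory SQLite:

    from tests.mock import models as mock_models


    def test_inmemory_model_sketch(inmemory_model):
        template = mock_models.create_service_template('test_st')
        inmemory_model.service_template.put(template)
        assert inmemory_model.service_template.get_by_name('test_st') == template
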
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/mock/context.py
----------------------------------------------------------------------
diff --git a/tests/mock/context.py b/tests/mock/context.py
index f943d7e..ac0a8a7 100644
--- a/tests/mock/context.py
+++ b/tests/mock/context.py
@@ -39,12 +39,17 @@ def simple(tmpdir, inmemory=False, context_kwargs=None, topology=None):
         api_kwargs=dict(directory=os.path.join(tmpdir, 'resources'))
     )
 
+    service_id = topology(model_storage)
+    execution = models.create_execution(model_storage.service.get(service_id))
+    model_storage.execution.put(execution)
+
     final_kwargs = dict(
         name='simple_context',
         model_storage=model_storage,
         resource_storage=resource_storage,
-        service_id=topology(model_storage),
+        service_id=service_id,
         workflow_name=models.WORKFLOW_NAME,
+        execution_id=execution.id,
         task_max_attempts=models.TASK_MAX_ATTEMPTS,
         task_retry_interval=models.TASK_RETRY_INTERVAL
     )

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/mock/models.py
----------------------------------------------------------------------
diff --git a/tests/mock/models.py b/tests/mock/models.py
index 1d29e2d..38c2b28 100644
--- a/tests/mock/models.py
+++ b/tests/mock/models.py
@@ -39,8 +39,8 @@ from aria.orchestrator.workflows.builtin.workflows import (
 
 SERVICE_NAME = 'test_service_name'
 SERVICE_TEMPLATE_NAME = 'test_service_template_name'
+NODE_TEMPLATE_NAME = 'test_node_template'
 WORKFLOW_NAME = 'test_workflow_name'
-EXECUTION_NAME = 'test_execution_name'
 TASK_RETRY_INTERVAL = 1
 TASK_MAX_ATTEMPTS = 1
 
@@ -81,6 +81,33 @@ def create_service(service_template, name=SERVICE_NAME):
     )
 
 
+def create_node_template(service_template,
+                         name=NODE_TEMPLATE_NAME,
+                         type=None,
+                         capability_templates=None,
+                         requirement_templates=None,
+                         interface_templates=None,
+                         default_instances=1,
+                         min_instances=1,
+                         max_instances=1):
+    # create the default type lazily - a shared default instance would leak
+    # between calls
+    type = type or models.Type(variant='node', name='test_node_type')
+    capability_templates = capability_templates or {}
+    requirement_templates = requirement_templates or []
+    interface_templates = interface_templates or {}
+    node_template = models.NodeTemplate(
+        name=name,
+        type=type,
+        capability_templates=capability_templates,
+        requirement_templates=requirement_templates,
+        interface_templates=interface_templates,
+        default_instances=default_instances,
+        min_instances=min_instances,
+        max_instances=max_instances,
+        service_template=service_template)
+
+    service_template.node_templates[node_template.name] = node_template
+    return node_template
+
+
 def create_dependency_node_template(service_template, name=DEPENDENCY_NODE_TEMPLATE_NAME):
     node_type = service_template.node_types.get_descendant('test_node_type')
     capability_type = service_template.capability_types.get_descendant('test_capability_type')
@@ -89,18 +116,12 @@ def create_dependency_node_template(service_template, name=DEPENDENCY_NODE_TEMPL
         name='capability',
         type=capability_type
     )
-
-    node_template = models.NodeTemplate(
+    return create_node_template(
+        service_template=service_template,
         name=name,
         type=node_type,
-        capability_templates=_dictify(capability_template),
-        default_instances=1,
-        min_instances=1,
-        max_instances=1,
-        service_template=service_template
+        capability_templates=_dictify(capability_template)
     )
-    service_template.node_templates[node_template.name] = node_template
-    return node_template
 
 
 def create_dependent_node_template(
@@ -111,29 +132,26 @@ def create_dependent_node_template(
         name='requirement',
         target_node_template=dependency_node_template
     )
-
-    node_template = models.NodeTemplate(
+    return create_node_template(
+        service_template=service_template,
         name=name,
         type=the_type,
-        default_instances=1,
-        min_instances=1,
-        max_instances=1,
         interface_templates=_dictify(get_standard_interface_template(service_template)),
         requirement_templates=[requirement_template],
-        service_template=service_template
     )
-    service_template.node_templates[node_template.name] = node_template
-    return node_template
 
 
-def create_node(name, dependency_node_template, service):
+def create_node(name, dependency_node_template, service, state=models.Node.INITIAL,
+                runtime_properties=None):
+    runtime_properties = runtime_properties or {}
     node = models.Node(
         name=name,
         type=dependency_node_template.type,
-        runtime_properties={'ip': '1.1.1.1'},
+        runtime_properties=runtime_properties,
         version=None,
         node_template=dependency_node_template,
-        state=models.Node.INITIAL,
+        state=state,
         scaling_groups=[],
         service=service,
         interfaces=get_standard_interface(service),
@@ -168,6 +186,12 @@ def create_interface_template(service_template, interface_name, operation_name,
 def create_interface(service, interface_name, operation_name, operation_kwargs=None,
                      interface_kwargs=None):
     the_type = service.service_template.interface_types.get_descendant('test_interface_type')
+
+    if operation_kwargs and operation_kwargs.get('inputs'):
+        operation_kwargs['inputs'] = dict(
+            (input_name, models.Parameter.wrap(input_name, input_value))
+            for input_name, input_value in operation_kwargs['inputs'].iteritems())
+
     operation = models.Operation(
         name=operation_name,
         **(operation_kwargs or {})
@@ -180,13 +204,14 @@ def create_interface(service, interface_name, operation_name, operation_kwargs=N
     )
 
 
-def create_execution(service):
+def create_execution(service, status=models.Execution.PENDING):
     return models.Execution(
         service=service,
-        status=models.Execution.STARTED,
+        status=status,
         workflow_name=WORKFLOW_NAME,
+        created_at=datetime.utcnow(),
         started_at=datetime.utcnow(),
-        parameters=None
+        inputs={}
     )
 
 
@@ -214,6 +239,11 @@ def create_plugin_specification(name='test_plugin', version='0.1'):
     )
 
 
+def create_parameter(name, value):
+    return models.Parameter.wrap(name, value)
+
+
 def _dictify(item):
     return dict(((item.name, item),))
 

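Both create_parameter and the new input wrapping in create_interface go through Parameter.wrap, which turns a plain Python value into a stored Parameter model. A minimal round-trip sketch, assuming the name and value attributes implied by the calls above:

    from aria.modeling import models

    param = models.Parameter.wrap('input1', 'value1')
    assert param.name == 'input1'
    assert param.value == 'value1'
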
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/mock/workflow.py
----------------------------------------------------------------------
diff --git a/tests/mock/workflow.py b/tests/mock/workflow.py
new file mode 100644
index 0000000..b12b9fa
--- /dev/null
+++ b/tests/mock/workflow.py
@@ -0,0 +1,26 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+
+from aria.orchestrator.decorators import workflow
+
+
+@workflow
+def mock_workflow(graph, ctx, output_path=None, **kwargs):  # pylint: disable=unused-argument
+    if output_path:
+        # writes call arguments to the specified output file
+        with open(output_path, 'w') as f:
+            json.dump(kwargs, f)

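A sketch of how a test could check what the mock workflow received: pass output_path when scheduling it, then read the dumped kwargs back:

    import json


    def read_mock_workflow_kwargs(output_path):
        # mock_workflow json.dump()s its **kwargs to this path when it runs
        with open(output_path) as f:
            return json.load(f)
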
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/modeling/test_models.py
----------------------------------------------------------------------
diff --git a/tests/modeling/test_models.py b/tests/modeling/test_models.py
index bd4eba4..d91249a 100644
--- a/tests/modeling/test_models.py
+++ b/tests/modeling/test_models.py
@@ -180,7 +180,7 @@ class TestServiceTemplate(object):
     @pytest.mark.parametrize(
         'is_valid, description, created_at, updated_at, main_file_name',
         [
-            (False, {}, now, now, '/path'),
+            (False, [], now, now, '/path'),
             (False, 'description', 'error', now, '/path'),
             (False, 'description', now, 'error', '/path'),
             (False, 'description', now, now, {}),
@@ -253,7 +253,7 @@ class TestService(object):
 class TestExecution(object):
 
     @pytest.mark.parametrize(
-        'is_valid, created_at, started_at, ended_at, error, is_system_workflow, parameters, '
+        'is_valid, created_at, started_at, ended_at, error, is_system_workflow, inputs, '
         'status, workflow_name',
         [
             (False, m_cls, now, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
@@ -268,11 +268,11 @@ class TestExecution(object):
             (True, now, None, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
             (True, now, now, None, 'error', False, {}, Execution.STARTED, 'wf_name'),
             (True, now, now, now, None, False, {}, Execution.STARTED, 'wf_name'),
-            (True, now, now, now, 'error', False, None, Execution.STARTED, 'wf_name'),
+            (True, now, now, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
         ]
     )
     def test_execution_model_creation(self, service_storage, is_valid, created_at, started_at,
-                                      ended_at, error, is_system_workflow, parameters, status,
+                                      ended_at, error, is_system_workflow, inputs, status,
                                       workflow_name):
         execution = _test_model(
             is_valid=is_valid,
@@ -285,7 +285,7 @@ class TestExecution(object):
                 ended_at=ended_at,
                 error=error,
                 is_system_workflow=is_system_workflow,
-                parameters=parameters,
+                inputs=inputs,
                 status=status,
                 workflow_name=workflow_name,
             ))
@@ -299,7 +299,7 @@ class TestExecution(object):
                 id='e_id',
                 workflow_name='w_name',
                 status=status,
-                parameters={},
+                inputs={},
                 created_at=now,
             )
             return execution

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index af8b454..c399474 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -69,16 +69,17 @@ def test_node_operation_task_execution(ctx, thread_executor):
     interface_name = 'Standard'
     operation_name = 'create'
 
+    inputs = {'putput': True}
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
     interface = mock.models.create_interface(
         node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__),
+                              inputs=inputs)
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
-    inputs = {'putput': True}
 
     @workflow
     def basic_workflow(graph, **_):
@@ -124,17 +125,18 @@ def test_relationship_operation_task_execution(ctx, thread_executor):
     interface_name = 'Configure'
     operation_name = 'post_configure'
 
+    inputs = {'putput': True}
     relationship = ctx.model.relationship.list()[0]
     interface = mock.models.create_interface(
         relationship.source_node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__)),
+        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__),
+                              inputs=inputs),
     )
 
     relationship.interfaces[interface.name] = interface
     ctx.model.relationship.update(relationship)
-    inputs = {'putput': True}
 
     @workflow
     def basic_workflow(graph, **_):
@@ -231,21 +233,21 @@ def test_plugin_workdir(ctx, thread_executor, tmpdir):
     plugin = mock.models.create_plugin()
     ctx.model.plugin.put(plugin)
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    filename = 'test_file'
+    content = 'file content'
+    inputs = {'filename': filename, 'content': content}
     interface = mock.models.create_interface(
         node.service,
         interface_name,
         operation_name,
         operation_kwargs=dict(
             implementation='{0}.{1}'.format(__name__, _test_plugin_workdir.__name__),
-            plugin=plugin)
+            plugin=plugin,
+            inputs=inputs)
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
 
-    filename = 'test_file'
-    content = 'file content'
-    inputs = {'filename': filename, 'content': content}
-
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(api.task.OperationTask.for_node(node=node,
@@ -277,21 +279,22 @@ def test_node_operation_logging(ctx, executor):
     interface_name, operation_name = mock.operations.NODE_OPERATIONS_INSTALL[0]
 
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+
+    inputs = {
+        'op_start': 'op_start',
+        'op_end': 'op_end',
+    }
     interface = mock.models.create_interface(
         node.service,
         interface_name,
         operation_name,
         operation_kwargs=dict(
-            implementation=op_path(logged_operation, module_path=__name__))
+            implementation=op_path(logged_operation, module_path=__name__),
+            inputs=inputs)
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
 
-    inputs = {
-        'op_start': 'op_start',
-        'op_end': 'op_end',
-    }
-
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
@@ -311,20 +314,20 @@ def test_relationship_operation_logging(ctx, executor):
     interface_name, operation_name = mock.operations.RELATIONSHIP_OPERATIONS_INSTALL[0]
 
     relationship = ctx.model.relationship.list()[0]
+    inputs = {
+        'op_start': 'op_start',
+        'op_end': 'op_end',
+    }
     interface = mock.models.create_interface(
         relationship.source_node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(logged_operation, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(logged_operation, module_path=__name__),
+                              inputs=inputs)
     )
     relationship.interfaces[interface.name] = interface
     ctx.model.relationship.update(relationship)
 
-    inputs = {
-        'op_start': 'op_start',
-        'op_end': 'op_end',
-    }
-
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/context/test_resource_render.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_resource_render.py b/tests/orchestrator/context/test_resource_render.py
index 696e9b3..8249086 100644
--- a/tests/orchestrator/context/test_resource_render.py
+++ b/tests/orchestrator/context/test_resource_render.py
@@ -64,9 +64,9 @@ def resources(tmpdir, ctx):
     implicit_ctx_template_path.write(_IMPLICIT_CTX_TEMPLATE)
     variables_template_path = tmpdir.join(_VARIABLES_TEMPLATE_PATH)
     variables_template_path.write(_VARIABLES_TEMPLATE)
-    ctx.resource.deployment.upload(entry_id='1',
-                                   source=str(implicit_ctx_template_path),
-                                   path=_IMPLICIT_CTX_TEMPLATE_PATH)
-    ctx.resource.deployment.upload(entry_id='1',
-                                   source=str(variables_template_path),
-                                   path=_VARIABLES_TEMPLATE_PATH)
+    ctx.resource.service.upload(entry_id='1',
+                                source=str(implicit_ctx_template_path),
+                                path=_IMPLICIT_CTX_TEMPLATE_PATH)
+    ctx.resource.service.upload(entry_id='1',
+                                source=str(variables_template_path),
+                                path=_VARIABLES_TEMPLATE_PATH)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/context/test_serialize.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_serialize.py b/tests/orchestrator/context/test_serialize.py
index 8b809b3..f4acc36 100644
--- a/tests/orchestrator/context/test_serialize.py
+++ b/tests/orchestrator/context/test_serialize.py
@@ -15,8 +15,6 @@
 
 import pytest
 
-import aria
-from aria.storage import sql_mapi
 from aria.orchestrator.workflows import api
 from aria.orchestrator.workflows.core import engine
 from aria.orchestrator.workflows.executor import process
@@ -34,7 +32,7 @@ def test_serialize_operation_context(context, executor, tmpdir):
     test_file = tmpdir.join(TEST_FILE_NAME)
     test_file.write(TEST_FILE_CONTENT)
     resource = context.resource
-    resource.blueprint.upload(TEST_FILE_ENTRY_ID, str(test_file))
+    resource.service_template.upload(TEST_FILE_ENTRY_ID, str(test_file))
     graph = _mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter
     eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
     eng.execute()
@@ -72,7 +70,7 @@ def _mock_operation(ctx):
     # a correct ctx.deployment.name tells us we kept the correct deployment_id
     assert ctx.service.name == mock.models.SERVICE_NAME
     # Here we test that the resource storage was properly re-created
-    test_file_content = ctx.resource.blueprint.read(TEST_FILE_ENTRY_ID, TEST_FILE_NAME)
+    test_file_content = ctx.resource.service_template.read(TEST_FILE_ENTRY_ID, TEST_FILE_NAME)
     assert test_file_content == TEST_FILE_CONTENT
     # a non empty plugin workdir tells us that we kept the correct base_workdir
     assert ctx.plugin_workdir is not None
@@ -98,10 +96,3 @@ def context(tmpdir):
 
     yield result
     storage.release_sqlite_storage(result.model)
-
-
-@pytest.fixture
-def memory_model_storage():
-    result = aria.application_model_storage(sql_mapi.SQLAlchemyModelAPI)
-    yield result
-    storage.release_sqlite_storage(result)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index cf82127..213d964 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -76,15 +76,16 @@ def test_host_ip(workflow_context, executor):
     interface_name = 'Standard'
     operation_name = 'create'
     _, dependency_node, _, _, _ = _get_elements(workflow_context)
+    inputs = {'putput': True}
     interface = mock.models.create_interface(
         dependency_node.service,
         interface_name=interface_name,
         operation_name=operation_name,
-        operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__),
+                              inputs=inputs)
     )
     dependency_node.interfaces[interface.name] = interface
     workflow_context.model.node.update(dependency_node)
-    inputs = {'putput': True}
 
     @workflow
     def basic_workflow(graph, **_):
@@ -106,17 +107,17 @@ def test_relationship_tool_belt(workflow_context, executor):
     interface_name = 'Configure'
     operation_name = 'post_configure'
     _, _, _, _, relationship = _get_elements(workflow_context)
+    inputs = {'putput': True}
     interface = mock.models.create_interface(
         relationship.source_node.service,
         interface_name=interface_name,
         operation_name=operation_name,
-        operation_kwargs=dict(implementation=op_path(relationship_operation, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(relationship_operation, module_path=__name__),
+                              inputs=inputs)
     )
     relationship.interfaces[interface.name] = interface
     workflow_context.model.relationship.update(relationship)
 
-    inputs = {'putput': True}
-
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/context/test_workflow.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_workflow.py b/tests/orchestrator/context/test_workflow.py
index fa1f387..3c35435 100644
--- a/tests/orchestrator/context/test_workflow.py
+++ b/tests/orchestrator/context/test_workflow.py
@@ -35,7 +35,7 @@ class TestWorkflowContext(object):
         assert execution.service_template == storage.service_template.get_by_name(
             models.SERVICE_TEMPLATE_NAME)
         assert execution.status == storage.execution.model_cls.PENDING
-        assert execution.parameters == {}
+        assert execution.inputs == {}
         assert execution.created_at <= datetime.utcnow()
 
     def test_subsequent_workflow_context_creation_do_not_fail(self, storage):
@@ -49,11 +49,13 @@ class TestWorkflowContext(object):
         :param storage:
         :return WorkflowContext:
         """
+        service = storage.service.get_by_name(models.SERVICE_NAME)
         return context.workflow.WorkflowContext(
             name='simple_context',
             model_storage=storage,
             resource_storage=None,
-            service_id=storage.service.get_by_name(models.SERVICE_NAME).id,
+            service_id=service,
+            execution_id=storage.execution.list(filters=dict(service=service))[0].id,
             workflow_name=models.WORKFLOW_NAME,
             task_max_attempts=models.TASK_MAX_ATTEMPTS,
             task_retry_interval=models.TASK_RETRY_INTERVAL
@@ -66,6 +68,8 @@ def storage():
         sql_mapi.SQLAlchemyModelAPI, initiator=test_storage.init_inmemory_model_storage)
     workflow_storage.service_template.put(models.create_service_template())
     service_template = workflow_storage.service_template.get_by_name(models.SERVICE_TEMPLATE_NAME)
-    workflow_storage.service.put(models.create_service(service_template))
+    service = models.create_service(service_template)
+    workflow_storage.service.put(service)
+    workflow_storage.execution.put(models.create_execution(service))
     yield workflow_storage
     test_storage.release_sqlite_storage(workflow_storage)



[3/9] incubator-ariatosca git commit: ARIA-92 Automatic operation task configuration

Posted by ra...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
index 267f6de..0e9177f 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
@@ -23,6 +23,8 @@ import re
 from types import FunctionType
 from datetime import datetime
 
+from aria.parser.validation import Issue
+from aria.utils.collections import StrictDict
 from aria.modeling.models import (Type, ServiceTemplate, NodeTemplate,
                                   RequirementTemplate, RelationshipTemplate, CapabilityTemplate,
                                   GroupTemplate, PolicyTemplate, SubstitutionTemplate,
@@ -32,6 +34,11 @@ from aria.modeling.models import (Type, ServiceTemplate, NodeTemplate,
 from ..data_types import coerce_value
 
 
+# These match the first un-escaped ">"
+# See: http://stackoverflow.com/a/11819111/849021
+IMPLEMENTATION_PREFIX_REGEX = re.compile(r'(?<!\\)(?:\\\\)*>')
+
+
 def create_service_template_model(context): # pylint: disable=too-many-locals,too-many-branches
     model = ServiceTemplate(created_at=datetime.now(),
                             main_file_name=str(context.presentation.location))
@@ -352,20 +359,35 @@ def create_interface_template_model(context, service_template, interface):
     return model if model.operation_templates else None
 
 
-def create_operation_template_model(context, service_template, operation): # pylint: disable=unused-argument
+def create_operation_template_model(context, service_template, operation):
     model = OperationTemplate(name=operation._name)
 
     if operation.description:
         model.description = operation.description.value
 
     implementation = operation.implementation
-    if (implementation is not None) and operation.implementation.primary:
-        model.plugin_specification, model.implementation = \
-            parse_implementation_string(context, service_template, operation.implementation.primary)
-
+    if implementation is not None:
+        primary = implementation.primary
+        parse_implementation_string(context, service_template, operation, model, primary)
+        relationship_edge = operation._get_extensions(context).get('relationship_edge')
+        if relationship_edge is not None:
+            if relationship_edge == 'source':
+                model.relationship_edge = False
+            elif relationship_edge == 'target':
+                model.relationship_edge = True
+
         dependencies = implementation.dependencies
-        if dependencies is not None:
-            model.dependencies = dependencies
+        if dependencies:
+            for dependency in dependencies:
+                key, value = split_prefix(dependency)
+                if key is not None:
+                    if model.configuration is None:
+                        model.configuration = {}
+                    set_nested(model.configuration, key.split('.'), value)
+                else:
+                    if model.dependencies is None:
+                        model.dependencies = []
+                    model.dependencies.append(dependency)
 
     inputs = operation.inputs
     if inputs:
@@ -441,12 +463,13 @@ def create_substitution_template_model(context, service_template, substitution_m
 def create_plugin_specification_model(context, policy):
     properties = policy.properties
 
-    def get(name):
+    def get(name, default=None):
         prop = properties.get(name)
-        return prop.value if prop is not None else None
+        return prop.value if prop is not None else default
 
     model = PluginSpecification(name=policy._name,
-                                version=get('version'))
+                                version=get('version'),
+                                enabled=get('enabled', True))
 
     return model
 
@@ -461,8 +484,7 @@ def create_workflow_operation_template_model(context, service_template, policy):
     properties = policy._get_property_values(context)
     for prop_name, prop in properties.iteritems():
         if prop_name == 'implementation':
-            model.plugin_specification, model.implementation = \
-                parse_implementation_string(context, service_template, prop.value)
+            parse_implementation_string(context, service_template, policy, model, prop.value)
         elif prop_name == 'dependencies':
             model.dependencies = prop.value
         else:
@@ -677,21 +699,47 @@ def create_constraint_clause_lambda(context, node_filter, constraint_clause, pro
     return None
 
 
-def parse_implementation_string(context, service_template, implementation):
-    if not implementation:
-        return None, ''
+def split_prefix(string):
+    """
+    Splits the prefix on the first unescaped ">".
+    """
 
-    index = implementation.find('>')
-    if index == -1:
-        return None, implementation
-    plugin_name = implementation[:index].strip()
-    
-    if plugin_name == 'execution':
-        plugin_specification = None
-    else:
-        plugin_specification = service_template.plugin_specifications.get(plugin_name)
-        if plugin_specification is None:
-            raise ValueError('unknown plugin: "{0}"'.format(plugin_name))
+    split = IMPLEMENTATION_PREFIX_REGEX.split(string, 2)
+    if len(split) < 2:
+        return None, string
+    return split[0].strip(), split[1].lstrip()
+
+
+def set_nested(the_dict, keys, value):
+    """
+    If the ``keys`` list has just one item, puts the value in the dict. If there are more items,
+    puts the value in a sub-dict, creating sub-dicts as necessary for each key.
 
-    implementation = implementation[index+1:].strip()
-    return plugin_specification, implementation
+    For example, if ``the_dict`` is an empty dict, keys is ``['first', 'second', 'third']`` and
+    value is ``'value'``, then the_dict will be: ``{'first':{'second':{'third':'value'}}}``.
+
+    :param the_dict: Dict to change
+    :type the_dict: {}
+    :param keys: Keys
+    :type keys: [basestring]
+    :param value: Value
+    """
+    key = keys.pop(0)
+    if len(keys) == 0:
+        the_dict[key] = value
+    else:
+        if key not in the_dict:
+            the_dict[key] = StrictDict(key_class=basestring)
+        set_nested(the_dict[key], keys, value)
+
+
+def parse_implementation_string(context, service_template, presentation, model, implementation):
+    plugin_name, model.implementation = split_prefix(implementation)
+    if plugin_name is not None:
+        model.plugin_specification = service_template.plugin_specifications.get(plugin_name)
+        if model.plugin_specification is None:
+            context.validation.report(
+                'no policy for plugin "{0}" specified in operation implementation: {1}'
+                .format(plugin_name, implementation),
+                locator=presentation._get_child_locator('properties', 'implementation'),
+                level=Issue.BETWEEN_TYPES)
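
A note on the parsing helpers added above: `split_prefix` and `set_nested` are what
turn ">"-prefixed entries in an operation's `dependencies` list into nested
configuration. A minimal standalone sketch of the same behavior (re-implemented here
for illustration with plain dicts; the real code uses StrictDict and lives in this
module):

    import re

    # Matches the first un-escaped ">" (see http://stackoverflow.com/a/11819111/849021)
    PREFIX_REGEX = re.compile(r'(?<!\\)(?:\\\\)*>')

    def split_prefix(string):
        # 'process.args.1 > mongodb' -> ('process.args.1', 'mongodb');
        # a string with no un-escaped ">" -> (None, string)
        split = PREFIX_REGEX.split(string, 2)
        if len(split) < 2:
            return None, string
        return split[0].strip(), split[1].lstrip()

    def set_nested(the_dict, keys, value):
        # Create sub-dicts for every key except the last, then assign
        key = keys.pop(0)
        if keys:
            set_nested(the_dict.setdefault(key, {}), keys, value)
        else:
            the_dict[key] = value

    configuration = {}
    for dependency in ('process.args.1 > mongodb', 'ssh.user > admin', 'plain-dependency'):
        key, value = split_prefix(dependency)
        if key is not None:
            set_nested(configuration, key.split('.'), value)
    # configuration == {'process': {'args': {'1': 'mongodb'}}, 'ssh': {'user': 'admin'}}
    # 'plain-dependency' has no un-escaped ">" and would stay in model.dependencies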

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py
index d9b9f6b..6df7177 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py
@@ -78,6 +78,11 @@ def get_inherited_capability_definitions(context, presentation, for_presentation
                 #capability_definitions[capability_name] = capability_definition
             else:
                 capability_definition = our_capability_definition._clone(for_presentation)
+                if isinstance(capability_definition._raw, basestring):
+                    # Make sure we have a dict
+                    the_type = capability_definition._raw
+                    capability_definition._raw = OrderedDict()
+                    capability_definition._raw['type'] = the_type
                 capability_definitions[capability_name] = capability_definition
 
             merge_capability_definition_from_type(context, presentation, capability_definition)
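
The string-to-dict normalization above covers the TOSCA short notation for capability
definitions, where a capability maps directly to a type name instead of a dict with a
"type" key. A tiny sketch of the same normalization outside the presentation classes
(the type name here is just an example):

    from collections import OrderedDict

    raw = 'tosca.capabilities.Scalable'   # short notation: only the type name
    if isinstance(raw, str):              # the Python 2 code above checks basestring
        raw = OrderedDict([('type', raw)])
    assert raw == OrderedDict([('type', 'tosca.capabilities.Scalable')])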

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/tests/modeling/test_models.py
----------------------------------------------------------------------
diff --git a/tests/modeling/test_models.py b/tests/modeling/test_models.py
index 8cd00f8..bd4eba4 100644
--- a/tests/modeling/test_models.py
+++ b/tests/modeling/test_models.py
@@ -585,48 +585,49 @@ class TestNode(object):
                    node_template_storage.service.list()[0]
 
 
-class TestNodeIP(object):
+class TestNodeHostAddress(object):
 
-    ip = '1.1.1.1'
+    host_address = '1.1.1.1'
 
-    def test_ip_on_none_hosted_node(self, service_storage):
-        node_template = self._node_template(service_storage, ip='not considered')
+    def test_host_address_on_none_hosted_node(self, service_storage):
+        node_template = self._node_template(service_storage, host_address='not considered')
         node = self._node(service_storage,
                           node_template,
                           is_host=False,
-                          ip='not considered')
-        assert node.ip is None
+                          host_address='not considered')
+        assert node.host_address is None
 
-    def test_property_ip_on_host_node(self, service_storage):
-        node_template = self._node_template(service_storage, ip=self.ip)
-        node = self._node(service_storage, node_template, is_host=True, ip=None)
-        assert node.ip == self.ip
+    def test_property_host_address_on_host_node(self, service_storage):
+        node_template = self._node_template(service_storage, host_address=self.host_address)
+        node = self._node(service_storage, node_template, is_host=True, host_address=None)
+        assert node.host_address == self.host_address
 
-    def test_runtime_property_ip_on_host_node(self, service_storage):
-        node_template = self._node_template(service_storage, ip='not considered')
-        node = self._node(service_storage, node_template, is_host=True, ip=self.ip)
-        assert node.ip == self.ip
+    def test_runtime_property_host_address_on_host_node(self, service_storage):
+        node_template = self._node_template(service_storage, host_address='not considered')
+        node = self._node(service_storage, node_template, is_host=True,
+                          host_address=self.host_address)
+        assert node.host_address == self.host_address
 
-    def test_no_ip_configured_on_host_node(self, service_storage):
-        node_template = self._node_template(service_storage, ip=None)
-        node = self._node(service_storage, node_template, is_host=True, ip=None)
-        assert node.ip is None
+    def test_no_host_address_configured_on_host_node(self, service_storage):
+        node_template = self._node_template(service_storage, host_address=None)
+        node = self._node(service_storage, node_template, is_host=True, host_address=None)
+        assert node.host_address is None
 
     def test_runtime_property_on_hosted_node(self, service_storage):
-        host_node_template = self._node_template(service_storage, ip=None)
+        host_node_template = self._node_template(service_storage, host_address=None)
         host_node = self._node(service_storage,
                                host_node_template,
                                is_host=True,
-                               ip=self.ip)
-        node_template = self._node_template(service_storage, ip=None)
+                               host_address=self.host_address)
+        node_template = self._node_template(service_storage, host_address=None)
         node = self._node(service_storage,
                           node_template,
                           is_host=False,
-                          ip=None,
+                          host_address=None,
                           host_fk=host_node.id)
-        assert node.ip == self.ip
+        assert node.host_address == self.host_address
 
-    def _node_template(self, storage, ip):
+    def _node_template(self, storage, host_address):
         kwargs = dict(
             name='node_template',
             type=storage.type.list()[0],
@@ -635,23 +636,27 @@ class TestNodeIP(object):
             min_instances=1,
             service_template=storage.service_template.list()[0]
         )
-        if ip:
-            kwargs['properties'] = {'ip': Parameter.wrap('ip', ip)}
+        if host_address:
+            kwargs['properties'] = {'host_address': Parameter.wrap('host_address', host_address)}
         node = NodeTemplate(**kwargs)
         storage.node_template.put(node)
         return node
 
-    def _node(self, storage, node, is_host, ip, host_fk=None):
+    def _node(self, storage, node_template, is_host, host_address, host_fk=None):
         kwargs = dict(
             name='node',
-            node_template=node,
+            node_template=node_template,
             type=storage.type.list()[0],
             runtime_properties={},
             state='initial',
             service=storage.service.list()[0]
         )
-        if ip:
-            kwargs['runtime_properties']['ip'] = ip
+        if is_host and (host_address is None):
+            host_address = node_template.properties.get('host_address')
+            if host_address is not None:
+                host_address = host_address.value
+        if host_address:
+            kwargs['runtime_properties']['ip'] = host_address
         if is_host:
             kwargs['host_fk'] = 1
         elif host_fk:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index f55b83e..af8b454 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -230,7 +230,6 @@ def test_plugin_workdir(ctx, thread_executor, tmpdir):
 
     plugin = mock.models.create_plugin()
     ctx.model.plugin.put(plugin)
-    plugin_specification = mock.models.create_plugin_specification()
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
     interface = mock.models.create_interface(
         node.service,
@@ -238,7 +237,7 @@ def test_plugin_workdir(ctx, thread_executor, tmpdir):
         operation_name,
         operation_kwargs=dict(
             implementation='{0}.{1}'.format(__name__, _test_plugin_workdir.__name__),
-            plugin_specification=plugin_specification)
+            plugin=plugin)
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/tests/orchestrator/context/test_serialize.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_serialize.py b/tests/orchestrator/context/test_serialize.py
index db45e8e..8b809b3 100644
--- a/tests/orchestrator/context/test_serialize.py
+++ b/tests/orchestrator/context/test_serialize.py
@@ -45,13 +45,12 @@ def _mock_workflow(ctx, graph):
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
     plugin = mock.models.create_plugin()
     ctx.model.plugin.put(plugin)
-    plugin_specification = mock.models.create_plugin_specification()
     interface = mock.models.create_interface(
         node.service,
         'test',
         'op',
         operation_kwargs=dict(implementation=_operation_mapping(),
-                              plugin_specification=plugin_specification)
+                              plugin=plugin)
     )
     node.interfaces[interface.name] = interface
     task = api.task.OperationTask.for_node(node=node, interface_name='test', operation_name='op')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/tests/orchestrator/execution_plugin/test_ssh.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_ssh.py b/tests/orchestrator/execution_plugin/test_ssh.py
index dd36466..d86b6d2 100644
--- a/tests/orchestrator/execution_plugin/test_ssh.py
+++ b/tests/orchestrator/execution_plugin/test_ssh.py
@@ -292,12 +292,13 @@ class TestFabricEnvHideGroupsAndRunCommands(object):
         assert self.mock.settings_merged['timeout'] == timeout
 
     def test_implicit_host_string(self, mocker):
-        expected_ip = '1.1.1.1'
-        mocker.patch.object(self._Ctx.task.runs_on, 'ip', expected_ip)
+        expected_host_address = '1.1.1.1'
+        mocker.patch.object(self._Ctx.task.actor, 'host')
+        mocker.patch.object(self._Ctx.task.actor.host, 'host_address', expected_host_address)
         fabric_env = self.default_fabric_env.copy()
         del fabric_env['host_string']
         self._run(fabric_env=fabric_env)
-        assert self.mock.settings_merged['host_string'] == expected_ip
+        assert self.mock.settings_merged['host_string'] == expected_host_address
 
     def test_explicit_host_string(self):
         fabric_env = self.default_fabric_env.copy()
@@ -409,13 +410,15 @@ class TestFabricEnvHideGroupsAndRunCommands(object):
             raise RuntimeError
 
     class _Ctx(object):
-        class Stub(object):
+        class Task(object):
             @staticmethod
             def abort(message=None):
                 models.Task.abort(message)
-            ip = None
-        task = Stub
-        task.runs_on = Stub
+            actor = None
+        class Actor(object):
+            host = None
+        task = Task
+        task.actor = Actor
         logger = logging.getLogger()
 
     @staticmethod

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/tests/orchestrator/workflows/api/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/api/test_task.py b/tests/orchestrator/workflows/api/test_task.py
index 80d2351..a705199 100644
--- a/tests/orchestrator/workflows/api/test_task.py
+++ b/tests/orchestrator/workflows/api/test_task.py
@@ -18,7 +18,6 @@ import pytest
 
 from aria.orchestrator import context
 from aria.orchestrator.workflows import api
-from aria.modeling import models
 
 from tests import mock, storage
 
@@ -45,13 +44,11 @@ class TestOperationTask(object):
         plugin = mock.models.create_plugin('test_plugin', '0.1')
         ctx.model.node.update(plugin)
 
-        plugin_specification = mock.models.create_plugin_specification('test_plugin', '0.1')
-
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
-            operation_kwargs=dict(plugin_specification=plugin_specification,
+            operation_kwargs=dict(plugin=plugin,
                                   implementation='op_path'))
 
         node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
@@ -85,7 +82,6 @@ class TestOperationTask(object):
         assert api_task.max_attempts == max_attempts
         assert api_task.ignore_failure == ignore_failure
         assert api_task.plugin.name == 'test_plugin'
-        assert api_task.runs_on == models.Task.RUNS_ON_NODE
 
     def test_source_relationship_operation_task_creation(self, ctx):
         interface_name = 'test_interface'
@@ -94,13 +90,11 @@ class TestOperationTask(object):
         plugin = mock.models.create_plugin('test_plugin', '0.1')
         ctx.model.plugin.update(plugin)
 
-        plugin_specification = mock.models.create_plugin_specification('test_plugin', '0.1')
-
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
-            operation_kwargs=dict(plugin_specification=plugin_specification,
+            operation_kwargs=dict(plugin=plugin,
                                   implementation='op_path')
         )
 
@@ -131,7 +125,6 @@ class TestOperationTask(object):
         assert api_task.retry_interval == retry_interval
         assert api_task.max_attempts == max_attempts
         assert api_task.plugin.name == 'test_plugin'
-        assert api_task.runs_on == models.Task.RUNS_ON_SOURCE
 
     def test_target_relationship_operation_task_creation(self, ctx):
         interface_name = 'test_interface'
@@ -140,13 +133,11 @@ class TestOperationTask(object):
         plugin = mock.models.create_plugin('test_plugin', '0.1')
         ctx.model.node.update(plugin)
 
-        plugin_specification = mock.models.create_plugin_specification('test_plugin', '0.1')
-
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
-            operation_kwargs=dict(plugin_specification=plugin_specification,
+            operation_kwargs=dict(plugin=plugin,
                                   implementation='op_path')
         )
 
@@ -163,8 +154,7 @@ class TestOperationTask(object):
                 operation_name=operation_name,
                 inputs=inputs,
                 max_attempts=max_attempts,
-                retry_interval=retry_interval,
-                runs_on=models.Task.RUNS_ON_TARGET)
+                retry_interval=retry_interval)
 
         assert api_task.name == api.task.OperationTask.NAME_FORMAT.format(
             type='relationship',
@@ -178,7 +168,6 @@ class TestOperationTask(object):
         assert api_task.retry_interval == retry_interval
         assert api_task.max_attempts == max_attempts
         assert api_task.plugin.name == 'test_plugin'
-        assert api_task.runs_on == models.Task.RUNS_ON_TARGET
 
     def test_operation_task_default_values(self, ctx):
         interface_name = 'test_interface'
@@ -187,15 +176,13 @@ class TestOperationTask(object):
         plugin = mock.models.create_plugin('package', '0.1')
         ctx.model.node.update(plugin)
 
-        plugin_specification = mock.models.create_plugin_specification('package', '0.1')
-
         dependency_node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
 
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
-            operation_kwargs=dict(plugin_specification=plugin_specification,
+            operation_kwargs=dict(plugin=plugin,
                                   implementation='op_path'))
         dependency_node.interfaces[interface_name] = interface
 
@@ -210,7 +197,6 @@ class TestOperationTask(object):
         assert task.max_attempts == ctx._task_max_attempts
         assert task.ignore_failure == ctx._task_ignore_failure
         assert task.plugin is plugin
-        assert task.runs_on == models.Task.RUNS_ON_NODE
 
 
 class TestWorkflowTask(object):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/tests/orchestrator/workflows/builtin/__init__.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/__init__.py b/tests/orchestrator/workflows/builtin/__init__.py
index 8a67247..1809f82 100644
--- a/tests/orchestrator/workflows/builtin/__init__.py
+++ b/tests/orchestrator/workflows/builtin/__init__.py
@@ -31,16 +31,13 @@ def _assert_relationships(operations, expected_op_full_name, relationships=0):
         # suffix once
         operation = next(operations)
         relationship_id_1 = operation.actor.id
-        edge1 = operation.runs_on
         _assert_cfg_interface_op(operation, expected_op_name)
 
         operation = next(operations)
         relationship_id_2 = operation.actor.id
-        edge2 = operation.runs_on
         _assert_cfg_interface_op(operation, expected_op_name)
 
         assert relationship_id_1 == relationship_id_2
-        assert edge1 != edge2
 
 
 def assert_node_install_operations(operations, relationships=0):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/tests/orchestrator/workflows/builtin/test_execute_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_execute_operation.py b/tests/orchestrator/workflows/builtin/test_execute_operation.py
index 360e17d..4cddbe6 100644
--- a/tests/orchestrator/workflows/builtin/test_execute_operation.py
+++ b/tests/orchestrator/workflows/builtin/test_execute_operation.py
@@ -34,7 +34,8 @@ def test_execute_operation(ctx):
     interface = mock.models.create_interface(
         ctx.service,
         interface_name,
-        operation_name
+        operation_name,
+        operation_kwargs={'implementation': 'test'}
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/tests/orchestrator/workflows/core/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task.py b/tests/orchestrator/workflows/core/test_task.py
index 18ca056..8dda209 100644
--- a/tests/orchestrator/workflows/core/test_task.py
+++ b/tests/orchestrator/workflows/core/test_task.py
@@ -19,7 +19,6 @@ from datetime import (
 
 import pytest
 
-from aria.modeling import models
 from aria.orchestrator.context import workflow as workflow_context
 from aria.orchestrator.workflows import (
     api,
@@ -43,7 +42,8 @@ def ctx(tmpdir):
     interface = mock.models.create_interface(
         relationship.source_node.service,
         RELATIONSHIP_INTERFACE_NAME,
-        RELATIONSHIP_OPERATION_NAME
+        RELATIONSHIP_OPERATION_NAME,
+        operation_kwargs={'implementation': 'test'}
     )
     relationship.interfaces[interface.name] = interface
     context.model.relationship.update(relationship)
@@ -52,7 +52,8 @@ def ctx(tmpdir):
     interface = mock.models.create_interface(
         node.service,
         NODE_INTERFACE_NAME,
-        NODE_OPERATION_NAME
+        NODE_OPERATION_NAME,
+        operation_kwargs={'implementation': 'test'}
     )
     node.interfaces[interface.name] = interface
     context.model.node.update(node)
@@ -72,13 +73,12 @@ class TestOperationTask(object):
             core_task = core.task.OperationTask(api_task=api_task)
         return api_task, core_task
 
-    def _create_relationship_operation_task(self, ctx, relationship, runs_on):
+    def _create_relationship_operation_task(self, ctx, relationship):
         with workflow_context.current.push(ctx):
             api_task = api.task.OperationTask.for_relationship(
                 relationship=relationship,
                 interface_name=RELATIONSHIP_INTERFACE_NAME,
-                operation_name=RELATIONSHIP_OPERATION_NAME,
-                runs_on=runs_on)
+                operation_name=RELATIONSHIP_OPERATION_NAME)
             core_task = core.task.OperationTask(api_task=api_task)
         return api_task, core_task
 
@@ -88,12 +88,11 @@ class TestOperationTask(object):
         ctx.model.plugin.put(storage_plugin)
         ctx.model.plugin.put(storage_plugin_other)
         node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-        storage_plugin_specification = mock.models.create_plugin_specification('p1', '0.1')
         interface = mock.models.create_interface(
             node.service,
             NODE_INTERFACE_NAME,
             NODE_OPERATION_NAME,
-            operation_kwargs=dict(plugin_specification=storage_plugin_specification)
+            operation_kwargs=dict(plugin=storage_plugin, implementation='test')
         )
         node.interfaces[interface.name] = interface
         ctx.model.node.update(node)
@@ -101,7 +100,7 @@ class TestOperationTask(object):
         storage_task = ctx.model.task.get_by_name(core_task.name)
         assert storage_task.plugin is storage_plugin
         assert storage_task.execution_name == ctx.execution.name
-        assert storage_task.runs_on == core_task.context.node
+        assert storage_task.actor == core_task.context.node
         assert core_task.model_task == storage_task
         assert core_task.name == api_task.name
         assert core_task.implementation == api_task.implementation
@@ -109,18 +108,12 @@ class TestOperationTask(object):
         assert core_task.inputs == api_task.inputs == storage_task.inputs
         assert core_task.plugin == storage_plugin
 
-    def test_source_relationship_operation_task_creation(self, ctx):
+    def test_relationship_operation_task_creation(self, ctx):
         relationship = ctx.model.relationship.list()[0]
         ctx.model.relationship.update(relationship)
         _, core_task = self._create_relationship_operation_task(
-            ctx, relationship, models.Task.RUNS_ON_SOURCE)
-        assert core_task.model_task.runs_on == relationship.source_node
-
-    def test_target_relationship_operation_task_creation(self, ctx):
-        relationship = ctx.model.relationship.list()[0]
-        _, core_task = self._create_relationship_operation_task(
-            ctx, relationship, models.Task.RUNS_ON_TARGET)
-        assert core_task.model_task.runs_on == relationship.target_node
+            ctx, relationship)
+        assert core_task.model_task.actor == relationship
 
     def test_operation_task_edit_locked_attribute(self, ctx):
         node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
index 0a95d43..514bce9 100644
--- a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
+++ b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
@@ -30,7 +30,8 @@ def test_task_graph_into_execution_graph(tmpdir):
     interface = mock.models.create_interface(
         node.service,
         interface_name,
-        operation_name
+        operation_name,
+        operation_kwargs={'implementation': 'test'}
     )
     node.interfaces[interface.name] = interface
     task_context.model.node.update(node)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/tests/orchestrator/workflows/executor/test_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_executor.py b/tests/orchestrator/workflows/executor/test_executor.py
index d84d1ec..a7619de 100644
--- a/tests/orchestrator/workflows/executor/test_executor.py
+++ b/tests/orchestrator/workflows/executor/test_executor.py
@@ -119,6 +119,7 @@ class MockTask(object):
         self.ignore_failure = False
         self.interface_name = 'interface_name'
         self.operation_name = 'operation_name'
+        self.model_task = None
 
         for state in models.Task.STATES:
             setattr(self, state.upper(), state)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/tests/orchestrator/workflows/executor/test_process_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor.py b/tests/orchestrator/workflows/executor/test_process_executor.py
index 436e7b6..502c9fd 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor.py
@@ -142,6 +142,7 @@ class MockTask(object):
         self.ignore_failure = False
         self.interface_name = 'interface_name'
         self.operation_name = 'operation_name'
+        self.model_task = None
 
         for state in aria_models.Task.STATES:
             setattr(self, state.upper(), state)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
index 3b4f371..349a166 100644
--- a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
+++ b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
@@ -154,14 +154,33 @@ topology_template:
                 - scalable:
                     properties:
                       - max_instances: { greater_or_equal: 8 }
-    
+            relationship:
+              interfaces:
+                Configure:
+                  target_changed:
+                    implementation:
+                      primary: changed.sh
+                      dependencies:
+                        #- { concat: [ process.args.1 >, mongodb ] }
+                        - process.args.1 > mongodb
+                        - process.args.2 > host
+                        - ssh.user > admin
+                        - ssh.password > 1234
+                        - ssh.use_sudo > true
+
+    nginx:
+      type: nginx.Nginx
+      requirements:
+        - host: loadbalancer_host
+        - feature: loadbalancer
+
+    # Features
+
     loadbalancer:
       type: nginx.LoadBalancer
       properties:
-        algorithm: round-robin      
-      requirements:
-        - host: loadbalancer_host
-    
+        algorithm: round-robin
+
     # Hosts
 
     loadbalancer_host:
@@ -177,7 +196,11 @@ topology_template:
         Standard:
           inputs:
             openstack_credential: { get_input: openstack_credential }
-          configure: juju > charm.loadbalancer
+          configure:
+            implementation:
+              primary: juju > run_charm
+              dependencies:
+                - charm > loadbalancer
 
     application_host:
       copy: loadbalancer_host
@@ -253,6 +276,7 @@ topology_template:
       type: aria.Plugin
       properties:
         version: 1.0
+        enabled: false
 
     maintenance_on:
       type: MaintenanceWorkflow
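
Tracing the new dependencies syntax under target_changed earlier in this diff: with
the ARIA-92 prefix parsing, those five ">"-prefixed entries should produce an
operation configuration roughly like the sketch below (values stay raw strings; no
type coercion is implied here):

    {
        'process': {'args': {'1': 'mongodb', '2': 'host'}},
        'ssh': {'user': 'admin', 'password': '1234', 'use_sudo': 'true'}
    }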

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a7e7826e/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/nginx.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/nginx.yaml b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/nginx.yaml
index eab130f..3621360 100644
--- a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/nginx.yaml
+++ b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/types/nginx.yaml
@@ -15,12 +15,15 @@
 
 node_types:
 
+  nginx.Nginx:
+    description: >-
+      Nginx instance.
+    derived_from: tosca.nodes.SoftwareComponent
+    requirements:
+      - feature:
+          capability: tosca.capabilities.Node
+
   nginx.LoadBalancer:
     description: >-
-      Nginx as a loadbalancer.
+      Nginx loadbalancer feature.
     derived_from: tosca.nodes.LoadBalancer
-    requirements:
-      - host:
-          capability: tosca.capabilities.Container
-          node: tosca.nodes.Compute
-          relationship: tosca.relationships.HostedOn


[8/9] incubator-ariatosca git commit: ARIA-48 cli

Posted by ra...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/commands/plugins.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/plugins.py b/aria/cli/commands/plugins.py
new file mode 100644
index 0000000..9e7d449
--- /dev/null
+++ b/aria/cli/commands/plugins.py
@@ -0,0 +1,133 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import zipfile
+
+from ..table import print_data
+from ..cli import aria
+from ..exceptions import AriaCliError
+from ..utils import storage_sort_param
+
+
+PLUGIN_COLUMNS = ['id', 'package_name', 'package_version', 'supported_platform',
+                  'distribution', 'distribution_release', 'uploaded_at']
+
+
+@aria.group(name='plugins')
+@aria.options.verbose()
+def plugins():
+    """Handle plugins
+    """
+    pass
+
+
+@plugins.command(name='validate',
+                 short_help='Validate a plugin')
+@aria.argument('plugin-path')
+@aria.options.verbose()
+@aria.pass_logger
+def validate(plugin_path, logger):
+    """Validate a plugin
+
+    This will try to validate that the plugin's archive is not corrupted.
+    A valid plugin is a wagon (http://github.com/cloudify-cosmo/wagon)
+    in the zip format (suffix may also be .wgn).
+
+    `PLUGIN_PATH` is the path to the wagon archive to validate.
+    """
+    logger.info('Validating plugin {0}...'.format(plugin_path))
+
+    if not zipfile.is_zipfile(plugin_path):
+        raise AriaCliError(
+            'Archive {0} is of an unsupported type. Only '
+            'zip/wgn is allowed'.format(plugin_path))
+    with zipfile.ZipFile(plugin_path, 'r') as zip_file:
+        infos = zip_file.infolist()
+        try:
+            package_name = infos[0].filename[:infos[0].filename.index('/')]
+            package_json_path = "{0}/{1}".format(package_name, 'package.json')
+            zip_file.getinfo(package_json_path)
+        except (KeyError, ValueError, IndexError):
+            raise AriaCliError(
+                'Failed to validate plugin {0} '
+                '(package.json was not found in archive)'.format(plugin_path))
+
+    logger.info('Plugin validated successfully')
+
+
+# @plugins.command(name='delete',
+#                  short_help='Delete a plugin')
+# @aria.argument('plugin-id')
+# @aria.options.verbose()
+# @aria.pass_model_storage
+# @aria.pass_logger
+# def delete(plugin_id, model_storage, logger):
+#     """Delete a plugin
+#
+#     `PLUGIN_ID` is the id of the plugin to delete.
+#     """
+#     logger.info('Deleting plugin {0}...'.format(plugin_id))
+#     model_storage.plugin.delete(plugin_id=plugin_id)
+#     logger.info('Plugin deleted')
+
+
+@plugins.command(name='install',
+                 short_help='Install a plugin')
+@aria.argument('plugin-path')
+@aria.options.verbose()
+@aria.pass_context
+@aria.pass_plugin_manager
+@aria.pass_logger
+def install(ctx, plugin_path, plugin_manager, logger):
+    """Install a plugin
+
+    `PLUGIN_PATH` is the path to the wagon archive to install.
+    """
+    ctx.invoke(validate, plugin_path=plugin_path)
+    logger.info('Installing plugin {0}...'.format(plugin_path))
+    plugin = plugin_manager.install(plugin_path)
+    logger.info("Plugin installed. The plugin's id is {0}".format(plugin.id))
+
+
+@plugins.command(name='show',
+                 short_help='show plugin information')
+@aria.argument('plugin-id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(plugin_id, model_storage, logger):
+    """Show information for a specific plugin
+
+    `PLUGIN_ID` is the id of the plugin to show information on.
+    """
+    logger.info('Showing plugin {0}...'.format(plugin_id))
+    plugin = model_storage.plugin.get(plugin_id)
+    print_data(PLUGIN_COLUMNS, plugin.to_dict(), 'Plugin:')
+
+
+@plugins.command(name='list',
+                 short_help='List plugins')
+@aria.options.sort_by('uploaded_at')
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(sort_by, descending, model_storage, logger):
+    """List all plugins
+    """
+    logger.info('Listing all plugins...')
+    plugins_list = [p.to_dict() for p in model_storage.plugin.list(
+        sort=storage_sort_param(sort_by, descending))]
+    print_data(PLUGIN_COLUMNS, plugins_list, 'Plugins:')
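
For readers who want to reuse the wagon sanity check outside the CLI, here is a
standalone restatement of what `validate` does (the path is hypothetical; a wagon is
a zip whose single top-level directory contains package.json):

    import zipfile

    def looks_like_wagon(plugin_path):
        if not zipfile.is_zipfile(plugin_path):
            return False
        with zipfile.ZipFile(plugin_path, 'r') as zip_file:
            infos = zip_file.infolist()
            try:
                package_name = infos[0].filename[:infos[0].filename.index('/')]
                zip_file.getinfo('{0}/package.json'.format(package_name))
            except (KeyError, ValueError, IndexError):
                return False
        return True

    print(looks_like_wagon('./my-plugin.wgn'))  # hypothetical plugin archive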

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/commands/service_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/service_templates.py b/aria/cli/commands/service_templates.py
new file mode 100644
index 0000000..8e0e91c
--- /dev/null
+++ b/aria/cli/commands/service_templates.py
@@ -0,0 +1,220 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+
+from .. import utils
+from .. import csar
+from .. import service_template_utils
+from ..cli import aria
+from ..table import print_data
+from ..exceptions import AriaCliError
+from ..utils import handle_storage_exception
+from ...core import Core
+from ...exceptions import AriaException
+from ...storage import exceptions as storage_exceptions
+
+
+DESCRIPTION_LIMIT = 20
+SERVICE_TEMPLATE_COLUMNS = \
+    ['id', 'name', 'main_file_name', 'created_at', 'updated_at']
+
+
+@aria.group(name='service-templates')
+@aria.options.verbose()
+def service_templates():
+    """Handle service templates
+    """
+    pass
+
+
+@service_templates.command(name='show',
+                           short_help='Show service template information')
+@aria.argument('service-template-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(service_template_name, model_storage, logger):
+    """Show information for a specific service template
+
+    `SERVICE_TEMPLATE_NAME` is the name of the service template to show information on.
+    """
+    logger.info('Showing service template {0}...'.format(service_template_name))
+    service_template = model_storage.service_template.get_by_name(service_template_name)
+    services = [d.to_dict() for d in service_template.services]
+    service_template_dict = service_template.to_dict()
+    service_template_dict['#services'] = len(services)
+    columns = SERVICE_TEMPLATE_COLUMNS + ['#services']
+    print_data(columns, service_template_dict, 'Service-template:', max_width=50)
+
+    if service_template_dict['description'] is not None:
+        logger.info('Description:')
+        logger.info('{0}{1}'.format(service_template_dict['description'].encode('UTF-8') or '',
+                                    os.linesep))
+
+    logger.info('Existing services:')
+    logger.info('{0}{1}'.format([s['name'] for s in services],
+                                os.linesep))
+
+
+@service_templates.command(name='list',
+                           short_help='List service templates')
+@aria.options.sort_by()
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(sort_by, descending, model_storage, logger):
+    """List all service templates
+    """
+    def trim_description(service_template):
+        if service_template['description'] is not None:
+            if len(service_template['description']) >= DESCRIPTION_LIMIT:
+                service_template['description'] = '{0}..'.format(
+                    service_template['description'][:DESCRIPTION_LIMIT - 2])
+        else:
+            service_template['description'] = ''
+        return service_template
+
+    logger.info('Listing all service templates...')
+    service_templates_list = [trim_description(b.to_dict()) for b in
+                              model_storage.service_template.list(
+                                  sort=utils.storage_sort_param(sort_by, descending))]
+    print_data(SERVICE_TEMPLATE_COLUMNS, service_templates_list, 'Service templates:')
+
+
+@service_templates.command(name='store',
+                           short_help='Store a service template')
+@aria.argument('service-template-path')
+@aria.argument('service-template-name')
+@aria.options.service_template_filename
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def store(service_template_path, service_template_name, service_template_filename,
+          model_storage, resource_storage, plugin_manager, logger):
+    """Store a service template
+
+    `SERVICE_TEMPLATE_PATH` is the path of the service template to store.
+
+    `SERVICE_TEMPLATE_NAME` is the name of the service template to store.
+    """
+    logger.info('Storing service template {0}...'.format(service_template_name))
+
+    service_template_path = service_template_utils.get(service_template_path,
+                                                       service_template_filename)
+    core = Core(model_storage, resource_storage, plugin_manager)
+    try:
+        core.create_service_template(service_template_path,
+                                     os.path.dirname(service_template_path),
+                                     service_template_name)
+    except storage_exceptions.StorageError as e:
+        handle_storage_exception(e, 'service template', service_template_name)
+    logger.info('Service template {0} stored'.format(service_template_name))
+
+
+@service_templates.command(name='delete',
+                           short_help='Delete a service template')
+@aria.argument('service-template-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def delete(service_template_name, model_storage, resource_storage, plugin_manager, logger):
+    """Delete a service template
+    `SERVICE_TEMPLATE_NAME` is the name of the service template to delete.
+    """
+    logger.info('Deleting service template {0}...'.format(service_template_name))
+    service_template = model_storage.service_template.get_by_name(service_template_name)
+    core = Core(model_storage, resource_storage, plugin_manager)
+    try:
+        core.delete_service_template(service_template.id)
+    except storage_exceptions.NotFoundError:
+        raise AriaCliError()
+    logger.info('Service template {0} deleted'.format(service_template_name))
+
+
+@service_templates.command(name='inputs',
+                           short_help='Show service template inputs')
+@aria.argument('service-template-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def inputs(service_template_name, model_storage, logger):
+    """Show inputs for a specific service template
+
+    `SERVICE_TEMPLATE_NAME` is the name of the service template to show inputs for.
+    """
+    logger.info('Showing inputs for service template {0}...'.format(service_template_name))
+    print_service_template_inputs(model_storage, service_template_name, logger)
+
+
+@service_templates.command(name='validate',
+                           short_help='Validate a service template')
+@aria.argument('service-template')
+@aria.options.service_template_filename
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def validate(service_template, service_template_filename,
+             model_storage, resource_storage, plugin_manager, logger):
+    """Validate a service template
+
+    `SERVICE_TEMPLATE` is the path or url of the service template or archive to validate.
+    """
+    logger.info('Validating service template: {0}'.format(service_template))
+    service_template_path = service_template_utils.get(service_template, service_template_filename)
+    core = Core(model_storage, resource_storage, plugin_manager)
+
+    try:
+        core.validate_service_template(service_template_path)
+    except AriaException as e:
+        # TODO: gather errors from parser and dump them via CLI?
+        raise AriaCliError(str(e))
+
+    logger.info('Service template validated successfully')
+
+
+@service_templates.command(name='create-archive',
+                           short_help='Create a csar archive')
+@aria.argument('service-template-path')
+@aria.argument('destination')
+@aria.options.verbose()
+@aria.pass_logger
+def create_archive(service_template_path, destination, logger):
+    """Create a csar archive
+
+    `SERVICE_TEMPLATE_PATH` is the path of the service template to create the archive from.
+    `DESTINATION` is the path of the output csar archive.
+    """
+    logger.info('Creating a csar archive')
+    csar.write(os.path.dirname(service_template_path), service_template_path, destination, logger)
+    logger.info('Csar archive created at {0}'.format(destination))
+
+
+def print_service_template_inputs(model_storage, service_template_name, logger):
+    service_template = model_storage.service_template.get_by_name(service_template_name)
+
+    logger.info('Service template inputs:')
+    if service_template.inputs:
+        logger.info(utils.get_parameter_templates_as_string(service_template.inputs))
+    else:
+        logger.info('\tNo inputs')
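
A quick usage sketch for this command group (paths and names are hypothetical, and it
assumes the `aria` console entry point exposes the commands exactly as declared above):

    aria service-templates store /path/to/template.yaml my-template
    aria service-templates inputs my-template
    aria service-templates create-archive /path/to/template.yaml /tmp/my-template.csar
    aria service-templates delete my-template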

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/commands/services.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/services.py b/aria/cli/commands/services.py
new file mode 100644
index 0000000..b785006
--- /dev/null
+++ b/aria/cli/commands/services.py
@@ -0,0 +1,180 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+from StringIO import StringIO
+
+from . import service_templates
+from ..cli import aria, helptexts
+from ..exceptions import AriaCliError
+from ..table import print_data
+from ..utils import storage_sort_param, handle_storage_exception
+from ...core import Core
+from ...exceptions import AriaException
+from ...storage import exceptions as storage_exceptions
+
+
+SERVICE_COLUMNS = ['id', 'name', 'service_template_name', 'created_at', 'updated_at']
+
+
+@aria.group(name='services')
+@aria.options.verbose()
+def services():
+    """Handle services
+    """
+    pass
+
+
+@services.command(name='list', short_help='List services')
+@aria.options.service_template_name()
+@aria.options.sort_by()
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(service_template_name,
+         sort_by,
+         descending,
+         model_storage,
+         logger):
+    """List services
+
+    If `--service-template-name` is provided, list services for that service template.
+    Otherwise, list services for all service templates.
+    """
+    if service_template_name:
+        logger.info('Listing services for service template {0}...'.format(
+            service_template_name))
+        service_template = model_storage.service_template.get_by_name(service_template_name)
+        filters = dict(service_template=service_template)
+    else:
+        logger.info('Listing all services...')
+        filters = {}
+
+    services_list = [d.to_dict() for d in model_storage.service.list(
+        sort=storage_sort_param(sort_by=sort_by, descending=descending),
+        filters=filters)]
+    print_data(SERVICE_COLUMNS, services_list, 'Services:')
+
+
+@services.command(name='create',
+                  short_help='Create a service')
+@aria.argument('service-name', required=False)
+@aria.options.service_template_name(required=True)
+@aria.options.inputs
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def create(service_template_name,
+           service_name,
+           inputs,  # pylint: disable=redefined-outer-name
+           model_storage,
+           resource_storage,
+           plugin_manager,
+           logger):
+    """Create a service
+
+    `SERVICE_NAME` is the name of the service you'd like to create.
+
+    """
+    logger.info('Creating new service from service template {0}...'.format(
+        service_template_name))
+
+    try:
+        core = Core(model_storage, resource_storage, plugin_manager)
+        service_template = model_storage.service_template.get_by_name(service_template_name)
+        service = core.create_service(service_template.id, inputs, service_name)
+    except storage_exceptions.StorageError as e:
+        handle_storage_exception(e, 'service', service_name)
+    except AriaException as e:
+        logger.info(str(e))
+        service_templates.print_service_template_inputs(model_storage, service_template_name,
+                                                        logger)
+        raise AriaCliError(str(e))
+    logger.info("Service created. The service's name is {0}".format(service.name))
+
+
+@services.command(name='delete',
+                  short_help='Delete a service')
+@aria.argument('service-name')
+@aria.options.force(help=helptexts.IGNORE_RUNNING_NODES)
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def delete(service_name, force, model_storage, resource_storage, plugin_manager, logger):
+    """Delete a service
+
+    `SERVICE_NAME` is the name of the service to delete.
+    """
+    logger.info('Deleting service {0}...'.format(service_name))
+    service = model_storage.service.get_by_name(service_name)
+    core = Core(model_storage, resource_storage, plugin_manager)
+    core.delete_service(service.id, force=force)
+    logger.info('Service {0} deleted'.format(service_name))
+
+
+@services.command(name='outputs',
+                  short_help='Show service outputs')
+@aria.argument('service-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def outputs(service_name, model_storage, logger):
+    """Show outputs for a specific service
+
+    `SERVICE_NAME` is the name of the service to print outputs for.
+    """
+    logger.info('Showing outputs for service {0}...'.format(service_name))
+    service = model_storage.service.get_by_name(service_name)
+    # TODO: fix this section
+    outputs_def = service.outputs
+    response = model_storage.service.outputs.get(service_name)
+    outputs_ = StringIO()
+    for output_name, output in response.outputs.iteritems():
+        outputs_.write(' - "{0}":{1}'.format(output_name, os.linesep))
+        description = outputs_def[output_name].get('description', '')
+        outputs_.write('     Description: {0}{1}'.format(description,
+                                                         os.linesep))
+        outputs_.write('     Value: {0}{1}'.format(output, os.linesep))
+    logger.info(outputs_.getvalue())
+
+
+@services.command(name='inputs',
+                  short_help='Show service inputs')
+@aria.argument('service-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def inputs(service_name, model_storage, logger):
+    """Show inputs for a specific service
+
+    `SERVICE_NAME` is the name of the service to print inputs for.
+    """
+    logger.info('Showing inputs for service {0}...'.format(service_name))
+    service = model_storage.service.get_by_name(service_name)
+    if service.inputs:
+        inputs_ = StringIO()
+        for input_name, input in service.inputs.iteritems():
+            inputs_.write(' - "{0}":{1}'.format(input_name, os.linesep))
+            inputs_.write('     Value: {0}{1}'.format(input.value, os.linesep))
+        logger.info(inputs_.getvalue())
+    else:
+        logger.info('\tNo inputs')
+    logger.info('')
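
A hedged sketch of the query behind `services list` above, reusing the
`storage_sort_param` helper from `aria/cli/utils.py` (shown further below):

    from aria.cli.env import env
    from aria.cli.utils import storage_sort_param

    services = env.model_storage.service.list(
        sort=storage_sort_param(sort_by='created_at', descending=True),
        filters={})
    for service in services:
        print(service.name)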

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/commands/workflows.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/workflows.py b/aria/cli/commands/workflows.py
new file mode 100644
index 0000000..72dea5b
--- /dev/null
+++ b/aria/cli/commands/workflows.py
@@ -0,0 +1,102 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ..table import print_data
+from ..cli import aria
+from ..exceptions import AriaCliError
+
+WORKFLOW_COLUMNS = ['name', 'service_template_name', 'service_name']
+
+
+@aria.group(name='workflows')
+def workflows():
+    """Handle service workflows
+    """
+    pass
+
+
+@workflows.command(name='show',
+                   short_help='Show workflow information')
+@aria.argument('workflow-name')
+@aria.options.service_name(required=True)
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(workflow_name, service_name, model_storage, logger):
+    """Show information for a specific workflow of a specific service
+
+    `WORKFLOW_NAME` is the name of the workflow to get information on.
+    """
+    logger.info('Retrieving workflow {0} for service {1}'.format(
+        workflow_name, service_name))
+    service = model_storage.service.get_by_name(service_name)
+    workflow = next((wf for wf in service.workflows.values() if
+                     wf.name == workflow_name), None)
+    if not workflow:
+        raise AriaCliError(
+            'Workflow {0} not found for service {1}'.format(workflow_name, service_name))
+
+    defaults = {
+        'service_template_name': service.service_template_name,
+        'service_name': service.name
+    }
+    print_data(WORKFLOW_COLUMNS, workflow.to_dict(), 'Workflows:', defaults=defaults)
+
+    # print workflow inputs
+    required_inputs = dict()
+    optional_inputs = dict()
+    for input_name, input in workflow.inputs.iteritems():
+        inputs_group = optional_inputs if input.value is not None else required_inputs
+        inputs_group[input_name] = input
+
+    logger.info('Workflow Inputs:')
+    logger.info('\tMandatory Inputs:')
+    for input_name, input in required_inputs.iteritems():
+        if input.description is not None:
+            logger.info('\t\t{0}\t({1})'.format(input_name,
+                                                input.description))
+        else:
+            logger.info('\t\t{0}'.format(input_name))
+
+    logger.info('\tOptional Inputs:')
+    for input_name, input in optional_inputs.iteritems():
+        if input.description is not None:
+            logger.info('\t\t{0}: \t{1}\t({2})'.format(
+                input_name, input.value, input.description))
+        else:
+            logger.info('\t\t{0}: \t{1}'.format(input_name,
+                                                input.value))
+    logger.info('')
+
+
+@workflows.command(name='list',
+                   short_help='List workflows for a service')
+@aria.options.service_name(required=True)
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(service_name, model_storage, logger):
+    """List all workflows of a specific service
+    """
+    logger.info('Listing workflows for service {0}...'.format(service_name))
+    service = model_storage.service.get_by_name(service_name)
+    workflows_list = [wf.to_dict() for wf in
+                      sorted(service.workflows.values(), key=lambda w: w.name)]
+
+    defaults = {
+        'service_template_name': service.service_template_name,
+        'service_name': service.name
+    }
+    print_data(WORKFLOW_COLUMNS, workflows_list, 'Workflows:', defaults=defaults)
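
A short sketch of the lookup `workflows list` performs, assuming a service named
`my-service` already exists in the model storage:

    from aria.cli.env import env

    service = env.model_storage.service.get_by_name('my-service')
    for workflow in sorted(service.workflows.values(), key=lambda w: w.name):
        print(workflow.name)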

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/config.py
----------------------------------------------------------------------
diff --git a/aria/cli/config.py b/aria/cli/config.py
deleted file mode 100644
index d82886d..0000000
--- a/aria/cli/config.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI configuration
-"""
-
-import os
-import logging
-from getpass import getuser
-from tempfile import gettempdir
-
-from yaml import safe_load
-
-from .storage import config_file_path
-
-# path to a file where cli logs will be saved.
-logging_filename = os.path.join(gettempdir(), 'aria_cli_{0}.log'.format(getuser()))
-# loggers log level to show
-logger_level = logging.INFO
-# loggers log level to show
-colors = True
-
-import_resolver = None
-
-
-def load_configurations():
-    """
-    Dynamically load attributes into the config module from the ``config.yaml`` defined in the user
-    configuration directory
-    """
-    config_path = config_file_path()
-    with open(config_path) as config_file:
-        globals().update(safe_load(config_file) or {})

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/config/__init__.py
----------------------------------------------------------------------
diff --git a/aria/cli/config/__init__.py b/aria/cli/config/__init__.py
new file mode 100644
index 0000000..ae1e83e
--- /dev/null
+++ b/aria/cli/config/__init__.py
@@ -0,0 +1,14 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/config/config.py
----------------------------------------------------------------------
diff --git a/aria/cli/config/config.py b/aria/cli/config/config.py
new file mode 100644
index 0000000..7d76830
--- /dev/null
+++ b/aria/cli/config/config.py
@@ -0,0 +1,70 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import yaml
+import pkg_resources
+
+from jinja2.environment import Template
+
+
+class CliConfig(object):
+
+    def __init__(self, config_path):
+        with open(config_path) as f:
+            self._config = yaml.safe_load(f.read())
+
+    @classmethod
+    def create_config(cls, workdir):
+        config_path = os.path.join(workdir, 'config.yaml')
+        if not os.path.isfile(config_path):
+            config_template = pkg_resources.resource_string(
+                __package__,
+                'config_template.yaml')
+
+            default_values = {
+                'log_path': os.path.join(workdir, 'cli.log'),
+                'enable_colors': True
+            }
+
+            template = Template(config_template)
+            rendered = template.render(**default_values)
+            with open(config_path, 'w') as f:
+                f.write(rendered)
+                f.write(os.linesep)
+
+        return cls(config_path)
+
+    @property
+    def colors(self):
+        return self._config.get('colors', False)
+
+    @property
+    def logging(self):
+        return self.Logging(self._config.get('logging'))
+
+    class Logging(object):
+
+        def __init__(self, logging):
+            self._logging = logging or {}
+
+        @property
+        def filename(self):
+            return self._logging.get('filename')
+
+        @property
+        def loggers(self):
+            return self._logging.get('loggers', {})

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/config/config_template.yaml
----------------------------------------------------------------------
diff --git a/aria/cli/config/config_template.yaml b/aria/cli/config/config_template.yaml
new file mode 100644
index 0000000..13f2cf9
--- /dev/null
+++ b/aria/cli/config/config_template.yaml
@@ -0,0 +1,12 @@
+colors: {{ enable_colors }}
+
+logging:
+
+  # path to a file where cli logs will be saved.
+  filename: {{ log_path }}
+
+  # configuring level per logger
+  loggers:
+
+    # main logger of the cli. provides basic descriptions for executed operations.
+    aria.cli.main: info
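
A minimal sketch of `CliConfig` together with this template: on first use,
`create_config` renders the template into `<workdir>/config.yaml` and loads it.

    import tempfile
    from aria.cli.config.config import CliConfig

    config = CliConfig.create_config(tempfile.mkdtemp())
    print(config.colors)            # True - the rendered template enables colors
    print(config.logging.filename)  # <workdir>/cli.log
    print(config.logging.loggers)   # {'aria.cli.main': 'info'}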

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/constants.py
----------------------------------------------------------------------
diff --git a/aria/cli/constants.py b/aria/cli/constants.py
new file mode 100644
index 0000000..c68fb5e
--- /dev/null
+++ b/aria/cli/constants.py
@@ -0,0 +1,18 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+DEFAULT_SERVICE_TEMPLATE_FILENAME = 'service_template.yaml'
+HELP_TEXT_COLUMN_BUFFER = 5

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/csar.py
----------------------------------------------------------------------
diff --git a/aria/cli/csar.py b/aria/cli/csar.py
index b185f46..5bc35ac 100644
--- a/aria/cli/csar.py
+++ b/aria/cli/csar.py
@@ -14,12 +14,13 @@
 # limitations under the License.
 
 import os
+import logging
 import pprint
 import tempfile
 import zipfile
 
 import requests
-from ruamel import yaml # @UnresolvedImport
+from ruamel import yaml
 
 
 META_FILE = 'TOSCA-Metadata/TOSCA.meta'
@@ -135,7 +136,7 @@ class _CSARReader(object):
         self.logger.debug('Attempting to parse CSAR metadata YAML')
         with open(csar_metafile) as f:
             self.metadata.update(yaml.load(f))
-        self.logger.debug('CSAR metadata:\n{0}'.format(pprint.pformat(self.metadata)))
+        self.logger.debug('CSAR metadata:{0}{1}'.format(os.linesep, pprint.pformat(self.metadata)))
 
     def _validate(self):
         def validate_key(key, expected=None):
@@ -167,5 +168,11 @@ class _CSARReader(object):
                     f.write(chunk)
 
 
-def read(source, destination, logger):
+def read(source, destination=None, logger=None):
+    destination = destination or tempfile.mkdtemp()
+    logger = logger or logging.getLogger('dummy')
     return _CSARReader(source=source, destination=destination, logger=logger)
+
+
+def is_csar_archive(source):
+    return source.endswith('.csar')
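
Hedged usage for the updated `read` signature above: with no destination the CSAR is
unpacked into a fresh temporary directory, and with no logger a placeholder 'dummy'
logger is used (the file name here is illustrative):

    from aria.cli import csar

    if csar.is_csar_archive('service.csar'):   # a simple suffix check, per above
        reader = csar.read('service.csar')     # extracts into tempfile.mkdtemp()
        print(reader.destination)
        print(reader.entry_definitions)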

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/dry.py
----------------------------------------------------------------------
diff --git a/aria/cli/dry.py b/aria/cli/dry.py
deleted file mode 100644
index fc6c0c5..0000000
--- a/aria/cli/dry.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from threading import RLock
-
-from ..modeling import models
-from ..orchestrator.decorators import operation
-from ..utils.collections import OrderedDict
-from ..utils.console import puts, Colored
-from ..utils.formatting import safe_repr
-
-
-_TERMINAL_LOCK = RLock()
-
-
-def convert_to_dry(service):
-    """
-    Converts all operations on the service (on workflows, node interfaces, and relationship
-    interfaces) to run dryly.
-    """
-
-    for workflow in service.workflows.itervalues():
-        convert_operation_to_dry(workflow)
-
-    for node in service.nodes.itervalues():
-        for interface in node.interfaces.itervalues():
-            for oper in interface.operations.itervalues():
-                convert_operation_to_dry(oper)
-        for relationship in node.outbound_relationships:
-            for interface in relationship.interfaces.itervalues():
-                for oper in interface.operations.itervalues():
-                    convert_operation_to_dry(oper)
-
-    for group in service.groups.itervalues():
-        for interface in group.interfaces.itervalues():
-            for oper in interface.operations.itervalues():
-                convert_operation_to_dry(oper)
-
-
-def convert_operation_to_dry(oper):
-    """
-    Converts a single :class:`Operation` to run dryly.
-    """
-
-    plugin = oper.plugin.name \
-        if oper.plugin is not None else None
-    if oper.inputs is None:
-        oper.inputs = OrderedDict()
-    oper.inputs['_implementation'] = models.Parameter(name='_implementation',
-                                                      type_name='string',
-                                                      value=oper.implementation)
-    oper.inputs['_plugin'] = models.Parameter(name='_plugin',
-                                              type_name='string',
-                                              value=plugin)
-    oper.implementation = '{0}.{1}'.format(__name__, 'dry_operation')
-    oper.plugin_specification = None
-
-
-@operation
-def dry_operation(ctx, _plugin, _implementation, **kwargs):
-    """
-    The dry operation simply prints out information about the operation to the console.
-    """
-
-    with _TERMINAL_LOCK:
-        print ctx.name
-        if hasattr(ctx, 'relationship'):
-            puts('> Relationship: {0} -> {1}'.format(
-                Colored.red(ctx.relationship.source_node.name),
-                Colored.red(ctx.relationship.target_node.name)))
-        else:
-            puts('> Node: {0}'.format(Colored.red(ctx.node.name)))
-        puts('  Operation: {0}'.format(Colored.green(ctx.name)))
-        _dump_implementation(_plugin, _implementation)
-
-
-def _dump_implementation(plugin, implementation):
-    if plugin:
-        puts('  Plugin: {0}'.format(Colored.magenta(plugin, bold=True)))
-    if implementation:
-        puts('  Implementation: {0}'.format(Colored.magenta(safe_repr(implementation))))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/env.py
----------------------------------------------------------------------
diff --git a/aria/cli/env.py b/aria/cli/env.py
new file mode 100644
index 0000000..7fe656f
--- /dev/null
+++ b/aria/cli/env.py
@@ -0,0 +1,118 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import json
+import pkgutil
+
+from .config import config
+from .logger import Logging
+from .. import (application_model_storage, application_resource_storage)
+from ..orchestrator.plugin import PluginManager
+from ..storage.sql_mapi import SQLAlchemyModelAPI
+from ..storage.filesystem_rapi import FileSystemResourceAPI
+
+
+ARIA_DEFAULT_WORKDIR_NAME = '.aria'
+
+
+class _Environment(object):
+
+    def __init__(self, workdir):
+
+        self._workdir = workdir
+        self._init_workdir()
+
+        self._config = config.CliConfig.create_config(workdir)
+        self._logging = Logging(self._config)
+
+        self._model_storage_dir = os.path.join(workdir, 'models')
+        self._resource_storage_dir = os.path.join(workdir, 'resources')
+        self._plugins_dir = os.path.join(workdir, 'plugins')
+
+        # initialized lazily
+        self._model_storage = None
+        self._resource_storage = None
+        self._plugin_manager = None
+
+    @property
+    def workdir(self):
+        return self._workdir
+
+    @property
+    def config(self):
+        return self._config
+
+    @property
+    def logging(self):
+        return self._logging
+
+    @property
+    def model_storage(self):
+        if not self._model_storage:
+            self._model_storage = self._init_sqlite_model_storage()
+        return self._model_storage
+
+    @property
+    def resource_storage(self):
+        if not self._resource_storage:
+            self._resource_storage = self._init_fs_resource_storage()
+        return self._resource_storage
+
+    @property
+    def plugin_manager(self):
+        if not self._plugin_manager:
+            self._plugin_manager = self._init_plugin_manager()
+        return self._plugin_manager
+
+    @staticmethod
+    def get_version_data():
+        data = pkgutil.get_data(__package__, 'VERSION')
+        return json.loads(data)
+
+    def _init_workdir(self):
+        if not os.path.exists(self._workdir):
+            os.makedirs(self._workdir)
+
+    def _init_sqlite_model_storage(self):
+        if not os.path.exists(self._model_storage_dir):
+            os.makedirs(self._model_storage_dir)
+
+        initiator_kwargs = dict(base_dir=self._model_storage_dir)
+        return application_model_storage(
+            SQLAlchemyModelAPI,
+            initiator_kwargs=initiator_kwargs)
+
+    def _init_fs_resource_storage(self):
+        if not os.path.exists(self._resource_storage_dir):
+            os.makedirs(self._resource_storage_dir)
+
+        fs_kwargs = dict(directory=self._resource_storage_dir)
+        return application_resource_storage(
+            FileSystemResourceAPI,
+            api_kwargs=fs_kwargs)
+
+    def _init_plugin_manager(self):
+        if not os.path.exists(self._plugins_dir):
+            os.makedirs(self._plugins_dir)
+
+        return PluginManager(self.model_storage, self._plugins_dir)
+
+
+env = _Environment(os.path.join(
+    os.environ.get('ARIA_WORKDIR', os.path.expanduser('~')), ARIA_DEFAULT_WORKDIR_NAME))
+
+logger = env.logging.logger
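
The environment above is a lazily-initialized singleton; a minimal sketch, assuming
`ARIA_WORKDIR` is set before `aria.cli.env` is first imported:

    import os
    os.environ.setdefault('ARIA_WORKDIR', '/tmp')  # must precede the import below

    from aria.cli.env import env

    print(env.workdir)            # e.g. /tmp/.aria
    models = env.model_storage    # first access creates the SQLite-backed storage
    plugins = env.plugin_manager  # likewise constructed on demand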

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/cli/exceptions.py b/aria/cli/exceptions.py
index 6897731..89cfacd 100644
--- a/aria/cli/exceptions.py
+++ b/aria/cli/exceptions.py
@@ -13,59 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""
-CLI various exception classes
-"""
 
+from ..exceptions import AriaError
 
-class AriaCliError(Exception):
-    """
-    General CLI Exception class
-    """
-    pass
-
-
-class AriaCliFormatInputsError(AriaCliError):
-    """
-    Raised when provided inputs are malformed.
-    """
-
-    def __init__(self, message, inputs):
-        self.inputs = inputs
-        super(AriaCliFormatInputsError, self).__init__(message)
-
-    def user_message(self):
-        """
-        Describes the format error in detail.
-        """
-        return (
-            'Invalid input format: {0}, '
-            'the expected format is: '
-            'key1=value1;key2=value2'.format(self.inputs))
 
-
-class AriaCliYAMLInputsError(AriaCliError):
-    """
-    Raised when an invalid yaml file is provided
-    """
+class AriaCliError(AriaError):
     pass
-
-
-class AriaCliInvalidInputsError(AriaCliFormatInputsError):
-    """
-    Raised when provided inputs are invalid.
-    """
-
-    def user_message(self):
-        """
-        Describes the error in detail.
-        """
-        return (
-            'Invalid input: {0}. input must represent a dictionary.\n'
-            'Valid values can be one of:\n'
-            '- a path to a YAML file\n'
-            '- a path to a directory containing YAML files\n'
-            '- a single quoted wildcard based path (e.g. "*-inputs.yaml")\n'
-            '- a string formatted as JSON\n'
-            '- a string formatted as key1=value1;key2=value2'.format(self.inputs)
-        )

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/inputs.py
----------------------------------------------------------------------
diff --git a/aria/cli/inputs.py b/aria/cli/inputs.py
new file mode 100644
index 0000000..78db846
--- /dev/null
+++ b/aria/cli/inputs.py
@@ -0,0 +1,118 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import glob
+import yaml
+
+from .env import logger
+from .exceptions import AriaCliError
+
+
+def inputs_to_dict(resources):
+    """Returns a dictionary of inputs
+
+    `resources` can be:
+    - A list of files.
+    - A single file
+    - A directory containing multiple input files
+    - A key1=value1;key2=value2 pairs string.
+    - A string formatted as JSON/YAML.
+    - Wildcard based string (e.g. *-inputs.yaml)
+    """
+    if not resources:
+        return dict()
+
+    parsed_dict = {}
+
+    for resource in resources:
+        logger.debug('Processing inputs source: {0}'.format(resource))
+        # Workflow parameters always pass an empty dictionary, which we ignore
+        if isinstance(resource, basestring):
+            try:
+                parsed_dict.update(_parse_single_input(resource))
+            except AriaCliError:
+                raise AriaCliError(
+                    "Invalid input: {0}. It must represent a dictionary. "
+                    "Valid values can be one of:{1} "
+                    "- A path to a YAML file{1} "
+                    "- A path to a directory containing YAML files{1} "
+                    "- A single quoted wildcard based path "
+                    "(e.g. '*-inputs.yaml'){1} "
+                    "- A string formatted as JSON/YAML{1} "
+                    "- A string formatted as key1=value1;key2=value2".format(
+                        resource, os.linesep))
+    return parsed_dict
+
+
+def _parse_single_input(resource):
+    try:
+        # parse resource as string representation of a dictionary
+        return _plain_string_to_dict(resource)
+    except AriaCliError:
+        input_files = glob.glob(resource)
+        parsed_dict = dict()
+        if os.path.isdir(resource):
+            for input_file in os.listdir(resource):
+                parsed_dict.update(
+                    _parse_yaml_path(os.path.join(resource, input_file)))
+        elif input_files:
+            for input_file in input_files:
+                parsed_dict.update(_parse_yaml_path(input_file))
+        else:
+            parsed_dict.update(_parse_yaml_path(resource))
+    return parsed_dict
+
+
+def _parse_yaml_path(resource):
+
+    try:
+        # if resource is a path - parse as a yaml file
+        if os.path.isfile(resource):
+            with open(resource) as f:
+                content = yaml.load(f.read())
+        else:
+            # parse resource content as yaml
+            content = yaml.load(resource)
+    except yaml.error.YAMLError as e:
+        raise AriaCliError("'{0}' is not a valid YAML. {1}".format(
+            resource, str(e)))
+
+    # Empty files return None
+    content = content or dict()
+    if not isinstance(content, dict):
+        raise AriaCliError()
+
+    return content
+
+
+def _plain_string_to_dict(input_string):
+    input_string = input_string.strip()
+    input_dict = {}
+    mapped_inputs = input_string.split(';')
+    for mapped_input in mapped_inputs:
+        mapped_input = mapped_input.strip()
+        if not mapped_input:
+            continue
+        split_mapping = mapped_input.split('=')
+        try:
+            key = split_mapping[0].strip()
+            value = split_mapping[1].strip()
+        except IndexError:
+            raise AriaCliError(
+                "Invalid input format: {0}, the expected format is: "
+                "key1=value1;key2=value2".format(input_string))
+        input_dict[key] = value
+    return input_dict
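
Two hedged examples of the input formats `inputs_to_dict` above accepts; both
strings are parsed in-memory, so no input files are needed:

    from aria.cli.inputs import inputs_to_dict

    print(inputs_to_dict(['key1=value1;key2=value2']))
    # {'key1': 'value1', 'key2': 'value2'}

    print(inputs_to_dict(['{"port": 8080}']))  # inline JSON/YAML also works
    # {'port': 8080}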

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/logger.py
----------------------------------------------------------------------
diff --git a/aria/cli/logger.py b/aria/cli/logger.py
new file mode 100644
index 0000000..2f012d9
--- /dev/null
+++ b/aria/cli/logger.py
@@ -0,0 +1,114 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import copy
+import logging
+from logutils import dictconfig
+
+
+HIGH_VERBOSE = 3
+MEDIUM_VERBOSE = 2
+LOW_VERBOSE = 1
+NO_VERBOSE = 0
+
+DEFAULT_LOGGER_CONFIG = {
+    "version": 1,
+    "formatters": {
+        "file": {
+            "format": "%(asctime)s [%(levelname)s] %(message)s"
+        },
+        "console": {
+            "format": "%(message)s"
+        }
+    },
+    "handlers": {
+        "file": {
+            "class": "logging.handlers.RotatingFileHandler",
+            "formatter": "file",
+            "maxBytes": "5000000",
+            "backupCount": "20"
+        },
+        "console": {
+            "class": "logging.StreamHandler",
+            "stream": "ext://sys.stdout",
+            "formatter": "console"
+        }
+    },
+    "disable_existing_loggers": False
+}
+
+
+class Logging(object):
+
+    def __init__(self, config):
+        self._log_file = None
+        self._verbosity_level = NO_VERBOSE
+        self._all_loggers = []
+        self._configure_loggers(config)
+        self._lgr = logging.getLogger('aria.cli.main')
+
+    @property
+    def logger(self):
+        return self._lgr
+
+    @property
+    def log_file(self):
+        return self._log_file
+
+    @property
+    def verbosity_level(self):
+        return self._verbosity_level
+
+    def is_high_verbose_level(self):
+        return self.verbosity_level == HIGH_VERBOSE
+
+    @verbosity_level.setter
+    def verbosity_level(self, level):
+        self._verbosity_level = level
+        if self.is_high_verbose_level():
+            for logger_name in self._all_loggers:
+                logging.getLogger(logger_name).setLevel(logging.DEBUG)
+
+    def _configure_loggers(self, config):
+        loggers_config = config.logging.loggers
+        logfile = config.logging.filename
+
+        logger_dict = copy.deepcopy(DEFAULT_LOGGER_CONFIG)
+        if logfile:
+            # set filename on file handler
+            logger_dict['handlers']['file']['filename'] = logfile
+            logfile_dir = os.path.dirname(logfile)
+            if not os.path.exists(logfile_dir):
+                os.makedirs(logfile_dir)
+            self._log_file = logfile
+        else:
+            del logger_dict['handlers']['file']
+
+        # add handlers to all loggers
+        loggers = {}
+        for logger_name in loggers_config:
+            loggers[logger_name] = dict(handlers=list(logger_dict['handlers'].keys()))
+        logger_dict['loggers'] = loggers
+
+        # set level for all loggers
+        for logger_name, logging_level in loggers_config.iteritems():
+            log = logging.getLogger(logger_name)
+            level = logging._levelNames[logging_level.upper()]
+            log.setLevel(level)
+            self._all_loggers.append(logger_name)
+
+        dictconfig.dictConfig(logger_dict)
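
A short sketch of the verbosity handling above: raising the level to `HIGH_VERBOSE`
switches every configured logger to DEBUG.

    from aria.cli.env import env
    from aria.cli.logger import HIGH_VERBOSE

    env.logging.verbosity_level = HIGH_VERBOSE
    assert env.logging.is_high_verbose_level()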

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/main.py
----------------------------------------------------------------------
diff --git a/aria/cli/main.py b/aria/cli/main.py
new file mode 100644
index 0000000..d06ad8a
--- /dev/null
+++ b/aria/cli/main.py
@@ -0,0 +1,59 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from aria import install_aria_extensions
+from aria.cli import commands
+from aria.cli.cli import aria
+
+
+@aria.group(name='aria')
+@aria.options.verbose()
+@aria.options.version
+def _aria():
+    """ARIA's Command Line Interface
+
+    To activate bash-completion, run: `eval "$(_ARIA_COMPLETE=source aria)"`
+
+    ARIA's working directory resides by default in ~/.aria. To change it, set
+    the environment variable `ARIA_WORKDIR` to something else (e.g. /tmp/).
+    """
+    aria.set_cli_except_hook()
+
+
+def _register_commands():
+    """
+    Register the CLI's commands.
+    """
+
+    _aria.add_command(commands.service_templates.service_templates)
+    _aria.add_command(commands.node_templates.node_templates)
+    _aria.add_command(commands.services.services)
+    _aria.add_command(commands.nodes.nodes)
+    _aria.add_command(commands.workflows.workflows)
+    _aria.add_command(commands.executions.executions)
+    _aria.add_command(commands.plugins.plugins)
+    _aria.add_command(commands.logs.logs)
+
+
+_register_commands()
+
+
+def main():
+    install_aria_extensions()
+    _aria()
+
+
+if __name__ == '__main__':
+    main()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/service_template_utils.py
----------------------------------------------------------------------
diff --git a/aria/cli/service_template_utils.py b/aria/cli/service_template_utils.py
new file mode 100644
index 0000000..0300449
--- /dev/null
+++ b/aria/cli/service_template_utils.py
@@ -0,0 +1,140 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from urlparse import urlparse
+
+from . import csar
+from . import utils
+from .exceptions import AriaCliError
+from ..utils import archive as archive_utils
+
+
+def get(source, service_template_filename):
+    """Get a source and return a path to the main service template file
+
+    The behavior, based on the source argument content, is:
+
+        - local archive:
+            extract it locally and return the path to the service template file
+        - local yaml file:
+            return the file as-is
+        - URL:
+            download the archive and return the service template file from it
+        - github repo:
+            map it to a URL, download the archive and return the service
+            template file from it
+
+    Supported archive types are: csar, zip, tar, tar.gz and tar.bz2
+
+    :param source: Path/URL/github repo to archive/service-template file
+    :type source: str
+    :param service_template_filename: Path to service template (if source is an archive file)
+    :type service_template_filename: str
+    :return: Path to the main service template file
+    :rtype: str
+
+    """
+    if urlparse(source).scheme:
+        downloaded_file = utils.download_file(source)
+        return _get_service_template_file_from_archive(
+            downloaded_file, service_template_filename)
+    elif os.path.isfile(source):
+        if _is_archive(source):
+            return _get_service_template_file_from_archive(source, service_template_filename)
+        else:
+            # TODO: maybe verify that the file is valid YAML
+            return source
+    elif len(source.split('/')) == 2:
+        url = _map_to_github_url(source)
+        downloaded_file = utils.download_file(url)
+        return _get_service_template_file_from_archive(
+            downloaded_file, service_template_filename)
+    else:
+        raise AriaCliError(
+            'You must provide either a path to a local file, a remote URL '
+            'or a GitHub `organization/repository[:tag/branch]`')
+
+
+def _get_service_template_file_from_archive(archive, service_template_filename):
+    """Extract archive to temporary location and get path to service template file.
+
+    :param archive: Path to archive file
+    :type archive: str
+    :param service_template_filename: Path to service template file relative to archive
+    :type service_template_filename: str
+    :return: Absolute path to service template file
+    :rtype: str
+
+    """
+    if csar.is_csar_archive(archive):
+        service_template_file = _extract_csar_archive(archive)
+    else:
+        extract_directory = archive_utils.extract_archive(archive)
+        service_template_dir = os.path.join(
+            extract_directory,
+            os.listdir(extract_directory)[0],
+        )
+        service_template_file = os.path.join(service_template_dir, service_template_filename)
+
+    if not os.path.isfile(service_template_file):
+        raise AriaCliError(
+            'Could not find `{0}`. Please provide the name of the main '
+            'service template file by using the `-n/--service-template-filename` flag'
+            .format(service_template_filename))
+    return service_template_file
+
+
+def _map_to_github_url(source):
+    """Returns a path to a downloaded github archive.
+
+    :param source: github repo in the format of `org/repo[:tag/branch]`.
+    :type source: str
+    :return: URL to the archive file for the given repo in github
+    :rtype: str
+
+    """
+    source_parts = source.split(':', 1)
+    repo = source_parts[0]
+    tag = source_parts[1] if len(source_parts) == 2 else 'master'
+    url = 'https://github.com/{0}/archive/{1}.tar.gz'.format(repo, tag)
+    return url
+
+
+# def generate_id(service_template_path,
+#                 service_template_filename=DEFAULT_SERVICE_TEMPLATE_FILENAME):
+#     """The name of the service template will be the name of the folder.
+#     If service_template_filename is provided, it will be appended to the folder.
+#     """
+#     service_template_id = os.path.split(os.path.dirname(os.path.abspath(
+#         service_template_path)))[-1]
+#     if service_template_filename != DEFAULT_SERVICE_TEMPLATE_FILENAME:
+#         filename, _ = os.path.splitext(os.path.basename(service_template_filename))
+#         service_template_id = (service_template_id + '.' + filename)
+#     return service_template_id.replace('_', '-')
+
+
+def _is_archive(source):
+    return archive_utils.is_archive(source) or csar.is_csar_archive(source)
+
+
+def _extract_csar_archive(archive):
+    if csar.is_csar_archive(archive):
+        reader = csar.read(source=archive)
+        main_service_template_file_name = os.path.basename(reader.entry_definitions)
+        return os.path.join(reader.destination,
+                            main_service_template_file_name)
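
The github mapping above is deterministic and easy to sanity-check; note that
`_map_to_github_url` is private and used here purely for illustration:

    from aria.cli.service_template_utils import _map_to_github_url

    print(_map_to_github_url('apache/incubator-ariatosca'))
    # https://github.com/apache/incubator-ariatosca/archive/master.tar.gz

    print(_map_to_github_url('apache/incubator-ariatosca:0.1.0'))
    # https://github.com/apache/incubator-ariatosca/archive/0.1.0.tar.gz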

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/storage.py
----------------------------------------------------------------------
diff --git a/aria/cli/storage.py b/aria/cli/storage.py
deleted file mode 100644
index fa1518b..0000000
--- a/aria/cli/storage.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Filesystem related CLI storage location and configuration
-"""
-
-import os
-import getpass
-from shutil import rmtree
-
-work_space_directory = '.aria'
-storage_directory_name = 'local-storage'
-
-
-def user_space(user_name=getpass.getuser()):
-    """
-    Base work directory
-    """
-    user_path = '~{0}'.format(user_name)
-    real_path = os.path.expanduser(user_path)
-    if os.path.exists(real_path):
-        return os.path.join(real_path, work_space_directory)
-    return os.path.join(os.getcwd(), work_space_directory)
-
-
-def local_storage(user_name=getpass.getuser()):
-    """
-    Base storage directory
-    """
-    return os.path.join(user_space(user_name), storage_directory_name)
-
-
-def local_model_storage():
-    """
-    Model storage directory
-    """
-    return os.path.join(local_storage(), 'models')
-
-
-def local_resource_storage():
-    """
-    Resource storage directory
-    """
-    return os.path.join(local_storage(), 'resources')
-
-
-def config_file_path():
-    """
-    Configuration file path
-    """
-    path = os.path.join(user_space(), 'config.yaml')
-    if not os.path.exists(path):
-        open(path, 'w').close()
-    return path
-
-
-def create_user_space(user_name=getpass.getuser(), override=False):
-    """
-    Creates the base work directory
-    """
-    path = user_space(user_name)
-    if os.path.exists(path):
-        if override:
-            rmtree(path, ignore_errors=True)
-        else:
-            raise IOError('user space {0} already exists'.format(path))
-    os.mkdir(path)
-    return path
-
-
-def create_local_storage(user_name=getpass.getuser(), override=False):
-    """
-    Creates the base storage directory
-    """
-    path = local_storage(user_name)
-    if os.path.exists(path):
-        if override:
-            rmtree(path, ignore_errors=True)
-        else:
-            raise IOError('local storage {0} already exists'.format(path))
-    os.mkdir(path)
-    return path

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/table.py
----------------------------------------------------------------------
diff --git a/aria/cli/table.py b/aria/cli/table.py
new file mode 100644
index 0000000..36dcbea
--- /dev/null
+++ b/aria/cli/table.py
@@ -0,0 +1,90 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from datetime import datetime
+
+from prettytable import PrettyTable
+
+from .env import logger
+
+
+def generate(cols, data, defaults=None):
+    """
+    Return a new PrettyTable instance representing the list.
+
+    Arguments:
+
+        cols - An iterable of strings that specify
+               the columns of the table.
+
+               for example: ['id','name']
+
+        data - An iterable of dictionaries; each dictionary must
+               have keys corresponding to the cols items.
+
+               for example: [{'id':'123', 'name':'Pete'}]
+
+        defaults - A dictionary specifying default values for
+                   keys that don't exist in the data itself.
+
+                   for example: {'serviceId':'123'} will set the
+                   serviceId value for all rows to '123'.
+
+    """
+    def get_values_per_column(column, row_data):
+        if column in row_data:
+            if row_data[column] and isinstance(row_data[column], basestring):
+                try:
+                    datetime.strptime(row_data[column][:10], '%Y-%m-%d')
+                    row_data[column] = \
+                        row_data[column].replace('T', ' ').replace('Z', ' ')
+                except ValueError:
+                    # not a timestamp
+                    pass
+            elif row_data[column] and isinstance(row_data[column], list):
+                row_data[column] = ','.join(row_data[column])
+            elif not row_data[column]:
+                # if it's empty list, don't print []
+                row_data[column] = ''
+            return row_data[column]
+        else:
+            return defaults[column]
+
+    pretty_table = PrettyTable([col for col in cols])
+
+    for datum in data:
+        values_row = []
+        for col in cols:
+            values_row.append(get_values_per_column(col, datum))
+        pretty_table.add_row(values_row)
+
+    return pretty_table
+
+
+def log(title, table):
+    logger.info('{0}{1}{0}{2}{0}'.format(os.linesep, title, table))
+
+
+def print_data(columns, items, header_text, max_width=None, defaults=None):
+    if items is None:
+        items = []
+    elif not isinstance(items, list):
+        items = [items]
+
+    pretty_table = generate(columns, data=items, defaults=defaults)
+    if max_width:
+        pretty_table.max_width = max_width
+    log(header_text, pretty_table)
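
A small usage sketch for the table helpers above; the row content and the default
column value are illustrative:

    from aria.cli.table import print_data

    columns = ['id', 'name', 'service_template_name']
    rows = [{'id': '1', 'name': 'my-service'}]
    print_data(columns, rows, 'Services:',
               defaults={'service_template_name': 'my-template'})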

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/utils.py
----------------------------------------------------------------------
diff --git a/aria/cli/utils.py b/aria/cli/utils.py
new file mode 100644
index 0000000..fad1b07
--- /dev/null
+++ b/aria/cli/utils.py
@@ -0,0 +1,161 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import string
+import random
+import tempfile
+from StringIO import StringIO
+
+from backports.shutil_get_terminal_size import get_terminal_size
+import requests
+
+from .env import logger
+from .exceptions import AriaCliError
+
+
+def dump_to_file(collection, file_path):
+    with open(file_path, 'a') as f:
+        f.write(os.linesep.join(collection))
+        f.write(os.linesep)
+
+
+def is_virtual_env():
+    return hasattr(sys, 'real_prefix')
+
+
+def storage_sort_param(sort_by, descending):
+    return {sort_by: 'desc' if descending else 'asc'}
+
+
+def generate_random_string(size=6,
+                           chars=string.ascii_uppercase + string.digits):
+    return ''.join(random.choice(chars) for _ in range(size))
+
+
+def generate_suffixed_id(id):
+    return '{0}_{1}'.format(id, generate_random_string())
+
+
+def get_parameter_templates_as_string(parameter_templates):
+    params_string = StringIO()
+
+    for param_name, param_template in parameter_templates.iteritems():
+        params_string.write('\t{0}:{1}'.format(param_name, os.linesep))
+        param_dict = param_template.to_dict()
+        del param_dict['id']  # not interested in printing the id
+        for k, v in param_dict.iteritems():
+            params_string.write('\t\t{0}: {1}{2}'.format(k, v, os.linesep))
+
+    params_string.write(os.linesep)
+    return params_string.getvalue()
+
+
+def download_file(url, destination=None):
+    """Download file.
+
+    :param url: Location of the file to download
+    :type url: str
+    :param destination:
+        Location where the file should be saved (autogenerated by default)
+    :type destination: str | None
+    :returns: Location where the file was saved
+    :rtype: str
+
+    """
+    chunk_size = 1024
+
+    if not destination:
+        file_descriptor, destination = tempfile.mkstemp()
+        os.close(file_descriptor)
+    logger.info('Downloading {0} to {1}...'.format(url, destination))
+
+    try:
+        response = requests.get(url, stream=True)
+    except requests.exceptions.RequestException as ex:
+        raise AriaCliError(
+            'Failed to download {0}. ({1})'.format(url, str(ex)))
+
+    final_url = response.url
+    if final_url != url:
+        logger.debug('Redirected to {0}'.format(final_url))
+
+    try:
+        with open(destination, 'wb') as destination_file:
+            for chunk in response.iter_content(chunk_size):
+                destination_file.write(chunk)
+    except IOError as ex:
+        raise AriaCliError(
+            'Failed to download {0}. ({1})'.format(url, str(ex)))
+
+    return destination
+
+
+def generate_progress_handler(file_path, action='', max_bar_length=80):
+    """Returns a function that prints a progress bar in the terminal
+
+    :param file_path: The name of the file being transferred
+    :param action: Uploading/Downloading
+    :param max_bar_length: Maximum allowed length of the bar. Default: 80
+    :return: The configured print_progress function
+    """
+    # We want to limit the maximum line length to 80, but allow for a smaller
+    # terminal size. We also account for the action string and some extra chars
+    terminal_width = get_terminal_size().columns
+
+    # This takes care of the case where there is no terminal (e.g. unittest)
+    terminal_width = terminal_width or max_bar_length
+    bar_length = min(max_bar_length, terminal_width) - len(action) - 12
+
+    # Shorten the file name if it's too long
+    file_name = os.path.basename(file_path)
+    if len(file_name) > (bar_length / 4) + 3:
+        file_name = file_name[:bar_length / 4] + '...'
+
+    bar_length -= len(file_name)
+
+    def print_progress(read_bytes, total_bytes):
+        """Print upload/download progress on a single line
+
+        Call this function in a loop to create a progress bar in the terminal
+
+        :param read_bytes: Number of bytes already processed
+        :param total_bytes: Total number of bytes in the file
+        """
+
+        filled_length = min(bar_length, int(round(bar_length * read_bytes /
+                                                  float(total_bytes))))
+        percents = min(100.00, round(
+            100.00 * (read_bytes / float(total_bytes)), 2))
+        bar = '#' * filled_length + '-' * (bar_length - filled_length)  # pylint: disable=blacklisted-name
+
+        # The \r carriage return moves the cursor back to the beginning of
+        # the line
+        sys.stdout.write('\r{0} {1} |{2}| {3}%'.format(
+            action, file_name, bar, percents))
+        if read_bytes >= total_bytes:
+            sys.stdout.write(os.linesep)
+
+    return print_progress
+
+
+def handle_storage_exception(e, model_class, name):
+    if 'UNIQUE constraint failed' in e.message:
+        msg = 'Could not store {model_class} `{name}`{linesep}' \
+              'There already exists a {model_class} with the same name' \
+              .format(model_class=model_class, name=name, linesep=os.linesep)
+        raise AriaCliError(msg)
+    raise AriaCliError()
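
A usage sketch for the progress handler added above (illustrative, not part
of the diff; the file name and byte counts are made up):

    from aria.cli.utils import generate_progress_handler

    progress = generate_progress_handler('/tmp/example.csar', action='Downloading')
    total_bytes = 1000000
    for read_bytes in range(0, total_bytes + 1, 100000):
        progress(read_bytes, total_bytes)  # redraws the bar in place via '\r'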

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/core.py
----------------------------------------------------------------------
diff --git a/aria/core.py b/aria/core.py
new file mode 100644
index 0000000..0be53c6
--- /dev/null
+++ b/aria/core.py
@@ -0,0 +1,120 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import exceptions
+from .parser import consumption
+from .parser.loading.location import UriLocation
+from .storage import exceptions as storage_exceptions
+
+
+class Core(object):
+
+    def __init__(self,
+                 model_storage,
+                 resource_storage,
+                 plugin_manager):
+        self._model_storage = model_storage
+        self._resource_storage = resource_storage
+        self._plugin_manager = plugin_manager
+
+    @property
+    def model_storage(self):
+        return self._model_storage
+
+    @property
+    def resource_storage(self):
+        return self._resource_storage
+
+    @property
+    def plugin_manager(self):
+        return self._plugin_manager
+
+    def validate_service_template(self, service_template_path):
+        self._parse_service_template(service_template_path)
+
+    def create_service_template(self, service_template_path, service_template_dir,
+                                service_template_name):
+        context = self._parse_service_template(service_template_path)
+        service_template = context.modeling.template
+        service_template.name = service_template_name
+        self.model_storage.service_template.put(service_template)
+        self.resource_storage.service_template.upload(
+            entry_id=str(service_template.id), source=service_template_dir)
+
+    def delete_service_template(self, service_template_id):
+        service_template = self.model_storage.service_template.get(service_template_id)
+        if service_template.services:
+            raise exceptions.DependentServicesError(
+                "Can't delete service template {0} - service template has existing services"
+                .format(service_template_id))
+
+        self.model_storage.service_template.delete(service_template)
+        self.resource_storage.service_template.delete(entry_id=str(service_template.id))
+
+    def create_service(self, service_template_id, inputs, service_name=None):
+
+        service_template = self.model_storage.service_template.get(service_template_id)
+
+        # creating an empty ConsumptionContext registers it as the thread-local context
+        consumption.ConsumptionContext()
+        # disabling autoflush for the duration of instantiation - this avoids premature
+        # enforcement of dependency constraints while they are still being set up
+        with self.model_storage._all_api_kwargs['session'].no_autoflush:
+            service = service_template.instantiate(None, inputs)
+
+        # If the user didn't enter a name for this service, we'll want to auto-generate it.
+        # But how do we ensure a unique yet simple name? We append the service's unique id
+        # to the service template's name. Since this service is not in storage yet, we first
+        # put it there and then pull out its id.
+        self.model_storage.service.put(service)
+        service.name = service_name or '{0}_{1}'.format(service_template.name, service.id)
+        try:
+            self.model_storage.service.update(service)
+        except storage_exceptions.StorageError:
+            self.model_storage.service.delete(service)
+            raise
+        return service
+
+    def delete_service(self, service_id, force=False):
+        service = self.model_storage.service.get(service_id)
+
+        active_executions = [e for e in service.executions if e.is_active()]
+        if active_executions:
+            raise exceptions.DependentActiveExecutionsError(
+                "Can't delete service {0} - there is an active execution for this service. "
+                "Active execution id: {1}".format(service.name, active_executions[0].id))
+
+        if not force:
+            available_nodes = [str(n.id) for n in service.nodes.values() if n.is_available()]
+            if available_nodes:
+                raise exceptions.DependentAvailableNodesError(
+                    "Can't delete service {0} - there are available nodes for this service. "
+                    "Available node ids: {1}".format(service.name, ', '.join(available_nodes)))
+
+        self.model_storage.service.delete(service)
+
+    @staticmethod
+    def _parse_service_template(service_template_path):
+        context = consumption.ConsumptionContext()
+        context.presentation.location = UriLocation(service_template_path)
+        consumption.ConsumerChain(
+            context,
+            (
+                consumption.Read,
+                consumption.Validate,
+                consumption.ServiceTemplate
+            )).consume()
+        if context.validation.dump_issues():
+            raise exceptions.ParsingError('Failed to parse service template')
+        return context
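
A sketch of the intended call flow for Core (illustrative; the storages and
plugin manager are assumed to be pre-built, e.g. by the CLI environment, and
the paths, names and template id are made up):

    from aria.core import Core

    def redeploy(model_storage, resource_storage, plugin_manager, template_path):
        core = Core(model_storage, resource_storage, plugin_manager)
        core.validate_service_template(template_path)
        core.create_service_template(template_path, '.', 'my-template')
        # the template id would normally be looked up in model storage; 1 is
        # illustrative. create_service auto-names the service if no name is given
        service = core.create_service(service_template_id=1, inputs={})
        core.delete_service(service.id)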

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/exceptions.py b/aria/exceptions.py
index a180ce1..bdf9f78 100644
--- a/aria/exceptions.py
+++ b/aria/exceptions.py
@@ -44,3 +44,28 @@ class AriaException(Exception):
                 # Make sure it's our traceback
                 cause_traceback = traceback
         self.cause_traceback = cause_traceback
+
+
+class DependentServicesError(AriaError):
+    """
+    Raised when attempting to delete a service template which has existing services
+    """
+    pass
+
+
+class DependentActiveExecutionsError(AriaError):
+    """
+    Raised when attempting to delete a service which has active executions
+    """
+    pass
+
+
+class DependentAvailableNodesError(AriaError):
+    """
+    Raised when attempting to delete a service which has available nodes
+    """
+    pass
+
+
+class ParsingError(AriaError):
+    pass

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/logger.py
----------------------------------------------------------------------
diff --git a/aria/logger.py b/aria/logger.py
index e3039f5..dd54264 100644
--- a/aria/logger.py
+++ b/aria/logger.py
@@ -19,8 +19,20 @@ Logging related mixins and functions
 
 import logging
 from logging import handlers as logging_handlers
+# NullHandler doesn't exist in Python < 2.7. This workaround is from
+# http://docs.python.org/release/2.6/library/logging.html#configuring-logging-for-a-library
+try:
+    from logging import NullHandler                                                                 # pylint: disable=unused-import
+except ImportError:
+    class NullHandler(logging.Handler):
+        def emit(self, record):
+            pass
 from datetime import datetime
 
+
+TASK_LOGGER_NAME = 'aria.executions.task'
+
+
 _base_logger = logging.getLogger('aria')
 
 
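
The NullHandler guard above enables the standard library-logging recipe; a
minimal sketch (NullHandler and the 'aria' base logger name are the ones
defined in this module):

    import logging
    from aria.logger import NullHandler

    # a library attaches a NullHandler so applications that configure no
    # logging of their own don't get "no handlers could be found" warnings
    logging.getLogger('aria').addHandler(NullHandler())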

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/aria/modeling/__init__.py b/aria/modeling/__init__.py
index 4dfc39d..4ac79e7 100644
--- a/aria/modeling/__init__.py
+++ b/aria/modeling/__init__.py
@@ -19,6 +19,7 @@ from . import (
     mixins,
     types,
     models,
+    utils,
     service_template as _service_template_bases,
     service_instance as _service_instance_bases,
     service_changes as _service_changes_bases,
@@ -45,4 +46,5 @@ __all__ = (
     'types',
     'models',
     'model_bases',
+    'utils'
 )

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/modeling/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/modeling/exceptions.py b/aria/modeling/exceptions.py
index 6931c78..8225f37 100644
--- a/aria/modeling/exceptions.py
+++ b/aria/modeling/exceptions.py
@@ -32,3 +32,21 @@ class CannotEvaluateFunctionException(ModelingException):
     """
     ARIA modeling exception: cannot evaluate the function at this time.
     """
+
+
+class MissingRequiredInputsException(ModelingException):
+    """
+    ARIA modeling exception: Required inputs have been omitted
+    """
+
+
+class InputsOfWrongTypeException(ModelingException):
+    """
+    ARIA modeling exception: Inputs of the wrong types have been provided
+    """
+
+
+class UndeclaredInputsException(ModelingException):
+    """
+    ARIA modeling exception: Undeclared inputs have been provided
+    """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/modeling/models.py
----------------------------------------------------------------------
diff --git a/aria/modeling/models.py b/aria/modeling/models.py
index 170efb2..584b877 100644
--- a/aria/modeling/models.py
+++ b/aria/modeling/models.py
@@ -16,6 +16,10 @@
 # pylint: disable=abstract-method
 
 from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import (
+    Column,
+    Text
+)
 
 from . import (
     service_template,
@@ -26,7 +30,6 @@ from . import (
     mixins,
 )
 
-
 aria_declarative_base = declarative_base(cls=mixins.ModelIDMixin)
 
 
@@ -84,7 +87,7 @@ __all__ = (
 # region service template models
 
 class ServiceTemplate(aria_declarative_base, service_template.ServiceTemplateBase):
-    pass
+    name = Column(Text, index=True, unique=True)
 
 
 class NodeTemplate(aria_declarative_base, service_template.NodeTemplateBase):
@@ -140,7 +143,7 @@ class PluginSpecification(aria_declarative_base, service_template.PluginSpecific
 # region service instance models
 
 class Service(aria_declarative_base, service_instance.ServiceBase):
-    pass
+    name = Column(Text, index=True, unique=True)
 
 
 class Node(aria_declarative_base, service_instance.NodeBase):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/modeling/orchestration.py
----------------------------------------------------------------------
diff --git a/aria/modeling/orchestration.py b/aria/modeling/orchestration.py
index b32a8a1..a2f041b 100644
--- a/aria/modeling/orchestration.py
+++ b/aria/modeling/orchestration.py
@@ -39,7 +39,6 @@ from sqlalchemy.ext.associationproxy import association_proxy
 from sqlalchemy.ext.declarative import declared_attr
 
 from ..orchestrator.exceptions import (TaskAbortException, TaskRetryException)
-from .types import Dict
 from .mixins import ModelMixin
 from . import (
     relationship,
@@ -55,9 +54,7 @@ class ExecutionBase(ModelMixin):
     __tablename__ = 'execution'
 
     __private_fields__ = ['service_fk',
-                          'service_name',
-                          'service_template',
-                          'service_template_name']
+                          'service_template']
 
     TERMINATED = 'terminated'
     FAILED = 'failed'
@@ -97,17 +94,14 @@ class ExecutionBase(ModelMixin):
     ended_at = Column(DateTime, nullable=True, index=True)
     error = Column(Text, nullable=True)
     is_system_workflow = Column(Boolean, nullable=False, default=False)
-    parameters = Column(Dict)
     status = Column(Enum(*STATES, name='execution_status'), default=PENDING)
     workflow_name = Column(Text)
 
-    @property
     def has_ended(self):
         return self.status in self.END_STATES
 
-    @property
     def is_active(self):
-        return not self.has_ended
+        return not self.has_ended()
 
     @declared_attr
     def logs(cls):
@@ -121,6 +115,10 @@ class ExecutionBase(ModelMixin):
     def tasks(cls):
         return relationship.one_to_many(cls, 'task')
 
+    @declared_attr
+    def inputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+
     # region foreign keys
 
     @declared_attr
@@ -264,10 +262,7 @@ class TaskBase(ModelMixin):
     __private_fields__ = ['node_fk',
                           'relationship_fk',
                           'plugin_fk',
-                          'execution_fk'
-                          'node_name',
-                          'relationship_name',
-                          'execution_name']
+                          'execution_fk']
 
     PENDING = 'pending'
     RETRYING = 'retrying'
@@ -322,11 +317,9 @@ class TaskBase(ModelMixin):
     ended_at = Column(DateTime, default=None)
     retry_count = Column(Integer, default=0)
 
-    @property
     def has_ended(self):
         return self.status in (self.SUCCESS, self.FAILED)
 
-    @property
     def is_waiting(self):
         return self.status in (self.PENDING, self.RETRYING)
 
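
Since has_ended/is_active (and has_ended/is_waiting on tasks) are now methods
rather than properties, call sites need parentheses. A tiny sketch (creating
a transient Execution with only a status, which SQLAlchemy permits before
flush):

    from aria.modeling import models

    execution = models.Execution(status=models.Execution.PENDING)
    assert execution.is_active() and not execution.has_ended()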



[7/9] incubator-ariatosca git commit: ARIA-48 cli

Posted by ra...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/modeling/service_changes.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_changes.py b/aria/modeling/service_changes.py
index b1a75a2..1974424 100644
--- a/aria/modeling/service_changes.py
+++ b/aria/modeling/service_changes.py
@@ -45,9 +45,7 @@ class ServiceUpdateBase(ModelMixin):
     __tablename__ = 'service_update'
 
     __private_fields__ = ['service_fk',
-                          'execution_fk',
-                          'execution_name',
-                          'service_name']
+                          'execution_fk']
 
     created_at = Column(DateTime, nullable=False, index=True)
     service_plan = Column(Dict, nullable=False)
@@ -125,8 +123,7 @@ class ServiceUpdateStepBase(ModelMixin):
 
     __tablename__ = 'service_update_step'
 
-    __private_fields__ = ['service_update_fk',
-                          'service_update_name']
+    __private_fields__ = ['service_update_fk']
 
     _action_types = namedtuple('ACTION_TYPES', 'ADD, REMOVE, MODIFY')
     ACTION_TYPES = _action_types(ADD='add', REMOVE='remove', MODIFY='modify')
@@ -222,8 +219,7 @@ class ServiceModificationBase(ModelMixin):
 
     __tablename__ = 'service_modification'
 
-    __private_fields__ = ['service_fk',
-                          'service_name']
+    __private_fields__ = ['service_fk']
 
     STARTED = 'started'
     FINISHED = 'finished'

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/modeling/service_common.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_common.py b/aria/modeling/service_common.py
index 1fcbc5f..e7fda29 100644
--- a/aria/modeling/service_common.py
+++ b/aria/modeling/service_common.py
@@ -87,6 +87,15 @@ class ParameterBase(TemplateModelMixin):
         if self.description:
             console.puts(context.style.meta(self.description))
 
+    @staticmethod
+    def unwrap_dict(parameters_dict):
+        """
+        Takes a parameters dict and simplifies it into a plain key-value dict
+        :param parameters_dict: a parameter-name to parameter dict
+        :return: a parameter-name to parameter value dict
+        """
+        return dict((k, v.value) for k, v in parameters_dict.iteritems())
+
     @classmethod
     def wrap(cls, name, value, description=None):
         """
@@ -98,13 +107,11 @@ class ParameterBase(TemplateModelMixin):
         :param description: Description (optional)
         :type description: basestring
         """
-
-        from . import models
-        return models.Parameter(name=name,
-                                type_name=formatting.full_type_name(value)
-                                if value is not None else None,
-                                value=value,
-                                description=description)
+        return cls(name=name,
+                   type_name=formatting.full_type_name(value)
+                   if value is not None else None,
+                   value=value,
+                   description=description)
 
 
 class TypeBase(InstanceModelMixin):
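
A round-trip sketch for the wrap/unwrap_dict pair above (values are
illustrative; Parameter is the concrete model from aria.modeling.models):

    from aria.modeling.models import Parameter

    params = {'port': Parameter.wrap('port', 8080, description='listen port'),
              'host': Parameter.wrap('host', 'localhost')}
    assert Parameter.unwrap_dict(params) == {'port': 8080, 'host': 'localhost'}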

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/modeling/service_instance.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py
index 40d43fa..6d8f3fe 100644
--- a/aria/modeling/service_instance.py
+++ b/aria/modeling/service_instance.py
@@ -88,8 +88,7 @@ class ServiceBase(InstanceModelMixin):
     __tablename__ = 'service'
 
     __private_fields__ = ['substitution_fk',
-                          'service_template_fk',
-                          'service_template_name']
+                          'service_template_fk']
 
     # region foreign keys
 
@@ -371,8 +370,7 @@ class NodeBase(InstanceModelMixin):
     __private_fields__ = ['type_fk',
                           'host_fk',
                           'service_fk',
-                          'node_template_fk',
-                          'service_name']
+                          'node_template_fk']
 
     INITIAL = 'initial'
     CREATING = 'creating'
@@ -417,7 +415,6 @@ class NodeBase(InstanceModelMixin):
         except KeyError:
             return None
 
-    @property
     def is_available(self):
         return self.state not in (self.INITIAL, self.DELETED, self.ERROR)
 
@@ -452,6 +449,11 @@ class NodeBase(InstanceModelMixin):
         """Required for use by SQLAlchemy queries"""
         return association_proxy('service', 'name')
 
+    @declared_attr
+    def node_template_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('node_template', 'name')
+
     # endregion
 
     # region one_to_one relationships
@@ -1183,9 +1185,7 @@ class RelationshipBase(InstanceModelMixin):
                           'target_node_fk',
                           'target_capability_fk',
                           'requirement_template_fk',
-                          'relationship_template_fk',
-                          'source_node_name',
-                          'target_node_name']
+                          'relationship_template_fk']
 
     # region foreign keys
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
index 51fea2f..c2da22d 100644
--- a/aria/modeling/service_template.py
+++ b/aria/modeling/service_template.py
@@ -280,7 +280,7 @@ class ServiceTemplateBase(TemplateModelMixin):
             ('interface_types', formatting.as_raw(self.interface_types)),
             ('artifact_types', formatting.as_raw(self.artifact_types))))
 
-    def instantiate(self, container):
+    def instantiate(self, container, inputs=None):  # pylint: disable=arguments-differ
         from . import models
         context = ConsumptionContext.get_thread_local()
         now = datetime.now()
@@ -288,10 +288,11 @@ class ServiceTemplateBase(TemplateModelMixin):
                                  updated_at=now,
                                  description=deepcopy_with_locators(self.description),
                                  service_template=self)
-        #service.name = '{0}_{1}'.format(self.name, service.id)
-
         context.modeling.instance = service
 
+        service.inputs = utils.create_inputs(inputs or {}, self.inputs)
+        # TODO: now that we have inputs, we should scan properties and inputs and evaluate functions
+
         for plugin_specification in self.plugin_specifications.itervalues():
             if plugin_specification.enabled:
                 if plugin_specification.resolve():
@@ -316,15 +317,8 @@ class ServiceTemplateBase(TemplateModelMixin):
         if self.substitution_template is not None:
             service.substitution = self.substitution_template.instantiate(container)
 
-        utils.instantiate_dict(self, service.inputs, self.inputs)
         utils.instantiate_dict(self, service.outputs, self.outputs)
 
-        for name, the_input in context.modeling.inputs.iteritems():
-            if name not in service.inputs:
-                context.validation.report('input "{0}" is not supported'.format(name))
-            else:
-                service.inputs[name].value = the_input
-
         return service
 
     def validate(self):
@@ -448,8 +442,7 @@ class NodeTemplateBase(TemplateModelMixin):
     __tablename__ = 'node_template'
 
     __private_fields__ = ['type_fk',
-                          'service_template_fk',
-                          'service_template_name']
+                          'service_template_fk']
 
     # region foreign_keys
 
@@ -472,6 +465,11 @@ class NodeTemplateBase(TemplateModelMixin):
         """Required for use by SQLAlchemy queries"""
         return association_proxy('service_template', 'name')
 
+    @declared_attr
+    def type_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('type', 'name')
+
     # endregion
 
     # region one_to_one relationships
@@ -558,6 +556,7 @@ class NodeTemplateBase(TemplateModelMixin):
                            type=self.type,
                            description=deepcopy_with_locators(self.description),
                            state=models.Node.INITIAL,
+                           runtime_properties={},
                            node_template=self)
         utils.instantiate_dict(node, node.properties, self.properties)
         utils.instantiate_dict(node, node.interfaces, self.interface_templates)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/modeling/utils.py
----------------------------------------------------------------------
diff --git a/aria/modeling/utils.py b/aria/modeling/utils.py
index 0b4015c..91d7b9c 100644
--- a/aria/modeling/utils.py
+++ b/aria/modeling/utils.py
@@ -13,12 +13,100 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import os
+from json import JSONEncoder
+from StringIO import StringIO
+
+from . import exceptions
 from ..parser.consumption import ConsumptionContext
 from ..parser.exceptions import InvalidValueError
 from ..parser.presentation import Value
 from ..utils.collections import OrderedDict
 from ..utils.console import puts
-from .exceptions import CannotEvaluateFunctionException
+from ..utils.type import validate_value_type
+
+
+class ModelJSONEncoder(JSONEncoder):
+    def default(self, o):  # pylint: disable=method-hidden
+        from .mixins import ModelMixin
+        if isinstance(o, ModelMixin):
+            if hasattr(o, 'value'):
+                dict_to_return = o.to_dict(fields=('value',))
+                return dict_to_return['value']
+            else:
+                return o.to_dict()
+        else:
+            return JSONEncoder.default(self, o)
+
+
+def create_inputs(inputs, template_inputs):
+    """
+    :param inputs: key-value dict
+    :param template_inputs: parameter name to parameter object dict
+    :return: dict of parameter name to Parameter models
+    """
+    merged_inputs = _merge_and_validate_inputs(inputs, template_inputs)
+
+    from . import models
+    input_models = []
+    for input_name, input_val in merged_inputs.iteritems():
+        parameter = models.Parameter(
+            name=input_name,
+            type_name=template_inputs[input_name].type_name,
+            description=template_inputs[input_name].description,
+            value=input_val)
+        input_models.append(parameter)
+
+    return dict((inp.name, inp) for inp in input_models)
+
+
+def _merge_and_validate_inputs(inputs, template_inputs):
+    """
+    :param inputs: key-value dict
+    :param template_inputs: parameter name to parameter object dict
+    :return:
+    """
+    merged_inputs = inputs.copy()
+
+    missing_inputs = []
+    wrong_type_inputs = {}
+    for input_name, input_template in template_inputs.iteritems():
+        if input_name not in inputs:
+            if input_template.value is not None:
+                merged_inputs[input_name] = input_template.value  # apply default value
+            else:
+                missing_inputs.append(input_name)
+        else:
+            # Validate input type
+            try:
+                validate_value_type(inputs[input_name], input_template.type_name)
+            except ValueError:
+                wrong_type_inputs[input_name] = input_template.type_name
+            except RuntimeError:
+                # TODO: This error shouldn't be raised (or caught), but right now we lack support
+                # for custom data_types, which will raise this error. Skipping their validation.
+                pass
+
+    if missing_inputs:
+        raise exceptions.MissingRequiredInputsException(
+            'Required inputs {0} have not been specified - expected inputs: {1}'
+            .format(missing_inputs, template_inputs.keys()))
+
+    if wrong_type_inputs:
+        error_message = StringIO()
+        for param_name, param_type in wrong_type_inputs.iteritems():
+            error_message.write('Input "{0}" must be of type {1}{2}'
+                                .format(param_name, param_type, os.linesep))
+        raise exceptions.InputsOfWrongTypeException(error_message.getvalue())
+
+    undeclared_inputs = [input_name for input_name in inputs.keys()
+                         if input_name not in template_inputs]
+    if undeclared_inputs:
+        raise exceptions.UndeclaredInputsException(
+            'Undeclared inputs have been specified: {0}; expected inputs: {1}'
+            .format(undeclared_inputs, template_inputs.keys()))
+
+    return merged_inputs
 
 
 def coerce_value(container, value, report_issues=False):
@@ -35,7 +123,7 @@ def coerce_value(container, value, report_issues=False):
         try:
             value = value._evaluate(context, container)
             value = coerce_value(container, value, report_issues)
-        except CannotEvaluateFunctionException:
+        except exceptions.CannotEvaluateFunctionException:
             pass
         except InvalidValueError as e:
             if report_issues:
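
A behavior sketch for create_inputs and its validation (input names and
values are made up; the exceptions are the ones declared in this commit):

    from aria.modeling import exceptions, models
    from aria.modeling.utils import create_inputs

    declared = {'port': models.Parameter.wrap('port', 8080)}  # 8080 acts as the default

    merged = create_inputs({}, declared)       # omitted input -> default applied
    assert merged['port'].value == 8080

    try:
        create_inputs({'bogus': 1}, declared)  # undeclared input -> error
    except exceptions.UndeclaredInputsException as e:
        print(e)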

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/context/common.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/common.py b/aria/orchestrator/context/common.py
index 127641f..15843db 100644
--- a/aria/orchestrator/context/common.py
+++ b/aria/orchestrator/context/common.py
@@ -19,7 +19,6 @@ A common context for both workflow and operation
 
 import logging
 from contextlib import contextmanager
-from datetime import datetime
 from functools import partial
 
 import jinja2
@@ -55,6 +54,7 @@ class BaseContext(object):
             self,
             name,
             service_id,
+            execution_id,
             model_storage,
             resource_storage,
             workdir=None,
@@ -65,27 +65,17 @@ class BaseContext(object):
         self._model = model_storage
         self._resource = resource_storage
         self._service_id = service_id
+        self._execution_id = execution_id
         self._workdir = workdir
         self.logger = None
 
-    def _create_execution(self):
-        now = datetime.utcnow()
-        execution = self.model.execution.model_cls(
-            service_instance=self.service_instance,
-            workflow_name=self._workflow_name,
-            created_at=now,
-            parameters=self.parameters,
-        )
-        self.model.execution.put(execution)
-        return execution.id
-
-    def _register_logger(self, logger_name=None, level=None, task_id=None):
-        self.logger = self.PrefixedLogger(logging.getLogger(logger_name or self.__class__.__name__),
-                                          self.logging_id,
-                                          task_id=task_id)
-        self.logger.addHandler(aria_logger.create_console_log_handler())
-        self.logger.addHandler(self._get_sqla_handler())
+    def _register_logger(self, level=None, task_id=None):
+        self.logger = self.PrefixedLogger(
+            logging.getLogger(aria_logger.TASK_LOGGER_NAME), self.logging_id, task_id=task_id)
         self.logger.setLevel(level or logging.DEBUG)
+        if not self.logger.handlers:
+            self.logger.addHandler(aria_logger.create_console_log_handler())
+            self.logger.addHandler(self._get_sqla_handler())
 
     def _get_sqla_handler(self):
         api_kwargs = {}
@@ -168,13 +158,13 @@ class BaseContext(object):
         Download a blueprint resource from the resource storage
         """
         try:
-            self.resource.deployment.download(entry_id=str(self.service.id),
-                                              destination=destination,
-                                              path=path)
+            self.resource.service.download(entry_id=str(self.service.id),
+                                           destination=destination,
+                                           path=path)
         except exceptions.StorageError:
-            self.resource.blueprint.download(entry_id=str(self.service_template.id),
-                                             destination=destination,
-                                             path=path)
+            self.resource.service_template.download(entry_id=str(self.service_template.id),
+                                                    destination=destination,
+                                                    path=path)
 
     def download_resource_and_render(self, destination, path=None, variables=None):
         """
@@ -193,9 +183,10 @@ class BaseContext(object):
         Read a deployment resource as string from the resource storage
         """
         try:
-            return self.resource.deployment.read(entry_id=str(self.service.id), path=path)
+            return self.resource.service.read(entry_id=str(self.service.id), path=path)
         except exceptions.StorageError:
-            return self.resource.deployment.read(entry_id=str(self.service_template.id), path=path)
+            return self.resource.service_template.read(entry_id=str(self.service_template.id),
+                                                       path=path)
 
     def get_resource_and_render(self, path=None, variables=None):
         """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/context/operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/operation.py b/aria/orchestrator/context/operation.py
index cbd186c..c7d8246 100644
--- a/aria/orchestrator/context/operation.py
+++ b/aria/orchestrator/context/operation.py
@@ -36,7 +36,6 @@ class BaseOperationContext(BaseContext):
                  service_id,
                  task_id,
                  actor_id,
-                 execution_id,
                  **kwargs):
         super(BaseOperationContext, self).__init__(
             name=name,
@@ -47,7 +46,6 @@ class BaseOperationContext(BaseContext):
         self._task_id = task_id
         self._actor_id = actor_id
         self._thread_local = threading.local()
-        self._execution_id = execution_id
         self._register_logger(task_id=self.task.id)
 
     def __repr__(self):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/context/workflow.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/workflow.py b/aria/orchestrator/context/workflow.py
index 5f86d9d..667d22f 100644
--- a/aria/orchestrator/context/workflow.py
+++ b/aria/orchestrator/context/workflow.py
@@ -19,7 +19,6 @@ Workflow and operation contexts
 
 import threading
 from contextlib import contextmanager
-from datetime import datetime
 
 from .exceptions import ContextException
 from .common import BaseContext
@@ -35,36 +34,21 @@ class WorkflowContext(BaseContext):
                  task_max_attempts=1,
                  task_retry_interval=0,
                  task_ignore_failure=False,
-                 execution_id=None,
                  *args, **kwargs):
         super(WorkflowContext, self).__init__(*args, **kwargs)
         self._workflow_name = workflow_name
-        self.parameters = parameters or {}
+        self._parameters = parameters or {}
         self._task_max_attempts = task_max_attempts
         self._task_retry_interval = task_retry_interval
         self._task_ignore_failure = task_ignore_failure
-        # TODO: execution creation should happen somewhere else
-        # should be moved there, when such logical place exists
-        self._execution_id = execution_id or self._create_execution()
         self._register_logger()
 
     def __repr__(self):
         return (
             '{name}(deployment_id={self._service_id}, '
-            'workflow_name={self._workflow_name}'.format(
+            'workflow_name={self._workflow_name}, execution_id={self._execution_id})'.format(
                 name=self.__class__.__name__, self=self))
 
-    def _create_execution(self):
-        now = datetime.utcnow()
-        execution = self.model.execution.model_cls(
-            service=self.service,
-            workflow_name=self._workflow_name,
-            created_at=now,
-            parameters=self.parameters,
-        )
-        self.model.execution.put(execution)
-        return execution.id
-
     @property
     def logging_id(self):
         return '{0}[{1}]'.format(self._workflow_name, self._execution_id)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/exceptions.py b/aria/orchestrator/exceptions.py
index c00b66b..fd3b66d 100644
--- a/aria/orchestrator/exceptions.py
+++ b/aria/orchestrator/exceptions.py
@@ -46,3 +46,24 @@ class TaskAbortException(RuntimeError):
     Used internally when ctx.task.abort is called
     """
     pass
+
+
+class UndeclaredWorkflowError(AriaError):
+    """
+    Raised when attempting to execute an undeclared workflow
+    """
+    pass
+
+
+class ActiveExecutionsError(AriaError):
+    """
+    Raised when attempting to execute a workflow on a service which already has an active execution
+    """
+    pass
+
+
+class WorkflowImplementationNotFoundError(AriaError):
+    """
+    Raised when attempting to import a workflow's code but the implementation is not found
+    """
+    pass

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/execution_plugin/ctx_proxy/server.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/ctx_proxy/server.py b/aria/orchestrator/execution_plugin/ctx_proxy/server.py
index 817d064..52a5312 100644
--- a/aria/orchestrator/execution_plugin/ctx_proxy/server.py
+++ b/aria/orchestrator/execution_plugin/ctx_proxy/server.py
@@ -24,6 +24,7 @@ import StringIO
 import wsgiref.simple_server
 
 import bottle
+from aria import modeling
 
 from .. import exceptions
 
@@ -111,7 +112,7 @@ class CtxProxy(object):
             result = json.dumps({
                 'type': result_type,
                 'payload': payload
-            })
+            }, cls=modeling.utils.ModelJSONEncoder)
         except Exception as e:
             traceback_out = StringIO.StringIO()
             traceback.print_exc(file=traceback_out)
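
What the encoder change buys: value-carrying models such as Parameter now
serialize to their plain values in ctx-proxy responses. A sketch (the value
is illustrative):

    import json
    from aria.modeling import models
    from aria.modeling.utils import ModelJSONEncoder

    param = models.Parameter.wrap('port', 8080)
    print(json.dumps({'payload': param}, cls=ModelJSONEncoder))  # {"payload": 8080}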

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/plugin.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/plugin.py b/aria/orchestrator/plugin.py
index d815754..d526e9c 100644
--- a/aria/orchestrator/plugin.py
+++ b/aria/orchestrator/plugin.py
@@ -46,8 +46,7 @@ class PluginManager(object):
             archive_name=metadata['archive_name'],
             supported_platform=metadata['supported_platform'],
             supported_py_versions=metadata['supported_python_versions'],
-            # Remove suffix colon after upgrading wagon to > 0.5.0
-            distribution=os_props.get('distribution:') or os_props.get('distribution'),
+            distribution=os_props.get('distribution'),
             distribution_release=os_props['distribution_version'],
             distribution_version=os_props['distribution_release'],
             package_name=metadata['package_name'],

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/runner.py b/aria/orchestrator/runner.py
deleted file mode 100644
index f1633fa..0000000
--- a/aria/orchestrator/runner.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Workflow runner
-"""
-
-import tempfile
-import os
-
-from .context.workflow import WorkflowContext
-from .workflows.core.engine import Engine
-from .workflows.executor.thread import ThreadExecutor
-from ..storage import (
-    sql_mapi,
-    filesystem_rapi,
-)
-from .. import (
-    application_model_storage,
-    application_resource_storage
-)
-
-
-class Runner(object):
-    """
-    Runs workflows on a deployment. By default uses temporary storage (either on disk or in memory)
-    but can also be used with existing storage.
-
-    Handles the initialization of the storage engine and provides convenience methods for
-    sub-classes to create tasks.
-
-    :param path: path to Sqlite database file; use '' (the default) to use a temporary file,
-                 and None to use an in-memory database
-    :type path: string
-    """
-
-    def __init__(self, workflow_name, workflow_fn, inputs, initialize_model_storage_fn,
-                 service_id_fn, storage_path='', is_storage_temporary=True):
-        if storage_path == '':
-            # Temporary file storage
-            the_file, storage_path = tempfile.mkstemp(suffix='.db', prefix='aria-')
-            os.close(the_file)
-
-        self._storage_path = storage_path
-        self._storage_dir = os.path.dirname(storage_path)
-        self._storage_name = os.path.basename(storage_path)
-        self._is_storage_temporary = is_storage_temporary
-
-        workflow_context = self.create_workflow_context(workflow_name, initialize_model_storage_fn,
-                                                        service_id_fn)
-
-        tasks_graph = workflow_fn(ctx=workflow_context, **inputs)
-
-        self._engine = Engine(
-            executor=ThreadExecutor(),
-            workflow_context=workflow_context,
-            tasks_graph=tasks_graph)
-
-    def run(self):
-        try:
-            self._engine.execute()
-        finally:
-            self.cleanup()
-
-    def create_workflow_context(self,
-                                workflow_name,
-                                initialize_model_storage_fn,
-                                service_id_fn):
-        self.cleanup()
-        model_storage = application_model_storage(
-            sql_mapi.SQLAlchemyModelAPI,
-            initiator_kwargs=dict(base_dir=self._storage_dir, filename=self._storage_name))
-        if initialize_model_storage_fn:
-            initialize_model_storage_fn(model_storage)
-        resource_storage = application_resource_storage(
-            filesystem_rapi.FileSystemResourceAPI, api_kwargs=dict(directory='.'))
-        return WorkflowContext(
-            name=workflow_name,
-            model_storage=model_storage,
-            resource_storage=resource_storage,
-            service_id=service_id_fn(),
-            workflow_name=self.__class__.__name__,
-            task_max_attempts=1,
-            task_retry_interval=1)
-
-    def cleanup(self):
-        if (self._is_storage_temporary and (self._storage_path is not None) and
-                os.path.isfile(self._storage_path)):
-            os.remove(self._storage_path)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/workflow_runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflow_runner.py b/aria/orchestrator/workflow_runner.py
new file mode 100644
index 0000000..1cdf1de
--- /dev/null
+++ b/aria/orchestrator/workflow_runner.py
@@ -0,0 +1,166 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Workflow runner
+"""
+
+import os
+import sys
+from datetime import datetime
+
+from . import exceptions
+from .context.workflow import WorkflowContext
+from .workflows.builtin import BUILTIN_WORKFLOWS, BUILTIN_WORKFLOWS_PATH_PREFIX
+from .workflows.core.engine import Engine
+from .workflows.executor.process import ProcessExecutor
+from ..modeling import models
+from ..modeling import utils as modeling_utils
+from ..utils.imports import import_fullname
+
+
+DEFAULT_TASK_MAX_ATTEMPTS = 1
+DEFAULT_TASK_RETRY_INTERVAL = 1
+# TODO move this constant somewhere in the DSL parser?
+WORKFLOW_POLICY_INTERNAL_PROPERTIES = ('implementation', 'dependencies')
+
+
+class WorkflowRunner(object):
+
+    def __init__(self, workflow_name, service_id, inputs,
+                 model_storage, resource_storage, plugin_manager,
+                 executor=None, task_max_attempts=DEFAULT_TASK_MAX_ATTEMPTS,
+                 task_retry_interval=DEFAULT_TASK_RETRY_INTERVAL):
+        """
+        Manages a single workflow execution on a given service
+        :param workflow_name: Workflow name
+        :param service_id: Service id
+        :param inputs: A key-value dict of inputs for the execution
+        :param model_storage: Model storage
+        :param resource_storage: Resource storage
+        :param plugin_manager: Plugin manager
+        :param executor: Executor for tasks. Defaults to a ProcessExecutor instance.
+        :param task_max_attempts: Maximum attempts of repeating each failing task
+        :param task_retry_interval: Retry interval in between retry attempts of a failing task
+        """
+
+        self._model_storage = model_storage
+        self._resource_storage = resource_storage
+        self._workflow_name = workflow_name
+
+        # the IDs are stored rather than the models themselves, so this module could be used
+        # by several threads without raising errors on model objects shared between threads
+        self._service_id = service_id
+
+        self._validate_workflow_exists_for_service()
+
+        workflow_fn = self._get_workflow_fn()
+
+        execution = self._create_execution_model(inputs)
+        self._execution_id = execution.id
+
+        workflow_context = WorkflowContext(
+            name=self.__class__.__name__,
+            model_storage=self._model_storage,
+            resource_storage=resource_storage,
+            service_id=service_id,
+            execution_id=execution.id,
+            workflow_name=workflow_name,
+            task_max_attempts=task_max_attempts,
+            task_retry_interval=task_retry_interval)
+
+        # transforming the execution inputs to a plain dict to pass to the workflow function
+        execution_inputs_dict = models.Parameter.unwrap_dict(self.execution.inputs)
+        self._tasks_graph = workflow_fn(ctx=workflow_context, **execution_inputs_dict)
+
+        executor = executor or ProcessExecutor(plugin_manager=plugin_manager)
+        self._engine = Engine(
+            executor=executor,
+            workflow_context=workflow_context,
+            tasks_graph=self._tasks_graph)
+
+    @property
+    def execution(self):
+        return self._model_storage.execution.get(self._execution_id)
+
+    @property
+    def service(self):
+        return self._model_storage.service.get(self._service_id)
+
+    def execute(self):
+        self._engine.execute()
+
+    def cancel(self):
+        self._engine.cancel_execution()
+
+    def _create_execution_model(self, inputs):
+        execution = models.Execution(
+            created_at=datetime.utcnow(),
+            service=self.service,
+            workflow_name=self._workflow_name,
+            inputs={})
+
+        if self._workflow_name in BUILTIN_WORKFLOWS:
+            workflow_inputs = dict()  # built-in workflows don't have any inputs
+        else:
+            workflow_inputs = dict((k, v) for k, v in
+                                   self.service.workflows[self._workflow_name].inputs.iteritems()
+                                   if k not in WORKFLOW_POLICY_INTERNAL_PROPERTIES)
+
+        execution.inputs = modeling_utils.create_inputs(inputs, workflow_inputs)
+        # TODO: these two following calls should execute atomically
+        self._validate_no_active_executions(execution)
+        self._model_storage.execution.put(execution)
+        return execution
+
+    def _validate_workflow_exists_for_service(self):
+        if self._workflow_name not in self.service.workflows and \
+                        self._workflow_name not in BUILTIN_WORKFLOWS:
+            raise exceptions.UndeclaredWorkflowError(
+                'No workflow policy {0} declared in service {1}'
+                .format(self._workflow_name, self.service.name))
+
+    def _validate_no_active_executions(self, execution):
+        active_executions = [e for e in self.service.executions
+                             if e.id != execution.id and e.is_active()]
+        if active_executions:
+            raise exceptions.ActiveExecutionsError(
+                "Can't start execution; Service {0} has an active execution with id {1}"
+                .format(self.service.name, active_executions[0].id))
+
+    def _get_workflow_fn(self):
+        if self._workflow_name in BUILTIN_WORKFLOWS:
+            return import_fullname('{0}.{1}'.format(BUILTIN_WORKFLOWS_PATH_PREFIX,
+                                                    self._workflow_name))
+
+        workflow = self.service.workflows[self._workflow_name]
+
+        # TODO: Custom workflow support needs improvement. Currently this code uses internal
+        # knowledge of the resource storage; instead, workflows should probably be loaded
+        # in a manner similar to operation plugins. Also consider passing paths to
+        # import_fullname rather than appending to sys.path.
+        service_template_resources_path = os.path.join(
+            self._resource_storage.service_template.base_path,
+            str(self.service.service_template.id))
+        sys.path.append(service_template_resources_path)
+
+        try:
+            workflow_fn = import_fullname(workflow.implementation)
+        except ImportError:
+            raise exceptions.WorkflowImplementationNotFoundError(
+                'Could not find workflow {0} implementation at {1}'.format(
+                    self._workflow_name, workflow.implementation))
+
+        return workflow_fn
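
The intended entry point, sketched (the storages and plugin manager are
assumed to be pre-built; 'install' is taken to be one of the builtin
workflows, and the service id comes from the caller):

    from aria.orchestrator.workflow_runner import WorkflowRunner

    def run_install(model_storage, resource_storage, plugin_manager, service_id):
        runner = WorkflowRunner(workflow_name='install',
                                service_id=service_id,
                                inputs={},
                                model_storage=model_storage,
                                resource_storage=resource_storage,
                                plugin_manager=plugin_manager)
        runner.execute()  # blocks until done; cancel() may be called from
                          # another thread to stop the running execution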

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/workflows/api/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task.py b/aria/orchestrator/workflows/api/task.py
index 49c584c..82c40c3 100644
--- a/aria/orchestrator/workflows/api/task.py
+++ b/aria/orchestrator/workflows/api/task.py
@@ -16,18 +16,16 @@
 """
 Provides the tasks to be entered into the task graph
 """
-import copy
 
+from ... import context
 from ....modeling import models
-from ....utils.collections import (OrderedDict, FrozenDict)
+from ....modeling import utils as modeling_utils
 from ....utils.uuid import generate_uuid
-from ... import context
-from .. import exceptions
 
 
 class BaseTask(object):
     """
-    Abstract task_graph task
+    Abstract task graph task
     """
 
     def __init__(self, ctx=None, **kwargs):
@@ -56,14 +54,13 @@ class BaseTask(object):
 
 class OperationTask(BaseTask):
     """
-    Represents an operation task in the task graph.
+    Represents an operation task in the task graph
     """
 
     NAME_FORMAT = '{interface}:{operation}@{type}:{name}'
 
     def __init__(self,
                  actor,
-                 actor_type,
                  interface_name,
                  operation_name,
                  inputs=None,
@@ -75,122 +72,101 @@ class OperationTask(BaseTask):
         :meth:`for_relationship`.
         """
 
+        actor_type = type(actor).__name__.lower()
+        assert isinstance(actor, (models.Node, models.Relationship))
+        assert actor_type in ('node', 'relationship')
         assert interface_name and operation_name
         super(OperationTask, self).__init__()
 
-        operation = None
-        interface = actor.interfaces.get(interface_name)
-        if interface is not None:
-            operation = interface.operations.get(operation_name)
-
-        if operation is None:
-            raise exceptions.OperationNotFoundException(
-                'Could not find operation "{0}" on interface "{1}" for {2} "{3}"'
-                .format(operation_name, interface_name, actor_type, actor.name))
-
-        if operation.implementation is None:
-            raise exceptions.OperationNotFoundException(
-                'Empty operation "{0}" on interface "{1}" for {2} "{3}"'
-                .format(operation_name, interface_name, actor_type, actor.name))
-
         self.actor = actor
-        self.actor_type = actor_type
-        self.interface_name = interface_name
-        self.operation_name = operation_name
-
-        self.name = OperationTask.NAME_FORMAT.format(type=actor_type,
-                                                     name=actor.name,
-                                                     interface=interface_name,
-                                                     operation=operation_name)
         self.max_attempts = (self.workflow_context._task_max_attempts
                              if max_attempts is None else max_attempts)
         self.retry_interval = (self.workflow_context._task_retry_interval
                                if retry_interval is None else retry_interval)
         self.ignore_failure = (self.workflow_context._task_ignore_failure
                                if ignore_failure is None else ignore_failure)
-        self.implementation = operation.implementation
-        self.plugin = operation.plugin
+        self.interface_name = interface_name
+        self.operation_name = operation_name
 
-        # Wrap inputs
-        inputs = copy.deepcopy(inputs) if inputs else {}
-        for k, v in inputs.iteritems():
-            if not isinstance(v, models.Parameter):
-                inputs[k] = models.Parameter.wrap(k, v)
+        operation = self.actor.interfaces[self.interface_name].operations[self.operation_name]
+        self.plugin = operation.plugin
+        self.inputs = modeling_utils.create_inputs(inputs or {}, operation.inputs)
+        self.implementation = operation.implementation
+        self.name = OperationTask.NAME_FORMAT.format(type=actor_type,
+                                                     name=actor.name,
+                                                     interface=self.interface_name,
+                                                     operation=self.operation_name)
 
-        self.inputs = OrderedDict(operation.inputs)
-        if inputs:
-            self.inputs.update(inputs)
-        self.inputs = FrozenDict(self.inputs)
+    def __repr__(self):
+        return self.name
 
     @classmethod
     def for_node(cls,
                  node,
                  interface_name,
                  operation_name,
-                 inputs=None,
                  max_attempts=None,
                  retry_interval=None,
-                 ignore_failure=None):
+                 ignore_failure=None,
+                 inputs=None):
         """
         Creates an operation on a node.
 
         :param node: The node on which to run the operation
         :param interface_name: The interface name
         :param operation_name: The operation name within the interface
-        :param inputs: Override the operation's inputs
         :param max_attempts: The maximum number of attempts in case the operation fails
-                             (if not specified the defaults is taken from the workflow context)
+                             (if not specified the default is taken from the workflow context)
         :param retry_interval: The interval in seconds between attempts when the operation fails
-                               (if not specified the defaults is taken from the workflow context)
+                               (if not specified the default is taken from the workflow context)
         :param ignore_failure: Whether to ignore failures
-                               (if not specified the defaults is taken from the workflow context)
+                               (if not specified the default is taken from the workflow context)
+        :param inputs: Additional operation inputs
         """
 
         assert isinstance(node, models.Node)
         return cls(
             actor=node,
-            actor_type='node',
             interface_name=interface_name,
             operation_name=operation_name,
-            inputs=inputs,
             max_attempts=max_attempts,
             retry_interval=retry_interval,
-            ignore_failure=ignore_failure)
+            ignore_failure=ignore_failure,
+            inputs=inputs)
 
     @classmethod
     def for_relationship(cls,
                          relationship,
                          interface_name,
                          operation_name,
-                         inputs=None,
                          max_attempts=None,
                          retry_interval=None,
-                         ignore_failure=None):
+                         ignore_failure=None,
+                         inputs=None):
         """
-        Creates an operation on a relationship.
+        Creates an operation on a relationship edge.
 
         :param relationship: The relationship on which to run the operation
         :param interface_name: The interface name
         :param operation_name: The operation name within the interface
-        :param inputs: Override the operation's inputs
         :param max_attempts: The maximum number of attempts in case the operation fails
-                             (if not specified the defaults is taken from the workflow context)
+                             (if not specified the default is taken from the workflow context)
         :param retry_interval: The interval in seconds between attempts when the operation fails
-                               (if not specified the defaults is taken from the workflow context)
+                               (if not specified the default is taken from the workflow context)
         :param ignore_failure: Whether to ignore failures
-                               (if not specified the defaults is taken from the workflow context)
+                               (if not specified the default is taken from the workflow context)
+        :param inputs: Additional operation inputs
         """
 
         assert isinstance(relationship, models.Relationship)
         return cls(
             actor=relationship,
-            actor_type='relationship',
             interface_name=interface_name,
             operation_name=operation_name,
-            inputs=inputs,
             max_attempts=max_attempts,
             retry_interval=retry_interval,
-            ignore_failure=ignore_failure)
+            ignore_failure=ignore_failure,
+            inputs=inputs)
 
 
 class WorkflowTask(BaseTask):

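For reference, a rough sketch of what the reworked task API looks like from a
workflow author's side. The @workflow import and the OperationTask.for_node
signature match the diff above; the interface name and extra input are
illustrative, and graph.add_tasks/ctx.nodes are assumed from the task-graph
and workflow-context APIs:

    from aria.orchestrator import workflow
    from aria.orchestrator.workflows.api.task import OperationTask


    @workflow
    def run_create(ctx, graph):
        for node in ctx.nodes:                        # assumed context property
            graph.add_tasks(OperationTask.for_node(
                node=node,
                interface_name='Standard',            # illustrative name
                operation_name='create',
                max_attempts=1,
                inputs={'port': 8080}))               # hypothetical extra input
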
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/workflows/builtin/__init__.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/__init__.py b/aria/orchestrator/workflows/builtin/__init__.py
index d43a962..8b13c62 100644
--- a/aria/orchestrator/workflows/builtin/__init__.py
+++ b/aria/orchestrator/workflows/builtin/__init__.py
@@ -24,6 +24,7 @@ from .stop import stop
 
 
 BUILTIN_WORKFLOWS = ('install', 'uninstall', 'start', 'stop')
+BUILTIN_WORKFLOWS_PATH_PREFIX = 'aria.orchestrator.workflows.builtin'
 
 
 __all__ = [

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/workflows/builtin/execute_operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/execute_operation.py b/aria/orchestrator/workflows/builtin/execute_operation.py
index 348f47a..16504ec 100644
--- a/aria/orchestrator/workflows/builtin/execute_operation.py
+++ b/aria/orchestrator/workflows/builtin/execute_operation.py
@@ -17,7 +17,7 @@
 Builtin execute_operation workflow
 """
 
-from ..api.task import OperationTask
+from . import utils
 from ... import workflow
 
 
@@ -28,7 +28,6 @@ def execute_operation(
         interface_name,
         operation_name,
         operation_kwargs,
-        allow_kwargs_override,
         run_by_dependency_order,
         type_names,
         node_template_ids,
@@ -41,7 +40,6 @@ def execute_operation(
     :param TaskGraph graph: the graph which will describe the workflow.
     :param basestring operation: the operation name to execute
     :param dict operation_kwargs:
-    :param bool allow_kwargs_override:
     :param bool run_by_dependency_order:
     :param type_names:
     :param node_template_ids:
@@ -71,8 +69,7 @@ def execute_operation(
                 node=node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                operation_kwargs=operation_kwargs,
-                allow_kwargs_override=allow_kwargs_override
+                operation_kwargs=operation_kwargs
             )
         )
 
@@ -108,21 +105,16 @@ def _create_node_task(
         node,
         interface_name,
         operation_name,
-        operation_kwargs,
-        allow_kwargs_override):
+        operation_kwargs):
     """
     A workflow which executes a single operation
     :param node: the node instance to install
     :param basestring operation: the operation name
     :param dict operation_kwargs:
-    :param bool allow_kwargs_override:
     :return:
     """
 
-    if allow_kwargs_override is not None:
-        operation_kwargs['allow_kwargs_override'] = allow_kwargs_override
-
-    return OperationTask.for_node(
+    return utils.create_node_task(
         node=node,
         interface_name=interface_name,
         operation_name=operation_name,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/workflows/builtin/utils.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/utils.py b/aria/orchestrator/workflows/builtin/utils.py
index 752fe35..722c618 100644
--- a/aria/orchestrator/workflows/builtin/utils.py
+++ b/aria/orchestrator/workflows/builtin/utils.py
@@ -12,26 +12,31 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ..api.task import OperationTask
+
+from ..api.task import OperationTask, StubTask
 from .. import exceptions
 
 
-def create_node_task(node, interface_name, operation_name):
+def create_node_task(node, interface_name, operation_name, **kwargs):
     """
     Returns a new operation task if the operation exists in the node, otherwise returns None.
     """
 
     try:
+        if _is_empty_task(node, interface_name, operation_name):
+            return StubTask()
+
         return OperationTask.for_node(node=node,
                                       interface_name=interface_name,
-                                      operation_name=operation_name)
+                                      operation_name=operation_name,
+                                      **kwargs)
     except exceptions.OperationNotFoundException:
         # We will skip nodes which do not have the operation
         return None
 
 
 def create_relationships_tasks(
-        node, interface_name, source_operation_name=None, target_operation_name=None):
+        node, interface_name, source_operation_name=None, target_operation_name=None, **kwargs):
     """
     Creates a relationship task (source and target) for all of a node_instance relationships.
     :param basestring source_operation_name: the relationship operation name.
@@ -43,21 +48,18 @@ def create_relationships_tasks(
     """
     sub_tasks = []
     for relationship in node.outbound_relationships:
-        try:
-            relationship_operations = relationship_tasks(
-                relationship,
-                interface_name,
-                source_operation_name=source_operation_name,
-                target_operation_name=target_operation_name)
-            sub_tasks.append(relationship_operations)
-        except exceptions.OperationNotFoundException:
-            # We will skip relationships which do not have the operation
-            pass
+        relationship_operations = relationship_tasks(
+            relationship,
+            interface_name,
+            source_operation_name=source_operation_name,
+            target_operation_name=target_operation_name,
+            **kwargs)
+        sub_tasks.append(relationship_operations)
     return sub_tasks
 
 
-def relationship_tasks(
-        relationship, interface_name, source_operation_name=None, target_operation_name=None):
+def relationship_tasks(relationship, interface_name, source_operation_name=None,
+                       target_operation_name=None, **kwargs):
     """
     Creates a relationship task source and target.
     :param Relationship relationship: the relationship instance itself
@@ -68,17 +70,33 @@ def relationship_tasks(
     """
     operations = []
     if source_operation_name:
-        operations.append(
-            OperationTask.for_relationship(relationship=relationship,
-                                           interface_name=interface_name,
-                                           operation_name=source_operation_name)
-        )
+        try:
+            if _is_empty_task(relationship, interface_name, source_operation_name):
+                operations.append(StubTask())
+            else:
+                operations.append(
+                    OperationTask.for_relationship(relationship=relationship,
+                                                   interface_name=interface_name,
+                                                   operation_name=source_operation_name,
+                                                   **kwargs)
+                )
+        except exceptions.OperationNotFoundException:
+            # We will skip relationships which do not have the operation
+            pass
     if target_operation_name:
-        operations.append(
-            OperationTask.for_relationship(relationship=relationship,
-                                           interface_name=interface_name,
-                                           operation_name=target_operation_name)
-        )
+        try:
+            if _is_empty_task(relationship, interface_name, target_operation_name):
+                operations.append(StubTask())
+            else:
+                operations.append(
+                    OperationTask.for_relationship(relationship=relationship,
+                                                   interface_name=interface_name,
+                                                   operation_name=target_operation_name,
+                                                   **kwargs)
+                )
+        except exceptions.OperationNotFoundException:
+            # We will skip relationships which do not have the operation
+            pass
 
     return operations
 
@@ -106,3 +124,15 @@ def create_node_task_dependencies(graph, tasks_and_nodes, reverse=False):
                     graph.add_dependency(dependency, task)
             else:
                 graph.add_dependency(task, dependencies)
+
+
+def _is_empty_task(actor, interface_name, operation_name):
+    interface = actor.interfaces.get(interface_name)
+    if interface:
+        operation = interface.operations.get(operation_name)
+        if operation:
+            return operation.implementation is None
+
+    raise exceptions.OperationNotFoundException(
+        'Could not find operation "{0}" on interface "{1}" for {2} "{3}"'
+        .format(operation_name, interface_name, type(actor).__name__.lower(), actor.name))

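Taken together with create_node_task above, the intent is: a declared but
implementation-less operation now yields a StubTask placeholder (so the graph
shape stays intact), while an undeclared operation is still skipped. For a
hypothetical node whose 'Standard' interface implements 'create', declares
'configure' with no implementation, and omits 'delete':

    create_node_task(node, 'Standard', 'create')     # -> OperationTask
    create_node_task(node, 'Standard', 'configure')  # -> StubTask (declared, empty)
    create_node_task(node, 'Standard', 'delete')     # -> None (not declared, skipped)
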
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/workflows/core/engine.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/engine.py b/aria/orchestrator/workflows/core/engine.py
index f73cade..155d0ee 100644
--- a/aria/orchestrator/workflows/core/engine.py
+++ b/aria/orchestrator/workflows/core/engine.py
@@ -88,12 +88,12 @@ class Engine(logger.LoggerMixin):
     def _executable_tasks(self):
         now = datetime.utcnow()
         return (task for task in self._tasks_iter()
-                if task.is_waiting and
+                if task.is_waiting() and
                 task.due_at <= now and
                 not self._task_has_dependencies(task))
 
     def _ended_tasks(self):
-        return (task for task in self._tasks_iter() if task.has_ended)
+        return (task for task in self._tasks_iter() if task.has_ended())
 
     def _task_has_dependencies(self, task):
         return len(self._execution_graph.pred.get(task.id, {})) > 0
@@ -105,7 +105,7 @@ class Engine(logger.LoggerMixin):
         for _, data in self._execution_graph.nodes_iter(data=True):
             task = data['task']
             if isinstance(task, engine_task.OperationTask):
-                if not task.model_task.has_ended:
+                if not task.model_task.has_ended():
                     self._workflow_context.model.task.refresh(task.model_task)
             yield task
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py
index ba93e21..2b26152 100644
--- a/aria/orchestrator/workflows/core/task.py
+++ b/aria/orchestrator/workflows/core/task.py
@@ -69,11 +69,9 @@ class StubTask(BaseTask):
         self.status = models.Task.PENDING
         self.due_at = datetime.utcnow()
 
-    @property
     def has_ended(self):
         return self.status in (models.Task.SUCCESS, models.Task.FAILED)
 
-    @property
     def is_waiting(self):
         return self.status in (models.Task.PENDING, models.Task.RETRYING)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/workflows/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/exceptions.py b/aria/orchestrator/workflows/exceptions.py
index 0ca263f..b5ae496 100644
--- a/aria/orchestrator/workflows/exceptions.py
+++ b/aria/orchestrator/workflows/exceptions.py
@@ -16,6 +16,8 @@
 """
 Workflow related Exception classes
 """
+import os
+
 from .. import exceptions
 
 
@@ -52,10 +54,10 @@ class ProcessException(ExecutorException):
         Describes the error in detail
         """
         return (
-            'Command "{error.command}" executed with an error.\n'
-            'code: {error.return_code}\n'
-            'error: {error.stderr}\n'
-            'output: {error.stdout}'.format(error=self))
+            'Command "{error.command}" executed with an error.{0}'
+            'code: {error.return_code}{0}'
+            'error: {error.stderr}{0}'
+            'output: {error.stdout}'.format(os.linesep, error=self))
 
 
 class AriaEngineError(exceptions.AriaError):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/workflows/executor/celery.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/celery.py b/aria/orchestrator/workflows/executor/celery.py
index baa0375..3c98197 100644
--- a/aria/orchestrator/workflows/executor/celery.py
+++ b/aria/orchestrator/workflows/executor/celery.py
@@ -22,6 +22,8 @@ import Queue
 
 from aria.orchestrator.workflows.executor import BaseExecutor
 
+from ....modeling.models import Parameter
+
 
 class CeleryExecutor(BaseExecutor):
     """
@@ -44,7 +46,7 @@ class CeleryExecutor(BaseExecutor):
 
     def execute(self, task):
         self._tasks[task.id] = task
-        inputs = dict((k, v.value) for k, v in task.inputs.iteritems())
+        inputs = Parameter.unwrap_dict(task.inputs)
         inputs['ctx'] = task.context
         self._results[task.id] = self._app.send_task(
             task.operation_mapping,

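Parameter.unwrap_dict is the helper referred to by "extracted unwrap_dict
method for parameter objects" in the commit message below; judging from the
inline expression it replaces, it presumably amounts to something like:

    @classmethod
    def unwrap_dict(cls, parameters):
        # hypothetical reconstruction of the extracted helper
        return dict((name, parameter.value)
                    for name, parameter in parameters.iteritems())
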
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/workflows/executor/dry.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/dry.py b/aria/orchestrator/workflows/executor/dry.py
new file mode 100644
index 0000000..b14f5d7
--- /dev/null
+++ b/aria/orchestrator/workflows/executor/dry.py
@@ -0,0 +1,52 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Dry executor
+"""
+
+from datetime import datetime
+
+from .base import BaseExecutor
+from ....modeling.models import Parameter
+
+
+class DryExecutor(BaseExecutor):
+    """
+    Executor which dry runs tasks - prints task information without causing any side effects
+    """
+
+    def execute(self, task):
+        # updating the task manually instead of calling self._task_started(task),
+        # to avoid any side effects raising that event might cause
+        with task._update():
+            task.started_at = datetime.utcnow()
+            task.status = task.STARTED
+
+        actor_type = type(task.actor).__name__.lower()
+        implementation = '{0} > '.format(task.plugin) if task.plugin else ''
+        implementation += task.implementation
+        inputs = Parameter.unwrap_dict(task.inputs)
+
+        task.context.logger.info(
+            'Executing {actor_type} {task.actor.name} operation {task.interface_name} '
+            '{task.operation_name}: {implementation} (Inputs: {inputs})'
+            .format(actor_type=actor_type, task=task, implementation=implementation, inputs=inputs))
+
+        # updating the task manually instead of calling self._task_succeeded(task),
+        # to avoid any side effects raising that event might cause
+        with task._update():
+            task.ended_at = datetime.utcnow()
+            task.status = task.SUCCESS

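A minimal sketch of wiring the new executor in; the Engine keyword arguments
here are an assumption based on how the engine is constructed elsewhere:

    from aria.orchestrator.workflows.core.engine import Engine
    from aria.orchestrator.workflows.executor.dry import DryExecutor

    engine = Engine(executor=DryExecutor(),
                    workflow_context=workflow_context,  # an existing WorkflowContext
                    tasks_graph=tasks_graph)            # an existing api TaskGraph
    engine.execute()  # logs each operation instead of running it
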
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/workflows/executor/process.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py
index f814c4d..3c2b5fe 100644
--- a/aria/orchestrator/workflows/executor/process.py
+++ b/aria/orchestrator/workflows/executor/process.py
@@ -48,6 +48,7 @@ from aria.utils import exceptions
 from aria.orchestrator.workflows.executor import base
 from aria.storage import instrumentation
 from aria.modeling import types as modeling_types
+from aria.modeling.models import Parameter
 
 _IS_WIN = os.name == 'nt'
 
@@ -148,7 +149,7 @@ class ProcessExecutor(base.BaseExecutor):
         return {
             'task_id': task.id,
             'implementation': task.implementation,
-            'operation_inputs': dict((k, v.value) for k, v in task.inputs.iteritems()),
+            'operation_inputs': Parameter.unwrap_dict(task.inputs),
             'port': self._server_port,
             'context': task.context.serialization_dict,
         }

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/orchestrator/workflows/executor/thread.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/thread.py b/aria/orchestrator/workflows/executor/thread.py
index 1a49af5..8b443cc 100644
--- a/aria/orchestrator/workflows/executor/thread.py
+++ b/aria/orchestrator/workflows/executor/thread.py
@@ -21,7 +21,9 @@ import Queue
 import threading
 
 from aria.utils import imports
+
 from .base import BaseExecutor
+from ....modeling.models import Parameter
 
 
 class ThreadExecutor(BaseExecutor):
@@ -58,7 +60,7 @@ class ThreadExecutor(BaseExecutor):
                 self._task_started(task)
                 try:
                     task_func = imports.load_attribute(task.implementation)
-                    inputs = dict((k, v.value) for k, v in task.inputs.iteritems())
+                    inputs = Parameter.unwrap_dict(task.inputs)
                     task_func(ctx=task.context, **inputs)
                     self._task_succeeded(task)
                 except BaseException as e:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/parser/consumption/modeling.py
----------------------------------------------------------------------
diff --git a/aria/parser/consumption/modeling.py b/aria/parser/consumption/modeling.py
index 6c616b4..37787d6 100644
--- a/aria/parser/consumption/modeling.py
+++ b/aria/parser/consumption/modeling.py
@@ -106,7 +106,7 @@ class InstantiateServiceInstance(Consumer):
                                            'template')
             return
 
-        self.context.modeling.template.instantiate(None)
+        self.context.modeling.template.instantiate(None, dict(self.context.modeling.inputs))
 
 
 class CoerceServiceInstanceValues(Consumer):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/storage/core.py
----------------------------------------------------------------------
diff --git a/aria/storage/core.py b/aria/storage/core.py
index 8302fc9..8caca66 100644
--- a/aria/storage/core.py
+++ b/aria/storage/core.py
@@ -38,7 +38,7 @@ API:
     * StorageDriver - class, abstract model implementation.
 """
 
-from aria.logger import LoggerMixin
+from aria.logger import LoggerMixin, NullHandler
 from . import sql_mapi
 
 __all__ = (
@@ -71,6 +71,10 @@ class Storage(LoggerMixin):
         :param kwargs:
         """
         super(Storage, self).__init__(**kwargs)
+        # Set the logger handler of any storage object to NullHandler.
+        # This is because the absence of a handler shows up when using the CLI in the form of:
+        # `No handlers could be found for logger "aria.ResourceStorage"`.
+        self.logger.addHandler(NullHandler())
         self.api = api_cls
         self.registered = {}
         self._initiator = initiator

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/storage/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/storage/exceptions.py b/aria/storage/exceptions.py
index f982f63..3f0ecec 100644
--- a/aria/storage/exceptions.py
+++ b/aria/storage/exceptions.py
@@ -23,3 +23,7 @@ class StorageError(exceptions.AriaError):
     General storage exception
     """
     pass
+
+
+class NotFoundError(StorageError):
+    pass

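This lets CLI code tell a missing entity apart from a general storage failure;
a minimal sketch, assuming the usual model_storage.service registration:

    from aria.storage import exceptions

    def find_service(model_storage, service_name):
        try:
            return model_storage.service.get_by_name(service_name)
        except exceptions.NotFoundError:
            return None  # missing entity, as opposed to any other StorageError
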
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/storage/instrumentation.py
----------------------------------------------------------------------
diff --git a/aria/storage/instrumentation.py b/aria/storage/instrumentation.py
index 138432a..cf2a365 100644
--- a/aria/storage/instrumentation.py
+++ b/aria/storage/instrumentation.py
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import os
 import copy
 import json
 
@@ -189,9 +190,9 @@ def apply_tracked_changes(tracked_changes, model):
             if not value:
                 del successfully_updated_changes[key]
         model.logger.error(
-            'Registering all the changes to the storage has failed. \n'
-            'The successful updates were: \n '
-            '{0}'.format(json.dumps(successfully_updated_changes, indent=4)))
+            'Registering all the changes to the storage has failed. {0}'
+            'The successful updates were: {0} '
+            '{1}'.format(os.linesep, json.dumps(successfully_updated_changes, indent=4)))
 
         raise
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/storage/sql_mapi.py
----------------------------------------------------------------------
diff --git a/aria/storage/sql_mapi.py b/aria/storage/sql_mapi.py
index 8d34bb4..144c925 100644
--- a/aria/storage/sql_mapi.py
+++ b/aria/storage/sql_mapi.py
@@ -59,7 +59,7 @@ class SQLAlchemyModelAPI(api.ModelAPI):
         result = query.first()
 
         if not result:
-            raise exceptions.StorageError(
+            raise exceptions.NotFoundError(
                 'Requested `{0}` with ID `{1}` was not found'
                 .format(self.model_cls.__name__, entry_id)
             )
@@ -69,13 +69,13 @@ class SQLAlchemyModelAPI(api.ModelAPI):
         assert hasattr(self.model_cls, 'name')
         result = self.list(include=include, filters={'name': entry_name})
         if not result:
-            raise exceptions.StorageError(
-                'Requested {0} with NAME `{1}` was not found'
+            raise exceptions.NotFoundError(
+                'Requested {0} with name `{1}` was not found'
                 .format(self.model_cls.__name__, entry_name)
             )
         elif len(result) > 1:
             raise exceptions.StorageError(
-                'Requested {0} with NAME `{1}` returned more than 1 value'
+                'Requested {0} with name `{1}` returned more than 1 value'
                 .format(self.model_cls.__name__, entry_name)
             )
         else:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/utils/application.py
----------------------------------------------------------------------
diff --git a/aria/utils/application.py b/aria/utils/application.py
deleted file mode 100644
index 2f40825..0000000
--- a/aria/utils/application.py
+++ /dev/null
@@ -1,294 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Convenience storage related tools.
-# TODO rename module name
-"""
-
-import json
-import os
-import shutil
-import tarfile
-import tempfile
-from datetime import datetime
-
-from aria.storage.exceptions import StorageError
-from aria.logger import LoggerMixin
-
-
-class StorageManager(LoggerMixin):
-    """
-    Convenience wrapper to simplify work with the lower level storage mechanism
-    """
-
-    def __init__(
-            self,
-            model_storage,
-            resource_storage,
-            blueprint_path,
-            blueprint_id,
-            blueprint_plan,
-            deployment_id,
-            deployment_plan,
-            **kwargs):
-        super(StorageManager, self).__init__(**kwargs)
-        self.model_storage = model_storage
-        self.resource_storage = resource_storage
-        self.blueprint_path = blueprint_path
-        self.blueprint_id = blueprint_id
-        self.blueprint_plan = blueprint_plan
-        self.deployment_id = deployment_id
-        self.deployment_plan = deployment_plan
-
-    @classmethod
-    def from_deployment(
-            cls,
-            model_storage,
-            resource_storage,
-            deployment_id,
-            deployment_plan):
-        """
-        Create a StorageManager from a deployment
-        """
-        return cls(
-            model_storage=model_storage,
-            resource_storage=resource_storage,
-            deployment_id=deployment_id,
-            deployment_plan=deployment_plan,
-            blueprint_path=None,
-            blueprint_plan=None,
-            blueprint_id=None
-        )
-
-    @classmethod
-    def from_blueprint(
-            cls,
-            model_storage,
-            resource_storage,
-            blueprint_path,
-            blueprint_id,
-            blueprint_plan):
-        """
-        Create a StorageManager from a blueprint
-        """
-        return cls(
-            model_storage=model_storage,
-            resource_storage=resource_storage,
-            blueprint_path=blueprint_path,
-            blueprint_plan=blueprint_plan,
-            blueprint_id=blueprint_id,
-            deployment_id=None,
-            deployment_plan=None)
-
-    def create_blueprint_storage(self, source, main_file_name=None):
-        """
-        create blueprint model & resource
-        """
-        assert self.blueprint_path and self.blueprint_id
-        assert hasattr(self.resource_storage, 'blueprint')
-        assert hasattr(self.model_storage, 'blueprint')
-
-        self.logger.debug('creating blueprint resource storage entry')
-        self.resource_storage.service_template.upload(
-            entry_id=self.blueprint_id,
-            source=os.path.dirname(source))
-        self.logger.debug('created blueprint resource storage entry')
-
-        self.logger.debug('creating blueprint model storage entry')
-        now = datetime.utcnow()
-        blueprint = self.model_storage.service_template.model_cls(
-            plan=self.blueprint_plan,
-            id=self.blueprint_id,
-            description=self.blueprint_plan.get('description'),
-            created_at=now,
-            updated_at=now,
-            main_file_name=main_file_name,
-        )
-        self.model_storage.service_template.put(blueprint)
-        self.logger.debug('created blueprint model storage entry')
-
-    def create_nodes_storage(self):
-        """
-        create nodes model
-        """
-        assert self.blueprint_path and self.blueprint_id
-        assert hasattr(self.model_storage, 'node')
-        assert hasattr(self.model_storage, 'relationship')
-
-        for node in self.blueprint_plan['nodes']:
-            node_copy = node.copy()
-            for field in ('name',
-                          'deployment_plugins_to_install',
-                          'interfaces',
-                          'instances'):
-                node_copy.pop(field)
-            scalable = node_copy.pop('capabilities')['scalable']['properties']
-            for index, relationship in enumerate(node_copy['relationships']):
-                relationship = self.model_storage.relationship.model_cls(**relationship)
-                self.model_storage.relationship.put(relationship)
-                node_copy['relationships'][index] = relationship
-
-            node_copy = self.model_storage.node.model_cls(
-                blueprint_id=self.blueprint_id,
-                planned_number_of_instances=scalable['current_instances'],
-                deploy_number_of_instances=scalable['default_instances'],
-                min_number_of_instances=scalable['min_instances'],
-                max_number_of_instances=scalable['max_instances'],
-                number_of_instances=scalable['current_instances'],
-                **node_copy)
-            self.model_storage.node.put(node_copy)
-
-    def create_deployment_storage(self):
-        """
-        create deployment model & resource
-        """
-        assert self.deployment_id and self.deployment_plan
-
-        assert hasattr(self.resource_storage, 'blueprint')
-        assert hasattr(self.resource_storage, 'deployment')
-        assert hasattr(self.model_storage, 'deployment')
-
-        self.logger.debug('creating deployment resource storage entry')
-        temp_dir = tempfile.mkdtemp()
-        try:
-            self.resource_storage.service_template.download(
-                entry_id=self.blueprint_id,
-                destination=temp_dir)
-            self.resource_storage.service_instance.upload(
-                entry_id=self.deployment_id,
-                source=temp_dir)
-        finally:
-            shutil.rmtree(temp_dir, ignore_errors=True)
-        self.logger.debug('created deployment resource storage entry')
-
-        self.logger.debug('creating deployment model storage entry')
-        now = datetime.utcnow()
-        deployment = self.model_storage.service_instance.model_cls(
-            id=self.deployment_id,
-            blueprint_id=self.blueprint_id,
-            description=self.deployment_plan['description'],
-            workflows=self.deployment_plan['workflows'],
-            inputs=self.deployment_plan['inputs'],
-            policy_types=self.deployment_plan['policy_types'],
-            policy_triggers=self.deployment_plan['policy_triggers'],
-            groups=self.deployment_plan['groups'],
-            scaling_groups=self.deployment_plan['scaling_groups'],
-            outputs=self.deployment_plan['outputs'],
-            created_at=now,
-            updated_at=now
-        )
-        self.model_storage.service_instance.put(deployment)
-        self.logger.debug('created deployment model storage entry')
-
-    def create_node_instances_storage(self):
-        """
-        create node_instances model
-        """
-        assert self.deployment_id and self.deployment_plan
-        assert hasattr(self.model_storage, 'node_instance')
-        assert hasattr(self.model_storage, 'relationship_instance')
-
-        self.logger.debug('creating node-instances model storage entries')
-        for node_instance in self.deployment_plan['node_instances']:
-            node_model = self.model_storage.node.get(node_instance['node_id'])
-            relationship_instances = []
-
-            for index, relationship_instance in enumerate(node_instance['relationships']):
-                relationship_instance_model = self.model_storage.relationship.model_cls(
-                    relationship=node_model.relationships[index],
-                    target_name=relationship_instance['target_name'],
-                    type=relationship_instance['type'],
-                    target_id=relationship_instance['target_id'])
-                relationship_instances.append(relationship_instance_model)
-                self.model_storage.relationship.put(relationship_instance_model)
-
-            node_instance_model = self.model_storage.node.model_cls(
-                node=node_model,
-                id=node_instance['id'],
-                runtime_properties={},
-                state=self.model_storage.node.model_cls.UNINITIALIZED,
-                deployment_id=self.deployment_id,
-                version='1.0',
-                relationship_instances=relationship_instances)
-
-            self.model_storage.node.put(node_instance_model)
-        self.logger.debug('created node-instances model storage entries')
-
-    def create_plugin_storage(self, plugin_id, source):
-        """
-        create plugin model & resource
-        """
-        assert hasattr(self.model_storage, 'plugin')
-        assert hasattr(self.resource_storage, 'plugin')
-
-        self.logger.debug('creating plugin resource storage entry')
-        self.resource_storage.plugin.upload(entry_id=plugin_id, source=source)
-        self.logger.debug('created plugin resource storage entry')
-
-        self.logger.debug('creating plugin model storage entry')
-        plugin = _load_plugin_from_archive(source)
-        build_props = plugin.get('build_server_os_properties')
-        now = datetime.utcnow()
-
-        plugin = self.model_storage.plugin.model_cls(
-            id=plugin_id,
-            package_name=plugin.get('package_name'),
-            package_version=plugin.get('package_version'),
-            archive_name=plugin.get('archive_name'),
-            package_source=plugin.get('package_source'),
-            supported_platform=plugin.get('supported_platform'),
-            distribution=build_props.get('distribution'),
-            distribution_version=build_props.get('distribution_version'),
-            distribution_release=build_props.get('distribution_release'),
-            wheels=plugin.get('wheels'),
-            excluded_wheels=plugin.get('excluded_wheels'),
-            supported_py_versions=plugin.get('supported_python_versions'),
-            uploaded_at=now
-        )
-        self.model_storage.plugin.put(plugin)
-        self.logger.debug('created plugin model storage entry')
-
-
-def _load_plugin_from_archive(tar_source):
-    if not tarfile.is_tarfile(tar_source):
-        # TODO: go over the exceptions
-        raise StorageError(
-            'the provided tar archive can not be read.')
-
-    with tarfile.open(tar_source) as tar:
-        tar_members = tar.getmembers()
-        # a wheel plugin will contain exactly one sub directory
-        if not tar_members:
-            raise StorageError(
-                'archive file structure malformed. expecting exactly one '
-                'sub directory; got none.')
-        package_json_path = os.path.join(tar_members[0].name,
-                                         'package.json')
-        try:
-            package_member = tar.getmember(package_json_path)
-        except KeyError:
-            raise StorageError("'package.json' was not found under {0}"
-                               .format(package_json_path))
-        try:
-            package_json = tar.extractfile(package_member)
-        except tarfile.ExtractError as e:
-            raise StorageError(str(e))
-        try:
-            return json.load(package_json)
-        except ValueError as e:
-            raise StorageError("'package.json' is not a valid json: "
-                               "{json_str}. error is {error}"
-                               .format(json_str=package_json.read(), error=str(e)))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/utils/archive.py
----------------------------------------------------------------------
diff --git a/aria/utils/archive.py b/aria/utils/archive.py
new file mode 100644
index 0000000..63d9004
--- /dev/null
+++ b/aria/utils/archive.py
@@ -0,0 +1,63 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import tarfile
+import zipfile
+import tempfile
+from contextlib import closing
+
+
+def is_archive(source):
+    return tarfile.is_tarfile(source) or zipfile.is_zipfile(source)
+
+
+def extract_archive(source):
+    if tarfile.is_tarfile(source):
+        return untar(source)
+    elif zipfile.is_zipfile(source):
+        return unzip(source)
+    raise ValueError(
+        'Unsupported archive type provided or archive is not valid: {0}.'.format(source))
+
+
+def tar(source, destination):
+    with closing(tarfile.open(destination, 'w:gz')) as tar_archive:
+        tar_archive.add(source, arcname=os.path.basename(source))
+
+
+def untar(archive, destination=None):
+    if not destination:
+        destination = tempfile.mkdtemp()
+    with closing(tarfile.open(name=archive)) as tar_archive:
+        tar_archive.extractall(path=destination, members=tar_archive.getmembers())
+    return destination
+
+
+def zip(source, destination):
+    with closing(zipfile.ZipFile(destination, 'w')) as zip_file:
+        for root, _, files in os.walk(source):
+            for filename in files:
+                file_path = os.path.join(root, filename)
+                source_dir = os.path.dirname(source)
+                zip_file.write(
+                    file_path, os.path.relpath(file_path, source_dir))
+    return destination
+
+
+def unzip(archive, destination=None):
+    if not destination:
+        destination = tempfile.mkdtemp()
+    with closing(zipfile.ZipFile(archive, 'r')) as zip_file:
+        zip_file.extractall(destination)
+    return destination

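A quick round-trip with the new helpers (paths are illustrative):

    from aria.utils import archive

    # pack a service template directory into a gzipped tarball...
    archive.tar('/tmp/my-service-template', '/tmp/my-service-template.tar.gz')

    # ...and extract any supported archive; untar/unzip pick a fresh
    # tempfile.mkdtemp() directory when no destination is given
    if archive.is_archive('/tmp/my-service-template.tar.gz'):
        extracted = archive.extract_archive('/tmp/my-service-template.tar.gz')
        print(extracted)
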
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/utils/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/utils/exceptions.py b/aria/utils/exceptions.py
index 9e3e80f..b60cee4 100644
--- a/aria/utils/exceptions.py
+++ b/aria/utils/exceptions.py
@@ -15,6 +15,7 @@
 
 import sys
 import linecache
+import StringIO
 import traceback as tb
 
 import jsonpickle
@@ -89,6 +90,16 @@ def _print_stack(frame):
                 puts(line)
 
 
+def get_exception_as_string(exc_type, exc_val, traceback):
+    s_traceback = StringIO.StringIO()
+    tb.print_exception(
+        etype=exc_type,
+        value=exc_val,
+        tb=traceback,
+        file=s_traceback)
+    return s_traceback.getvalue()
+
+
 class _WrappedException(Exception):
 
     def __init__(self, exception_type, exception_str):

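Typical call site, mirroring the signature above:

    import sys

    from aria.utils.exceptions import get_exception_as_string

    try:
        1 / 0
    except ZeroDivisionError:
        exc_type, exc_val, traceback = sys.exc_info()
        print(get_exception_as_string(exc_type, exc_val, traceback))
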
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/utils/file.py
----------------------------------------------------------------------
diff --git a/aria/utils/file.py b/aria/utils/file.py
index b515f70..6d1aa16 100644
--- a/aria/utils/file.py
+++ b/aria/utils/file.py
@@ -15,6 +15,7 @@
 
 import errno
 import os
+import shutil
 
 
 def makedirs(path):
@@ -26,3 +27,15 @@ def makedirs(path):
     except IOError as e:
         if e.errno != errno.EEXIST:
             raise
+
+
+def remove_if_exists(path):
+    try:
+        if os.path.isfile(path):
+            os.remove(path)
+        if os.path.isdir(path):
+            shutil.rmtree(path)
+
+    except OSError as e:
+        if e.errno != errno.ENOENT:  # errno.ENOENT = no such file or directory
+            raise  # re-raise exception if a different error occurred

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/utils/formatting.py
----------------------------------------------------------------------
diff --git a/aria/utils/formatting.py b/aria/utils/formatting.py
index 8a223e9..b5e141d 100644
--- a/aria/utils/formatting.py
+++ b/aria/utils/formatting.py
@@ -83,6 +83,34 @@ def full_type_name(value):
     return name if module == '__builtin__' else '%s.%s' % (module, name)
 
 
+def decode_list(data):
+    decoded_list = []
+    for item in data:
+        if isinstance(item, unicode):
+            item = item.encode('utf-8')
+        elif isinstance(item, list):
+            item = decode_list(item)
+        elif isinstance(item, dict):
+            item = decode_dict(item)
+        decoded_list.append(item)
+    return decoded_list
+
+
+def decode_dict(data):
+    decoded_dict = {}
+    for key, value in data.iteritems():
+        if isinstance(key, unicode):
+            key = key.encode('utf-8')
+        if isinstance(value, unicode):
+            value = value.encode('utf-8')
+        elif isinstance(value, list):
+            value = decode_list(value)
+        elif isinstance(value, dict):
+            value = decode_dict(value)
+        decoded_dict[key] = value
+    return decoded_dict
+
+
 def safe_str(value):
     """
     Like :code:`str` coercion, but makes sure that Unicode strings are properly

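These follow the common recipe for recursively turning the unicode objects
that json.loads produces under Python 2 into utf-8 byte strings, e.g.:

    import json

    data = json.loads('{"name": "web_server", "ports": [80, "8080"]}')
    decoded = decode_dict(data)
    assert isinstance(decoded['name'], str)      # was unicode
    assert isinstance(decoded['ports'][1], str)  # nested values too
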
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/utils/threading.py
----------------------------------------------------------------------
diff --git a/aria/utils/threading.py b/aria/utils/threading.py
index b99250d..bfd30f5 100644
--- a/aria/utils/threading.py
+++ b/aria/utils/threading.py
@@ -15,6 +15,7 @@
 
 from __future__ import absolute_import  # so we can import standard 'threading'
 
+import sys
 import itertools
 import multiprocessing
 from threading import (Thread, Lock)
@@ -255,3 +256,26 @@ class LockedList(list):
 
     def __exit__(self, the_type, value, traceback):
         return self.lock.__exit__(the_type, value, traceback)
+
+
+class ExceptionThread(Thread):
+    """
+    A thread from which top level exceptions can be retrieved or reraised
+    """
+    def __init__(self, *args, **kwargs):
+        Thread.__init__(self, *args, **kwargs)
+        self.exception = None
+
+    def run(self):
+        try:
+            super(ExceptionThread, self).run()
+        except BaseException:
+            self.exception = sys.exc_info()
+
+    def is_error(self):
+        return self.exception is not None
+
+    def raise_error_if_exists(self):
+        if self.is_error():
+            type_, value, trace = self.exception
+            raise type_, value, trace

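Usage sketch: run work on a thread, join it, then surface any exception on
the caller's thread:

    from aria.utils.threading import ExceptionThread

    def work():
        raise RuntimeError('boom')

    t = ExceptionThread(target=work)
    t.start()
    t.join()
    if t.is_error():
        t.raise_error_if_exists()  # re-raises with the original traceback
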
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/utils/type.py
----------------------------------------------------------------------
diff --git a/aria/utils/type.py b/aria/utils/type.py
new file mode 100644
index 0000000..dad5427
--- /dev/null
+++ b/aria/utils/type.py
@@ -0,0 +1,61 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def validate_value_type(value, type_name):
+    """
+    Validate a value is of a specific type.
+    A ValueError will be raised on type mismatch.
+    Supports both python and yaml type names.
+    """
+
+    #TODO add timestamp type?
+    name_to_type = {
+        'list': list,
+        'dict': dict,
+        'tuple': tuple,
+        'str': str,
+        'unicode': str,
+        'string': str,
+        'int': int,
+        'integer': int,
+        'bool': bool,
+        'boolean': bool,
+        'float': float
+    }
+
+    type_ = name_to_type.get(type_name.lower())
+    if type_ is None:
+        raise RuntimeError('No supported type_name was provided')
+
+    if not isinstance(value, type_):
+        raise ValueError('Value {0} is not of type {1}'.format(value, type_name))
+
+
+def convert_value_to_type(str_value, type_name):
+    try:
+        if type_name.lower() in ['str', 'unicode']:
+            return str_value.decode('utf-8')
+        elif type_name.lower() == 'int':
+            return int(str_value)
+        elif type_name.lower() == 'bool':
+            return bool(str_value)
+        elif type_name.lower() == 'float':
+            return float(str_value)
+        else:
+            raise ValueError('No supported type_name was provided')
+    except ValueError:
+        raise ValueError('Trying to convert {0} to {1} failed'.format(str_value,
+                                                                      type_name))

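Both helpers accept Python-style and YAML-style names; a few illustrative
calls:

    from aria.utils.type import validate_value_type, convert_value_to_type

    validate_value_type([1, 2, 3], 'list')  # passes silently
    convert_value_to_type('8080', 'int')    # -> 8080
    convert_value_to_type('3.14', 'float')  # -> 3.14

    # caveat: bool() on any non-empty string is truthy, so even the
    # string 'false' converts to True here
    convert_value_to_type('false', 'bool')  # -> True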


[9/9] incubator-ariatosca git commit: ARIA-48 cli

Posted by ra...@apache.org.
ARIA-48 cli

Make Execution, Task and Node status properties into methods

Enforce Uniqueness on ServiceTemplate and Service names

fixed tests

small fixes after rebase from master

Only print description in `service-templates` show if one exists

before adding any handlers, we check that there are no handlers at the moment.

fixed plugins command in CLI

Fix issue in creating services with existing name

Up until now, creating a service with a name that already existed
resulted in a null service stored in the storage. I noticed it while
thinking about tests for the `services list` command.

moved service inputs population earlier in the process

fixed storage exceptions in CLI

extracted unwrap_dict method for parameter objects

core.py now works with ids only again; removed unnecessary NotFoundError exception clauses from cli

Refactor handling storage exceptions stemming from unique names

Fix issue where a service name was passed instead of a service id

fix handling storage errors

We tried to access a wrong attribute of the exception.

few deletion-commands related fixes

created dry-run executor

tiny fix to execution error printing

Fix handling creating a service with an existing name

fixed active executions validation in workflow runner

used right logger in dry executor

workflow runner now works with service id rather than name

added doc to WorkflowRunner

Mock storage and reroute logger

Update requirements.in and requirements.txt

fixed and refactored tests

fixed custom workflows import mechanism

fixed pylint issues

fixed pylint in tests

created fixtures.py for tests

fixed workflows show cli command

improved type check

fixed operation logging issues in tests

moved TASK_LOG_NAME

fixed a test

fixed debugging issue in cli

Create testing framework for the CLI, and add some tests

The tests are of:
service-templates show
service-templates list
service-templates store

Add tests for service-templates delete

Add test for non `unique name` exception from service-templates store

Add tests for service-templates inputs

Add tests for service-templates validate

Clean up the code a bit

added tests for workflow runner

Improve logging from cli tests

fixed ssh tests

fixed pylint issues

add logutils to support configuring logging via dict

fixed pylint issues in tests

added default NullHandler support for py26

fixed simple review comments

reordered imports in logger.py

fixed service-template-filename parameter usage

Refactor basetest and fixtures

Add test for service-templates create-archive

Add tests for services list

Add tests for services create

Add tests for services delete

Add tests for services inputs

Add tests for node-templates show

Add tests for node-templates list

Add tests for nodes show and nodes list

Add checking for initial logger strings for service templates

Parametrize service templates list tests

Refactor service names in the mock storage

Fix pylint issues

Fix tests in nodes list

Fix service templates show tests to use names instead of ids

Change Environment to _Environment

fixed pylint issues


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/aaf66420
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/aaf66420
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/aaf66420

Branch: refs/heads/ARIA-48-aria-cli
Commit: aaf66420de41c678ed37abc88de474c4f6282494
Parents: a7e7826
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Tue Mar 28 12:17:46 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Sat Apr 15 17:11:18 2017 +0300

----------------------------------------------------------------------
 aria/.pylintrc                                  |   2 +-
 aria/__init__.py                                |   2 +-
 aria/cli/VERSION                                |   3 +
 aria/cli/args_parser.py                         | 269 ---------
 aria/cli/cli.py                                 | 113 ----
 aria/cli/cli/__init__.py                        |  14 +
 aria/cli/cli/aria.py                            | 445 +++++++++++++++
 aria/cli/cli/helptexts.py                       |  57 ++
 aria/cli/commands.py                            | 546 -------------------
 aria/cli/commands/__init__.py                   |  25 +
 aria/cli/commands/executions.py                 | 172 ++++++
 aria/cli/commands/logs.py                       |  65 +++
 aria/cli/commands/node_templates.py             |  96 ++++
 aria/cli/commands/nodes.py                      |  88 +++
 aria/cli/commands/plugins.py                    | 133 +++++
 aria/cli/commands/service_templates.py          | 220 ++++++++
 aria/cli/commands/services.py                   | 180 ++++++
 aria/cli/commands/workflows.py                  | 102 ++++
 aria/cli/config.py                              |  46 --
 aria/cli/config/__init__.py                     |  14 +
 aria/cli/config/config.py                       |  70 +++
 aria/cli/config/config_template.yaml            |  12 +
 aria/cli/constants.py                           |  18 +
 aria/cli/csar.py                                |  13 +-
 aria/cli/dry.py                                 |  93 ----
 aria/cli/env.py                                 | 118 ++++
 aria/cli/exceptions.py                          |  54 +-
 aria/cli/inputs.py                              | 118 ++++
 aria/cli/logger.py                              | 114 ++++
 aria/cli/main.py                                |  59 ++
 aria/cli/service_template_utils.py              | 140 +++++
 aria/cli/storage.py                             |  95 ----
 aria/cli/table.py                               |  90 +++
 aria/cli/utils.py                               | 161 ++++++
 aria/core.py                                    | 120 ++++
 aria/exceptions.py                              |  25 +
 aria/logger.py                                  |  12 +
 aria/modeling/__init__.py                       |   2 +
 aria/modeling/exceptions.py                     |  18 +
 aria/modeling/models.py                         |   9 +-
 aria/modeling/orchestration.py                  |  21 +-
 aria/modeling/service_changes.py                |  10 +-
 aria/modeling/service_common.py                 |  21 +-
 aria/modeling/service_instance.py               |  16 +-
 aria/modeling/service_template.py               |  23 +-
 aria/modeling/utils.py                          |  92 +++-
 aria/orchestrator/context/common.py             |  43 +-
 aria/orchestrator/context/operation.py          |   2 -
 aria/orchestrator/context/workflow.py           |  20 +-
 aria/orchestrator/exceptions.py                 |  21 +
 .../execution_plugin/ctx_proxy/server.py        |   3 +-
 aria/orchestrator/plugin.py                     |   3 +-
 aria/orchestrator/runner.py                     | 101 ----
 aria/orchestrator/workflow_runner.py            | 166 ++++++
 aria/orchestrator/workflows/api/task.py         |  96 ++--
 aria/orchestrator/workflows/builtin/__init__.py |   1 +
 .../workflows/builtin/execute_operation.py      |  16 +-
 aria/orchestrator/workflows/builtin/utils.py    |  82 ++-
 aria/orchestrator/workflows/core/engine.py      |   6 +-
 aria/orchestrator/workflows/core/task.py        |   2 -
 aria/orchestrator/workflows/exceptions.py       |  10 +-
 aria/orchestrator/workflows/executor/celery.py  |   4 +-
 aria/orchestrator/workflows/executor/dry.py     |  52 ++
 aria/orchestrator/workflows/executor/process.py |   3 +-
 aria/orchestrator/workflows/executor/thread.py  |   4 +-
 aria/parser/consumption/modeling.py             |   2 +-
 aria/storage/core.py                            |   6 +-
 aria/storage/exceptions.py                      |   4 +
 aria/storage/instrumentation.py                 |   7 +-
 aria/storage/sql_mapi.py                        |   8 +-
 aria/utils/application.py                       | 294 ----------
 aria/utils/archive.py                           |  63 +++
 aria/utils/exceptions.py                        |  11 +
 aria/utils/file.py                              |  13 +
 aria/utils/formatting.py                        |  28 +
 aria/utils/threading.py                         |  24 +
 aria/utils/type.py                              |  61 +++
 .../use-cases/block-storage-1/inputs.yaml       |   3 +
 .../use-cases/block-storage-2/inputs.yaml       |   3 +
 .../use-cases/block-storage-3/inputs.yaml       |   2 +
 .../use-cases/block-storage-4/inputs.yaml       |   2 +
 .../use-cases/block-storage-5/inputs.yaml       |   3 +
 .../use-cases/block-storage-6/inputs.yaml       |   3 +
 .../use-cases/compute-1/inputs.yaml             |   1 +
 .../use-cases/multi-tier-1/inputs.yaml          |   1 +
 .../use-cases/network-1/inputs.yaml             |   1 +
 .../use-cases/network-2/inputs.yaml             |   1 +
 .../use-cases/network-3/inputs.yaml             |   1 +
 .../use-cases/object-storage-1/inputs.yaml      |   1 +
 .../use-cases/software-component-1/inputs.yaml  |   1 +
 .../simple_v1_0/modeling/__init__.py            |   3 +-
 requirements.in                                 |   9 +
 requirements.txt                                |  22 +-
 setup.py                                        |   2 +-
 tests/.pylintrc                                 |   2 +-
 tests/cli/__init__.py                           |  14 +
 tests/cli/base_test.py                          |  62 +++
 tests/cli/runner.py                             |  11 +
 tests/cli/test_node_templates.py                | 101 ++++
 tests/cli/test_nodes.py                         |  76 +++
 tests/cli/test_service_templates.py             | 174 ++++++
 tests/cli/test_services.py                      | 178 ++++++
 tests/cli/utils.py                              | 175 ++++++
 tests/conftest.py                               |  14 +-
 tests/end2end/test_orchestrator.py              |  63 ---
 tests/end2end/test_tosca_simple_v1_0.py         | 112 ----
 tests/fixtures.py                               |  70 +++
 tests/mock/context.py                           |   7 +-
 tests/mock/models.py                            |  78 ++-
 tests/mock/workflow.py                          |  26 +
 tests/modeling/test_models.py                   |  12 +-
 tests/orchestrator/context/test_operation.py    |  45 +-
 .../context/test_resource_render.py             |  12 +-
 tests/orchestrator/context/test_serialize.py    |  13 +-
 tests/orchestrator/context/test_toolbelt.py     |  11 +-
 tests/orchestrator/context/test_workflow.py     |  10 +-
 .../orchestrator/execution_plugin/test_local.py |  15 +-
 tests/orchestrator/execution_plugin/test_ssh.py |  46 +-
 tests/orchestrator/test_runner.py               |  74 ---
 tests/orchestrator/test_workflow_runner.py      | 292 ++++++++++
 tests/orchestrator/workflows/api/test_task.py   |  18 +-
 .../orchestrator/workflows/core/test_engine.py  |  10 +-
 .../test_task_graph_into_exececution_graph.py   | 112 ----
 .../test_task_graph_into_execution_graph.py     | 112 ++++
 .../workflows/executor/test_process_executor.py |  34 +-
 ...process_executor_concurrent_modifications.py |   3 +-
 .../executor/test_process_executor_extension.py |   3 +-
 .../test_process_executor_tracked_changes.py    |   6 +-
 tests/parser/service_templates.py               |   6 +-
 tests/parser/test_tosca_simple_v1_0.py          | 112 ++++
 tests/utils/test_plugin.py                      |  29 +-
 tests/utils/test_threading.py                   |  33 ++
 132 files changed, 5121 insertions(+), 2381 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/.pylintrc
----------------------------------------------------------------------
diff --git a/aria/.pylintrc b/aria/.pylintrc
index 7222605..7da8c56 100644
--- a/aria/.pylintrc
+++ b/aria/.pylintrc
@@ -77,7 +77,7 @@ confidence=
 # --enable=similarities". If you want to run only the classes checker, but have
 # no Warning level messages displayed, use"--disable=all --enable=classes
 # --disable=W"
-disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,redefined-builtin,logging-format-interpolation,import-error,redefined-variable-type,broad-except,protected-access,global-statement,no-member
+disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,redefined-builtin,logging-format-interpolation,import-error,redefined-variable-type,broad-except,protected-access,global-statement,no-member,unused-argument
 
 [REPORTS]
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/__init__.py
----------------------------------------------------------------------
diff --git a/aria/__init__.py b/aria/__init__.py
index b9251d5..df75b1e 100644
--- a/aria/__init__.py
+++ b/aria/__init__.py
@@ -84,6 +84,6 @@ def application_resource_storage(api, api_kwargs=None, initiator=None, initiator
 
     return storage.ResourceStorage(api_cls=api,
                                    api_kwargs=api_kwargs,
-                                   items=['blueprint', 'deployment', 'plugin'],
+                                   items=['service_template', 'service', 'plugin'],
                                    initiator=initiator,
                                    initiator_kwargs=initiator_kwargs)
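
The hunk above renames the registered resource storage items from 'blueprint'/'deployment' to 'service_template'/'service'. As a rough usage sketch under stated assumptions: the FileSystemResourceAPI import path, its `directory` keyword, and the upload call are illustrative and may not match the project's exact storage API.

    from aria import application_resource_storage
    from aria.storage.filesystem_rapi import FileSystemResourceAPI  # assumed path

    resource_storage = application_resource_storage(
        FileSystemResourceAPI,
        api_kwargs={'directory': '/tmp/aria/resources'})  # illustrative kwargs
    # Code that previously addressed resource_storage.blueprint now goes through
    # the renamed item (hypothetical call):
    resource_storage.service_template.upload(entry_id='1', source='template.yaml')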

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/VERSION
----------------------------------------------------------------------
diff --git a/aria/cli/VERSION b/aria/cli/VERSION
new file mode 100644
index 0000000..6a3252e
--- /dev/null
+++ b/aria/cli/VERSION
@@ -0,0 +1,3 @@
+{
+  "version": "0.1.0"
+}
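
The new VERSION file is JSON rather than a bare version string, so whatever backs env.get_version_data() (used by show_version in aria/cli/cli/aria.py below) presumably parses it with the json module. A minimal sketch, assuming the file is resolved relative to the aria/cli package; this reader is illustrative, not the module's actual code:

    import json
    import os

    def get_version_data():
        # Illustrative: load aria/cli/VERSION, which contains e.g. {"version": "0.1.0"}
        version_path = os.path.join(os.path.dirname(__file__), 'VERSION')
        with open(version_path) as version_file:
            return json.load(version_file)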

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/args_parser.py
----------------------------------------------------------------------
diff --git a/aria/cli/args_parser.py b/aria/cli/args_parser.py
deleted file mode 100644
index 81ee513..0000000
--- a/aria/cli/args_parser.py
+++ /dev/null
@@ -1,269 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Argument parsing configuration and functions
-"""
-
-import argparse
-from functools import partial
-
-from ..utils.argparse import ArgumentParser
-
-NO_VERBOSE = 0
-
-
-class SmartFormatter(argparse.HelpFormatter):
-    """
-    TODO: what is this?
-    """
-    def _split_lines(self, text, width):
-        if text.startswith('R|'):
-            return text[2:].splitlines()
-        return super(SmartFormatter, self)._split_lines(text, width)
-
-
-def sub_parser_decorator(func=None, **parser_settings):
-    """
-    Decorator for sub_parser argument definitions
-    """
-    if not func:
-        return partial(sub_parser_decorator, **parser_settings)
-
-    def _wrapper(parser):
-        sub_parser = parser.add_parser(**parser_settings)
-        sub_parser.add_argument(
-            '-v', '--verbose',
-            dest='verbosity',
-            action='count',
-            default=NO_VERBOSE,
-            help='Set verbosity level (can be passed multiple times)')
-        func(sub_parser)
-        return sub_parser
-    return _wrapper
-
-
-def config_parser(parser=None):
-    """
-    Top level argparse configuration
-    """
-    parser = parser or ArgumentParser(
-        prog='ARIA',
-        description="ARIA's Command Line Interface",
-        formatter_class=SmartFormatter)
-    parser.add_argument('-v', '--version', action='version')
-    sub_parser = parser.add_subparsers(title='Commands', dest='command')
-    add_init_parser(sub_parser)
-    add_execute_parser(sub_parser)
-    add_parse_parser(sub_parser)
-    add_workflow_parser(sub_parser)
-    add_spec_parser(sub_parser)
-    add_csar_create_parser(sub_parser)
-    add_csar_open_parser(sub_parser)
-    add_csar_validate_parser(sub_parser)
-    return parser
-
-
-@sub_parser_decorator(
-    name='parse',
-    help='Parse a blueprint',
-    formatter_class=SmartFormatter)
-def add_parse_parser(parse):
-    """
-    ``parse`` command parser configuration
-    """
-    parse.add_argument(
-        'uri',
-        help='URI or file path to service template')
-    parse.add_argument(
-        'consumer',
-        nargs='?',
-        default='validate',
-        help='"validate" (default), "presentation", "template", "types", "instance", or consumer '
-             'class name (full class path or short name)')
-    parse.add_argument(
-        '--loader-source',
-        default='aria.parser.loading.DefaultLoaderSource',
-        help='loader source class for the parser')
-    parse.add_argument(
-        '--reader-source',
-        default='aria.parser.reading.DefaultReaderSource',
-        help='reader source class for the parser')
-    parse.add_argument(
-        '--presenter-source',
-        default='aria.parser.presentation.DefaultPresenterSource',
-        help='presenter source class for the parser')
-    parse.add_argument(
-        '--presenter',
-        help='force use of this presenter class in parser')
-    parse.add_argument(
-        '--prefix', nargs='*',
-        help='prefixes for imports')
-    parse.add_flag_argument(
-        'debug',
-        help_true='print debug info',
-        help_false='don\'t print debug info')
-    parse.add_flag_argument(
-        'cached-methods',
-        help_true='enable cached methods',
-        help_false='disable cached methods',
-        default=True)
-
-
-@sub_parser_decorator(
-    name='workflow',
-    help='Run a workflow on a blueprint',
-    formatter_class=SmartFormatter)
-def add_workflow_parser(workflow):
-    """
-    ``workflow`` command parser configuration
-    """
-    workflow.add_argument(
-        'uri',
-        help='URI or file path to service template')
-    workflow.add_argument(
-        '-w', '--workflow',
-        default='install',
-        help='The workflow name')
-    workflow.add_flag_argument(
-        'dry',
-        default=True,
-        help_true='dry run',
-        help_false='wet run')
-
-
-@sub_parser_decorator(
-    name='init',
-    help='Initialize environment',
-    formatter_class=SmartFormatter)
-def add_init_parser(init):
-    """
-    ``init`` command parser configuration
-    """
-    init.add_argument(
-        '-d', '--deployment-id',
-        required=True,
-        help='A unique ID for the deployment')
-    init.add_argument(
-        '-p', '--blueprint-path',
-        dest='blueprint_path',
-        required=True,
-        help='The path to the desired blueprint')
-    init.add_argument(
-        '-i', '--inputs',
-        dest='input',
-        action='append',
-        help='R|Inputs for the local workflow creation \n'
-             '(Can be provided as wildcard based paths (*.yaml, etc..) to YAML files, \n'
-             'a JSON string or as "key1=value1;key2=value2"). \n'
-             'This argument can be used multiple times')
-    init.add_argument(
-        '-b', '--blueprint-id',
-        dest='blueprint_id',
-        required=True,
-        help='The blueprint ID'
-    )
-
-
-@sub_parser_decorator(
-    name='execute',
-    help='Execute a workflow',
-    formatter_class=SmartFormatter)
-def add_execute_parser(execute):
-    """
-    ``execute`` command parser configuration
-    """
-    execute.add_argument(
-        '-d', '--deployment-id',
-        required=True,
-        help='A unique ID for the deployment')
-    execute.add_argument(
-        '-w', '--workflow',
-        dest='workflow_id',
-        help='The workflow to execute')
-    execute.add_argument(
-        '-p', '--parameters',
-        dest='parameters',
-        action='append',
-        help='R|Parameters for the workflow execution\n'
-             '(Can be provided as wildcard based paths (*.yaml, etc..) to YAML files,\n'
-             'a JSON string or as "key1=value1;key2=value2").\n'
-             'This argument can be used multiple times.')
-    execute.add_argument(
-        '--task-retries',
-        dest='task_retries',
-        type=int,
-        help='How many times should a task be retried in case of failure')
-    execute.add_argument(
-        '--task-retry-interval',
-        dest='task_retry_interval',
-        default=1,
-        type=int,
-        help='How many seconds to wait before each task is retried')
-
-
-@sub_parser_decorator(
-    name='csar-create',
-    help='Create a CSAR file from a TOSCA service template directory',
-    formatter_class=SmartFormatter)
-def add_csar_create_parser(parse):
-    parse.add_argument(
-        'source',
-        help='Service template directory')
-    parse.add_argument(
-        'entry',
-        help='Entry definition file relative to service template directory')
-    parse.add_argument(
-        '-d', '--destination',
-        help='Output CSAR zip destination',
-        required=True)
-
-
-@sub_parser_decorator(
-    name='csar-open',
-    help='Extracts a CSAR file to a TOSCA service template directory',
-    formatter_class=SmartFormatter)
-def add_csar_open_parser(parse):
-    parse.add_argument(
-        'source',
-        help='CSAR file location')
-    parse.add_argument(
-        '-d', '--destination',
-        help='Output directory to extract the CSAR into',
-        required=True)
-
-
-@sub_parser_decorator(
-    name='csar-validate',
-    help='Validates a CSAR file',
-    formatter_class=SmartFormatter)
-def add_csar_validate_parser(parse):
-    parse.add_argument(
-        'source',
-        help='CSAR file location')
-
-
-@sub_parser_decorator(
-    name='spec',
-    help='Specification tool',
-    formatter_class=SmartFormatter)
-def add_spec_parser(spec):
-    """
-    ``spec`` command parser configuration
-    """
-    spec.add_argument(
-        '--csv',
-        action='store_true',
-        help='output as CSV')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/cli.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli.py b/aria/cli/cli.py
deleted file mode 100644
index 8d014b3..0000000
--- a/aria/cli/cli.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI Entry point
-"""
-
-import os
-import logging
-import tempfile
-
-from .. import install_aria_extensions
-from ..logger import (
-    create_logger,
-    create_console_log_handler,
-    create_file_log_handler,
-    LoggerMixin,
-)
-from ..utils.exceptions import print_exception
-from .args_parser import config_parser
-from .commands import (
-    ParseCommand,
-    WorkflowCommand,
-    InitCommand,
-    ExecuteCommand,
-    CSARCreateCommand,
-    CSAROpenCommand,
-    CSARValidateCommand,
-    SpecCommand,
-)
-
-__version__ = '0.1.0'
-
-
-class AriaCli(LoggerMixin):
-    """
-    Context manager based class that enables proper top level error handling
-    """
-
-    def __init__(self, *args, **kwargs):
-        super(AriaCli, self).__init__(*args, **kwargs)
-        self.commands = {
-            'parse': ParseCommand.with_logger(base_logger=self.logger),
-            'workflow': WorkflowCommand.with_logger(base_logger=self.logger),
-            'init': InitCommand.with_logger(base_logger=self.logger),
-            'execute': ExecuteCommand.with_logger(base_logger=self.logger),
-            'csar-create': CSARCreateCommand.with_logger(base_logger=self.logger),
-            'csar-open': CSAROpenCommand.with_logger(base_logger=self.logger),
-            'csar-validate': CSARValidateCommand.with_logger(base_logger=self.logger),
-            'spec': SpecCommand.with_logger(base_logger=self.logger),
-        }
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        """
-        Here we will handle errors
-        :param exc_type:
-        :param exc_val:
-        :param exc_tb:
-        :return:
-        """
-        # todo: error handling
-        # todo: cleanup if needed
-        # TODO: user message if needed
-        pass
-
-    def run(self):
-        """
-        Parses user arguments and runs the appropriate command
-        """
-        parser = config_parser()
-        args, unknown_args = parser.parse_known_args()
-
-        command_handler = self.commands[args.command]
-        self.logger.debug('Running command: {args.command} handler: {0}'.format(
-            command_handler, args=args))
-        try:
-            command_handler(args, unknown_args)
-        except Exception as e:
-            print_exception(e)
-
-
-def main():
-    """
-    CLI entry point
-    """
-    install_aria_extensions()
-    create_logger(
-        handlers=[
-            create_console_log_handler(),
-            create_file_log_handler(file_path=os.path.join(tempfile.gettempdir(), 'aria_cli.log')),
-        ],
-        level=logging.INFO)
-    with AriaCli() as aria:
-        aria.run()
-
-
-if __name__ == '__main__':
-    main()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/cli/__init__.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli/__init__.py b/aria/cli/cli/__init__.py
new file mode 100644
index 0000000..ae1e83e
--- /dev/null
+++ b/aria/cli/cli/__init__.py
@@ -0,0 +1,14 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/cli/aria.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli/aria.py b/aria/cli/cli/aria.py
new file mode 100644
index 0000000..da72833
--- /dev/null
+++ b/aria/cli/cli/aria.py
@@ -0,0 +1,445 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import sys
+import difflib
+import StringIO
+import traceback
+from functools import wraps
+
+import click
+
+from ..env import (
+    env,
+    logger
+)
+from ..cli import helptexts
+from ..inputs import inputs_to_dict
+from ..constants import DEFAULT_SERVICE_TEMPLATE_FILENAME
+from ...utils.exceptions import get_exception_as_string
+
+
+CLICK_CONTEXT_SETTINGS = dict(
+    help_option_names=['-h', '--help'],
+    token_normalize_func=lambda param: param.lower())
+
+
+class MutuallyExclusiveOption(click.Option):
+    """Makes options mutually exclusive. The option must pass a `cls` argument
+    with this class name and a `mutually_exclusive` argument with a list of
+    argument names it is mutually exclusive with.
+
+    NOTE: All mutually exclusive options must use this. It's not enough to
+    use it in just one of the options.
+    """
+
+    def __init__(self, *args, **kwargs):
+        self.mutually_exclusive = set(kwargs.pop('mutually_exclusive', []))
+        self.mutuality_error_message = \
+            kwargs.pop('mutuality_error_message',
+                       helptexts.DEFAULT_MUTUALITY_MESSAGE)
+        self.mutuality_string = ', '.join(self.mutually_exclusive)
+        if self.mutually_exclusive:
+            help = kwargs.get('help', '')
+            kwargs['help'] = (
+                '{0}. This argument is mutually exclusive with '
+                'arguments: [{1}] ({2})'.format(
+                    help,
+                    self.mutuality_string,
+                    self.mutuality_error_message))
+        super(MutuallyExclusiveOption, self).__init__(*args, **kwargs)
+
+    def handle_parse_result(self, ctx, opts, args):
+        if self.mutually_exclusive.intersection(opts) and self.name in opts:
+            raise click.UsageError(
+                'Illegal usage: `{0}` is mutually exclusive with '
+                'arguments: [{1}] ({2}).'.format(
+                    self.name,
+                    self.mutuality_string,
+                    self.mutuality_error_message))
+        return super(MutuallyExclusiveOption, self).handle_parse_result(
+            ctx, opts, args)
+
+
+def _format_version_data(version_data,
+                         prefix=None,
+                         suffix=None,
+                         infix=None):
+    all_data = version_data.copy()
+    all_data['prefix'] = prefix or ''
+    all_data['suffix'] = suffix or ''
+    all_data['infix'] = infix or ''
+    output = StringIO.StringIO()
+    output.write('{prefix}{version}'.format(**all_data))
+    output.write('{suffix}'.format(**all_data))
+    return output.getvalue()
+
+
+def show_version(ctx, param, value):
+    if not value:
+        return
+
+    cli_version_data = env.get_version_data()
+    cli_version = _format_version_data(
+        cli_version_data,
+        prefix='ARIA CLI ',
+        infix=' ' * 5,
+        suffix='')
+
+    logger.info(cli_version)
+    ctx.exit()
+
+
+def inputs_callback(ctx, param, value):
+    """Allow to pass any inputs we provide to a command as
+    processed inputs instead of having to call `inputs_to_dict`
+    inside the command.
+
+    `@aria.options.inputs` already calls this callback so that
+    every time you use the option it returns the inputs as a
+    dictionary.
+    """
+    if not value:
+        return {}
+
+    return inputs_to_dict(value)
+
+
+def set_verbosity_level(ctx, param, value):
+    if not value:
+        return
+
+    env.logging.verbosity_level = value
+
+
+def set_cli_except_hook():
+
+    def recommend(possible_solutions):
+        logger.info('Possible solutions:')
+        for solution in possible_solutions:
+            logger.info('  - {0}'.format(solution))
+
+    def new_excepthook(tpe, value, trace):
+        if env.logging.is_high_verbose_level():
+            # log error including traceback
+            logger.error(get_exception_as_string(tpe, value, trace))
+        else:
+            # write the full error to the log file
+            with open(env.logging.log_file, 'a') as log_file:
+                traceback.print_exception(
+                    etype=tpe,
+                    value=value,
+                    tb=trace,
+                    file=log_file)
+            # print only the error message
+            print value
+
+        if hasattr(value, 'possible_solutions'):
+            recommend(getattr(value, 'possible_solutions'))
+
+    sys.excepthook = new_excepthook
+
+
+def pass_logger(func):
+    """Simply passes the logger to a command.
+    """
+    # Wraps here makes sure the original docstring propagates to click
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        return func(logger=logger, *args, **kwargs)
+
+    return wrapper
+
+
+def pass_plugin_manager(func):
+    """Simply passes the plugin manager to a command.
+    """
+    # Wraps here makes sure the original docstring propagates to click
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        return func(plugin_manager=env.plugin_manager, *args, **kwargs)
+
+    return wrapper
+
+
+def pass_model_storage(func):
+    """Simply passes the model storage to a command.
+    """
+    # Wraps here makes sure the original docstring propagates to click
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        return func(model_storage=env.model_storage, *args, **kwargs)
+
+    return wrapper
+
+
+def pass_resource_storage(func):
+    """Simply passes the resource storage to a command.
+    """
+    # Wraps here makes sure the original docstring propagates to click
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        return func(resource_storage=env.resource_storage, *args, **kwargs)
+
+    return wrapper
+
+
+def pass_context(func):
+    """Make click context ARIA specific
+
+    This exists purely for aesthetic reasons; otherwise
+    some decorators would be called `@click.something` instead of
+    `@aria.something`.
+    """
+    return click.pass_context(func)
+
+
+class AliasedGroup(click.Group):
+    def __init__(self, *args, **kwargs):
+        self.max_suggestions = kwargs.pop("max_suggestions", 3)
+        self.cutoff = kwargs.pop("cutoff", 0.5)
+        super(AliasedGroup, self).__init__(*args, **kwargs)
+
+    def get_command(self, ctx, cmd_name):
+        cmd = click.Group.get_command(self, ctx, cmd_name)
+        if cmd is not None:
+            return cmd
+        matches = \
+            [x for x in self.list_commands(ctx) if x.startswith(cmd_name)]
+        if not matches:
+            return None
+        elif len(matches) == 1:
+            return click.Group.get_command(self, ctx, matches[0])
+        ctx.fail('Too many matches: {0}'.format(', '.join(sorted(matches))))
+
+    def resolve_command(self, ctx, args):
+        """Override clicks ``resolve_command`` method
+        and appends *Did you mean ...* suggestions
+        to the raised exception message.
+        """
+        try:
+            return super(AliasedGroup, self).resolve_command(ctx, args)
+        except click.exceptions.UsageError as error:
+            error_msg = str(error)
+            original_cmd_name = click.utils.make_str(args[0])
+            matches = difflib.get_close_matches(
+                original_cmd_name,
+                self.list_commands(ctx),
+                self.max_suggestions,
+                self.cutoff)
+            if matches:
+                error_msg += '{0}{0}Did you mean one of these?{0}    {1}'.format(
+                    os.linesep,
+                    '{0}    '.format(os.linesep).join(matches, ))
+            raise click.exceptions.UsageError(error_msg, error.ctx)
+
+
+def group(name):
+    """Allow to create a group with a default click context
+    and a cls for click's `didyoueamn` without having to repeat
+    it for every group.
+    """
+    return click.group(
+        name=name,
+        context_settings=CLICK_CONTEXT_SETTINGS,
+        cls=AliasedGroup)
+
+
+def command(*args, **kwargs):
+    """Make Click commands ARIA specific
+
+    This exists purely for aesthetic reasons; otherwise
+    some decorators would be called `@click.something` instead of
+    `@aria.something`.
+    """
+    return click.command(*args, **kwargs)
+
+
+def argument(*args, **kwargs):
+    """Make Click arguments ARIA specific
+
+    This exists purely for aesthetic reasons; otherwise
+    some decorators would be called `@click.something` instead of
+    `@aria.something`.
+    """
+    return click.argument(*args, **kwargs)
+
+
+class Options(object):
+    def __init__(self):
+        """The options api is nicer when you use each option by calling
+        `@aria.options.some_option` instead of `@aria.some_option`.
+
+        Note that some options are attributes and some are static methods.
+        The reason for that is that we want to be explicit regarding how
+        a developer sees an option. It it can receive arguments, it's a
+        method - if not, it's an attribute.
+        """
+        self.version = click.option(
+            '--version',
+            is_flag=True,
+            callback=show_version,
+            expose_value=False,
+            is_eager=True,
+            help=helptexts.VERSION)
+
+        self.inputs = click.option(
+            '-i',
+            '--inputs',
+            multiple=True,
+            callback=inputs_callback,
+            help=helptexts.INPUTS)
+
+        self.json_output = click.option(
+            '--json-output',
+            is_flag=True,
+            help=helptexts.JSON_OUTPUT)
+
+        self.dry_execution = click.option(
+            '--dry',
+            is_flag=True,
+            help=helptexts.DRY_EXECUTION)
+
+        self.init_hard_reset = click.option(
+            '--hard',
+            is_flag=True,
+            help=helptexts.HARD_RESET)
+
+        self.reset_context = click.option(
+            '-r',
+            '--reset-context',
+            is_flag=True,
+            help=helptexts.RESET_CONTEXT)
+
+        self.enable_colors = click.option(
+            '--enable-colors',
+            is_flag=True,
+            default=False,
+            help=helptexts.ENABLE_COLORS)
+
+        self.node_name = click.option(
+            '-n',
+            '--node-name',
+            required=False,
+            help=helptexts.NODE_NAME)
+
+        self.descending = click.option(
+            '--descending',
+            required=False,
+            is_flag=True,
+            default=False,
+            help=helptexts.DESCENDING)
+
+        self.service_template_filename = click.option(
+            '-n',
+            '--service-template-filename',
+            default=DEFAULT_SERVICE_TEMPLATE_FILENAME,
+            help=helptexts.SERVICE_TEMPLATE_FILENAME)
+
+    @staticmethod
+    def verbose(expose_value=False):
+        return click.option(
+            '-v',
+            '--verbose',
+            count=True,
+            callback=set_verbosity_level,
+            expose_value=expose_value,
+            is_eager=True,
+            help=helptexts.VERBOSE)
+
+    @staticmethod
+    def force(help):
+        return click.option(
+            '-f',
+            '--force',
+            is_flag=True,
+            help=help)
+
+    @staticmethod
+    def task_max_attempts(default=1):
+        return click.option(
+            '--task-max-attempts',
+            type=int,
+            default=default,
+            help=helptexts.TASK_MAX_ATTEMPTS.format(default))
+
+    @staticmethod
+    def sort_by(default='created_at'):
+        return click.option(
+            '--sort-by',
+            required=False,
+            default=default,
+            help=helptexts.SORT_BY)
+
+    @staticmethod
+    def task_retry_interval(default=1):
+        return click.option(
+            '--task-retry-interval',
+            type=int,
+            default=default,
+            help=helptexts.TASK_RETRY_INTERVAL.format(default))
+
+    @staticmethod
+    def service_id(required=False):
+        return click.option(
+            '-s',
+            '--service-id',
+            required=required,
+            help=helptexts.SERVICE_ID)
+
+    @staticmethod
+    def execution_id(required=False):
+        return click.option(
+            '-e',
+            '--execution-id',
+            required=required,
+            help=helptexts.EXECUTION_ID)
+
+    @staticmethod
+    def service_template_id(required=False):
+        return click.option(
+            '-t',
+            '--service-template-id',
+            required=required,
+            help=helptexts.SERVICE_TEMPLATE_ID)
+
+    @staticmethod
+    def service_template_path(required=False):
+        return click.option(
+            '-p',
+            '--service-template-path',
+            required=required,
+            type=click.Path(exists=True))
+
+    @staticmethod
+    def service_name(required=False):
+        return click.option(
+            '-s',
+            '--service-name',
+            required=required,
+            help=helptexts.SERVICE_ID)
+
+    @staticmethod
+    def service_template_name(required=False):
+        return click.option(
+            '-t',
+            '--service-template-name',
+            required=required,
+            help=helptexts.SERVICE_ID)
+
+
+options = Options()
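
To illustrate the contract of the MutuallyExclusiveOption class defined above (every option in a mutually exclusive group must pass `cls` and name its counterparts), a hedged sketch; the command and option names are invented for the example:

    import click

    @click.command()
    @click.option('--json-output', is_flag=True, cls=MutuallyExclusiveOption,
                  mutually_exclusive=['plain_output'])
    @click.option('--plain-output', is_flag=True, cls=MutuallyExclusiveOption,
                  mutually_exclusive=['json_output'])
    def show(json_output, plain_output):
        # Supplying both flags raises click.UsageError at parse time, and each
        # option's help text is automatically amended with its exclusions.
        pass

Note that mutually_exclusive lists the Python parameter names (underscored), since handle_parse_result checks them against the parsed opts mapping.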

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/cli/helptexts.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli/helptexts.py b/aria/cli/cli/helptexts.py
new file mode 100644
index 0000000..c50a172
--- /dev/null
+++ b/aria/cli/cli/helptexts.py
@@ -0,0 +1,57 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+VERBOSE = \
+    "Show verbose output. You can supply this up to three times (i.e. -vvv)"
+VERSION = "Display the version and exit"
+
+INPUTS_PARAMS_USAGE = (
+    '(Can be provided as wildcard based paths '
+    '(*.yaml, /my_inputs/, etc..) to YAML files, a JSON string or as '
+    'key1=value1;key2=value2). This argument can be used multiple times'
+)
+
+SERVICE_TEMPLATE_PATH = "The path to the application's service template file"
+SERVICE_TEMPLATE_ID = "The unique identifier for the service template"
+
+RESET_CONTEXT = "Reset the working environment"
+HARD_RESET = "Hard reset the configuration, including coloring and loggers"
+ENABLE_COLORS = "Enable colors in logger (use --hard when working with" \
+                " an initialized environment) [default: False]"
+
+DRY_EXECUTION = "Execute a workflow dry run (prints operations information without causing side " \
+                "effects)"
+SERVICE_TEMPLATE_FILENAME = (
+    "The name of the archive's main service template file. "
+    "This is only relevant if uploading an archive")
+INPUTS = "Inputs for the service {0}".format(INPUTS_PARAMS_USAGE)
+PARAMETERS = "Parameters for the workflow {0}".format(INPUTS_PARAMS_USAGE)
+TASK_RETRY_INTERVAL = \
+    "How long of a minimal interval should occur between task retry attempts [default: {0}]"
+TASK_MAX_ATTEMPTS = \
+    "How many times should a task be attempted in case of failures [default: {0}]"
+
+JSON_OUTPUT = "Output events in a consumable JSON format"
+
+SERVICE_ID = "The unique identifier for the service"
+EXECUTION_ID = "The unique identifier for the execution"
+IGNORE_RUNNING_NODES = "Delete the service even if it has running nodes"
+
+NODE_NAME = "The node's name"
+
+DEFAULT_MUTUALITY_MESSAGE = 'Cannot be used simultaneously'
+
+SORT_BY = "Key for sorting the list"
+DESCENDING = "Sort list in descending order [default: False]"

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/commands.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands.py b/aria/cli/commands.py
deleted file mode 100644
index ee329e7..0000000
--- a/aria/cli/commands.py
+++ /dev/null
@@ -1,546 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI various commands implementation
-"""
-
-import json
-import os
-import sys
-import csv
-import shutil
-import tempfile
-from glob import glob
-from importlib import import_module
-
-from ruamel import yaml # @UnresolvedImport
-
-from .. import extension
-from ..logger import LoggerMixin
-from ..parser import iter_specifications
-from ..parser.consumption import (
-    ConsumptionContext,
-    ConsumerChain,
-    Read,
-    Validate,
-    ServiceTemplate,
-    Types,
-    Inputs,
-    ServiceInstance
-)
-from ..parser.loading import LiteralLocation, UriLocation
-from ..utils.application import StorageManager
-from ..utils.caching import cachedmethod
-from ..utils.console import (puts, Colored, indent)
-from ..utils.imports import (import_fullname, import_modules)
-from ..utils.collections import OrderedDict
-from ..orchestrator import WORKFLOW_DECORATOR_RESERVED_ARGUMENTS
-from ..orchestrator.runner import Runner
-from ..orchestrator.workflows.builtin import BUILTIN_WORKFLOWS
-from .dry import convert_to_dry
-
-from .exceptions import (
-    AriaCliFormatInputsError,
-    AriaCliYAMLInputsError,
-    AriaCliInvalidInputsError
-)
-from . import csar
-
-
-class BaseCommand(LoggerMixin):
-    """
-    Base class for CLI commands.
-    """
-
-    def __repr__(self):
-        return 'AriaCli({cls.__name__})'.format(cls=self.__class__)
-
-    def __call__(self, args_namespace, unknown_args):
-        """
-        __call__ method is called when running command
-        :param args_namespace:
-        """
-        pass
-
-    def parse_inputs(self, inputs):
-        """
-        Returns a dictionary of inputs. `resources` can be:
-        - A list of files.
-        - A single file
-        - A directory containing multiple input files
-        - A key1=value1;key2=value2 pairs string.
-        - Wildcard based string (e.g. *-inputs.yaml)
-        """
-
-        parsed_dict = {}
-
-        def _format_to_dict(input_string):
-            self.logger.info('Processing inputs source: {0}'.format(input_string))
-            try:
-                input_string = input_string.strip()
-                try:
-                    parsed_dict.update(json.loads(input_string))
-                except BaseException:
-                    parsed_dict.update((i.split('=')
-                                        for i in input_string.split(';')
-                                        if i))
-            except Exception as exc:
-                raise AriaCliFormatInputsError(str(exc), inputs=input_string)
-
-        def _handle_inputs_source(input_path):
-            self.logger.info('Processing inputs source: {0}'.format(input_path))
-            try:
-                with open(input_path) as input_file:
-                    content = yaml.safe_load(input_file)
-            except yaml.YAMLError as exc:
-                raise AriaCliYAMLInputsError(
-                    '"{0}" is not a valid YAML. {1}'.format(input_path, str(exc)))
-            if isinstance(content, dict):
-                parsed_dict.update(content)
-                return
-            if content is None:
-                return
-            raise AriaCliInvalidInputsError('Invalid inputs', inputs=input_path)
-
-        for input_string in inputs if isinstance(inputs, list) else [inputs]:
-            if os.path.isdir(input_string):
-                for input_file in os.listdir(input_string):
-                    _handle_inputs_source(os.path.join(input_string, input_file))
-                continue
-            input_files = glob(input_string)
-            if input_files:
-                for input_file in input_files:
-                    _handle_inputs_source(input_file)
-                continue
-            _format_to_dict(input_string)
-        return parsed_dict
-
-
-class ParseCommand(BaseCommand):
-    """
-    :code:`parse` command.
-    
-    Given a blueprint, emits information in human-readable, JSON, or YAML format from various phases
-    of the ARIA parser.
-    """
-    
-    def __call__(self, args_namespace, unknown_args):
-        super(ParseCommand, self).__call__(args_namespace, unknown_args)
-
-        if args_namespace.prefix:
-            for prefix in args_namespace.prefix:
-                extension.parser.uri_loader_prefix().append(prefix)
-
-        cachedmethod.ENABLED = args_namespace.cached_methods
-
-        context = ParseCommand.create_context_from_namespace(args_namespace)
-        context.args = unknown_args
-
-        consumer = ConsumerChain(context, (Read, Validate))
-
-        consumer_class_name = args_namespace.consumer
-        dumper = None
-        if consumer_class_name == 'validate':
-            dumper = None
-        elif consumer_class_name == 'presentation':
-            dumper = consumer.consumers[0]
-        elif consumer_class_name == 'template':
-            consumer.append(ServiceTemplate)
-        elif consumer_class_name == 'types':
-            consumer.append(ServiceTemplate, Types)
-        elif consumer_class_name == 'instance':
-            consumer.append(ServiceTemplate, Inputs, ServiceInstance)
-        else:
-            consumer.append(ServiceTemplate, Inputs, ServiceInstance)
-            consumer.append(import_fullname(consumer_class_name))
-
-        if dumper is None:
-            # Default to last consumer
-            dumper = consumer.consumers[-1]
-
-        consumer.consume()
-
-        if not context.validation.dump_issues():
-            dumper.dump()
-            exit(1)
-
-    @staticmethod
-    def create_context_from_namespace(namespace, **kwargs):
-        args = vars(namespace).copy()
-        args.update(kwargs)
-        return ParseCommand.create_context(**args)
-
-    @staticmethod
-    def create_context(uri,
-                       loader_source,
-                       reader_source,
-                       presenter_source,
-                       presenter,
-                       debug,
-                       **kwargs):
-        context = ConsumptionContext()
-        context.loading.loader_source = import_fullname(loader_source)()
-        context.reading.reader_source = import_fullname(reader_source)()
-        context.presentation.location = UriLocation(uri) if isinstance(uri, basestring) else uri
-        context.presentation.presenter_source = import_fullname(presenter_source)()
-        context.presentation.presenter_class = import_fullname(presenter)
-        context.presentation.print_exceptions = debug
-        return context
-
-
-class WorkflowCommand(BaseCommand):
-    """
-    :code:`workflow` command.
-    """
-
-    WORKFLOW_POLICY_INTERNAL_PROPERTIES = ('implementation', 'dependencies')
-    
-    def __call__(self, args_namespace, unknown_args):
-        super(WorkflowCommand, self).__call__(args_namespace, unknown_args)
-
-        context = self._parse(args_namespace.uri)
-        workflow_fn, inputs = self._get_workflow(context, args_namespace.workflow)
-        self._dry = args_namespace.dry
-        self._run(context, args_namespace.workflow, workflow_fn, inputs)
-    
-    def _parse(self, uri):
-        # Parse
-        context = ConsumptionContext()
-        context.presentation.location = UriLocation(uri)
-        consumer = ConsumerChain(context, (Read, Validate, ServiceTemplate, Inputs,
-                                           ServiceInstance))
-        consumer.consume()
-
-        if context.validation.dump_issues():
-            exit(1)
-        
-        return context
-    
-    def _get_workflow(self, context, workflow_name):
-        if workflow_name in BUILTIN_WORKFLOWS:
-            workflow_fn = import_fullname('aria.orchestrator.workflows.builtin.{0}'.format(
-                workflow_name))
-            inputs = {}
-        else:
-            workflow = context.modeling.instance.policies.get(workflow_name)
-            if workflow is None:
-                raise AttributeError('workflow policy does not exist: "{0}"'.format(workflow_name))
-            if workflow.type.role != 'workflow':
-                raise AttributeError('policy is not a workflow: "{0}"'.format(workflow_name))
-
-            sys.path.append(os.path.dirname(str(context.presentation.location)))
-    
-            workflow_fn = import_fullname(workflow.properties['implementation'].value)
-    
-            for k in workflow.properties:
-                if k in WORKFLOW_DECORATOR_RESERVED_ARGUMENTS:
-                    raise AttributeError('workflow policy "{0}" defines a reserved property: "{1}"'
-                                         .format(workflow_name, k))
-    
-            inputs = OrderedDict([
-                (k, v.value) for k, v in workflow.properties.iteritems()
-                if k not in WorkflowCommand.WORKFLOW_POLICY_INTERNAL_PROPERTIES
-            ])
-        
-        return workflow_fn, inputs
-    
-    def _run(self, context, workflow_name, workflow_fn, inputs):
-        # Storage
-        def _initialize_storage(model_storage):
-            if self._dry:
-                convert_to_dry(context.modeling.instance)
-            context.modeling.store(model_storage)
-
-        # Create runner
-        runner = Runner(workflow_name, workflow_fn, inputs, _initialize_storage,
-                        lambda: context.modeling.instance.id)
-        
-        # Run
-        runner.run()
-   
-
-class InitCommand(BaseCommand):
-    """
-    :code:`init` command.
-    
-    Broken. Currently maintained for reference.
-    """
-
-    _IN_VIRTUAL_ENV = hasattr(sys, 'real_prefix')
-
-    def __call__(self, args_namespace, unknown_args):
-        super(InitCommand, self).__call__(args_namespace, unknown_args)
-        self._workspace_setup()
-        inputs = self.parse_inputs(args_namespace.input) if args_namespace.input else None
-        plan, deployment_plan = self._parse_blueprint(args_namespace.blueprint_path, inputs)
-        self._create_storage(
-            blueprint_plan=plan,
-            blueprint_path=args_namespace.blueprint_path,
-            deployment_plan=deployment_plan,
-            blueprint_id=args_namespace.blueprint_id,
-            deployment_id=args_namespace.deployment_id,
-            main_file_name=os.path.basename(args_namespace.blueprint_path))
-        self.logger.info('Initiated {0}'.format(args_namespace.blueprint_path))
-        self.logger.info(
-            'If you make changes to the blueprint, '
-            'run `aria local init -p {0}` command again to apply them'.format(
-                args_namespace.blueprint_path))
-
-    def _workspace_setup(self):
-        try:
-            create_user_space()
-            self.logger.debug(
-                'created user space path in: {0}'.format(user_space()))
-        except IOError:
-            self.logger.debug(
-                'user space path already exist - {0}'.format(user_space()))
-        try:
-            create_local_storage()
-            self.logger.debug(
-                'created local storage path in: {0}'.format(local_storage()))
-        except IOError:
-            self.logger.debug(
-                'local storage path already exist - {0}'.format(local_storage()))
-        return local_storage()
-
-    def _parse_blueprint(self, blueprint_path, inputs=None):
-        # TODO
-        pass
-
-    @staticmethod
-    def _create_storage(
-            blueprint_path,
-            blueprint_plan,
-            deployment_plan,
-            blueprint_id,
-            deployment_id,
-            main_file_name=None):
-        resource_storage = application_resource_storage(
-            FileSystemResourceDriver(local_resource_storage()))
-        model_storage = application_model_storage(
-            FileSystemModelDriver(local_model_storage()))
-        resource_storage.setup()
-        model_storage.setup()
-        storage_manager = StorageManager(
-            model_storage=model_storage,
-            resource_storage=resource_storage,
-            blueprint_path=blueprint_path,
-            blueprint_id=blueprint_id,
-            blueprint_plan=blueprint_plan,
-            deployment_id=deployment_id,
-            deployment_plan=deployment_plan
-        )
-        storage_manager.create_blueprint_storage(
-            blueprint_path,
-            main_file_name=main_file_name
-        )
-        storage_manager.create_nodes_storage()
-        storage_manager.create_deployment_storage()
-        storage_manager.create_node_instances_storage()
-
-
-class ExecuteCommand(BaseCommand):
-    """
-    :code:`execute` command.
-
-    Broken. Currently maintained for reference.
-    """
-
-    def __call__(self, args_namespace, unknown_args):
-        super(ExecuteCommand, self).__call__(args_namespace, unknown_args)
-        parameters = (self.parse_inputs(args_namespace.parameters)
-                      if args_namespace.parameters else {})
-        resource_storage = application_resource_storage(
-            FileSystemResourceDriver(local_resource_storage()))
-        model_storage = application_model_storage(
-            FileSystemModelDriver(local_model_storage()))
-        deployment = model_storage.service_instance.get(args_namespace.deployment_id)
-
-        try:
-            workflow = deployment.workflows[args_namespace.workflow_id]
-        except KeyError:
-            raise ValueError(
-                '{0} workflow does not exist. existing workflows are: {1}'.format(
-                    args_namespace.workflow_id,
-                    deployment.workflows.keys()))
-
-        workflow_parameters = self._merge_and_validate_execution_parameters(
-            workflow,
-            args_namespace.workflow_id,
-            parameters
-        )
-        workflow_context = WorkflowContext(
-            name=args_namespace.workflow_id,
-            model_storage=model_storage,
-            resource_storage=resource_storage,
-            deployment_id=args_namespace.deployment_id,
-            workflow_id=args_namespace.workflow_id,
-            parameters=workflow_parameters,
-        )
-        workflow_function = self._load_workflow_handler(workflow['operation'])
-        tasks_graph = workflow_function(workflow_context, **workflow_context.parameters)
-        executor = ProcessExecutor()
-        workflow_engine = Engine(executor=executor,
-                                 workflow_context=workflow_context,
-                                 tasks_graph=tasks_graph)
-        workflow_engine.execute()
-        executor.close()
-
-    @staticmethod
-    def _merge_and_validate_execution_parameters(
-            workflow,
-            workflow_name,
-            execution_parameters):
-        merged_parameters = {}
-        workflow_parameters = workflow.get('parameters', {})
-        missing_mandatory_parameters = set()
-
-        for name, param in workflow_parameters.iteritems():
-            if 'default' not in param:
-                if name not in execution_parameters:
-                    missing_mandatory_parameters.add(name)
-                    continue
-                merged_parameters[name] = execution_parameters[name]
-                continue
-            merged_parameters[name] = (execution_parameters[name] if name in execution_parameters
-                                       else param['default'])
-
-        if missing_mandatory_parameters:
-            raise ValueError(
-                'Workflow "{0}" must be provided with the following '
-                'parameters to execute: {1}'.format(
-                    workflow_name, ','.join(missing_mandatory_parameters)))
-
-        custom_parameters = dict(
-            (k, v) for (k, v) in execution_parameters.iteritems()
-            if k not in workflow_parameters)
-
-        if custom_parameters:
-            raise ValueError(
-                'Workflow "{0}" does not have the following parameters declared: {1}. '
-                'Remove these parameters'.format(
-                    workflow_name, ','.join(custom_parameters.keys())))
-
-        return merged_parameters
-
-    @staticmethod
-    def _load_workflow_handler(handler_path):
-        module_name, spec_handler_name = handler_path.rsplit('.', 1)
-        try:
-            module = import_module(module_name)
-            return getattr(module, spec_handler_name)
-        except ImportError:
-            # TODO: exception handler
-            raise
-        except AttributeError:
-            # TODO: exception handler
-            raise
-
-
-class BaseCSARCommand(BaseCommand):
-    @staticmethod
-    def _parse_and_dump(reader):
-        context = ConsumptionContext()
-        context.loading.prefixes += [os.path.join(reader.destination, 'definitions')]
-        context.presentation.location = LiteralLocation(reader.entry_definitions_yaml)
-        chain = ConsumerChain(context, (Read, Validate, Model, Instance))
-        chain.consume()
-        if context.validation.dump_issues():
-            raise RuntimeError('Validation failed')
-        dumper = chain.consumers[-1]
-        dumper.dump()
-
-    def _read(self, source, destination):
-        reader = csar.read(
-            source=source,
-            destination=destination,
-            logger=self.logger)
-        self.logger.info(
-            'Path: {r.destination}\n'
-            'TOSCA meta file version: {r.meta_file_version}\n'
-            'CSAR Version: {r.csar_version}\n'
-            'Created By: {r.created_by}\n'
-            'Entry definitions: {r.entry_definitions}'
-            .format(r=reader))
-        self._parse_and_dump(reader)
-
-    def _validate(self, source):
-        workdir = tempfile.mkdtemp()
-        try:
-            self._read(
-                source=source,
-                destination=workdir)
-        finally:
-            shutil.rmtree(workdir, ignore_errors=True)
-
-
-class CSARCreateCommand(BaseCSARCommand):
-    def __call__(self, args_namespace, unknown_args):
-        super(CSARCreateCommand, self).__call__(args_namespace, unknown_args)
-        csar.write(
-            source=args_namespace.source,
-            entry=args_namespace.entry,
-            destination=args_namespace.destination,
-            logger=self.logger)
-        self._validate(args_namespace.destination)
-
-
-class CSAROpenCommand(BaseCSARCommand):
-    def __call__(self, args_namespace, unknown_args):
-        super(CSAROpenCommand, self).__call__(args_namespace, unknown_args)
-        self._read(
-            source=args_namespace.source,
-            destination=args_namespace.destination)
-
-
-class CSARValidateCommand(BaseCSARCommand):
-    def __call__(self, args_namespace, unknown_args):
-        super(CSARValidateCommand, self).__call__(args_namespace, unknown_args)
-        self._validate(args_namespace.source)
-
-
-class SpecCommand(BaseCommand):
-    """
-    :code:`spec` command.
-    
-    Emits all uses of :code:`@dsl_specification` in the codebase, in human-readable or CSV format.
-    """
-    
-    def __call__(self, args_namespace, unknown_args):
-        super(SpecCommand, self).__call__(args_namespace, unknown_args)
-
-        # Make sure that all @dsl_specification decorators are processed
-        for pkg in extension.parser.specification_package():
-            import_modules(pkg)
-
-        # TODO: scan YAML documents as well
-
-        if args_namespace.csv:
-            writer = csv.writer(sys.stdout, quoting=csv.QUOTE_ALL)
-            writer.writerow(('Specification', 'Section', 'Code', 'URL'))
-            for spec, sections in iter_specifications():
-                for section, details in sections:
-                    writer.writerow((spec, section, details['code'], details['url']))
-
-        else:
-            for spec, sections in iter_specifications():
-                puts(Colored.cyan(spec))
-                with indent(2):
-                    for section, details in sections:
-                        puts(Colored.blue(section))
-                        with indent(2):
-                            for k, v in details.iteritems():
-                                puts('%s: %s' % (Colored.magenta(k), v))
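For reference, the parameter-merging rules removed above amount to the following minimal sketch (plain Python 2; the dict shapes, declared mapping each name to a spec that may carry a 'default' and provided mapping each name to a value, are assumptions based on the removed code):

    def merge_execution_parameters(declared, provided):
        # a declared parameter without a default is mandatory and must be provided
        missing = set(name for name, param in declared.iteritems()
                      if 'default' not in param and name not in provided)
        if missing:
            raise ValueError('missing mandatory parameters: %s' % ','.join(missing))
        # parameters that were provided but never declared are an error
        undeclared = set(provided) - set(declared)
        if undeclared:
            raise ValueError('undeclared parameters: %s' % ','.join(undeclared))
        # otherwise a provided value overrides the declared default
        return dict((name, provided.get(name, param.get('default')))
                    for name, param in declared.iteritems())

    # e.g. merge_execution_parameters({'x': {}, 'y': {'default': 1}}, {'x': 2})
    # returns {'x': 2, 'y': 1}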

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/commands/__init__.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/__init__.py b/aria/cli/commands/__init__.py
new file mode 100644
index 0000000..7777791
--- /dev/null
+++ b/aria/cli/commands/__init__.py
@@ -0,0 +1,25 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import (
+    executions,
+    logs,
+    node_templates,
+    nodes,
+    plugins,
+    service_templates,
+    services,
+    workflows
+)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/commands/executions.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/executions.py b/aria/cli/commands/executions.py
new file mode 100644
index 0000000..730fd29
--- /dev/null
+++ b/aria/cli/commands/executions.py
@@ -0,0 +1,172 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from .. import utils
+from ..table import print_data
+from ..cli import aria
+from ...modeling.models import Execution
+from ...orchestrator.workflow_runner import WorkflowRunner
+from ...orchestrator.workflows.executor.dry import DryExecutor
+from ...utils import formatting
+from ...utils import threading
+
+EXECUTION_COLUMNS = ['id', 'workflow_name', 'status', 'service_name',
+                     'created_at', 'error']
+
+
+@aria.group(name='executions')
+@aria.options.verbose()
+def executions():
+    """Handle workflow executions
+    """
+    pass
+
+
+@executions.command(name='show',
+                    short_help='Show execution information')
+@aria.argument('execution-id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(execution_id, model_storage, logger):
+    """Show information for a specific execution
+
+    `EXECUTION_ID` is the execution to get information on.
+    """
+    logger.info('Showing execution {0}'.format(execution_id))
+    execution = model_storage.execution.get(execution_id)
+
+    print_data(EXECUTION_COLUMNS, execution.to_dict(), 'Execution:', max_width=50)
+
+    # print execution parameters
+    logger.info('Execution Inputs:')
+    if execution.inputs:
+        # TODO: check this section; it hasn't been tested yet
+        execution_inputs = [ei.to_dict() for ei in execution.inputs]
+        for input_name, input_value in formatting.decode_dict(
+                execution_inputs).iteritems():
+            logger.info('\t{0}: \t{1}'.format(input_name, input_value))
+    else:
+        logger.info('\tNo inputs')
+    logger.info('')
+
+
+@executions.command(name='list',
+                    short_help='List service executions')
+@aria.options.service_name(required=False)
+@aria.options.sort_by()
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(service_name,
+         sort_by,
+         descending,
+         model_storage,
+         logger):
+    """List executions
+
+    If `SERVICE_NAME` is provided, list executions for that service.
+    Otherwise, list executions for all services.
+    """
+    if service_name:
+        logger.info('Listing executions for service {0}...'.format(
+            service_name))
+        service = model_storage.service.get_by_name(service_name)
+        filters = dict(service=service)
+    else:
+        logger.info('Listing all executions...')
+        filters = {}
+
+    executions_list = [e.to_dict() for e in model_storage.execution.list(
+        filters=filters,
+        sort=utils.storage_sort_param(sort_by, descending))]
+
+    print_data(EXECUTION_COLUMNS, executions_list, 'Executions:')
+
+
+@executions.command(name='start',
+                    short_help='Execute a workflow')
+@aria.argument('workflow-name')
+@aria.options.service_name(required=True)
+@aria.options.inputs
+@aria.options.dry_execution
+@aria.options.task_max_attempts()
+@aria.options.task_retry_interval()
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def start(workflow_name,
+          service_name,
+          inputs,
+          dry,
+          task_max_attempts,
+          task_retry_interval,
+          model_storage,
+          resource_storage,
+          plugin_manager,
+          logger):
+    """Execute a workflow
+
+    `WORKFLOW_NAME` is the name of the workflow to execute (e.g. `uninstall`)
+    """
+    service = model_storage.service.get_by_name(service_name)
+    executor = DryExecutor() if dry else None  # use WorkflowRunner's default executor
+
+    workflow_runner = \
+        WorkflowRunner(workflow_name, service.id, inputs,
+                       model_storage, resource_storage, plugin_manager,
+                       executor, task_max_attempts, task_retry_interval)
+
+    execution_thread_name = '{0}_{1}'.format(service_name, workflow_name)
+    execution_thread = threading.ExceptionThread(target=workflow_runner.execute,
+                                                 name=execution_thread_name)
+    execution_thread.daemon = True  # allows force-cancel to exit immediately
+
+    logger.info('Starting {0}execution. Press Ctrl+C to cancel'.format('dry ' if dry else ''))
+    execution_thread.start()
+    try:
+        while execution_thread.is_alive():
+            # using join without a timeout blocks and ignores KeyboardInterrupt
+            execution_thread.join(1)
+    except KeyboardInterrupt:
+        _cancel_execution(workflow_runner, execution_thread, logger)
+
+    # raise any errors from the execution thread (note these are not workflow execution errors)
+    execution_thread.raise_error_if_exists()
+
+    execution = workflow_runner.execution
+    logger.info('Execution has ended with "{0}" status'.format(execution.status))
+    if execution.status == Execution.FAILED and execution.error:
+        logger.info('Execution error:{0}{1}'.format(os.linesep, execution.error))
+
+    if dry:
+        # remove traces of the dry execution (including tasks, logs, inputs...)
+        model_storage.execution.delete(execution)
+
+
+def _cancel_execution(workflow_runner, execution_thread, logger):
+    logger.info('Cancelling execution. Press Ctrl+C again to force-cancel')
+    try:
+        workflow_runner.cancel()
+        while execution_thread.is_alive():
+            execution_thread.join(1)
+    except KeyboardInterrupt:
+        logger.info('Force-cancelling execution')
+        # TODO handle execution (update status etc.) and exit process
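The join-with-timeout loop in `start` above is what keeps Ctrl+C responsive: joining with no timeout would block and swallow the KeyboardInterrupt. A minimal standalone illustration of the pattern (standard library only; the names here are invented for the demo):

    import threading
    import time

    def work():
        time.sleep(60)  # stand-in for workflow_runner.execute

    worker = threading.Thread(target=work, name='demo_execution')
    worker.daemon = True  # a daemon thread will not keep the process alive on force-cancel
    worker.start()
    try:
        while worker.is_alive():
            worker.join(1)  # the 1-second timeout lets KeyboardInterrupt through
    except KeyboardInterrupt:
        print 'cancel requested'

Assuming the console entry point is named `aria` and Click derives the flag from `aria.options.service_name` as `--service-name` (both assumptions; the wiring is outside this diff), the command itself would be invoked along the lines of `aria executions start uninstall --service-name my-service`.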

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/commands/logs.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/logs.py b/aria/cli/commands/logs.py
new file mode 100644
index 0000000..f8873cd
--- /dev/null
+++ b/aria/cli/commands/logs.py
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import utils
+from ..cli import aria
+
+
+@aria.group(name='logs')
+@aria.options.verbose()
+def logs():
+    """Show logs from workflow executions
+    """
+    pass
+
+
+@logs.command(name='list',
+              short_help='List execution logs')
+@aria.argument('execution-id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(execution_id,
+         model_storage,
+         logger):
+    """Display logs for an execution
+    """
+    logger.info('Listing logs for execution id {0}'.format(execution_id))
+    logs_list = model_storage.log.list(filters=dict(execution_fk=execution_id),
+                                       sort=utils.storage_sort_param('created_at', False))
+    # TODO: print logs nicely
+    if logs_list:
+        for log in logs_list:
+            print log
+    else:
+        logger.info('\tNo logs')
+
+
+@logs.command(name='delete',
+              short_help='Delete execution logs')
+@aria.argument('execution-id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def delete(execution_id, model_storage, logger):
+    """Delete logs of an execution
+
+    `EXECUTION_ID` is the execution whose logs will be deleted.
+    """
+    logger.info('Deleting logs for execution id {0}'.format(execution_id))
+    logs_list = model_storage.log.list(filters=dict(execution_fk=execution_id))
+    for log in logs_list:
+        model_storage.log.delete(log)
+    logger.info('Deleted logs for execution id {0}'.format(execution_id))
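Under the same assumptions about the `aria` entry point, the logs commands would be driven roughly like:

    aria logs list 1       # list logs for the execution with id 1
    aria logs delete 1     # delete those logs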

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/commands/node_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/node_templates.py b/aria/cli/commands/node_templates.py
new file mode 100644
index 0000000..cf50ceb
--- /dev/null
+++ b/aria/cli/commands/node_templates.py
@@ -0,0 +1,96 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ..table import print_data
+from .. import utils
+from ..cli import aria
+
+
+NODE_TEMPLATE_COLUMNS = ['id', 'name', 'description', 'service_template_name', 'type_name']
+
+
+@aria.group(name='node-templates')
+@aria.options.verbose()
+def node_templates():
+    """Handle a service template's node templates
+    """
+    pass
+
+
+@node_templates.command(name='show',
+                        short_help='Show node information')
+@aria.argument('node-template-id')
+# @aria.options.service_template_name(required=True)
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(node_template_id, model_storage, logger):
+    """Show information for a specific node of a specific service template
+
+    `NODE_TEMPLATE_ID` is the node id to get information on.
+    """
+    # logger.info('Showing node template {0} for service template {1}'.format(
+    #     node_template_id, service_template_name))
+    logger.info('Showing node template {0}'.format(node_template_id))
+    # TODO: get node template of a specific service template instead?
+    node_template = model_storage.node_template.get(node_template_id)
+
+    print_data(NODE_TEMPLATE_COLUMNS, node_template.to_dict(), 'Node template:', max_width=50)
+
+    # print node template properties
+    logger.info('Node template properties:')
+    if node_template.properties:
+        logger.info(utils.get_parameter_templates_as_string(node_template.properties))
+    else:
+        logger.info('\tNo properties')
+
+    # print node IDs
+    nodes = node_template.nodes
+    logger.info('Nodes:')
+    if nodes:
+        for node in nodes:
+            logger.info('\t{0}'.format(node.name))
+    else:
+        logger.info('\tNo nodes')
+
+
+@node_templates.command(name='list',
+                        short_help='List node templates for a service template')
+@aria.options.service_template_name()
+@aria.options.sort_by('service_template_name')
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(service_template_name, sort_by, descending, model_storage, logger):
+    """List node templates
+
+    If `SERVICE_TEMPLATE_NAME` is provided, list nodes for that service template.
+    Otherwise, list node templates for all service templates.
+    """
+    if service_template_name:
+        logger.info('Listing node templates for service template {0}...'.format(
+            service_template_name))
+        service_template = model_storage.service_template.get_by_name(service_template_name)
+        filters = dict(service_template=service_template)
+    else:
+        logger.info('Listing all node templates...')
+        filters = {}
+
+    node_templates_list = [nt.to_dict() for nt in model_storage.node_template.list(
+        filters=filters,
+        sort=utils.storage_sort_param(sort_by, descending))]
+
+    print_data(NODE_TEMPLATE_COLUMNS, node_templates_list, 'Node templates:')
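Likewise for node templates (the `--service-template-name` spelling is inferred from the decorator above and is an assumption):

    aria node-templates list --service-template-name my-template
    aria node-templates show 1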

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/aria/cli/commands/nodes.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/nodes.py b/aria/cli/commands/nodes.py
new file mode 100644
index 0000000..fd65e24
--- /dev/null
+++ b/aria/cli/commands/nodes.py
@@ -0,0 +1,88 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import utils
+from ..cli import aria
+from ..table import print_data
+
+
+NODE_COLUMNS = ['id', 'name', 'service_name', 'node_template_name', 'state']
+
+
+@aria.group(name='nodes')
+@aria.options.verbose()
+def nodes():
+    """Handle a service's nodes
+    """
+    pass
+
+
+@nodes.command(name='show',
+               short_help='Show node information')
+@aria.argument('node_id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(node_id, model_storage, logger):
+    """Showing information for a specific node
+
+    `NODE_ID` is the id of the node to get information on.
+    """
+    logger.info('Showing node {0}'.format(node_id))
+    node = model_storage.node.get(node_id)
+
+    print_data(NODE_COLUMNS, node.to_dict(), 'Node:', 50)
+
+    # print node attributes
+    logger.info('Node attributes:')
+    if node.runtime_properties:
+        for prop_name, prop_value in node.runtime_properties.iteritems():
+            logger.info('\t{0}: {1}'.format(prop_name, prop_value))
+    else:
+        logger.info('\tNo attributes')
+    logger.info('')
+
+
+@nodes.command(name='list',
+               short_help='List nodes for a service')
+@aria.options.service_name(required=False)
+@aria.options.sort_by('service_name')
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(service_name,
+         sort_by,
+         descending,
+         model_storage,
+         logger):
+    """List nodes
+
+    If `SERVICE_NAME` is provided, list nodes for that service.
+    Otherwise, list nodes for all services.
+    """
+    if service_name:
+        logger.info('Listing nodes for service {0}...'.format(service_name))
+        service = model_storage.service.get_by_name(service_name)
+        filters = dict(service=service)
+    else:
+        logger.info('Listing all nodes...')
+        filters = {}
+
+    nodes_list = [node.to_dict() for node in model_storage.node.list(
+        filters=filters,
+        sort=utils.storage_sort_param(sort_by, descending))]
+
+    print_data(NODE_COLUMNS, nodes_list, 'Nodes:')
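And for nodes (again assuming the `aria` entry point and the `--service-name` spelling):

    aria nodes list --service-name my-service
    aria nodes show 1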