Posted to dev@ariatosca.apache.org by ra...@apache.org on 2017/04/04 10:21:55 UTC

[01/24] incubator-ariatosca git commit: ARIA-136-ctx-binary-doesnt-get-installed-via-pip-install [Forced Update!]

Repository: incubator-ariatosca
Updated Branches:
  refs/heads/ARIA-48-aria-cli 600a9b26c -> 31a0c794f (forced update)


ARIA-136-ctx-binary-doesnt-get-installed-via-pip-install


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/369323b6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/369323b6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/369323b6

Branch: refs/heads/ARIA-48-aria-cli
Commit: 369323b66cdd02ccfba6cc3e3c547ac4694fc2fd
Parents: 2d83475
Author: max-orlov <ma...@gigaspaces.com>
Authored: Sun Apr 2 19:24:20 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Tue Apr 4 11:48:08 2017 +0300

----------------------------------------------------------------------
 setup.py | 47 ++++++++++++++++++++++++++++++++++++-----------
 1 file changed, 36 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/369323b6/setup.py
----------------------------------------------------------------------
diff --git a/setup.py b/setup.py
index 7be5275..3d72ebc 100644
--- a/setup.py
+++ b/setup.py
@@ -19,6 +19,7 @@ import sys
 
 from setuptools import setup, find_packages
 from setuptools.command.install import install
+from setuptools.command.develop import develop
 
 _PACKAGE_NAME = 'aria'
 _PYTHON_SUPPORTED_VERSIONS = [(2, 6), (2, 7)]
@@ -63,20 +64,43 @@ except IOError:
 console_scripts = ['aria = aria.cli.cli:main']
 
 
-class InstallCommand(install):
-    user_options = install.user_options + [
+def _generate_user_options(command):
+    return command.user_options + [
         ('skip-ctx', None, 'Install with or without the ctx (Defaults to False)')
     ]
-    boolean_options = install.boolean_options + ['skip-ctx']
 
-    def initialize_options(self):
-        install.initialize_options(self)
-        self.skip_ctx = False
 
-    def run(self):
-        if self.skip_ctx is False:
-            console_scripts.append('ctx = aria.orchestrator.execution_plugin.ctx_proxy.client:main')
-        install.run(self)
+def _generate_boolean_options(command):
+    return command.boolean_options + ['skip-ctx']
+
+
+def _initialize_options(custom_cmd):
+    custom_cmd.command.initialize_options(custom_cmd)
+    custom_cmd.skip_ctx = False
+
+
+def _run(custom_cmd):
+    if custom_cmd.skip_ctx is False:
+        console_scripts.append('ctx = aria.orchestrator.execution_plugin.ctx_proxy.client:main')
+    custom_cmd.command.run(custom_cmd)
+
+
+class InstallCommand(install):
+    command = install
+
+    user_options = _generate_user_options(install)
+    boolean_options = _generate_boolean_options(install)
+    initialize_options = _initialize_options
+    run = _run
+
+
+class DevelopCommand(develop):
+    command = develop
+
+    user_options = _generate_user_options(develop)
+    boolean_options = _generate_boolean_options(develop)
+    initialize_options = _initialize_options
+    run = _run
 
 setup(
     name=_PACKAGE_NAME,
@@ -116,6 +140,7 @@ setup(
         'console_scripts': console_scripts
     },
     cmdclass={
-        'install': InstallCommand
+        'install': InstallCommand,      # used in pip install ...
+        'develop': DevelopCommand       # used in pip install -e ...
     }
 )
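
A note on why both commands are overridden: `pip install .` drives setuptools' `install` command, while `pip install -e .` drives `develop`, so the `ctx` console script has to be registered in both code paths. The module-level helpers work because plain functions assigned as class attributes become methods, so `custom_cmd` receives the command instance. Boiled down to its essentials, the setuptools pattern looks like the self-contained sketch below (the `--skip-feature` option and class name are made up for illustration; pip of that era could also forward such options via `--install-option`):

    from setuptools import setup
    from setuptools.command.install import install


    class SkipFeatureInstall(install):
        # accept "--skip-feature" on the command line, e.g.
        #   python setup.py install --skip-feature
        user_options = install.user_options + [
            ('skip-feature', None, 'Install without the optional feature')
        ]
        boolean_options = install.boolean_options + ['skip-feature']

        def initialize_options(self):
            install.initialize_options(self)
            self.skip_feature = False

        def run(self):
            if not self.skip_feature:
                pass  # e.g. append an extra console script before installing
            install.run(self)


    setup(name='example', version='0.1', cmdclass={'install': SkipFeatureInstall})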


[19/24] incubator-ariatosca git commit: extracted unwrap_dict method for parameter objects

Posted by ra...@apache.org.
extracted unwrap_dict method for parameter objects


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/141dbb05
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/141dbb05
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/141dbb05

Branch: refs/heads/ARIA-48-aria-cli
Commit: 141dbb05f0403b55f860d9bbb46351b0c22c0045
Parents: c40f361
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Mon Apr 3 14:59:40 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/modeling/service_common.py                 | 9 +++++++++
 aria/orchestrator/workflow_runner.py            | 3 +--
 aria/orchestrator/workflows/executor/celery.py  | 4 +++-
 aria/orchestrator/workflows/executor/process.py | 3 ++-
 aria/orchestrator/workflows/executor/thread.py  | 4 +++-
 5 files changed, 18 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/141dbb05/aria/modeling/service_common.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_common.py b/aria/modeling/service_common.py
index 48c3170..17e0a17 100644
--- a/aria/modeling/service_common.py
+++ b/aria/modeling/service_common.py
@@ -88,6 +88,15 @@ class ParameterBase(TemplateModelMixin):
             console.puts(context.style.meta(self.description))
 
     @classmethod
+    def unwrap_dict(cls, parameters_dict):
+        """
+        Takes a parameters dict and simplifies it into key-value dict
+        :param parameters_dict: a parameter-name to parameter dict
+        :return: a parameter-name to parameter value dict
+        """
+        return dict((k, v.value) for k, v in parameters_dict.iteritems())
+
+    @classmethod
     def wrap(cls, name, value, description=None):
         """
         Wraps an arbitrary value as a parameter. The type will be guessed via introspection.
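
The new classmethod centralizes the "dict of Parameter models to dict of plain values" conversion that the executors below previously did inline. A self-contained sketch of what the call evaluates to (the stand-in class is illustrative, not ARIA's Parameter model):

    # Stand-in object with a ``value`` attribute so the snippet runs on its own:
    class FakeParameter(object):
        def __init__(self, value):
            self.value = value

    parameters = {'port': FakeParameter(8080), 'host': FakeParameter('localhost')}

    # what ParameterBase.unwrap_dict(parameters) evaluates to:
    unwrapped = dict((k, v.value) for k, v in parameters.iteritems())
    assert unwrapped == {'port': 8080, 'host': 'localhost'}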

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/141dbb05/aria/orchestrator/workflow_runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflow_runner.py b/aria/orchestrator/workflow_runner.py
index 8b6b431..78b17b8 100644
--- a/aria/orchestrator/workflow_runner.py
+++ b/aria/orchestrator/workflow_runner.py
@@ -68,8 +68,7 @@ class WorkflowRunner(object):
             task_retry_interval=task_retry_interval)
 
         # transforming the execution inputs to dict, to pass them to the workflow function
-        execution_inputs_dict = {input.name: input.value for input in
-                                 self.execution.inputs.values()}
+        execution_inputs_dict = models.Parameter.unwrap_dict(self.execution.inputs)
         self._tasks_graph = workflow_fn(ctx=workflow_context, **execution_inputs_dict)
 
         self._engine = Engine(

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/141dbb05/aria/orchestrator/workflows/executor/celery.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/celery.py b/aria/orchestrator/workflows/executor/celery.py
index baa0375..3c98197 100644
--- a/aria/orchestrator/workflows/executor/celery.py
+++ b/aria/orchestrator/workflows/executor/celery.py
@@ -22,6 +22,8 @@ import Queue
 
 from aria.orchestrator.workflows.executor import BaseExecutor
 
+from ....modeling.models import Parameter
+
 
 class CeleryExecutor(BaseExecutor):
     """
@@ -44,7 +46,7 @@ class CeleryExecutor(BaseExecutor):
 
     def execute(self, task):
         self._tasks[task.id] = task
-        inputs = dict((k, v.value) for k, v in task.inputs.iteritems())
+        inputs = Parameter.unwrap_dict(task.inputs.iteritems())
         inputs['ctx'] = task.context
         self._results[task.id] = self._app.send_task(
             task.operation_mapping,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/141dbb05/aria/orchestrator/workflows/executor/process.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py
index 6397e88..dc369ab 100644
--- a/aria/orchestrator/workflows/executor/process.py
+++ b/aria/orchestrator/workflows/executor/process.py
@@ -48,6 +48,7 @@ from aria.utils import exceptions
 from aria.orchestrator.workflows.executor import base
 from aria.storage import instrumentation
 from aria.modeling import types as modeling_types
+from aria.modeling.models import Parameter
 
 _IS_WIN = os.name == 'nt'
 
@@ -148,7 +149,7 @@ class ProcessExecutor(base.BaseExecutor):
         return {
             'task_id': task.id,
             'implementation': task.implementation,
-            'operation_inputs': dict((k, v.value) for k, v in task.inputs.iteritems()),
+            'operation_inputs': Parameter.unwrap_dict(task.inputs),
             'port': self._server_port,
             'context': task.context.serialization_dict,
         }

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/141dbb05/aria/orchestrator/workflows/executor/thread.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/thread.py b/aria/orchestrator/workflows/executor/thread.py
index 1a49af5..8b443cc 100644
--- a/aria/orchestrator/workflows/executor/thread.py
+++ b/aria/orchestrator/workflows/executor/thread.py
@@ -21,7 +21,9 @@ import Queue
 import threading
 
 from aria.utils import imports
+
 from .base import BaseExecutor
+from ....modeling.models import Parameter
 
 
 class ThreadExecutor(BaseExecutor):
@@ -58,7 +60,7 @@ class ThreadExecutor(BaseExecutor):
                 self._task_started(task)
                 try:
                     task_func = imports.load_attribute(task.implementation)
-                    inputs = dict((k, v.value) for k, v in task.inputs.iteritems())
+                    inputs = Parameter.unwrap_dict(task.inputs)
                     task_func(ctx=task.context, **inputs)
                     self._task_succeeded(task)
                 except BaseException as e:


[17/24] incubator-ariatosca git commit: moved service inputs population earlier in the process

Posted by ra...@apache.org.
moved service inputs population earlier in the process


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/349d4d05
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/349d4d05
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/349d4d05

Branch: refs/heads/ARIA-48-aria-cli
Commit: 349d4d05c3faf725da95e00404f346272d388393
Parents: 72c9969
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Mon Apr 3 01:01:00 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/core.py                      | 6 +-----
 aria/modeling/service_template.py | 8 +++++---
 2 files changed, 6 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/349d4d05/aria/core.py
----------------------------------------------------------------------
diff --git a/aria/core.py b/aria/core.py
index 5368f9c..6be6fd1 100644
--- a/aria/core.py
+++ b/aria/core.py
@@ -74,11 +74,7 @@ class Core(object):
         # creating an empty ConsumptionContext, initiating a threadlocal context
         ConsumptionContext()
         with self.model_storage._all_api_kwargs['session'].no_autoflush:
-            service = service_template.instantiate(None)
-
-        template_inputs = service_template.inputs
-        service.inputs = modeling_utils.create_inputs(inputs, template_inputs)
-        # TODO: now that we have inputs, we should scan properties and inputs and evaluate functions
+            service = service_template.instantiate(None, inputs)
 
         service.name = service_name or '{0}_{1}'.format(service_template_name, service.id)
         self.model_storage.service.put(service)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/349d4d05/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
index 86cf81a..1c6d393 100644
--- a/aria/modeling/service_template.py
+++ b/aria/modeling/service_template.py
@@ -279,7 +279,7 @@ class ServiceTemplateBase(TemplateModelMixin):
             ('interface_types', formatting.as_raw(self.interface_types)),
             ('artifact_types', formatting.as_raw(self.artifact_types))))
 
-    def instantiate(self, container):
+    def instantiate(self, container, inputs=None):
         from . import models
         context = ConsumptionContext.get_thread_local()
         now = datetime.now()
@@ -287,9 +287,12 @@ class ServiceTemplateBase(TemplateModelMixin):
                                  updated_at=now,
                                  description=deepcopy_with_locators(self.description),
                                  service_template=self)
-
         context.modeling.instance = service
 
+        # utils.instantiate_dict(self, service.inputs, self.inputs)
+        service.inputs = utils.create_inputs(inputs or {}, self.inputs)
+        # TODO: now that we have inputs, we should scan properties and inputs and evaluate functions
+
         utils.instantiate_dict(self, service.meta_data, self.meta_data)
 
         for node_template in self.node_templates.itervalues():
@@ -305,7 +308,6 @@ class ServiceTemplateBase(TemplateModelMixin):
         if self.substitution_template is not None:
             service.substitution = self.substitution_template.instantiate(container)
 
-        utils.instantiate_dict(self, service.inputs, self.inputs)
         utils.instantiate_dict(self, service.outputs, self.outputs)
 
         return service
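
Populating the inputs inside instantiate() means they already exist while the rest of the template (nodes, groups, outputs) is being instantiated, which is what the TODO about evaluating functions is getting at. As a rough mental model only (this is an assumption about what modeling.utils.create_inputs does, not its actual code), the call merges the caller-supplied values over the inputs declared on the template:

    # Assumption-level sketch, NOT the real modeling.utils.create_inputs:
    def create_inputs_sketch(supplied_inputs, template_inputs):
        # start from the inputs declared on the template (each carrying a default) ...
        merged = dict((name, param.value) for name, param in template_inputs.items())
        # ... and overlay whatever the caller supplied
        merged.update(supplied_inputs or {})
        return merged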


[20/24] incubator-ariatosca git commit: Fix issue where a service name was passed instead of a service id

Posted by ra...@apache.org.
Fix issue where a service name was passed instead of a service id


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/a26f245e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/a26f245e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/a26f245e

Branch: refs/heads/ARIA-48-aria-cli
Commit: a26f245e83ac9e00d6ea5d9dd1248cf461bd3888
Parents: 375228b
Author: Avia Efrat <av...@gigaspaces.com>
Authored: Mon Apr 3 17:09:25 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/cli/commands/services.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/a26f245e/aria/cli/commands/services.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/services.py b/aria/cli/commands/services.py
index e73c43b..9f9d104 100644
--- a/aria/cli/commands/services.py
+++ b/aria/cli/commands/services.py
@@ -97,7 +97,8 @@ def create(service_template_name,
 
     try:
         core = Core(model_storage, resource_storage, plugin_manager)
-        service = core.create_service(service_template_name, inputs, service_name)
+        service_template = model_storage.service_template.get_by_name(service_template_name)
+        service = core.create_service(service_template.id, inputs, service_name)
     except storage_exceptions.StorageError as e:
         handle_storage_exception(e, 'service', service_name)
     except AriaException as e:


[02/24] incubator-ariatosca git commit: ARIA-132-fixed-support-for-cascading-deletion

Posted by ra...@apache.org.
ARIA-132-fixed-support-for-cascading-deletion


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/e7ffc735
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/e7ffc735
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/e7ffc735

Branch: refs/heads/ARIA-48-aria-cli
Commit: e7ffc7353dd2f23fdffde22bdb2ffac20542d601
Parents: 369323b
Author: max-orlov <ma...@gigaspaces.com>
Authored: Tue Apr 4 12:22:12 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Tue Apr 4 12:22:12 2017 +0300

----------------------------------------------------------------------
 aria/modeling/relationship.py       |  3 ++-
 tests/storage/test_model_storage.py | 13 +++++++++++++
 2 files changed, 15 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/e7ffc735/aria/modeling/relationship.py
----------------------------------------------------------------------
diff --git a/aria/modeling/relationship.py b/aria/modeling/relationship.py
index ac1de28..e6830b8 100644
--- a/aria/modeling/relationship.py
+++ b/aria/modeling/relationship.py
@@ -193,7 +193,8 @@ def one_to_many(model_class,
         child_table,
         back_populates=back_populates,
         other_fk=child_fk,
-        dict_key=dict_key)
+        dict_key=dict_key,
+        relationship_kwargs=dict(cascade='all'))
 
 
 def many_to_one(model_class,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/e7ffc735/tests/storage/test_model_storage.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_model_storage.py b/tests/storage/test_model_storage.py
index fa57e6f..e4f3eba 100644
--- a/tests/storage/test_model_storage.py
+++ b/tests/storage/test_model_storage.py
@@ -108,6 +108,19 @@ def test_application_storage_factory():
     tests_storage.release_sqlite_storage(storage)
 
 
+def test_cascade_deletion(context):
+    service = context.model.service.list()[0]
+
+    assert len(context.model.service_template.list()) == 1
+    assert len(service.nodes) == len(context.model.node.list()) == 2
+
+    context.model.service.delete(service)
+
+    assert len(context.model.service_template.list()) == 1
+    assert len(context.model.service.list()) == 0
+    assert len(context.model.node.list()) == 0
+
+
 @pytest.fixture
 def context(tmpdir):
     result = mock.context.simple(str(tmpdir))
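
The one-line change passes cascade='all' through to SQLAlchemy's relationship(), so deleting a parent row also deletes its dependents instead of orphaning them, which is exactly what the new test asserts. A standalone SQLAlchemy sketch of the effect (the Service/Node classes below are minimal stand-ins, not ARIA's models):

    from sqlalchemy import Column, ForeignKey, Integer, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship, sessionmaker

    Base = declarative_base()

    class Service(Base):
        __tablename__ = 'service'
        id = Column(Integer, primary_key=True)
        nodes = relationship('Node', cascade='all')   # the flag this commit adds

    class Node(Base):
        __tablename__ = 'node'
        id = Column(Integer, primary_key=True)
        service_fk = Column(Integer, ForeignKey('service.id'))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    service = Service(nodes=[Node(), Node()])
    session.add(service)
    session.commit()

    session.delete(service)                      # with cascade='all' ...
    session.commit()
    assert session.query(Node).count() == 0      # ... the nodes are deleted too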


[16/24] incubator-ariatosca git commit: Fix issue in creating services with existing name

Posted by ra...@apache.org.
Fix issue in creating services with existing name

Up until now, creating a service with a name that already existed
resulted in a null service being stored in the storage. I noticed
this while thinking about tests for the `services list` command.


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/72c9969c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/72c9969c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/72c9969c

Branch: refs/heads/ARIA-48-aria-cli
Commit: 72c9969c2f4124f59a3f5b787393b1f8bece248d
Parents: bf0adb5
Author: Avia Efrat <av...@gigaspaces.com>
Authored: Sun Apr 2 23:27:55 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/core.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72c9969c/aria/core.py
----------------------------------------------------------------------
diff --git a/aria/core.py b/aria/core.py
index f2dfb09..5368f9c 100644
--- a/aria/core.py
+++ b/aria/core.py
@@ -80,10 +80,8 @@ class Core(object):
         service.inputs = modeling_utils.create_inputs(inputs, template_inputs)
         # TODO: now that we have inputs, we should scan properties and inputs and evaluate functions
 
-        # first put the service model so it could have an id, as fallback for setting its name
-        self.model_storage.service.put(service)
         service.name = service_name or '{0}_{1}'.format(service_template_name, service.id)
-        self.model_storage.service.update(service)
+        self.model_storage.service.put(service)
         return service
 
     def delete_service(self, service_name, force=False):


[18/24] incubator-ariatosca git commit: fix handling storage errors

Posted by ra...@apache.org.
fix handling storage errors

We were accessing the wrong attribute of the exception.


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/84b811c3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/84b811c3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/84b811c3

Branch: refs/heads/ARIA-48-aria-cli
Commit: 84b811c38843b808277aabb16a76f5809b81a6f8
Parents: a26f245
Author: Avia Efrat <av...@gigaspaces.com>
Authored: Mon Apr 3 17:10:17 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/cli/utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/84b811c3/aria/cli/utils.py
----------------------------------------------------------------------
diff --git a/aria/cli/utils.py b/aria/cli/utils.py
index fff8e3a..950c295 100644
--- a/aria/cli/utils.py
+++ b/aria/cli/utils.py
@@ -153,7 +153,7 @@ def generate_progress_handler(file_path, action='', max_bar_length=80):
 
 
 def handle_storage_exception(e, model_class, name):
-    if 'UNIQUE constraint failed' in e.msg:
+    if 'UNIQUE constraint failed' in e.message:
         msg = 'Could not store {model_class} `{name}`\n' \
               'There already a exists a {model_class} with the same name' \
               .format(model_class=model_class, name=name)
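
For context: in Python 2, an exception raised with a single argument exposes its text via the (deprecated but still present) BaseException.message attribute, while ordinary exceptions have no msg attribute, which is the AttributeError this fixes. A quick illustration (the exception text below is made up):

    try:
        raise Exception('UNIQUE constraint failed: services.name')
    except Exception as e:
        assert 'UNIQUE constraint failed' in e.message   # .message exists on Python 2
        # e.msg would raise AttributeError, which is what happened before this fix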


[24/24] incubator-ariatosca git commit: few deletion-commands related fixes

Posted by ra...@apache.org.
few deletion-commands related fixes


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/108d8a69
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/108d8a69
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/108d8a69

Branch: refs/heads/ARIA-48-aria-cli
Commit: 108d8a69bb97f44da906cd782fb262929cc22b0f
Parents: 84b811c
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Mon Apr 3 23:28:11 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/cli/commands/service_templates.py | 13 +++++++------
 aria/cli/commands/services.py          |  3 ++-
 aria/core.py                           |  4 ++--
 3 files changed, 11 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/108d8a69/aria/cli/commands/service_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/service_templates.py b/aria/cli/commands/service_templates.py
index 6c6224b..a324131 100644
--- a/aria/cli/commands/service_templates.py
+++ b/aria/cli/commands/service_templates.py
@@ -126,23 +126,24 @@ def store(service_template_path, service_template_name, model_storage, resource_
 
 @service_templates.command(name='delete',
                            short_help='Delete a service template')
-@aria.argument('service-template-id')
+@aria.argument('service-template-name')
 @aria.options.verbose()
 @aria.pass_model_storage
 @aria.pass_resource_storage
 @aria.pass_plugin_manager
 @aria.pass_logger
-def delete(service_template_id, model_storage, resource_storage, plugin_manager, logger):
+def delete(service_template_name, model_storage, resource_storage, plugin_manager, logger):
     """Delete a service template
-    `SERVICE_TEMPLATE_ID` is the id of the service template to delete.
+    `SERVICE_TEMPLATE_NAME` is the name of the service template to delete.
     """
-    logger.info('Deleting service template {0}...'.format(service_template_id))
+    logger.info('Deleting service template {0}...'.format(service_template_name))
+    service_template = model_storage.service_template.get_by_name(service_template_name)
     core = Core(model_storage, resource_storage, plugin_manager)
     try:
-        core.delete_service_template(service_template_id)
+        core.delete_service_template(service_template.id)
     except storage_exceptions.NotFoundError:
         raise AriaCliError()
-    logger.info('Service template {0} deleted'.format(service_template_id))
+    logger.info('Service template {0} deleted'.format(service_template_name))
 
 
 @service_templates.command(name='inputs',

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/108d8a69/aria/cli/commands/services.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/services.py b/aria/cli/commands/services.py
index 9f9d104..e09db21 100644
--- a/aria/cli/commands/services.py
+++ b/aria/cli/commands/services.py
@@ -123,8 +123,9 @@ def delete(service_name, force, model_storage, resource_storage, plugin_manager,
     `SERVICE_NAME` is the name of the service to delete.
     """
     logger.info('Deleting service {0}...'.format(service_name))
+    service = model_storage.service.get_by_name(service_name)
     core = Core(model_storage, resource_storage, plugin_manager)
-    core.delete_service(service_name, force=force)
+    core.delete_service(service.id, force=force)
     logger.info('Service {0} deleted'.format(service_name))
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/108d8a69/aria/core.py
----------------------------------------------------------------------
diff --git a/aria/core.py b/aria/core.py
index a589a8e..1fd3cf7 100644
--- a/aria/core.py
+++ b/aria/core.py
@@ -88,11 +88,11 @@ class Core(object):
                 "Active execution id: {1}".format(service.name, active_executions[0].id))
 
         if not force:
-            available_nodes = [n for n in service.nodes.values() if n.is_available()]
+            available_nodes = [str(n.id) for n in service.nodes.values() if n.is_available()]
             if available_nodes:
                 raise exceptions.DependentAvailableNodesError(
                     "Can't delete service {0} - there are available nodes for this service. "
-                    "Available node ids: {1}".format(service.name, available_nodes))
+                    "Available node ids: {1}".format(service.name, ', '.join(available_nodes)))
 
         self.model_storage.service.delete(service)
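
The core.py tweak above formats the error with a comma-joined list of node ids rather than interpolating the model objects themselves. In miniature (the ids are made up):

    available_nodes = [str(node_id) for node_id in (7, 9, 12)]
    print("Available node ids: {0}".format(', '.join(available_nodes)))
    # prints: Available node ids: 7, 9, 12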
 


[21/24] incubator-ariatosca git commit: core.py now works with ids only again; removed unnecessary NotFoundError exception clauses from cli

Posted by ra...@apache.org.
core.py now works with ids only again; removed unnecessary NotFoundError exception clauses from cli


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/38d58d4d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/38d58d4d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/38d58d4d

Branch: refs/heads/ARIA-48-aria-cli
Commit: 38d58d4d28cd7be21c152de767ccc567622fb263
Parents: 141dbb0
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Mon Apr 3 15:10:51 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/cli/commands/executions.py        | 16 ++++------------
 aria/cli/commands/logs.py              | 16 +++-------------
 aria/cli/commands/node_templates.py    | 16 ++++------------
 aria/cli/commands/nodes.py             | 14 +++-----------
 aria/cli/commands/service_templates.py |  9 ++++++---
 aria/cli/commands/services.py          |  9 ++-------
 aria/cli/commands/workflows.py         | 21 ++++++++-------------
 aria/core.py                           | 16 +++++++---------
 8 files changed, 37 insertions(+), 80 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/38d58d4d/aria/cli/commands/executions.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/executions.py b/aria/cli/commands/executions.py
index fd47eb3..6d8b949 100644
--- a/aria/cli/commands/executions.py
+++ b/aria/cli/commands/executions.py
@@ -16,9 +16,7 @@
 from .. import utils
 from ..table import print_data
 from ..cli import aria
-from ..exceptions import AriaCliError
 from ...modeling.models import Execution
-from ...storage import exceptions as storage_exceptions
 from ...orchestrator.workflow_runner import WorkflowRunner
 from ...utils import formatting
 from ...utils import threading
@@ -46,11 +44,8 @@ def show(execution_id, model_storage, logger):
 
     `EXECUTION_ID` is the execution to get information on.
     """
-    try:
-        logger.info('Showing execution {0}'.format(execution_id))
-        execution = model_storage.execution.get(execution_id)
-    except storage_exceptions.NotFoundError:
-        raise AriaCliError('Execution {0} not found'.format(execution_id))
+    logger.info('Showing execution {0}'.format(execution_id))
+    execution = model_storage.execution.get(execution_id)
 
     print_data(EXECUTION_COLUMNS, execution.to_dict(), 'Execution:', max_width=50)
 
@@ -88,11 +83,8 @@ def list(service_name,
     if service_name:
         logger.info('Listing executions for service {0}...'.format(
             service_name))
-        try:
-            service = model_storage.service.get_by_name(service_name)
-            filters = dict(service=service)
-        except storage_exceptions.NotFoundError:
-            raise AriaCliError('Service {0} does not exist'.format(service_name))
+        service = model_storage.service.get_by_name(service_name)
+        filters = dict(service=service)
     else:
         logger.info('Listing all executions...')
         filters = {}

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/38d58d4d/aria/cli/commands/logs.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/logs.py b/aria/cli/commands/logs.py
index 6890fb4..3662063 100644
--- a/aria/cli/commands/logs.py
+++ b/aria/cli/commands/logs.py
@@ -15,8 +15,6 @@
 
 from .. import utils
 from ..cli import aria
-from ..exceptions import AriaCliError
-from ...storage import exceptions as storage_exceptions
 
 
 @aria.group(name='logs')
@@ -42,12 +40,8 @@ def list(execution_id,
     """
     logger.info('Listing logs for execution id {0}'.format(execution_id))
     # events_logger = get_events_logger(json_output)
-    try:
-        logs = model_storage.log.list(filters=dict(execution_fk=execution_id),
-                                      sort=utils.storage_sort_param('created_at', False))
-    except storage_exceptions.NotFoundError:
-        raise AriaCliError('Execution {0} does not exist'.format(execution_id))
-
+    logs = model_storage.log.list(filters=dict(execution_fk=execution_id),
+                                  sort=utils.storage_sort_param('created_at', False))
     # TODO: print logs nicely
     if logs:
         for log in logs:
@@ -68,11 +62,7 @@ def delete(execution_id, model_storage, logger):
     `EXECUTION_ID` is the execution logs to delete.
     """
     logger.info('Deleting logs for execution id {0}'.format(execution_id))
-    try:
-        logs = model_storage.log.list(filters=dict(execution_fk=execution_id))
-    except storage_exceptions.NotFoundError:
-        raise AriaCliError('Execution {0} does not exist'.format(execution_id))
-
+    logs = model_storage.log.list(filters=dict(execution_fk=execution_id))
     for log in logs:
         model_storage.log.delete(log)
     logger.info('Deleted logs for execution id {0}'.format(execution_id))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/38d58d4d/aria/cli/commands/node_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/node_templates.py b/aria/cli/commands/node_templates.py
index 0910ded..b97d4a2 100644
--- a/aria/cli/commands/node_templates.py
+++ b/aria/cli/commands/node_templates.py
@@ -16,8 +16,6 @@
 from ..table import print_data
 from .. import utils
 from ..cli import aria
-from ..exceptions import AriaCliError
-from ...storage import exceptions as storage_exceptions
 
 
 NODE_TEMPLATE_COLUMNS = ['id', 'name', 'description', 'service_template_name', 'type_name']
@@ -46,11 +44,8 @@ def show(node_template_id, model_storage, logger):
     # logger.info('Showing node template {0} for service template {1}'.format(
     #     node_template_id, service_template_name))
     logger.info('Showing node template {0}'.format(node_template_id))
-    try:
-        #TODO get node template of a specific service template instead?
-        node_template = model_storage.node_template.get(node_template_id)
-    except storage_exceptions.NotFoundError:
-        raise AriaCliError('Node template {0} was not found'.format(node_template_id))
+    #TODO get node template of a specific service template instead?
+    node_template = model_storage.node_template.get(node_template_id)
 
     print_data(NODE_TEMPLATE_COLUMNS, node_template.to_dict(), 'Node template:', max_width=50)
 
@@ -88,11 +83,8 @@ def list(service_template_name, sort_by, descending, model_storage, logger):
     if service_template_name:
         logger.info('Listing node templates for service template {0}...'.format(
             service_template_name))
-        try:
-            service_template = model_storage.service_template.get_by_name(service_template_name)
-            filters = dict(service_template=service_template)
-        except storage_exceptions.NotFoundException:
-            raise AriaCliError('Service template {0} does not exist'.format(service_template_name))
+        service_template = model_storage.service_template.get_by_name(service_template_name)
+        filters = dict(service_template=service_template)
     else:
         logger.info('Listing all node templates...')
         filters = {}

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/38d58d4d/aria/cli/commands/nodes.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/nodes.py b/aria/cli/commands/nodes.py
index 8600ff2..0c9c4e5 100644
--- a/aria/cli/commands/nodes.py
+++ b/aria/cli/commands/nodes.py
@@ -16,8 +16,6 @@
 from .. import utils
 from ..cli import aria
 from ..table import print_data
-from ..exceptions import AriaCliError
-from ...storage import exceptions as storage_exceptions
 
 
 NODE_COLUMNS = ['id', 'name', 'service_name', 'node_template_name', 'state']
@@ -43,10 +41,7 @@ def show(node_id, model_storage, logger):
     `NODE_ID` is the id of the node to get information on.
     """
     logger.info('Showing node {0}'.format(node_id))
-    try:
-        node = model_storage.node.get(node_id)
-    except storage_exceptions.NotFoundError:
-        raise AriaCliError('Node {0} not found'.format(node_id))
+    node = model_storage.node.get(node_id)
 
     print_data(NODE_COLUMNS, node.to_dict(), 'Node:', 50)
 
@@ -80,11 +75,8 @@ def list(service_name,
     """
     if service_name:
         logger.info('Listing nodes for service {0}...'.format(service_name))
-        try:
-            service = model_storage.service.get_by_name(service_name)
-            filters = dict(service=service)
-        except storage_exceptions.NotFoundError:
-            raise AriaCliError('Service {0} does not exist'.format(service_name))
+        service = model_storage.service.get_by_name(service_name)
+        filters = dict(service=service)
     else:
         logger.info('Listing all nodes...')
         filters = {}

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/38d58d4d/aria/cli/commands/service_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/service_templates.py b/aria/cli/commands/service_templates.py
index af6252a..79f8012 100644
--- a/aria/cli/commands/service_templates.py
+++ b/aria/cli/commands/service_templates.py
@@ -26,7 +26,7 @@ from ..table import print_data
 from ..exceptions import AriaCliError
 from ...core import Core
 from ...exceptions import AriaException
-from ...storage.exceptions import StorageError
+from ...storage import exceptions as storage_exceptions
 
 
 DESCRIPTION_LIMIT = 20
@@ -119,7 +119,7 @@ def store(service_template_path, service_template_name, model_storage, resource_
         core.create_service_template(service_template_path,
                                      os.path.dirname(service_template_path),
                                      service_template_name)
-    except StorageError:
+    except storage_exceptions.StorageError:
         logger.info(TWO_MODELS_WITH_THE_SAME_NAME_ERROR_TEMPLATE.format(
             model_class='service template',
             name=service_template_name))
@@ -143,7 +143,10 @@ def delete(service_template_id, model_storage, resource_storage, plugin_manager,
     """
     logger.info('Deleting service template {0}...'.format(service_template_id))
     core = Core(model_storage, resource_storage, plugin_manager)
-    core.delete_service_template(service_template_id)
+    try:
+        core.delete_service_template(service_template_id)
+    except storage_exceptions.NotFoundError:
+        raise AriaCliError()
     logger.info('Service template {0} deleted'.format(service_template_id))
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/38d58d4d/aria/cli/commands/services.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/services.py b/aria/cli/commands/services.py
index 00b093c..28fb499 100644
--- a/aria/cli/commands/services.py
+++ b/aria/cli/commands/services.py
@@ -59,11 +59,8 @@ def list(service_template_name,
     if service_template_name:
         logger.info('Listing services for service template {0}...'.format(
             service_template_name))
-        try:
-            service_template = model_storage.service_template.get(service_template_name)
-            filters = dict(service_template=service_template)
-        except storage_exceptions.NotFoundError:
-            raise AriaCliError('Service template {0} does not exist'.format(service_template_name))
+        service_template = model_storage.service_template.get(service_template_name)
+        filters = dict(service_template=service_template)
     else:
         logger.info('Listing all service...')
         filters = {}
@@ -102,8 +99,6 @@ def create(service_template_name,
     try:
         core = Core(model_storage, resource_storage, plugin_manager)
         service = core.create_service(service_template_name, inputs, service_name)
-    except storage_exceptions.NotFoundError:
-        raise AriaCliError('Service template {0} does not exist'.format(service_template_name))
     except storage_exceptions.StorageError:
         logger.info(TWO_MODELS_WITH_THE_SAME_NAME_ERROR_TEMPLATE.format(
             model_class='service',

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/38d58d4d/aria/cli/commands/workflows.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/workflows.py b/aria/cli/commands/workflows.py
index 2180168..efa373e 100644
--- a/aria/cli/commands/workflows.py
+++ b/aria/cli/commands/workflows.py
@@ -14,10 +14,8 @@
 # limitations under the License.
 
 from ..table import print_data
-from .. import utils
 from ..cli import aria
 from ..exceptions import AriaCliError
-from ...storage.exceptions import StorageError
 
 WORKFLOW_COLUMNS = ['name', 'service_template_name', 'service_name']
 
@@ -41,17 +39,14 @@ def show(workflow_name, service_name, model_storage, logger):
 
     `WORKFLOW_NAME` is the name of the workflow to get information on.
     """
-    try:
-        logger.info('Retrieving workflow {0} for service {1}'.format(
-            workflow_name, service_name))
-        service = model_storage.service.get(service_name)
-        workflow = next((wf for wf in service.workflows if
-                         wf.name == workflow_name), None)
-        if not workflow:
-            raise AriaCliError(
-                'Workflow {0} not found for service {1}'.format(workflow_name, service_name))
-    except StorageError:
-        raise AriaCliError('service {0} not found'.format(service_name))
+    logger.info('Retrieving workflow {0} for service {1}'.format(
+        workflow_name, service_name))
+    service = model_storage.service.get(service_name)
+    workflow = next((wf for wf in service.workflows if
+                     wf.name == workflow_name), None)
+    if not workflow:
+        raise AriaCliError(
+            'Workflow {0} not found for service {1}'.format(workflow_name, service_name))
 
     defaults = {
         'service_template_name': service.service_template_name,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/38d58d4d/aria/core.py
----------------------------------------------------------------------
diff --git a/aria/core.py b/aria/core.py
index 6be6fd1..a589a8e 100644
--- a/aria/core.py
+++ b/aria/core.py
@@ -14,8 +14,6 @@
 # limitations under the License.
 
 from . import exceptions
-from .modeling import models
-from .modeling import utils as modeling_utils
 from .parser.consumption import (
     ConsumptionContext,
     ConsumerChain,
@@ -68,33 +66,33 @@ class Core(object):
         self.model_storage.service_template.delete(service_template)
         self.resource_storage.service_template.delete(entry_id=str(service_template.id))
 
-    def create_service(self, service_template_name, inputs, service_name=None):
-        service_template = self.model_storage.service_template.get_by_name(service_template_name)
+    def create_service(self, service_template_id, inputs, service_name=None):
+        service_template = self.model_storage.service_template.get(service_template_id)
 
         # creating an empty ConsumptionContext, initiating a threadlocal context
         ConsumptionContext()
         with self.model_storage._all_api_kwargs['session'].no_autoflush:
             service = service_template.instantiate(None, inputs)
 
-        service.name = service_name or '{0}_{1}'.format(service_template_name, service.id)
+        service.name = service_name or '{0}_{1}'.format(service_template.name, service.id)
         self.model_storage.service.put(service)
         return service
 
-    def delete_service(self, service_name, force=False):
-        service = self.model_storage.service.get_by_name(service_name)
+    def delete_service(self, service_id, force=False):
+        service = self.model_storage.service.get(service_id)
 
         active_executions = [e for e in service.executions if e.is_active()]
         if active_executions:
             raise exceptions.DependentActiveExecutionsError(
                 "Can't delete service {0} - there is an active execution for this service. "
-                "Active execution id: {1}".format(service_name, active_executions[0].id))
+                "Active execution id: {1}".format(service.name, active_executions[0].id))
 
         if not force:
             available_nodes = [n for n in service.nodes.values() if n.is_available()]
             if available_nodes:
                 raise exceptions.DependentAvailableNodesError(
                     "Can't delete service {0} - there are available nodes for this service. "
-                    "Available node ids: {1}".format(service_name, available_nodes))
+                    "Available node ids: {1}".format(service.name, available_nodes))
 
         self.model_storage.service.delete(service)
 


[07/24] incubator-ariatosca git commit: ARIA-48 cli

Posted by ra...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/orchestrator/context/common.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/common.py b/aria/orchestrator/context/common.py
index 127641f..f631e79 100644
--- a/aria/orchestrator/context/common.py
+++ b/aria/orchestrator/context/common.py
@@ -68,17 +68,6 @@ class BaseContext(object):
         self._workdir = workdir
         self.logger = None
 
-    def _create_execution(self):
-        now = datetime.utcnow()
-        execution = self.model.execution.model_cls(
-            service_instance=self.service_instance,
-            workflow_name=self._workflow_name,
-            created_at=now,
-            parameters=self.parameters,
-        )
-        self.model.execution.put(execution)
-        return execution.id
-
     def _register_logger(self, logger_name=None, level=None, task_id=None):
         self.logger = self.PrefixedLogger(logging.getLogger(logger_name or self.__class__.__name__),
                                           self.logging_id,
@@ -168,13 +157,13 @@ class BaseContext(object):
         Download a blueprint resource from the resource storage
         """
         try:
-            self.resource.deployment.download(entry_id=str(self.service.id),
-                                              destination=destination,
-                                              path=path)
+            self.resource.service.download(entry_id=str(self.service.id),
+                                           destination=destination,
+                                           path=path)
         except exceptions.StorageError:
-            self.resource.blueprint.download(entry_id=str(self.service_template.id),
-                                             destination=destination,
-                                             path=path)
+            self.resource.service_template.download(entry_id=str(self.service_template.id),
+                                                    destination=destination,
+                                                    path=path)
 
     def download_resource_and_render(self, destination, path=None, variables=None):
         """
@@ -193,9 +182,9 @@ class BaseContext(object):
         Read a deployment resource as string from the resource storage
         """
         try:
-            return self.resource.deployment.read(entry_id=str(self.service.id), path=path)
+            return self.resource.service.read(entry_id=str(self.service.id), path=path)
         except exceptions.StorageError:
-            return self.resource.deployment.read(entry_id=str(self.service_template.id), path=path)
+            return self.resource.service_template.read(entry_id=str(self.service_template.id), path=path)
 
     def get_resource_and_render(self, path=None, variables=None):
         """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/orchestrator/context/workflow.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/workflow.py b/aria/orchestrator/context/workflow.py
index 5f86d9d..bc9f653 100644
--- a/aria/orchestrator/context/workflow.py
+++ b/aria/orchestrator/context/workflow.py
@@ -31,11 +31,11 @@ class WorkflowContext(BaseContext):
     """
     def __init__(self,
                  workflow_name,
+                 execution_id,
                  parameters=None,
                  task_max_attempts=1,
                  task_retry_interval=0,
                  task_ignore_failure=False,
-                 execution_id=None,
                  *args, **kwargs):
         super(WorkflowContext, self).__init__(*args, **kwargs)
         self._workflow_name = workflow_name
@@ -43,28 +43,15 @@ class WorkflowContext(BaseContext):
         self._task_max_attempts = task_max_attempts
         self._task_retry_interval = task_retry_interval
         self._task_ignore_failure = task_ignore_failure
-        # TODO: execution creation should happen somewhere else
-        # should be moved there, when such logical place exists
-        self._execution_id = execution_id or self._create_execution()
+        self._execution_id = execution_id
         self._register_logger()
 
     def __repr__(self):
         return (
             '{name}(deployment_id={self._service_id}, '
-            'workflow_name={self._workflow_name}'.format(
+            'workflow_name={self._workflow_name}, execution_id={self._execution_id})'.format(
                 name=self.__class__.__name__, self=self))
 
-    def _create_execution(self):
-        now = datetime.utcnow()
-        execution = self.model.execution.model_cls(
-            service=self.service,
-            workflow_name=self._workflow_name,
-            created_at=now,
-            parameters=self.parameters,
-        )
-        self.model.execution.put(execution)
-        return execution.id
-
     @property
     def logging_id(self):
         return '{0}[{1}]'.format(self._workflow_name, self._execution_id)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/orchestrator/execution_plugin/ctx_proxy/server.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/ctx_proxy/server.py b/aria/orchestrator/execution_plugin/ctx_proxy/server.py
index 817d064..52a5312 100644
--- a/aria/orchestrator/execution_plugin/ctx_proxy/server.py
+++ b/aria/orchestrator/execution_plugin/ctx_proxy/server.py
@@ -24,6 +24,7 @@ import StringIO
 import wsgiref.simple_server
 
 import bottle
+from aria import modeling
 
 from .. import exceptions
 
@@ -111,7 +112,7 @@ class CtxProxy(object):
             result = json.dumps({
                 'type': result_type,
                 'payload': payload
-            })
+            }, cls=modeling.utils.ModelJSONEncoder)
         except Exception as e:
             traceback_out = StringIO.StringIO()
             traceback.print_exc(file=traceback_out)
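
Passing a custom encoder class to json.dumps is presumably what lets the proxy serialize payloads that contain model objects. In miniature, the cls= hook works like the sketch below (the encoder and parameter stand-in are illustrative, not ARIA's ModelJSONEncoder):

    import json


    class ValueEncoder(json.JSONEncoder):
        def default(self, obj):
            # fall back to a wrapped ``value`` for objects json cannot handle natively
            if hasattr(obj, 'value'):
                return obj.value
            return json.JSONEncoder.default(self, obj)


    class FakeParameter(object):
        def __init__(self, value):
            self.value = value


    print(json.dumps({'type': 'result', 'payload': FakeParameter(42)}, cls=ValueEncoder))
    # prints something like: {"type": "result", "payload": 42}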

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/orchestrator/runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/runner.py b/aria/orchestrator/runner.py
deleted file mode 100644
index f1633fa..0000000
--- a/aria/orchestrator/runner.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Workflow runner
-"""
-
-import tempfile
-import os
-
-from .context.workflow import WorkflowContext
-from .workflows.core.engine import Engine
-from .workflows.executor.thread import ThreadExecutor
-from ..storage import (
-    sql_mapi,
-    filesystem_rapi,
-)
-from .. import (
-    application_model_storage,
-    application_resource_storage
-)
-
-
-class Runner(object):
-    """
-    Runs workflows on a deployment. By default uses temporary storage (either on disk or in memory)
-    but can also be used with existing storage.
-
-    Handles the initialization of the storage engine and provides convenience methods for
-    sub-classes to create tasks.
-
-    :param path: path to Sqlite database file; use '' (the default) to use a temporary file,
-                 and None to use an in-memory database
-    :type path: string
-    """
-
-    def __init__(self, workflow_name, workflow_fn, inputs, initialize_model_storage_fn,
-                 service_id_fn, storage_path='', is_storage_temporary=True):
-        if storage_path == '':
-            # Temporary file storage
-            the_file, storage_path = tempfile.mkstemp(suffix='.db', prefix='aria-')
-            os.close(the_file)
-
-        self._storage_path = storage_path
-        self._storage_dir = os.path.dirname(storage_path)
-        self._storage_name = os.path.basename(storage_path)
-        self._is_storage_temporary = is_storage_temporary
-
-        workflow_context = self.create_workflow_context(workflow_name, initialize_model_storage_fn,
-                                                        service_id_fn)
-
-        tasks_graph = workflow_fn(ctx=workflow_context, **inputs)
-
-        self._engine = Engine(
-            executor=ThreadExecutor(),
-            workflow_context=workflow_context,
-            tasks_graph=tasks_graph)
-
-    def run(self):
-        try:
-            self._engine.execute()
-        finally:
-            self.cleanup()
-
-    def create_workflow_context(self,
-                                workflow_name,
-                                initialize_model_storage_fn,
-                                service_id_fn):
-        self.cleanup()
-        model_storage = application_model_storage(
-            sql_mapi.SQLAlchemyModelAPI,
-            initiator_kwargs=dict(base_dir=self._storage_dir, filename=self._storage_name))
-        if initialize_model_storage_fn:
-            initialize_model_storage_fn(model_storage)
-        resource_storage = application_resource_storage(
-            filesystem_rapi.FileSystemResourceAPI, api_kwargs=dict(directory='.'))
-        return WorkflowContext(
-            name=workflow_name,
-            model_storage=model_storage,
-            resource_storage=resource_storage,
-            service_id=service_id_fn(),
-            workflow_name=self.__class__.__name__,
-            task_max_attempts=1,
-            task_retry_interval=1)
-
-    def cleanup(self):
-        if (self._is_storage_temporary and (self._storage_path is not None) and
-                os.path.isfile(self._storage_path)):
-            os.remove(self._storage_path)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/orchestrator/workflow_runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflow_runner.py b/aria/orchestrator/workflow_runner.py
new file mode 100644
index 0000000..8b6b431
--- /dev/null
+++ b/aria/orchestrator/workflow_runner.py
@@ -0,0 +1,147 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Workflow runner
+"""
+
+import sys
+from datetime import datetime
+
+from .context.workflow import WorkflowContext
+from .workflows.builtin import BUILTIN_WORKFLOWS, BUILTIN_WORKFLOWS_PATH_PREFIX
+from .workflows.core.engine import Engine
+from .workflows.executor.process import ProcessExecutor
+from ..exceptions import AriaException
+from ..modeling import utils as modeling_utils
+from ..modeling import models
+from ..utils.imports import import_fullname
+
+
+DEFAULT_TASK_MAX_ATTEMPTS = 1
+DEFAULT_TASK_RETRY_INTERVAL = 1
+# TODO move this constant somewhere in the DSL parser?
+WORKFLOW_POLICY_INTERNAL_PROPERTIES = ('implementation', 'dependencies')
+
+
+class WorkflowRunner(object):
+
+    def __init__(self, workflow_name, service_name, inputs,
+                 model_storage, resource_storage, plugin_manager,
+                 task_max_attempts=DEFAULT_TASK_MAX_ATTEMPTS,
+                 task_retry_interval=DEFAULT_TASK_RETRY_INTERVAL):
+
+        self._model_storage = model_storage
+        self._workflow_name = workflow_name
+        service = model_storage.service.get_by_name(service_name)
+        # the IDs are stored rather than the models themselves, so that this module can be
+        # used by several threads without raising errors on model objects shared between them
+        self._service_id = service.id
+
+        self._validate_workflow_exists_for_service()
+
+        workflow_fn = self._get_workflow_fn()
+
+        execution = self._create_execution_models(inputs)
+        self._execution_id = execution.id
+
+        workflow_context = WorkflowContext(
+            name=self.__class__.__name__,
+            model_storage=self._model_storage,
+            resource_storage=resource_storage,
+            service_id=service.id,
+            execution_id=execution.id,
+            workflow_name=workflow_name,
+            task_max_attempts=task_max_attempts,
+            task_retry_interval=task_retry_interval)
+
+        # transform the execution inputs to a dict, to pass them to the workflow function
+        execution_inputs_dict = {input_.name: input_.value for input_ in
+                                 self.execution.inputs.values()}
+        self._tasks_graph = workflow_fn(ctx=workflow_context, **execution_inputs_dict)
+
+        self._engine = Engine(
+            executor=ProcessExecutor(plugin_manager=plugin_manager),
+            workflow_context=workflow_context,
+            tasks_graph=self._tasks_graph)
+
+    @property
+    def execution(self):
+        return self._model_storage.execution.get(self._execution_id)
+
+    @property
+    def service(self):
+        return self._model_storage.service.get(self._service_id)
+
+    def execute(self):
+        # TODO: uncomment; currently commented out for testing purposes
+        # self._validate_no_active_executions()
+        self._engine.execute()
+
+    def cancel(self):
+        self._engine.cancel_execution()
+
+    def _create_execution_models(self, inputs):
+        execution = models.Execution(
+            created_at=datetime.utcnow(),
+            service=self.service,
+            workflow_name=self._workflow_name,
+            inputs={})
+
+        # built-in workflows don't have any inputs, and are also
+        # not a part of the service's workflows field
+        if self._workflow_name not in BUILTIN_WORKFLOWS:
+            workflow_inputs = {
+                k: v for k, v in self.service.workflows[self._workflow_name].inputs.iteritems()
+                if k not in WORKFLOW_POLICY_INTERNAL_PROPERTIES}
+
+            execution.inputs = modeling_utils.create_inputs(inputs, workflow_inputs)
+
+        self._model_storage.execution.put(execution)
+        return execution
+
+    def _validate_workflow_exists_for_service(self):
+        if self._workflow_name not in self.service.workflows and \
+                self._workflow_name not in BUILTIN_WORKFLOWS:
+            raise AriaException('No workflow policy {0} declared in service instance {1}'
+                                .format(self._workflow_name, self.service.name))
+
+    def _validate_no_active_executions(self):
+        active_executions_filter = dict(service=self.service,
+                                        status=models.Execution.ACTIVE_STATES)
+        active_executions = self._model_storage.execution.list(filter=active_executions_filter)
+        if active_executions:
+            raise AriaException("Cannot start execution; service {0} already has a "
+                                "running execution with id {1}"
+                                .format(self.service.name, active_executions[0].id))
+
+    def _get_workflow_fn(self):
+        if self._workflow_name in BUILTIN_WORKFLOWS:
+            return import_fullname('{0}.{1}'.format(BUILTIN_WORKFLOWS_PATH_PREFIX,
+                                                    self._workflow_name))
+
+        workflow = self.service.workflows[self._workflow_name]
+
+        try:
+            # TODO: perhaps pass to import_fullname as paths instead of appending to sys path?
+            # TODO: revisit; workflow.implementation to be used instead?
+            sys.path.append(workflow.properties['implementation'].value)
+            # sys.path.append(os.path.dirname(str(context.presentation.location)))
+        except KeyError:
+            # no implementation field - a script has been provided directly
+            pass
+
+        workflow_fn = import_fullname(workflow.properties['implementation'].value)
+        return workflow_fn
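
For reference, a minimal sketch of how the new WorkflowRunner is expected to be driven
(for example by the CLI). The storage and plugin manager objects are assumed to be supplied
by the caller, and 'my_service' is a placeholder service name:

    from aria.orchestrator.workflow_runner import WorkflowRunner

    def run_install_workflow(model_storage, resource_storage, plugin_manager):
        # 'install' is one of BUILTIN_WORKFLOWS, so it takes no inputs
        runner = WorkflowRunner(workflow_name='install',
                                service_name='my_service',
                                inputs={},
                                model_storage=model_storage,
                                resource_storage=resource_storage,
                                plugin_manager=plugin_manager)
        runner.execute()            # runs the tasks graph via the ProcessExecutor
        return runner.execution     # execution model, re-read from storage by id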

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/orchestrator/workflows/api/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task.py b/aria/orchestrator/workflows/api/task.py
index f49ec2e..2ec85b9 100644
--- a/aria/orchestrator/workflows/api/task.py
+++ b/aria/orchestrator/workflows/api/task.py
@@ -19,7 +19,7 @@ Provides the tasks to be entered into the task graph
 import copy
 
 from ....modeling import models
-from ....utils.collections import OrderedDict
+from ....modeling import utils as modeling_utils
 from ....utils.uuid import generate_uuid
 from ... import context
 from .. import exceptions
@@ -63,7 +63,6 @@ class OperationTask(BaseTask):
 
     def __init__(self,
                  actor,
-                 actor_type,
                  interface_name,
                  operation_name,
                  runs_on=None,
@@ -76,6 +75,7 @@ class OperationTask(BaseTask):
         :meth:`for_relationship`.
         """
 
+        actor_type = type(actor).__name__.lower()
         assert isinstance(actor, (models.Node, models.Relationship))
         assert actor_type in ('node', 'relationship')
         assert interface_name and operation_name
@@ -93,22 +93,7 @@ class OperationTask(BaseTask):
         self.interface_name = interface_name
         self.operation_name = operation_name
 
-        # Wrap inputs
-        inputs = copy.deepcopy(inputs) if inputs else {}
-        for k, v in inputs.iteritems():
-            if not isinstance(v, models.Parameter):
-                inputs[k] = models.Parameter.wrap(k, v)
-
-        # TODO: Suggestion: these extra inputs could be stored as a separate entry in the task
-        # model, because they are different from the operation inputs. If we do this, then the two
-        # kinds of inputs should *not* be merged here.
-
-        operation = self._get_operation()
-        if operation is None:
-            raise exceptions.OperationNotFoundException(
-                'Could not find operation "{0}" on interface "{1}" for {2} "{3}"'
-                .format(self.operation_name, self.interface_name, actor_type, actor.name))
-
+        operation = self.actor.interfaces[self.interface_name].operations[self.operation_name]
         self.plugin = None
         if operation.plugin_specification:
             self.plugin = OperationTask._find_plugin(operation.plugin_specification)
@@ -117,9 +102,8 @@ class OperationTask(BaseTask):
                     'Could not find plugin of operation "{0}" on interface "{1}" for {2} "{3}"'
                     .format(self.operation_name, self.interface_name, actor_type, actor.name))
 
+        self.inputs = modeling_utils.create_inputs(inputs or {}, operation.inputs)
         self.implementation = operation.implementation
-        self.inputs = OperationTask._merge_inputs(operation.inputs, inputs)
-
         self.name = OperationTask.NAME_FORMAT.format(type=actor_type,
                                                      name=actor.name,
                                                      interface=self.interface_name,
@@ -128,14 +112,6 @@ class OperationTask(BaseTask):
     def __repr__(self):
         return self.name
 
-    def _get_operation(self):
-        interface = self.actor.interfaces.get(self.interface_name)
-        if interface:
-            return interface.operations.get(self.operation_name)
-        return None
-
-
-
     @classmethod
     def for_node(cls,
                  node,
@@ -163,7 +139,6 @@ class OperationTask(BaseTask):
         assert isinstance(node, models.Node)
         return cls(
             actor=node,
-            actor_type='node',
             interface_name=interface_name,
             operation_name=operation_name,
             max_attempts=max_attempts,
@@ -202,7 +177,6 @@ class OperationTask(BaseTask):
         assert runs_on in models.Task.RUNS_ON
         return cls(
             actor=relationship,
-            actor_type='relationship',
             interface_name=interface_name,
             operation_name=operation_name,
             runs_on=runs_on,
@@ -216,13 +190,6 @@ class OperationTask(BaseTask):
         workflow_context = context.workflow.current.get()
         return plugin_specification.find_plugin(workflow_context.model.plugin.list())
 
-    @staticmethod
-    def _merge_inputs(operation_inputs, override_inputs=None):
-        final_inputs = OrderedDict(operation_inputs)
-        if override_inputs:
-            final_inputs.update(override_inputs)
-        return final_inputs
-
 
 class WorkflowTask(BaseTask):
     """

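As a hedged illustration of the simplified OperationTask API above: the actor type is now
derived from the actor itself, and passed-in inputs are matched against the inputs declared
on the operation via modeling_utils.create_inputs instead of being merged blindly. The
interface name, operation name and input value below are assumptions for the example:

    from aria.orchestrator.workflows.api.task import OperationTask

    def add_create_task(graph, node):
        # no actor_type argument any more; the inputs must be declared on the operation
        task = OperationTask.for_node(node=node,
                                      interface_name='Standard',
                                      operation_name='create',
                                      inputs={'key': 'value'})
        graph.add_tasks(task)
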
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/orchestrator/workflows/builtin/__init__.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/__init__.py b/aria/orchestrator/workflows/builtin/__init__.py
index d43a962..8b13c62 100644
--- a/aria/orchestrator/workflows/builtin/__init__.py
+++ b/aria/orchestrator/workflows/builtin/__init__.py
@@ -24,6 +24,7 @@ from .stop import stop
 
 
 BUILTIN_WORKFLOWS = ('install', 'uninstall', 'start', 'stop')
+BUILTIN_WORKFLOWS_PATH_PREFIX = 'aria.orchestrator.workflows.builtin'
 
 
 __all__ = [

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/orchestrator/workflows/builtin/execute_operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/execute_operation.py b/aria/orchestrator/workflows/builtin/execute_operation.py
index 348f47a..7ee135f 100644
--- a/aria/orchestrator/workflows/builtin/execute_operation.py
+++ b/aria/orchestrator/workflows/builtin/execute_operation.py
@@ -17,6 +17,7 @@
 Builtin execute_operation workflow
 """
 
+from . import utils
 from ..api.task import OperationTask
 from ... import workflow
 
@@ -28,7 +29,6 @@ def execute_operation(
         interface_name,
         operation_name,
         operation_kwargs,
-        allow_kwargs_override,
         run_by_dependency_order,
         type_names,
         node_template_ids,
@@ -41,7 +41,6 @@ def execute_operation(
     :param TaskGraph graph: the graph which will describe the workflow.
     :param basestring operation: the operation name to execute
     :param dict operation_kwargs:
-    :param bool allow_kwargs_override:
     :param bool run_by_dependency_order:
     :param type_names:
     :param node_template_ids:
@@ -71,8 +70,7 @@ def execute_operation(
                 node=node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                operation_kwargs=operation_kwargs,
-                allow_kwargs_override=allow_kwargs_override
+                operation_kwargs=operation_kwargs
             )
         )
 
@@ -108,21 +106,16 @@ def _create_node_task(
         node,
         interface_name,
         operation_name,
-        operation_kwargs,
-        allow_kwargs_override):
+        operation_kwargs):
     """
     A workflow which executes a single operation
     :param node: the node instance to install
     :param basestring operation: the operation name
     :param dict operation_kwargs:
-    :param bool allow_kwargs_override:
     :return:
     """
 
-    if allow_kwargs_override is not None:
-        operation_kwargs['allow_kwargs_override'] = allow_kwargs_override
-
-    return OperationTask.for_node(
+    return utils.create_node_task(
         node=node,
         interface_name=interface_name,
         operation_name=operation_name,
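
A short sketch of what delegating to utils.create_node_task means for callers: the helper may
now return a StubTask (the operation is declared but has no implementation), a regular
OperationTask, or None (the operation is not declared at all), so None results still have to
be filtered out. The function and variable names here are illustrative only:

    from aria.orchestrator.workflows.builtin import utils

    def collect_node_tasks(nodes, interface_name, operation_name):
        tasks = []
        for node in nodes:
            task = utils.create_node_task(node, interface_name, operation_name)
            if task is not None:    # nodes lacking the operation are skipped
                tasks.append(task)
        return tasks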

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/orchestrator/workflows/builtin/utils.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/utils.py b/aria/orchestrator/workflows/builtin/utils.py
index d79318f..8890084 100644
--- a/aria/orchestrator/workflows/builtin/utils.py
+++ b/aria/orchestrator/workflows/builtin/utils.py
@@ -12,26 +12,31 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ..api.task import OperationTask
+
+from ..api.task import OperationTask, StubTask
 from .. import exceptions
 
 
-def create_node_task(node, interface_name, operation_name):
+def create_node_task(node, interface_name, operation_name, **kwargs):
     """
     Returns a new operation task if the operation exists in the node, otherwise returns None.
     """
 
     try:
+        if _is_empty_task(node, interface_name, operation_name):
+            return StubTask()
+
         return OperationTask.for_node(node=node,
                                       interface_name=interface_name,
-                                      operation_name=operation_name)
+                                      operation_name=operation_name,
+                                      **kwargs)
     except exceptions.OperationNotFoundException:
         # We will skip nodes which do not have the operation
         return None
 
 
 def create_relationships_tasks(
-        node, interface_name, source_operation_name=None, target_operation_name=None):
+        node, interface_name, source_operation_name=None, target_operation_name=None, **kwargs):
     """
     Creates a relationship task (source and target) for all of a node_instance relationships.
     :param basestring source_operation_name: the relationship operation name.
@@ -43,21 +48,18 @@ def create_relationships_tasks(
     """
     sub_tasks = []
     for relationship in node.outbound_relationships:
-        try:
-            relationship_operations = relationship_tasks(
-                relationship,
-                interface_name,
-                source_operation_name=source_operation_name,
-                target_operation_name=target_operation_name)
-            sub_tasks.append(relationship_operations)
-        except exceptions.OperationNotFoundException:
-            # We will skip relationships which do not have the operation
-            pass
+        relationship_operations = relationship_tasks(
+            relationship,
+            interface_name,
+            source_operation_name=source_operation_name,
+            target_operation_name=target_operation_name,
+            **kwargs)
+        sub_tasks.append(relationship_operations)
     return sub_tasks
 
 
-def relationship_tasks(
-        relationship, interface_name, source_operation_name=None, target_operation_name=None):
+def relationship_tasks(relationship, interface_name, source_operation_name=None,
+                       target_operation_name=None, **kwargs):
     """
     Creates a relationship task source and target.
     :param Relationship relationship: the relationship instance itself
@@ -68,19 +70,35 @@ def relationship_tasks(
     """
     operations = []
     if source_operation_name:
-        operations.append(
-            OperationTask.for_relationship(relationship=relationship,
-                                           interface_name=interface_name,
-                                           operation_name=source_operation_name,
-                                           runs_on='source')
-        )
+        try:
+            if _is_empty_task(relationship, interface_name, source_operation_name):
+                operations.append(StubTask())
+            else:
+                operations.append(
+                    OperationTask.for_relationship(relationship=relationship,
+                                                   interface_name=interface_name,
+                                                   operation_name=source_operation_name,
+                                                   runs_on='source',
+                                                   **kwargs)
+                )
+        except exceptions.OperationNotFoundException:
+            # We will skip relationships which do not have the operation
+            pass
     if target_operation_name:
-        operations.append(
-            OperationTask.for_relationship(relationship=relationship,
-                                           interface_name=interface_name,
-                                           operation_name=target_operation_name,
-                                           runs_on='target')
-        )
+        try:
+            if _is_empty_task(relationship, interface_name, target_operation_name):
+                operations.append(StubTask())
+            else:
+                operations.append(
+                    OperationTask.for_relationship(relationship=relationship,
+                                                   interface_name=interface_name,
+                                                   operation_name=target_operation_name,
+                                                   runs_on='target',
+                                                   **kwargs)
+                )
+        except exceptions.OperationNotFoundException:
+            # We will skip relationships which do not have the operation
+            pass
 
     return operations
 
@@ -108,3 +126,15 @@ def create_node_task_dependencies(graph, tasks_and_nodes, reverse=False):
                     graph.add_dependency(dependency, task)
             else:
                 graph.add_dependency(task, dependencies)
+
+
+def _is_empty_task(actor, interface_name, operation_name):
+    interface = actor.interfaces.get(interface_name)
+    if interface:
+        operation = interface.operations.get(operation_name)
+        if operation:
+            return operation.implementation is None
+
+    raise exceptions.OperationNotFoundException(
+        'Could not find operation "{0}" on interface "{1}" for {2} "{3}"'
+        .format(operation_name, interface_name, type(actor).__name__.lower(), actor.name))
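
For completeness, a hedged example of how the **kwargs threading added above lets a workflow
forward operation inputs to every relationship task it creates; the interface name, operation
name and input value are assumed for the example:

    from aria.orchestrator.workflows.builtin.utils import create_relationships_tasks

    def configure_relationship_tasks(node):
        # one list of source/target tasks per outbound relationship; operations without an
        # implementation come back as StubTask placeholders
        return create_relationships_tasks(node,
                                           interface_name='Configure',
                                           source_operation_name='post_configure',
                                           inputs={'putput': True})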

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/storage/core.py
----------------------------------------------------------------------
diff --git a/aria/storage/core.py b/aria/storage/core.py
index 8302fc9..0e900bb 100644
--- a/aria/storage/core.py
+++ b/aria/storage/core.py
@@ -37,6 +37,7 @@ API:
     * drivers - module, a pool of ARIA standard drivers.
     * StorageDriver - class, abstract model implementation.
 """
+import logging
 
 from aria.logger import LoggerMixin
 from . import sql_mapi
@@ -71,6 +72,10 @@ class Storage(LoggerMixin):
         :param kwargs:
         """
         super(Storage, self).__init__(**kwargs)
+        # Attach a NullHandler to the logger of any storage object.
+        # Without it, the absence of a handler shows up while using the CLI in the form of:
+        # `No handlers could be found for logger "aria.ResourceStorage"`.
+        self.logger.addHandler(logging.NullHandler())
         self.api = api_cls
         self.registered = {}
         self._initiator = initiator
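
The NullHandler only silences the "No handlers could be found" warning; a consumer such as
the CLI can still attach a real handler if it wants to see storage logs. A minimal sketch
(the logger name is taken from the warning quoted above, the format string is arbitrary):

    import logging

    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(name)s: %(message)s'))
    logging.getLogger('aria.ResourceStorage').addHandler(handler)
    logging.getLogger('aria.ResourceStorage').setLevel(logging.INFO)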

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/utils/archive.py
----------------------------------------------------------------------
diff --git a/aria/utils/archive.py b/aria/utils/archive.py
new file mode 100644
index 0000000..5077dec
--- /dev/null
+++ b/aria/utils/archive.py
@@ -0,0 +1,63 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import tarfile
+import zipfile
+import tempfile
+from contextlib import closing
+
+
+def is_archive(source):
+    return tarfile.is_tarfile(source) or zipfile.is_zipfile(source)
+
+
+def extract_archive(source):
+    if tarfile.is_tarfile(source):
+        return untar(source)
+    elif zipfile.is_zipfile(source):
+        return unzip(source)
+    raise ValueError(
+        'Unsupported archive type provided or archive is not valid: {0}.'.format(source))
+
+
+def tar(source, destination):
+    with closing(tarfile.open(destination, 'w:gz')) as tar:
+        tar.add(source, arcname=os.path.basename(source))
+
+
+def untar(archive, destination=None):
+    if not destination:
+        destination = tempfile.mkdtemp()
+    with closing(tarfile.open(name=archive)) as tar:
+        tar.extractall(path=destination, members=tar.getmembers())
+    return destination
+
+
+def zip(source, destination):
+    with closing(zipfile.ZipFile(destination, 'w')) as zip_file:
+        for root, _, files in os.walk(source):
+            for filename in files:
+                file_path = os.path.join(root, filename)
+                source_dir = os.path.dirname(source)
+                zip_file.write(
+                    file_path, os.path.relpath(file_path, source_dir))
+    return destination
+
+
+def unzip(archive, destination=None):
+    if not destination:
+        destination = tempfile.mkdtemp()
+    with closing(zipfile.ZipFile(archive, 'r')) as zip_file:
+        zip_file.extractall(destination)
+    return destination
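
A small round-trip sketch for the new archive helpers: pack a directory into a gzipped
tarball, check that it is recognized as an archive, and extract it (into a fresh temporary
directory by default). The paths are placeholders:

    import os
    from aria.utils import archive

    def roundtrip(source_dir, work_dir):
        tarball = os.path.join(work_dir, 'service.tar.gz')
        archive.tar(source_dir, tarball)
        assert archive.is_archive(tarball)
        return archive.extract_archive(tarball)    # returns the extraction directory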

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/utils/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/utils/exceptions.py b/aria/utils/exceptions.py
index 9e3e80f..b60cee4 100644
--- a/aria/utils/exceptions.py
+++ b/aria/utils/exceptions.py
@@ -15,6 +15,7 @@
 
 import sys
 import linecache
+import StringIO
 import traceback as tb
 
 import jsonpickle
@@ -89,6 +90,16 @@ def _print_stack(frame):
                 puts(line)
 
 
+def get_exception_as_string(exc_type, exc_val, traceback):
+    s_traceback = StringIO.StringIO()
+    tb.print_exception(
+        etype=exc_type,
+        value=exc_val,
+        tb=traceback,
+        file=s_traceback)
+    return s_traceback.getvalue()
+
+
 class _WrappedException(Exception):
 
     def __init__(self, exception_type, exception_str):
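
Typical use of the new get_exception_as_string helper is to capture a formatted traceback as
a string, for example to store it on a model or log it, instead of printing it to stderr:

    import sys
    from aria.utils.exceptions import get_exception_as_string

    try:
        1 / 0
    except ZeroDivisionError:
        error_text = get_exception_as_string(*sys.exc_info())
        # error_text holds the same text that traceback.print_exception() would print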

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/utils/file.py
----------------------------------------------------------------------
diff --git a/aria/utils/file.py b/aria/utils/file.py
index b515f70..6d1aa16 100644
--- a/aria/utils/file.py
+++ b/aria/utils/file.py
@@ -15,6 +15,7 @@
 
 import errno
 import os
+import shutil
 
 
 def makedirs(path):
@@ -26,3 +27,15 @@ def makedirs(path):
     except IOError as e:
         if e.errno != errno.EEXIST:
             raise
+
+
+def remove_if_exists(path):
+    try:
+        if os.path.isfile(path):
+            os.remove(path)
+        if os.path.isdir(path):
+            shutil.rmtree(path)
+
+    except OSError as e:
+        if e.errno != errno.ENOENT:  # errno.ENOENT = no such file or directory
+            raise  # re-raise exception if a different error occurred
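
remove_if_exists deletes either a file or a whole directory tree and silently ignores a
missing path, which keeps cleanup code short; a brief illustration (the path is a placeholder):

    from aria.utils.file import makedirs, remove_if_exists

    makedirs('/tmp/aria-example')            # idempotent create
    remove_if_exists('/tmp/aria-example')    # removes the directory tree
    remove_if_exists('/tmp/aria-example')    # second call is a silent no-op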

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/utils/formatting.py
----------------------------------------------------------------------
diff --git a/aria/utils/formatting.py b/aria/utils/formatting.py
index 8a223e9..698393f 100644
--- a/aria/utils/formatting.py
+++ b/aria/utils/formatting.py
@@ -83,6 +83,49 @@ def full_type_name(value):
     return name if module == '__builtin__' else '%s.%s' % (module, name)
 
 
+def decode_list(data):
+    rv = []
+    for item in data:
+        if isinstance(item, unicode):
+            item = item.encode('utf-8')
+        elif isinstance(item, list):
+            item = decode_list(item)
+        elif isinstance(item, dict):
+            item = decode_dict(item)
+        rv.append(item)
+    return rv
+
+
+def decode_dict(data):
+    rv = {}
+    for key, value in data.iteritems():
+        if isinstance(key, unicode):
+            key = key.encode('utf-8')
+        if isinstance(value, unicode):
+            value = value.encode('utf-8')
+        elif isinstance(value, list):
+            value = decode_list(value)
+        elif isinstance(value, dict):
+            value = decode_dict(value)
+        rv[key] = value
+    return rv
+
+
+def try_convert_from_str(string, target_type):
+    if target_type == basestring:
+        return string
+    if target_type == bool:
+        if string.lower() == 'true':
+            return True
+        if string.lower() == 'false':
+            return False
+        return string
+    try:
+        return target_type(string)
+    except ValueError:
+        return string
+
+
 def safe_str(value):
     """
     Like :code:`str` coercion, but makes sure that Unicode strings are properly
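
Both additions are Python 2 helpers: decode_dict/decode_list recursively re-encode unicode
keys and values as UTF-8 byte strings, and try_convert_from_str falls back to returning the
original string when conversion is not possible. A brief illustration:

    from aria.utils.formatting import decode_dict, try_convert_from_str

    data = {u'name': u'aria', u'tags': [u'tosca', u'orchestration']}
    assert decode_dict(data) == {'name': 'aria', 'tags': ['tosca', 'orchestration']}

    assert try_convert_from_str('42', int) == 42
    assert try_convert_from_str('true', bool) is True
    assert try_convert_from_str('oops', int) == 'oops'    # returned unchanged on failure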

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/utils/threading.py
----------------------------------------------------------------------
diff --git a/aria/utils/threading.py b/aria/utils/threading.py
index b99250d..f4e9c0e 100644
--- a/aria/utils/threading.py
+++ b/aria/utils/threading.py
@@ -15,6 +15,7 @@
 
 from __future__ import absolute_import  # so we can import standard 'threading'
 
+import sys
 import itertools
 import multiprocessing
 from threading import (Thread, Lock)
@@ -255,3 +256,26 @@ class LockedList(list):
 
     def __exit__(self, the_type, value, traceback):
         return self.lock.__exit__(the_type, value, traceback)
+
+
+class ExceptionThread(Thread):
+    """
+    A thread from which top level exceptions can be retrieved or reraised
+    """
+    def __init__(self, *args, **kwargs):
+        Thread.__init__(self, *args, **kwargs)
+        self.exception = None
+
+    def run(self):
+        try:
+            super(ExceptionThread, self).run()
+        except BaseException:
+            self.exception = sys.exc_info()
+
+    def is_error(self):
+        return self.exception is not None
+
+    def raise_error_if_exists(self):
+        if self.is_error():
+            t, v, tb = self.exception
+            raise t, v, tb
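
A minimal usage sketch for ExceptionThread (Python 2 semantics, since the re-raise uses the
three-argument raise form): run a target in a thread, then surface any exception it captured
in the calling thread:

    from aria.utils.threading import ExceptionThread

    def flaky():
        raise RuntimeError('boom')

    thread = ExceptionThread(target=flaky)
    thread.start()
    thread.join()
    if thread.is_error():
        thread.raise_error_if_exists()    # re-raises RuntimeError with its original traceback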

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/utils/type.py
----------------------------------------------------------------------
diff --git a/aria/utils/type.py b/aria/utils/type.py
new file mode 100644
index 0000000..e427be1
--- /dev/null
+++ b/aria/utils/type.py
@@ -0,0 +1,57 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def validate_value_type(value, type_name):
+    """Supports both python and yaml type names"""
+    #TODO add timestamp type?
+
+    name_to_type = {
+        'list': list,
+        'dict': dict,
+        'str': str,
+        'unicode': str,
+        'string': str,
+        'int': int,
+        'integer': int,
+        'bool': bool,
+        'boolean': bool,
+        'float': float
+    }
+
+    type_ = name_to_type.get(type_name.lower())
+    if type_ is None:
+        raise ValueError('No supported type_name was provided')
+    try:
+        type_(value)
+    except ValueError:
+        raise ValueError('Value {0} is not of type {1}'.format(value, type_name))
+
+
+def convert_value_to_type(str_value, type_name):
+    try:
+        if type_name.lower() in ['str', 'unicode']:
+            return str_value.decode('utf-8')
+        elif type_name.lower() == 'int':
+            return int(str_value)
+        elif type_name.lower() == 'bool':
+            return bool(str_value)
+        elif type_name.lower() == 'float':
+            return float(str_value)
+        else:
+            raise ValueError('No supported type_name was provided')
+    except ValueError:
+        raise ValueError('Trying to convert {0} to {1} failed'.format(str_value,
+                                                                      type_name))
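
Quick sanity checks for the two new type helpers (the values are arbitrary); conversion
failures surface as a ValueError that names the offending value and target type:

    from aria.utils.type import convert_value_to_type, validate_value_type

    assert convert_value_to_type('3', 'int') == 3
    assert convert_value_to_type('2.5', 'float') == 2.5
    validate_value_type(3, 'integer')    # passes silently
    try:
        convert_value_to_type('abc', 'int')
    except ValueError as e:
        assert 'abc' in str(e)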

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
index d0a39e6..1661623 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
@@ -19,6 +19,7 @@ Creates ARIA service template models based on the TOSCA presentation.
 Relies on many helper methods in the presentation classes. 
 """
 
+import os
 import re
 from types import FunctionType
 from datetime import datetime
@@ -34,7 +35,7 @@ from ..data_types import coerce_value
 
 def create_service_template_model(context): # pylint: disable=too-many-locals,too-many-branches
     model = ServiceTemplate(created_at=datetime.now(),
-                            main_file_name=str(context.presentation.location))
+                            main_file_name=os.path.basename(str(context.presentation.location)))
 
     model.description = context.presentation.get('service_template', 'description', 'value')
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/requirements.in
----------------------------------------------------------------------
diff --git a/requirements.in b/requirements.in
index bc27479..2c7978f 100644
--- a/requirements.in
+++ b/requirements.in
@@ -25,6 +25,11 @@ SQLAlchemy>=1.1.0, <1.2  # version 1.2 dropped support of python 2.6
 wagon==0.6.0
 bottle>=0.12.0, <0.13
 Fabric>=1.13.0, <1.14
+click==4.1
+colorama==0.3.3
+PrettyTable>=0.7,<0.8
+click_didyoumean==0.0.3
+backports.shutil_get_terminal_size==1.0.0
 
 # Since the tool we are using to generate our requirements.txt, `pip-tools`,
 # does not currently support conditional dependencies (;), we're adding our original

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/setup.py
----------------------------------------------------------------------
diff --git a/setup.py b/setup.py
index 3d72ebc..b64453a 100644
--- a/setup.py
+++ b/setup.py
@@ -61,7 +61,7 @@ except IOError:
     extras_require = {}
 
 
-console_scripts = ['aria = aria.cli.cli:main']
+console_scripts = ['aria = aria.cli.main:main']
 
 
 def _generate_user_options(command):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/tests/mock/context.py
----------------------------------------------------------------------
diff --git a/tests/mock/context.py b/tests/mock/context.py
index f943d7e..ac0a8a7 100644
--- a/tests/mock/context.py
+++ b/tests/mock/context.py
@@ -39,12 +39,17 @@ def simple(tmpdir, inmemory=False, context_kwargs=None, topology=None):
         api_kwargs=dict(directory=os.path.join(tmpdir, 'resources'))
     )
 
+    service_id = topology(model_storage)
+    execution = models.create_execution(model_storage.service.get(service_id))
+    model_storage.execution.put(execution)
+
     final_kwargs = dict(
         name='simple_context',
         model_storage=model_storage,
         resource_storage=resource_storage,
-        service_id=topology(model_storage),
+        service_id=service_id,
         workflow_name=models.WORKFLOW_NAME,
+        execution_id=execution.id,
         task_max_attempts=models.TASK_MAX_ATTEMPTS,
         task_retry_interval=models.TASK_RETRY_INTERVAL
     )

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/tests/mock/models.py
----------------------------------------------------------------------
diff --git a/tests/mock/models.py b/tests/mock/models.py
index 1d29e2d..6b7f810 100644
--- a/tests/mock/models.py
+++ b/tests/mock/models.py
@@ -40,7 +40,6 @@ from aria.orchestrator.workflows.builtin.workflows import (
 SERVICE_NAME = 'test_service_name'
 SERVICE_TEMPLATE_NAME = 'test_service_template_name'
 WORKFLOW_NAME = 'test_workflow_name'
-EXECUTION_NAME = 'test_execution_name'
 TASK_RETRY_INTERVAL = 1
 TASK_MAX_ATTEMPTS = 1
 
@@ -168,6 +167,13 @@ def create_interface_template(service_template, interface_name, operation_name,
 def create_interface(service, interface_name, operation_name, operation_kwargs=None,
                      interface_kwargs=None):
     the_type = service.service_template.interface_types.get_descendant('test_interface_type')
+
+    if operation_kwargs and operation_kwargs.get('inputs'):
+        wrapped_inputs = {}
+        for input_name, input_value in operation_kwargs['inputs'].iteritems():
+            wrapped_inputs[input_name] = models.Parameter.wrap(input_name, input_value)
+        operation_kwargs['inputs'] = wrapped_inputs
+
     operation = models.Operation(
         name=operation_name,
         **(operation_kwargs or {})
@@ -183,10 +189,11 @@ def create_interface(service, interface_name, operation_name, operation_kwargs=N
 def create_execution(service):
     return models.Execution(
         service=service,
-        status=models.Execution.STARTED,
+        status=models.Execution.PENDING,
         workflow_name=WORKFLOW_NAME,
+        created_at=datetime.utcnow(),
         started_at=datetime.utcnow(),
-        parameters=None
+        inputs={}
     )
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/tests/modeling/test_models.py
----------------------------------------------------------------------
diff --git a/tests/modeling/test_models.py b/tests/modeling/test_models.py
index 8cd00f8..e459821 100644
--- a/tests/modeling/test_models.py
+++ b/tests/modeling/test_models.py
@@ -253,7 +253,7 @@ class TestService(object):
 class TestExecution(object):
 
     @pytest.mark.parametrize(
-        'is_valid, created_at, started_at, ended_at, error, is_system_workflow, parameters, '
+        'is_valid, created_at, started_at, ended_at, error, is_system_workflow, inputs, '
         'status, workflow_name',
         [
             (False, m_cls, now, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
@@ -268,11 +268,11 @@ class TestExecution(object):
             (True, now, None, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
             (True, now, now, None, 'error', False, {}, Execution.STARTED, 'wf_name'),
             (True, now, now, now, None, False, {}, Execution.STARTED, 'wf_name'),
-            (True, now, now, now, 'error', False, None, Execution.STARTED, 'wf_name'),
+            (True, now, now, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
         ]
     )
     def test_execution_model_creation(self, service_storage, is_valid, created_at, started_at,
-                                      ended_at, error, is_system_workflow, parameters, status,
+                                      ended_at, error, is_system_workflow, inputs, status,
                                       workflow_name):
         execution = _test_model(
             is_valid=is_valid,
@@ -285,7 +285,7 @@ class TestExecution(object):
                 ended_at=ended_at,
                 error=error,
                 is_system_workflow=is_system_workflow,
-                parameters=parameters,
+                inputs=inputs,
                 status=status,
                 workflow_name=workflow_name,
             ))
@@ -299,7 +299,7 @@ class TestExecution(object):
                 id='e_id',
                 workflow_name='w_name',
                 status=status,
-                parameters={},
+                inputs={},
                 created_at=now,
             )
             return execution

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index f55b83e..47c82dc 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -69,16 +69,17 @@ def test_node_operation_task_execution(ctx, thread_executor):
     interface_name = 'Standard'
     operation_name = 'create'
 
+    inputs = {'putput': True}
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
     interface = mock.models.create_interface(
         node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__),
+                              inputs=inputs)
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
-    inputs = {'putput': True}
 
     @workflow
     def basic_workflow(graph, **_):
@@ -124,17 +125,18 @@ def test_relationship_operation_task_execution(ctx, thread_executor):
     interface_name = 'Configure'
     operation_name = 'post_configure'
 
+    inputs = {'putput': True}
     relationship = ctx.model.relationship.list()[0]
     interface = mock.models.create_interface(
         relationship.source_node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__)),
+        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__),
+                              inputs=inputs),
     )
 
     relationship.interfaces[interface.name] = interface
     ctx.model.relationship.update(relationship)
-    inputs = {'putput': True}
 
     @workflow
     def basic_workflow(graph, **_):
@@ -232,21 +234,21 @@ def test_plugin_workdir(ctx, thread_executor, tmpdir):
     ctx.model.plugin.put(plugin)
     plugin_specification = mock.models.create_plugin_specification()
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    filename = 'test_file'
+    content = 'file content'
+    inputs = {'filename': filename, 'content': content}
     interface = mock.models.create_interface(
         node.service,
         interface_name,
         operation_name,
         operation_kwargs=dict(
             implementation='{0}.{1}'.format(__name__, _test_plugin_workdir.__name__),
-            plugin_specification=plugin_specification)
+            plugin_specification=plugin_specification,
+            inputs=inputs)
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
 
-    filename = 'test_file'
-    content = 'file content'
-    inputs = {'filename': filename, 'content': content}
-
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(api.task.OperationTask.for_node(node=node,
@@ -278,21 +280,22 @@ def test_node_operation_logging(ctx, executor):
     interface_name, operation_name = mock.operations.NODE_OPERATIONS_INSTALL[0]
 
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+
+    inputs = {
+        'op_start': 'op_start',
+        'op_end': 'op_end',
+    }
     interface = mock.models.create_interface(
         node.service,
         interface_name,
         operation_name,
         operation_kwargs=dict(
-            implementation=op_path(logged_operation, module_path=__name__))
+            implementation=op_path(logged_operation, module_path=__name__),
+            inputs=inputs)
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
 
-    inputs = {
-        'op_start': 'op_start',
-        'op_end': 'op_end',
-    }
-
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
@@ -312,20 +315,20 @@ def test_relationship_operation_logging(ctx, executor):
     interface_name, operation_name = mock.operations.RELATIONSHIP_OPERATIONS_INSTALL[0]
 
     relationship = ctx.model.relationship.list()[0]
+    inputs = {
+        'op_start': 'op_start',
+        'op_end': 'op_end',
+    }
     interface = mock.models.create_interface(
         relationship.source_node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(logged_operation, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(logged_operation, module_path=__name__),
+                              inputs=inputs)
     )
     relationship.interfaces[interface.name] = interface
     ctx.model.relationship.update(relationship)
 
-    inputs = {
-        'op_start': 'op_start',
-        'op_end': 'op_end',
-    }
-
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/tests/orchestrator/context/test_resource_render.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_resource_render.py b/tests/orchestrator/context/test_resource_render.py
index 696e9b3..8113746 100644
--- a/tests/orchestrator/context/test_resource_render.py
+++ b/tests/orchestrator/context/test_resource_render.py
@@ -64,9 +64,9 @@ def resources(tmpdir, ctx):
     implicit_ctx_template_path.write(_IMPLICIT_CTX_TEMPLATE)
     variables_template_path = tmpdir.join(_VARIABLES_TEMPLATE_PATH)
     variables_template_path.write(_VARIABLES_TEMPLATE)
-    ctx.resource.deployment.upload(entry_id='1',
+    ctx.resource.service.upload(entry_id='1',
                                    source=str(implicit_ctx_template_path),
                                    path=_IMPLICIT_CTX_TEMPLATE_PATH)
-    ctx.resource.deployment.upload(entry_id='1',
+    ctx.resource.service.upload(entry_id='1',
                                    source=str(variables_template_path),
                                    path=_VARIABLES_TEMPLATE_PATH)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/tests/orchestrator/context/test_serialize.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_serialize.py b/tests/orchestrator/context/test_serialize.py
index db45e8e..5fdb674 100644
--- a/tests/orchestrator/context/test_serialize.py
+++ b/tests/orchestrator/context/test_serialize.py
@@ -34,7 +34,7 @@ def test_serialize_operation_context(context, executor, tmpdir):
     test_file = tmpdir.join(TEST_FILE_NAME)
     test_file.write(TEST_FILE_CONTENT)
     resource = context.resource
-    resource.blueprint.upload(TEST_FILE_ENTRY_ID, str(test_file))
+    resource.service_template.upload(TEST_FILE_ENTRY_ID, str(test_file))
     graph = _mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter
     eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
     eng.execute()
@@ -73,7 +73,7 @@ def _mock_operation(ctx):
     # a correct ctx.deployment.name tells us we kept the correct deployment_id
     assert ctx.service.name == mock.models.SERVICE_NAME
     # Here we test that the resource storage was properly re-created
-    test_file_content = ctx.resource.blueprint.read(TEST_FILE_ENTRY_ID, TEST_FILE_NAME)
+    test_file_content = ctx.resource.service_template.read(TEST_FILE_ENTRY_ID, TEST_FILE_NAME)
     assert test_file_content == TEST_FILE_CONTENT
     # a non empty plugin workdir tells us that we kept the correct base_workdir
     assert ctx.plugin_workdir is not None

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index cf82127..213d964 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -76,15 +76,16 @@ def test_host_ip(workflow_context, executor):
     interface_name = 'Standard'
     operation_name = 'create'
     _, dependency_node, _, _, _ = _get_elements(workflow_context)
+    inputs = {'putput': True}
     interface = mock.models.create_interface(
         dependency_node.service,
         interface_name=interface_name,
         operation_name=operation_name,
-        operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__),
+                              inputs=inputs)
     )
     dependency_node.interfaces[interface.name] = interface
     workflow_context.model.node.update(dependency_node)
-    inputs = {'putput': True}
 
     @workflow
     def basic_workflow(graph, **_):
@@ -106,17 +107,17 @@ def test_relationship_tool_belt(workflow_context, executor):
     interface_name = 'Configure'
     operation_name = 'post_configure'
     _, _, _, _, relationship = _get_elements(workflow_context)
+    inputs = {'putput': True}
     interface = mock.models.create_interface(
         relationship.source_node.service,
         interface_name=interface_name,
         operation_name=operation_name,
-        operation_kwargs=dict(implementation=op_path(relationship_operation, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(relationship_operation, module_path=__name__),
+                              inputs=inputs)
     )
     relationship.interfaces[interface.name] = interface
     workflow_context.model.relationship.update(relationship)
 
-    inputs = {'putput': True}
-
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/tests/orchestrator/context/test_workflow.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_workflow.py b/tests/orchestrator/context/test_workflow.py
index fa1f387..3c35435 100644
--- a/tests/orchestrator/context/test_workflow.py
+++ b/tests/orchestrator/context/test_workflow.py
@@ -35,7 +35,7 @@ class TestWorkflowContext(object):
         assert execution.service_template == storage.service_template.get_by_name(
             models.SERVICE_TEMPLATE_NAME)
         assert execution.status == storage.execution.model_cls.PENDING
-        assert execution.parameters == {}
+        assert execution.inputs == {}
         assert execution.created_at <= datetime.utcnow()
 
     def test_subsequent_workflow_context_creation_do_not_fail(self, storage):
@@ -49,11 +49,13 @@ class TestWorkflowContext(object):
         :param storage:
         :return WorkflowContext:
         """
+        service = storage.service.get_by_name(models.SERVICE_NAME)
         return context.workflow.WorkflowContext(
             name='simple_context',
             model_storage=storage,
             resource_storage=None,
-            service_id=storage.service.get_by_name(models.SERVICE_NAME).id,
+            service_id=service,
+            execution_id=storage.execution.list(filters=dict(service=service))[0].id,
             workflow_name=models.WORKFLOW_NAME,
             task_max_attempts=models.TASK_MAX_ATTEMPTS,
             task_retry_interval=models.TASK_RETRY_INTERVAL
@@ -66,6 +68,8 @@ def storage():
         sql_mapi.SQLAlchemyModelAPI, initiator=test_storage.init_inmemory_model_storage)
     workflow_storage.service_template.put(models.create_service_template())
     service_template = workflow_storage.service_template.get_by_name(models.SERVICE_TEMPLATE_NAME)
-    workflow_storage.service.put(models.create_service(service_template))
+    service = models.create_service(service_template)
+    workflow_storage.service.put(service)
+    workflow_storage.execution.put(models.create_execution(service))
     yield workflow_storage
     test_storage.release_sqlite_storage(workflow_storage)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/tests/orchestrator/execution_plugin/test_local.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_local.py b/tests/orchestrator/execution_plugin/test_local.py
index e3612cf..67d527c 100644
--- a/tests/orchestrator/execution_plugin/test_local.py
+++ b/tests/orchestrator/execution_plugin/test_local.py
@@ -460,14 +460,15 @@ if __name__ == '__main__':
              env_var='value',
              inputs=None):
         local_script_path = script_path
-        script_path = os.path.basename(local_script_path) if local_script_path else None
+        script_path = os.path.basename(local_script_path) if local_script_path else ''
+        inputs = inputs or {}
+        process = process or {}
         if script_path:
-            workflow_context.resource.deployment.upload(
+            workflow_context.resource.service.upload(
                 entry_id=str(workflow_context.service.id),
                 source=local_script_path,
                 path=script_path)
 
-        inputs = inputs or {}
         inputs.update({
             'script_path': script_path,
             'process': process,
@@ -483,7 +484,8 @@ if __name__ == '__main__':
                 'op',
                 operation_kwargs=dict(implementation='{0}.{1}'.format(
                     operations.__name__,
-                    operations.run_script_locally.__name__))
+                    operations.run_script_locally.__name__),
+                    inputs=inputs)
             )
             node.interfaces[interface.name] = interface
             graph.add_tasks(api.task.OperationTask.for_node(

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/tests/orchestrator/execution_plugin/test_ssh.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_ssh.py b/tests/orchestrator/execution_plugin/test_ssh.py
index dd36466..d17def1 100644
--- a/tests/orchestrator/execution_plugin/test_ssh.py
+++ b/tests/orchestrator/execution_plugin/test_ssh.py
@@ -258,7 +258,7 @@ class TestWithActualSSHServer(object):
         return collected[signal][0]['kwargs']['exception']
 
     def _upload(self, source, path):
-        self._workflow_context.resource.deployment.upload(
+        self._workflow_context.resource.service.upload(
             entry_id=str(self._workflow_context.service.id),
             source=source,
             path=path)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/tests/orchestrator/workflows/builtin/test_execute_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_execute_operation.py b/tests/orchestrator/workflows/builtin/test_execute_operation.py
index 360e17d..41e2b6b 100644
--- a/tests/orchestrator/workflows/builtin/test_execute_operation.py
+++ b/tests/orchestrator/workflows/builtin/test_execute_operation.py
@@ -34,7 +34,8 @@ def test_execute_operation(ctx):
     interface = mock.models.create_interface(
         ctx.service,
         interface_name,
-        operation_name
+        operation_name,
+        operation_kwargs={'implementation': 'stub-implementation'}
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index 6f97952..e793c49 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -61,12 +61,18 @@ class BaseTest(object):
             retry_interval=None,
             ignore_failure=None):
         node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+
+        operation_kwargs = dict(implementation='{name}.{func.__name__}'.format(
+            name=__name__, func=func))
+        if inputs:
+            # the operation has to declare its inputs before they can be passed
+            operation_kwargs['inputs'] = inputs
+
         interface = mock.models.create_interface(
             node.service,
             'aria.interfaces.lifecycle',
             'create',
-            operation_kwargs=dict(implementation='{name}.{func.__name__}'.format(name=__name__,
-                                                                                 func=func))
+            operation_kwargs=operation_kwargs
         )
         node.interfaces[interface.name] = interface
         return api.task.OperationTask.for_node(

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
deleted file mode 100644
index 0a95d43..0000000
--- a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from networkx import topological_sort, DiGraph
-
-from aria.orchestrator import context
-from aria.orchestrator.workflows import api, core
-
-from tests import mock
-from tests import storage
-
-
-def test_task_graph_into_execution_graph(tmpdir):
-    interface_name = 'Standard'
-    operation_name = 'create'
-    task_context = mock.context.simple(str(tmpdir))
-    node = task_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-    interface = mock.models.create_interface(
-        node.service,
-        interface_name,
-        operation_name
-    )
-    node.interfaces[interface.name] = interface
-    task_context.model.node.update(node)
-
-    def sub_workflow(name, **_):
-        return api.task_graph.TaskGraph(name)
-
-    with context.workflow.current.push(task_context):
-        test_task_graph = api.task.WorkflowTask(sub_workflow, name='test_task_graph')
-        simple_before_task = api.task.OperationTask.for_node(node=node,
-                                                             interface_name=interface_name,
-                                                             operation_name=operation_name)
-        simple_after_task = api.task.OperationTask.for_node(node=node,
-                                                            interface_name=interface_name,
-                                                            operation_name=operation_name)
-
-        inner_task_graph = api.task.WorkflowTask(sub_workflow, name='test_inner_task_graph')
-        inner_task = api.task.OperationTask.for_node(node=node,
-                                                     interface_name=interface_name,
-                                                     operation_name=operation_name)
-        inner_task_graph.add_tasks(inner_task)
-
-    test_task_graph.add_tasks(simple_before_task)
-    test_task_graph.add_tasks(simple_after_task)
-    test_task_graph.add_tasks(inner_task_graph)
-    test_task_graph.add_dependency(inner_task_graph, simple_before_task)
-    test_task_graph.add_dependency(simple_after_task, inner_task_graph)
-
-    # Direct check
-    execution_graph = DiGraph()
-    core.translation.build_execution_graph(task_graph=test_task_graph,
-                                           execution_graph=execution_graph)
-    execution_tasks = topological_sort(execution_graph)
-
-    assert len(execution_tasks) == 7
-
-    expected_tasks_names = [
-        '{0}-Start'.format(test_task_graph.id),
-        simple_before_task.id,
-        '{0}-Start'.format(inner_task_graph.id),
-        inner_task.id,
-        '{0}-End'.format(inner_task_graph.id),
-        simple_after_task.id,
-        '{0}-End'.format(test_task_graph.id)
-    ]
-
-    assert expected_tasks_names == execution_tasks
-
-    assert isinstance(_get_task_by_name(execution_tasks[0], execution_graph),
-                      core.task.StartWorkflowTask)
-
-    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[1], execution_graph),
-                                  simple_before_task)
-    assert isinstance(_get_task_by_name(execution_tasks[2], execution_graph),
-                      core.task.StartSubWorkflowTask)
-
-    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[3], execution_graph),
-                                  inner_task)
-    assert isinstance(_get_task_by_name(execution_tasks[4], execution_graph),
-                      core.task.EndSubWorkflowTask)
-
-    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[5], execution_graph),
-                                  simple_after_task)
-    assert isinstance(_get_task_by_name(execution_tasks[6], execution_graph),
-                      core.task.EndWorkflowTask)
-    storage.release_sqlite_storage(task_context.model)
-
-
-def _assert_execution_is_api_task(execution_task, api_task):
-    assert execution_task.id == api_task.id
-    assert execution_task.name == api_task.name
-    assert execution_task.implementation == api_task.implementation
-    assert execution_task.actor == api_task.actor
-    assert execution_task.inputs == api_task.inputs
-
-
-def _get_task_by_name(task_name, graph):
-    return graph.node[task_name]['task']

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
new file mode 100644
index 0000000..0a95d43
--- /dev/null
+++ b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
@@ -0,0 +1,111 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from networkx import topological_sort, DiGraph
+
+from aria.orchestrator import context
+from aria.orchestrator.workflows import api, core
+
+from tests import mock
+from tests import storage
+
+
+def test_task_graph_into_execution_graph(tmpdir):
+    interface_name = 'Standard'
+    operation_name = 'create'
+    task_context = mock.context.simple(str(tmpdir))
+    node = task_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    interface = mock.models.create_interface(
+        node.service,
+        interface_name,
+        operation_name
+    )
+    node.interfaces[interface.name] = interface
+    task_context.model.node.update(node)
+
+    def sub_workflow(name, **_):
+        return api.task_graph.TaskGraph(name)
+
+    with context.workflow.current.push(task_context):
+        test_task_graph = api.task.WorkflowTask(sub_workflow, name='test_task_graph')
+        simple_before_task = api.task.OperationTask.for_node(node=node,
+                                                             interface_name=interface_name,
+                                                             operation_name=operation_name)
+        simple_after_task = api.task.OperationTask.for_node(node=node,
+                                                            interface_name=interface_name,
+                                                            operation_name=operation_name)
+
+        inner_task_graph = api.task.WorkflowTask(sub_workflow, name='test_inner_task_graph')
+        inner_task = api.task.OperationTask.for_node(node=node,
+                                                     interface_name=interface_name,
+                                                     operation_name=operation_name)
+        inner_task_graph.add_tasks(inner_task)
+
+    test_task_graph.add_tasks(simple_before_task)
+    test_task_graph.add_tasks(simple_after_task)
+    test_task_graph.add_tasks(inner_task_graph)
+    test_task_graph.add_dependency(inner_task_graph, simple_before_task)
+    test_task_graph.add_dependency(simple_after_task, inner_task_graph)
+
+    # Direct check
+    execution_graph = DiGraph()
+    core.translation.build_execution_graph(task_graph=test_task_graph,
+                                           execution_graph=execution_graph)
+    execution_tasks = topological_sort(execution_graph)
+
+    assert len(execution_tasks) == 7
+
+    expected_tasks_names = [
+        '{0}-Start'.format(test_task_graph.id),
+        simple_before_task.id,
+        '{0}-Start'.format(inner_task_graph.id),
+        inner_task.id,
+        '{0}-End'.format(inner_task_graph.id),
+        simple_after_task.id,
+        '{0}-End'.format(test_task_graph.id)
+    ]
+
+    assert expected_tasks_names == execution_tasks
+
+    assert isinstance(_get_task_by_name(execution_tasks[0], execution_graph),
+                      core.task.StartWorkflowTask)
+
+    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[1], execution_graph),
+                                  simple_before_task)
+    assert isinstance(_get_task_by_name(execution_tasks[2], execution_graph),
+                      core.task.StartSubWorkflowTask)
+
+    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[3], execution_graph),
+                                  inner_task)
+    assert isinstance(_get_task_by_name(execution_tasks[4], execution_graph),
+                      core.task.EndSubWorkflowTask)
+
+    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[5], execution_graph),
+                                  simple_after_task)
+    assert isinstance(_get_task_by_name(execution_tasks[6], execution_graph),
+                      core.task.EndWorkflowTask)
+    storage.release_sqlite_storage(task_context.model)
+
+
+def _assert_execution_is_api_task(execution_task, api_task):
+    assert execution_task.id == api_task.id
+    assert execution_task.name == api_task.name
+    assert execution_task.implementation == api_task.implementation
+    assert execution_task.actor == api_task.actor
+    assert execution_task.inputs == api_task.inputs
+
+
+def _get_task_by_name(task_name, graph):
+    return graph.node[task_name]['task']

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/tests/utils/test_threading.py
----------------------------------------------------------------------
diff --git a/tests/utils/test_threading.py b/tests/utils/test_threading.py
new file mode 100644
index 0000000..39ce717
--- /dev/null
+++ b/tests/utils/test_threading.py
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import pytest
+
+from aria.utils import threading
+
+
+class TestPluginManager(object):
+
+    def test_exception_raised_from_thread(self):
+
+        def error_raising_func():
+            raise ValueError('This is an error')
+
+        thread = threading.ExceptionThread(target=error_raising_func)
+        thread.start()
+        thread.join()
+
+        assert thread.is_error()
+        with pytest.raises(ValueError):
+            thread.raise_error_if_exists()
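
The test above doubles as a usage sketch for the new `ExceptionThread` helper in `aria/utils/threading.py`. A minimal example, assuming only the calls the test exercises (`target=`, `start()`, `join()`, `is_error()` and `raise_error_if_exists()`):

    from aria.utils import threading

    def risky_task():
        raise ValueError('boom')

    # run the callable in a background thread; the thread records any exception
    thread = threading.ExceptionThread(target=risky_task)
    thread.start()
    thread.join()

    if thread.is_error():
        # re-raise the captured exception in the calling thread
        thread.raise_error_if_exists()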


[09/24] incubator-ariatosca git commit: ARIA-48 cli

Posted by ra...@apache.org.
ARIA-48 cli


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/d8722826
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/d8722826
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/d8722826

Branch: refs/heads/ARIA-48-aria-cli
Commit: d872282688a50178d396fb3cbd8946b0ec37d80a
Parents: e7ffc73
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Tue Mar 28 12:17:46 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/__init__.py                                |   2 +-
 aria/cli/VERSION                                |   3 +
 aria/cli/args_parser.py                         | 269 ---------
 aria/cli/cli.py                                 | 113 ----
 aria/cli/cli/__init__.py                        |  14 +
 aria/cli/cli/aria.py                            | 453 +++++++++++++++
 aria/cli/cli/helptexts.py                       |  56 ++
 aria/cli/colorful_event.py                      | 152 ++++++
 aria/cli/commands.py                            | 546 -------------------
 aria/cli/commands/__init__.py                   |  14 +
 aria/cli/commands/executions.py                 | 172 ++++++
 aria/cli/commands/logs.py                       |  68 +++
 aria/cli/commands/node_templates.py             | 104 ++++
 aria/cli/commands/nodes.py                      |  96 ++++
 aria/cli/commands/plugins.py                    | 145 +++++
 aria/cli/commands/service_templates.py          | 207 +++++++
 aria/cli/commands/services.py                   | 175 ++++++
 aria/cli/commands/workflows.py                  | 107 ++++
 aria/cli/config.py                              |  46 --
 aria/cli/config/__init__.py                     |  14 +
 aria/cli/config/config.py                       |  70 +++
 aria/cli/config/config_template.yaml            |  12 +
 aria/cli/constants.py                           |  18 +
 aria/cli/csar.py                                |  11 +-
 aria/cli/dry.py                                 |  88 ---
 aria/cli/env.py                                 | 118 ++++
 aria/cli/exceptions.py                          |  54 +-
 aria/cli/inputs.py                              | 118 ++++
 aria/cli/logger.py                              | 113 ++++
 aria/cli/main.py                                |  73 +++
 aria/cli/service_template_utils.py              | 140 +++++
 aria/cli/storage.py                             |  95 ----
 aria/cli/table.py                               |  90 +++
 aria/cli/utils.py                               | 152 ++++++
 aria/core.py                                    | 116 ++++
 aria/exceptions.py                              |  16 +
 aria/modeling/__init__.py                       |   2 +
 aria/modeling/exceptions.py                     |  18 +
 aria/modeling/orchestration.py                  |  14 +-
 aria/modeling/service_changes.py                |  10 +-
 aria/modeling/service_instance.py               |  15 +-
 aria/modeling/service_template.py               |  16 +-
 aria/modeling/utils.py                          |  87 ++-
 aria/orchestrator/context/common.py             |  27 +-
 aria/orchestrator/context/workflow.py           |  19 +-
 .../execution_plugin/ctx_proxy/server.py        |   3 +-
 aria/orchestrator/runner.py                     | 101 ----
 aria/orchestrator/workflow_runner.py            | 147 +++++
 aria/orchestrator/workflows/api/task.py         |  41 +-
 aria/orchestrator/workflows/builtin/__init__.py |   1 +
 .../workflows/builtin/execute_operation.py      |  15 +-
 aria/orchestrator/workflows/builtin/utils.py    |  86 ++-
 aria/storage/core.py                            |   5 +
 aria/utils/archive.py                           |  63 +++
 aria/utils/exceptions.py                        |  11 +
 aria/utils/file.py                              |  13 +
 aria/utils/formatting.py                        |  43 ++
 aria/utils/threading.py                         |  24 +
 aria/utils/type.py                              |  57 ++
 .../simple_v1_0/modeling/__init__.py            |   3 +-
 requirements.in                                 |   5 +
 setup.py                                        |   2 +-
 tests/mock/context.py                           |   7 +-
 tests/mock/models.py                            |  13 +-
 tests/modeling/test_models.py                   |  10 +-
 tests/orchestrator/context/test_operation.py    |  45 +-
 .../context/test_resource_render.py             |   4 +-
 tests/orchestrator/context/test_serialize.py    |   4 +-
 tests/orchestrator/context/test_toolbelt.py     |  11 +-
 tests/orchestrator/context/test_workflow.py     |  10 +-
 .../orchestrator/execution_plugin/test_local.py |  10 +-
 tests/orchestrator/execution_plugin/test_ssh.py |   2 +-
 .../workflows/builtin/test_execute_operation.py |   3 +-
 .../orchestrator/workflows/core/test_engine.py  |  10 +-
 .../test_task_graph_into_exececution_graph.py   | 111 ----
 .../test_task_graph_into_execution_graph.py     | 111 ++++
 tests/utils/test_threading.py                   |  35 ++
 77 files changed, 3633 insertions(+), 1621 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/__init__.py
----------------------------------------------------------------------
diff --git a/aria/__init__.py b/aria/__init__.py
index b9251d5..df75b1e 100644
--- a/aria/__init__.py
+++ b/aria/__init__.py
@@ -84,6 +84,6 @@ def application_resource_storage(api, api_kwargs=None, initiator=None, initiator
 
     return storage.ResourceStorage(api_cls=api,
                                    api_kwargs=api_kwargs,
-                                   items=['blueprint', 'deployment', 'plugin'],
+                                   items=['service_template', 'service', 'plugin'],
                                    initiator=initiator,
                                    initiator_kwargs=initiator_kwargs)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/VERSION
----------------------------------------------------------------------
diff --git a/aria/cli/VERSION b/aria/cli/VERSION
new file mode 100644
index 0000000..6a3252e
--- /dev/null
+++ b/aria/cli/VERSION
@@ -0,0 +1,3 @@
+{
+  "version": "0.1.0"
+}

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/args_parser.py
----------------------------------------------------------------------
diff --git a/aria/cli/args_parser.py b/aria/cli/args_parser.py
deleted file mode 100644
index 81ee513..0000000
--- a/aria/cli/args_parser.py
+++ /dev/null
@@ -1,269 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Argument parsing configuration and functions
-"""
-
-import argparse
-from functools import partial
-
-from ..utils.argparse import ArgumentParser
-
-NO_VERBOSE = 0
-
-
-class SmartFormatter(argparse.HelpFormatter):
-    """
-    TODO: what is this?
-    """
-    def _split_lines(self, text, width):
-        if text.startswith('R|'):
-            return text[2:].splitlines()
-        return super(SmartFormatter, self)._split_lines(text, width)
-
-
-def sub_parser_decorator(func=None, **parser_settings):
-    """
-    Decorated for sub_parser argument definitions
-    """
-    if not func:
-        return partial(sub_parser_decorator, **parser_settings)
-
-    def _wrapper(parser):
-        sub_parser = parser.add_parser(**parser_settings)
-        sub_parser.add_argument(
-            '-v', '--verbose',
-            dest='verbosity',
-            action='count',
-            default=NO_VERBOSE,
-            help='Set verbosity level (can be passed multiple times)')
-        func(sub_parser)
-        return sub_parser
-    return _wrapper
-
-
-def config_parser(parser=None):
-    """
-    Top level argparse configuration
-    """
-    parser = parser or ArgumentParser(
-        prog='ARIA',
-        description="ARIA's Command Line Interface",
-        formatter_class=SmartFormatter)
-    parser.add_argument('-v', '--version', action='version')
-    sub_parser = parser.add_subparsers(title='Commands', dest='command')
-    add_init_parser(sub_parser)
-    add_execute_parser(sub_parser)
-    add_parse_parser(sub_parser)
-    add_workflow_parser(sub_parser)
-    add_spec_parser(sub_parser)
-    add_csar_create_parser(sub_parser)
-    add_csar_open_parser(sub_parser)
-    add_csar_validate_parser(sub_parser)
-    return parser
-
-
-@sub_parser_decorator(
-    name='parse',
-    help='Parse a blueprint',
-    formatter_class=SmartFormatter)
-def add_parse_parser(parse):
-    """
-    ``parse`` command parser configuration
-    """
-    parse.add_argument(
-        'uri',
-        help='URI or file path to service template')
-    parse.add_argument(
-        'consumer',
-        nargs='?',
-        default='validate',
-        help='"validate" (default), "presentation", "template", "types", "instance", or consumer '
-             'class name (full class path or short name)')
-    parse.add_argument(
-        '--loader-source',
-        default='aria.parser.loading.DefaultLoaderSource',
-        help='loader source class for the parser')
-    parse.add_argument(
-        '--reader-source',
-        default='aria.parser.reading.DefaultReaderSource',
-        help='reader source class for the parser')
-    parse.add_argument(
-        '--presenter-source',
-        default='aria.parser.presentation.DefaultPresenterSource',
-        help='presenter source class for the parser')
-    parse.add_argument(
-        '--presenter',
-        help='force use of this presenter class in parser')
-    parse.add_argument(
-        '--prefix', nargs='*',
-        help='prefixes for imports')
-    parse.add_flag_argument(
-        'debug',
-        help_true='print debug info',
-        help_false='don\'t print debug info')
-    parse.add_flag_argument(
-        'cached-methods',
-        help_true='enable cached methods',
-        help_false='disable cached methods',
-        default=True)
-
-
-@sub_parser_decorator(
-    name='workflow',
-    help='Run a workflow on a blueprint',
-    formatter_class=SmartFormatter)
-def add_workflow_parser(workflow):
-    """
-    ``workflow`` command parser configuration
-    """
-    workflow.add_argument(
-        'uri',
-        help='URI or file path to service template')
-    workflow.add_argument(
-        '-w', '--workflow',
-        default='install',
-        help='The workflow name')
-    workflow.add_flag_argument(
-        'dry',
-        default=True,
-        help_true='dry run',
-        help_false='wet run')
-
-
-@sub_parser_decorator(
-    name='init',
-    help='Initialize environment',
-    formatter_class=SmartFormatter)
-def add_init_parser(init):
-    """
-    ``init`` command parser configuration
-    """
-    init.add_argument(
-        '-d', '--deployment-id',
-        required=True,
-        help='A unique ID for the deployment')
-    init.add_argument(
-        '-p', '--blueprint-path',
-        dest='blueprint_path',
-        required=True,
-        help='The path to the desired blueprint')
-    init.add_argument(
-        '-i', '--inputs',
-        dest='input',
-        action='append',
-        help='R|Inputs for the local workflow creation \n'
-             '(Can be provided as wildcard based paths (*.yaml, etc..) to YAML files, \n'
-             'a JSON string or as "key1=value1;key2=value2"). \n'
-             'This argument can be used multiple times')
-    init.add_argument(
-        '-b', '--blueprint-id',
-        dest='blueprint_id',
-        required=True,
-        help='The blueprint ID'
-    )
-
-
-@sub_parser_decorator(
-    name='execute',
-    help='Execute a workflow',
-    formatter_class=SmartFormatter)
-def add_execute_parser(execute):
-    """
-    ``execute`` command parser configuration
-    """
-    execute.add_argument(
-        '-d', '--deployment-id',
-        required=True,
-        help='A unique ID for the deployment')
-    execute.add_argument(
-        '-w', '--workflow',
-        dest='workflow_id',
-        help='The workflow to execute')
-    execute.add_argument(
-        '-p', '--parameters',
-        dest='parameters',
-        action='append',
-        help='R|Parameters for the workflow execution\n'
-             '(Can be provided as wildcard based paths (*.yaml, etc..) to YAML files,\n'
-             'a JSON string or as "key1=value1;key2=value2").\n'
-             'This argument can be used multiple times.')
-    execute.add_argument(
-        '--task-retries',
-        dest='task_retries',
-        type=int,
-        help='How many times should a task be retried in case of failure')
-    execute.add_argument(
-        '--task-retry-interval',
-        dest='task_retry_interval',
-        default=1,
-        type=int,
-        help='How many seconds to wait before each task is retried')
-
-
-@sub_parser_decorator(
-    name='csar-create',
-    help='Create a CSAR file from a TOSCA service template directory',
-    formatter_class=SmartFormatter)
-def add_csar_create_parser(parse):
-    parse.add_argument(
-        'source',
-        help='Service template directory')
-    parse.add_argument(
-        'entry',
-        help='Entry definition file relative to service template directory')
-    parse.add_argument(
-        '-d', '--destination',
-        help='Output CSAR zip destination',
-        required=True)
-
-
-@sub_parser_decorator(
-    name='csar-open',
-    help='Extracts a CSAR file to a TOSCA service template directory',
-    formatter_class=SmartFormatter)
-def add_csar_open_parser(parse):
-    parse.add_argument(
-        'source',
-        help='CSAR file location')
-    parse.add_argument(
-        '-d', '--destination',
-        help='Output directory to extract the CSAR into',
-        required=True)
-
-
-@sub_parser_decorator(
-    name='csar-validate',
-    help='Validates a CSAR file',
-    formatter_class=SmartFormatter)
-def add_csar_validate_parser(parse):
-    parse.add_argument(
-        'source',
-        help='CSAR file location')
-
-
-@sub_parser_decorator(
-    name='spec',
-    help='Specification tool',
-    formatter_class=SmartFormatter)
-def add_spec_parser(spec):
-    """
-    ``spec`` command parser configuration
-    """
-    spec.add_argument(
-        '--csv',
-        action='store_true',
-        help='output as CSV')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/cli.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli.py b/aria/cli/cli.py
deleted file mode 100644
index 8d014b3..0000000
--- a/aria/cli/cli.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI Entry point
-"""
-
-import os
-import logging
-import tempfile
-
-from .. import install_aria_extensions
-from ..logger import (
-    create_logger,
-    create_console_log_handler,
-    create_file_log_handler,
-    LoggerMixin,
-)
-from ..utils.exceptions import print_exception
-from .args_parser import config_parser
-from .commands import (
-    ParseCommand,
-    WorkflowCommand,
-    InitCommand,
-    ExecuteCommand,
-    CSARCreateCommand,
-    CSAROpenCommand,
-    CSARValidateCommand,
-    SpecCommand,
-)
-
-__version__ = '0.1.0'
-
-
-class AriaCli(LoggerMixin):
-    """
-    Context manager based class that enables proper top level error handling
-    """
-
-    def __init__(self, *args, **kwargs):
-        super(AriaCli, self).__init__(*args, **kwargs)
-        self.commands = {
-            'parse': ParseCommand.with_logger(base_logger=self.logger),
-            'workflow': WorkflowCommand.with_logger(base_logger=self.logger),
-            'init': InitCommand.with_logger(base_logger=self.logger),
-            'execute': ExecuteCommand.with_logger(base_logger=self.logger),
-            'csar-create': CSARCreateCommand.with_logger(base_logger=self.logger),
-            'csar-open': CSAROpenCommand.with_logger(base_logger=self.logger),
-            'csar-validate': CSARValidateCommand.with_logger(base_logger=self.logger),
-            'spec': SpecCommand.with_logger(base_logger=self.logger),
-        }
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        """
-        Here we will handle errors
-        :param exc_type:
-        :param exc_val:
-        :param exc_tb:
-        :return:
-        """
-        # todo: error handling
-        # todo: cleanup if needed
-        # TODO: user message if needed
-        pass
-
-    def run(self):
-        """
-        Parses user arguments and run the appropriate command
-        """
-        parser = config_parser()
-        args, unknown_args = parser.parse_known_args()
-
-        command_handler = self.commands[args.command]
-        self.logger.debug('Running command: {args.command} handler: {0}'.format(
-            command_handler, args=args))
-        try:
-            command_handler(args, unknown_args)
-        except Exception as e:
-            print_exception(e)
-
-
-def main():
-    """
-    CLI entry point
-    """
-    install_aria_extensions()
-    create_logger(
-        handlers=[
-            create_console_log_handler(),
-            create_file_log_handler(file_path=os.path.join(tempfile.gettempdir(), 'aria_cli.log')),
-        ],
-        level=logging.INFO)
-    with AriaCli() as aria:
-        aria.run()
-
-
-if __name__ == '__main__':
-    main()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/cli/__init__.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli/__init__.py b/aria/cli/cli/__init__.py
new file mode 100644
index 0000000..ae1e83e
--- /dev/null
+++ b/aria/cli/cli/__init__.py
@@ -0,0 +1,14 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/cli/aria.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli/aria.py b/aria/cli/cli/aria.py
new file mode 100644
index 0000000..baa72eb
--- /dev/null
+++ b/aria/cli/cli/aria.py
@@ -0,0 +1,453 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import sys
+import difflib
+import StringIO
+import traceback
+from functools import wraps
+
+import click
+
+from ..env import env, logger
+from ..cli import helptexts
+from ..inputs import inputs_to_dict
+from ..constants import SAMPLE_SERVICE_TEMPLATE_FILENAME
+from ...utils.exceptions import get_exception_as_string
+
+
+CLICK_CONTEXT_SETTINGS = dict(
+    help_option_names=['-h', '--help'],
+    token_normalize_func=lambda param: param.lower())
+
+
+class MutuallyExclusiveOption(click.Option):
+    """Makes options mutually exclusive. The option must pass a `cls` argument
+    with this class name and a `mutually_exclusive` argument with a list of
+    argument names it is mutually exclusive with.
+
+    NOTE: All mutually exclusive options must use this. It's not enough to
+    use it in just one of the options.
+    """
+
+    def __init__(self, *args, **kwargs):
+        self.mutually_exclusive = set(kwargs.pop('mutually_exclusive', []))
+        self.mutuality_error_message = \
+            kwargs.pop('mutuality_error_message',
+                       helptexts.DEFAULT_MUTUALITY_MESSAGE)
+        self.mutuality_string = ', '.join(self.mutually_exclusive)
+        if self.mutually_exclusive:
+            help = kwargs.get('help', '')
+            kwargs['help'] = (
+                '{0}. This argument is mutually exclusive with '
+                'arguments: [{1}] ({2})'.format(
+                    help,
+                    self.mutuality_string,
+                    self.mutuality_error_message))
+        super(MutuallyExclusiveOption, self).__init__(*args, **kwargs)
+
+    def handle_parse_result(self, ctx, opts, args):
+        if self.mutually_exclusive.intersection(opts) and self.name in opts:
+            raise click.UsageError(
+                'Illegal usage: `{0}` is mutually exclusive with '
+                'arguments: [{1}] ({2}).'.format(
+                    self.name,
+                    self.mutuality_string,
+                    self.mutuality_error_message))
+        return super(MutuallyExclusiveOption, self).handle_parse_result(
+            ctx, opts, args)
+
+
+def _format_version_data(version_data,
+                         prefix=None,
+                         suffix=None,
+                         infix=None):
+    all_data = version_data.copy()
+    all_data['prefix'] = prefix or ''
+    all_data['suffix'] = suffix or ''
+    all_data['infix'] = infix or ''
+    output = StringIO.StringIO()
+    output.write('{prefix}{version}'.format(**all_data))
+    output.write('{suffix}'.format(**all_data))
+    return output.getvalue()
+
+
+def show_version(ctx, param, value):
+    if not value:
+        return
+
+    cli_version_data = env.get_version_data()
+    cli_version = _format_version_data(
+        cli_version_data,
+        prefix='ARIA CLI ',
+        infix=' ' * 5,
+        suffix='')
+
+    logger.info(cli_version)
+    ctx.exit()
+
+
+def inputs_callback(ctx, param, value):
+    """Allow to pass any inputs we provide to a command as
+    processed inputs instead of having to call `inputs_to_dict`
+    inside the command.
+
+    `@aria.options.inputs` already calls this callback so that
+    every time you use the option it returns the inputs as a
+    dictionary.
+    """
+    if not value:
+        return {}
+
+    return inputs_to_dict(value)
+
+
+def set_verbosity_level(ctx, param, value):
+    if not value:
+        return
+
+    env.logging.verbosity_level = value
+
+
+def set_cli_except_hook():
+
+    def recommend(possible_solutions):
+        logger.info('Possible solutions:')
+        for solution in possible_solutions:
+            logger.info('  - {0}'.format(solution))
+
+    def new_excepthook(tpe, value, tb):
+        if env.logging.is_high_verbose_level():
+            # log error including traceback
+            logger.error(get_exception_as_string(tpe, value, tb))
+        else:
+            # write the full error to the log file
+            with open(env.logging.log_file, 'a') as log_file:
+                traceback.print_exception(
+                    etype=tpe,
+                    value=value,
+                    tb=tb,
+                    file=log_file)
+            # print only the error message
+            print value
+
+        if hasattr(value, 'possible_solutions'):
+            recommend(getattr(value, 'possible_solutions'))
+
+    sys.excepthook = new_excepthook
+
+
+def pass_logger(func):
+    """Simply passes the logger to a command.
+    """
+    # Wraps here makes sure the original docstring propagates to click
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        return func(logger=logger, *args, **kwargs)
+
+    return wrapper
+
+
+def pass_plugin_manager(func):
+    """Simply passes the plugin manager to a command.
+    """
+    # Wraps here makes sure the original docstring propagates to click
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        return func(plugin_manager=env.plugin_manager, *args, **kwargs)
+
+    return wrapper
+
+
+def pass_model_storage(func):
+    """Simply passes the model storage to a command.
+    """
+    # Wraps here makes sure the original docstring propagates to click
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        return func(model_storage=env.model_storage, *args, **kwargs)
+
+    return wrapper
+
+
+def pass_resource_storage(func):
+    """Simply passes the resource storage to a command.
+    """
+    # Wraps here makes sure the original docstring propagates to click
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        return func(resource_storage=env.resource_storage, *args, **kwargs)
+
+    return wrapper
+
+
+def pass_context(func):
+    """Make click context ARIA specific
+
+    This exists purely for aesthetic reasons, otherwise
+    Some decorators are called `@click.something` instead of
+    `@aria.something`
+    """
+    return click.pass_context(func)
+
+
+class AliasedGroup(click.Group):
+    def __init__(self, *args, **kwargs):
+        self.max_suggestions = kwargs.pop("max_suggestions", 3)
+        self.cutoff = kwargs.pop("cutoff", 0.5)
+        super(AliasedGroup, self).__init__(*args, **kwargs)
+
+    def get_command(self, ctx, cmd_name):
+        rv = click.Group.get_command(self, ctx, cmd_name)
+        if rv is not None:
+            return rv
+        matches = \
+            [x for x in self.list_commands(ctx) if x.startswith(cmd_name)]
+        if not matches:
+            return None
+        elif len(matches) == 1:
+            return click.Group.get_command(self, ctx, matches[0])
+        ctx.fail('Too many matches: {0}'.format(', '.join(sorted(matches))))
+
+    def resolve_command(self, ctx, args):
+        """Override clicks ``resolve_command`` method
+        and appends *Did you mean ...* suggestions
+        to the raised exception message.
+        """
+        try:
+            return super(AliasedGroup, self).resolve_command(ctx, args)
+        except click.exceptions.UsageError as error:
+            error_msg = str(error)
+            original_cmd_name = click.utils.make_str(args[0])
+            matches = difflib.get_close_matches(
+                original_cmd_name,
+                self.list_commands(ctx),
+                self.max_suggestions,
+                self.cutoff)
+            if matches:
+                error_msg += '\n\nDid you mean one of these?\n    {0}'.format(
+                    '\n    '.join(matches))
+            raise click.exceptions.UsageError(error_msg, error.ctx)
+
+
+def group(name):
+    """Allow to create a group with a default click context
+    and a cls for click's `didyoueamn` without having to repeat
+    it for every group.
+    """
+    return click.group(
+        name=name,
+        context_settings=CLICK_CONTEXT_SETTINGS,
+        cls=AliasedGroup)
+
+
+def command(*args, **kwargs):
+    """Make Click commands ARIA specific
+
+    This exists purely for aesthetical reasons, otherwise
+    Some decorators are called `@click.something` instead of
+    `@aria.something`
+    """
+    return click.command(*args, **kwargs)
+
+
+def argument(*args, **kwargs):
+    """Make Click arguments ARIA specific
+
+    This exists purely for aesthetic reasons, otherwise
+    Some decorators are called `@click.something` instead of
+    `@aria.something`
+    """
+    return click.argument(*args, **kwargs)
+
+
+class Options(object):
+    def __init__(self):
+        """The options api is nicer when you use each option by calling
+        `@aria.options.some_option` instead of `@aria.some_option`.
+
+        Note that some options are attributes and some are static methods.
+        The reason for that is that we want to be explicit regarding how
+        a developer sees an option. It it can receive arguments, it's a
+        method - if not, it's an attribute.
+        """
+        self.version = click.option(
+            '--version',
+            is_flag=True,
+            callback=show_version,
+            expose_value=False,
+            is_eager=True,
+            help=helptexts.VERSION)
+
+        self.inputs = click.option(
+            '-i',
+            '--inputs',
+            multiple=True,
+            callback=inputs_callback,
+            help=helptexts.INPUTS)
+
+        self.json_output = click.option(
+            '--json-output',
+            is_flag=True,
+            help=helptexts.JSON_OUTPUT)
+
+        self.init_hard_reset = click.option(
+            '--hard',
+            is_flag=True,
+            help=helptexts.HARD_RESET)
+
+        self.reset_context = click.option(
+            '-r',
+            '--reset-context',
+            is_flag=True,
+            help=helptexts.RESET_CONTEXT)
+
+        self.enable_colors = click.option(
+            '--enable-colors',
+            is_flag=True,
+            default=False,
+            help=helptexts.ENABLE_COLORS)
+
+        self.node_name = click.option(
+            '-n',
+            '--node-name',
+            required=False,
+            help=helptexts.NODE_NAME)
+
+        self.descending = click.option(
+            '--descending',
+            required=False,
+            is_flag=True,
+            default=False,
+            help=helptexts.DESCENDING)
+
+    @staticmethod
+    def verbose(expose_value=False):
+        return click.option(
+            '-v',
+            '--verbose',
+            count=True,
+            callback=set_verbosity_level,
+            expose_value=expose_value,
+            is_eager=True,
+            help=helptexts.VERBOSE)
+
+    @staticmethod
+    def force(help):
+        return click.option(
+            '-f',
+            '--force',
+            is_flag=True,
+            help=help)
+
+    @staticmethod
+    def service_template_filename():
+        return click.option(
+            '-n',
+            '--service-template-filename',
+            default=SAMPLE_SERVICE_TEMPLATE_FILENAME,
+            help=helptexts.SERVICE_TEMPLATE_FILENAME)
+
+    @staticmethod
+    def workflow_id(default):
+        return click.option(
+            '-w',
+            '--workflow-id',
+            default=default,
+            help=helptexts.WORKFLOW_TO_EXECUTE.format(default))
+
+    @staticmethod
+    def task_thread_pool_size(default=1):
+        return click.option(
+            '--task-thread-pool-size',
+            type=int,
+            default=default,
+            help=helptexts.TASK_THREAD_POOL_SIZE.format(default))
+
+    @staticmethod
+    def task_max_attempts(default=1):
+        return click.option(
+            '--task-max-attempts',
+            type=int,
+            default=default,
+            help=helptexts.TASK_MAX_ATTEMPTS.format(default))
+
+    @staticmethod
+    def sort_by(default='created_at'):
+        return click.option(
+            '--sort-by',
+            required=False,
+            default=default,
+            help=helptexts.SORT_BY)
+
+    @staticmethod
+    def task_retry_interval(default=1):
+        return click.option(
+            '--task-retry-interval',
+            type=int,
+            default=default,
+            help=helptexts.TASK_RETRY_INTERVAL.format(default))
+
+    @staticmethod
+    def service_id(required=False):
+        return click.option(
+            '-s',
+            '--service-id',
+            required=required,
+            help=helptexts.SERVICE_ID)
+
+    @staticmethod
+    def execution_id(required=False):
+        return click.option(
+            '-e',
+            '--execution-id',
+            required=required,
+            help=helptexts.EXECUTION_ID)
+
+    @staticmethod
+    def service_template_id(required=False):
+        return click.option(
+            '-t',
+            '--service-template-id',
+            required=required,
+            help=helptexts.SERVICE_TEMPLATE_ID)
+
+    @staticmethod
+    def service_template_path(required=False):
+        return click.option(
+            '-p',
+            '--service-template-path',
+            required=required,
+            type=click.Path(exists=True))
+
+    @staticmethod
+    def service_name(required=False):
+        return click.option(
+            '-s',
+            '--service-name',
+            required=required,
+            help=helptexts.SERVICE_ID)
+
+    @staticmethod
+    def service_template_name(required=False):
+        return click.option(
+            '-t',
+            '--service-template-name',
+            required=required,
+            help=helptexts.SERVICE_ID)
+
+
+options = Options()
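
As a rough illustration of how the pieces above are meant to compose into commands (the `services create` command below is hypothetical and not part of this patch; only the decorators and option names come from the module above):

    from aria.cli.cli import aria

    @aria.group(name='services')
    @aria.options.verbose()
    def services():
        """Handle services."""
        pass

    @services.command(name='create', short_help='Create a service')
    @aria.argument('service-name')
    @aria.options.inputs        # delivered as a dict, thanks to inputs_callback
    @aria.options.verbose()
    @aria.pass_model_storage
    @aria.pass_logger
    def create(service_name, inputs, model_storage, logger):
        logger.info('Creating service {0}...'.format(service_name))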

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/cli/helptexts.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli/helptexts.py b/aria/cli/cli/helptexts.py
new file mode 100644
index 0000000..02519cb
--- /dev/null
+++ b/aria/cli/cli/helptexts.py
@@ -0,0 +1,56 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+VERBOSE = \
+    "Show verbose output. You can supply this up to three times (i.e. -vvv)"
+VERSION = "Display the version and exit"
+
+INPUTS_PARAMS_USAGE = (
+    '(Can be provided as wildcard based paths '
+    '(*.yaml, /my_inputs/, etc..) to YAML files, a JSON string or as '
+    'key1=value1;key2=value2). This argument can be used multiple times'
+)
+WORKFLOW_TO_EXECUTE = "The workflow to execute [default: {0}]"
+
+SERVICE_TEMPLATE_PATH = "The path to the application's service template file"
+SERVICE_TEMPLATE_ID = "The unique identifier for the service template"
+
+RESET_CONTEXT = "Reset the working environment"
+HARD_RESET = "Hard reset the configuration, including coloring and loggers"
+ENABLE_COLORS = "Enable colors in logger (use --hard when working with" \
+                " an initialized environment) [default: False]"
+
+SERVICE_TEMPLATE_FILENAME = (
+    "The name of the archive's main service template file. "
+    "This is only relevant if uploading an archive")
+INPUTS = "Inputs for the service {0}".format(INPUTS_PARAMS_USAGE)
+PARAMETERS = "Parameters for the workflow {0}".format(INPUTS_PARAMS_USAGE)
+TASK_RETRY_INTERVAL = \
+    "Minimum interval (in seconds) to wait between task retry attempts [default: {0}]"
+TASK_MAX_ATTEMPTS = \
+    "How many times should a task be attempted in case of failures [default: {0}]"
+
+JSON_OUTPUT = "Output events in a consumable JSON format"
+
+SERVICE_ID = "The unique identifier for the service"
+EXECUTION_ID = "The unique identifier for the execution"
+IGNORE_RUNNING_NODES = "Delete the service even if it has running nodes"
+
+NODE_NAME = "The node's name"
+
+DEFAULT_MUTUALITY_MESSAGE = 'Cannot be used simultaneously'
+
+SORT_BY = "Key for sorting the list"
+DESCENDING = "Sort list in descending order [default: False]"

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/colorful_event.py
----------------------------------------------------------------------
diff --git a/aria/cli/colorful_event.py b/aria/cli/colorful_event.py
new file mode 100644
index 0000000..53e3d02
--- /dev/null
+++ b/aria/cli/colorful_event.py
@@ -0,0 +1,152 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from contextlib import contextmanager
+
+from colorama import Fore, Style
+
+from cloudify.event import Event
+
+
+def colorful_property(prop):
+    """
+    A decorator for coloring values of the parent event type properties
+    :param prop: the property to color (should be a method returning a color)
+    :return: a property which colors the value of the parent event type's
+             property with the same name
+    """
+    def _decorator(self):
+        # getting value of property from parent event type
+        val = getattr(super(ColorfulEvent, self), prop.__name__)
+        # getting the desired color
+        color = prop(self)
+        # coloring the value
+        return self._color_message(val, color)
+    return property(_decorator)
+
+
+class ColorfulEvent(Event):
+
+    RESET_COLOR = Fore.RESET + Style.RESET_ALL
+
+    TIMESTAMP_COLOR = Fore.RESET
+    LOG_TYPE_COLOR = Fore.YELLOW
+    EVENT_TYPE_COLOR = Fore.MAGENTA
+    DEPLOYMENT_ID_COLOR = Fore.CYAN
+    OPERATION_INFO_COLOR = Fore.RESET
+    NODE_ID_COLOR = Fore.BLUE
+    SOURCE_ID_COLOR = Fore.BLUE
+    TARGET_ID_COLOR = Fore.BLUE
+    OPERATION_COLOR = Fore.YELLOW
+
+    # colors entire message part according to event type
+    _message_color_by_event_type = {
+        'workflow_started': Style.BRIGHT + Fore.GREEN,
+        'workflow_succeeded': Style.BRIGHT + Fore.GREEN,
+        'workflow_failed': Style.BRIGHT + Fore.RED,
+        'workflow_cancelled': Style.BRIGHT + Fore.YELLOW,
+
+        'sending_task': Fore.RESET,
+        'task_started': Fore.RESET,
+        'task_succeeded': Fore.GREEN,
+        'task_rescheduled': Fore.YELLOW,
+        'task_failed': Fore.RED
+    }
+
+    # colors only the log level part
+    _log_level_to_color = {
+        'INFO': Fore.CYAN,
+        'WARN': Fore.YELLOW,
+        'WARNING': Fore.YELLOW,
+        'ERROR': Fore.RED,
+        'FATAL': Fore.RED
+    }
+
+    _color_context = RESET_COLOR
+
+    @property
+    def operation_info(self):
+        color = self.OPERATION_INFO_COLOR
+
+        with self._nest_colors(color):
+            op_info = super(ColorfulEvent, self).operation_info
+
+        return self._color_message(op_info, color)
+
+    @property
+    def text(self):
+        event_type = super(ColorfulEvent, self).event_type  # might be None
+        color = self._message_color_by_event_type.get(event_type)
+
+        with self._nest_colors(color):
+            msg = super(ColorfulEvent, self).text
+
+        return self._color_message(msg, color)
+
+    @property
+    def log_level(self):
+        lvl = super(ColorfulEvent, self).log_level
+        color = self._log_level_to_color.get(lvl)
+        return self._color_message(lvl, color)
+
+    @colorful_property
+    def timestamp(self):
+        return self.TIMESTAMP_COLOR
+
+    @colorful_property
+    def printable_timestamp(self):
+        return self.TIMESTAMP_COLOR
+
+    @colorful_property
+    def event_type_indicator(self):
+        return self.LOG_TYPE_COLOR if self.is_log_message else \
+            self.EVENT_TYPE_COLOR
+
+    @colorful_property
+    def operation(self):
+        return self.OPERATION_COLOR
+
+    @colorful_property
+    def node_id(self):
+        return self.NODE_ID_COLOR
+
+    @colorful_property
+    def source_id(self):
+        return self.SOURCE_ID_COLOR
+
+    @colorful_property
+    def target_id(self):
+        return self.TARGET_ID_COLOR
+
+    @colorful_property
+    def deployment_id(self):
+        return self.DEPLOYMENT_ID_COLOR
+
+    @contextmanager
+    def _nest_colors(self, nesting_color):
+        prev_color_context = self._color_context
+        if nesting_color:
+            self._color_context = nesting_color
+        yield
+        self._color_context = prev_color_context
+
+    def _color_message(self, val, color):
+        if not val or not color:
+            return val
+
+        return "{0}{1}{2}".format(
+            color,
+            val,
+            self._color_context)
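
The coloring pattern used here (a decorator that re-colors a value produced by a parent class property and then restores the surrounding color) can be illustrated independently of `cloudify.event.Event`. A self-contained sketch of the same idea, not of this class's actual API:

    from colorama import Fore, Style

    class PlainEvent(object):
        @property
        def operation(self):
            return 'create'

    class ColorfulDemo(PlainEvent):
        RESET = Fore.RESET + Style.RESET_ALL

        def _color_message(self, val, color):
            # wrap the value in the color, then restore the reset color
            if not val or not color:
                return val
            return '{0}{1}{2}'.format(color, val, self.RESET)

        @property
        def operation(self):
            # color the value computed by the parent property
            return self._color_message(super(ColorfulDemo, self).operation, Fore.YELLOW)

    print(ColorfulDemo().operation)   # prints 'create' in yellow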

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/commands.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands.py b/aria/cli/commands.py
deleted file mode 100644
index ee329e7..0000000
--- a/aria/cli/commands.py
+++ /dev/null
@@ -1,546 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI various commands implementation
-"""
-
-import json
-import os
-import sys
-import csv
-import shutil
-import tempfile
-from glob import glob
-from importlib import import_module
-
-from ruamel import yaml # @UnresolvedImport
-
-from .. import extension
-from ..logger import LoggerMixin
-from ..parser import iter_specifications
-from ..parser.consumption import (
-    ConsumptionContext,
-    ConsumerChain,
-    Read,
-    Validate,
-    ServiceTemplate,
-    Types,
-    Inputs,
-    ServiceInstance
-)
-from ..parser.loading import LiteralLocation, UriLocation
-from ..utils.application import StorageManager
-from ..utils.caching import cachedmethod
-from ..utils.console import (puts, Colored, indent)
-from ..utils.imports import (import_fullname, import_modules)
-from ..utils.collections import OrderedDict
-from ..orchestrator import WORKFLOW_DECORATOR_RESERVED_ARGUMENTS
-from ..orchestrator.runner import Runner
-from ..orchestrator.workflows.builtin import BUILTIN_WORKFLOWS
-from .dry import convert_to_dry
-
-from .exceptions import (
-    AriaCliFormatInputsError,
-    AriaCliYAMLInputsError,
-    AriaCliInvalidInputsError
-)
-from . import csar
-
-
-class BaseCommand(LoggerMixin):
-    """
-    Base class for CLI commands.
-    """
-
-    def __repr__(self):
-        return 'AriaCli({cls.__name__})'.format(cls=self.__class__)
-
-    def __call__(self, args_namespace, unknown_args):
-        """
-        __call__ method is called when running command
-        :param args_namespace:
-        """
-        pass
-
-    def parse_inputs(self, inputs):
-        """
-        Returns a dictionary of inputs `resources` can be:
-        - A list of files.
-        - A single file
-        - A directory containing multiple input files
-        - A key1=value1;key2=value2 pairs string.
-        - Wildcard based string (e.g. *-inputs.yaml)
-        """
-
-        parsed_dict = {}
-
-        def _format_to_dict(input_string):
-            self.logger.info('Processing inputs source: {0}'.format(input_string))
-            try:
-                input_string = input_string.strip()
-                try:
-                    parsed_dict.update(json.loads(input_string))
-                except BaseException:
-                    parsed_dict.update((i.split('=')
-                                        for i in input_string.split(';')
-                                        if i))
-            except Exception as exc:
-                raise AriaCliFormatInputsError(str(exc), inputs=input_string)
-
-        def _handle_inputs_source(input_path):
-            self.logger.info('Processing inputs source: {0}'.format(input_path))
-            try:
-                with open(input_path) as input_file:
-                    content = yaml.safe_load(input_file)
-            except yaml.YAMLError as exc:
-                raise AriaCliYAMLInputsError(
-                    '"{0}" is not a valid YAML. {1}'.format(input_path, str(exc)))
-            if isinstance(content, dict):
-                parsed_dict.update(content)
-                return
-            if content is None:
-                return
-            raise AriaCliInvalidInputsError('Invalid inputs', inputs=input_path)
-
-        for input_string in inputs if isinstance(inputs, list) else [inputs]:
-            if os.path.isdir(input_string):
-                for input_file in os.listdir(input_string):
-                    _handle_inputs_source(os.path.join(input_string, input_file))
-                continue
-            input_files = glob(input_string)
-            if input_files:
-                for input_file in input_files:
-                    _handle_inputs_source(input_file)
-                continue
-            _format_to_dict(input_string)
-        return parsed_dict
-
-
-class ParseCommand(BaseCommand):
-    """
-    :code:`parse` command.
-    
-    Given a blueprint, emits information in human-readable, JSON, or YAML format from various phases
-    of the ARIA parser.
-    """
-    
-    def __call__(self, args_namespace, unknown_args):
-        super(ParseCommand, self).__call__(args_namespace, unknown_args)
-
-        if args_namespace.prefix:
-            for prefix in args_namespace.prefix:
-                extension.parser.uri_loader_prefix().append(prefix)
-
-        cachedmethod.ENABLED = args_namespace.cached_methods
-
-        context = ParseCommand.create_context_from_namespace(args_namespace)
-        context.args = unknown_args
-
-        consumer = ConsumerChain(context, (Read, Validate))
-
-        consumer_class_name = args_namespace.consumer
-        dumper = None
-        if consumer_class_name == 'validate':
-            dumper = None
-        elif consumer_class_name == 'presentation':
-            dumper = consumer.consumers[0]
-        elif consumer_class_name == 'template':
-            consumer.append(ServiceTemplate)
-        elif consumer_class_name == 'types':
-            consumer.append(ServiceTemplate, Types)
-        elif consumer_class_name == 'instance':
-            consumer.append(ServiceTemplate, Inputs, ServiceInstance)
-        else:
-            consumer.append(ServiceTemplate, Inputs, ServiceInstance)
-            consumer.append(import_fullname(consumer_class_name))
-
-        if dumper is None:
-            # Default to last consumer
-            dumper = consumer.consumers[-1]
-
-        consumer.consume()
-
-        if not context.validation.dump_issues():
-            dumper.dump()
-            exit(1)
-
-    @staticmethod
-    def create_context_from_namespace(namespace, **kwargs):
-        args = vars(namespace).copy()
-        args.update(kwargs)
-        return ParseCommand.create_context(**args)
-
-    @staticmethod
-    def create_context(uri,
-                       loader_source,
-                       reader_source,
-                       presenter_source,
-                       presenter,
-                       debug,
-                       **kwargs):
-        context = ConsumptionContext()
-        context.loading.loader_source = import_fullname(loader_source)()
-        context.reading.reader_source = import_fullname(reader_source)()
-        context.presentation.location = UriLocation(uri) if isinstance(uri, basestring) else uri
-        context.presentation.presenter_source = import_fullname(presenter_source)()
-        context.presentation.presenter_class = import_fullname(presenter)
-        context.presentation.print_exceptions = debug
-        return context
-
-
-class WorkflowCommand(BaseCommand):
-    """
-    :code:`workflow` command.
-    """
-
-    WORKFLOW_POLICY_INTERNAL_PROPERTIES = ('implementation', 'dependencies')
-    
-    def __call__(self, args_namespace, unknown_args):
-        super(WorkflowCommand, self).__call__(args_namespace, unknown_args)
-
-        context = self._parse(args_namespace.uri)
-        workflow_fn, inputs = self._get_workflow(context, args_namespace.workflow)
-        self._dry = args_namespace.dry
-        self._run(context, args_namespace.workflow, workflow_fn, inputs)
-    
-    def _parse(self, uri):
-        # Parse
-        context = ConsumptionContext()
-        context.presentation.location = UriLocation(uri)
-        consumer = ConsumerChain(context, (Read, Validate, ServiceTemplate, Inputs,
-                                           ServiceInstance))
-        consumer.consume()
-
-        if context.validation.dump_issues():
-            exit(1)
-        
-        return context
-    
-    def _get_workflow(self, context, workflow_name):
-        if workflow_name in BUILTIN_WORKFLOWS:
-            workflow_fn = import_fullname('aria.orchestrator.workflows.builtin.{0}'.format(
-                workflow_name))
-            inputs = {}
-        else:
-            workflow = context.modeling.instance.policies.get(workflow_name)
-            if workflow is None:
-                raise AttributeError('workflow policy does not exist: "{0}"'.format(workflow_name))
-            if workflow.type.role != 'workflow':
-                raise AttributeError('policy is not a workflow: "{0}"'.format(workflow_name))
-
-            sys.path.append(os.path.dirname(str(context.presentation.location)))
-    
-            workflow_fn = import_fullname(workflow.properties['implementation'].value)
-    
-            for k in workflow.properties:
-                if k in WORKFLOW_DECORATOR_RESERVED_ARGUMENTS:
-                    raise AttributeError('workflow policy "{0}" defines a reserved property: "{1}"'
-                                         .format(workflow_name, k))
-    
-            inputs = OrderedDict([
-                (k, v.value) for k, v in workflow.properties.iteritems()
-                if k not in WorkflowCommand.WORKFLOW_POLICY_INTERNAL_PROPERTIES
-            ])
-        
-        return workflow_fn, inputs
-    
-    def _run(self, context, workflow_name, workflow_fn, inputs):
-        # Storage
-        def _initialize_storage(model_storage):
-            if self._dry:
-                convert_to_dry(context.modeling.instance)
-            context.modeling.store(model_storage)
-
-        # Create runner
-        runner = Runner(workflow_name, workflow_fn, inputs, _initialize_storage,
-                        lambda: context.modeling.instance.id)
-        
-        # Run
-        runner.run()
-   
-
-class InitCommand(BaseCommand):
-    """
-    :code:`init` command.
-    
-    Broken. Currently maintained for reference.
-    """
-
-    _IN_VIRTUAL_ENV = hasattr(sys, 'real_prefix')
-
-    def __call__(self, args_namespace, unknown_args):
-        super(InitCommand, self).__call__(args_namespace, unknown_args)
-        self._workspace_setup()
-        inputs = self.parse_inputs(args_namespace.input) if args_namespace.input else None
-        plan, deployment_plan = self._parse_blueprint(args_namespace.blueprint_path, inputs)
-        self._create_storage(
-            blueprint_plan=plan,
-            blueprint_path=args_namespace.blueprint_path,
-            deployment_plan=deployment_plan,
-            blueprint_id=args_namespace.blueprint_id,
-            deployment_id=args_namespace.deployment_id,
-            main_file_name=os.path.basename(args_namespace.blueprint_path))
-        self.logger.info('Initiated {0}'.format(args_namespace.blueprint_path))
-        self.logger.info(
-            'If you make changes to the blueprint, '
-            'run `aria local init -p {0}` command again to apply them'.format(
-                args_namespace.blueprint_path))
-
-    def _workspace_setup(self):
-        try:
-            create_user_space()
-            self.logger.debug(
-                'created user space path in: {0}'.format(user_space()))
-        except IOError:
-            self.logger.debug(
-                'user space path already exists - {0}'.format(user_space()))
-        try:
-            create_local_storage()
-            self.logger.debug(
-                'created local storage path in: {0}'.format(local_storage()))
-        except IOError:
-            self.logger.debug(
-                'local storage path already exists - {0}'.format(local_storage()))
-        return local_storage()
-
-    def _parse_blueprint(self, blueprint_path, inputs=None):
-        # TODO
-        pass
-
-    @staticmethod
-    def _create_storage(
-            blueprint_path,
-            blueprint_plan,
-            deployment_plan,
-            blueprint_id,
-            deployment_id,
-            main_file_name=None):
-        resource_storage = application_resource_storage(
-            FileSystemResourceDriver(local_resource_storage()))
-        model_storage = application_model_storage(
-            FileSystemModelDriver(local_model_storage()))
-        resource_storage.setup()
-        model_storage.setup()
-        storage_manager = StorageManager(
-            model_storage=model_storage,
-            resource_storage=resource_storage,
-            blueprint_path=blueprint_path,
-            blueprint_id=blueprint_id,
-            blueprint_plan=blueprint_plan,
-            deployment_id=deployment_id,
-            deployment_plan=deployment_plan
-        )
-        storage_manager.create_blueprint_storage(
-            blueprint_path,
-            main_file_name=main_file_name
-        )
-        storage_manager.create_nodes_storage()
-        storage_manager.create_deployment_storage()
-        storage_manager.create_node_instances_storage()
-
-
-class ExecuteCommand(BaseCommand):
-    """
-    :code:`execute` command.
-
-    Broken. Currently maintained for reference.
-    """
-
-    def __call__(self, args_namespace, unknown_args):
-        super(ExecuteCommand, self).__call__(args_namespace, unknown_args)
-        parameters = (self.parse_inputs(args_namespace.parameters)
-                      if args_namespace.parameters else {})
-        resource_storage = application_resource_storage(
-            FileSystemResourceDriver(local_resource_storage()))
-        model_storage = application_model_storage(
-            FileSystemModelDriver(local_model_storage()))
-        deployment = model_storage.service_instance.get(args_namespace.deployment_id)
-
-        try:
-            workflow = deployment.workflows[args_namespace.workflow_id]
-        except KeyError:
-            raise ValueError(
-                '{0} workflow does not exist. existing workflows are: {1}'.format(
-                    args_namespace.workflow_id,
-                    deployment.workflows.keys()))
-
-        workflow_parameters = self._merge_and_validate_execution_parameters(
-            workflow,
-            args_namespace.workflow_id,
-            parameters
-        )
-        workflow_context = WorkflowContext(
-            name=args_namespace.workflow_id,
-            model_storage=model_storage,
-            resource_storage=resource_storage,
-            deployment_id=args_namespace.deployment_id,
-            workflow_id=args_namespace.workflow_id,
-            parameters=workflow_parameters,
-        )
-        workflow_function = self._load_workflow_handler(workflow['operation'])
-        tasks_graph = workflow_function(workflow_context, **workflow_context.parameters)
-        executor = ProcessExecutor()
-        workflow_engine = Engine(executor=executor,
-                                 workflow_context=workflow_context,
-                                 tasks_graph=tasks_graph)
-        workflow_engine.execute()
-        executor.close()
-
-    @staticmethod
-    def _merge_and_validate_execution_parameters(
-            workflow,
-            workflow_name,
-            execution_parameters):
-        merged_parameters = {}
-        workflow_parameters = workflow.get('parameters', {})
-        missing_mandatory_parameters = set()
-
-        for name, param in workflow_parameters.iteritems():
-            if 'default' not in param:
-                if name not in execution_parameters:
-                    missing_mandatory_parameters.add(name)
-                    continue
-                merged_parameters[name] = execution_parameters[name]
-                continue
-            merged_parameters[name] = (execution_parameters[name] if name in execution_parameters
-                                       else param['default'])
-
-        if missing_mandatory_parameters:
-            raise ValueError(
-                'Workflow "{0}" must be provided with the following '
-                'parameters to execute: {1}'.format(
-                    workflow_name, ','.join(missing_mandatory_parameters)))
-
-        custom_parameters = dict(
-            (k, v) for (k, v) in execution_parameters.iteritems()
-            if k not in workflow_parameters)
-
-        if custom_parameters:
-            raise ValueError(
-                'Workflow "{0}" does not have the following parameters declared: {1}. '
-                'Remove these parameters'.format(
-                    workflow_name, ','.join(custom_parameters.keys())))
-
-        return merged_parameters
-
-    @staticmethod
-    def _load_workflow_handler(handler_path):
-        module_name, spec_handler_name = handler_path.rsplit('.', 1)
-        try:
-            module = import_module(module_name)
-            return getattr(module, spec_handler_name)
-        except ImportError:
-            # TODO: exception handler
-            raise
-        except AttributeError:
-            # TODO: exception handler
-            raise
-
-
-class BaseCSARCommand(BaseCommand):
-    @staticmethod
-    def _parse_and_dump(reader):
-        context = ConsumptionContext()
-        context.loading.prefixes += [os.path.join(reader.destination, 'definitions')]
-        context.presentation.location = LiteralLocation(reader.entry_definitions_yaml)
-        chain = ConsumerChain(context, (Read, Validate, Model, Instance))
-        chain.consume()
-        if context.validation.dump_issues():
-            raise RuntimeError('Validation failed')
-        dumper = chain.consumers[-1]
-        dumper.dump()
-
-    def _read(self, source, destination):
-        reader = csar.read(
-            source=source,
-            destination=destination,
-            logger=self.logger)
-        self.logger.info(
-            'Path: {r.destination}\n'
-            'TOSCA meta file version: {r.meta_file_version}\n'
-            'CSAR Version: {r.csar_version}\n'
-            'Created By: {r.created_by}\n'
-            'Entry definitions: {r.entry_definitions}'
-            .format(r=reader))
-        self._parse_and_dump(reader)
-
-    def _validate(self, source):
-        workdir = tempfile.mkdtemp()
-        try:
-            self._read(
-                source=source,
-                destination=workdir)
-        finally:
-            shutil.rmtree(workdir, ignore_errors=True)
-
-
-class CSARCreateCommand(BaseCSARCommand):
-    def __call__(self, args_namespace, unknown_args):
-        super(CSARCreateCommand, self).__call__(args_namespace, unknown_args)
-        csar.write(
-            source=args_namespace.source,
-            entry=args_namespace.entry,
-            destination=args_namespace.destination,
-            logger=self.logger)
-        self._validate(args_namespace.destination)
-
-
-class CSAROpenCommand(BaseCSARCommand):
-    def __call__(self, args_namespace, unknown_args):
-        super(CSAROpenCommand, self).__call__(args_namespace, unknown_args)
-        self._read(
-            source=args_namespace.source,
-            destination=args_namespace.destination)
-
-
-class CSARValidateCommand(BaseCSARCommand):
-    def __call__(self, args_namespace, unknown_args):
-        super(CSARValidateCommand, self).__call__(args_namespace, unknown_args)
-        self._validate(args_namespace.source)
-
-
-class SpecCommand(BaseCommand):
-    """
-    :code:`spec` command.
-    
-    Emits all uses of :code:`@dsl_specification` in the codebase, in human-readable or CSV format.
-    """
-    
-    def __call__(self, args_namespace, unknown_args):
-        super(SpecCommand, self).__call__(args_namespace, unknown_args)
-
-        # Make sure that all @dsl_specification decorators are processed
-        for pkg in extension.parser.specification_package():
-            import_modules(pkg)
-
-        # TODO: scan YAML documents as well
-
-        if args_namespace.csv:
-            writer = csv.writer(sys.stdout, quoting=csv.QUOTE_ALL)
-            writer.writerow(('Specification', 'Section', 'Code', 'URL'))
-            for spec, sections in iter_specifications():
-                for section, details in sections:
-                    writer.writerow((spec, section, details['code'], details['url']))
-
-        else:
-            for spec, sections in iter_specifications():
-                puts(Colored.cyan(spec))
-                with indent(2):
-                    for section, details in sections:
-                        puts(Colored.blue(section))
-                        with indent(2):
-                            for k, v in details.iteritems():
-                                puts('%s: %s' % (Colored.magenta(k), v))
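
A minimal, self-contained sketch of the fallback that the removed parse_inputs method above applies to input strings that are neither files, directories nor wildcard patterns: the string is first tried as JSON and otherwise split into semicolon-separated key=value pairs. The helper name and sample values are illustrative only, not part of the ARIA code base.

    import json

    def string_to_inputs_dict(input_string):
        # Same idea as the _format_to_dict closure in parse_inputs: try JSON
        # first, then fall back to semicolon-separated key=value pairs.
        input_string = input_string.strip()
        try:
            return json.loads(input_string)
        except ValueError:
            return dict(pair.split('=')
                        for pair in input_string.split(';')
                        if pair)

    print(string_to_inputs_dict('{"webserver_port": 8080}'))   # parsed as JSON
    print(string_to_inputs_dict('key1=value1;key2=value2'))    # {'key1': 'value1', 'key2': 'value2'}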

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/commands/__init__.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/__init__.py b/aria/cli/commands/__init__.py
new file mode 100644
index 0000000..ae1e83e
--- /dev/null
+++ b/aria/cli/commands/__init__.py
@@ -0,0 +1,14 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/commands/executions.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/executions.py b/aria/cli/commands/executions.py
new file mode 100644
index 0000000..d767fa1
--- /dev/null
+++ b/aria/cli/commands/executions.py
@@ -0,0 +1,172 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import time
+
+from .. import utils
+from ..table import print_data
+from ..cli import aria, helptexts
+from ..exceptions import AriaCliError
+from ...modeling.models import Execution
+from ...storage.exceptions import StorageError
+from ...orchestrator.workflow_runner import WorkflowRunner
+from ...utils import formatting
+from ...utils import threading
+
+EXECUTION_COLUMNS = ['id', 'workflow_name', 'status', 'service_name',
+                     'created_at', 'error']
+
+
+@aria.group(name='executions')
+@aria.options.verbose()
+def executions():
+    """Handle workflow executions
+    """
+    pass
+
+
+@executions.command(name='show',
+                    short_help='Show execution information')
+@aria.argument('execution-id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(execution_id, model_storage, logger):
+    """Show information for a specific execution
+
+    `EXECUTION_ID` is the execution to get information on.
+    """
+    try:
+        logger.info('Showing execution {0}'.format(execution_id))
+        execution = model_storage.execution.get(execution_id)
+    except StorageError:
+        raise AriaCliError('Execution {0} not found'.format(execution_id))
+
+    print_data(EXECUTION_COLUMNS, execution.to_dict(), 'Execution:', max_width=50)
+
+    # print execution parameters
+    logger.info('Execution Inputs:')
+    if execution.inputs:
+        # TODO: check this section, it hasn't been tested
+        execution_inputs = [ei.to_dict() for ei in execution.inputs]
+        for input_name, input_value in formatting.decode_dict(
+                execution_inputs).iteritems():
+            logger.info('\t{0}: \t{1}'.format(input_name, input_value))
+    else:
+        logger.info('\tNo inputs')
+    logger.info('')
+
+
+@executions.command(name='list',
+                    short_help='List service executions')
+@aria.options.service_name(required=False)
+@aria.options.sort_by()
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(service_name,
+         sort_by,
+         descending,
+         model_storage,
+         logger):
+    """List executions
+
+    If `SERVICE_NAME` is provided, list executions for that service.
+    Otherwise, list executions for all services.
+    """
+    if service_name:
+        logger.info('Listing executions for service {0}...'.format(
+            service_name))
+        try:
+            service = model_storage.service.get_by_name(service_name)
+            filters = dict(service=service)
+        except StorageError:
+            raise AriaCliError('Service {0} does not exist'.format(
+                service_name))
+    else:
+        logger.info('Listing all executions...')
+        filters = {}
+
+    executions = [e.to_dict() for e in model_storage.execution.list(
+        filters=filters,
+        sort=utils.storage_sort_param(sort_by, descending))]
+
+    print_data(EXECUTION_COLUMNS, executions, 'Executions:')
+
+
+@executions.command(name='start',
+                    short_help='Execute a workflow')
+@aria.argument('workflow-name')
+@aria.options.service_name(required=True)
+@aria.options.inputs
+@aria.options.task_max_attempts()
+@aria.options.task_retry_interval()
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def start(workflow_name,
+          service_name,
+          inputs,
+          task_max_attempts,
+          task_retry_interval,
+          model_storage,
+          resource_storage,
+          plugin_manager,
+          logger):
+    """Execute a workflow
+
+    `WORKFLOW_ID` is the id of the workflow to execute (e.g. `uninstall`)
+    """
+    workflow_runner = \
+        WorkflowRunner(workflow_name, service_name, inputs,
+                       model_storage, resource_storage, plugin_manager,
+                       task_max_attempts, task_retry_interval)
+
+    execution_thread_name = '{0}_{1}'.format(service_name, workflow_name)
+    execution_thread = threading.ExceptionThread(target=workflow_runner.execute,
+                                                 name=execution_thread_name)
+    execution_thread.daemon = True  # allows force-cancel to exit immediately
+
+    logger.info('Starting execution. Press Ctrl+C to cancel')
+    execution_thread.start()
+    try:
+        while execution_thread.is_alive():
+            # using join without a timeout blocks and ignores KeyboardInterrupt
+            execution_thread.join(1)
+    except KeyboardInterrupt:
+        _cancel_execution(workflow_runner, execution_thread, logger)
+
+    # raise any errors from the execution thread (note these are not workflow execution errors)
+    execution_thread.raise_error_if_exists()
+
+    execution = workflow_runner.execution
+    logger.info('Execution has ended with "{0}" status'.format(execution.status))
+    if execution.status == Execution.FAILED:
+        logger.info('Execution error:\n{0}'.format(execution.error))
+
+
+def _cancel_execution(workflow_runner, execution_thread, logger):
+    logger.info('Cancelling execution. Press Ctrl+C again to force-cancel')
+    try:
+        workflow_runner.cancel()
+        while execution_thread.is_alive():
+            execution_thread.join(1)
+    except KeyboardInterrupt:
+        logger.info('Force-cancelling execution')
+        # TODO handle execution (update status etc.) and exit process
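
The start command above runs the workflow on a daemon thread and polls it with join(1) so that Ctrl+C is still delivered to the main thread. A standalone sketch of that pattern using only the standard library; the worker function and the sleep are stand-ins for workflow_runner.execute:

    import threading
    import time

    def run_workflow():
        time.sleep(30)   # stands in for workflow_runner.execute

    worker = threading.Thread(target=run_workflow, name='my_service_install')
    worker.daemon = True   # lets a force-cancel exit the process immediately
    worker.start()
    try:
        while worker.is_alive():
            # join() without a timeout would block and never see KeyboardInterrupt
            worker.join(1)
    except KeyboardInterrupt:
        print('cancel requested')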

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/commands/logs.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/logs.py b/aria/cli/commands/logs.py
new file mode 100644
index 0000000..3662063
--- /dev/null
+++ b/aria/cli/commands/logs.py
@@ -0,0 +1,68 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import utils
+from ..cli import aria
+
+
+@aria.group(name='logs')
+@aria.options.verbose()
+def logs():
+    """Show logs from workflow executions
+    """
+    pass
+
+
+@logs.command(name='list',
+              short_help='List execution logs')
+@aria.argument('execution-id')
+@aria.options.json_output
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(execution_id,
+         json_output,
+         model_storage,
+         logger):
+    """Display logs for an execution
+    """
+    logger.info('Listing logs for execution id {0}'.format(execution_id))
+    # events_logger = get_events_logger(json_output)
+    logs = model_storage.log.list(filters=dict(execution_fk=execution_id),
+                                  sort=utils.storage_sort_param('created_at', False))
+    # TODO: print logs nicely
+    if logs:
+        for log in logs:
+            print log
+    else:
+        logger.info('\tNo logs')
+
+
+@logs.command(name='delete',
+              short_help='Delete execution logs')
+@aria.argument('execution-id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def delete(execution_id, model_storage, logger):
+    """Delete logs of an execution
+
+    `EXECUTION_ID` is the execution logs to delete.
+    """
+    logger.info('Deleting logs for execution id {0}'.format(execution_id))
+    logs = model_storage.log.list(filters=dict(execution_fk=execution_id))
+    for log in logs:
+        model_storage.log.delete(log)
+    logger.info('Deleted logs for execution id {0}'.format(execution_id))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/commands/node_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/node_templates.py b/aria/cli/commands/node_templates.py
new file mode 100644
index 0000000..5614aee
--- /dev/null
+++ b/aria/cli/commands/node_templates.py
@@ -0,0 +1,104 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ..table import print_data
+from .. import utils
+from ..cli import aria
+from ..exceptions import AriaCliError
+from ...storage.exceptions import StorageError
+
+
+NODE_TEMPLATE_COLUMNS = ['id', 'name', 'description', 'service_template_name', 'type_name']
+
+
+@aria.group(name='node-templates')
+@aria.options.verbose()
+def node_templates():
+    """Handle a service template's node templates
+    """
+    pass
+
+
+@node_templates.command(name='show',
+                        short_help='Show node information')
+@aria.argument('node-template-id')
+# @aria.options.service_template_name(required=True)
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(node_template_id, model_storage, logger):
+    """Show information for a specific node of a specific service template
+
+    `NODE_TEMPLATE_ID` is the node id to get information on.
+    """
+    # logger.info('Showing node template {0} for service template {1}'.format(
+    #     node_template_id, service_template_name))
+    logger.info('Showing node template {0}'.format(node_template_id))
+    try:
+        #TODO get node template of a specific service template instead?
+        node_template = model_storage.node_template.get(node_template_id)
+    except StorageError:
+        raise AriaCliError('Node template {0} was not found'.format(node_template_id))
+
+    print_data(NODE_TEMPLATE_COLUMNS, node_template.to_dict(), 'Node template:', max_width=50)
+
+    # print node template properties
+    logger.info('Node template properties:')
+    if node_template.properties:
+        logger.info(utils.get_parameter_templates_as_string(node_template.properties))
+    else:
+        logger.info('\tNo properties')
+
+    # print node IDs
+    nodes = node_template.nodes.all()
+    logger.info('Nodes:')
+    if nodes:
+        for node in nodes:
+            logger.info('\t{0}'.format(node.name))
+    else:
+        logger.info('\tNo nodes')
+
+
+@node_templates.command(name='list',
+                        short_help='List node templates for a service template')
+@aria.options.service_template_name()
+@aria.options.sort_by('service_template_name')
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(service_template_name, sort_by, descending, model_storage, logger):
+    """List node templates
+
+    If `SERVICE_TEMPLATE_NAME` is provided, list node templates for that service template.
+    Otherwise, list node templates for all service templates.
+    """
+    if service_template_name:
+        logger.info('Listing node templates for service template {0}...'.format(
+            service_template_name))
+        try:
+            service_template = model_storage.service_template.get_by_name(service_template_name)
+            filters = dict(service_template=service_template)
+        except StorageError:
+            raise AriaCliError('Service template {0} does not exist'.format(service_template_name))
+    else:
+        logger.info('Listing all node templates...')
+        filters = {}
+
+    node_templates = [nt.to_dict() for nt in model_storage.node_template.list(
+        filters=filters,
+        sort=utils.storage_sort_param(sort_by, descending))]
+
+    print_data(NODE_TEMPLATE_COLUMNS, node_templates, 'Node templates:')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/commands/nodes.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/nodes.py b/aria/cli/commands/nodes.py
new file mode 100644
index 0000000..f38c917
--- /dev/null
+++ b/aria/cli/commands/nodes.py
@@ -0,0 +1,96 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import utils
+from ..cli import aria
+from ..table import print_data
+from ..exceptions import AriaCliError
+from ...storage.exceptions import StorageError
+
+
+NODE_COLUMNS = ['id', 'name', 'service_name', 'node_template_name', 'state']
+
+
+@aria.group(name='nodes')
+@aria.options.verbose()
+def nodes():
+    """Handle a service's nodes
+    """
+    pass
+
+
+@nodes.command(name='show',
+               short_help='Show node information')
+@aria.argument('node_id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(node_id, model_storage, logger):
+    """Showing information for a specific node
+
+    `NODE_ID` is the id of the node to get information on.
+    """
+    logger.info('Showing node {0}'.format(node_id))
+    try:
+        node = model_storage.node.get(node_id)
+    except StorageError:
+        raise AriaCliError('Node {0} not found'.format(node_id))
+
+    print_data(NODE_COLUMNS, node.to_dict(), 'Node:', 50)
+
+    # print node attributes
+    logger.info('Node attributes:')
+    if node.runtime_properties:
+        for prop_name, prop_value in node.runtime_properties.iteritems():
+            logger.info('\t{0}: {1}'.format(prop_name, prop_value))
+    else:
+        logger.info('\tNo attributes')
+    logger.info('')
+
+
+@nodes.command(name='list',
+               short_help='List nodes for a service')
+@aria.options.service_name(required=False)
+@aria.options.sort_by('service_name')
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(service_name,
+         sort_by,
+         descending,
+         model_storage,
+         logger):
+    """List nodes
+
+    If `SERVICE_NAME` is provided, list nodes for that service.
+    Otherwise, list nodes for all services.
+    """
+    if service_name:
+        logger.info('Listing nodes for service {0}...'.format(service_name))
+        try:
+            service = model_storage.service.get_by_name(service_name)
+            filters = dict(service=service)
+        except StorageError:
+            raise AriaCliError('Service {0} does not exist'.format(service_name))
+    else:
+        logger.info('Listing all nodes...')
+        filters = {}
+
+    nodes = [node.to_dict() for node in model_storage.node.list(
+        filters=filters,
+        sort=utils.storage_sort_param(sort_by, descending))]
+
+    print_data(NODE_COLUMNS, nodes, 'Nodes:')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/commands/plugins.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/plugins.py b/aria/cli/commands/plugins.py
new file mode 100644
index 0000000..d31aa99
--- /dev/null
+++ b/aria/cli/commands/plugins.py
@@ -0,0 +1,145 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import tarfile
+
+from ..table import print_data
+from ..cli import helptexts, aria
+from ..exceptions import AriaCliError
+from ..utils import storage_sort_param
+
+
+PLUGIN_COLUMNS = ['id', 'package_name', 'package_version', 'distribution',
+                  'supported_platform', 'distribution_release', 'uploaded_at']
+EXCLUDED_COLUMNS = ['archive_name', 'distribution_version', 'excluded_wheels',
+                    'package_source', 'supported_py_versions', 'wheels']
+
+
+@aria.group(name='plugins')
+@aria.options.verbose()
+def plugins():
+    """Handle plugins
+    """
+    pass
+
+
+@plugins.command(name='validate',
+                 short_help='Validate a plugin')
+@aria.argument('plugin-path')
+@aria.options.verbose()
+@aria.pass_logger
+def validate(plugin_path, logger):
+    """Validate a plugin
+
+    This will try to validate that the plugin archive is not corrupted.
+    A valid plugin is a wagon (http://github.com/cloudify-cosmo/wagon)
+    in tar.gz format (the suffix may also be .wgn).
+
+    `PLUGIN_PATH` is the path to the wagon archive to validate.
+    """
+    logger.info('Validating plugin {0}...'.format(plugin_path))
+
+    if not tarfile.is_tarfile(plugin_path):
+        raise AriaCliError(
+            'Archive {0} is of an unsupported type. Only '
+            'tar.gz/wgn is allowed'.format(plugin_path))
+    with tarfile.open(plugin_path) as tar:
+        tar_members = tar.getmembers()
+        package_json_path = "{0}/{1}".format(
+            tar_members[0].name, 'package.json')
+        # TODO: Find a better way to validate a plugin.
+        try:
+            tar.getmember(package_json_path)
+        except KeyError:
+            raise AriaCliError(
+                'Failed to validate plugin {0} '
+                '(package.json was not found in archive)'.format(plugin_path))
+
+    logger.info('Plugin validated successfully')
+
+
+@plugins.command(name='delete',
+                 short_help='Delete a plugin')
+@aria.argument('plugin-id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def delete(plugin_id, model_storage, logger):
+    """Delete a plugin
+
+    `PLUGIN_ID` is the id of the plugin to delete.
+    """
+    logger.info('Deleting plugin {0}...'.format(plugin_id))
+    model_storage.plugin.delete(plugin_id=plugin_id)
+    logger.info('Plugin deleted')
+
+
+@plugins.command(name='install',
+                 short_help='Install a plugin')
+@aria.argument('plugin-path')
+@aria.options.verbose()
+@aria.pass_context
+@aria.pass_plugin_manager
+@aria.pass_logger
+def install(ctx, plugin_path, plugin_manager, logger):
+    """Install a plugin
+
+    `PLUGIN_PATH` is the path to the wagon archive to install.
+    """
+    ctx.invoke(validate, plugin_path=plugin_path)
+    logger.info('Installing plugin {0}...'.format(plugin_path))
+    plugin = plugin_manager.install(plugin_path)
+    logger.info("Plugin installed. The plugin's id is {0}".format(plugin.id))
+
+
+@plugins.command(name='show',
+                 short_help='Show plugin information')
+@aria.argument('plugin-id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(plugin_id, model_storage, logger):
+    """Show information for a specific plugin
+
+    `PLUGIN_ID` is the id of the plugin to show information on.
+    """
+    logger.info('Showing plugin {0}...'.format(plugin_id))
+    plugin = model_storage.plugin.get(plugin_id)
+    _transform_plugin_response(plugin)
+    print_data(PLUGIN_COLUMNS, plugin, 'Plugin:')
+
+
+@plugins.command(name='list',
+                 short_help='List plugins')
+@aria.options.sort_by('uploaded_at')
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(sort_by, descending, model_storage, logger):
+    """List all plugins on the manager
+    """
+    logger.info('Listing all plugins...')
+    plugins_list = model_storage.plugin.list(
+        sort=storage_sort_param(sort_by, descending))
+    for plugin in plugins_list:
+        _transform_plugin_response(plugin)
+    print_data(PLUGIN_COLUMNS, plugins_list, 'Plugins:')
+
+
+def _transform_plugin_response(plugin):
+    """Remove any columns that shouldn't be displayed in the CLI
+    """
+    for column in EXCLUDED_COLUMNS:
+        plugin.pop(column, None)
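
The validate command above only checks that the archive is a tar.gz whose top-level directory contains a package.json, which is the layout wagon produces. A self-contained sketch that builds such an archive and runs the same two checks; the plugin directory name is made up, and real wagon archives contain more than an empty package.json:

    import os
    import shutil
    import tarfile
    import tempfile

    workdir = tempfile.mkdtemp()
    try:
        plugin_dir = os.path.join(workdir, 'my_plugin-0.1-py27-none-any')
        os.mkdir(plugin_dir)
        with open(os.path.join(plugin_dir, 'package.json'), 'w') as package_json:
            package_json.write('{}')

        archive_path = os.path.join(workdir, 'my_plugin.wgn')
        with tarfile.open(archive_path, 'w:gz') as tar:
            tar.add(plugin_dir, arcname=os.path.basename(plugin_dir))

        # the same checks the validate command performs
        assert tarfile.is_tarfile(archive_path)
        with tarfile.open(archive_path) as tar:
            top_level = tar.getmembers()[0].name
            tar.getmember('{0}/package.json'.format(top_level))  # KeyError if missing
    finally:
        shutil.rmtree(workdir, ignore_errors=True)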



[12/24] incubator-ariatosca git commit: fixed tests

Posted by ra...@apache.org.
fixed tests


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/060f8cc6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/060f8cc6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/060f8cc6

Branch: refs/heads/ARIA-48-aria-cli
Commit: 060f8cc634b2d252b33aa0029c405fabaf521940
Parents: 15e2c09
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Sun Apr 2 17:34:31 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/utils/type.py                                   |  1 +
 tests/orchestrator/workflows/api/test_task.py        | 15 +++++++++------
 ...test_process_executor_concurrent_modifications.py |  3 ++-
 .../executor/test_process_executor_extension.py      |  3 ++-
 .../test_process_executor_tracked_changes.py         |  6 ++++--
 5 files changed, 18 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/060f8cc6/aria/utils/type.py
----------------------------------------------------------------------
diff --git a/aria/utils/type.py b/aria/utils/type.py
index e427be1..494a2c2 100644
--- a/aria/utils/type.py
+++ b/aria/utils/type.py
@@ -21,6 +21,7 @@ def validate_value_type(value, type_name):
     name_to_type = {
         'list': list,
         'dict': dict,
+        'tuple': tuple,
         'str': str,
         'unicode': str,
         'string': str,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/060f8cc6/tests/orchestrator/workflows/api/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/api/test_task.py b/tests/orchestrator/workflows/api/test_task.py
index 80d2351..33b9b94 100644
--- a/tests/orchestrator/workflows/api/test_task.py
+++ b/tests/orchestrator/workflows/api/test_task.py
@@ -46,18 +46,19 @@ class TestOperationTask(object):
         ctx.model.node.update(plugin)
 
         plugin_specification = mock.models.create_plugin_specification('test_plugin', '0.1')
+        inputs = {'test_input': True}
 
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
             operation_kwargs=dict(plugin_specification=plugin_specification,
-                                  implementation='op_path'))
+                                  implementation='op_path',
+                                  inputs=inputs),)
 
         node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
         node.interfaces[interface_name] = interface
         ctx.model.node.update(node)
-        inputs = {'test_input': True}
         max_attempts = 10
         retry_interval = 10
         ignore_failure = True
@@ -95,18 +96,19 @@ class TestOperationTask(object):
         ctx.model.plugin.update(plugin)
 
         plugin_specification = mock.models.create_plugin_specification('test_plugin', '0.1')
+        inputs = {'test_input': True}
 
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
             operation_kwargs=dict(plugin_specification=plugin_specification,
-                                  implementation='op_path')
+                                  implementation='op_path',
+                                  inputs=inputs)
         )
 
         relationship = ctx.model.relationship.list()[0]
         relationship.interfaces[interface.name] = interface
-        inputs = {'test_input': True}
         max_attempts = 10
         retry_interval = 10
 
@@ -141,18 +143,19 @@ class TestOperationTask(object):
         ctx.model.node.update(plugin)
 
         plugin_specification = mock.models.create_plugin_specification('test_plugin', '0.1')
+        inputs = {'test_input': True}
 
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
             operation_kwargs=dict(plugin_specification=plugin_specification,
-                                  implementation='op_path')
+                                  implementation='op_path',
+                                  inputs=inputs)
         )
 
         relationship = ctx.model.relationship.list()[0]
         relationship.interfaces[interface.name] = interface
-        inputs = {'test_input': True}
         max_attempts = 10
         retry_interval = 10
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/060f8cc6/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
index 6d0eb5b..88e7ae0 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
@@ -90,7 +90,8 @@ def _test(context, executor, lock_files, func, expected_failure):
         node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation='{0}.{1}'.format(__name__, func.__name__))
+        operation_kwargs=dict(implementation='{0}.{1}'.format(__name__, func.__name__),
+                              inputs=inputs)
     )
     node.interfaces[interface.name] = interface
     context.model.node.update(node)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/060f8cc6/tests/orchestrator/workflows/executor/test_process_executor_extension.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_extension.py b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
index 0988fae..7ae337d 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_extension.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
@@ -42,7 +42,8 @@ def test_decorate_extension(context, executor):
             interface_name,
             operation_name,
             operation_kwargs=dict(implementation='{0}.{1}'.format(__name__,
-                                                                  _mock_operation.__name__))
+                                                                  _mock_operation.__name__),
+                                  inputs=inputs)
         )
         node.interfaces[interface.name] = interface
         task = api.task.OperationTask.for_node(node=node,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/060f8cc6/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
index 5512189..feebb6a 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
@@ -90,17 +90,19 @@ def _run_workflow(context, executor, op_func, inputs=None):
         node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
         interface_name = 'test_interface'
         operation_name = 'operation'
+        inputs = inputs or {}
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
-            operation_kwargs=dict(implementation=_operation_mapping(op_func))
+            operation_kwargs=dict(implementation=_operation_mapping(op_func),
+                                  inputs=inputs)
         )
         node.interfaces[interface.name] = interface
         task = api.task.OperationTask.for_node(node=node,
                                                interface_name=interface_name,
                                                operation_name=operation_name,
-                                               inputs=inputs or {})
+                                               inputs=inputs)
         graph.add_tasks(task)
         return graph
     graph = mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter


[03/24] incubator-ariatosca git commit: created dry-run executor

Posted by ra...@apache.org.
created dry-run executor


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/c5624260
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/c5624260
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/c5624260

Branch: refs/heads/ARIA-48-aria-cli
Commit: c56242604f4025e7e62d8304dba89bf2e80ff1e0
Parents: 108d8a6
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Tue Apr 4 12:02:11 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/cli/cli/aria.py                        |  5 +++
 aria/cli/cli/helptexts.py                   |  2 +
 aria/cli/commands/executions.py             | 15 +++++--
 aria/orchestrator/workflow_runner.py        |  5 ++-
 aria/orchestrator/workflows/executor/dry.py | 54 ++++++++++++++++++++++++
 5 files changed, 76 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/c5624260/aria/cli/cli/aria.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli/aria.py b/aria/cli/cli/aria.py
index baa72eb..1664ce5 100644
--- a/aria/cli/cli/aria.py
+++ b/aria/cli/cli/aria.py
@@ -304,6 +304,11 @@ class Options(object):
             is_flag=True,
             help=helptexts.JSON_OUTPUT)
 
+        self.dry_execution = click.option(
+            '--dry',
+            is_flag=True,
+            help=helptexts.DRY_EXECUTION)
+
         self.init_hard_reset = click.option(
             '--hard',
             is_flag=True,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/c5624260/aria/cli/cli/helptexts.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli/helptexts.py b/aria/cli/cli/helptexts.py
index 02519cb..0d66d6b 100644
--- a/aria/cli/cli/helptexts.py
+++ b/aria/cli/cli/helptexts.py
@@ -32,6 +32,8 @@ HARD_RESET = "Hard reset the configuration, including coloring and loggers"
 ENABLE_COLORS = "Enable colors in logger (use --hard when working with" \
                 " an initialized environment) [default: False]"
 
+DRY_EXECUTION = "Execute a workflow dry run (prints operations information without causing side " \
+                "effects)"
 SERVICE_TEMPLATE_FILENAME = (
     "The name of the archive's main service template file. "
     "This is only relevant if uploading an archive")

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/c5624260/aria/cli/commands/executions.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/executions.py b/aria/cli/commands/executions.py
index 6d8b949..82ee51a 100644
--- a/aria/cli/commands/executions.py
+++ b/aria/cli/commands/executions.py
@@ -18,6 +18,7 @@ from ..table import print_data
 from ..cli import aria
 from ...modeling.models import Execution
 from ...orchestrator.workflow_runner import WorkflowRunner
+from ...orchestrator.workflows.executor.dry import DryExecutor
 from ...utils import formatting
 from ...utils import threading
 
@@ -101,6 +102,7 @@ def list(service_name,
 @aria.argument('workflow-name')
 @aria.options.service_name(required=True)
 @aria.options.inputs
+@aria.options.dry_execution
 @aria.options.task_max_attempts()
 @aria.options.task_retry_interval()
 @aria.options.verbose()
@@ -111,6 +113,7 @@ def list(service_name,
 def start(workflow_name,
           service_name,
           inputs,
+          dry,
           task_max_attempts,
           task_retry_interval,
           model_storage,
@@ -119,19 +122,21 @@ def start(workflow_name,
           logger):
     """Execute a workflow
 
-    `WORKFLOW_ID` is the id of the workflow to execute (e.g. `uninstall`)
+    `WORKFLOW_NAME` is the name of the workflow to execute (e.g. `uninstall`)
     """
+    executor = DryExecutor() if dry else None  # use WorkflowRunner's default executor
+
     workflow_runner = \
         WorkflowRunner(workflow_name, service_name, inputs,
                        model_storage, resource_storage, plugin_manager,
-                       task_max_attempts, task_retry_interval)
+                       executor, task_max_attempts, task_retry_interval)
 
     execution_thread_name = '{0}_{1}'.format(service_name, workflow_name)
     execution_thread = threading.ExceptionThread(target=workflow_runner.execute,
                                                  name=execution_thread_name)
     execution_thread.daemon = True  # allows force-cancel to exit immediately
 
-    logger.info('Starting execution. Press Ctrl+C to cancel')
+    logger.info('Starting {0}execution. Press Ctrl+C to cancel'.format('dry ' if dry else ''))
     execution_thread.start()
     try:
         while execution_thread.is_alive():
@@ -148,6 +153,10 @@ def start(workflow_name,
     if execution.status == Execution.FAILED:
         logger.info('Execution error:\n{0}'.format(execution.error))
 
+    if dry:
+        # remove traces of the dry execution (including tasks, logs, inputs..)
+        model_storage.execution.delete(execution)
+
 
 def _cancel_execution(workflow_runner, execution_thread, logger):
     logger.info('Cancelling execution. Press Ctrl+C again to force-cancel')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/c5624260/aria/orchestrator/workflow_runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflow_runner.py b/aria/orchestrator/workflow_runner.py
index 78b17b8..982dff1 100644
--- a/aria/orchestrator/workflow_runner.py
+++ b/aria/orchestrator/workflow_runner.py
@@ -40,7 +40,7 @@ class WorkflowRunner(object):
 
     def __init__(self, workflow_name, service_name, inputs,
                  model_storage, resource_storage, plugin_manager,
-                 task_max_attempts=DEFAULT_TASK_MAX_ATTEMPTS,
+                 executor=None, task_max_attempts=DEFAULT_TASK_MAX_ATTEMPTS,
                  task_retry_interval=DEFAULT_TASK_RETRY_INTERVAL):
 
         self._model_storage = model_storage
@@ -71,8 +71,9 @@ class WorkflowRunner(object):
         execution_inputs_dict = models.Parameter.unwrap_dict(self.execution.inputs)
         self._tasks_graph = workflow_fn(ctx=workflow_context, **execution_inputs_dict)
 
+        executor = executor or ProcessExecutor(plugin_manager=plugin_manager)
         self._engine = Engine(
-            executor=ProcessExecutor(plugin_manager=plugin_manager),
+            executor=executor,
             workflow_context=workflow_context,
             tasks_graph=self._tasks_graph)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/c5624260/aria/orchestrator/workflows/executor/dry.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/dry.py b/aria/orchestrator/workflows/executor/dry.py
new file mode 100644
index 0000000..69ce53c
--- /dev/null
+++ b/aria/orchestrator/workflows/executor/dry.py
@@ -0,0 +1,54 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Dry executor
+"""
+
+from datetime import datetime
+
+from .base import BaseExecutor
+from ....modeling.models import Parameter
+
+
+class DryExecutor(BaseExecutor):
+    """
+    Executor which dry runs tasks - prints task information without causing any side effects
+    """
+
+    def execute(self, task):
+        # updating the task manually instead of calling self._task_started(task),
+        # to avoid any side effects raising that event might cause
+        with task._update():
+            task.started_at = datetime.utcnow()
+            task.status = task.STARTED
+
+        actor_type = type(task.actor).__name__.lower()
+        implementation = '{0} > '.format(task.plugin) if task.plugin else ''
+        implementation += task.implementation
+        inputs = Parameter.unwrap_dict(task.inputs)
+
+        self.logger.info(
+            'Executing {actor_type} {actor_name} operation {interface_name} {operation_name}: '
+            '{implementation} (Inputs: {inputs})'
+            .format(actor_type=actor_type, actor_name=task.actor.name,
+                    interface_name=task.interface_name, operation_name=task.operation_name,
+                    implementation=implementation, inputs=inputs))
+
+        # updating the task manually instead of calling self._task_succeeded(task),
+        # to avoid any side effects raising that event might cause
+        with task._update():
+            task.ended_at = datetime.utcnow()
+            task.status = task.SUCCESS
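
Taken together, the diffs in this commit wire the new executor into the CLI roughly as follows. This is a minimal sketch, assuming the storage and plugin objects already exist (for example via aria.cli.env); run_workflow is a made-up helper, not part of the commit:

    from aria.orchestrator.workflow_runner import WorkflowRunner
    from aria.orchestrator.workflows.executor.dry import DryExecutor

    def run_workflow(workflow_name, service_name, inputs, dry,
                     model_storage, resource_storage, plugin_manager):
        # --dry swaps in DryExecutor; passing None lets WorkflowRunner fall
        # back to its default ProcessExecutor(plugin_manager=plugin_manager)
        executor = DryExecutor() if dry else None
        runner = WorkflowRunner(workflow_name, service_name, inputs,
                                model_storage, resource_storage, plugin_manager,
                                executor)
        # DryExecutor only logs each task and flips its status, so this run
        # touches no actual nodes
        runner.execute()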


[11/24] incubator-ariatosca git commit: Enforce Uniqueness on ServiceTemplate and Service names

Posted by ra...@apache.org.
Enforce Uniqueness on ServiceTemplate and Service names


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/15e2c09b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/15e2c09b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/15e2c09b

Branch: refs/heads/ARIA-48-aria-cli
Commit: 15e2c09b7b28eac348107a81e5fe7a9d379114e4
Parents: c4ca1e8
Author: Avia Efrat <av...@gigaspaces.com>
Authored: Sun Apr 2 16:37:51 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/cli/commands/service_templates.py | 19 ++++++++++++++-----
 aria/cli/commands/services.py          |  7 +++++++
 aria/cli/constants.py                  |  4 ++++
 aria/modeling/models.py                |  8 ++++++--
 4 files changed, 31 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/15e2c09b/aria/cli/commands/service_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/service_templates.py b/aria/cli/commands/service_templates.py
index b855529..823a6e8 100644
--- a/aria/cli/commands/service_templates.py
+++ b/aria/cli/commands/service_templates.py
@@ -19,12 +19,14 @@ import json
 
 from .. import utils
 from .. import csar
-from ..cli import aria
 from .. import service_template_utils
+from ..cli import aria
+from ..constants import TWO_MODELS_WITH_THE_SAME_NAME_ERROR_TEMPLATE
 from ..table import print_data
 from ..exceptions import AriaCliError
 from ...core import Core
 from ...exceptions import AriaException
+from ...storage.exceptions import StorageError
 
 
 DESCRIPTION_LIMIT = 20
@@ -112,11 +114,18 @@ def store(service_template_path, service_template_name, model_storage, resource_
 
     service_template_path = service_template_utils.get(service_template_path)
     core = Core(model_storage, resource_storage, plugin_manager)
-    core.create_service_template(service_template_path,
-                                 os.path.dirname(service_template_path),
-                                 service_template_name)
+    try:
+        core.create_service_template(service_template_path,
+                                     os.path.dirname(service_template_path),
+                                     service_template_name)
+    except StorageError:
+        logger.info(TWO_MODELS_WITH_THE_SAME_NAME_ERROR_TEMPLATE.format(
+            model_class='service template',
+            name=service_template_name))
+        raise
 
-    logger.info('Service template stored')
+    else:
+        logger.info('Service template stored')
 
 
 @service_templates.command(name='delete',

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/15e2c09b/aria/cli/commands/services.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/services.py b/aria/cli/commands/services.py
index ce1139b..ae04d7e 100644
--- a/aria/cli/commands/services.py
+++ b/aria/cli/commands/services.py
@@ -19,11 +19,13 @@ from StringIO import StringIO
 
 from . import service_templates
 from ..cli import aria, helptexts
+from ..constants import TWO_MODELS_WITH_THE_SAME_NAME_ERROR_TEMPLATE
 from ..exceptions import AriaCliError
 from ..table import print_data
 from ..utils import storage_sort_param
 from ...core import Core
 from ...exceptions import AriaException
+from ...storage.exceptions import StorageError
 
 
 SERVICE_COLUMNS = ['id', 'name', 'service_template_name', 'created_at', 'updated_at']
@@ -97,6 +99,11 @@ def create(service_template_name,
     try:
         core = Core(model_storage, resource_storage, plugin_manager)
         service = core.create_service(service_template_name, inputs, service_name)
+    except StorageError:
+        logger.info(TWO_MODELS_WITH_THE_SAME_NAME_ERROR_TEMPLATE.format(
+            model_class='service',
+            name=service_name))
+        raise
     except AriaException as e:
         logger.info(str(e))
         service_templates.print_service_template_inputs(model_storage, service_template_name)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/15e2c09b/aria/cli/constants.py
----------------------------------------------------------------------
diff --git a/aria/cli/constants.py b/aria/cli/constants.py
index 67c094d..fdd37fc 100644
--- a/aria/cli/constants.py
+++ b/aria/cli/constants.py
@@ -16,3 +16,7 @@
 
 SAMPLE_SERVICE_TEMPLATE_FILENAME = 'service_template.yaml'
 HELP_TEXT_COLUMN_BUFFER = 5
+TWO_MODELS_WITH_THE_SAME_NAME_ERROR_TEMPLATE = """
+Could not store {model_class} `{name}`
+There already exists a {model_class} with the same name
+"""

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/15e2c09b/aria/modeling/models.py
----------------------------------------------------------------------
diff --git a/aria/modeling/models.py b/aria/modeling/models.py
index a01783b..db9db07 100644
--- a/aria/modeling/models.py
+++ b/aria/modeling/models.py
@@ -26,6 +26,10 @@ from . import (
     mixins,
 )
 
+from sqlalchemy import (
+    Column,
+    Text
+)
 
 aria_declarative_base = declarative_base(cls=mixins.ModelIDMixin)
 
@@ -84,7 +88,7 @@ __all__ = (
 # region service template models
 
 class ServiceTemplate(aria_declarative_base, service_template.ServiceTemplateBase):
-    pass
+    name = Column(Text, index=True, unique=True)
 
 
 class NodeTemplate(aria_declarative_base, service_template.NodeTemplateBase):
@@ -137,7 +141,7 @@ class ArtifactTemplate(aria_declarative_base, service_template.ArtifactTemplateB
 # region service instance models
 
 class Service(aria_declarative_base, service_instance.ServiceBase):
-    pass
+    name = Column(Text, index=True, unique=True)
 
 
 class Node(aria_declarative_base, service_instance.NodeBase):
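
As a hedged illustration of what the new unique index means for callers (the template path and name below are assumptions, and the storages come from the CLI environment on this branch):

    import os
    from aria.core import Core
    from aria.cli.env import env
    from aria.cli.constants import TWO_MODELS_WITH_THE_SAME_NAME_ERROR_TEMPLATE
    from aria.storage.exceptions import StorageError

    core = Core(env.model_storage, env.resource_storage, env.plugin_manager)
    path = '/tmp/service_template.yaml'   # assumed to exist and be a valid template

    core.create_service_template(path, os.path.dirname(path), 'my-template')
    try:
        # a second store with the same name now violates the unique index on
        # ServiceTemplate.name and surfaces as a StorageError
        core.create_service_template(path, os.path.dirname(path), 'my-template')
    except StorageError:
        print(TWO_MODELS_WITH_THE_SAME_NAME_ERROR_TEMPLATE.format(
            model_class='service template', name='my-template'))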


[10/24] incubator-ariatosca git commit: Make Execution, Task and Node status properties into methods

Posted by ra...@apache.org.
Make Execution, Task and Node status properties into methods


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/c4ca1e8f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/c4ca1e8f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/c4ca1e8f

Branch: refs/heads/ARIA-48-aria-cli
Commit: c4ca1e8f98c1b3ac0c974da06f238be989c394a6
Parents: d872282
Author: Avia Efrat <av...@gigaspaces.com>
Authored: Sun Apr 2 16:11:43 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/modeling/orchestration.py             | 6 +-----
 aria/modeling/service_instance.py          | 1 -
 aria/orchestrator/workflows/core/engine.py | 6 +++---
 aria/orchestrator/workflows/core/task.py   | 2 --
 4 files changed, 4 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/c4ca1e8f/aria/modeling/orchestration.py
----------------------------------------------------------------------
diff --git a/aria/modeling/orchestration.py b/aria/modeling/orchestration.py
index 15abde4..3ad6b58 100644
--- a/aria/modeling/orchestration.py
+++ b/aria/modeling/orchestration.py
@@ -98,13 +98,11 @@ class ExecutionBase(ModelMixin):
     status = Column(Enum(*STATES, name='execution_status'), default=PENDING)
     workflow_name = Column(Text)
 
-    @property
     def has_ended(self):
         return self.status in self.END_STATES
 
-    @property
     def is_active(self):
-        return not self.has_ended
+        return not self.has_ended()
 
     @declared_attr
     def logs(cls):
@@ -290,11 +288,9 @@ class TaskBase(ModelMixin):
     implementation = Column(String)
     _runs_on = Column(Enum(*RUNS_ON, name='runs_on'), name='runs_on')
 
-    @property
     def has_ended(self):
         return self.status in [self.SUCCESS, self.FAILED]
 
-    @property
     def is_waiting(self):
         return self.status in [self.PENDING, self.RETRYING]
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/c4ca1e8f/aria/modeling/service_instance.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py
index 48615af..a8fe71b 100644
--- a/aria/modeling/service_instance.py
+++ b/aria/modeling/service_instance.py
@@ -415,7 +415,6 @@ class NodeBase(InstanceModelMixin):
         except KeyError:
             return None
 
-    @property
     def is_available(self):
         return self.state not in [self.INITIAL, self.DELETED, self.ERROR]
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/c4ca1e8f/aria/orchestrator/workflows/core/engine.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/engine.py b/aria/orchestrator/workflows/core/engine.py
index d32abb8..0503142 100644
--- a/aria/orchestrator/workflows/core/engine.py
+++ b/aria/orchestrator/workflows/core/engine.py
@@ -88,12 +88,12 @@ class Engine(logger.LoggerMixin):
     def _executable_tasks(self):
         now = datetime.utcnow()
         return (task for task in self._tasks_iter()
-                if task.is_waiting and
+                if task.is_waiting() and
                 task.due_at <= now and
                 not self._task_has_dependencies(task))
 
     def _ended_tasks(self):
-        return (task for task in self._tasks_iter() if task.has_ended)
+        return (task for task in self._tasks_iter() if task.has_ended())
 
     def _task_has_dependencies(self, task):
         return len(self._execution_graph.pred.get(task.id, {})) > 0
@@ -105,7 +105,7 @@ class Engine(logger.LoggerMixin):
         for _, data in self._execution_graph.nodes_iter(data=True):
             task = data['task']
             if isinstance(task, engine_task.OperationTask):
-                if not task.model_task.has_ended:
+                if not task.model_task.has_ended():
                     self._workflow_context.model.task.refresh(task.model_task)
             yield task
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/c4ca1e8f/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py
index aa8963f..540988c 100644
--- a/aria/orchestrator/workflows/core/task.py
+++ b/aria/orchestrator/workflows/core/task.py
@@ -69,11 +69,9 @@ class StubTask(BaseTask):
         self.status = models.Task.PENDING
         self.due_at = datetime.utcnow()
 
-    @property
     def has_ended(self):
         return self.status in [models.Task.SUCCESS, models.Task.FAILED]
 
-    @property
     def is_waiting(self):
         return self.status in [models.Task.PENDING, models.Task.RETRYING]
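
The engine.py call sites above show the practical effect; a toy stand-in (not the real model classes) makes the property-to-method change concrete:

    class _ToyTask(object):
        SUCCESS, FAILED, PENDING, RETRYING = 'success', 'failed', 'pending', 'retrying'

        def __init__(self, status):
            self.status = status

        def has_ended(self):        # previously a @property
            return self.status in [self.SUCCESS, self.FAILED]

        def is_waiting(self):       # previously a @property
            return self.status in [self.PENDING, self.RETRYING]

    print(_ToyTask('success').has_ended())    # -> True; note the parentheses now required
    print(_ToyTask('pending').is_waiting())   # -> True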
 


[08/24] incubator-ariatosca git commit: ARIA-48 cli

Posted by ra...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/commands/service_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/service_templates.py b/aria/cli/commands/service_templates.py
new file mode 100644
index 0000000..b855529
--- /dev/null
+++ b/aria/cli/commands/service_templates.py
@@ -0,0 +1,207 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import json
+
+from .. import utils
+from .. import csar
+from ..cli import aria
+from .. import service_template_utils
+from ..table import print_data
+from ..exceptions import AriaCliError
+from ...core import Core
+from ...exceptions import AriaException
+
+
+DESCRIPTION_LIMIT = 20
+SERVICE_TEMPLATE_COLUMNS = \
+    ['id', 'name', 'main_file_name', 'created_at', 'updated_at']
+
+
+@aria.group(name='service-templates')
+@aria.options.verbose()
+def service_templates():
+    """Handle service templates on the manager
+    """
+    pass
+
+
+@service_templates.command(name='show',
+                           short_help='Show service template information')
+@aria.argument('service-template-id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(service_template_id, model_storage, logger):
+    """Show information for a specific service templates
+
+    `SERVICE_TEMPLATE_ID` is the id of the service template to show information on.
+    """
+    logger.info('Showing service template {0}...'.format(service_template_id))
+    service_template = model_storage.service_template.get(service_template_id)
+    services = [d.to_dict() for d in service_template.services]
+    service_template_dict = service_template.to_dict()
+    service_template_dict['#services'] = len(services)
+    columns = SERVICE_TEMPLATE_COLUMNS + ['#services']
+    print_data(columns, service_template_dict, 'Service-template:', max_width=50)
+
+    logger.info('Description:')
+    logger.info('{0}\n'.format((service_template_dict['description'] or '').encode('UTF-8')))
+
+    logger.info('Existing services:')
+    logger.info('{0}\n'.format(json.dumps([d['name'] for d in services])))
+
+
+@service_templates.command(name='list',
+                           short_help='List service templates')
+@aria.options.sort_by()
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(sort_by, descending, model_storage, logger):
+    """List all service templates
+    """
+    def trim_description(service_template):
+        if service_template['description'] is not None:
+            if len(service_template['description']) >= DESCRIPTION_LIMIT:
+                service_template['description'] = '{0}..'.format(
+                    service_template['description'][:DESCRIPTION_LIMIT - 2])
+        else:
+            service_template['description'] = ''
+        return service_template
+
+    logger.info('Listing all service templates...')
+    service_templates = [trim_description(b.to_dict()) for b in model_storage.service_template.list(
+        sort=utils.storage_sort_param(sort_by, descending))]
+    print_data(SERVICE_TEMPLATE_COLUMNS, service_templates, 'Service templates:')
+
+
+@service_templates.command(name='store',
+                           short_help='Store a service template')
+@aria.argument('service-template-path')
+@aria.argument('service-template-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def store(service_template_path, service_template_name, model_storage, resource_storage,
+          plugin_manager, logger):
+    """Store a service template
+
+    `SERVICE_TEMPLATE_PATH` is the path of the service template to store.
+
+    `SERVICE_TEMPLATE_NAME` is the name of the service template to store.
+    """
+    logger.info('Storing service template {0}...'.format(service_template_name))
+
+    service_template_path = service_template_utils.get(service_template_path)
+    core = Core(model_storage, resource_storage, plugin_manager)
+    core.create_service_template(service_template_path,
+                                 os.path.dirname(service_template_path),
+                                 service_template_name)
+
+    logger.info('Service template stored')
+
+
+@service_templates.command(name='delete',
+                           short_help='Delete a service template')
+@aria.argument('service-template-id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def delete(service_template_id, model_storage, resource_storage, plugin_manager, logger):
+    """Delete a service template
+    `SERVICE_TEMPLATE_ID` is the id of the service template to delete.
+    """
+    logger.info('Deleting service template {0}...'.format(service_template_id))
+    core = Core(model_storage, resource_storage, plugin_manager)
+    core.delete_service_template(service_template_id)
+    logger.info('Service template {0} deleted'.format(service_template_id))
+
+
+@service_templates.command(name='inputs',
+                           short_help='Show service template inputs')
+@aria.argument('service-template-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def inputs(service_template_name, model_storage, logger):
+    """Show inputs for a specific service template
+
+    `SERVICE_TEMPLATE_NAME` is the name of the service template to show inputs for.
+    """
+    logger.info('Showing inputs for service template {0}...'.format(service_template_name))
+    print_service_template_inputs(model_storage, service_template_name)
+
+
+@service_templates.command(name='validate',
+                           short_help='Validate a service template')
+@aria.argument('service-template')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def validate_service_template(service_template, model_storage, resource_storage, plugin_manager,
+                              logger):
+    """Validate a service template
+
+    `SERVICE_TEMPLATE` is the path or url of the service template or archive to validate.
+    """
+    logger.info('Validating service template: {0}'.format(service_template))
+    service_template_path = service_template_utils.get(service_template)
+    core = Core(model_storage, resource_storage, plugin_manager)
+
+    try:
+        core.validate_service_template(service_template_path)
+    except AriaException as e:
+        # TODO: gather errors from parser and dump them via CLI?
+        raise AriaCliError(str(e))
+
+    logger.info('Service template validated successfully')
+
+
+@service_templates.command(name='create-archive',
+                           short_help='Create a csar archive')
+@aria.argument('service-template-path')
+@aria.argument('destination')
+@aria.options.verbose()
+@aria.pass_logger
+def create_archive(service_template_path, destination, logger):
+    """Create a csar archive
+
+    `service_template_path` is the path of the service template to create the archive from
+    `destination` is the path of the output csar archive
+    """
+    logger.info('Creating a csar archive')
+    csar.write(os.path.dirname(service_template_path), service_template_path, destination, logger)
+    logger.info('Csar archive created at {0}'.format(destination))
+
+
+@aria.pass_logger
+def print_service_template_inputs(model_storage, service_template_name, logger):
+    service_template = model_storage.service_template.get_by_name(service_template_name)
+
+    logger.info('Service template inputs:')
+    if service_template.inputs:
+        logger.info(utils.get_parameter_templates_as_string(service_template.inputs))
+    else:
+        logger.info('\tNo inputs')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/commands/services.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/services.py b/aria/cli/commands/services.py
new file mode 100644
index 0000000..ce1139b
--- /dev/null
+++ b/aria/cli/commands/services.py
@@ -0,0 +1,175 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+from StringIO import StringIO
+
+from . import service_templates
+from ..cli import aria, helptexts
+from ..exceptions import AriaCliError
+from ..table import print_data
+from ..utils import storage_sort_param
+from ...core import Core
+from ...exceptions import AriaException
+
+
+SERVICE_COLUMNS = ['id', 'name', 'service_template_name', 'created_at', 'updated_at']
+
+
+@aria.group(name='services')
+@aria.options.verbose()
+def services():
+    """Handle services
+    """
+    pass
+
+
+@services.command(name='list', short_help='List services')
+@aria.options.service_template_id()
+@aria.options.sort_by()
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(service_template_id,
+         sort_by,
+         descending,
+         model_storage,
+         logger):
+    """List services
+
+    If `--service-template-id` is provided, list services for that service template.
+    Otherwise, list services for all service templates.
+    """
+    if service_template_id:
+        logger.info('Listing services for service template {0}...'.format(
+            service_template_id))
+        service_template = model_storage.service_template.get(service_template_id)
+        filters = dict(service_template=service_template)
+    else:
+        logger.info('Listing all services...')
+        filters = {}
+
+    services = [d.to_dict() for d in model_storage.service.list(
+        sort=storage_sort_param(sort_by=sort_by, descending=descending),
+        filters=filters)]
+    print_data(SERVICE_COLUMNS, services, 'Services:')
+
+
+@services.command(name='create',
+                  short_help='Create a service')
+@aria.argument('service-name', required=False)
+@aria.options.service_template_name(required=True)
+@aria.options.inputs
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def create(service_template_name,
+           service_name,
+           inputs,
+           model_storage,
+           resource_storage,
+           plugin_manager,
+           logger):
+    """Create a service
+
+    `SERVICE_NAME` is the name of the service you'd like to create.
+
+    """
+    logger.info('Creating new service from service template {0}...'.format(
+        service_template_name))
+
+    try:
+        core = Core(model_storage, resource_storage, plugin_manager)
+        service = core.create_service(service_template_name, inputs, service_name)
+    except AriaException as e:
+        logger.info(str(e))
+        service_templates.print_service_template_inputs(model_storage, service_template_name)
+        raise AriaCliError(str(e))
+
+    logger.info("Service created. The service's name is {0}".format(service.name))
+
+
+@services.command(name='delete',
+                  short_help='Delete a service')
+@aria.argument('service-name')
+@aria.options.force(help=helptexts.IGNORE_RUNNING_NODES)
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def delete(service_name, force, model_storage, resource_storage, plugin_manager, logger):
+    """Delete a service
+
+    `SERVICE_NAME` is the name of the service to delete.
+    """
+    logger.info('Deleting service {0}...'.format(service_name))
+    core = Core(model_storage, resource_storage, plugin_manager)
+    core.delete_service(service_name, force=force)
+    logger.info('Service {0} deleted'.format(service_name))
+
+
+@services.command(name='outputs',
+                  short_help='Show service outputs')
+@aria.argument('service-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def outputs(service_name, model_storage, logger):
+    """Show outputs for a specific service
+
+    `SERVICE_NAME` is the name of the service to print outputs for.
+    """
+    logger.info('Showing outputs for service {0}...'.format(service_name))
+    service = model_storage.service.get_by_name(service_name)
+    #TODO fix this section..
+    outputs_def = service.outputs
+    response = model_storage.service.outputs.get(service_name)
+    outputs_ = StringIO()
+    for output_name, output in response.outputs.iteritems():
+        outputs_.write(' - "{0}":{1}'.format(output_name, os.linesep))
+        description = outputs_def[output_name].get('description', '')
+        outputs_.write('     Description: {0}{1}'.format(description,
+                                                         os.linesep))
+        outputs_.write('     Value: {0}{1}'.format(output, os.linesep))
+    logger.info(outputs_.getvalue())
+
+
+@services.command(name='inputs',
+                  short_help='Show service inputs')
+@aria.argument('service-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def inputs(service_name, model_storage, logger):
+    """Show inputs for a specific service
+
+    `SERVICE_NAME` is the name of the service to print inputs for.
+    """
+    logger.info('Showing inputs for service {0}...'.format(service_name))
+    service = model_storage.service.get_by_name(service_name)
+    if service.inputs:
+        inputs_ = StringIO()
+        for input_name, input in service.inputs.iteritems():
+            inputs_.write(' - "{0}":{1}'.format(input_name, os.linesep))
+            inputs_.write('     Value: {0}{1}'.format(input.value, os.linesep))
+        logger.info(inputs_.getvalue())
+    else:
+        logger.info('\tNo inputs')
+    logger.info('')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/commands/workflows.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/workflows.py b/aria/cli/commands/workflows.py
new file mode 100644
index 0000000..2180168
--- /dev/null
+++ b/aria/cli/commands/workflows.py
@@ -0,0 +1,107 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ..table import print_data
+from .. import utils
+from ..cli import aria
+from ..exceptions import AriaCliError
+from ...storage.exceptions import StorageError
+
+WORKFLOW_COLUMNS = ['name', 'service_template_name', 'service_name']
+
+
+@aria.group(name='workflows')
+def workflows():
+    """Handle service workflows
+    """
+    pass
+
+
+@workflows.command(name='show',
+                   short_help='Show workflow information')
+@aria.argument('workflow-name')
+@aria.options.service_name(required=True)
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(workflow_name, service_name, model_storage, logger):
+    """Show information for a specific workflow of a specific service
+
+    `WORKFLOW_NAME` is the name of the workflow to get information on.
+    """
+    try:
+        logger.info('Retrieving workflow {0} for service {1}'.format(
+            workflow_name, service_name))
+        service = model_storage.service.get(service_name)
+        workflow = next((wf for wf in service.workflows if
+                         wf.name == workflow_name), None)
+        if not workflow:
+            raise AriaCliError(
+                'Workflow {0} not found for service {1}'.format(workflow_name, service_name))
+    except StorageError:
+        raise AriaCliError('service {0} not found'.format(service_name))
+
+    defaults = {
+        'service_template_name': service.service_template_name,
+        'service_name': service.name
+    }
+    print_data(WORKFLOW_COLUMNS, workflow, 'Workflows:', defaults=defaults)
+
+    # print workflow parameters
+    mandatory_params = dict()
+    optional_params = dict()
+    for param_name, param in workflow.parameters.iteritems():
+        params_group = optional_params if 'default' in param else \
+            mandatory_params
+        params_group[param_name] = param
+
+    logger.info('Workflow Parameters:')
+    logger.info('\tMandatory Parameters:')
+    for param_name, param in mandatory_params.iteritems():
+        if 'description' in param:
+            logger.info('\t\t{0}\t({1})'.format(param_name,
+                                                param['description']))
+        else:
+            logger.info('\t\t{0}'.format(param_name))
+
+    logger.info('\tOptional Parameters:')
+    for param_name, param in optional_params.iteritems():
+        if 'description' in param:
+            logger.info('\t\t{0}: \t{1}\t({2})'.format(
+                param_name, param['default'], param['description']))
+        else:
+            logger.info('\t\t{0}: \t{1}'.format(param_name,
+                                                param['default']))
+    logger.info('')
+
+
+@workflows.command(name='list',
+                   short_help='List workflows for a service')
+@aria.options.service_name(required=True)
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(service_name, model_storage, logger):
+    """List all workflows of a specific service
+    """
+    logger.info('Listing workflows for service {0}...'.format(service_name))
+    service = model_storage.service.get_by_name(service_name)
+    workflows = [wf.to_dict() for wf in sorted(service.workflows.values(), key=lambda w: w.name)]
+
+    defaults = {
+        'service_template_name': service.service_template_name,
+        'service_name': service.name
+    }
+    print_data(WORKFLOW_COLUMNS, workflows, 'Workflows:', defaults=defaults)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/config.py
----------------------------------------------------------------------
diff --git a/aria/cli/config.py b/aria/cli/config.py
deleted file mode 100644
index d82886d..0000000
--- a/aria/cli/config.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI configuration
-"""
-
-import os
-import logging
-from getpass import getuser
-from tempfile import gettempdir
-
-from yaml import safe_load
-
-from .storage import config_file_path
-
-# path to a file where cli logs will be saved.
-logging_filename = os.path.join(gettempdir(), 'aria_cli_{0}.log'.format(getuser()))
-# loggers log level to show
-logger_level = logging.INFO
-# loggers log level to show
-colors = True
-
-import_resolver = None
-
-
-def load_configurations():
-    """
-    Dynamically load attributes into the config module from the ``config.yaml`` defined in the user
-    configuration directory
-    """
-    config_path = config_file_path()
-    with open(config_path) as config_file:
-        globals().update(safe_load(config_file) or {})

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/config/__init__.py
----------------------------------------------------------------------
diff --git a/aria/cli/config/__init__.py b/aria/cli/config/__init__.py
new file mode 100644
index 0000000..ae1e83e
--- /dev/null
+++ b/aria/cli/config/__init__.py
@@ -0,0 +1,14 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/config/config.py
----------------------------------------------------------------------
diff --git a/aria/cli/config/config.py b/aria/cli/config/config.py
new file mode 100644
index 0000000..7d76830
--- /dev/null
+++ b/aria/cli/config/config.py
@@ -0,0 +1,70 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import yaml
+import pkg_resources
+
+from jinja2.environment import Template
+
+
+class CliConfig(object):
+
+    def __init__(self, config_path):
+        with open(config_path) as f:
+            self._config = yaml.safe_load(f.read())
+
+    @classmethod
+    def create_config(cls, workdir):
+        config_path = os.path.join(workdir, 'config.yaml')
+        if not os.path.isfile(config_path):
+            config_template = pkg_resources.resource_string(
+                __package__,
+                'config_template.yaml')
+
+            default_values = {
+                'log_path': os.path.join(workdir, 'cli.log'),
+                'enable_colors': True
+            }
+
+            template = Template(config_template)
+            rendered = template.render(**default_values)
+            with open(config_path, 'w') as f:
+                f.write(rendered)
+                f.write(os.linesep)
+
+        return cls(config_path)
+
+    @property
+    def colors(self):
+        return self._config.get('colors', False)
+
+    @property
+    def logging(self):
+        return self.Logging(self._config.get('logging'))
+
+    class Logging(object):
+
+        def __init__(self, logging):
+            self._logging = logging or {}
+
+        @property
+        def filename(self):
+            return self._logging.get('filename')
+
+        @property
+        def loggers(self):
+            return self._logging.get('loggers', {})
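
A short sketch, assuming a throw-away working directory, of how this config object is created and read:

    import os
    from aria.cli.config.config import CliConfig

    workdir = os.path.expanduser('~/.aria')      # assumed workdir; the CLI derives its own
    if not os.path.isdir(workdir):
        os.makedirs(workdir)

    config = CliConfig.create_config(workdir)    # renders config_template.yaml on first use
    print(config.colors)                         # -> True (enable_colors default)
    print(config.logging.filename)               # -> <workdir>/cli.log
    print(config.logging.loggers)                # -> {'aria.cli.main': 'info'}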

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/config/config_template.yaml
----------------------------------------------------------------------
diff --git a/aria/cli/config/config_template.yaml b/aria/cli/config/config_template.yaml
new file mode 100644
index 0000000..13f2cf9
--- /dev/null
+++ b/aria/cli/config/config_template.yaml
@@ -0,0 +1,12 @@
+colors: {{ enable_colors }}
+
+logging:
+
+  # path to a file where cli logs will be saved.
+  filename: {{ log_path }}
+
+  # configuring level per logger
+  loggers:
+
+    # main logger of the cli. provides basic descriptions for executed operations.
+    aria.cli.main: info

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/constants.py
----------------------------------------------------------------------
diff --git a/aria/cli/constants.py b/aria/cli/constants.py
new file mode 100644
index 0000000..67c094d
--- /dev/null
+++ b/aria/cli/constants.py
@@ -0,0 +1,18 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+SAMPLE_SERVICE_TEMPLATE_FILENAME = 'service_template.yaml'
+HELP_TEXT_COLUMN_BUFFER = 5

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/csar.py
----------------------------------------------------------------------
diff --git a/aria/cli/csar.py b/aria/cli/csar.py
index b185f46..5ab581b 100644
--- a/aria/cli/csar.py
+++ b/aria/cli/csar.py
@@ -14,12 +14,13 @@
 # limitations under the License.
 
 import os
+import logging
 import pprint
 import tempfile
 import zipfile
 
 import requests
-from ruamel import yaml # @UnresolvedImport
+from ruamel import yaml
 
 
 META_FILE = 'TOSCA-Metadata/TOSCA.meta'
@@ -167,5 +168,11 @@ class _CSARReader(object):
                     f.write(chunk)
 
 
-def read(source, destination, logger):
+def read(source, destination=None, logger=None):
+    destination = destination or tempfile.mkdtemp()
+    logger = logger or logging.getLogger('dummy')
     return _CSARReader(source=source, destination=destination, logger=logger)
+
+
+def is_csar_archive(source):
+    return source.endswith('.csar')
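
With the relaxed signature, callers no longer have to supply a destination or logger; a minimal sketch, assuming a local archive file:

    from aria.cli import csar

    print(csar.is_csar_archive('my-service.csar'))   # -> True (simple extension check)

    # destination defaults to a fresh temp dir, logger to logging.getLogger('dummy')
    reader = csar.read('my-service.csar')            # assumes the archive actually exists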

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/dry.py
----------------------------------------------------------------------
diff --git a/aria/cli/dry.py b/aria/cli/dry.py
deleted file mode 100644
index 82faf42..0000000
--- a/aria/cli/dry.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from threading import RLock
-
-from ..modeling import models
-from ..orchestrator.decorators import operation
-from ..utils.collections import OrderedDict
-from ..utils.console import puts, Colored
-from ..utils.formatting import safe_repr
-
-
-_TERMINAL_LOCK = RLock()
-
-
-def convert_to_dry(service):
-    """
-    Converts all operations on the service (on workflows, node interfaces, and relationship
-    interfaces) to run dryly.
-    """
-
-    for workflow in service.workflows:
-        convert_operation_to_dry(workflow)
-
-    for node in service.nodes.itervalues():
-        for interface in node.interfaces.itervalues():
-            for oper in interface.operations.itervalues():
-                convert_operation_to_dry(oper)
-        for relationship in node.outbound_relationships:
-            for interface in relationship.interfaces.itervalues():
-                for oper in interface.operations.itervalues():
-                    convert_operation_to_dry(oper)
-
-
-def convert_operation_to_dry(oper):
-    """
-    Converts a single :class:`Operation` to run dryly.
-    """
-
-    plugin = oper.plugin_specification.name \
-        if oper.plugin_specification is not None else None
-    if oper.inputs is None:
-        oper.inputs = OrderedDict()
-    oper.inputs['_implementation'] = models.Parameter(name='_implementation',
-                                                      type_name='string',
-                                                      value=oper.implementation)
-    oper.inputs['_plugin'] = models.Parameter(name='_plugin',
-                                              type_name='string',
-                                              value=plugin)
-    oper.implementation = '{0}.{1}'.format(__name__, 'dry_operation')
-    oper.plugin_specification = None
-
-
-@operation
-def dry_operation(ctx, _plugin, _implementation, **kwargs):
-    """
-    The dry operation simply prints out information about the operation to the console.
-    """
-
-    with _TERMINAL_LOCK:
-        print ctx.name
-        if hasattr(ctx, 'relationship'):
-            puts('> Relationship: {0} -> {1}'.format(
-                Colored.red(ctx.relationship.source_node.name),
-                Colored.red(ctx.relationship.target_node.name)))
-        else:
-            puts('> Node: {0}'.format(Colored.red(ctx.node.name)))
-        puts('  Operation: {0}'.format(Colored.green(ctx.name)))
-        _dump_implementation(_plugin, _implementation)
-
-
-def _dump_implementation(plugin, implementation):
-    if plugin:
-        puts('  Plugin: {0}'.format(Colored.magenta(plugin, bold=True)))
-    if implementation:
-        puts('  Implementation: {0}'.format(Colored.magenta(safe_repr(implementation))))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/env.py
----------------------------------------------------------------------
diff --git a/aria/cli/env.py b/aria/cli/env.py
new file mode 100644
index 0000000..5d34141
--- /dev/null
+++ b/aria/cli/env.py
@@ -0,0 +1,118 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import json
+import pkgutil
+
+from .config import config
+from .logger import Logging
+from .. import (application_model_storage, application_resource_storage)
+from ..orchestrator.plugin import PluginManager
+from ..storage.sql_mapi import SQLAlchemyModelAPI
+from ..storage.filesystem_rapi import FileSystemResourceAPI
+
+
+ARIA_DEFAULT_WORKDIR_NAME = '.aria'
+
+
+class Environment(object):
+
+    def __init__(self, workdir):
+
+        self._workdir = workdir
+        self._init_workdir()
+
+        self._config = config.CliConfig.create_config(workdir)
+        self._logging = Logging(self._config)
+
+        self._model_storage_dir = os.path.join(workdir, 'models')
+        self._resource_storage_dir = os.path.join(workdir, 'resources')
+        self._plugins_dir = os.path.join(workdir, 'plugins')
+
+        # initialized lazily
+        self._model_storage = None
+        self._resource_storage = None
+        self._plugin_manager = None
+
+    @property
+    def workdir(self):
+        return self._workdir
+
+    @property
+    def config(self):
+        return self._config
+
+    @property
+    def logging(self):
+        return self._logging
+
+    @property
+    def model_storage(self):
+        if not self._model_storage:
+            self._model_storage = self._init_sqlite_model_storage()
+        return self._model_storage
+
+    @property
+    def resource_storage(self):
+        if not self._resource_storage:
+            self._resource_storage = self._init_fs_resource_storage()
+        return self._resource_storage
+
+    @property
+    def plugin_manager(self):
+        if not self._plugin_manager:
+            self._plugin_manager = self._init_plugin_manager()
+        return self._plugin_manager
+
+    @staticmethod
+    def get_version_data():
+        data = pkgutil.get_data(__package__, 'VERSION')
+        return json.loads(data)
+
+    def _init_workdir(self):
+        if not os.path.exists(self._workdir):
+            os.makedirs(self._workdir)
+
+    def _init_sqlite_model_storage(self):
+        if not os.path.exists(self._model_storage_dir):
+            os.makedirs(self._model_storage_dir)
+
+        initiator_kwargs = dict(base_dir=self._model_storage_dir)
+        return application_model_storage(
+            SQLAlchemyModelAPI,
+            initiator_kwargs=initiator_kwargs)
+
+    def _init_fs_resource_storage(self):
+        if not os.path.exists(self._resource_storage_dir):
+            os.makedirs(self._resource_storage_dir)
+
+        fs_kwargs = dict(directory=self._resource_storage_dir)
+        return application_resource_storage(
+            FileSystemResourceAPI,
+            api_kwargs=fs_kwargs)
+
+    def _init_plugin_manager(self):
+        if not os.path.exists(self._plugins_dir):
+            os.makedirs(self._plugins_dir)
+
+        return PluginManager(self._model_storage, self._plugins_dir)
+
+
+env = Environment(os.path.join(
+    os.environ.get('ARIA_WORKDIR', os.path.expanduser('~')), ARIA_DEFAULT_WORKDIR_NAME))
+
+logger = env.logging.logger
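
Because env is created at import time, other CLI modules can simply import it; a small usage sketch:

    from aria.cli.env import env, logger

    logger.info('ARIA workdir: {0}'.format(env.workdir))
    model_storage = env.model_storage        # lazy SQLite model storage under <workdir>/models
    resource_storage = env.resource_storage  # file-system resource storage under <workdir>/resources
    plugin_manager = env.plugin_manager      # plugin manager rooted at <workdir>/plugins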

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/cli/exceptions.py b/aria/cli/exceptions.py
index 6897731..89cfacd 100644
--- a/aria/cli/exceptions.py
+++ b/aria/cli/exceptions.py
@@ -13,59 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""
-CLI various exception classes
-"""
 
+from ..exceptions import AriaError
 
-class AriaCliError(Exception):
-    """
-    General CLI Exception class
-    """
-    pass
-
-
-class AriaCliFormatInputsError(AriaCliError):
-    """
-    Raised when provided inputs are malformed.
-    """
-
-    def __init__(self, message, inputs):
-        self.inputs = inputs
-        super(AriaCliFormatInputsError, self).__init__(message)
-
-    def user_message(self):
-        """
-        Describes the format error in detail.
-        """
-        return (
-            'Invalid input format: {0}, '
-            'the expected format is: '
-            'key1=value1;key2=value2'.format(self.inputs))
 
-
-class AriaCliYAMLInputsError(AriaCliError):
-    """
-    Raised when an invalid yaml file is provided
-    """
+class AriaCliError(AriaError):
     pass
-
-
-class AriaCliInvalidInputsError(AriaCliFormatInputsError):
-    """
-    Raised when provided inputs are invalid.
-    """
-
-    def user_message(self):
-        """
-        Describes the error in detail.
-        """
-        return (
-            'Invalid input: {0}. input must represent a dictionary.\n'
-            'Valid values can be one of:\n'
-            '- a path to a YAML file\n'
-            '- a path to a directory containing YAML files\n'
-            '- a single quoted wildcard based path (e.g. "*-inputs.yaml")\n'
-            '- a string formatted as JSON\n'
-            '- a string formatted as key1=value1;key2=value2'.format(self.inputs)
-        )

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/inputs.py
----------------------------------------------------------------------
diff --git a/aria/cli/inputs.py b/aria/cli/inputs.py
new file mode 100644
index 0000000..2077b67
--- /dev/null
+++ b/aria/cli/inputs.py
@@ -0,0 +1,118 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import glob
+import yaml
+
+from .env import logger
+from .exceptions import AriaCliError
+
+
+def inputs_to_dict(resources):
+    """Returns a dictionary of inputs
+
+    `resources` can be:
+    - A list of files.
+    - A single file
+    - A directory containing multiple input files
+    - A key1=value1;key2=value2 pairs string.
+    - A string formatted as JSON/YAML.
+    - Wildcard based string (e.g. *-inputs.yaml)
+    """
+    if not resources:
+        return dict()
+
+    parsed_dict = {}
+
+    for resource in resources:
+        logger.debug('Processing inputs source: {0}'.format(resource))
+        # Workflow parameters always pass an empty dictionary. We ignore it
+        if isinstance(resource, basestring):
+            try:
+                parsed_dict.update(_parse_single_input(resource))
+            except AriaCliError:
+                raise AriaCliError(
+                    "Invalid input: {0}. It must represent a dictionary. "
+                    "Valid values can be one of:\n "
+                    "- A path to a YAML file\n "
+                    "- A path to a directory containing YAML files\n "
+                    "- A single quoted wildcard based path "
+                    "(e.g. '*-inputs.yaml')\n "
+                    "- A string formatted as JSON/YAML\n "
+                    "- A string formatted as key1=value1;key2=value2".format(
+                        resource))
+    return parsed_dict
+
+
+def _parse_single_input(resource):
+    try:
+        # parse resource as string representation of a dictionary
+        return plain_string_to_dict(resource)
+    except AriaCliError:
+        input_files = glob.glob(resource)
+        parsed_dict = dict()
+        if os.path.isdir(resource):
+            for input_file in os.listdir(resource):
+                parsed_dict.update(
+                    _parse_yaml_path(os.path.join(resource, input_file)))
+        elif input_files:
+            for input_file in input_files:
+                parsed_dict.update(_parse_yaml_path(input_file))
+        else:
+            parsed_dict.update(_parse_yaml_path(resource))
+    return parsed_dict
+
+
+def _parse_yaml_path(resource):
+
+    try:
+        # if resource is a path - parse as a yaml file
+        if os.path.isfile(resource):
+            with open(resource) as f:
+                content = yaml.load(f.read())
+        else:
+            # parse resource content as yaml
+            content = yaml.load(resource)
+    except yaml.error.YAMLError as e:
+        raise AriaCliError("'{0}' is not a valid YAML. {1}".format(
+            resource, str(e)))
+
+    # Empty files return None
+    content = content or dict()
+    if not isinstance(content, dict):
+        raise AriaCliError()
+
+    return content
+
+
+def plain_string_to_dict(input_string):
+    input_string = input_string.strip()
+    input_dict = {}
+    mapped_inputs = input_string.split(';')
+    for mapped_input in mapped_inputs:
+        mapped_input = mapped_input.strip()
+        if not mapped_input:
+            continue
+        split_mapping = mapped_input.split('=')
+        try:
+            key = split_mapping[0].strip()
+            value = split_mapping[1].strip()
+        except IndexError:
+            raise AriaCliError(
+                "Invalid input format: {0}, the expected format is: "
+                "key1=value1;key2=value2".format(input_string))
+        input_dict[key] = value
+    return input_dict
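
For illustration, a minimal usage sketch of the functions above (assuming this
module is importable as aria.cli.inputs once ARIA is installed; importing the
CLI env module it depends on may initialize ARIA's working directory):

    from aria.cli.inputs import inputs_to_dict, plain_string_to_dict

    print(plain_string_to_dict('key1=value1; key2=value2'))
    # -> {'key1': 'value1', 'key2': 'value2'}

    print(inputs_to_dict(['{"key3": "value3"}', 'key4=value4']))
    # -> {'key3': 'value3', 'key4': 'value4'} (key order may vary)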

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/logger.py
----------------------------------------------------------------------
diff --git a/aria/cli/logger.py b/aria/cli/logger.py
new file mode 100644
index 0000000..289dbd3
--- /dev/null
+++ b/aria/cli/logger.py
@@ -0,0 +1,113 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import copy
+import logging
+import logging.config
+
+
+HIGH_VERBOSE = 3
+MEDIUM_VERBOSE = 2
+LOW_VERBOSE = 1
+NO_VERBOSE = 0
+
+DEFAULT_LOGGER_CONFIG = {
+    "version": 1,
+    "formatters": {
+        "file": {
+            "format": "%(asctime)s [%(levelname)s] %(message)s"
+        },
+        "console": {
+            "format": "%(message)s"
+        }
+    },
+    "handlers": {
+        "file": {
+            "class": "logging.handlers.RotatingFileHandler",
+            "formatter": "file",
+            "maxBytes": "5000000",
+            "backupCount": "20"
+        },
+        "console": {
+            "class": "logging.StreamHandler",
+            "stream": "ext://sys.stdout",
+            "formatter": "console"
+        }
+    }
+}
+
+
+class Logging(object):
+
+    def __init__(self, config):
+        self._log_file = None
+        self._verbosity_level = NO_VERBOSE
+        self._all_loggers = []
+        self._configure_loggers(config)
+        self._lgr = logging.getLogger('aria.cli.main')
+
+    @property
+    def logger(self):
+        return self._lgr
+
+    @property
+    def log_file(self):
+        return self._log_file
+
+    @property
+    def verbosity_level(self):
+        return self._verbosity_level
+
+    def is_high_verbose_level(self):
+        return self.verbosity_level == HIGH_VERBOSE
+
+    @verbosity_level.setter
+    def verbosity_level(self, level):
+        self._verbosity_level = level
+        if self.is_high_verbose_level():
+            for logger_name in self._all_loggers:
+                logging.getLogger(logger_name).setLevel(logging.DEBUG)
+
+    def _configure_loggers(self, config):
+        loggers_config = config.logging.loggers
+        logfile = config.logging.filename
+
+        logger_dict = copy.deepcopy(DEFAULT_LOGGER_CONFIG)
+        if logfile:
+            # set filename on file handler
+            logger_dict['handlers']['file']['filename'] = logfile
+            logfile_dir = os.path.dirname(logfile)
+            if not os.path.exists(logfile_dir):
+                os.makedirs(logfile_dir)
+            self._log_file = logfile
+        else:
+            del logger_dict['handlers']['file']
+
+        # add handlers to all loggers
+        loggers = {}
+        for logger_name in loggers_config:
+            loggers[logger_name] = dict(handlers=list(logger_dict['handlers'].keys()))
+        logger_dict['loggers'] = loggers
+
+        # set level for all loggers
+        for logger_name, logging_level in loggers_config.iteritems():
+            log = logging.getLogger(logger_name)
+            level = logging._levelNames[logging_level.upper()]
+            log.setLevel(level)
+            self._all_loggers.append(logger_name)
+
+        logging.config.dictConfig(logger_dict)
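
A minimal sketch (Python 2) of the config object shape that _configure_loggers()
expects; the namedtuples below are illustrative stand-ins rather than ARIA
classes, and the aria.cli.logger import path is assumed:

    import collections

    from aria.cli.logger import Logging

    _LoggingConf = collections.namedtuple('_LoggingConf', 'loggers filename')
    _Config = collections.namedtuple('_Config', 'logging')

    # console-only logging: no filename, so the file handler is dropped
    config = _Config(logging=_LoggingConf(loggers={'aria.cli.main': 'info'},
                                          filename=None))
    cli_logging = Logging(config)
    cli_logging.logger.info('console logging configured')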

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/main.py
----------------------------------------------------------------------
diff --git a/aria/cli/main.py b/aria/cli/main.py
new file mode 100644
index 0000000..4544e40
--- /dev/null
+++ b/aria/cli/main.py
@@ -0,0 +1,73 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#TODO handle
+if __name__ == '__main__' and __package__ is None:
+    import aria.cli
+    __package__ = 'aria.cli'
+
+# from . import env
+from . import logger
+from .cli import aria
+from .commands import service_templates
+from .commands import node_templates
+from .commands import services
+from .commands import nodes
+from .commands import workflows
+from .commands import executions
+from .commands import plugins
+from .commands import logs
+from .. import install_aria_extensions
+
+
+@aria.group(name='aria')
+@aria.options.verbose()
+@aria.options.version
+def _aria():
+    """ARIA's Command Line Interface
+
+    To activate bash-completion, run: `eval "$(_ARIA_COMPLETE=source aria)"`
+
+    ARIA's working directory resides by default in ~/.aria. To change it, set
+    the environment variable `ARIA_WORKDIR` to something else (e.g. /tmp/).
+    """
+    aria.set_cli_except_hook()
+
+
+def _register_commands():
+    """
+    Register the CLI's commands.
+    """
+
+    _aria.add_command(service_templates.service_templates)
+    _aria.add_command(node_templates.node_templates)
+    _aria.add_command(services.services)
+    _aria.add_command(nodes.nodes)
+    _aria.add_command(workflows.workflows)
+    _aria.add_command(executions.executions)
+    _aria.add_command(plugins.plugins)
+    _aria.add_command(logs.logs)
+
+
+_register_commands()
+
+
+def main():
+    install_aria_extensions()
+    _aria()
+
+
+if __name__ == '__main__':
+    main()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/service_template_utils.py
----------------------------------------------------------------------
diff --git a/aria/cli/service_template_utils.py b/aria/cli/service_template_utils.py
new file mode 100644
index 0000000..4ef4ff1
--- /dev/null
+++ b/aria/cli/service_template_utils.py
@@ -0,0 +1,140 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from urlparse import urlparse
+
+from . import csar
+from . import utils
+from .exceptions import AriaCliError
+from .constants import SAMPLE_SERVICE_TEMPLATE_FILENAME
+from ..utils import archive as archive_utils
+
+
+def get(source, service_template_filename=SAMPLE_SERVICE_TEMPLATE_FILENAME):
+    """Get a source and return a path to the main service template file
+
+    The behavior based on then source argument content is:
+        -
+        - local archive:
+            extract it locally and return path service template file
+        - local yaml file: return the file
+        - URL:
+            - return it (download=False)
+            - download and get service template from downloaded file (download=True)
+        - github repo:
+            - map it to a URL and return it (download=False)
+            - download and get service template from downloaded file (download=True)
+
+    Supported archive types are: csar, zip, tar, tar.gz and tar.bz2
+
+    :param source: Path/URL/github repo to archive/service-template file
+    :type source: str
+    :param service_template_filename: Path to service template (if source is an archive file)
+    :type service_template_filename: str
+    :param download: Download service template file if source is URL/github repo
+    :type download: bool
+    :return: Path to file (if archive/service-template file passed) or url
+    :rtype: str
+
+    """
+    if urlparse(source).scheme:
+        downloaded_file = utils.download_file(source)
+        return _get_service_template_file_from_archive(
+            downloaded_file, service_template_filename)
+    elif os.path.isfile(source):
+        if _is_archive(source):
+            return _get_service_template_file_from_archive(source, service_template_filename)
+        else:
+            # Maybe check if yaml.
+            return source
+    elif len(source.split('/')) == 2:
+        url = _map_to_github_url(source)
+        downloaded_file = utils.download_file(url)
+        return _get_service_template_file_from_archive(
+            downloaded_file, service_template_filename)
+    else:
+        raise AriaCliError(
+            'You must provide either a path to a local file, a remote URL '
+            'or a GitHub `organization/repository[:tag/branch]`')
+
+
+def _get_service_template_file_from_archive(archive, service_template_filename):
+    """Extract archive to temporary location and get path to service template file.
+
+    :param archive: Path to archive file
+    :type archive: str
+    :param service_template_filename: Path to service template file relative to archive
+    :type service_template_filename: str
+    :return: Absolute path to service template file
+    :rtype: str
+
+    """
+    if csar.is_csar_archive(archive):
+        service_template_file = _extract_csar_archive(archive)
+    else:
+        extract_directory = archive_utils.extract_archive(archive)
+        service_template_dir = os.path.join(
+            extract_directory,
+            os.listdir(extract_directory)[0],
+        )
+        service_template_file = os.path.join(service_template_dir, service_template_filename)
+
+    if not os.path.isfile(service_template_file):
+        raise AriaCliError(
+            'Could not find `{0}`. Please provide the name of the main '
+            'service template file by using the `-n/--service-template-filename` flag'
+            .format(service_template_filename))
+    return service_template_file
+
+
+def _map_to_github_url(source):
+    """Returns a path to a downloaded github archive.
+
+    :param source: github repo in the format of `org/repo[:tag/branch]`.
+    :type source: str
+    :return: URL to the archive file for the given repo in github
+    :rtype: str
+
+    """
+    source_parts = source.split(':', 1)
+    repo = source_parts[0]
+    tag = source_parts[1] if len(source_parts) == 2 else 'master'
+    url = 'https://github.com/{0}/archive/{1}.tar.gz'.format(repo, tag)
+    return url
+
+
+def generate_id(service_template_path, service_template_filename=SAMPLE_SERVICE_TEMPLATE_FILENAME):
+    """The name of the service template will be the name of the folder.
+    If service_template_filename is provided, it will be appended to the folder.
+    """
+    service_template_id = os.path.split(os.path.dirname(os.path.abspath(
+        service_template_path)))[-1]
+    if service_template_filename != SAMPLE_SERVICE_TEMPLATE_FILENAME:
+        filename, _ = os.path.splitext(os.path.basename(service_template_filename))
+        service_template_id = (service_template_id + '.' + filename)
+    return service_template_id.replace('_', '-')
+
+
+def _is_archive(source):
+    return archive_utils.is_archive(source) or csar.is_csar_archive(source)
+
+
+def _extract_csar_archive(archive):
+    if csar.is_csar_archive(archive):
+        reader = csar.read(source=archive)
+        main_service_template_file_name = os.path.basename(reader.entry_definitions)
+        return os.path.join(reader.destination,
+                            main_service_template_file_name)

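A standalone sketch of the `org/repo[:tag/branch]` resolution performed by
_map_to_github_url() above (re-implemented here so it runs without ARIA
installed):

    def map_to_github_url(source):
        # org/repo[:tag] -> tarball URL; the tag defaults to master
        repo, _, tag = source.partition(':')
        return 'https://github.com/{0}/archive/{1}.tar.gz'.format(repo, tag or 'master')

    print(map_to_github_url('apache/incubator-ariatosca'))
    # https://github.com/apache/incubator-ariatosca/archive/master.tar.gz
    print(map_to_github_url('apache/incubator-ariatosca:0.1.0'))
    # https://github.com/apache/incubator-ariatosca/archive/0.1.0.tar.gz
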
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/storage.py
----------------------------------------------------------------------
diff --git a/aria/cli/storage.py b/aria/cli/storage.py
deleted file mode 100644
index fa1518b..0000000
--- a/aria/cli/storage.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Filesystem related CLI storage location and configuration
-"""
-
-import os
-import getpass
-from shutil import rmtree
-
-work_space_directory = '.aria'
-storage_directory_name = 'local-storage'
-
-
-def user_space(user_name=getpass.getuser()):
-    """
-    Base work directory
-    """
-    user_path = '~{0}'.format(user_name)
-    real_path = os.path.expanduser(user_path)
-    if os.path.exists(real_path):
-        return os.path.join(real_path, work_space_directory)
-    return os.path.join(os.getcwd(), work_space_directory)
-
-
-def local_storage(user_name=getpass.getuser()):
-    """
-    Base storage directory
-    """
-    return os.path.join(user_space(user_name), storage_directory_name)
-
-
-def local_model_storage():
-    """
-    Model storage directory
-    """
-    return os.path.join(local_storage(), 'models')
-
-
-def local_resource_storage():
-    """
-    Resource storage directory
-    """
-    return os.path.join(local_storage(), 'resources')
-
-
-def config_file_path():
-    """
-    Configuration file path
-    """
-    path = os.path.join(user_space(), 'config.yaml')
-    if not os.path.exists(path):
-        open(path, 'w').close()
-    return path
-
-
-def create_user_space(user_name=getpass.getuser(), override=False):
-    """
-    Creates the base work directory
-    """
-    path = user_space(user_name)
-    if os.path.exists(path):
-        if override:
-            rmtree(path, ignore_errors=True)
-        else:
-            raise IOError('user space {0} already exists'.format(path))
-    os.mkdir(path)
-    return path
-
-
-def create_local_storage(user_name=getpass.getuser(), override=False):
-    """
-    Creates the base storage directory
-    """
-    path = local_storage(user_name)
-    if os.path.exists(path):
-        if override:
-            rmtree(path, ignore_errors=True)
-        else:
-            raise IOError('local storage {0} already exists'.format(path))
-    os.mkdir(path)
-    return path

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/table.py
----------------------------------------------------------------------
diff --git a/aria/cli/table.py b/aria/cli/table.py
new file mode 100644
index 0000000..9c195f5
--- /dev/null
+++ b/aria/cli/table.py
@@ -0,0 +1,90 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from datetime import datetime
+
+from .env import logger
+
+from prettytable import PrettyTable
+
+
+def generate(cols, data, defaults=None):
+    """
+    Return a new PrettyTable instance representing the data.
+
+    Arguments:
+
+        cols - An iterable of strings specifying the columns of
+               the table.
+
+               for example: ['id', 'name']
+
+        data - An iterable of dictionaries; each dictionary must
+               have keys corresponding to the cols items.
+
+               for example: [{'id': '123', 'name': 'Pete'}]
+
+        defaults - A dictionary specifying default values for
+                   keys that don't exist in the data itself.
+
+                   for example: {'serviceId': '123'} will set the
+                   serviceId value to '123' for any row that lacks it.
+
+    """
+    def get_values_per_column(column, row_data):
+        if column in row_data:
+            if row_data[column] and isinstance(row_data[column], basestring):
+                try:
+                    datetime.strptime(row_data[column][:10], '%Y-%m-%d')
+                    row_data[column] = \
+                        row_data[column].replace('T', ' ').replace('Z', ' ')
+                except ValueError:
+                    # not a timestamp
+                    pass
+            elif row_data[column] and isinstance(row_data[column], list):
+                row_data[column] = ','.join(row_data[column])
+            elif not row_data[column]:
+                # empty values (e.g. an empty list) are printed as an empty string
+                row_data[column] = ''
+            return row_data[column]
+        else:
+            return defaults[column]
+
+    pt = PrettyTable([col for col in cols])
+
+    for d in data:
+        values_row = []
+        for c in cols:
+            values_row.append(get_values_per_column(c, d))
+        pt.add_row(values_row)
+
+    return pt
+
+
+def log(title, tb):
+    logger.info('{0}{1}{0}{2}{0}'.format(os.linesep, title, tb))
+
+
+def print_data(columns, items, header_text, max_width=None, defaults=None):
+    if items is None:
+        items = []
+    elif not isinstance(items, list):
+        items = [items]
+
+    pt = generate(columns, data=items, defaults=defaults)
+    if max_width:
+        pt.max_width = max_width
+    log(header_text, pt)
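
A standalone sketch of the kind of table generate()/print_data() produce, using
prettytable directly with the docstring's example data (not ARIA code):

    from prettytable import PrettyTable

    rows = [{'id': '123', 'name': 'Pete'}, {'id': '456', 'name': ''}]
    table = PrettyTable(['id', 'name'])
    for row in rows:
        table.add_row([row['id'], row['name']])
    print(table)  # prints an ASCII table with id/name columns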

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/cli/utils.py
----------------------------------------------------------------------
diff --git a/aria/cli/utils.py b/aria/cli/utils.py
new file mode 100644
index 0000000..3b68729
--- /dev/null
+++ b/aria/cli/utils.py
@@ -0,0 +1,152 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import string
+import random
+import tempfile
+from StringIO import StringIO
+
+from backports.shutil_get_terminal_size import get_terminal_size
+import requests
+
+from .env import logger
+from .exceptions import AriaCliError
+
+
+def dump_to_file(collection, file_path):
+    with open(file_path, 'a') as f:
+        f.write(os.linesep.join(collection))
+        f.write(os.linesep)
+
+
+def is_virtual_env():
+    return hasattr(sys, 'real_prefix')
+
+
+def storage_sort_param(sort_by, descending):
+    return {sort_by: 'desc' if descending else 'asc'}
+
+
+def generate_random_string(size=6,
+                           chars=string.ascii_uppercase + string.digits):
+    return ''.join(random.choice(chars) for _ in range(size))
+
+
+def generate_suffixed_id(id):
+    return '{0}_{1}'.format(id, generate_random_string())
+
+
+def get_parameter_templates_as_string(parameter_templates):
+    params_string = StringIO()
+
+    for param_name, param_template in parameter_templates.iteritems():
+        params_string.write('\t{0}:{1}'.format(param_name, os.linesep))
+        param_dict = param_template.to_dict()
+        del param_dict['id']  # not interested in printing the id
+        for k, v in param_dict.iteritems():
+            params_string.write('\t\t{0}: {1}{2}'.format(k, v, os.linesep))
+
+    params_string.write(os.linesep)
+    return params_string.getvalue()
+
+
+def download_file(url, destination=None):
+    """Download file.
+
+    :param url: Location of the file to download
+    :type url: str
+    :param destination:
+        Location where the file should be saved (autogenerated by default)
+    :type destination: str | None
+    :returns: Location where the file was saved
+    :rtype: str
+
+    """
+    CHUNK_SIZE = 1024
+
+    if not destination:
+        fd, destination = tempfile.mkstemp()
+        os.close(fd)
+    logger.info('Downloading {0} to {1}...'.format(url, destination))
+
+    try:
+        response = requests.get(url, stream=True)
+    except requests.exceptions.RequestException as ex:
+        raise AriaCliError(
+            'Failed to download {0}. ({1})'.format(url, str(ex)))
+
+    final_url = response.url
+    if final_url != url:
+        logger.debug('Redirected to {0}'.format(final_url))
+
+    try:
+        with open(destination, 'wb') as destination_file:
+            for chunk in response.iter_content(CHUNK_SIZE):
+                destination_file.write(chunk)
+    except IOError as ex:
+        raise AriaCliError(
+            'Failed to download {0}. ({1})'.format(url, str(ex)))
+
+    return destination
+
+
+def generate_progress_handler(file_path, action='', max_bar_length=80):
+    """Returns a function that prints a progress bar in the terminal
+
+    :param file_path: The name of the file being transferred
+    :param action: Uploading/Downloading
+    :param max_bar_length: Maximum allowed length of the bar. Default: 80
+    :return: The configured print_progress function
+    """
+    # We want to limit the maximum line length to 80, but allow for a smaller
+    # terminal size. We also include the action string, and some extra chars
+    terminal_width = get_terminal_size().columns
+
+    # This takes care of the case where there is no terminal (e.g. unittest)
+    terminal_width = terminal_width or max_bar_length
+    bar_length = min(max_bar_length, terminal_width) - len(action) - 12
+
+    # Shorten the file name if it's too long
+    file_name = os.path.basename(file_path)
+    if len(file_name) > (bar_length / 4) + 3:
+        file_name = file_name[:bar_length / 4] + '...'
+
+    bar_length -= len(file_name)
+
+    def print_progress(read_bytes, total_bytes):
+        """Print upload/download progress on a single line
+
+        Call this function in a loop to create a progress bar in the terminal
+
+        :param read_bytes: Number of bytes already processed
+        :param total_bytes: Total number of bytes in the file
+        """
+
+        filled_length = min(bar_length, int(round(bar_length * read_bytes /
+                                                  float(total_bytes))))
+        percents = min(100.00, round(
+            100.00 * (read_bytes / float(total_bytes)), 2))
+        bar = '#' * filled_length + '-' * (bar_length - filled_length)
+
+        # The \r caret makes sure the cursor moves back to the beginning of
+        # the line
+        sys.stdout.write('\r{0} {1} |{2}| {3}%'.format(
+            action, file_name, bar, percents))
+        if read_bytes >= total_bytes:
+            sys.stdout.write('\n')
+
+    return print_progress
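
A standalone sketch of the bar arithmetic used by print_progress() above
(the file name and byte counts are illustrative):

    bar_length = 40
    read_bytes, total_bytes = 768, 1024

    filled = min(bar_length, int(round(bar_length * read_bytes / float(total_bytes))))
    percents = min(100.00, round(100.00 * (read_bytes / float(total_bytes)), 2))
    print('\rUploading example.csar |{0}| {1}%'.format(
        '#' * filled + '-' * (bar_length - filled), percents))
    # 30 of 40 slots filled, 75.0%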

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/core.py
----------------------------------------------------------------------
diff --git a/aria/core.py b/aria/core.py
new file mode 100644
index 0000000..96a967f
--- /dev/null
+++ b/aria/core.py
@@ -0,0 +1,116 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import exceptions
+from .modeling import models
+from .modeling import utils as modeling_utils
+from .parser.consumption import (
+    ConsumptionContext,
+    ConsumerChain,
+    Read,
+    Validate,
+    ServiceTemplate)
+from .parser.loading.location import UriLocation
+
+
+class Core(object):
+
+    def __init__(self,
+                 model_storage,
+                 resource_storage,
+                 plugin_manager):
+        self._model_storage = model_storage
+        self._resource_storage = resource_storage
+        self._plugin_manager = plugin_manager
+
+    @property
+    def model_storage(self):
+        return self._model_storage
+
+    @property
+    def resource_storage(self):
+        return self._resource_storage
+
+    @property
+    def plugin_manager(self):
+        return self._plugin_manager
+
+    def validate_service_template(self, service_template_path):
+        self._parse_service_template(service_template_path)
+
+    def create_service_template(self, service_template_path, service_template_dir,
+                                service_template_name):
+        context = self._parse_service_template(service_template_path)
+        service_template = context.modeling.template
+        service_template.name = service_template_name
+        self.model_storage.service_template.put(service_template)
+        self.resource_storage.service_template.upload(
+            entry_id=str(service_template.id), source=service_template_dir)
+
+    def delete_service_template(self, service_template_id):
+        service_template = self.model_storage.service_template.get(service_template_id)
+        if service_template.services.all():
+            raise exceptions.DependentServicesError(
+                "Can't delete service template {0} - Service template has existing services"
+                .format(service_template_id))
+
+        self.model_storage.service_template.delete(service_template)
+        self.resource_storage.service_template.delete(entry_id=str(service_template.id))
+
+    def create_service(self, service_template_name, inputs, service_name=None):
+        service_template = self.model_storage.service_template.get_by_name(service_template_name)
+
+        # creating an empty ConsumptionContext, initiating a threadlocal context
+        ConsumptionContext()
+        with self.model_storage._all_api_kwargs['session'].no_autoflush:
+            service = service_template.instantiate(None)
+
+        template_inputs = service_template.inputs
+        service.inputs = modeling_utils.create_inputs(inputs, template_inputs)
+        # TODO: now that we have inputs, we should scan properties and inputs and evaluate functions
+
+        # first put the service model so it could have an id, as fallback for setting its name
+        self.model_storage.service.put(service)
+        service.name = service_name or '{0}_{1}'.format(service_template_name, service.id)
+        self.model_storage.service.update(service)
+        return service
+
+    def delete_service(self, service_name, force=False):
+        service = self.model_storage.service.get_by_name(service_name)
+
+        active_executions = [e for e in service.executions
+                             if e.status in models.Execution.ACTIVE_STATES]
+        if active_executions:
+            raise exceptions.DependentActiveExecutionsError(
+                "Can't delete service {0} - there is an active execution for this service. "
+                "Active execution id: {1}".format(service_name, active_executions[0].id))
+
+        if not force:
+            available_nodes = [n for n in service.nodes.values()
+                               if n.state not in ('deleted', 'errored')]
+            if available_nodes:
+                raise exceptions.DependentAvailableNodesError(
+                    "Can't delete service {0} - there are available nodes for this service. "
+                    "Available node ids: {1}".format(
+                        service_name, [n.id for n in available_nodes]))
+
+        self.model_storage.service.delete(service)
+
+    @staticmethod
+    def _parse_service_template(service_template_path):
+        context = ConsumptionContext()
+        context.presentation.location = UriLocation(service_template_path)
+        ConsumerChain(context, (Read, Validate, ServiceTemplate)).consume()
+        if context.validation.dump_issues():
+            raise exceptions.ParsingError('Failed to parse service template')
+        return context

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/exceptions.py b/aria/exceptions.py
index a180ce1..72adda5 100644
--- a/aria/exceptions.py
+++ b/aria/exceptions.py
@@ -44,3 +44,19 @@ class AriaException(Exception):
                 # Make sure it's our traceback
                 cause_traceback = traceback
         self.cause_traceback = cause_traceback
+
+
+class DependentServicesError(AriaError):
+    pass
+
+
+class DependentActiveExecutionsError(AriaError):
+    pass
+
+
+class DependentAvailableNodesError(AriaError):
+    pass
+
+
+class ParsingError(AriaError):
+    pass

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/aria/modeling/__init__.py b/aria/modeling/__init__.py
index 4dfc39d..4ac79e7 100644
--- a/aria/modeling/__init__.py
+++ b/aria/modeling/__init__.py
@@ -19,6 +19,7 @@ from . import (
     mixins,
     types,
     models,
+    utils,
     service_template as _service_template_bases,
     service_instance as _service_instance_bases,
     service_changes as _service_changes_bases,
@@ -45,4 +46,5 @@ __all__ = (
     'types',
     'models',
     'model_bases',
+    'utils'
 )

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/modeling/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/modeling/exceptions.py b/aria/modeling/exceptions.py
index 6931c78..f699560 100644
--- a/aria/modeling/exceptions.py
+++ b/aria/modeling/exceptions.py
@@ -32,3 +32,21 @@ class CannotEvaluateFunctionException(ModelingException):
     """
     ARIA modeling exception: cannot evaluate the function at this time.
     """
+
+
+class MissingRequiredInputsException(ModelingException):
+    """
+    ARIA modeling exception: Required inputs have been omitted
+    """
+
+
+class InputOfWrongTypeException(ModelingException):
+    """
+    ARIA modeling exception: Inputs of the wrong types have been provided
+    """
+
+
+class UndeclaredInputsException(ModelingException):
+    """
+    ARIA modeling exception: Undeclared inputs have been provided
+    """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/modeling/orchestration.py
----------------------------------------------------------------------
diff --git a/aria/modeling/orchestration.py b/aria/modeling/orchestration.py
index f0bd4b2..15abde4 100644
--- a/aria/modeling/orchestration.py
+++ b/aria/modeling/orchestration.py
@@ -55,9 +55,7 @@ class ExecutionBase(ModelMixin):
     __tablename__ = 'execution'
 
     __private_fields__ = ['service_fk',
-                          'service_name',
-                          'service_template',
-                          'service_template_name']
+                          'service_template']
 
     TERMINATED = 'terminated'
     FAILED = 'failed'
@@ -97,7 +95,6 @@ class ExecutionBase(ModelMixin):
     ended_at = Column(DateTime, nullable=True, index=True)
     error = Column(Text, nullable=True)
     is_system_workflow = Column(Boolean, nullable=False, default=False)
-    parameters = Column(Dict)
     status = Column(Enum(*STATES, name='execution_status'), default=PENDING)
     workflow_name = Column(Text)
 
@@ -121,6 +118,10 @@ class ExecutionBase(ModelMixin):
     def tasks(cls):
         return relationship.one_to_many(cls, 'task')
 
+    @declared_attr
+    def inputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+
     # region foreign keys
 
     @declared_attr
@@ -227,10 +228,7 @@ class TaskBase(ModelMixin):
     __private_fields__ = ['node_fk',
                           'relationship_fk',
                           'plugin_fk',
-                          'execution_fk',
-                          'node_name',
-                          'relationship_name',
-                          'execution_name']
+                          'execution_fk']
 
     PENDING = 'pending'
     RETRYING = 'retrying'

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/modeling/service_changes.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_changes.py b/aria/modeling/service_changes.py
index b1a75a2..1974424 100644
--- a/aria/modeling/service_changes.py
+++ b/aria/modeling/service_changes.py
@@ -45,9 +45,7 @@ class ServiceUpdateBase(ModelMixin):
     __tablename__ = 'service_update'
 
     __private_fields__ = ['service_fk',
-                          'execution_fk',
-                          'execution_name',
-                          'service_name']
+                          'execution_fk']
 
     created_at = Column(DateTime, nullable=False, index=True)
     service_plan = Column(Dict, nullable=False)
@@ -125,8 +123,7 @@ class ServiceUpdateStepBase(ModelMixin):
 
     __tablename__ = 'service_update_step'
 
-    __private_fields__ = ['service_update_fk',
-                          'service_update_name']
+    __private_fields__ = ['service_update_fk']
 
     _action_types = namedtuple('ACTION_TYPES', 'ADD, REMOVE, MODIFY')
     ACTION_TYPES = _action_types(ADD='add', REMOVE='remove', MODIFY='modify')
@@ -222,8 +219,7 @@ class ServiceModificationBase(ModelMixin):
 
     __tablename__ = 'service_modification'
 
-    __private_fields__ = ['service_fk',
-                          'service_name']
+    __private_fields__ = ['service_fk']
 
     STARTED = 'started'
     FINISHED = 'finished'

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/modeling/service_instance.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py
index e6c2b12..48615af 100644
--- a/aria/modeling/service_instance.py
+++ b/aria/modeling/service_instance.py
@@ -86,8 +86,7 @@ class ServiceBase(InstanceModelMixin):
     __tablename__ = 'service'
 
     __private_fields__ = ['substitution_fk',
-                          'service_template_fk',
-                          'service_template_name']
+                          'service_template_fk']
 
     # region foreign keys
 
@@ -371,8 +370,7 @@ class NodeBase(InstanceModelMixin):
     __private_fields__ = ['type_fk',
                           'host_fk',
                           'service_fk',
-                          'node_template_fk',
-                          'service_name']
+                          'node_template_fk']
 
     INITIAL = 'initial'
     CREATING = 'creating'
@@ -452,6 +450,11 @@ class NodeBase(InstanceModelMixin):
         """Required for use by SQLAlchemy queries"""
         return association_proxy('service', 'name')
 
+    @declared_attr
+    def node_template_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('node_template', 'name')
+
     # endregion
 
     # region one_to_one relationships
@@ -1159,9 +1162,7 @@ class RelationshipBase(InstanceModelMixin):
                           'target_node_fk',
                           'target_capability_fk',
                           'requirement_template_fk',
-                          'relationship_template_fk',
-                          'source_node_name',
-                          'target_node_name']
+                          'relationship_template_fk']
 
     # region foreign keys
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
index 8355521..86cf81a 100644
--- a/aria/modeling/service_template.py
+++ b/aria/modeling/service_template.py
@@ -287,7 +287,6 @@ class ServiceTemplateBase(TemplateModelMixin):
                                  updated_at=now,
                                  description=deepcopy_with_locators(self.description),
                                  service_template=self)
-        #service.name = '{0}_{1}'.format(self.name, service.id)
 
         context.modeling.instance = service
 
@@ -309,12 +308,6 @@ class ServiceTemplateBase(TemplateModelMixin):
         utils.instantiate_dict(self, service.inputs, self.inputs)
         utils.instantiate_dict(self, service.outputs, self.outputs)
 
-        for name, the_input in context.modeling.inputs.iteritems():
-            if name not in service.inputs:
-                context.validation.report('input "{0}" is not supported'.format(name))
-            else:
-                service.inputs[name].value = the_input
-
         return service
 
     def validate(self):
@@ -438,8 +431,7 @@ class NodeTemplateBase(TemplateModelMixin):
     __tablename__ = 'node_template'
 
     __private_fields__ = ['type_fk',
-                          'service_template_fk',
-                          'service_template_name']
+                          'service_template_fk']
 
     # region foreign_keys
 
@@ -462,6 +454,11 @@ class NodeTemplateBase(TemplateModelMixin):
         """Required for use by SQLAlchemy queries"""
         return association_proxy('service_template', 'name')
 
+    @declared_attr
+    def type_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('type', 'name')
+
     # endregion
 
     # region one_to_one relationships
@@ -548,6 +545,7 @@ class NodeTemplateBase(TemplateModelMixin):
                            type=self.type,
                            description=deepcopy_with_locators(self.description),
                            state=models.Node.INITIAL,
+                           runtime_properties={},
                            node_template=self)
         utils.instantiate_dict(node, node.properties, self.properties)
         utils.instantiate_dict(node, node.interfaces, self.interface_templates)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d8722826/aria/modeling/utils.py
----------------------------------------------------------------------
diff --git a/aria/modeling/utils.py b/aria/modeling/utils.py
index 0b4015c..f172a50 100644
--- a/aria/modeling/utils.py
+++ b/aria/modeling/utils.py
@@ -13,12 +13,95 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from json import JSONEncoder
+from StringIO import StringIO
+
+from . import exceptions
 from ..parser.consumption import ConsumptionContext
 from ..parser.exceptions import InvalidValueError
 from ..parser.presentation import Value
 from ..utils.collections import OrderedDict
 from ..utils.console import puts
-from .exceptions import CannotEvaluateFunctionException
+from ..utils.type import validate_value_type
+
+
+class ModelJSONEncoder(JSONEncoder):
+    def default(self, o):
+        from .mixins import ModelMixin
+        if isinstance(o, ModelMixin):
+            if hasattr(o, 'value'):
+                dict_to_return = o.to_dict(fields=('value',))
+                return dict_to_return['value']
+            else:
+                return o.to_dict()
+        else:
+            return JSONEncoder.default(self, o)
+
+
+def create_inputs(inputs, template_inputs):
+    """
+    :param inputs: key-value dict
+    :param template_inputs: parameter name to parameter object dict
+    :return: dict of parameter name to Parameter models
+    """
+    merged_inputs = _merge_and_validate_inputs(inputs, template_inputs)
+
+    from . import models
+    input_models = []
+    for input_name, input_val in merged_inputs.iteritems():
+        parameter = models.Parameter(
+            name=input_name,
+            type_name=template_inputs[input_name].type_name,
+            description=template_inputs[input_name].description,
+            value=input_val)
+        input_models.append(parameter)
+
+    return {input.name: input for input in input_models}
+
+
+def _merge_and_validate_inputs(inputs, template_inputs):
+    """
+    :param inputs: key-value dict
+    :param template_inputs: parameter name to parameter object dict
+    :return: the inputs merged with template defaults, after validation
+    """
+    merged_inputs = inputs.copy()
+
+    missing_inputs = []
+    wrong_type_inputs = {}
+    for input_name, input_template in template_inputs.iteritems():
+        if input_name not in inputs:
+            if input_template.value is not None:
+                merged_inputs[input_name] = input_template.value  # apply default value
+            else:
+                missing_inputs.append(input_name)
+        else:
+            # Validate input type
+            try:
+                validate_value_type(inputs[input_name], input_template.type_name)
+            except ValueError:
+                wrong_type_inputs[input_name] = input_template.type_name
+
+    if missing_inputs:
+        raise exceptions.MissingRequiredInputsException(
+            'Required inputs {0} have not been specified - expected inputs: {1}'
+            .format(missing_inputs, template_inputs.keys()))
+
+    if wrong_type_inputs:
+        error_message = StringIO()
+        for param_name, param_type in wrong_type_inputs.iteritems():
+            error_message.write('Input "{0}" must be of type {1}\n'.
+                                format(param_name, param_type))
+        raise exceptions.InputOfWrongTypeException(error_message.getvalue())
+
+    undeclared_inputs = [input_name for input_name in inputs.keys()
+                         if input_name not in template_inputs]
+    if undeclared_inputs:
+        raise exceptions.UndeclaredInputsException(
+            'Undeclared inputs have been specified: {0}; Expected inputs: {1}'
+            .format(undeclared_inputs, template_inputs.keys()))
+
+    return merged_inputs
 
 
 def coerce_value(container, value, report_issues=False):
@@ -35,7 +118,7 @@ def coerce_value(container, value, report_issues=False):
         try:
             value = value._evaluate(context, container)
             value = coerce_value(container, value, report_issues)
-        except CannotEvaluateFunctionException:
+        except exceptions.CannotEvaluateFunctionException:
             pass
         except InvalidValueError as e:
             if report_issues:
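
A standalone sketch of the validation failures that _merge_and_validate_inputs()
guards against (plain dicts stand in for the template's Parameter objects; the
wrong-type case is omitted for brevity):

    declared = {'port': 8080, 'host': None}      # template inputs: name -> default (None = required)
    provided = {'port': '80', 'color': 'blue'}   # user-supplied inputs

    missing = [name for name, default in declared.items()
               if name not in provided and default is None]
    undeclared = [name for name in provided if name not in declared]

    print(missing)      # ['host']  -> MissingRequiredInputsException
    print(undeclared)   # ['color'] -> UndeclaredInputsException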


[04/24] incubator-ariatosca git commit: Refactor handling storage exceptions stemming from unique names

Posted by ra...@apache.org.
Refactor handling storage exceptions stemming from unique names


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/375228b6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/375228b6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/375228b6

Branch: refs/heads/ARIA-48-aria-cli
Commit: 375228b6a364727591f1319da4d89f6e38543fb9
Parents: 38d58d4
Author: Avia Efrat <av...@gigaspaces.com>
Authored: Mon Apr 3 15:44:00 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/cli/commands/service_templates.py | 13 ++++---------
 aria/cli/commands/services.py          | 11 +++--------
 aria/cli/constants.py                  |  4 ----
 aria/cli/utils.py                      |  9 +++++++++
 4 files changed, 16 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/375228b6/aria/cli/commands/service_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/service_templates.py b/aria/cli/commands/service_templates.py
index 79f8012..6c6224b 100644
--- a/aria/cli/commands/service_templates.py
+++ b/aria/cli/commands/service_templates.py
@@ -21,9 +21,9 @@ from .. import utils
 from .. import csar
 from .. import service_template_utils
 from ..cli import aria
-from ..constants import TWO_MODELS_WITH_THE_SAME_NAME_ERROR_TEMPLATE
 from ..table import print_data
 from ..exceptions import AriaCliError
+from ..utils import handle_storage_exception
 from ...core import Core
 from ...exceptions import AriaException
 from ...storage import exceptions as storage_exceptions
@@ -119,14 +119,9 @@ def store(service_template_path, service_template_name, model_storage, resource_
         core.create_service_template(service_template_path,
                                      os.path.dirname(service_template_path),
                                      service_template_name)
-    except storage_exceptions.StorageError:
-        logger.info(TWO_MODELS_WITH_THE_SAME_NAME_ERROR_TEMPLATE.format(
-            model_class='service template',
-            name=service_template_name))
-        raise
-
-    else:
-        logger.info('Service template stored')
+    except storage_exceptions.StorageError as e:
+        handle_storage_exception(e, 'service template', service_template_name)
+    logger.info('Service template stored')
 
 
 @service_templates.command(name='delete',

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/375228b6/aria/cli/commands/services.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/services.py b/aria/cli/commands/services.py
index 28fb499..e73c43b 100644
--- a/aria/cli/commands/services.py
+++ b/aria/cli/commands/services.py
@@ -19,10 +19,9 @@ from StringIO import StringIO
 
 from . import service_templates
 from ..cli import aria, helptexts
-from ..constants import TWO_MODELS_WITH_THE_SAME_NAME_ERROR_TEMPLATE
 from ..exceptions import AriaCliError
 from ..table import print_data
-from ..utils import storage_sort_param
+from ..utils import storage_sort_param, handle_storage_exception
 from ...core import Core
 from ...exceptions import AriaException
 from ...storage import exceptions as storage_exceptions
@@ -99,16 +98,12 @@ def create(service_template_name,
     try:
         core = Core(model_storage, resource_storage, plugin_manager)
         service = core.create_service(service_template_name, inputs, service_name)
-    except storage_exceptions.StorageError:
-        logger.info(TWO_MODELS_WITH_THE_SAME_NAME_ERROR_TEMPLATE.format(
-            model_class='service',
-            name=service_name))
-        raise
+    except storage_exceptions.StorageError as e:
+        handle_storage_exception(e, 'service', service_name)
     except AriaException as e:
         logger.info(str(e))
         service_templates.print_service_template_inputs(model_storage, service_template_name)
         raise AriaCliError(str(e))
-
     logger.info("Service created. The service's name is {0}".format(service.name))
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/375228b6/aria/cli/constants.py
----------------------------------------------------------------------
diff --git a/aria/cli/constants.py b/aria/cli/constants.py
index fdd37fc..67c094d 100644
--- a/aria/cli/constants.py
+++ b/aria/cli/constants.py
@@ -16,7 +16,3 @@
 
 SAMPLE_SERVICE_TEMPLATE_FILENAME = 'service_template.yaml'
 HELP_TEXT_COLUMN_BUFFER = 5
-TWO_MODELS_WITH_THE_SAME_NAME_ERROR_TEMPLATE = """
-Could not store {model_class} `{name}`
-There already a exists a {model_class} with the same name
-"""

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/375228b6/aria/cli/utils.py
----------------------------------------------------------------------
diff --git a/aria/cli/utils.py b/aria/cli/utils.py
index 3b68729..fff8e3a 100644
--- a/aria/cli/utils.py
+++ b/aria/cli/utils.py
@@ -150,3 +150,12 @@ def generate_progress_handler(file_path, action='', max_bar_length=80):
             sys.stdout.write('\n')
 
     return print_progress
+
+
+def handle_storage_exception(e, model_class, name):
+    if 'UNIQUE constraint failed' in e.msg:
+        msg = 'Could not store {model_class} `{name}`\n' \
+              'There already exists a {model_class} with the same name' \
+              .format(model_class=model_class, name=name)
+        raise AriaCliError(msg)
+    raise
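
A standalone sketch of the check handle_storage_exception() performs; the
exception class and message text below are illustrative stand-ins for what
aria.storage raises on a unique-name collision:

    class FakeStorageError(Exception):
        def __init__(self, msg):
            super(FakeStorageError, self).__init__(msg)
            self.msg = msg

    error = FakeStorageError('UNIQUE constraint failed: service_template.name')
    if 'UNIQUE constraint failed' in error.msg:
        print('Could not store service template `my-template`\n'
              'There already exists a service template with the same name')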


[06/24] incubator-ariatosca git commit: fixed storage exceptions in CLI

Posted by ra...@apache.org.
fixed storage exceptions in CLI


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/c40f3615
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/c40f3615
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/c40f3615

Branch: refs/heads/ARIA-48-aria-cli
Commit: c40f3615a21ce83fa98fbeec2c1b617959e7bba2
Parents: 349d4d0
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Mon Apr 3 14:58:17 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/cli/commands/executions.py     | 14 +++++---------
 aria/cli/commands/logs.py           | 16 +++++++++++++---
 aria/cli/commands/node_templates.py |  6 +++---
 aria/cli/commands/nodes.py          |  6 +++---
 aria/cli/commands/services.py       | 23 ++++++++++++++---------
 aria/storage/exceptions.py          |  4 ++++
 aria/storage/sql_mapi.py            |  8 ++++----
 7 files changed, 46 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/c40f3615/aria/cli/commands/executions.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/executions.py b/aria/cli/commands/executions.py
index d767fa1..fd47eb3 100644
--- a/aria/cli/commands/executions.py
+++ b/aria/cli/commands/executions.py
@@ -13,15 +13,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import json
-import time
-
 from .. import utils
 from ..table import print_data
-from ..cli import aria, helptexts
+from ..cli import aria
 from ..exceptions import AriaCliError
 from ...modeling.models import Execution
-from ...storage.exceptions import StorageError
+from ...storage import exceptions as storage_exceptions
 from ...orchestrator.workflow_runner import WorkflowRunner
 from ...utils import formatting
 from ...utils import threading
@@ -52,7 +49,7 @@ def show(execution_id, model_storage, logger):
     try:
         logger.info('Showing execution {0}'.format(execution_id))
         execution = model_storage.execution.get(execution_id)
-    except StorageError:
+    except storage_exceptions.NotFoundError:
         raise AriaCliError('Execution {0} not found'.format(execution_id))
 
     print_data(EXECUTION_COLUMNS, execution.to_dict(), 'Execution:', max_width=50)
@@ -94,9 +91,8 @@ def list(service_name,
         try:
             service = model_storage.service.get_by_name(service_name)
             filters = dict(service=service)
-        except StorageError:
-            raise AriaCliError('Service {0} does not exist'.format(
-                service_name))
+        except storage_exceptions.NotFoundError:
+            raise AriaCliError('Service {0} does not exist'.format(service_name))
     else:
         logger.info('Listing all executions...')
         filters = {}

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/c40f3615/aria/cli/commands/logs.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/logs.py b/aria/cli/commands/logs.py
index 3662063..6890fb4 100644
--- a/aria/cli/commands/logs.py
+++ b/aria/cli/commands/logs.py
@@ -15,6 +15,8 @@
 
 from .. import utils
 from ..cli import aria
+from ..exceptions import AriaCliError
+from ...storage import exceptions as storage_exceptions
 
 
 @aria.group(name='logs')
@@ -40,8 +42,12 @@ def list(execution_id,
     """
     logger.info('Listing logs for execution id {0}'.format(execution_id))
     # events_logger = get_events_logger(json_output)
-    logs = model_storage.log.list(filters=dict(execution_fk=execution_id),
-                                  sort=utils.storage_sort_param('created_at', False))
+    try:
+        logs = model_storage.log.list(filters=dict(execution_fk=execution_id),
+                                      sort=utils.storage_sort_param('created_at', False))
+    except storage_exceptions.NotFoundError:
+        raise AriaCliError('Execution {0} does not exist'.format(execution_id))
+
     # TODO: print logs nicely
     if logs:
         for log in logs:
@@ -62,7 +68,11 @@ def delete(execution_id, model_storage, logger):
     `EXECUTION_ID` is the execution logs to delete.
     """
     logger.info('Deleting logs for execution id {0}'.format(execution_id))
-    logs = model_storage.log.list(filters=dict(execution_fk=execution_id))
+    try:
+        logs = model_storage.log.list(filters=dict(execution_fk=execution_id))
+    except storage_exceptions.NotFoundError:
+        raise AriaCliError('Execution {0} does not exist'.format(execution_id))
+
     for log in logs:
         model_storage.log.delete(log)
     logger.info('Deleted logs for execution id {0}'.format(execution_id))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/c40f3615/aria/cli/commands/node_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/node_templates.py b/aria/cli/commands/node_templates.py
index 7365f0c..0910ded 100644
--- a/aria/cli/commands/node_templates.py
+++ b/aria/cli/commands/node_templates.py
@@ -17,7 +17,7 @@ from ..table import print_data
 from .. import utils
 from ..cli import aria
 from ..exceptions import AriaCliError
-from ...storage.exceptions import StorageError
+from ...storage import exceptions as storage_exceptions
 
 
 NODE_TEMPLATE_COLUMNS = ['id', 'name', 'description', 'service_template_name', 'type_name']
@@ -49,7 +49,7 @@ def show(node_template_id, model_storage, logger):
     try:
         #TODO get node template of a specific service template instead?
         node_template = model_storage.node_template.get(node_template_id)
-    except StorageError:
+    except storage_exceptions.NotFoundError:
         raise AriaCliError('Node template {0} was not found'.format(node_template_id))
 
     print_data(NODE_TEMPLATE_COLUMNS, node_template.to_dict(), 'Node template:', max_width=50)
@@ -91,7 +91,7 @@ def list(service_template_name, sort_by, descending, model_storage, logger):
         try:
             service_template = model_storage.service_template.get_by_name(service_template_name)
             filters = dict(service_template=service_template)
-        except StorageError:
+        except storage_exceptions.NotFoundError:
             raise AriaCliError('Service template {0} does not exist'.format(service_template_name))
     else:
         logger.info('Listing all node templates...')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/c40f3615/aria/cli/commands/nodes.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/nodes.py b/aria/cli/commands/nodes.py
index f38c917..8600ff2 100644
--- a/aria/cli/commands/nodes.py
+++ b/aria/cli/commands/nodes.py
@@ -17,7 +17,7 @@ from .. import utils
 from ..cli import aria
 from ..table import print_data
 from ..exceptions import AriaCliError
-from ...storage.exceptions import StorageError
+from ...storage import exceptions as storage_exceptions
 
 
 NODE_COLUMNS = ['id', 'name', 'service_name', 'node_template_name', 'state']
@@ -45,7 +45,7 @@ def show(node_id, model_storage, logger):
     logger.info('Showing node {0}'.format(node_id))
     try:
         node = model_storage.node.get(node_id)
-    except StorageError:
+    except storage_exceptions.NotFoundError:
         raise AriaCliError('Node {0} not found'.format(node_id))
 
     print_data(NODE_COLUMNS, node.to_dict(), 'Node:', 50)
@@ -83,7 +83,7 @@ def list(service_name,
         try:
             service = model_storage.service.get_by_name(service_name)
             filters = dict(service=service)
-        except StorageError:
+        except storage_exceptions.NotFoundError:
             raise AriaCliError('Service {0} does not exist'.format(service_name))
     else:
         logger.info('Listing all nodes...')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/c40f3615/aria/cli/commands/services.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/services.py b/aria/cli/commands/services.py
index ae04d7e..00b093c 100644
--- a/aria/cli/commands/services.py
+++ b/aria/cli/commands/services.py
@@ -25,7 +25,7 @@ from ..table import print_data
 from ..utils import storage_sort_param
 from ...core import Core
 from ...exceptions import AriaException
-from ...storage.exceptions import StorageError
+from ...storage import exceptions as storage_exceptions
 
 
 SERVICE_COLUMNS = ['id', 'name', 'service_template_name', 'created_at', 'updated_at']
@@ -40,27 +40,30 @@ def services():
 
 
 @services.command(name='list', short_help='List services')
-@aria.options.service_template_id()
+@aria.options.service_template_name()
 @aria.options.sort_by()
 @aria.options.descending
 @aria.options.verbose()
 @aria.pass_model_storage
 @aria.pass_logger
-def list(service_template_id,
+def list(service_template_name,
          sort_by,
          descending,
          model_storage,
          logger):
     """List services
 
-    If `--service-template-id` is provided, list services for that service template.
+    If `--service-template-name` is provided, list services for that service template.
     Otherwise, list services for all service templates.
     """
-    if service_template_id:
+    if service_template_name:
         logger.info('Listing services for service template {0}...'.format(
-            service_template_id))
-        service_template = model_storage.service_template.get(service_template_id)
-        filters = dict(service_template=service_template)
+            service_template_name))
+        try:
+            service_template = model_storage.service_template.get_by_name(service_template_name)
+            filters = dict(service_template=service_template)
+        except storage_exceptions.NotFoundError:
+            raise AriaCliError('Service template {0} does not exist'.format(service_template_name))
     else:
         logger.info('Listing all services...')
         filters = {}
@@ -99,7 +102,9 @@ def create(service_template_name,
     try:
         core = Core(model_storage, resource_storage, plugin_manager)
         service = core.create_service(service_template_name, inputs, service_name)
-    except StorageError:
+    except storage_exceptions.NotFoundError:
+        raise AriaCliError('Service template {0} does not exist'.format(service_template_name))
+    except storage_exceptions.StorageError:
         logger.info(TWO_MODELS_WITH_THE_SAME_NAME_ERROR_TEMPLATE.format(
             model_class='service',
             name=service_name))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/c40f3615/aria/storage/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/storage/exceptions.py b/aria/storage/exceptions.py
index f982f63..3f0ecec 100644
--- a/aria/storage/exceptions.py
+++ b/aria/storage/exceptions.py
@@ -23,3 +23,7 @@ class StorageError(exceptions.AriaError):
     General storage exception
     """
     pass
+
+
+class NotFoundError(StorageError):
+    pass
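
The new NotFoundError subclasses StorageError, so callers that want to treat "missing" differently from other storage failures must catch it first. A minimal sketch of that ordering, reusing the names from the diffs above (the wrapper function itself is illustrative, not part of the CLI):

    from aria.cli.exceptions import AriaCliError
    from aria.storage import exceptions as storage_exceptions

    def _get_service_or_fail(model_storage, service_name):
        # Illustrative helper: the except clauses go from narrow to broad,
        # because NotFoundError is itself a StorageError.
        try:
            return model_storage.service.get_by_name(service_name)
        except storage_exceptions.NotFoundError:
            raise AriaCliError('Service {0} does not exist'.format(service_name))
        except storage_exceptions.StorageError as e:
            # Any other storage failure (duplicate names, connection problems, ...)
            raise AriaCliError('Storage error: {0}'.format(e))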

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/c40f3615/aria/storage/sql_mapi.py
----------------------------------------------------------------------
diff --git a/aria/storage/sql_mapi.py b/aria/storage/sql_mapi.py
index 59e1896..40c5410 100644
--- a/aria/storage/sql_mapi.py
+++ b/aria/storage/sql_mapi.py
@@ -52,7 +52,7 @@ class SQLAlchemyModelAPI(api.ModelAPI):
         result = query.first()
 
         if not result:
-            raise exceptions.StorageError(
+            raise exceptions.NotFoundError(
                 'Requested `{0}` with ID `{1}` was not found'
                 .format(self.model_cls.__name__, entry_id)
             )
@@ -62,13 +62,13 @@ class SQLAlchemyModelAPI(api.ModelAPI):
         assert hasattr(self.model_cls, 'name')
         result = self.list(include=include, filters={'name': entry_name})
         if not result:
-            raise exceptions.StorageError(
-                'Requested {0} with NAME `{1}` was not found'
+            raise exceptions.NotFoundError(
+                'Requested {0} with name `{1}` was not found'
                 .format(self.model_cls.__name__, entry_name)
             )
         elif len(result) > 1:
             raise exceptions.StorageError(
-                'Requested {0} with NAME `{1}` returned more than 1 value'
+                'Requested {0} with name `{1}` returned more than 1 value'
                 .format(self.model_cls.__name__, entry_name)
             )
         else:
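
With the two failure modes separated, callers of get_by_name can react to each one on its own. A short sketch (the helper and its return convention are illustrative only):

    from aria.storage import exceptions as storage_exceptions

    def try_get_by_name(model_api, name):
        # Illustrative helper: None means "no such record"; an ambiguous name
        # (more than one match) still surfaces as the broader StorageError.
        try:
            return model_api.get_by_name(name)
        except storage_exceptions.NotFoundError:
            return None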


[22/24] incubator-ariatosca git commit: tiny fix to execution error printing

Posted by ra...@apache.org.
tiny fix to execution error printing


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/6016e146
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/6016e146
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/6016e146

Branch: refs/heads/ARIA-48-aria-cli
Commit: 6016e14607279d67c9e061e610fe89225bf4946e
Parents: c562426
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Tue Apr 4 12:07:32 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/cli/commands/executions.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6016e146/aria/cli/commands/executions.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/executions.py b/aria/cli/commands/executions.py
index 82ee51a..635ae63 100644
--- a/aria/cli/commands/executions.py
+++ b/aria/cli/commands/executions.py
@@ -150,7 +150,7 @@ def start(workflow_name,
 
     execution = workflow_runner.execution
     logger.info('Execution has ended with "{0}" status'.format(execution.status))
-    if execution.status == Execution.FAILED:
+    if execution.status == Execution.FAILED and execution.error:
         logger.info('Execution error:\n{0}'.format(execution.error))
 
     if dry:


[23/24] incubator-ariatosca git commit: Fix handling creating a service with an existing name

Posted by ra...@apache.org.
Fix handling creating a service with an existing name


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/31a0c794
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/31a0c794
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/31a0c794

Branch: refs/heads/ARIA-48-aria-cli
Commit: 31a0c794f1991342b75f018c0d071efdd128376e
Parents: 6016e14
Author: Avia Efrat <av...@gigaspaces.com>
Authored: Tue Apr 4 12:18:01 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/core.py | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/31a0c794/aria/core.py
----------------------------------------------------------------------
diff --git a/aria/core.py b/aria/core.py
index 1fd3cf7..057f716 100644
--- a/aria/core.py
+++ b/aria/core.py
@@ -21,6 +21,7 @@ from .parser.consumption import (
     Validate,
     ServiceTemplate)
 from .parser.loading.location import UriLocation
+from .storage import exceptions as storage_exceptions
 
 
 class Core(object):
@@ -67,6 +68,7 @@ class Core(object):
         self.resource_storage.service_template.delete(entry_id=str(service_template.id))
 
     def create_service(self, service_template_id, inputs, service_name=None):
+
         service_template = self.model_storage.service_template.get(service_template_id)
 
         # creating an empty ConsumptionContext, initiating a threadlocal context
@@ -74,8 +76,17 @@ class Core(object):
         with self.model_storage._all_api_kwargs['session'].no_autoflush:
             service = service_template.instantiate(None, inputs)
 
-        service.name = service_name or '{0}_{1}'.format(service_template.name, service.id)
+        # If the user didn't enter a name for this service, we auto-generate one.
+        # To keep the generated name unique yet simple, we append the service's unique id
+        # to the service template's name. Since the service is not in storage yet, we first
+        # put it there, then pull out its id.
         self.model_storage.service.put(service)
+        service.name = service_name or '{0}_{1}'.format(service_template.name, service.id)
+        try:
+            self.model_storage.service.update(service)
+        except storage_exceptions.StorageError:
+            self.model_storage.service.delete(service)
+            raise
         return service
 
     def delete_service(self, service_id, force=False):
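
The comment in the diff describes the flow: persist the service so storage assigns it an id, derive the default name from that id, then roll the row back if updating the name collides with an existing service. Condensed into one place (the helper is illustrative; put/update/delete are used exactly as in the diff):

    from aria.storage import exceptions as storage_exceptions

    def _name_and_store(model_storage, service, template_name, requested_name=None):
        # Persist first so the storage layer assigns service.id.
        model_storage.service.put(service)
        # Default name: "<template name>_<service id>".
        service.name = requested_name or '{0}_{1}'.format(template_name, service.id)
        try:
            model_storage.service.update(service)
        except storage_exceptions.StorageError:
            # Name collision (or any other storage failure): undo the put so
            # no half-initialized service row is left behind, then re-raise.
            model_storage.service.delete(service)
            raise
        return service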


[14/24] incubator-ariatosca git commit: small fixes after rebase from master

Posted by ra...@apache.org.
small fixes after rebase from master


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/2ffefc49
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/2ffefc49
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/2ffefc49

Branch: refs/heads/ARIA-48-aria-cli
Commit: 2ffefc498812a8c3f5c02e994d0333296ec2e180
Parents: 060f8cc
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Sun Apr 2 17:53:13 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/cli/commands/node_templates.py | 2 +-
 aria/core.py                        | 8 +++-----
 2 files changed, 4 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2ffefc49/aria/cli/commands/node_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/node_templates.py b/aria/cli/commands/node_templates.py
index 5614aee..7365f0c 100644
--- a/aria/cli/commands/node_templates.py
+++ b/aria/cli/commands/node_templates.py
@@ -62,7 +62,7 @@ def show(node_template_id, model_storage, logger):
         logger.info('\tNo properties')
 
     # print node IDs
-    nodes = node_template.nodes.all()
+    nodes = node_template.nodes
     logger.info('Nodes:')
     if nodes:
         for node in nodes:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2ffefc49/aria/core.py
----------------------------------------------------------------------
diff --git a/aria/core.py b/aria/core.py
index 96a967f..f2dfb09 100644
--- a/aria/core.py
+++ b/aria/core.py
@@ -61,7 +61,7 @@ class Core(object):
 
     def delete_service_template(self, service_template_id):
         service_template = self.model_storage.service_template.get(service_template_id)
-        if service_template.services.all():
+        if service_template.services:
             raise exceptions.DependentServicesError(
                 "Can't delete service template {0} - Service template has existing services")
 
@@ -89,16 +89,14 @@ class Core(object):
     def delete_service(self, service_name, force=False):
         service = self.model_storage.service.get_by_name(service_name)
 
-        active_executions = [e for e in service.executions
-                             if e.status not in models.Execution.ACTIVE_STATES]
+        active_executions = [e for e in service.executions if e.is_active()]
         if active_executions:
             raise exceptions.DependentActiveExecutionsError(
                 "Can't delete service {0} - there is an active execution for this service. "
                 "Active execution id: {1}".format(service_name, active_executions[0].id))
 
         if not force:
-            available_nodes = [n for n in service.nodes.values()
-                               if n.state not in ('deleted', 'errored')]
+            available_nodes = [n for n in service.nodes.values() if n.is_available()]
             if available_nodes:
                 raise exceptions.DependentAvailableNodesError(
                     "Can't delete service {0} - there are available nodes for this service. "


[15/24] incubator-ariatosca git commit: fixed plugins command in CLI

Posted by ra...@apache.org.
fixed plugins command in CLI


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/bf0adb54
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/bf0adb54
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/bf0adb54

Branch: refs/heads/ARIA-48-aria-cli
Commit: bf0adb540618d264ffa55267792b7818c6cedc92
Parents: 2214a98
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Sun Apr 2 22:40:15 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/cli/commands/plugins.py | 80 +++++++++++++++++----------------------
 aria/cli/env.py              |  2 +-
 aria/orchestrator/plugin.py  |  3 +-
 3 files changed, 36 insertions(+), 49 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/bf0adb54/aria/cli/commands/plugins.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/plugins.py b/aria/cli/commands/plugins.py
index d31aa99..4d568d7 100644
--- a/aria/cli/commands/plugins.py
+++ b/aria/cli/commands/plugins.py
@@ -13,18 +13,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import tarfile
+import zipfile
 
 from ..table import print_data
-from ..cli import helptexts, aria
+from ..cli import aria
 from ..exceptions import AriaCliError
 from ..utils import storage_sort_param
 
 
-PLUGIN_COLUMNS = ['id', 'package_name', 'package_version', 'distribution',
-                  'supported_platform', 'distribution_release', 'uploaded_at']
-EXCLUDED_COLUMNS = ['archive_name', 'distribution_version', 'excluded_wheels',
-                    'package_source', 'supported_py_versions', 'wheels']
+PLUGIN_COLUMNS = ['id', 'package_name', 'package_version', 'supported_platform',
+                  'distribution', 'distribution_release', 'uploaded_at']
 
 
 @aria.group(name='plugins')
@@ -45,24 +43,23 @@ def validate(plugin_path, logger):
 
     This will try to validate the plugin's archive is not corrupted.
     A valid plugin is a wagon (http://github.com/cloudify-cosomo/wagon)
-    in the tar.gz format (suffix may also be .wgn).
+    in the zip format (suffix may also be .wgn).
 
     `PLUGIN_PATH` is the path to wagon archive to validate.
     """
     logger.info('Validating plugin {0}...'.format(plugin_path))
 
-    if not tarfile.is_tarfile(plugin_path):
+    if not zipfile.is_zipfile(plugin_path):
         raise AriaCliError(
             'Archive {0} is of an unsupported type. Only '
-            'tar.gz/wgn is allowed'.format(plugin_path))
-    with tarfile.open(plugin_path) as tar:
-        tar_members = tar.getmembers()
-        package_json_path = "{0}/{1}".format(
-            tar_members[0].name, 'package.json')
-        # TODO: Find a better way to validate a plugin.
+            'zip/wgn is allowed'.format(plugin_path))
+    with zipfile.ZipFile(plugin_path, 'r') as zip_file:
+        infos = zip_file.infolist()
         try:
-            tar.getmember(package_json_path)
-        except KeyError:
+            package_name = infos[0].filename[:infos[0].filename.index('/')]
+            package_json_path = "{0}/{1}".format(package_name, 'package.json')
+            zip_file.getinfo(package_json_path)
+        except (KeyError, ValueError, IndexError):
             raise AriaCliError(
                 'Failed to validate plugin {0} '
                 '(package.json was not found in archive)'.format(plugin_path))
@@ -70,20 +67,20 @@ def validate(plugin_path, logger):
     logger.info('Plugin validated successfully')
 
 
-@plugins.command(name='delete',
-                 short_help='Delete a plugin')
-@aria.argument('plugin-id')
-@aria.options.verbose()
-@aria.pass_model_storage
-@aria.pass_logger
-def delete(plugin_id, model_storage, logger):
-    """Delete a plugin
-
-    `PLUGIN_ID` is the id of the plugin to delete.
-    """
-    logger.info('Deleting plugin {0}...'.format(plugin_id))
-    model_storage.plugin.delete(plugin_id=plugin_id)
-    logger.info('Plugin deleted')
+# @plugins.command(name='delete',
+#                  short_help='Delete a plugin')
+# @aria.argument('plugin-id')
+# @aria.options.verbose()
+# @aria.pass_model_storage
+# @aria.pass_logger
+# def delete(plugin_id, model_storage, logger):
+#     """Delete a plugin
+#
+#     `PLUGIN_ID` is the id of the plugin to delete.
+#     """
+#     logger.info('Deleting plugin {0}...'.format(plugin_id))
+#     model_storage.plugin.delete(plugin_id=plugin_id)
+#     logger.info('Plugin deleted')
 
 
 @plugins.command(name='install',
@@ -91,13 +88,14 @@ def delete(plugin_id, model_storage, logger):
 @aria.argument('plugin-path')
 @aria.options.verbose()
 @aria.pass_context
+@aria.pass_plugin_manager
 @aria.pass_logger
-def install(ctx, plugin_path, logger):
+def install(ctx, plugin_path, plugin_manager, logger):
     """Install a plugin
 
     `PLUGIN_PATH` is the path to wagon archive to install.
     """
-    ctx.invoke(validate, plugin_path=plugin_path)
+    # ctx.invoke(validate, plugin_path=plugin_path)
     logger.info('Installing plugin {0}...'.format(plugin_path))
     plugin = plugin_manager.install(plugin_path)
     logger.info("Plugin installed. The plugin's id is {0}".format(plugin.id))
@@ -116,8 +114,7 @@ def show(plugin_id, model_storage, logger):
     """
     logger.info('Showing plugin {0}...'.format(plugin_id))
     plugin = model_storage.plugin.get(plugin_id)
-    _transform_plugin_response(plugin)
-    print_data(PLUGIN_COLUMNS, plugin, 'Plugin:')
+    print_data(PLUGIN_COLUMNS, plugin.to_dict(), 'Plugin:')
 
 
 @plugins.command(name='list',
@@ -131,15 +128,6 @@ def list(sort_by, descending, model_storage, logger):
     """List all plugins on the manager
     """
     logger.info('Listing all plugins...')
-    plugins_list = model_storage.plugin.list(
-        sort=storage_sort_param(sort_by, descending))
-    for plugin in plugins_list:
-        _transform_plugin_response(plugin)
-    print_data(PLUGIN_COLUMNS, plugins_list, 'Plugins:')
-
-
-def _transform_plugin_response(plugin):
-    """Remove any columns that shouldn't be displayed in the CLI
-    """
-    for column in EXCLUDED_COLUMNS:
-        plugin.pop(column, None)
+    plugins = [p.to_dict() for p in model_storage.plugin.list(
+        sort=storage_sort_param(sort_by, descending))]
+    print_data(PLUGIN_COLUMNS, plugins, 'Plugins:')
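
The validate command above boils down to two checks: the archive must be a zip, and its single top-level directory must contain a package.json. The same logic as a standalone predicate (function name and boolean return are illustrative):

    import zipfile

    def looks_like_wagon(plugin_path):
        """Return True if the archive resembles a wagon: a zip with package.json."""
        if not zipfile.is_zipfile(plugin_path):
            return False
        with zipfile.ZipFile(plugin_path, 'r') as zip_file:
            infos = zip_file.infolist()
            try:
                # The top-level directory name is everything before the first '/'.
                top_level = infos[0].filename[:infos[0].filename.index('/')]
                zip_file.getinfo('{0}/package.json'.format(top_level))
            except (KeyError, ValueError, IndexError):
                return False
        return True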

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/bf0adb54/aria/cli/env.py
----------------------------------------------------------------------
diff --git a/aria/cli/env.py b/aria/cli/env.py
index 5d34141..6d02cf5 100644
--- a/aria/cli/env.py
+++ b/aria/cli/env.py
@@ -109,7 +109,7 @@ class Environment(object):
         if not os.path.exists(self._plugins_dir):
             os.makedirs(self._plugins_dir)
 
-        return PluginManager(self._model_storage, self._plugins_dir)
+        return PluginManager(self.model_storage, self._plugins_dir)
 
 
 env = Environment(os.path.join(

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/bf0adb54/aria/orchestrator/plugin.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/plugin.py b/aria/orchestrator/plugin.py
index d815754..d526e9c 100644
--- a/aria/orchestrator/plugin.py
+++ b/aria/orchestrator/plugin.py
@@ -46,8 +46,7 @@ class PluginManager(object):
             archive_name=metadata['archive_name'],
             supported_platform=metadata['supported_platform'],
             supported_py_versions=metadata['supported_python_versions'],
-            # Remove suffix colon after upgrading wagon to > 0.5.0
-            distribution=os_props.get('distribution:') or os_props.get('distribution'),
+            distribution=os_props.get('distribution'),
             distribution_release=os_props['distribution_version'],
             distribution_version=os_props['distribution_release'],
             package_name=metadata['package_name'],


[05/24] incubator-ariatosca git commit: Only print description in `service-templates` show if one exists

Posted by ra...@apache.org.
Only print description in `service-templates` show if one exists


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/298885fb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/298885fb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/298885fb

Branch: refs/heads/ARIA-48-aria-cli
Commit: 298885fbfdb6abdf68f643b579b0aca90c75dfd7
Parents: 2ffefc4
Author: Avia Efrat <av...@gigaspaces.com>
Authored: Sun Apr 2 18:17:31 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/cli/commands/service_templates.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/298885fb/aria/cli/commands/service_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/service_templates.py b/aria/cli/commands/service_templates.py
index 823a6e8..af6252a 100644
--- a/aria/cli/commands/service_templates.py
+++ b/aria/cli/commands/service_templates.py
@@ -61,8 +61,9 @@ def show(service_template_id, model_storage, logger):
     columns = SERVICE_TEMPLATE_COLUMNS + ['#services']
     print_data(columns, service_template_dict, 'Service-template:', max_width=50)
 
-    logger.info('Description:')
-    logger.info('{0}\n'.format(service_template_dict['description'].encode('UTF-8') or ''))
+    if service_template_dict['description'] is not None:
+        logger.info('Description:')
+        logger.info('{0}\n'.format(service_template_dict['description'].encode('UTF-8') or ''))
 
     logger.info('Existing services:')
     logger.info('{0}\n'.format(json.dumps([d['name'] for d in services])))


[13/24] incubator-ariatosca git commit: before adding any handlers, we check that there are no handlers at the moment.

Posted by ra...@apache.org.
before adding any handlers, we check that there are no handlers at the moment.


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/2214a982
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/2214a982
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/2214a982

Branch: refs/heads/ARIA-48-aria-cli
Commit: 2214a982ee0fabd9e4eac72e6bcf7fe2169a67b9
Parents: 298885f
Author: max-orlov <ma...@gigaspaces.com>
Authored: Sun Apr 2 18:29:43 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Tue Apr 4 13:20:46 2017 +0300

----------------------------------------------------------------------
 aria/orchestrator/context/common.py   | 13 ++++++-------
 aria/orchestrator/context/workflow.py |  1 -
 2 files changed, 6 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2214a982/aria/orchestrator/context/common.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/common.py b/aria/orchestrator/context/common.py
index f631e79..2e33d77 100644
--- a/aria/orchestrator/context/common.py
+++ b/aria/orchestrator/context/common.py
@@ -19,7 +19,6 @@ A common context for both workflow and operation
 
 import logging
 from contextlib import contextmanager
-from datetime import datetime
 from functools import partial
 
 import jinja2
@@ -68,13 +67,13 @@ class BaseContext(object):
         self._workdir = workdir
         self.logger = None
 
-    def _register_logger(self, logger_name=None, level=None, task_id=None):
-        self.logger = self.PrefixedLogger(logging.getLogger(logger_name or self.__class__.__name__),
-                                          self.logging_id,
-                                          task_id=task_id)
-        self.logger.addHandler(aria_logger.create_console_log_handler())
-        self.logger.addHandler(self._get_sqla_handler())
+    def _register_logger(self, level=None, task_id=None):
+        self.logger = self.PrefixedLogger(
+            logging.getLogger('aria.executions.task'), self.logging_id, task_id=task_id)
         self.logger.setLevel(level or logging.DEBUG)
+        if not self.logger.handlers:
+            self.logger.addHandler(aria_logger.create_console_log_handler())
+            self.logger.addHandler(self._get_sqla_handler())
 
     def _get_sqla_handler(self):
         api_kwargs = {}
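
The guard matters because named loggers are process-wide singletons: without the handlers check, every context construction would stack another console handler and another SQLAlchemy handler onto 'aria.executions.task', duplicating log lines. The pattern in isolation (handler construction is simplified here; the real code uses aria's own handler factories):

    import logging

    def get_task_logger(level=None):
        logger = logging.getLogger('aria.executions.task')
        logger.setLevel(level or logging.DEBUG)
        # Only attach handlers the first time this named logger is requested.
        if not logger.handlers:
            handler = logging.StreamHandler()
            handler.setFormatter(logging.Formatter('%(asctime)s %(message)s'))
            logger.addHandler(handler)
        return logger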

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2214a982/aria/orchestrator/context/workflow.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/workflow.py b/aria/orchestrator/context/workflow.py
index bc9f653..ad4a2ff 100644
--- a/aria/orchestrator/context/workflow.py
+++ b/aria/orchestrator/context/workflow.py
@@ -19,7 +19,6 @@ Workflow and operation contexts
 
 import threading
 from contextlib import contextmanager
-from datetime import datetime
 
 from .exceptions import ContextException
 from .common import BaseContext