Posted to dev@ariatosca.apache.org by ra...@apache.org on 2017/04/02 13:05:26 UTC

[1/9] incubator-ariatosca git commit: ARIA-132-Models-cascading-deletion-raises-constraint-errors [Forced Update!]

Repository: incubator-ariatosca
Updated Branches:
  refs/heads/ARIA-48-aria-cli de2d57dd0 -> 3f30fa35d (forced update)


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2de04972/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
index 8b619bf..8355521 100644
--- a/aria/modeling/service_template.py
+++ b/aria/modeling/service_template.py
@@ -41,7 +41,7 @@ from . import (
 )
 
 
-class ServiceTemplateBase(TemplateModelMixin): # pylint: disable=too-many-public-methods
+class ServiceTemplateBase(TemplateModelMixin):
     """
     A service template is a source for creating :class:`Service` instances.
 
@@ -90,7 +90,6 @@ class ServiceTemplateBase(TemplateModelMixin): # pylint: disable=too-many-public
     :vartype created_at: :class:`datetime.datetime`
     :ivar updated_at: Update timestamp
     :vartype updated_at: :class:`datetime.datetime`
-
     :ivar services: Instantiated services
     :vartype services: [:class:`Service`]
     """
@@ -108,121 +107,151 @@ class ServiceTemplateBase(TemplateModelMixin): # pylint: disable=too-many-public
 
     description = Column(Text)
     main_file_name = Column(Text)
+    created_at = Column(DateTime, nullable=False, index=True)
+    updated_at = Column(DateTime)
+
+    # region foreign keys
 
     @declared_attr
-    def meta_data(cls):
-        # Warning! We cannot use the attr name "metadata" because it's used by SQLAlchemy!
-        return relationship.many_to_many(cls, 'metadata', dict_key='name')
+    def substitution_template_fk(cls):
+        """For ServiceTemplate one-to-one to SubstitutionTemplate"""
+        return relationship.foreign_key('substitution_template', nullable=True)
 
     @declared_attr
-    def node_templates(cls):
-        return relationship.one_to_many(cls, 'node_template', dict_key='name')
+    def node_type_fk(cls):
+        """For ServiceTemplate one-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
 
     @declared_attr
-    def group_templates(cls):
-        return relationship.one_to_many(cls, 'group_template', dict_key='name')
+    def group_type_fk(cls):
+        """For ServiceTemplate one-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
 
     @declared_attr
-    def policy_templates(cls):
-        return relationship.one_to_many(cls, 'policy_template', dict_key='name')
+    def policy_type_fk(cls):
+        """For ServiceTemplate one-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
 
     @declared_attr
-    def substitution_template(cls):
-        return relationship.one_to_one(cls, 'substitution_template')
+    def relationship_type_fk(cls):
+        """For ServiceTemplate one-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
 
     @declared_attr
-    def inputs(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+    def capability_type_fk(cls):
+        """For ServiceTemplate one-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
 
     @declared_attr
-    def outputs(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='outputs', dict_key='name')
+    def interface_type_fk(cls):
+        """For ServiceTemplate one-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
 
     @declared_attr
-    def workflow_templates(cls):
-        return relationship.one_to_many(cls, 'operation_template', dict_key='name')
+    def artifact_type_fk(cls):
+        """For ServiceTemplate one-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
+
+    # endregion
+
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
 
     @declared_attr
-    def plugin_specifications(cls):
-        return relationship.one_to_many(cls, 'plugin_specification', dict_key='name')
+    def substitution_template(cls):
+        return relationship.one_to_one(
+            cls, 'substitution_template', back_populates=relationship.NO_BACK_POP)
 
     @declared_attr
     def node_types(cls):
-        return relationship.one_to_one(cls, 'type', fk='node_type_fk', other_property=False)
+        return relationship.one_to_one(
+            cls, 'type', fk='node_type_fk', back_populates=relationship.NO_BACK_POP)
 
     @declared_attr
     def group_types(cls):
-        return relationship.one_to_one(cls, 'type', fk='group_type_fk', other_property=False)
+        return relationship.one_to_one(
+            cls, 'type', fk='group_type_fk', back_populates=relationship.NO_BACK_POP)
 
     @declared_attr
     def policy_types(cls):
-        return relationship.one_to_one(cls, 'type', fk='policy_type_fk', other_property=False)
+        return relationship.one_to_one(
+            cls, 'type', fk='policy_type_fk', back_populates=relationship.NO_BACK_POP)
 
     @declared_attr
     def relationship_types(cls):
-        return relationship.one_to_one(cls, 'type', fk='relationship_type_fk',
-                                       other_property=False)
+        return relationship.one_to_one(
+            cls, 'type', fk='relationship_type_fk', back_populates=relationship.NO_BACK_POP)
 
     @declared_attr
     def capability_types(cls):
-        return relationship.one_to_one(cls, 'type', fk='capability_type_fk', other_property=False)
+        return relationship.one_to_one(
+            cls, 'type', fk='capability_type_fk', back_populates=relationship.NO_BACK_POP)
 
     @declared_attr
     def interface_types(cls):
-        return relationship.one_to_one(cls, 'type', fk='interface_type_fk', other_property=False)
+        return relationship.one_to_one(
+            cls, 'type', fk='interface_type_fk', back_populates=relationship.NO_BACK_POP)
 
     @declared_attr
     def artifact_types(cls):
-        return relationship.one_to_one(cls, 'type', fk='artifact_type_fk', other_property=False)
+        return relationship.one_to_one(
+            cls, 'type', fk='artifact_type_fk', back_populates=relationship.NO_BACK_POP)
 
-    # region orchestration
+    # endregion
 
-    created_at = Column(DateTime, nullable=False, index=True)
-    updated_at = Column(DateTime)
+    # region one_to_many relationships
 
-    # endregion
+    @declared_attr
+    def services(cls):
+        return relationship.one_to_many(cls, 'service')
 
-    # region foreign keys
+    @declared_attr
+    def operation_templates(cls):
+        return relationship.one_to_many(cls, 'operation_template')
 
     @declared_attr
-    def substitution_template_fk(cls):
-        """For ServiceTemplate one-to-one to SubstitutionTemplate"""
-        return relationship.foreign_key('substitution_template', nullable=True)
+    def node_templates(cls):
+        return relationship.one_to_many(cls, 'node_template', dict_key='name')
 
     @declared_attr
-    def node_type_fk(cls):
-        """For ServiceTemplate one-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
+    def group_templates(cls):
+        return relationship.one_to_many(cls, 'group_template', dict_key='name')
 
     @declared_attr
-    def group_type_fk(cls):
-        """For ServiceTemplate one-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
+    def policy_templates(cls):
+        return relationship.one_to_many(cls, 'policy_template', dict_key='name')
 
     @declared_attr
-    def policy_type_fk(cls):
-        """For ServiceTemplate one-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
+    def workflow_templates(cls):
+        return relationship.one_to_many(cls, 'operation_template', dict_key='name')
 
     @declared_attr
-    def relationship_type_fk(cls):
-        """For ServiceTemplate one-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
+    def plugin_specifications(cls):
+        return relationship.one_to_many(cls, 'plugin_specification', dict_key='name')
+
+    # endregion
+
+    # region many_to_one relationships
+
+    # endregion
+
+    # region many_to_many relationships
 
     @declared_attr
-    def capability_type_fk(cls):
-        """For ServiceTemplate one-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
+    def meta_data(cls):
+        # Warning! We cannot use the attr name "metadata" because it's used by SQLAlchemy!
+        return relationship.many_to_many(cls, 'metadata', dict_key='name')
 
     @declared_attr
-    def interface_type_fk(cls):
-        """For ServiceTemplate one-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
+    def inputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
 
     @declared_attr
-    def artifact_type_fk(cls):
-        """For ServiceTemplate one-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
+    def outputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='outputs', dict_key='name')
 
     # endregion
 
@@ -394,7 +423,6 @@ class NodeTemplateBase(TemplateModelMixin):
     :vartype requirement_templates: [:class:`RequirementTemplate`]
     :ivar target_node_template_constraints: Constraints for filtering relationship targets
     :vartype target_node_template_constraints: [:class:`FunctionType`]
-
     :ivar service_template: Containing service template
     :vartype service_template: :class:`ServiceTemplate`
     :ivar group_templates: We are a member of these groups
@@ -413,14 +441,54 @@ class NodeTemplateBase(TemplateModelMixin):
                           'service_template_fk',
                           'service_template_name']
 
+    # region foreign_keys
+
+    @declared_attr
+    def type_fk(cls):
+        """For NodeTemplate many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    @declared_attr
+    def service_template_fk(cls):
+        """For ServiceTemplate one-to-many to NodeTemplate"""
+        return relationship.foreign_key('service_template')
+
+    # endregion
+
+    # region association proxies
+
+    @declared_attr
+    def service_template_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('service_template', 'name')
+
+    # endregion
+
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
+
+    @declared_attr
+    def nodes(cls):
+        return relationship.one_to_many(cls, 'node')
+
+    # endregion
+
+    # region many_to_one relationships
+
     @declared_attr
     def type(cls):
-        return relationship.many_to_one(cls, 'type')
+        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
 
-    description = Column(Text)
-    default_instances = Column(Integer, default=1)
-    min_instances = Column(Integer, default=0)
-    max_instances = Column(Integer, default=None)
+    @declared_attr
+    def service_template(cls):
+        return relationship.many_to_one(cls, 'service_template')
+
+    # endregion
+
+    # region many_to_many relationships
 
     @declared_attr
     def properties(cls):
@@ -440,33 +508,15 @@ class NodeTemplateBase(TemplateModelMixin):
 
     @declared_attr
     def requirement_templates(cls):
-        return relationship.one_to_many(cls, 'requirement_template', child_fk='node_template_fk',
-                                        child_property='node_template')
-
-    target_node_template_constraints = Column(modeling_types.StrictList(FunctionType))
-
-    # region foreign_keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For NodeTemplate many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    @declared_attr
-    def service_template_fk(cls):
-        """For ServiceTemplate one-to-many to NodeTemplate"""
-        return relationship.foreign_key('service_template')
+        return relationship.one_to_many(cls, 'requirement_template', child_fk='node_template_fk')
 
     # endregion
 
-    # region association proxies
-
-    @declared_attr
-    def service_template_name(cls):
-        """Required for use by SQLAlchemy queries"""
-        return association_proxy('service_template', 'name')
-
-    # endregion
+    description = Column(Text)
+    default_instances = Column(Integer, default=1)
+    min_instances = Column(Integer, default=0)
+    max_instances = Column(Integer, default=None)
+    target_node_template_constraints = Column(modeling_types.StrictList(FunctionType))
 
     def is_target_node_valid(self, target_node_template):
         if self.target_node_template_constraints:
@@ -557,7 +607,6 @@ class GroupTemplateBase(TemplateModelMixin):
     :vartype properties: {basestring: :class:`Parameter`}
     :ivar interface_templates: Bundles of operations
     :vartype interface_templates: {basestring: :class:`InterfaceTemplate`}
-
     :ivar service_template: Containing service template
     :vartype service_template: :class:`ServiceTemplate`
     :ivar policy_templates: Policy templates enacted on this group
@@ -571,38 +620,66 @@ class GroupTemplateBase(TemplateModelMixin):
     __private_fields__ = ['type_fk',
                           'service_template_fk']
 
-    @declared_attr
-    def type(cls):
-        return relationship.many_to_one(cls, 'type')
+    # region foreign keys
 
-    description = Column(Text)
+    @declared_attr
+    def type_fk(cls):
+        """For GroupTemplate many-to-one to Type"""
+        return relationship.foreign_key('type')
 
     @declared_attr
-    def node_templates(cls):
-        return relationship.many_to_many(cls, 'node_template')
+    def service_template_fk(cls):
+        """For ServiceTemplate one-to-many to GroupTemplate"""
+        return relationship.foreign_key('service_template')
+
+    # endregion
+
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
 
     @declared_attr
-    def properties(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+    def groups(cls):
+        return relationship.one_to_many(cls, 'group')
 
     @declared_attr
     def interface_templates(cls):
         return relationship.one_to_many(cls, 'interface_template', dict_key='name')
 
-    # region foreign keys
+    # endregion
+
+    # region many_to_one relationships
 
     @declared_attr
-    def type_fk(cls):
-        """For GroupTemplate many-to-one to Type"""
-        return relationship.foreign_key('type')
+    def service_template(cls):
+        return relationship.many_to_one(cls, 'service_template')
 
     @declared_attr
-    def service_template_fk(cls):
-        """For ServiceTemplate one-to-many to GroupTemplate"""
-        return relationship.foreign_key('service_template')
+    def type(cls):
+        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
 
     # endregion
 
+    # region many_to_many relationships
+
+    @declared_attr
+    def node_templates(cls):
+        return relationship.many_to_many(cls, 'node_template')
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    # endregion
+
+    description = Column(Text)
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -622,7 +699,7 @@ class GroupTemplateBase(TemplateModelMixin):
         utils.instantiate_dict(self, group.interfaces, self.interface_templates)
         if self.node_templates:
             for node_template in self.node_templates:
-                group.nodes += node_template.nodes.all()
+                group.nodes += node_template.nodes
         return group
 
     def validate(self):
@@ -664,7 +741,6 @@ class PolicyTemplateBase(TemplateModelMixin):
     :vartype group_templates: [:class:`GroupTemplate`]
     :ivar properties: Associated parameters
     :vartype properties: {basestring: :class:`Parameter`}
-
     :ivar service_template: Containing service template
     :vartype service_template: :class:`ServiceTemplate`
     :ivar policies: Instantiated policies
@@ -673,14 +749,51 @@ class PolicyTemplateBase(TemplateModelMixin):
 
     __tablename__ = 'policy_template'
 
-    __private_fields__ = ['type_fk',
-                          'service_template_fk']
+    __private_fields__ = ['type_fk', 'service_template_fk']
+
+    # region foreign keys
+
+    @declared_attr
+    def type_fk(cls):
+        """For PolicyTemplate many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    @declared_attr
+    def service_template_fk(cls):
+        """For ServiceTemplate one-to-many to PolicyTemplate"""
+        return relationship.foreign_key('service_template')
+
+    # endregion
+
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
+
+    @declared_attr
+    def policies(cls):
+        return relationship.one_to_many(cls, 'policy')
+
+    # endregion
+
+    # region many_to_one relationships
+
+    @declared_attr
+    def service_template(cls):
+        return relationship.many_to_one(cls, 'service_template')
 
     @declared_attr
     def type(cls):
-        return relationship.many_to_one(cls, 'type')
+        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
 
-    description = Column(Text)
+    # endregion
+
+    # region many_to_many relationships
 
     @declared_attr
     def node_templates(cls):
@@ -694,20 +807,10 @@ class PolicyTemplateBase(TemplateModelMixin):
     def properties(cls):
         return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
 
-    # region foreign keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For PolicyTemplate many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    @declared_attr
-    def service_template_fk(cls):
-        """For ServiceTemplate one-to-many to PolicyTemplate"""
-        return relationship.foreign_key('service_template')
-
     # endregion
 
+    description = Column(Text)
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -725,10 +828,10 @@ class PolicyTemplateBase(TemplateModelMixin):
         utils.instantiate_dict(self, policy.properties, self.properties)
         if self.node_templates:
             for node_template in self.node_templates:
-                policy.nodes += node_template.nodes.all()
+                policy.nodes += node_template.nodes
         if self.group_templates:
             for group_template in self.group_templates:
-                policy.groups += group_template.groups.all()
+                policy.groups += group_template.groups
         return policy
 
     def validate(self):
@@ -761,7 +864,6 @@ class SubstitutionTemplateBase(TemplateModelMixin):
     :vartype node_type: :class:`Type`
     :ivar mappings: Requirement and capability mappings
     :vartype mappings: {basestring: :class:`SubstitutionTemplateMapping`}
-
     :ivar service_template: Containing service template
     :vartype service_template: :class:`ServiceTemplate`
     :ivar substitutions: Instantiated substitutions
@@ -772,20 +874,44 @@ class SubstitutionTemplateBase(TemplateModelMixin):
 
     __private_fields__ = ['node_type_fk']
 
+    # region foreign keys
+
     @declared_attr
-    def node_type(cls):
-        return relationship.many_to_one(cls, 'type')
+    def node_type_fk(cls):
+        """For SubstitutionTemplate many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    # endregion
+
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
+
+    @declared_attr
+    def substitutions(cls):
+        return relationship.one_to_many(cls, 'substitution')
 
     @declared_attr
     def mappings(cls):
         return relationship.one_to_many(cls, 'substitution_template_mapping', dict_key='name')
 
-    # region foreign keys
+    # endregion
+
+    # region many_to_one relationships
 
     @declared_attr
-    def node_type_fk(cls):
-        """For SubstitutionTemplate many-to-one to Type"""
-        return relationship.foreign_key('type')
+    def node_type(cls):
+        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region many_to_many relationships
 
     # endregion
 
@@ -830,7 +956,6 @@ class SubstitutionTemplateMappingBase(TemplateModelMixin):
     :vartype capability_template: :class:`CapabilityTemplate`
     :ivar requirement_template: Requirement template in the node template
     :vartype requirement_template: :class:`RequirementTemplate`
-
     :ivar substitution_template: Containing substitution template
     :vartype substitution_template: :class:`SubstitutionTemplate`
     """
@@ -842,18 +967,6 @@ class SubstitutionTemplateMappingBase(TemplateModelMixin):
                           'capability_template_fk',
                           'requirement_template_fk']
 
-    @declared_attr
-    def node_template(cls):
-        return relationship.one_to_one(cls, 'node_template')
-
-    @declared_attr
-    def capability_template(cls):
-        return relationship.one_to_one(cls, 'capability_template')
-
-    @declared_attr
-    def requirement_template(cls):
-        return relationship.one_to_one(cls, 'requirement_template')
-
     # region foreign keys
 
     @declared_attr
@@ -878,6 +991,45 @@ class SubstitutionTemplateMappingBase(TemplateModelMixin):
 
     # endregion
 
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    @declared_attr
+    def node_template(cls):
+        return relationship.one_to_one(
+            cls, 'node_template', back_populates=relationship.NO_BACK_POP)
+
+    @declared_attr
+    def capability_template(cls):
+        return relationship.one_to_one(
+            cls, 'capability_template', back_populates=relationship.NO_BACK_POP)
+
+    @declared_attr
+    def requirement_template(cls):
+        return relationship.one_to_one(
+            cls, 'requirement_template', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region one_to_many relationships
+
+    # endregion
+
+    # region many_to_one relationships
+
+    @declared_attr
+    def substitution_template(cls):
+        return relationship.many_to_one(cls, 'substitution_template', back_populates='mappings')
+
+    # endregion
+
+    # region many_to_many relationships
+
+    # endregion
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -889,7 +1041,7 @@ class SubstitutionTemplateMappingBase(TemplateModelMixin):
     def instantiate(self, container):
         from . import models
         context = ConsumptionContext.get_thread_local()
-        nodes = self.node_template.nodes.all()
+        nodes = self.node_template.nodes
         if len(nodes) == 0:
             context.validation.report(
                 'mapping "{0}" refers to node template "{1}" but there are no '
@@ -950,7 +1102,6 @@ class RequirementTemplateBase(TemplateModelMixin):
     :vartype target_node_template_constraints: [:class:`FunctionType`]
     :ivar relationship_template: Template for relationships (optional)
     :vartype relationship_template: :class:`RelationshipTemplate`
-
     :ivar node_template: Containing node template
     :vartype node_template: :class:`NodeTemplate`
     :ivar substitution_template_mapping: Our contribution to service substitution
@@ -967,28 +1118,6 @@ class RequirementTemplateBase(TemplateModelMixin):
                           'node_template_fk',
                           'relationship_template_fk']
 
-    @declared_attr
-    def target_node_type(cls):
-        return relationship.many_to_one(cls, 'type', fk='target_node_type_fk',
-                                        parent_property=False)
-
-    @declared_attr
-    def target_node_template(cls):
-        return relationship.one_to_one(cls, 'node_template', fk='target_node_template_fk',
-                                       other_property=False)
-
-    @declared_attr
-    def target_capability_type(cls):
-        return relationship.one_to_one(cls, 'type', fk='target_capability_type_fk',
-                                       other_property=False)
-
-    target_capability_name = Column(Text)
-    target_node_template_constraints = Column(modeling_types.StrictList(FunctionType))
-
-    @declared_attr
-    def relationship_template(cls):
-        return relationship.one_to_one(cls, 'relationship_template')
-
     # region foreign keys
 
     @declared_attr
@@ -1007,17 +1136,73 @@ class RequirementTemplateBase(TemplateModelMixin):
         return relationship.foreign_key('type', nullable=True)
 
     @declared_attr
-    def node_template_fk(cls):
-        """For NodeTemplate one-to-many to RequirementTemplate"""
-        return relationship.foreign_key('node_template')
+    def node_template_fk(cls):
+        """For NodeTemplate one-to-many to RequirementTemplate"""
+        return relationship.foreign_key('node_template')
+
+    @declared_attr
+    def relationship_template_fk(cls):
+        """For RequirementTemplate one-to-one to RelationshipTemplate"""
+        return relationship.foreign_key('relationship_template', nullable=True)
+
+    # endregion
+
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    @declared_attr
+    def target_node_template(cls):
+        return relationship.one_to_one(cls,
+                                       'node_template',
+                                       fk='target_node_template_fk',
+                                       back_populates=relationship.NO_BACK_POP)
+
+    @declared_attr
+    def target_capability_type(cls):
+        return relationship.one_to_one(cls,
+                                       'type',
+                                       fk='target_capability_type_fk',
+                                       back_populates=relationship.NO_BACK_POP)
+
+    @declared_attr
+    def relationship_template(cls):
+        return relationship.one_to_one(cls,
+                                       'relationship_template',
+                                       back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region one_to_many relationships
+
+    @declared_attr
+    def relationships(cls):
+        return relationship.one_to_many(cls, 'relationship')
+
+    @declared_attr
+    def target_node_type(cls):
+        return relationship.many_to_one(
+            cls, 'type', fk='target_node_type_fk', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region many_to_one relationships
 
     @declared_attr
-    def relationship_template_fk(cls):
-        """For RequirementTemplate one-to-one to RelationshipTemplate"""
-        return relationship.foreign_key('relationship_template', nullable=True)
+    def node_template(cls):
+        return relationship.many_to_one(cls, 'node_template', fk='node_template_fk')
+
+    # endregion
+
+    # region many_to_many relationships
 
     # endregion
 
+    target_capability_name = Column(Text)
+    target_node_template_constraints = Column(modeling_types.StrictList(FunctionType))
+
     def find_target(self, source_node_template):
         context = ConsumptionContext.get_thread_local()
 
@@ -1137,7 +1322,6 @@ class RelationshipTemplateBase(TemplateModelMixin):
     :vartype properties: {basestring: :class:`Parameter`}
     :ivar interface_templates: Bundles of operations
     :vartype interface_templates: {basestring: :class:`InterfaceTemplate`}
-
     :ivar requirement_template: Containing requirement template
     :vartype requirement_template: :class:`RequirementTemplate`
     :ivar relationships: Instantiated relationships
@@ -1148,29 +1332,53 @@ class RelationshipTemplateBase(TemplateModelMixin):
 
     __private_fields__ = ['type_fk']
 
+    # region foreign keys
+
     @declared_attr
-    def type(cls):
-        return relationship.many_to_one(cls, 'type')
+    def type_fk(cls):
+        """For RelationshipTemplate many-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
 
-    description = Column(Text)
+    # endregion
+
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
 
     @declared_attr
-    def properties(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+    def relationships(cls):
+        return relationship.one_to_many(cls, 'relationship')
 
     @declared_attr
     def interface_templates(cls):
         return relationship.one_to_many(cls, 'interface_template', dict_key='name')
 
-    # region foreign keys
+    # endregion
+
+    # region many_to_one relationships
 
     @declared_attr
-    def type_fk(cls):
-        """For RelationshipTemplate many-to-one to Type"""
-        return relationship.foreign_key('type', nullable=True)
+    def type(cls):
+        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region many_to_many relationships
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
 
     # endregion
 
+    description = Column(Text)
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -1231,7 +1439,6 @@ class CapabilityTemplateBase(TemplateModelMixin):
     :vartype min_occurrences: int
     :ivar properties: Associated parameters
     :vartype properties: {basestring: :class:`Parameter`}
-
     :ivar node_template: Containing node template
     :vartype node_template: :class:`NodeTemplate`
     :ivar substitution_template_mapping: Our contribution to service substitution
@@ -1245,22 +1452,6 @@ class CapabilityTemplateBase(TemplateModelMixin):
     __private_fields__ = ['type_fk',
                           'node_template_fk']
 
-    @declared_attr
-    def type(cls):
-        return relationship.many_to_one(cls, 'type')
-
-    description = Column(Text)
-    min_occurrences = Column(Integer, default=None)  # optional
-    max_occurrences = Column(Integer, default=None)  # optional
-
-    @declared_attr
-    def valid_source_node_types(cls):
-        return relationship.many_to_many(cls, 'type', prefix='valid_sources')
-
-    @declared_attr
-    def properties(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
-
     # region foreign keys
 
     @declared_attr
@@ -1275,6 +1466,51 @@ class CapabilityTemplateBase(TemplateModelMixin):
 
     # endregion
 
+
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
+
+    @declared_attr
+    def capabilities(cls):
+        return relationship.one_to_many(cls, 'capability')
+
+    # endregion
+
+    # region many_to_one relationships
+
+    @declared_attr
+    def node_template(cls):
+        return relationship.many_to_one(cls, 'node_template')
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region many_to_many relationships
+
+    @declared_attr
+    def valid_source_node_types(cls):
+        return relationship.many_to_many(cls, 'type', prefix='valid_sources')
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    # endregion
+
+    description = Column(Text)
+    min_occurrences = Column(Integer, default=None)  # optional
+    max_occurrences = Column(Integer, default=None)  # optional
+
     def satisfies_requirement(self,
                               source_node_template,
                               requirement,
@@ -1360,7 +1596,6 @@ class InterfaceTemplateBase(TemplateModelMixin):
     :vartype inputs: {basestring: :class:`Parameter`}
     :ivar operation_templates: Operations
     :vartype operation_templates: {basestring: :class:`OperationTemplate`}
-
     :ivar node_template: Containing node template
     :vartype node_template: :class:`NodeTemplate`
     :ivar group_template: Containing group template
@@ -1378,19 +1613,6 @@ class InterfaceTemplateBase(TemplateModelMixin):
                           'group_template_fk',
                           'relationship_template_fk']
 
-    @declared_attr
-    def type(cls):
-        return relationship.many_to_one(cls, 'type')
-
-    description = Column(Text)
-
-    @declared_attr
-    def inputs(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
-
-    @declared_attr
-    def operation_templates(cls):
-        return relationship.one_to_many(cls, 'operation_template', dict_key='name')
 
     # region foreign keys
 
@@ -1416,6 +1638,57 @@ class InterfaceTemplateBase(TemplateModelMixin):
 
     # endregion
 
+
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
+
+    @declared_attr
+    def interfaces(cls):
+        return relationship.one_to_many(cls, 'interface')
+
+    @declared_attr
+    def operation_templates(cls):
+        return relationship.one_to_many(cls, 'operation_template', dict_key='name')
+
+    # endregion
+
+    # region many_to_one relationships
+
+    @declared_attr
+    def relationship_template(cls):
+        return relationship.many_to_one(cls, 'relationship_template')
+
+    @declared_attr
+    def group_template(cls):
+        return relationship.many_to_one(cls, 'group_template')
+
+    @declared_attr
+    def node_template(cls):
+        return relationship.many_to_one(cls, 'node_template')
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region many_to_many relationships
+
+    @declared_attr
+    def inputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+
+    # endregion
+
+    description = Column(Text)
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -1479,7 +1752,6 @@ class OperationTemplateBase(TemplateModelMixin):
     :vartype max_retries: int
     :ivar retry_interval: Interval between retries (in seconds)
     :vartype retry_interval: int
-
     :ivar interface_template: Containing interface template
     :vartype interface_template: :class:`InterfaceTemplate`
     :ivar service_template: Containing service template
@@ -1496,21 +1768,6 @@ class OperationTemplateBase(TemplateModelMixin):
 
     description = Column(Text)
 
-    @declared_attr
-    def plugin_specification(cls):
-        return relationship.one_to_one(cls, 'plugin_specification')
-
-    implementation = Column(Text)
-    dependencies = Column(modeling_types.StrictList(item_cls=basestring))
-
-    @declared_attr
-    def inputs(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
-
-    executor = Column(Text)
-    max_retries = Column(Integer)
-    retry_interval = Column(Integer)
-
     # region foreign keys
 
     @declared_attr
@@ -1530,6 +1787,53 @@ class OperationTemplateBase(TemplateModelMixin):
 
     # endregion
 
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    @declared_attr
+    def plugin_specification(cls):
+        return relationship.one_to_one(
+            cls, 'plugin_specification', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region one_to_many relationships
+
+    @declared_attr
+    def operations(cls):
+        return relationship.one_to_many(cls, 'operation')
+
+    # endregion
+
+    # region many_to_one relationships
+
+    @declared_attr
+    def service_template(cls):
+        return relationship.many_to_one(cls, 'service_template')
+
+    @declared_attr
+    def interface_template(cls):
+        return relationship.many_to_one(cls, 'interface_template')
+
+    # endregion
+
+    # region many_to_many relationships
+
+    @declared_attr
+    def inputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+
+    # endregion
+
+    implementation = Column(Text)
+    dependencies = Column(modeling_types.StrictList(item_cls=basestring))
+    executor = Column(Text)
+    max_retries = Column(Integer)
+    retry_interval = Column(Integer)
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -1604,7 +1908,6 @@ class ArtifactTemplateBase(TemplateModelMixin):
     :vartype repository_credential: {basestring: basestring}
     :ivar properties: Associated parameters
     :vartype properties: {basestring: :class:`Parameter`}
-
     :ivar node_template: Containing node template
     :vartype node_template: :class:`NodeTemplate`
     :ivar artifacts: Instantiated artifacts
@@ -1616,20 +1919,6 @@ class ArtifactTemplateBase(TemplateModelMixin):
     __private_fields__ = ['type_fk',
                           'node_template_fk']
 
-    @declared_attr
-    def type(cls):
-        return relationship.many_to_one(cls, 'type')
-
-    description = Column(Text)
-    source_path = Column(Text)
-    target_path = Column(Text)
-    repository_url = Column(Text)
-    repository_credential = Column(modeling_types.StrictDict(basestring, basestring))
-
-    @declared_attr
-    def properties(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
-
     # region foreign keys
 
     @declared_attr
@@ -1644,6 +1933,48 @@ class ArtifactTemplateBase(TemplateModelMixin):
 
     # endregion
 
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
+
+    @declared_attr
+    def artifacts(cls):
+        return relationship.one_to_many(cls, 'artifact')
+
+    # endregion
+
+    # region many_to_one relationships
+
+    @declared_attr
+    def node_template(cls):
+        return relationship.many_to_one(cls, 'node_template')
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region many_to_many relationships
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    # endregion
+
+    description = Column(Text)
+    source_path = Column(Text)
+    target_path = Column(Text)
+    repository_url = Column(Text)
+    repository_credential = Column(modeling_types.StrictDict(basestring, basestring))
+
     @property
     def as_raw(self):
         return collections.OrderedDict((

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2de04972/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index 6721b29..f55b83e 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -353,10 +353,10 @@ def _assert_loggins(ctx, inputs):
     tasks = ctx.model.task.list()
     assert len(tasks) == 1
     task = tasks[0]
-    assert task.logs.count() == 4
+    assert len(task.logs) == 4
 
     logs = ctx.model.log.list()
-    assert len(logs) == execution.logs.count() == 6
+    assert len(logs) == len(execution.logs) == 6
     assert set(logs) == set(execution.logs)
 
     assert all(l.execution == execution for l in logs)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2de04972/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
index b950fa4..3b4f371 100644
--- a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
+++ b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
@@ -33,7 +33,6 @@ imports:
   - types/mongodb.yaml
   - types/nginx.yaml
   - aria-1.0
-
 dsl_definitions:
 
   default_openstack_credential: &DEFAULT_OPENSTACK_CREDENTIAL



[3/9] incubator-ariatosca git commit: ARIA-133 Add status-related properties to the Execution, Task and Node models

Posted by ra...@apache.org.
ARIA-133 Add status-related properties to the Execution, Task and Node models

We are adding these properties so it will be easier to filter those models
from storage according to their status, and to avoid using their `status`
constants outside of the models themselves.

It should be noted that since we currently have the concept of `StubTasks`
(which are not part of the model), we had to implement the same logic for
them, even though they don't have direct access to the state constants of
the task model. We did this because they are treated as 'regular' model
tasks in some parts of the code.
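
To make the intent concrete, here is a minimal, self-contained sketch (a
stand-in `Task` class with the same property names, not the actual ARIA
model) of how callers can now filter on `is_waiting`/`has_ended` instead of
comparing against the status constants directly, mirroring the
`_executable_tasks` change in the engine diff below:

import datetime

class Task(object):
    # Status constants stay internal to the model; callers are expected to
    # use the properties below rather than these values.
    PENDING = 'pending'
    RETRYING = 'retrying'
    STARTED = 'started'
    SUCCESS = 'success'
    FAILED = 'failed'

    def __init__(self, status, due_at=None):
        self.status = status
        self.due_at = due_at or datetime.datetime.utcnow()

    @property
    def has_ended(self):
        return self.status in [self.SUCCESS, self.FAILED]

    @property
    def is_waiting(self):
        return self.status in [self.PENDING, self.RETRYING]


def executable_tasks(tasks):
    # Mirrors the engine's _executable_tasks change in this commit
    # (the dependency check is omitted for brevity): filter on the
    # property instead of comparing against a WAIT_STATES list.
    now = datetime.datetime.utcnow()
    return [task for task in tasks if task.is_waiting and task.due_at <= now]


tasks = [Task(Task.PENDING), Task(Task.SUCCESS), Task(Task.RETRYING)]
print(len(executable_tasks(tasks)))   # -> 2
print([t.has_ended for t in tasks])   # -> [False, True, False]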


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/941d85c0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/941d85c0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/941d85c0

Branch: refs/heads/ARIA-48-aria-cli
Commit: 941d85c0ed622b6b233f38f853145c6a55120dec
Parents: 2de0497
Author: Avia Efrat <av...@gigaspaces.com>
Authored: Tue Mar 28 12:51:42 2017 +0300
Committer: Avia Efrat <av...@gigaspaces.com>
Committed: Tue Mar 28 18:41:19 2017 +0300

----------------------------------------------------------------------
 aria/modeling/orchestration.py             | 20 ++++++++++++++++----
 aria/modeling/service_instance.py          |  4 ++++
 aria/orchestrator/workflows/core/engine.py |  6 +++---
 aria/orchestrator/workflows/core/task.py   |  8 ++++++++
 4 files changed, 31 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/941d85c0/aria/modeling/orchestration.py
----------------------------------------------------------------------
diff --git a/aria/modeling/orchestration.py b/aria/modeling/orchestration.py
index a13ae87..f0bd4b2 100644
--- a/aria/modeling/orchestration.py
+++ b/aria/modeling/orchestration.py
@@ -69,7 +69,6 @@ class ExecutionBase(ModelMixin):
 
     STATES = [TERMINATED, FAILED, CANCELLED, PENDING, STARTED, CANCELLING, FORCE_CANCELLING]
     END_STATES = [TERMINATED, FAILED, CANCELLED]
-    ACTIVE_STATES = [state for state in STATES if state not in END_STATES]
 
     VALID_TRANSITIONS = {
         PENDING: [STARTED, CANCELLED],
@@ -102,6 +101,14 @@ class ExecutionBase(ModelMixin):
     status = Column(Enum(*STATES, name='execution_status'), default=PENDING)
     workflow_name = Column(Text)
 
+    @property
+    def has_ended(self):
+        return self.status in self.END_STATES
+
+    @property
+    def is_active(self):
+        return not self.has_ended
+
     @declared_attr
     def logs(cls):
         return relationship.one_to_many(cls, 'log')
@@ -240,9 +247,6 @@ class TaskBase(ModelMixin):
         FAILED,
     )
 
-    WAIT_STATES = [PENDING, RETRYING]
-    END_STATES = [SUCCESS, FAILED]
-
     RUNS_ON_SOURCE = 'source'
     RUNS_ON_TARGET = 'target'
     RUNS_ON_NODE = 'node'
@@ -289,6 +293,14 @@ class TaskBase(ModelMixin):
     _runs_on = Column(Enum(*RUNS_ON, name='runs_on'), name='runs_on')
 
     @property
+    def has_ended(self):
+        return self.status in [self.SUCCESS, self.FAILED]
+
+    @property
+    def is_waiting(self):
+        return self.status in [self.PENDING, self.RETRYING]
+
+    @property
     def runs_on(self):
         if self._runs_on == self.RUNS_ON_NODE:
             return self.node

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/941d85c0/aria/modeling/service_instance.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py
index e2e5ae0..e6c2b12 100644
--- a/aria/modeling/service_instance.py
+++ b/aria/modeling/service_instance.py
@@ -417,6 +417,10 @@ class NodeBase(InstanceModelMixin):
         except KeyError:
             return None
 
+    @property
+    def is_available(self):
+        return self.state not in [self.INITIAL, self.DELETED, self.ERROR]
+
     # region foreign_keys
 
     @declared_attr

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/941d85c0/aria/orchestrator/workflows/core/engine.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/engine.py b/aria/orchestrator/workflows/core/engine.py
index fa4550d..d32abb8 100644
--- a/aria/orchestrator/workflows/core/engine.py
+++ b/aria/orchestrator/workflows/core/engine.py
@@ -88,12 +88,12 @@ class Engine(logger.LoggerMixin):
     def _executable_tasks(self):
         now = datetime.utcnow()
         return (task for task in self._tasks_iter()
-                if task.status in models.Task.WAIT_STATES and
+                if task.is_waiting and
                 task.due_at <= now and
                 not self._task_has_dependencies(task))
 
     def _ended_tasks(self):
-        return (task for task in self._tasks_iter() if task.status in models.Task.END_STATES)
+        return (task for task in self._tasks_iter() if task.has_ended)
 
     def _task_has_dependencies(self, task):
         return len(self._execution_graph.pred.get(task.id, {})) > 0
@@ -105,7 +105,7 @@ class Engine(logger.LoggerMixin):
         for _, data in self._execution_graph.nodes_iter(data=True):
             task = data['task']
             if isinstance(task, engine_task.OperationTask):
-                if task.model_task.status not in models.Task.END_STATES:
+                if not task.model_task.has_ended:
                     self._workflow_context.model.task.refresh(task.model_task)
             yield task
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/941d85c0/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py
index 1e13588..aa8963f 100644
--- a/aria/orchestrator/workflows/core/task.py
+++ b/aria/orchestrator/workflows/core/task.py
@@ -69,6 +69,14 @@ class StubTask(BaseTask):
         self.status = models.Task.PENDING
         self.due_at = datetime.utcnow()
 
+    @property
+    def has_ended(self):
+        return self.status in [models.Task.SUCCESS, models.Task.FAILED]
+
+    @property
+    def is_waiting(self):
+        return self.status in [models.Task.PENDING, models.Task.RETRYING]
+
 
 class StartWorkflowTask(StubTask):
     """


[8/9] incubator-ariatosca git commit: ARIA-48 cli

Posted by ra...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/commands/service_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/service_templates.py b/aria/cli/commands/service_templates.py
new file mode 100644
index 0000000..b855529
--- /dev/null
+++ b/aria/cli/commands/service_templates.py
@@ -0,0 +1,207 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import json
+
+from .. import utils
+from .. import csar
+from ..cli import aria
+from .. import service_template_utils
+from ..table import print_data
+from ..exceptions import AriaCliError
+from ...core import Core
+from ...exceptions import AriaException
+
+
+DESCRIPTION_LIMIT = 20
+SERVICE_TEMPLATE_COLUMNS = \
+    ['id', 'name', 'main_file_name', 'created_at', 'updated_at']
+
+
+@aria.group(name='service-templates')
+@aria.options.verbose()
+def service_templates():
+    """Handle service templates on the manager
+    """
+    pass
+
+
+@service_templates.command(name='show',
+                           short_help='Show service template information')
+@aria.argument('service-template-id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(service_template_id, model_storage, logger):
+    """Show information for a specific service templates
+
+    `SERVICE_TEMPLATE_ID` is the id of the service template to show information on.
+    """
+    logger.info('Showing service template {0}...'.format(service_template_id))
+    service_template = model_storage.service_template.get(service_template_id)
+    services = [d.to_dict() for d in service_template.services]
+    service_template_dict = service_template.to_dict()
+    service_template_dict['#services'] = len(services)
+    columns = SERVICE_TEMPLATE_COLUMNS + ['#services']
+    print_data(columns, service_template_dict, 'Service-template:', max_width=50)
+
+    logger.info('Description:')
+    logger.info('{0}\n'.format(service_template_dict['description'].encode('UTF-8') or ''))
+
+    logger.info('Existing services:')
+    logger.info('{0}\n'.format(json.dumps([d['name'] for d in services])))
+
+
+@service_templates.command(name='list',
+                           short_help='List service templates')
+@aria.options.sort_by()
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(sort_by, descending, model_storage, logger):
+    """List all service templates
+    """
+    def trim_description(service_template):
+        if service_template['description'] is not None:
+            if len(service_template['description']) >= DESCRIPTION_LIMIT:
+                service_template['description'] = '{0}..'.format(
+                    service_template['description'][:DESCRIPTION_LIMIT - 2])
+        else:
+            service_template['description'] = ''
+        return service_template
+
+    logger.info('Listing all service templates...')
+    service_templates = [trim_description(b.to_dict()) for b in model_storage.service_template.list(
+        sort=utils.storage_sort_param(sort_by, descending))]
+    print_data(SERVICE_TEMPLATE_COLUMNS, service_templates, 'Service templates:')
+
+
+@service_templates.command(name='store',
+                           short_help='Store a service template')
+@aria.argument('service-template-path')
+@aria.argument('service-template-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def store(service_template_path, service_template_name, model_storage, resource_storage,
+          plugin_manager, logger):
+    """Store a service template
+
+    `SERVICE_TEMPLATE_PATH` is the path of the service template to store.
+
+    `SERVICE_TEMPLATE_NAME` is the name of the service template to store.
+    """
+    logger.info('Storing service template {0}...'.format(service_template_name))
+
+    service_template_path = service_template_utils.get(service_template_path)
+    core = Core(model_storage, resource_storage, plugin_manager)
+    core.create_service_template(service_template_path,
+                                 os.path.dirname(service_template_path),
+                                 service_template_name)
+
+    logger.info('Service template stored')
+
+
+@service_templates.command(name='delete',
+                           short_help='Delete a service template')
+@aria.argument('service-template-id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def delete(service_template_id, model_storage, resource_storage, plugin_manager, logger):
+    """Delete a service template
+    `SERVICE_TEMPLATE_ID` is the id of the service template to delete.
+    """
+    logger.info('Deleting service template {0}...'.format(service_template_id))
+    core = Core(model_storage, resource_storage, plugin_manager)
+    core.delete_service_template(service_template_id)
+    logger.info('Service template {0} deleted'.format(service_template_id))
+
+
+@service_templates.command(name='inputs',
+                           short_help='Show service template inputs')
+@aria.argument('service-template-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def inputs(service_template_name, model_storage, logger):
+    """Show inputs for a specific service template
+
+    `SERVICE_TEMPLATE_NAME` is the name of the service template to show inputs for.
+    """
+    logger.info('Showing inputs for service template {0}...'.format(service_template_name))
+    print_service_template_inputs(model_storage, service_template_name)
+
+
+@service_templates.command(name='validate',
+                           short_help='Validate a service template')
+@aria.argument('service-template')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def validate_service_template(service_template, model_storage, resource_storage, plugin_manager,
+                              logger):
+    """Validate a service template
+
+    `SERVICE_TEMPLATE` is the path or url of the service template or archive to validate.
+    """
+    logger.info('Validating service template: {0}'.format(service_template))
+    service_template_path = service_template_utils.get(service_template)
+    core = Core(model_storage, resource_storage, plugin_manager)
+
+    try:
+        core.validate_service_template(service_template_path)
+    except AriaException as e:
+        # TODO: gather errors from parser and dump them via CLI?
+        raise AriaCliError(str(e))
+
+    logger.info('Service template validated successfully')
+
+
+@service_templates.command(name='create-archive',
+                           short_help='Create a csar archive')
+@aria.argument('service-template-path')
+@aria.argument('destination')
+@aria.options.verbose()
+@aria.pass_logger
+def create_archive(service_template_path, destination, logger):
+    """Create a csar archive
+
+    `service_template_path` is the path of the service template to create the archive from
+    `destination` is the path of the output csar archive
+    """
+    logger.info('Creating a csar archive')
+    csar.write(os.path.dirname(service_template_path), service_template_path, destination, logger)
+    logger.info('Csar archive created at {0}'.format(destination))
+
+
+@aria.pass_logger
+def print_service_template_inputs(model_storage, service_template_name, logger):
+    service_template = model_storage.service_template.get_by_name(service_template_name)
+
+    logger.info('Service template inputs:')
+    if service_template.inputs:
+        logger.info(utils.get_parameter_templates_as_string(service_template.inputs))
+    else:
+        logger.info('\tNo inputs')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/commands/services.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/services.py b/aria/cli/commands/services.py
new file mode 100644
index 0000000..ce1139b
--- /dev/null
+++ b/aria/cli/commands/services.py
@@ -0,0 +1,175 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+from StringIO import StringIO
+
+from . import service_templates
+from ..cli import aria, helptexts
+from ..exceptions import AriaCliError
+from ..table import print_data
+from ..utils import storage_sort_param
+from ...core import Core
+from ...exceptions import AriaException
+
+
+SERVICE_COLUMNS = ['id', 'name', 'service_template_name', 'created_at', 'updated_at']
+
+
+@aria.group(name='services')
+@aria.options.verbose()
+def services():
+    """Handle services
+    """
+    pass
+
+
+@services.command(name='list', short_help='List services')
+@aria.options.service_template_id()
+@aria.options.sort_by()
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(service_template_id,
+         sort_by,
+         descending,
+         model_storage,
+         logger):
+    """List services
+
+    If `--service-template-id` is provided, list services for that service template.
+    Otherwise, list services for all service templates.
+    """
+    if service_template_id:
+        logger.info('Listing services for service template {0}...'.format(
+            service_template_id))
+        service_template = model_storage.service_template.get(service_template_id)
+        filters = dict(service_template=service_template)
+    else:
+        logger.info('Listing all services...')
+        filters = {}
+
+    services = [d.to_dict() for d in model_storage.service.list(
+        sort=storage_sort_param(sort_by=sort_by, descending=descending),
+        filters=filters)]
+    print_data(SERVICE_COLUMNS, services, 'Services:')
+
+
+@services.command(name='create',
+                  short_help='Create a service')
+@aria.argument('service-name', required=False)
+@aria.options.service_template_name(required=True)
+@aria.options.inputs
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def create(service_template_name,
+           service_name,
+           inputs,
+           model_storage,
+           resource_storage,
+           plugin_manager,
+           logger):
+    """Create a service
+
+    `SERVICE_NAME` is the name of the service you'd like to create.
+
+    """
+    logger.info('Creating new service from service template {0}...'.format(
+        service_template_name))
+
+    try:
+        core = Core(model_storage, resource_storage, plugin_manager)
+        service = core.create_service(service_template_name, inputs, service_name)
+    except AriaException as e:
+        logger.info(str(e))
+        service_templates.print_service_template_inputs(model_storage, service_template_name)
+        raise AriaCliError(str(e))
+
+    logger.info("Service created. The service's name is {0}".format(service.name))
+
+
+@services.command(name='delete',
+                  short_help='Delete a service')
+@aria.argument('service-name')
+@aria.options.force(help=helptexts.IGNORE_RUNNING_NODES)
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def delete(service_name, force, model_storage, resource_storage, plugin_manager, logger):
+    """Delete a service
+
+    `SERVICE_NAME` is the name of the service to delete.
+    """
+    logger.info('Deleting service {0}...'.format(service_name))
+    core = Core(model_storage, resource_storage, plugin_manager)
+    core.delete_service(service_name, force=force)
+    logger.info('Service {0} deleted'.format(service_name))
+
+
+@services.command(name='outputs',
+                  short_help='Show service outputs')
+@aria.argument('service-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def outputs(service_name, model_storage, logger):
+    """Show outputs for a specific service
+
+    `SERVICE_NAME` is the name of the service to print outputs for.
+    """
+    logger.info('Showing outputs for service {0}...'.format(service_name))
+    service = model_storage.service.get_by_name(service_name)
+    # The outputs on the service itself are Parameter models; their
+    # definitions (assumed here to expose a description field) live on
+    # the service template.
+    outputs_def = service.service_template.outputs
+    outputs_ = StringIO()
+    for output_name, output in service.outputs.iteritems():
+        outputs_.write(' - "{0}":{1}'.format(output_name, os.linesep))
+        description = outputs_def[output_name].description \
+            if output_name in outputs_def else ''
+        outputs_.write('     Description: {0}{1}'.format(description,
+                                                         os.linesep))
+        outputs_.write('     Value: {0}{1}'.format(output.value, os.linesep))
+    logger.info(outputs_.getvalue())
+
+
+@services.command(name='inputs',
+                  short_help='Show service inputs')
+@aria.argument('service-name')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def inputs(service_name, model_storage, logger):
+    """Show inputs for a specific service
+
+    `SERVICE_NAME` is the name of the service to print inputs for.
+    """
+    logger.info('Showing inputs for service {0}...'.format(service_name))
+    service = model_storage.service.get_by_name(service_name)
+    if service.inputs:
+        inputs_ = StringIO()
+        for input_name, input in service.inputs.iteritems():
+            inputs_.write(' - "{0}":{1}'.format(input_name, os.linesep))
+            inputs_.write('     Value: {0}{1}'.format(input.value, os.linesep))
+        logger.info(inputs_.getvalue())
+    else:
+        logger.info('\tNo inputs')
+    logger.info('')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/commands/workflows.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/workflows.py b/aria/cli/commands/workflows.py
new file mode 100644
index 0000000..2180168
--- /dev/null
+++ b/aria/cli/commands/workflows.py
@@ -0,0 +1,107 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ..table import print_data
+from .. import utils
+from ..cli import aria
+from ..exceptions import AriaCliError
+from ...storage.exceptions import StorageError
+
+WORKFLOW_COLUMNS = ['name', 'service_template_name', 'service_name']
+
+
+@aria.group(name='workflows')
+def workflows():
+    """Handle service workflows
+    """
+    pass
+
+
+@workflows.command(name='show',
+                   short_help='Show workflow information')
+@aria.argument('workflow-name')
+@aria.options.service_name(required=True)
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(workflow_name, service_name, model_storage, logger):
+    """Show information for a specific workflow of a specific service
+
+    `WORKFLOW_NAME` is the name of the workflow to get information on.
+    """
+    try:
+        logger.info('Retrieving workflow {0} for service {1}'.format(
+            workflow_name, service_name))
+        service = model_storage.service.get_by_name(service_name)
+        workflow = next((wf for wf in service.workflows if
+                         wf.name == workflow_name), None)
+        if not workflow:
+            raise AriaCliError(
+                'Workflow {0} not found for service {1}'.format(workflow_name, service_name))
+    except StorageError:
+        raise AriaCliError('service {0} not found'.format(service_name))
+
+    defaults = {
+        'service_template_name': service.service_template_name,
+        'service_name': service.name
+    }
+    print_data(WORKFLOW_COLUMNS, workflow.to_dict(), 'Workflows:', defaults=defaults)
+
+    # print workflow parameters
+    mandatory_params = dict()
+    optional_params = dict()
+    for param_name, param in workflow.parameters.iteritems():
+        params_group = optional_params if 'default' in param else \
+            mandatory_params
+        params_group[param_name] = param
+
+    logger.info('Workflow Parameters:')
+    logger.info('\tMandatory Parameters:')
+    for param_name, param in mandatory_params.iteritems():
+        if 'description' in param:
+            logger.info('\t\t{0}\t({1})'.format(param_name,
+                                                param['description']))
+        else:
+            logger.info('\t\t{0}'.format(param_name))
+
+    logger.info('\tOptional Parameters:')
+    for param_name, param in optional_params.iteritems():
+        if 'description' in param:
+            logger.info('\t\t{0}: \t{1}\t({2})'.format(
+                param_name, param['default'], param['description']))
+        else:
+            logger.info('\t\t{0}: \t{1}'.format(param_name,
+                                                param['default']))
+    logger.info('')
+
+
+@workflows.command(name='list',
+                   short_help='List workflows for a service')
+@aria.options.service_name(required=True)
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(service_name, model_storage, logger):
+    """List all workflows of a specific service
+    """
+    logger.info('Listing workflows for service {0}...'.format(service_name))
+    service = model_storage.service.get_by_name(service_name)
+    workflows = [wf.to_dict() for wf in sorted(service.workflows.values(), key=lambda w: w.name)]
+
+    defaults = {
+        'service_template_name': service.service_template_name,
+        'service_name': service.name
+    }
+    print_data(WORKFLOW_COLUMNS, workflows, 'Workflows:', defaults=defaults)
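
For reference, the mandatory/optional split performed by `show` can be sketched
in isolation as follows (a standalone example assuming plain-dict parameter
definitions, which is what the code above treats them as):

    def split_parameters(parameters):
        """Split workflow parameters into (mandatory, optional) by 'default'."""
        mandatory, optional = {}, {}
        for name, definition in parameters.items():
            # a parameter that declares a default value is optional
            target = optional if 'default' in definition else mandatory
            target[name] = definition
        return mandatory, optional

    mandatory, optional = split_parameters({
        'node_id': {'description': 'id of the node to operate on'},
        'retries': {'default': 3},
    })
    assert 'node_id' in mandatory and 'retries' in optional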

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/config.py
----------------------------------------------------------------------
diff --git a/aria/cli/config.py b/aria/cli/config.py
deleted file mode 100644
index d82886d..0000000
--- a/aria/cli/config.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI configuration
-"""
-
-import os
-import logging
-from getpass import getuser
-from tempfile import gettempdir
-
-from yaml import safe_load
-
-from .storage import config_file_path
-
-# path to a file where cli logs will be saved.
-logging_filename = os.path.join(gettempdir(), 'aria_cli_{0}.log'.format(getuser()))
-# loggers log level to show
-logger_level = logging.INFO
-# loggers log level to show
-colors = True
-
-import_resolver = None
-
-
-def load_configurations():
-    """
-    Dynamically load attributes into the config module from the ``config.yaml`` defined in the user
-    configuration directory
-    """
-    config_path = config_file_path()
-    with open(config_path) as config_file:
-        globals().update(safe_load(config_file) or {})

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/config/__init__.py
----------------------------------------------------------------------
diff --git a/aria/cli/config/__init__.py b/aria/cli/config/__init__.py
new file mode 100644
index 0000000..ae1e83e
--- /dev/null
+++ b/aria/cli/config/__init__.py
@@ -0,0 +1,14 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/config/config.py
----------------------------------------------------------------------
diff --git a/aria/cli/config/config.py b/aria/cli/config/config.py
new file mode 100644
index 0000000..7d76830
--- /dev/null
+++ b/aria/cli/config/config.py
@@ -0,0 +1,70 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import yaml
+import pkg_resources
+
+from jinja2.environment import Template
+
+
+class CliConfig(object):
+
+    def __init__(self, config_path):
+        with open(config_path) as f:
+            self._config = yaml.safe_load(f.read())
+
+    @classmethod
+    def create_config(cls, workdir):
+        config_path = os.path.join(workdir, 'config.yaml')
+        if not os.path.isfile(config_path):
+            config_template = pkg_resources.resource_string(
+                __package__,
+                'config_template.yaml')
+
+            default_values = {
+                'log_path': os.path.join(workdir, 'cli.log'),
+                'enable_colors': True
+            }
+
+            template = Template(config_template)
+            rendered = template.render(**default_values)
+            with open(config_path, 'w') as f:
+                f.write(rendered)
+                f.write(os.linesep)
+
+        return cls(config_path)
+
+    @property
+    def colors(self):
+        return self._config.get('colors', False)
+
+    @property
+    def logging(self):
+        return self.Logging(self._config.get('logging'))
+
+    class Logging(object):
+
+        def __init__(self, logging):
+            self._logging = logging or {}
+
+        @property
+        def filename(self):
+            return self._logging.get('filename')
+
+        @property
+        def loggers(self):
+            return self._logging.get('loggers', {})

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/config/config_template.yaml
----------------------------------------------------------------------
diff --git a/aria/cli/config/config_template.yaml b/aria/cli/config/config_template.yaml
new file mode 100644
index 0000000..13f2cf9
--- /dev/null
+++ b/aria/cli/config/config_template.yaml
@@ -0,0 +1,12 @@
+colors: {{ enable_colors }}
+
+logging:
+
+  # Path to the file where CLI logs will be saved.
+  filename: {{ log_path }}
+
+  # Log level per logger.
+  loggers:
+
+    # The main logger of the CLI; provides basic descriptions of executed operations.
+    aria.cli.main: info
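
A minimal usage sketch for the new config object (the workdir path is
hypothetical; create_config renders the template above on first use and reuses
the existing config.yaml on subsequent runs):

    import os

    from aria.cli.config.config import CliConfig

    workdir = '/tmp/aria-workdir'
    if not os.path.isdir(workdir):
        os.makedirs(workdir)

    config = CliConfig.create_config(workdir)
    print(config.colors)             # True, from the rendered template
    print(config.logging.filename)   # /tmp/aria-workdir/cli.log
    print(config.logging.loggers)    # {'aria.cli.main': 'info'}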

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/constants.py
----------------------------------------------------------------------
diff --git a/aria/cli/constants.py b/aria/cli/constants.py
new file mode 100644
index 0000000..67c094d
--- /dev/null
+++ b/aria/cli/constants.py
@@ -0,0 +1,18 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+SAMPLE_SERVICE_TEMPLATE_FILENAME = 'service_template.yaml'
+HELP_TEXT_COLUMN_BUFFER = 5

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/csar.py
----------------------------------------------------------------------
diff --git a/aria/cli/csar.py b/aria/cli/csar.py
index b185f46..5ab581b 100644
--- a/aria/cli/csar.py
+++ b/aria/cli/csar.py
@@ -14,12 +14,13 @@
 # limitations under the License.
 
 import os
+import logging
 import pprint
 import tempfile
 import zipfile
 
 import requests
-from ruamel import yaml # @UnresolvedImport
+from ruamel import yaml
 
 
 META_FILE = 'TOSCA-Metadata/TOSCA.meta'
@@ -167,5 +168,11 @@ class _CSARReader(object):
                     f.write(chunk)
 
 
-def read(source, destination, logger):
+def read(source, destination=None, logger=None):
+    destination = destination or tempfile.mkdtemp()
+    logger = logger or logging.getLogger('dummy')
     return _CSARReader(source=source, destination=destination, logger=logger)
+
+
+def is_csar_archive(source):
+    return source.endswith('.csar')
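
With the new defaults, callers no longer need to supply an extraction directory
or a logger. A minimal usage sketch, assuming /tmp/my-service.csar is an
existing CSAR file:

    from aria.cli import csar

    if csar.is_csar_archive('/tmp/my-service.csar'):
        # destination defaults to a fresh temp dir, logger to a placeholder logger
        reader = csar.read('/tmp/my-service.csar')
        print(reader.destination)
        print(reader.entry_definitions)  # main template inside the extracted CSAR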

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/dry.py
----------------------------------------------------------------------
diff --git a/aria/cli/dry.py b/aria/cli/dry.py
deleted file mode 100644
index 82faf42..0000000
--- a/aria/cli/dry.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from threading import RLock
-
-from ..modeling import models
-from ..orchestrator.decorators import operation
-from ..utils.collections import OrderedDict
-from ..utils.console import puts, Colored
-from ..utils.formatting import safe_repr
-
-
-_TERMINAL_LOCK = RLock()
-
-
-def convert_to_dry(service):
-    """
-    Converts all operations on the service (on workflows, node interfaces, and relationship
-    interfaces) to run dryly.
-    """
-
-    for workflow in service.workflows:
-        convert_operation_to_dry(workflow)
-
-    for node in service.nodes.itervalues():
-        for interface in node.interfaces.itervalues():
-            for oper in interface.operations.itervalues():
-                convert_operation_to_dry(oper)
-        for relationship in node.outbound_relationships:
-            for interface in relationship.interfaces.itervalues():
-                for oper in interface.operations.itervalues():
-                    convert_operation_to_dry(oper)
-
-
-def convert_operation_to_dry(oper):
-    """
-    Converts a single :class:`Operation` to run dryly.
-    """
-
-    plugin = oper.plugin_specification.name \
-        if oper.plugin_specification is not None else None
-    if oper.inputs is None:
-        oper.inputs = OrderedDict()
-    oper.inputs['_implementation'] = models.Parameter(name='_implementation',
-                                                      type_name='string',
-                                                      value=oper.implementation)
-    oper.inputs['_plugin'] = models.Parameter(name='_plugin',
-                                              type_name='string',
-                                              value=plugin)
-    oper.implementation = '{0}.{1}'.format(__name__, 'dry_operation')
-    oper.plugin_specification = None
-
-
-@operation
-def dry_operation(ctx, _plugin, _implementation, **kwargs):
-    """
-    The dry operation simply prints out information about the operation to the console.
-    """
-
-    with _TERMINAL_LOCK:
-        print ctx.name
-        if hasattr(ctx, 'relationship'):
-            puts('> Relationship: {0} -> {1}'.format(
-                Colored.red(ctx.relationship.source_node.name),
-                Colored.red(ctx.relationship.target_node.name)))
-        else:
-            puts('> Node: {0}'.format(Colored.red(ctx.node.name)))
-        puts('  Operation: {0}'.format(Colored.green(ctx.name)))
-        _dump_implementation(_plugin, _implementation)
-
-
-def _dump_implementation(plugin, implementation):
-    if plugin:
-        puts('  Plugin: {0}'.format(Colored.magenta(plugin, bold=True)))
-    if implementation:
-        puts('  Implementation: {0}'.format(Colored.magenta(safe_repr(implementation))))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/env.py
----------------------------------------------------------------------
diff --git a/aria/cli/env.py b/aria/cli/env.py
new file mode 100644
index 0000000..5d34141
--- /dev/null
+++ b/aria/cli/env.py
@@ -0,0 +1,118 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import json
+import pkgutil
+
+from .config import config
+from .logger import Logging
+from .. import (application_model_storage, application_resource_storage)
+from ..orchestrator.plugin import PluginManager
+from ..storage.sql_mapi import SQLAlchemyModelAPI
+from ..storage.filesystem_rapi import FileSystemResourceAPI
+
+
+ARIA_DEFAULT_WORKDIR_NAME = '.aria'
+
+
+class Environment(object):
+
+    def __init__(self, workdir):
+
+        self._workdir = workdir
+        self._init_workdir()
+
+        self._config = config.CliConfig.create_config(workdir)
+        self._logging = Logging(self._config)
+
+        self._model_storage_dir = os.path.join(workdir, 'models')
+        self._resource_storage_dir = os.path.join(workdir, 'resources')
+        self._plugins_dir = os.path.join(workdir, 'plugins')
+
+        # initialized lazily
+        self._model_storage = None
+        self._resource_storage = None
+        self._plugin_manager = None
+
+    @property
+    def workdir(self):
+        return self._workdir
+
+    @property
+    def config(self):
+        return self._config
+
+    @property
+    def logging(self):
+        return self._logging
+
+    @property
+    def model_storage(self):
+        if not self._model_storage:
+            self._model_storage = self._init_sqlite_model_storage()
+        return self._model_storage
+
+    @property
+    def resource_storage(self):
+        if not self._resource_storage:
+            self._resource_storage = self._init_fs_resource_storage()
+        return self._resource_storage
+
+    @property
+    def plugin_manager(self):
+        if not self._plugin_manager:
+            self._plugin_manager = self._init_plugin_manager()
+        return self._plugin_manager
+
+    @staticmethod
+    def get_version_data():
+        data = pkgutil.get_data(__package__, 'VERSION')
+        return json.loads(data)
+
+    def _init_workdir(self):
+        if not os.path.exists(self._workdir):
+            os.makedirs(self._workdir)
+
+    def _init_sqlite_model_storage(self):
+        if not os.path.exists(self._model_storage_dir):
+            os.makedirs(self._model_storage_dir)
+
+        initiator_kwargs = dict(base_dir=self._model_storage_dir)
+        return application_model_storage(
+            SQLAlchemyModelAPI,
+            initiator_kwargs=initiator_kwargs)
+
+    def _init_fs_resource_storage(self):
+        if not os.path.exists(self._resource_storage_dir):
+            os.makedirs(self._resource_storage_dir)
+
+        fs_kwargs = dict(directory=self._resource_storage_dir)
+        return application_resource_storage(
+            FileSystemResourceAPI,
+            api_kwargs=fs_kwargs)
+
+    def _init_plugin_manager(self):
+        if not os.path.exists(self._plugins_dir):
+            os.makedirs(self._plugins_dir)
+
+        return PluginManager(self._model_storage, self._plugins_dir)
+
+
+env = Environment(os.path.join(
+    os.environ.get('ARIA_WORKDIR', os.path.expanduser('~')), ARIA_DEFAULT_WORKDIR_NAME))
+
+logger = env.logging.logger
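
The module-level `env` object is presumably what the `aria.pass_*` decorators
hand to the commands; it can also be used directly (a sketch, assuming the
default ~/.aria workdir or an ARIA_WORKDIR override):

    from aria.cli.env import env

    # storages and the plugin manager are created lazily on first access
    model_storage = env.model_storage
    resource_storage = env.resource_storage
    print(env.workdir)
    print(env.config.logging.filename)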

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/cli/exceptions.py b/aria/cli/exceptions.py
index 6897731..89cfacd 100644
--- a/aria/cli/exceptions.py
+++ b/aria/cli/exceptions.py
@@ -13,59 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""
-CLI various exception classes
-"""
 
+from ..exceptions import AriaError
 
-class AriaCliError(Exception):
-    """
-    General CLI Exception class
-    """
-    pass
-
-
-class AriaCliFormatInputsError(AriaCliError):
-    """
-    Raised when provided inputs are malformed.
-    """
-
-    def __init__(self, message, inputs):
-        self.inputs = inputs
-        super(AriaCliFormatInputsError, self).__init__(message)
-
-    def user_message(self):
-        """
-        Describes the format error in detail.
-        """
-        return (
-            'Invalid input format: {0}, '
-            'the expected format is: '
-            'key1=value1;key2=value2'.format(self.inputs))
 
-
-class AriaCliYAMLInputsError(AriaCliError):
-    """
-    Raised when an invalid yaml file is provided
-    """
+class AriaCliError(AriaError):
     pass
-
-
-class AriaCliInvalidInputsError(AriaCliFormatInputsError):
-    """
-    Raised when provided inputs are invalid.
-    """
-
-    def user_message(self):
-        """
-        Describes the error in detail.
-        """
-        return (
-            'Invalid input: {0}. input must represent a dictionary.\n'
-            'Valid values can be one of:\n'
-            '- a path to a YAML file\n'
-            '- a path to a directory containing YAML files\n'
-            '- a single quoted wildcard based path (e.g. "*-inputs.yaml")\n'
-            '- a string formatted as JSON\n'
-            '- a string formatted as key1=value1;key2=value2'.format(self.inputs)
-        )

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/inputs.py
----------------------------------------------------------------------
diff --git a/aria/cli/inputs.py b/aria/cli/inputs.py
new file mode 100644
index 0000000..2077b67
--- /dev/null
+++ b/aria/cli/inputs.py
@@ -0,0 +1,118 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import glob
+import yaml
+
+from .env import logger
+from .exceptions import AriaCliError
+
+
+def inputs_to_dict(resources):
+    """Returns a dictionary of inputs
+
+    `resources` is an iterable of input sources; each source can be:
+    - a path to a single YAML file
+    - a path to a directory containing YAML files
+    - a wildcard-based path (e.g. *-inputs.yaml)
+    - a string formatted as JSON/YAML
+    - a string formatted as key1=value1;key2=value2
+    """
+    if not resources:
+        return dict()
+
+    parsed_dict = {}
+
+    for resource in resources:
+        logger.debug('Processing inputs source: {0}'.format(resource))
+        # Workflow parameters are passed in as an (empty) dict rather than a string; skip them
+        if isinstance(resource, basestring):
+            try:
+                parsed_dict.update(_parse_single_input(resource))
+            except AriaCliError:
+                raise AriaCliError(
+                    "Invalid input: {0}. It must represent a dictionary. "
+                    "Valid values can be one of:\n "
+                    "- A path to a YAML file\n "
+                    "- A path to a directory containing YAML files\n "
+                    "- A single quoted wildcard based path "
+                    "(e.g. '*-inputs.yaml')\n "
+                    "- A string formatted as JSON/YAML\n "
+                    "- A string formatted as key1=value1;key2=value2".format(
+                        resource))
+    return parsed_dict
+
+
+def _parse_single_input(resource):
+    try:
+        # parse resource as string representation of a dictionary
+        return plain_string_to_dict(resource)
+    except AriaCliError:
+        input_files = glob.glob(resource)
+        parsed_dict = dict()
+        if os.path.isdir(resource):
+            for input_file in os.listdir(resource):
+                parsed_dict.update(
+                    _parse_yaml_path(os.path.join(resource, input_file)))
+        elif input_files:
+            for input_file in input_files:
+                parsed_dict.update(_parse_yaml_path(input_file))
+        else:
+            parsed_dict.update(_parse_yaml_path(resource))
+    return parsed_dict
+
+
+def _parse_yaml_path(resource):
+
+    try:
+        # if resource is a path - parse as a yaml file
+        if os.path.isfile(resource):
+            with open(resource) as f:
+                content = yaml.safe_load(f.read())
+        else:
+            # parse resource content as yaml
+            content = yaml.safe_load(resource)
+    except yaml.error.YAMLError as e:
+        raise AriaCliError("'{0}' is not a valid YAML. {1}".format(
+            resource, str(e)))
+
+    # Empty files return None
+    content = content or dict()
+    if not isinstance(content, dict):
+        raise AriaCliError()
+
+    return content
+
+
+def plain_string_to_dict(input_string):
+    input_string = input_string.strip()
+    input_dict = {}
+    mapped_inputs = input_string.split(';')
+    for mapped_input in mapped_inputs:
+        mapped_input = mapped_input.strip()
+        if not mapped_input:
+            continue
+        split_mapping = mapped_input.split('=')
+        try:
+            key = split_mapping[0].strip()
+            value = split_mapping[1].strip()
+        except IndexError:
+            raise AriaCliError(
+                "Invalid input format: {0}, the expected format is: "
+                "key1=value1;key2=value2".format(input_string))
+        input_dict[key] = value
+    return input_dict
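
A minimal usage sketch of the accepted string formats (file, directory and
wildcard sources work the same way but require existing paths):

    from aria.cli.inputs import inputs_to_dict, plain_string_to_dict

    # key1=value1;key2=value2 pairs
    print(plain_string_to_dict('port=8080;host=localhost'))
    # -> {'port': '8080', 'host': 'localhost'}

    # JSON/YAML formatted strings go through the same entry point
    print(inputs_to_dict(['{"replicas": 2, "flavor": "small"}']))
    # -> {'replicas': 2, 'flavor': 'small'}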

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/logger.py
----------------------------------------------------------------------
diff --git a/aria/cli/logger.py b/aria/cli/logger.py
new file mode 100644
index 0000000..289dbd3
--- /dev/null
+++ b/aria/cli/logger.py
@@ -0,0 +1,113 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import copy
+import logging
+import logging.config
+
+
+HIGH_VERBOSE = 3
+MEDIUM_VERBOSE = 2
+LOW_VERBOSE = 1
+NO_VERBOSE = 0
+
+DEFAULT_LOGGER_CONFIG = {
+    "version": 1,
+    "formatters": {
+        "file": {
+            "format": "%(asctime)s [%(levelname)s] %(message)s"
+        },
+        "console": {
+            "format": "%(message)s"
+        }
+    },
+    "handlers": {
+        "file": {
+            "class": "logging.handlers.RotatingFileHandler",
+            "formatter": "file",
+            "maxBytes": "5000000",
+            "backupCount": "20"
+        },
+        "console": {
+            "class": "logging.StreamHandler",
+            "stream": "ext://sys.stdout",
+            "formatter": "console"
+        }
+    }
+}
+
+
+class Logging(object):
+
+    def __init__(self, config):
+        self._log_file = None
+        self._verbosity_level = NO_VERBOSE
+        self._all_loggers = []
+        self._configure_loggers(config)
+        self._lgr = logging.getLogger('aria.cli.main')
+
+    @property
+    def logger(self):
+        return self._lgr
+
+    @property
+    def log_file(self):
+        return self._log_file
+
+    @property
+    def verbosity_level(self):
+        return self._verbosity_level
+
+    def is_high_verbose_level(self):
+        return self.verbosity_level == HIGH_VERBOSE
+
+    @verbosity_level.setter
+    def verbosity_level(self, level):
+        self._verbosity_level = level
+        if self.is_high_verbose_level():
+            for logger_name in self._all_loggers:
+                logging.getLogger(logger_name).setLevel(logging.DEBUG)
+
+    def _configure_loggers(self, config):
+        loggers_config = config.logging.loggers
+        logfile = config.logging.filename
+
+        logger_dict = copy.deepcopy(DEFAULT_LOGGER_CONFIG)
+        if logfile:
+            # set filename on file handler
+            logger_dict['handlers']['file']['filename'] = logfile
+            logfile_dir = os.path.dirname(logfile)
+            if not os.path.exists(logfile_dir):
+                os.makedirs(logfile_dir)
+            self._log_file = logfile
+        else:
+            del logger_dict['handlers']['file']
+
+        # add handlers to all loggers
+        loggers = {}
+        for logger_name in loggers_config:
+            loggers[logger_name] = dict(handlers=list(logger_dict['handlers'].keys()))
+        logger_dict['loggers'] = loggers
+
+        # set level for all loggers
+        for logger_name, logging_level in loggers_config.iteritems():
+            log = logging.getLogger(logger_name)
+            level = logging._levelNames[logging_level.upper()]
+            log.setLevel(level)
+            self._all_loggers.append(logger_name)
+
+        logging.config.dictConfig(logger_dict)
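
A sketch of how the verbosity knob interacts with the configured loggers
(the workdir is hypothetical, and HIGH_VERBOSE is presumably what repeated
--verbose flags map to):

    import os

    from aria.cli.config.config import CliConfig
    from aria.cli.logger import Logging, HIGH_VERBOSE

    workdir = '/tmp/aria-workdir'
    if not os.path.isdir(workdir):
        os.makedirs(workdir)

    logging_ = Logging(CliConfig.create_config(workdir))
    logging_.logger.info('hello from aria.cli.main')

    # raising the verbosity flips every configured logger to DEBUG
    logging_.verbosity_level = HIGH_VERBOSE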

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/main.py
----------------------------------------------------------------------
diff --git a/aria/cli/main.py b/aria/cli/main.py
new file mode 100644
index 0000000..4544e40
--- /dev/null
+++ b/aria/cli/main.py
@@ -0,0 +1,73 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#TODO handle
+if __name__ == '__main__' and __package__ is None:
+    import aria.cli
+    __package__ = 'aria.cli'
+
+# from . import env
+from . import logger
+from .cli import aria
+from .commands import service_templates
+from .commands import node_templates
+from .commands import services
+from .commands import nodes
+from .commands import workflows
+from .commands import executions
+from .commands import plugins
+from .commands import logs
+from .. import install_aria_extensions
+
+
+@aria.group(name='aria')
+@aria.options.verbose()
+@aria.options.version
+def _aria():
+    """ARIA's Command Line Interface
+
+    To activate bash-completion, run: `eval "$(_ARIA_COMPLETE=source aria)"`
+
+    ARIA's working directory resides by default in ~/.aria. To change it, set
+    the environment variable `ARIA_WORKDIR` to something else (e.g. /tmp/).
+    """
+    aria.set_cli_except_hook()
+
+
+def _register_commands():
+    """
+    Register the CLI's commands.
+    """
+
+    _aria.add_command(service_templates.service_templates)
+    _aria.add_command(node_templates.node_templates)
+    _aria.add_command(services.services)
+    _aria.add_command(nodes.nodes)
+    _aria.add_command(workflows.workflows)
+    _aria.add_command(executions.executions)
+    _aria.add_command(plugins.plugins)
+    _aria.add_command(logs.logs)
+
+
+_register_commands()
+
+
+def main():
+    install_aria_extensions()
+    _aria()
+
+
+if __name__ == '__main__':
+    main()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/service_template_utils.py
----------------------------------------------------------------------
diff --git a/aria/cli/service_template_utils.py b/aria/cli/service_template_utils.py
new file mode 100644
index 0000000..4ef4ff1
--- /dev/null
+++ b/aria/cli/service_template_utils.py
@@ -0,0 +1,140 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from urlparse import urlparse
+
+from . import csar
+from . import utils
+from .exceptions import AriaCliError
+from .constants import SAMPLE_SERVICE_TEMPLATE_FILENAME
+from ..utils import archive as archive_utils
+
+
+def get(source, service_template_filename=SAMPLE_SERVICE_TEMPLATE_FILENAME):
+    """Get a source and return a path to the main service template file
+
+    The behavior, based on the source argument, is:
+        - local archive:
+            extract it locally and return the path to the service template file
+        - local yaml file: return the file as-is
+        - URL:
+            download the archive and return the service template file from it
+        - github repo (`org/repo[:tag/branch]`):
+            map it to an archive URL, download it, and return the service
+            template file from it
+
+    Supported archive types are: csar, zip, tar, tar.gz and tar.bz2
+
+    :param source: Path/URL/github repo to an archive or a service-template file
+    :type source: str
+    :param service_template_filename: Path to the service template file within the archive
+    :type service_template_filename: str
+    :return: Path to the main service template file
+    :rtype: str
+
+    """
+    if urlparse(source).scheme:
+        downloaded_file = utils.download_file(source)
+        return _get_service_template_file_from_archive(
+            downloaded_file, service_template_filename)
+    elif os.path.isfile(source):
+        if _is_archive(source):
+            return _get_service_template_file_from_archive(source, service_template_filename)
+        else:
+            # Maybe check if yaml.
+            return source
+    elif len(source.split('/')) == 2:
+        url = _map_to_github_url(source)
+        downloaded_file = utils.download_file(url)
+        return _get_service_template_file_from_archive(
+            downloaded_file, service_template_filename)
+    else:
+        raise AriaCliError(
+            'You must provide either a path to a local file, a remote URL '
+            'or a GitHub `organization/repository[:tag/branch]`')
+
+
+def _get_service_template_file_from_archive(archive, service_template_filename):
+    """Extract archive to temporary location and get path to service template file.
+
+    :param archive: Path to archive file
+    :type archive: str
+    :param service_template_filename: Path to service template file relative to archive
+    :type service_template_filename: str
+    :return: Absolute path to service template file
+    :rtype: str
+
+    """
+    if csar.is_csar_archive(archive):
+        service_template_file = _extract_csar_archive(archive)
+    else:
+        extract_directory = archive_utils.extract_archive(archive)
+        service_template_dir = os.path.join(
+            extract_directory,
+            os.listdir(extract_directory)[0],
+        )
+        service_template_file = os.path.join(service_template_dir, service_template_filename)
+
+    if not os.path.isfile(service_template_file):
+        raise AriaCliError(
+            'Could not find `{0}`. Please provide the name of the main '
+            'service template file by using the `-n/--service-template-filename` flag'
+            .format(service_template_filename))
+    return service_template_file
+
+
+def _map_to_github_url(source):
+    """Returns a path to a downloaded github archive.
+
+    :param source: github repo in the format of `org/repo[:tag/branch]`.
+    :type source: str
+    :return: URL to the archive file for the given repo in github
+    :rtype: str
+
+    """
+    source_parts = source.split(':', 1)
+    repo = source_parts[0]
+    tag = source_parts[1] if len(source_parts) == 2 else 'master'
+    url = 'https://github.com/{0}/archive/{1}.tar.gz'.format(repo, tag)
+    return url
+
+
+def generate_id(service_template_path, service_template_filename=SAMPLE_SERVICE_TEMPLATE_FILENAME):
+    """The name of the service template will be the name of the folder.
+    If service_template_filename is provided, it will be appended to the folder.
+    """
+    service_template_id = os.path.split(os.path.dirname(os.path.abspath(
+        service_template_path)))[-1]
+    if not service_template_filename == SAMPLE_SERVICE_TEMPLATE_FILENAME:
+        filename, _ = os.path.splitext(os.path.basename(service_template_filename))
+        service_template_id = (service_template_id + '.' + filename)
+    return service_template_id.replace('_', '-')
+
+
+def _is_archive(source):
+    return archive_utils.is_archive(source) or csar.is_csar_archive(source)
+
+
+def _extract_csar_archive(archive):
+    if csar.is_csar_archive(archive):
+        reader = csar.read(source=archive)
+        main_service_template_file_name = os.path.basename(reader.entry_definitions)
+        return os.path.join(reader.destination,
+                            main_service_template_file_name)
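
A short sketch of how callers are expected to resolve a source (the paths and
repo below are hypothetical; remote sources are downloaded before extraction):

    from aria.cli import service_template_utils

    # local YAML file: returned as-is
    path = service_template_utils.get('/tmp/my_app/service_template.yaml')

    # GitHub shorthand: mapped to an archive URL, downloaded and extracted
    path = service_template_utils.get('apache/incubator-ariatosca:master')

    # the derived template name is based on the containing directory
    print(service_template_utils.generate_id('/tmp/my_app/service_template.yaml'))
    # -> 'my-app'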

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/storage.py
----------------------------------------------------------------------
diff --git a/aria/cli/storage.py b/aria/cli/storage.py
deleted file mode 100644
index fa1518b..0000000
--- a/aria/cli/storage.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Filesystem related CLI storage location and configuration
-"""
-
-import os
-import getpass
-from shutil import rmtree
-
-work_space_directory = '.aria'
-storage_directory_name = 'local-storage'
-
-
-def user_space(user_name=getpass.getuser()):
-    """
-    Base work directory
-    """
-    user_path = '~{0}'.format(user_name)
-    real_path = os.path.expanduser(user_path)
-    if os.path.exists(real_path):
-        return os.path.join(real_path, work_space_directory)
-    return os.path.join(os.getcwd(), work_space_directory)
-
-
-def local_storage(user_name=getpass.getuser()):
-    """
-    Base storage directory
-    """
-    return os.path.join(user_space(user_name), storage_directory_name)
-
-
-def local_model_storage():
-    """
-    Model storage directory
-    """
-    return os.path.join(local_storage(), 'models')
-
-
-def local_resource_storage():
-    """
-    Resource storage directory
-    """
-    return os.path.join(local_storage(), 'resources')
-
-
-def config_file_path():
-    """
-    Configuration file path
-    """
-    path = os.path.join(user_space(), 'config.yaml')
-    if not os.path.exists(path):
-        open(path, 'w').close()
-    return path
-
-
-def create_user_space(user_name=getpass.getuser(), override=False):
-    """
-    Creates the base work directory
-    """
-    path = user_space(user_name)
-    if os.path.exists(path):
-        if override:
-            rmtree(path, ignore_errors=True)
-        else:
-            raise IOError('user space {0} already exists'.format(path))
-    os.mkdir(path)
-    return path
-
-
-def create_local_storage(user_name=getpass.getuser(), override=False):
-    """
-    Creates the base storage directory
-    """
-    path = local_storage(user_name)
-    if os.path.exists(path):
-        if override:
-            rmtree(path, ignore_errors=True)
-        else:
-            raise IOError('local storage {0} already exists'.format(path))
-    os.mkdir(path)
-    return path

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/table.py
----------------------------------------------------------------------
diff --git a/aria/cli/table.py b/aria/cli/table.py
new file mode 100644
index 0000000..9c195f5
--- /dev/null
+++ b/aria/cli/table.py
@@ -0,0 +1,90 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from datetime import datetime
+
+from .env import logger
+
+from prettytable import PrettyTable
+
+
+def generate(cols, data, defaults=None):
+    """
+    Return a new PrettyTable instance representing the list.
+
+    Arguments:
+
+        cols - An iterable of strings specifying the columns
+               of the table.
+
+               for example: ['id', 'name']
+
+        data - An iterable of dictionaries; each dictionary must
+               have keys corresponding to the cols items.
+
+               for example: [{'id': '123', 'name': 'Pete'}]
+
+        defaults - A dictionary specifying default values for
+                   keys that don't exist in the data itself.
+
+                   for example: {'serviceId': '123'} will set the
+                   serviceId value for all rows to '123'.
+
+    """
+    def get_values_per_column(column, row_data):
+        if column in row_data:
+            if row_data[column] and isinstance(row_data[column], basestring):
+                try:
+                    datetime.strptime(row_data[column][:10], '%Y-%m-%d')
+                    row_data[column] = \
+                        row_data[column].replace('T', ' ').replace('Z', ' ')
+                except ValueError:
+                    # not a timestamp
+                    pass
+            elif row_data[column] and isinstance(row_data[column], list):
+                row_data[column] = ','.join(row_data[column])
+            elif not row_data[column]:
+                # if it's empty list, don't print []
+                row_data[column] = ''
+            return row_data[column]
+        else:
+            return defaults[column]
+
+    pt = PrettyTable([col for col in cols])
+
+    for d in data:
+        values_row = []
+        for c in cols:
+            values_row.append(get_values_per_column(c, d))
+        pt.add_row(values_row)
+
+    return pt
+
+
+def log(title, tb):
+    logger.info('{0}{1}{0}{2}{0}'.format(os.linesep, title, tb))
+
+
+def print_data(columns, items, header_text, max_width=None, defaults=None):
+    if items is None:
+        items = []
+    elif not isinstance(items, list):
+        items = [items]
+
+    pt = generate(columns, data=items, defaults=defaults)
+    if max_width:
+        pt.max_width = max_width
+    log(header_text, pt)
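
A minimal usage sketch of the table helpers (standalone data, no storage
involved):

    from aria.cli.table import generate, print_data

    rows = [{'id': '123', 'name': 'Pete', 'created_at': '2017-04-02T13:05:26Z'}]

    # columns missing from the data fall back to the defaults mapping
    table = generate(['id', 'name', 'created_at', 'service_name'],
                     data=rows,
                     defaults={'service_name': 'my-service'})
    print(table)

    # print_data wraps generate() and logs the table under a header
    print_data(['id', 'name'], rows, 'Items:')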

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/utils.py
----------------------------------------------------------------------
diff --git a/aria/cli/utils.py b/aria/cli/utils.py
new file mode 100644
index 0000000..3b68729
--- /dev/null
+++ b/aria/cli/utils.py
@@ -0,0 +1,152 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import string
+import random
+import tempfile
+from StringIO import StringIO
+
+from backports.shutil_get_terminal_size import get_terminal_size
+import requests
+
+from .env import logger
+from .exceptions import AriaCliError
+
+
+def dump_to_file(collection, file_path):
+    with open(file_path, 'a') as f:
+        f.write(os.linesep.join(collection))
+        f.write(os.linesep)
+
+
+def is_virtual_env():
+    return hasattr(sys, 'real_prefix')
+
+
+def storage_sort_param(sort_by, descending):
+    return {sort_by: 'desc' if descending else 'asc'}
+
+
+def generate_random_string(size=6,
+                           chars=string.ascii_uppercase + string.digits):
+    return ''.join(random.choice(chars) for _ in range(size))
+
+
+def generate_suffixed_id(id):
+    return '{0}_{1}'.format(id, generate_random_string())
+
+
+def get_parameter_templates_as_string(parameter_templates):
+    params_string = StringIO()
+
+    for param_name, param_template in parameter_templates.iteritems():
+        params_string.write('\t{0}:{1}'.format(param_name, os.linesep))
+        param_dict = param_template.to_dict()
+        del param_dict['id']  # not interested in printing the id
+        for k, v in param_dict.iteritems():
+            params_string.write('\t\t{0}: {1}{2}'.format(k, v, os.linesep))
+
+    params_string.write(os.linesep)
+    return params_string.getvalue()
+
+
+def download_file(url, destination=None):
+    """Download file.
+
+    :param url: Location of the file to download
+    :type url: str
+    :param destination:
+        Location where the file should be saved (autogenerated by default)
+    :type destination: str | None
+    :returns: Location where the file was saved
+    :rtype: str
+
+    """
+    CHUNK_SIZE = 1024
+
+    if not destination:
+        fd, destination = tempfile.mkstemp()
+        os.close(fd)
+    logger.info('Downloading {0} to {1}...'.format(url, destination))
+
+    try:
+        response = requests.get(url, stream=True)
+    except requests.exceptions.RequestException as ex:
+        raise AriaCliError(
+            'Failed to download {0}. ({1})'.format(url, str(ex)))
+
+    final_url = response.url
+    if final_url != url:
+        logger.debug('Redirected to {0}'.format(final_url))
+
+    try:
+        with open(destination, 'wb') as destination_file:
+            for chunk in response.iter_content(CHUNK_SIZE):
+                destination_file.write(chunk)
+    except IOError as ex:
+        raise AriaCliError(
+            'Failed to download {0}. ({1})'.format(url, str(ex)))
+
+    return destination
+
+
+def generate_progress_handler(file_path, action='', max_bar_length=80):
+    """Returns a function that prints a progress bar in the terminal
+
+    :param file_path: The name of the file being transferred
+    :param action: Uploading/Downloading
+    :param max_bar_length: Maximum allowed length of the bar. Default: 80
+    :return: The configured print_progress function
+    """
+    # We want to limit the maximum line length to 80, but allow for a smaller
+    # terminal size. We also include the action string, and some extra chars
+    terminal_width = get_terminal_size().columns
+
+    # This takes care of the case where there is no terminal (e.g. unittest)
+    terminal_width = terminal_width or max_bar_length
+    bar_length = min(max_bar_length, terminal_width) - len(action) - 12
+
+    # Shorten the file name if it's too long
+    file_name = os.path.basename(file_path)
+    if len(file_name) > (bar_length / 4) + 3:
+        file_name = file_name[:bar_length / 4] + '...'
+
+    bar_length -= len(file_name)
+
+    def print_progress(read_bytes, total_bytes):
+        """Print upload/download progress on a single line
+
+        Call this function in a loop to create a progress bar in the terminal
+
+        :param read_bytes: Number of bytes already processed
+        :param total_bytes: Total number of bytes in the file
+        """
+
+        filled_length = min(bar_length, int(round(bar_length * read_bytes /
+                                                  float(total_bytes))))
+        percents = min(100.00, round(
+            100.00 * (read_bytes / float(total_bytes)), 2))
+        bar = '#' * filled_length + '-' * (bar_length - filled_length)
+
+        # The \r (carriage return) makes sure the cursor moves back to the
+        # beginning of the line
+        sys.stdout.write('\r{0} {1} |{2}| {3}%'.format(
+            action, file_name, bar, percents))
+        if read_bytes >= total_bytes:
+            sys.stdout.write('\n')
+
+    return print_progress

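For reference, a minimal usage sketch of the progress helper above (illustrative
path, URL and byte counts; assumes the aria.cli package and its dependencies are
importable):

    from aria.cli import utils

    progress = utils.generate_progress_handler('/tmp/archive.csar', action='Downloading')
    total_bytes = 4 * 1024 * 1024
    for read_bytes in range(0, total_bytes + 1, 1024 * 1024):
        progress(read_bytes, total_bytes)    # redraws a single '\r'-terminated line

    # download_file() itself just streams the response to a temp file and returns its path
    saved_to = utils.download_file('http://example.org/archive.csar')
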
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/core.py
----------------------------------------------------------------------
diff --git a/aria/core.py b/aria/core.py
new file mode 100644
index 0000000..96a967f
--- /dev/null
+++ b/aria/core.py
@@ -0,0 +1,116 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from . import exceptions
+from .modeling import models
+from .modeling import utils as modeling_utils
+from .parser.consumption import (
+    ConsumptionContext,
+    ConsumerChain,
+    Read,
+    Validate,
+    ServiceTemplate)
+from .parser.loading.location import UriLocation
+
+
+class Core(object):
+
+    def __init__(self,
+                 model_storage,
+                 resource_storage,
+                 plugin_manager):
+        self._model_storage = model_storage
+        self._resource_storage = resource_storage
+        self._plugin_manager = plugin_manager
+
+    @property
+    def model_storage(self):
+        return self._model_storage
+
+    @property
+    def resource_storage(self):
+        return self._resource_storage
+
+    @property
+    def plugin_manager(self):
+        return self._plugin_manager
+
+    def validate_service_template(self, service_template_path):
+        self._parse_service_template(service_template_path)
+
+    def create_service_template(self, service_template_path, service_template_dir,
+                                service_template_name):
+        context = self._parse_service_template(service_template_path)
+        service_template = context.modeling.template
+        service_template.name = service_template_name
+        self.model_storage.service_template.put(service_template)
+        self.resource_storage.service_template.upload(
+            entry_id=str(service_template.id), source=service_template_dir)
+
+    def delete_service_template(self, service_template_id):
+        service_template = self.model_storage.service_template.get(service_template_id)
+        if service_template.services.all():
+            raise exceptions.DependentServicesError(
+                "Can't delete service template {0} - Service template has existing services")
+
+        self.model_storage.service_template.delete(service_template)
+        self.resource_storage.service_template.delete(entry_id=str(service_template.id))
+
+    def create_service(self, service_template_name, inputs, service_name=None):
+        service_template = self.model_storage.service_template.get_by_name(service_template_name)
+
+        # creating an empty ConsumptionContext, initiating a threadlocal context
+        ConsumptionContext()
+        with self.model_storage._all_api_kwargs['session'].no_autoflush:
+            service = service_template.instantiate(None)
+
+        template_inputs = service_template.inputs
+        service.inputs = modeling_utils.create_inputs(inputs, template_inputs)
+        # TODO: now that we have inputs, we should scan properties and inputs and evaluate functions
+
+        # first put the service model so it gets an id, which serves as a fallback for its name
+        self.model_storage.service.put(service)
+        service.name = service_name or '{0}_{1}'.format(service_template_name, service.id)
+        self.model_storage.service.update(service)
+        return service
+
+    def delete_service(self, service_name, force=False):
+        service = self.model_storage.service.get_by_name(service_name)
+
+        active_executions = [e for e in service.executions
+                             if e.status in models.Execution.ACTIVE_STATES]
+        if active_executions:
+            raise exceptions.DependentActiveExecutionsError(
+                "Can't delete service {0} - there is an active execution for this service. "
+                "Active execution id: {1}".format(service_name, active_executions[0].id))
+
+        if not force:
+            available_nodes = [n for n in service.nodes.values()
+                               if n.state not in ('deleted', 'errored')]
+            if available_nodes:
+                raise exceptions.DependentAvailableNodesError(
+                    "Can't delete service {0} - there are available nodes for this service. "
+                    "Available node ids: {1}".format(service_name, available_nodes))
+
+        self.model_storage.service.delete(service)
+
+    @staticmethod
+    def _parse_service_template(service_template_path):
+        context = ConsumptionContext()
+        context.presentation.location = UriLocation(service_template_path)
+        ConsumerChain(context, (Read, Validate, ServiceTemplate)).consume()
+        if context.validation.dump_issues():
+            raise exceptions.ParsingError('Failed to parse service template')
+        return context

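A rough sketch of how this facade might be driven (illustrative paths, names and
inputs; model_storage, resource_storage and plugin_manager are assumed to have been
constructed beforehand via ARIA's storage APIs):

    from aria.core import Core

    core = Core(model_storage, resource_storage, plugin_manager)
    core.validate_service_template('/path/to/service-template.yaml')
    core.create_service_template('/path/to/service-template.yaml',
                                 service_template_dir='/path/to',
                                 service_template_name='my-template')
    service = core.create_service('my-template', inputs={'webserver_port': 8080})
    # ... execute workflows against the service ...
    core.delete_service(service.name)
    template = core.model_storage.service_template.get_by_name('my-template')
    core.delete_service_template(template.id)
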
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/exceptions.py b/aria/exceptions.py
index a180ce1..72adda5 100644
--- a/aria/exceptions.py
+++ b/aria/exceptions.py
@@ -44,3 +44,19 @@ class AriaException(Exception):
                 # Make sure it's our traceback
                 cause_traceback = traceback
         self.cause_traceback = cause_traceback
+
+
+class DependentServicesError(AriaError):
+    pass
+
+
+class DependentActiveExecutionsError(AriaError):
+    pass
+
+
+class DependentAvailableNodesError(AriaError):
+    pass
+
+
+class ParsingError(AriaError):
+    pass

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/aria/modeling/__init__.py b/aria/modeling/__init__.py
index 4dfc39d..4ac79e7 100644
--- a/aria/modeling/__init__.py
+++ b/aria/modeling/__init__.py
@@ -19,6 +19,7 @@ from . import (
     mixins,
     types,
     models,
+    utils,
     service_template as _service_template_bases,
     service_instance as _service_instance_bases,
     service_changes as _service_changes_bases,
@@ -45,4 +46,5 @@ __all__ = (
     'types',
     'models',
     'model_bases',
+    'utils'
 )

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/modeling/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/modeling/exceptions.py b/aria/modeling/exceptions.py
index 6931c78..f699560 100644
--- a/aria/modeling/exceptions.py
+++ b/aria/modeling/exceptions.py
@@ -32,3 +32,21 @@ class CannotEvaluateFunctionException(ModelingException):
     """
     ARIA modeling exception: cannot evaluate the function at this time.
     """
+
+
+class MissingRequiredInputsException(ModelingException):
+    """
+    ARIA modeling exception: Required inputs have been omitted
+    """
+
+
+class InputOfWrongTypeException(ModelingException):
+    """
+    ARIA modeling exception: Inputs of the wrong types have been provided
+    """
+
+
+class UndeclaredInputsException(ModelingException):
+    """
+    ARIA modeling exception: Undeclared inputs have been provided
+    """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/modeling/orchestration.py
----------------------------------------------------------------------
diff --git a/aria/modeling/orchestration.py b/aria/modeling/orchestration.py
index f0bd4b2..15abde4 100644
--- a/aria/modeling/orchestration.py
+++ b/aria/modeling/orchestration.py
@@ -55,9 +55,7 @@ class ExecutionBase(ModelMixin):
     __tablename__ = 'execution'
 
     __private_fields__ = ['service_fk',
-                          'service_name',
-                          'service_template',
-                          'service_template_name']
+                          'service_template']
 
     TERMINATED = 'terminated'
     FAILED = 'failed'
@@ -97,7 +95,6 @@ class ExecutionBase(ModelMixin):
     ended_at = Column(DateTime, nullable=True, index=True)
     error = Column(Text, nullable=True)
     is_system_workflow = Column(Boolean, nullable=False, default=False)
-    parameters = Column(Dict)
     status = Column(Enum(*STATES, name='execution_status'), default=PENDING)
     workflow_name = Column(Text)
 
@@ -121,6 +118,10 @@ class ExecutionBase(ModelMixin):
     def tasks(cls):
         return relationship.one_to_many(cls, 'task')
 
+    @declared_attr
+    def inputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+
     # region foreign keys
 
     @declared_attr
@@ -227,10 +228,7 @@ class TaskBase(ModelMixin):
     __private_fields__ = ['node_fk',
                           'relationship_fk',
                           'plugin_fk',
-                          'execution_fk',
-                          'node_name',
-                          'relationship_name',
-                          'execution_name']
+                          'execution_fk']
 
     PENDING = 'pending'
     RETRYING = 'retrying'

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/modeling/service_changes.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_changes.py b/aria/modeling/service_changes.py
index b1a75a2..1974424 100644
--- a/aria/modeling/service_changes.py
+++ b/aria/modeling/service_changes.py
@@ -45,9 +45,7 @@ class ServiceUpdateBase(ModelMixin):
     __tablename__ = 'service_update'
 
     __private_fields__ = ['service_fk',
-                          'execution_fk',
-                          'execution_name',
-                          'service_name']
+                          'execution_fk']
 
     created_at = Column(DateTime, nullable=False, index=True)
     service_plan = Column(Dict, nullable=False)
@@ -125,8 +123,7 @@ class ServiceUpdateStepBase(ModelMixin):
 
     __tablename__ = 'service_update_step'
 
-    __private_fields__ = ['service_update_fk',
-                          'service_update_name']
+    __private_fields__ = ['service_update_fk']
 
     _action_types = namedtuple('ACTION_TYPES', 'ADD, REMOVE, MODIFY')
     ACTION_TYPES = _action_types(ADD='add', REMOVE='remove', MODIFY='modify')
@@ -222,8 +219,7 @@ class ServiceModificationBase(ModelMixin):
 
     __tablename__ = 'service_modification'
 
-    __private_fields__ = ['service_fk',
-                          'service_name']
+    __private_fields__ = ['service_fk']
 
     STARTED = 'started'
     FINISHED = 'finished'

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/modeling/service_instance.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py
index e6c2b12..48615af 100644
--- a/aria/modeling/service_instance.py
+++ b/aria/modeling/service_instance.py
@@ -86,8 +86,7 @@ class ServiceBase(InstanceModelMixin):
     __tablename__ = 'service'
 
     __private_fields__ = ['substitution_fk',
-                          'service_template_fk',
-                          'service_template_name']
+                          'service_template_fk']
 
     # region foreign keys
 
@@ -371,8 +370,7 @@ class NodeBase(InstanceModelMixin):
     __private_fields__ = ['type_fk',
                           'host_fk',
                           'service_fk',
-                          'node_template_fk',
-                          'service_name']
+                          'node_template_fk']
 
     INITIAL = 'initial'
     CREATING = 'creating'
@@ -452,6 +450,11 @@ class NodeBase(InstanceModelMixin):
         """Required for use by SQLAlchemy queries"""
         return association_proxy('service', 'name')
 
+    @declared_attr
+    def node_template_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('node_template', 'name')
+
     # endregion
 
     # region one_to_one relationships
@@ -1159,9 +1162,7 @@ class RelationshipBase(InstanceModelMixin):
                           'target_node_fk',
                           'target_capability_fk',
                           'requirement_template_fk',
-                          'relationship_template_fk',
-                          'source_node_name',
-                          'target_node_name']
+                          'relationship_template_fk']
 
     # region foreign keys
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
index 8355521..86cf81a 100644
--- a/aria/modeling/service_template.py
+++ b/aria/modeling/service_template.py
@@ -287,7 +287,6 @@ class ServiceTemplateBase(TemplateModelMixin):
                                  updated_at=now,
                                  description=deepcopy_with_locators(self.description),
                                  service_template=self)
-        #service.name = '{0}_{1}'.format(self.name, service.id)
 
         context.modeling.instance = service
 
@@ -309,12 +308,6 @@ class ServiceTemplateBase(TemplateModelMixin):
         utils.instantiate_dict(self, service.inputs, self.inputs)
         utils.instantiate_dict(self, service.outputs, self.outputs)
 
-        for name, the_input in context.modeling.inputs.iteritems():
-            if name not in service.inputs:
-                context.validation.report('input "{0}" is not supported'.format(name))
-            else:
-                service.inputs[name].value = the_input
-
         return service
 
     def validate(self):
@@ -438,8 +431,7 @@ class NodeTemplateBase(TemplateModelMixin):
     __tablename__ = 'node_template'
 
     __private_fields__ = ['type_fk',
-                          'service_template_fk',
-                          'service_template_name']
+                          'service_template_fk']
 
     # region foreign_keys
 
@@ -462,6 +454,11 @@ class NodeTemplateBase(TemplateModelMixin):
         """Required for use by SQLAlchemy queries"""
         return association_proxy('service_template', 'name')
 
+    @declared_attr
+    def type_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('type', 'name')
+
     # endregion
 
     # region one_to_one relationships
@@ -548,6 +545,7 @@ class NodeTemplateBase(TemplateModelMixin):
                            type=self.type,
                            description=deepcopy_with_locators(self.description),
                            state=models.Node.INITIAL,
+                           runtime_properties={},
                            node_template=self)
         utils.instantiate_dict(node, node.properties, self.properties)
         utils.instantiate_dict(node, node.interfaces, self.interface_templates)

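The type_name proxy added above is the standard SQLAlchemy association_proxy
pattern; a self-contained sketch with generic (non-ARIA) models shows what it
provides:

    from sqlalchemy import Column, Integer, String, ForeignKey, create_engine
    from sqlalchemy.orm import relationship, sessionmaker
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.ext.associationproxy import association_proxy

    Base = declarative_base()

    class Type(Base):
        __tablename__ = 'type'
        id = Column(Integer, primary_key=True)
        name = Column(String)

    class NodeTemplate(Base):
        __tablename__ = 'node_template'
        id = Column(Integer, primary_key=True)
        type_id = Column(Integer, ForeignKey('type.id'))
        type = relationship(Type)
        type_name = association_proxy('type', 'name')  # reads through the relationship

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    session.add(NodeTemplate(type=Type(name='tosca.nodes.Compute')))
    session.commit()
    print(session.query(NodeTemplate).first().type_name)  # tosca.nodes.Compute
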
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/modeling/utils.py
----------------------------------------------------------------------
diff --git a/aria/modeling/utils.py b/aria/modeling/utils.py
index 0b4015c..f172a50 100644
--- a/aria/modeling/utils.py
+++ b/aria/modeling/utils.py
@@ -13,12 +13,95 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from json import JSONEncoder
+from StringIO import StringIO
+
+from . import exceptions
 from ..parser.consumption import ConsumptionContext
 from ..parser.exceptions import InvalidValueError
 from ..parser.presentation import Value
 from ..utils.collections import OrderedDict
 from ..utils.console import puts
-from .exceptions import CannotEvaluateFunctionException
+from ..utils.type import validate_value_type
+
+
+class ModelJSONEncoder(JSONEncoder):
+    def default(self, o):
+        from .mixins import ModelMixin
+        if isinstance(o, ModelMixin):
+            if hasattr(o, 'value'):
+                dict_to_return = o.to_dict(fields=('value',))
+                return dict_to_return['value']
+            else:
+                return o.to_dict()
+        else:
+            return JSONEncoder.default(self, o)
+
+
+def create_inputs(inputs, template_inputs):
+    """
+    :param inputs: key-value dict
+    :param template_inputs: parameter name to parameter object dict
+    :return: dict of parameter name to Parameter models
+    """
+    merged_inputs = _merge_and_validate_inputs(inputs, template_inputs)
+
+    from . import models
+    input_models = []
+    for input_name, input_val in merged_inputs.iteritems():
+        parameter = models.Parameter(
+            name=input_name,
+            type_name=template_inputs[input_name].type_name,
+            description=template_inputs[input_name].description,
+            value=input_val)
+        input_models.append(parameter)
+
+    return {input.name: input for input in input_models}
+
+
+def _merge_and_validate_inputs(inputs, template_inputs):
+    """
+    :param inputs: key-value dict
+    :param template_inputs: parameter name to parameter object dict
+    :return: merged dict of input names to values, with template defaults applied
+    """
+    merged_inputs = inputs.copy()
+
+    missing_inputs = []
+    wrong_type_inputs = {}
+    for input_name, input_template in template_inputs.iteritems():
+        if input_name not in inputs:
+            if input_template.value is not None:
+                merged_inputs[input_name] = input_template.value  # apply default value
+            else:
+                missing_inputs.append(input_name)
+        else:
+            # Validate input type
+            try:
+                validate_value_type(inputs[input_name], input_template.type_name)
+            except ValueError:
+                wrong_type_inputs[input_name] = input_template.type_name
+
+    if missing_inputs:
+        raise exceptions.MissingRequiredInputsException(
+            'Required inputs {0} have not been specified - expected inputs: {1}'
+            .format(missing_inputs, template_inputs.keys()))
+
+    if wrong_type_inputs:
+        error_message = StringIO()
+        for param_name, param_type in wrong_type_inputs.iteritems():
+            error_message.write('Input "{0}" must be of type {1}\n'.
+                                format(param_name, param_type))
+        raise exceptions.InputOfWrongTypeException(error_message.getvalue())
+
+    undeclared_inputs = [input_name for input_name in inputs.keys()
+                         if input_name not in template_inputs]
+    if undeclared_inputs:
+        raise exceptions.UndeclaredInputsException(
+            'Undeclared inputs have been specified: {0}; Expected inputs: {1}'
+            .format(undeclared_inputs, template_inputs.keys()))
+
+    return merged_inputs
 
 
 def coerce_value(container, value, report_issues=False):
@@ -35,7 +118,7 @@ def coerce_value(container, value, report_issues=False):
         try:
             value = value._evaluate(context, container)
             value = coerce_value(container, value, report_issues)
-        except CannotEvaluateFunctionException:
+        except exceptions.CannotEvaluateFunctionException:
             pass
         except InvalidValueError as e:
             if report_issues:

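The merging rules above can be seen with a small sketch against the private helper
(run under the Python 2 runtime ARIA targets, with aria importable; the stub class
mimics just the Parameter attributes the helper reads):

    from aria.modeling import exceptions
    from aria.modeling.utils import _merge_and_validate_inputs

    class _StubInput(object):
        def __init__(self, type_name, value=None):
            self.type_name = type_name
            self.value = value

    template_inputs = {'port': _StubInput('integer', value=8080),      # has a default
                       'host': _StubInput('string', value='localhost')}

    merged = _merge_and_validate_inputs({}, template_inputs)
    assert merged == {'port': 8080, 'host': 'localhost'}               # defaults applied

    try:
        _merge_and_validate_inputs({'debug': True}, template_inputs)
    except exceptions.UndeclaredInputsException:
        pass                                                           # 'debug' was never declared

    # supplied inputs that are declared are also type-checked via validate_value_type,
    # and omitting an input that has no default raises MissingRequiredInputsException
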

[4/9] incubator-ariatosca git commit: ARIA-135-Add-plugin-field-to-MockTask

Posted by ra...@apache.org.
ARIA-135-Add-plugin-field-to-MockTask


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/4400e302
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/4400e302
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/4400e302

Branch: refs/heads/ARIA-48-aria-cli
Commit: 4400e302311f51da48fd0c66a58dfa1b7e66a80e
Parents: 941d85c
Author: max-orlov <ma...@gigaspaces.com>
Authored: Thu Mar 30 13:56:36 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Thu Mar 30 14:09:29 2017 +0300

----------------------------------------------------------------------
 tests/orchestrator/workflows/executor/test_executor.py       | 7 +++----
 .../orchestrator/workflows/executor/test_process_executor.py | 8 ++++----
 2 files changed, 7 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/4400e302/tests/orchestrator/workflows/executor/test_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_executor.py b/tests/orchestrator/workflows/executor/test_executor.py
index 9dde1ce..d84d1ec 100644
--- a/tests/orchestrator/workflows/executor/test_executor.py
+++ b/tests/orchestrator/workflows/executor/test_executor.py
@@ -85,12 +85,11 @@ class MockContext(object):
 
     def __init__(self, *args, **kwargs):
         self.logger = logging.getLogger()
+        self.task = type('SubprocessMockTask', (object, ), {'plugin': None})
+        self.serialization_dict = {'context_cls': self.__class__, 'context': {}}
 
     def __getattr__(self, item):
-        if item == 'serialization_dict':
-            return {'context_cls': self.__class__, 'context': {}}
-        else:
-            return None
+        return None
 
     @classmethod
     def deserialize_from_dict(cls, **kwargs):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/4400e302/tests/orchestrator/workflows/executor/test_process_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor.py b/tests/orchestrator/workflows/executor/test_process_executor.py
index 0edd0a5..436e7b6 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor.py
@@ -113,17 +113,17 @@ class MockContext(object):
 
     def __init__(self, *args, **kwargs):
         self.logger = logging.getLogger('mock_logger')
+        self.task = type('SubprocessMockTask', (object, ), {'plugin': None})
+        self.serialization_dict = {'context_cls': self.__class__, 'context': {}}
 
     def __getattr__(self, item):
-        if item == 'serialization_dict':
-            return {'context_cls': self.__class__, 'context': {}}
-        else:
-            return None
+        return None
 
     @classmethod
     def deserialize_from_dict(cls, **kwargs):
         return cls()
 
+
 class MockTask(object):
 
     INFINITE_RETRIES = aria_models.Task.INFINITE_RETRIES

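The `type(...)` call used in both mock contexts is simply the three-argument form of
type(), which builds a throwaway class on the fly; a plain class attribute is enough
here, presumably because the executor only needs to read `task.plugin` off the mock:

    SubprocessMockTask = type('SubprocessMockTask', (object,), {'plugin': None})
    assert SubprocessMockTask.plugin is None      # readable on the class itself...
    assert SubprocessMockTask().plugin is None    # ...and on its instances
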

[2/9] incubator-ariatosca git commit: ARIA-132-Models-cascading-deletion-raises-constraint-errors

Posted by ra...@apache.org.
ARIA-132-Models-cascading-deletion-raises-constraint-errors

Additional fixes:
- The relationships are now defined on both sides; we no longer use backref, but back_populates.


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/2de04972
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/2de04972
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/2de04972

Branch: refs/heads/ARIA-48-aria-cli
Commit: 2de049729f5d0fff50297d25a88f5eecdcf266f9
Parents: 07cbfcd
Author: max-orlov <ma...@gigaspaces.com>
Authored: Sun Mar 26 14:13:47 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Tue Mar 28 15:01:15 2017 +0300

----------------------------------------------------------------------
 aria/.pylintrc                                  |   2 +-
 aria/modeling/orchestration.py                  |  16 +
 aria/modeling/relationship.py                   | 222 +++--
 aria/modeling/service_changes.py                |  86 +-
 aria/modeling/service_common.py                 |   4 +
 aria/modeling/service_instance.py               | 813 +++++++++++-------
 aria/modeling/service_template.py               | 823 +++++++++++++------
 tests/orchestrator/context/test_operation.py    |   4 +-
 .../node-cellar/node-cellar.yaml                |   1 -
 9 files changed, 1299 insertions(+), 672 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2de04972/aria/.pylintrc
----------------------------------------------------------------------
diff --git a/aria/.pylintrc b/aria/.pylintrc
index 589402f..7222605 100644
--- a/aria/.pylintrc
+++ b/aria/.pylintrc
@@ -375,7 +375,7 @@ max-attributes=20
 min-public-methods=0
 
 # Maximum number of public methods for a class (see R0904).
-max-public-methods=20
+max-public-methods=50
 
 # Maximum number of boolean expressions in a if statement
 max-bool-expr=5

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2de04972/aria/modeling/orchestration.py
----------------------------------------------------------------------
diff --git a/aria/modeling/orchestration.py b/aria/modeling/orchestration.py
index 2d58671..a13ae87 100644
--- a/aria/modeling/orchestration.py
+++ b/aria/modeling/orchestration.py
@@ -103,9 +103,17 @@ class ExecutionBase(ModelMixin):
     workflow_name = Column(Text)
 
     @declared_attr
+    def logs(cls):
+        return relationship.one_to_many(cls, 'log')
+
+    @declared_attr
     def service(cls):
         return relationship.many_to_one(cls, 'service')
 
+    @declared_attr
+    def tasks(cls):
+        return relationship.one_to_many(cls, 'task')
+
     # region foreign keys
 
     @declared_attr
@@ -185,6 +193,10 @@ class PluginBase(ModelMixin):
 
     __tablename__ = 'plugin'
 
+    @declared_attr
+    def tasks(cls):
+        return relationship.one_to_many(cls, 'task')
+
     archive_name = Column(Text, nullable=False, index=True)
     distribution = Column(Text)
     distribution_release = Column(Text)
@@ -239,6 +251,10 @@ class TaskBase(ModelMixin):
     INFINITE_RETRIES = -1
 
     @declared_attr
+    def logs(cls):
+        return relationship.one_to_many(cls, 'log')
+
+    @declared_attr
     def node(cls):
         return relationship.many_to_one(cls, 'node')
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2de04972/aria/modeling/relationship.py
----------------------------------------------------------------------
diff --git a/aria/modeling/relationship.py b/aria/modeling/relationship.py
index 70691b3..ac1de28 100644
--- a/aria/modeling/relationship.py
+++ b/aria/modeling/relationship.py
@@ -26,9 +26,10 @@ from sqlalchemy import (
 
 from ..utils import formatting
 
+NO_BACK_POP = 'NO_BACK_POP'
 
-def foreign_key(other_table,
-                nullable=False):
+
+def foreign_key(other_table, nullable=False):
     """
     Declare a foreign key property, which will also create a foreign key column in the table with
     the name of the property. By convention the property name should end in "_fk".
@@ -54,9 +55,7 @@ def foreign_key(other_table,
                   nullable=nullable)
 
 
-def one_to_one_self(model_class,
-                    fk,
-                    relationship_kwargs=None):
+def one_to_one_self(model_class, fk):
     """
     Declare a one-to-one relationship property. The property value would be an instance of the same
     model.
@@ -69,12 +68,8 @@ def one_to_one_self(model_class,
     :type model_class: type
     :param fk: Foreign key name
     :type fk: basestring
-    :param relationship_kwargs: Extra kwargs for SQLAlchemy ``relationship``
-    :type relationship_kwargs: {}
     """
 
-    relationship_kwargs = relationship_kwargs or {}
-
     remote_side = '{model_class}.{remote_column}'.format(
         model_class=model_class.__name__,
         remote_column=model_class.id_column_name()
@@ -85,20 +80,18 @@ def one_to_one_self(model_class,
         model_class=model_class.__name__,
         column=fk
     )
-
-    return relationship(
-        _get_class_for_table(model_class, model_class.__tablename__).__name__,
-        primaryjoin=primaryjoin,
-        remote_side=remote_side,
-        post_update=True,
-        **relationship_kwargs
+    return _relationship(
+        model_class,
+        model_class.__tablename__,
+        relationship_kwargs={
+            'primaryjoin': primaryjoin,
+            'remote_side': remote_side,
+            'post_update': True
+        }
     )
 
 
-def one_to_many_self(model_class,
-                     fk,
-                     dict_key=None,
-                     relationship_kwargs=None):
+def one_to_many_self(model_class, fk, dict_key=None):
     """
     Declare a one-to-many relationship property. The property value would be a list or dict of
     instances of the same model.
@@ -114,28 +107,24 @@ def one_to_many_self(model_class,
     :param dict_key: If set the value will be a dict with this key as the dict key; otherwise will
                      be a list
     :type dict_key: basestring
-    :param relationship_kwargs: Extra kwargs for SQLAlchemy ``relationship``
-    :type relationship_kwargs: {}
     """
-
-    relationship_kwargs = relationship_kwargs or {}
-
-    relationship_kwargs.setdefault('remote_side', '{model_class}.{remote_column}'.format(
-        model_class=model_class.__name__,
-        remote_column=fk
-    ))
-
-    return _relationship(model_class, model_class.__tablename__, None, relationship_kwargs,
-                         other_property=False, dict_key=dict_key)
+    return _relationship(
+        model_class,
+        model_class.__tablename__,
+        relationship_kwargs={
+            'remote_side': '{model_class}.{remote_column}'.format(
+                model_class=model_class.__name__, remote_column=fk)
+        },
+        back_populates=False,
+        dict_key=dict_key
+    )
 
 
 def one_to_one(model_class,
                other_table,
                fk=None,
                other_fk=None,
-               other_property=None,
-               relationship_kwargs=None,
-               backref_kwargs=None):
+               back_populates=None):
     """
     Declare a one-to-one relationship property. The property value would be an instance of the other
     table's model.
@@ -154,26 +143,25 @@ def one_to_one(model_class,
     :type fk: basestring
     :param other_fk: Foreign key name at the other table (no need specify if there's no ambiguity)
     :type other_fk: basestring
-    :param relationship_kwargs: Extra kwargs for SQLAlchemy ``relationship``
-    :type relationship_kwargs: {}
-    :param backref_kwargs: Extra kwargs for SQLAlchemy ``backref``
-    :type backref_kwargs: {}
+    :param back_populates: Override name of matching one-to-one property at other table; set to
+                           NO_BACK_POP to disable
+    :type back_populates: basestring|bool
     """
+    if back_populates is None:
+        back_populates = model_class.__tablename__
 
-    backref_kwargs = backref_kwargs or {}
-    backref_kwargs.setdefault('uselist', False)
-
-    return _relationship(model_class, other_table, backref_kwargs, relationship_kwargs,
-                         other_property, fk=fk, other_fk=other_fk)
+    return _relationship(model_class,
+                         other_table,
+                         fk=fk,
+                         back_populates=back_populates,
+                         other_fk=other_fk)
 
 
 def one_to_many(model_class,
                 child_table,
                 child_fk=None,
                 dict_key=None,
-                child_property=None,
-                relationship_kwargs=None,
-                backref_kwargs=None):
+                back_populates=None):
     """
     Declare a one-to-many relationship property. The property value would be a list or dict of
     instances of the child table's model.
@@ -194,29 +182,25 @@ def one_to_many(model_class,
     :param dict_key: If set the value will be a dict with this key as the dict key; otherwise will
                      be a list
     :type dict_key: basestring
-    :param child_property: Override name of matching many-to-one property at child table; set to
+    :param back_populates: Override name of matching many-to-one property at child table; set to
                            false to disable
-    :type child_property: basestring|bool
-    :param relationship_kwargs: Extra kwargs for SQLAlchemy ``relationship``
-    :type relationship_kwargs: {}
-    :param backref_kwargs: Extra kwargs for SQLAlchemy ``backref``
-    :type backref_kwargs: {}
+    :type back_populates: basestring|bool
     """
-
-    backref_kwargs = backref_kwargs or {}
-    backref_kwargs.setdefault('uselist', False)
-
-    return _relationship(model_class, child_table, backref_kwargs, relationship_kwargs,
-                         child_property, other_fk=child_fk, dict_key=dict_key)
+    if back_populates is None:
+        back_populates = model_class.__tablename__
+    return _relationship(
+        model_class,
+        child_table,
+        back_populates=back_populates,
+        other_fk=child_fk,
+        dict_key=dict_key)
 
 
 def many_to_one(model_class,
                 parent_table,
                 fk=None,
                 parent_fk=None,
-                parent_property=None,
-                relationship_kwargs=None,
-                backref_kwargs=None):
+                back_populates=None):
     """
     Declare a many-to-one relationship property. The property value would be an instance of the
     parent table's model.
@@ -236,34 +220,25 @@ def many_to_one(model_class,
     :type parent_table: basestring
     :param fk: Foreign key name at our table (no need specify if there's no ambiguity)
     :type fk: basestring
-    :param parent_property: Override name of matching one-to-many property at parent table; set to
+    :param back_populates: Override name of matching one-to-many property at parent table; set to
                             false to disable
-    :type parent_property: basestring|bool
-    :param relationship_kwargs: Extra kwargs for SQLAlchemy ``relationship``
-    :type relationship_kwargs: {}
-    :param backref_kwargs: Extra kwargs for SQLAlchemy ``backref``
-    :type backref_kwargs: {}
+    :type back_populates: basestring|bool
     """
+    if back_populates is None:
+        back_populates = formatting.pluralize(model_class.__tablename__)
 
-    if parent_property is None:
-        parent_property = formatting.pluralize(model_class.__tablename__)
-
-    backref_kwargs = backref_kwargs or {}
-    backref_kwargs.setdefault('uselist', True)
-    backref_kwargs.setdefault('lazy', 'dynamic')
-    backref_kwargs.setdefault('cascade', 'all') # delete children when parent is deleted
-
-    return _relationship(model_class, parent_table, backref_kwargs, relationship_kwargs,
-                         parent_property, fk=fk, other_fk=parent_fk)
+    return _relationship(model_class,
+                         parent_table,
+                         back_populates=back_populates,
+                         fk=fk,
+                         other_fk=parent_fk)
 
 
 def many_to_many(model_class,
                  other_table,
                  prefix=None,
                  dict_key=None,
-                 other_property=None,
-                 relationship_kwargs=None,
-                 backref_kwargs=None):
+                 other_property=None):
     """
     Declare a many-to-many relationship property. The property value would be a list or dict of
     instances of the other table's model.
@@ -280,8 +255,8 @@ def many_to_many(model_class,
 
     :param model_class: The class in which this relationship will be declared
     :type model_class: type
-    :param parent_table: Parent table name
-    :type parent_table: basestring
+    :param other_table: Other table name
+    :type other_table: basestring
     :param prefix: Optional prefix for extra table name as well as for ``other_property``
     :type prefix: basestring
     :param dict_key: If set the value will be a dict with this key as the dict key; otherwise will
@@ -290,10 +265,6 @@ def many_to_many(model_class,
     :param other_property: Override name of matching many-to-many property at other table; set to
                            false to disable
     :type other_property: basestring|bool
-    :param relationship_kwargs: Extra kwargs for SQLAlchemy ``relationship``
-    :type relationship_kwargs: {}
-    :param backref_kwargs: Extra kwargs for SQLAlchemy ``backref``
-    :type backref_kwargs: {}
     """
 
     this_table = model_class.__tablename__
@@ -303,69 +274,68 @@ def many_to_many(model_class,
     other_column_name = '{0}_id'.format(other_table)
     other_foreign_key = '{0}.id'.format(other_table)
 
-    secondary_table = '{0}_{1}'.format(this_table, other_table)
+    secondary_table_name = '{0}_{1}'.format(this_table, other_table)
 
     if prefix is not None:
-        secondary_table = '{0}_{1}'.format(prefix, secondary_table)
+        secondary_table_name = '{0}_{1}'.format(prefix, secondary_table_name)
         if other_property is None:
             other_property = '{0}_{1}'.format(prefix, formatting.pluralize(this_table))
 
-    backref_kwargs = backref_kwargs or {}
-    backref_kwargs.setdefault('uselist', True)
-
-    relationship_kwargs = relationship_kwargs or {}
-    relationship_kwargs.setdefault('secondary', _get_secondary_table(
+    secondary_table = _get_secondary_table(
         model_class.metadata,
-        secondary_table,
+        secondary_table_name,
         this_column_name,
         other_column_name,
         this_foreign_key,
         other_foreign_key
-    ))
+    )
 
-    return _relationship(model_class, other_table, backref_kwargs, relationship_kwargs,
-                         other_property, dict_key=dict_key)
+    return _relationship(
+        model_class,
+        other_table,
+        relationship_kwargs={'secondary': secondary_table},
+        backref_kwargs={'name': other_property, 'uselist': True} if other_property else None,
+        dict_key=dict_key
+    )
 
 
-def _relationship(model_class, other_table, backref_kwargs, relationship_kwargs, other_property,
-                  fk=None, other_fk=None, dict_key=None):
+def _relationship(model_class,
+                  other_table_name,
+                  back_populates=None,
+                  backref_kwargs=None,
+                  relationship_kwargs=None,
+                  fk=None,
+                  other_fk=None,
+                  dict_key=None):
     relationship_kwargs = relationship_kwargs or {}
 
     if fk:
-        relationship_kwargs.setdefault('foreign_keys',
-                                       lambda: getattr(
-                                           _get_class_for_table(
-                                               model_class,
-                                               model_class.__tablename__),
-                                           fk))
+        relationship_kwargs.setdefault(
+            'foreign_keys',
+            lambda: getattr(_get_class_for_table(model_class, model_class.__tablename__), fk)
+        )
 
     elif other_fk:
-        relationship_kwargs.setdefault('foreign_keys',
-                                       lambda: getattr(
-                                           _get_class_for_table(
-                                               model_class,
-                                               other_table),
-                                           other_fk))
+        relationship_kwargs.setdefault(
+            'foreign_keys',
+            lambda: getattr(_get_class_for_table(model_class, other_table_name), other_fk)
+        )
 
     if dict_key:
         relationship_kwargs.setdefault('collection_class',
                                        attribute_mapped_collection(dict_key))
 
-    if other_property is False:
-        # No backref
-        return relationship(
-            lambda: _get_class_for_table(model_class, other_table),
-            **relationship_kwargs
-        )
+    if backref_kwargs:
+        assert back_populates is None
+        return relationship(lambda: _get_class_for_table(model_class, other_table_name),
+                            backref=backref(**backref_kwargs),
+                            **relationship_kwargs
+                           )
     else:
-        if other_property is None:
-            other_property = model_class.__tablename__
-        backref_kwargs = backref_kwargs or {}
-        return relationship(
-            lambda: _get_class_for_table(model_class, other_table),
-            backref=backref(other_property, **backref_kwargs),
-            **relationship_kwargs
-        )
+        if back_populates is not NO_BACK_POP:
+            relationship_kwargs['back_populates'] = back_populates
+        return relationship(lambda: _get_class_for_table(model_class, other_table_name),
+                            **relationship_kwargs)
 
 
 def _get_class_for_table(model_class, tablename):

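In plain SQLAlchemy terms, the change these helpers implement is moving from a
single backref to a pair of explicitly declared back_populates properties; a minimal
sketch with generic Parent/Child models (not ARIA models):

    from sqlalchemy import Column, Integer, ForeignKey
    from sqlalchemy.orm import relationship
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Parent(Base):
        __tablename__ = 'parent'
        id = Column(Integer, primary_key=True)
        # declared explicitly on this side...
        children = relationship('Child', back_populates='parent')

    class Child(Base):
        __tablename__ = 'child'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('parent.id'))
        # ...and explicitly on the other side, instead of backref='children' on one side only
        parent = relationship('Parent', back_populates='children')

Declaring both directions keeps each property visible in its own class definition;
the NO_BACK_POP sentinel above is how a caller opts out when no reverse property is
wanted.
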
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2de04972/aria/modeling/service_changes.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_changes.py b/aria/modeling/service_changes.py
index a33e6ae..b1a75a2 100644
--- a/aria/modeling/service_changes.py
+++ b/aria/modeling/service_changes.py
@@ -42,9 +42,6 @@ class ServiceUpdateBase(ModelMixin):
     """
     Deployment update model representation.
     """
-
-    steps = None
-
     __tablename__ = 'service_update'
 
     __private_fields__ = ['service_fk',
@@ -60,14 +57,6 @@ class ServiceUpdateBase(ModelMixin):
     modified_entity_ids = Column(Dict)
     state = Column(Text)
 
-    @declared_attr
-    def execution(cls):
-        return relationship.many_to_one(cls, 'execution')
-
-    @declared_attr
-    def service(cls):
-        return relationship.many_to_one(cls, 'service', parent_property='updates')
-
     # region foreign keys
 
     @declared_attr
@@ -94,6 +83,34 @@ class ServiceUpdateBase(ModelMixin):
 
     # endregion
 
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
+
+    @declared_attr
+    def steps(cls):
+        return relationship.one_to_many(cls, 'service_update_step')
+
+    # endregion
+
+    # region many_to_one relationships
+
+    @declared_attr
+    def execution(cls):
+        return relationship.one_to_one(cls, 'execution', back_populates=relationship.NO_BACK_POP)
+
+    @declared_attr
+    def service(cls):
+        return relationship.many_to_one(cls, 'service', back_populates='updates')
+
+    # endregion
+
+    # region many_to_many relationships
+
+    # endregion
+
     def to_dict(self, suppress_error=False, **kwargs):
         dep_update_dict = super(ServiceUpdateBase, self).to_dict(suppress_error)     #pylint: disable=no-member
         # Taking care of the fact the DeploymentSteps are _BaseModels
@@ -133,10 +150,6 @@ class ServiceUpdateStepBase(ModelMixin):
     entity_id = Column(Text, nullable=False)
     entity_type = Column(Enum(*ENTITY_TYPES, name='entity_type'), nullable=False)
 
-    @declared_attr
-    def service_update(cls):
-        return relationship.many_to_one(cls, 'service_update', parent_property='steps')
-
     # region foreign keys
 
     @declared_attr
@@ -154,6 +167,26 @@ class ServiceUpdateStepBase(ModelMixin):
 
     # endregion
 
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
+
+    # endregion
+
+    # region many_to_one relationships
+
+    @declared_attr
+    def service_update(cls):
+        return relationship.many_to_one(cls, 'service_update', back_populates='steps')
+
+    # endregion
+
+    # region many_to_many relationships
+
+    # endregion
+
     def __hash__(self):
         return hash((getattr(self, self.id_column_name()), self.entity_id))
 
@@ -206,10 +239,6 @@ class ServiceModificationBase(ModelMixin):
     nodes = Column(Dict)
     status = Column(Enum(*STATES, name='service_modification_status'))
 
-    @declared_attr
-    def service(cls):
-        return relationship.many_to_one(cls, 'service', parent_property='modifications')
-
     # region foreign keys
 
     @declared_attr
@@ -226,3 +255,22 @@ class ServiceModificationBase(ModelMixin):
         return association_proxy('service', cls.name_column_name())
 
     # endregion
+
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
+
+    # endregion
+
+    # region many_to_one relationships
+    @declared_attr
+    def service(cls):
+        return relationship.many_to_one(cls, 'service', back_populates='modifications')
+
+    # endregion
+
+    # region many_to_many relationships
+
+    # endregion

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2de04972/aria/modeling/service_common.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_common.py b/aria/modeling/service_common.py
index d6b1f33..48c3170 100644
--- a/aria/modeling/service_common.py
+++ b/aria/modeling/service_common.py
@@ -275,6 +275,10 @@ class PluginSpecificationBase(TemplateModelMixin):
 
     # endregion
 
+    @declared_attr
+    def service_template(cls):
+        return relationship.many_to_one(cls, 'service_template')
+
     @property
     def as_raw(self):
         return collections.OrderedDict((

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2de04972/aria/modeling/service_instance.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py
index 1e18db0..e2e5ae0 100644
--- a/aria/modeling/service_instance.py
+++ b/aria/modeling/service_instance.py
@@ -36,7 +36,7 @@ from . import (
 )
 
 
-class ServiceBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
+class ServiceBase(InstanceModelMixin):
     """
     A service is usually an instance of a :class:`ServiceTemplate`.
 
@@ -71,12 +71,10 @@ class ServiceBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
     :vartype created_at: :class:`datetime.datetime`
     :ivar updated_at: Update timestamp
     :vartype updated_at: :class:`datetime.datetime`
-
     :ivar permalink: ??
     :vartype permalink: basestring
     :ivar scaling_groups: ??
     :vartype scaling_groups: {}
-
     :ivar modifications: Modifications of this service
     :vartype modifications: [:class:`ServiceModification`]
     :ivar updates: Updates of this service
@@ -91,16 +89,53 @@ class ServiceBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
                           'service_template_fk',
                           'service_template_name']
 
+    # region foreign keys
+
     @declared_attr
-    def service_template(cls):
-        return relationship.many_to_one(cls, 'service_template')
+    def substitution_fk(cls):
+        """Service one-to-one to Substitution"""
+        return relationship.foreign_key('substitution', nullable=True)
 
-    description = Column(Text)
+    @declared_attr
+    def service_template_fk(cls):
+        """For Service many-to-one to ServiceTemplate"""
+        return relationship.foreign_key('service_template', nullable=True)
+
+    # endregion
+
+    # region association proxies
 
     @declared_attr
-    def meta_data(cls):
-        # Warning! We cannot use the attr name "metadata" because it's used by SQLAlchemy!
-        return relationship.many_to_many(cls, 'metadata', dict_key='name')
+    def service_template_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('service_template', 'name')
+
+    # endregion
+
+    # region one_to_one relationships
+
+    @declared_attr
+    def substitution(cls):
+        return relationship.one_to_one(cls, 'substitution', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region one_to_many relationships
+    @declared_attr
+    def updates(cls):
+        return relationship.one_to_many(cls, 'service_update')
+
+    @declared_attr
+    def modifications(cls):
+        return relationship.one_to_many(cls, 'service_modification')
+
+    @declared_attr
+    def executions(cls):
+        return relationship.one_to_many(cls, 'execution')
+
+    @declared_attr
+    def operations(cls):
+        return relationship.one_to_many(cls, 'operation')
 
     @declared_attr
     def nodes(cls):
@@ -115,8 +150,24 @@ class ServiceBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
         return relationship.one_to_many(cls, 'policy', dict_key='name')
 
     @declared_attr
-    def substitution(cls):
-        return relationship.one_to_one(cls, 'substitution')
+    def workflows(cls):
+        return relationship.one_to_many(cls, 'operation', dict_key='name')
+
+    # endregion
+
+    # region many_to_one relationships
+
+    @declared_attr
+    def service_template(cls):
+        return relationship.many_to_one(cls, 'service_template')
+
+    # endregion
+
+    # region many_to_many relationships
+    @declared_attr
+    def meta_data(cls):
+        # Warning! We cannot use the attr name "metadata" because it's used by SQLAlchemy!
+        return relationship.many_to_many(cls, 'metadata', dict_key='name')
 
     @declared_attr
     def inputs(cls):
@@ -127,13 +178,13 @@ class ServiceBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
         return relationship.many_to_many(cls, 'parameter', prefix='outputs', dict_key='name')
 
     @declared_attr
-    def workflows(cls):
-        return relationship.one_to_many(cls, 'operation', dict_key='name')
-
-    @declared_attr
     def plugin_specifications(cls):
         return relationship.many_to_many(cls, 'plugin_specification', dict_key='name')
 
+    # endregion
+
+    description = Column(Text)
+
     created_at = Column(DateTime, nullable=False, index=True)
     updated_at = Column(DateTime)
 
@@ -144,29 +195,6 @@ class ServiceBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
 
     # endregion
 
-    # region foreign keys
-
-    @declared_attr
-    def substitution_fk(cls):
-        """Service one-to-one to Substitution"""
-        return relationship.foreign_key('substitution', nullable=True)
-
-    @declared_attr
-    def service_template_fk(cls):
-        """For Service many-to-one to ServiceTemplate"""
-        return relationship.foreign_key('service_template', nullable=True)
-
-    # endregion
-
-    # region association proxies
-
-    @declared_attr
-    def service_template_name(cls):
-        """Required for use by SQLAlchemy queries"""
-        return association_proxy('service_template', 'name')
-
-    # endregion
-
     def satisfy_requirements(self):
         satisfied = True
         for node in self.nodes.itervalues():
@@ -290,7 +318,7 @@ class ServiceBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
                         self._dump_graph_node(target_node)
 
 
-class NodeBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
+class NodeBase(InstanceModelMixin):
     """
     Usually an instance of a :class:`NodeTemplate`.
 
@@ -318,7 +346,6 @@ class NodeBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
     :vartype inbound_relationships: [:class:`Relationship`]
     :ivar host: Host node (can be self)
     :vartype host: :class:`Node`
-
     :ivar runtime_properties: TODO: should be replaced with attributes
     :vartype runtime_properties: {}
     :ivar scaling_groups: ??
@@ -327,7 +354,6 @@ class NodeBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
     :vartype state: string
     :ivar version: Used by `aria.storage.instrumentation`
     :vartype version: int
-
     :ivar service: Containing service
     :vartype service: :class:`Service`
     :ivar groups: We are a member of these groups
@@ -391,19 +417,52 @@ class NodeBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
         except KeyError:
             return None
 
+    # region foreign_keys
+
     @declared_attr
-    def node_template(cls):
-        return relationship.many_to_one(cls, 'node_template')
+    def type_fk(cls):
+        """For Node many-to-one to Type"""
+        return relationship.foreign_key('type')
 
     @declared_attr
-    def type(cls):
-        return relationship.many_to_one(cls, 'type')
+    def host_fk(cls):
+        """For Node one-to-one to Node"""
+        return relationship.foreign_key('node', nullable=True)
 
-    description = Column(Text)
+    @declared_attr
+    def service_fk(cls):
+        """For Service one-to-many to Node"""
+        return relationship.foreign_key('service')
 
     @declared_attr
-    def properties(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+    def node_template_fk(cls):
+        """For Node many-to-one to NodeTemplate"""
+        return relationship.foreign_key('node_template')
+
+    # endregion
+
+    # region association proxies
+
+    @declared_attr
+    def service_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('service', 'name')
+
+    # endregion
+
+    # region one_to_one relationships
+
+    @declared_attr
+    def host(cls):
+        return relationship.one_to_one_self(cls, 'host_fk')
+
+    # endregion
+
+    # region one_to_many relationships
+
+    @declared_attr
+    def tasks(cls):
+        return relationship.one_to_many(cls, 'task')
 
     @declared_attr
     def interfaces(cls):
@@ -419,20 +478,40 @@ class NodeBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
 
     @declared_attr
     def outbound_relationships(cls):
-        return relationship.one_to_many(cls, 'relationship', child_fk='source_node_fk',
-                                        child_property='source_node')
+        return relationship.one_to_many(
+            cls, 'relationship', child_fk='source_node_fk', back_populates='source_node')
 
     @declared_attr
     def inbound_relationships(cls):
-        return relationship.one_to_many(cls, 'relationship', child_fk='target_node_fk',
-                                        child_property='target_node')
+        return relationship.one_to_many(
+            cls, 'relationship', child_fk='target_node_fk', back_populates='target_node')
+
+    # endregion
+
+    # region many_to_one relationships
 
     @declared_attr
-    def host(cls):
-        return relationship.one_to_one_self(cls, 'host_fk')
+    def service(cls):
+        return relationship.many_to_one(cls, 'service')
 
-    # region orchestration
+    @declared_attr
+    def node_template(cls):
+        return relationship.many_to_one(cls, 'node_template')
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
 
+    # endregion
+
+    # region many_to_many relationships
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    # endregion
+
+    description = Column(Text)
     runtime_properties = Column(modeling_types.Dict)
     scaling_groups = Column(modeling_types.List)
     state = Column(Enum(*STATES, name='node_state'), nullable=False, default=INITIAL)
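
The outbound_relationships/inbound_relationships pair above, together with the source_node/target_node many-to-one declarations further down, follows the standard SQLAlchemy recipe for a child table that carries two foreign keys to the same parent table, with back_populates keeping each pair of attributes in sync. A minimal, self-contained sketch of that recipe in plain SQLAlchemy (illustrative names only, not ARIA's relationship helper module):

from sqlalchemy import Column, ForeignKey, Integer, Text, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker

Base = declarative_base()


class Node(Base):
    __tablename__ = 'node'
    id = Column(Integer, primary_key=True)
    name = Column(Text)
    # Two collections over the same child table, disambiguated by foreign_keys
    outbound_relationships = relationship(
        'Relationship', foreign_keys='Relationship.source_node_fk',
        back_populates='source_node')
    inbound_relationships = relationship(
        'Relationship', foreign_keys='Relationship.target_node_fk',
        back_populates='target_node')


class Relationship(Base):
    __tablename__ = 'relationship'
    id = Column(Integer, primary_key=True)
    source_node_fk = Column(Integer, ForeignKey('node.id'))
    target_node_fk = Column(Integer, ForeignKey('node.id'))
    source_node = relationship(
        'Node', foreign_keys=[source_node_fk],
        back_populates='outbound_relationships')
    target_node = relationship(
        'Node', foreign_keys=[target_node_fk],
        back_populates='inbound_relationships')


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
source, target = Node(name='source'), Node(name='target')
session.add(Relationship(source_node=source, target_node=target))
session.commit()
# back_populates keeps both directions consistent
assert source.outbound_relationships[0].target_node is target
assert target.inbound_relationships[0].source_node is source
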
@@ -454,41 +533,6 @@ class NodeBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
             return host_ip_property.value
         return None
 
-    # endregion
-
-    # region foreign_keys
-
-    @declared_attr
-    def type_fk(cls):
-        """For Node many-to-one to Type"""
-        return relationship.foreign_key('type')
-
-    @declared_attr
-    def host_fk(cls):
-        """For Node one-to-one to Node"""
-        return relationship.foreign_key('node', nullable=True)
-
-    @declared_attr
-    def service_fk(cls):
-        """For Service one-to-many to Node"""
-        return relationship.foreign_key('service')
-
-    @declared_attr
-    def node_template_fk(cls):
-        """For Node many-to-one to NodeTemplate"""
-        return relationship.foreign_key('node_template', nullable=True)
-
-    # endregion
-
-    # region association proxies
-
-    @declared_attr
-    def service_name(cls):
-        """Required for use by SQLAlchemy queries"""
-        return association_proxy('service', 'name')
-
-    # endregion
-
     def satisfy_requirements(self):
         node_template = self.node_template
         satisfied = True
@@ -513,7 +557,7 @@ class NodeBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
         from . import models
         context = ConsumptionContext.get_thread_local()
         # Find target nodes
-        target_nodes = target_node_template.nodes.all()
+        target_nodes = target_node_template.nodes
         if target_nodes:
             target_node = None
             target_capability = None
@@ -621,6 +665,7 @@ class NodeBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
             utils.dump_dict_values(self.capabilities, 'Capabilities')
             utils.dump_list_values(self.outbound_relationships, 'Relationships')
 
+
 class GroupBase(InstanceModelMixin):
     """
     Usually an instance of a :class:`GroupTemplate`.
@@ -639,7 +684,6 @@ class GroupBase(InstanceModelMixin):
     :vartype properties: {basestring: :class:`Parameter`}
     :ivar interfaces: Bundles of operations
     :vartype interfaces: {basestring: :class:`Interface`}
-
     :ivar service: Containing service
     :vartype service: :class:`Service`
     :ivar policies: Policies enacted on this group
@@ -648,51 +692,73 @@ class GroupBase(InstanceModelMixin):
 
     __tablename__ = 'group'
 
-    __private_fields__ = ['type_fk',
-                          'service_fk',
-                          'group_template_fk']
+    __private_fields__ = ['type_fk', 'service_fk', 'group_template_fk']
 
-    @declared_attr
-    def group_template(cls):
-        return relationship.many_to_one(cls, 'group_template')
+    # region foreign_keys
 
     @declared_attr
-    def type(cls):
-        return relationship.many_to_one(cls, 'type')
-
-    description = Column(Text)
+    def type_fk(cls):
+        """For Group many-to-one to Type"""
+        return relationship.foreign_key('type')
 
     @declared_attr
-    def nodes(cls):
-        return relationship.many_to_many(cls, 'node')
+    def service_fk(cls):
+        """For Service one-to-many to Group"""
+        return relationship.foreign_key('service')
 
     @declared_attr
-    def properties(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+    def group_template_fk(cls):
+        """For Group many-to-one to GroupTemplate"""
+        return relationship.foreign_key('group_template', nullable=True)
+
+    # endregion
+
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
 
     @declared_attr
     def interfaces(cls):
         return relationship.one_to_many(cls, 'interface', dict_key='name')
 
-    # region foreign_keys
+    # endregion
+
+    # region many_to_one relationships
 
     @declared_attr
-    def type_fk(cls):
-        """For Group many-to-one to Type"""
-        return relationship.foreign_key('type')
+    def service(cls):
+        return relationship.many_to_one(cls, 'service')
 
     @declared_attr
-    def service_fk(cls):
-        """For Service one-to-many to Group"""
-        return relationship.foreign_key('service')
+    def group_template(cls):
+        return relationship.many_to_one(cls, 'group_template')
 
     @declared_attr
-    def group_template_fk(cls):
-        """For Group many-to-one to GroupTemplate"""
-        return relationship.foreign_key('group_template', nullable=True)
+    def type(cls):
+        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region many_to_many relationships
+
+    @declared_attr
+    def nodes(cls):
+        return relationship.many_to_many(cls, 'node')
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
 
     # endregion
 
+    description = Column(Text)
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -740,58 +806,79 @@ class PolicyBase(InstanceModelMixin):
     :vartype groups: [:class:`Group`]
     :ivar properties: Associated parameters
     :vartype properties: {basestring: :class:`Parameter`}
-
     :ivar service: Containing service
     :vartype service: :class:`Service`
     """
 
     __tablename__ = 'policy'
 
-    __private_fields__ = ['type_fk',
-                          'service_fk',
-                          'policy_template_fk']
+    __private_fields__ = ['type_fk', 'service_fk', 'policy_template_fk']
 
-    @declared_attr
-    def policy_template(cls):
-        return relationship.many_to_one(cls, 'policy_template')
+    # region foreign_keys
 
     @declared_attr
-    def type(cls):
-        return relationship.many_to_one(cls, 'type')
+    def type_fk(cls):
+        """For Policy many-to-one to Type"""
+        return relationship.foreign_key('type')
 
-    description = Column(Text)
+    @declared_attr
+    def service_fk(cls):
+        """For Service one-to-many to Policy"""
+        return relationship.foreign_key('service')
 
     @declared_attr
-    def properties(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+    def policy_template_fk(cls):
+        """For Policy many-to-one to PolicyTemplate"""
+        return relationship.foreign_key('policy_template', nullable=True)
+
+    # endregion
+
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
+
+    # endregion
+
+    # region many_to_one relationships
 
     @declared_attr
-    def nodes(cls):
-        return relationship.many_to_many(cls, 'node')
+    def service(cls):
+        return relationship.many_to_one(cls, 'service')
 
     @declared_attr
-    def groups(cls):
-        return relationship.many_to_many(cls, 'group')
+    def policy_template(cls):
+        return relationship.many_to_one(cls, 'policy_template')
 
-    # region foreign_keys
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region many_to_many relationships
 
     @declared_attr
-    def type_fk(cls):
-        """For Policy many-to-one to Type"""
-        return relationship.foreign_key('type')
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
 
     @declared_attr
-    def service_fk(cls):
-        """For Service one-to-many to Policy"""
-        return relationship.foreign_key('service')
+    def nodes(cls):
+        return relationship.many_to_many(cls, 'node')
 
     @declared_attr
-    def policy_template_fk(cls):
-        """For Policy many-to-one to PolicyTemplate"""
-        return relationship.foreign_key('policy_template', nullable=True)
+    def groups(cls):
+        return relationship.many_to_many(cls, 'group')
 
     # endregion
 
+    description = Column(Text)
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -835,7 +922,6 @@ class SubstitutionBase(InstanceModelMixin):
     :vartype node_type: :class:`Type`
     :ivar mappings: Requirement and capability mappings
     :vartype mappings: {basestring: :class:`SubstitutionTemplate`}
-
     :ivar service: Containing service
     :vartype service: :class:`Service`
     """
@@ -845,29 +931,53 @@ class SubstitutionBase(InstanceModelMixin):
     __private_fields__ = ['node_type_fk',
                           'substitution_template_fk']
 
+    # region foreign_keys
+
     @declared_attr
-    def substitution_template(cls):
-        return relationship.many_to_one(cls, 'substitution_template')
+    def node_type_fk(cls):
+        """For Substitution many-to-one to Type"""
+        return relationship.foreign_key('type')
 
     @declared_attr
-    def node_type(cls):
-        return relationship.many_to_one(cls, 'type')
+    def substitution_template_fk(cls):
+        """For Substitution many-to-one to SubstitutionTemplate"""
+        return relationship.foreign_key('substitution_template', nullable=True)
+
+    # endregion
+
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
 
     @declared_attr
     def mappings(cls):
         return relationship.one_to_many(cls, 'substitution_mapping', dict_key='name')
 
-    # region foreign_keys
+    # endregion
+
+    # region many_to_one relationships
 
     @declared_attr
-    def node_type_fk(cls):
-        """For Substitution many-to-one to Type"""
-        return relationship.foreign_key('type')
+    def service(cls):
+        return relationship.one_to_one(cls, 'service', back_populates=relationship.NO_BACK_POP)
 
     @declared_attr
-    def substitution_template_fk(cls):
-        """For Substitution many-to-one to SubstitutionTemplate"""
-        return relationship.foreign_key('substitution_template', nullable=True)
+    def substitution_template(cls):
+        return relationship.many_to_one(cls, 'substitution_template')
+
+    @declared_attr
+    def node_type(cls):
+        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region many_to_many relationships
 
     # endregion
 
@@ -907,7 +1017,6 @@ class SubstitutionMappingBase(InstanceModelMixin):
     :vartype capability: :class:`Capability`
     :ivar requirement_template: Requirement template in the node template
     :vartype requirement_template: :class:`RequirementTemplate`
-
     :ivar substitution: Containing substitution
     :vartype substitution: :class:`Substitution`
     """
@@ -919,18 +1028,6 @@ class SubstitutionMappingBase(InstanceModelMixin):
                           'capability_fk',
                           'requirement_template_fk']
 
-    @declared_attr
-    def node(cls):
-        return relationship.one_to_one(cls, 'node')
-
-    @declared_attr
-    def capability(cls):
-        return relationship.one_to_one(cls, 'capability')
-
-    @declared_attr
-    def requirement_template(cls):
-        return relationship.one_to_one(cls, 'requirement_template')
-
     # region foreign keys
 
     @declared_attr
@@ -955,6 +1052,43 @@ class SubstitutionMappingBase(InstanceModelMixin):
 
     # endregion
 
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    @declared_attr
+    def substitution(cls):
+        return relationship.many_to_one(cls, 'substitution', back_populates='mappings')
+
+    @declared_attr
+    def node(cls):
+        return relationship.one_to_one(cls, 'node', back_populates=relationship.NO_BACK_POP)
+
+    @declared_attr
+    def capability(cls):
+        return relationship.one_to_one(cls, 'capability', back_populates=relationship.NO_BACK_POP)
+
+    @declared_attr
+    def requirement_template(cls):
+        return relationship.one_to_one(
+            cls, 'requirement_template', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region one_to_many relationships
+
+    # endregion
+
+    # region many_to_one relationships
+
+    # endregion
+
+    # region many_to_many relationships
+
+    # endregion
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -1002,12 +1136,10 @@ class RelationshipBase(InstanceModelMixin):
     :vartype properties: {basestring: :class:`Parameter`}
     :ivar interfaces: Bundles of operations
     :vartype interfaces: {basestring: :class:`Interfaces`}
-
     :ivar source_position: ??
     :vartype source_position: int
     :ivar target_position: ??
     :vartype target_position: int
-
     :ivar source_node: Source node
     :vartype source_node: :class:`Node`
     :ivar target_node: Target node
@@ -1027,37 +1159,6 @@ class RelationshipBase(InstanceModelMixin):
                           'source_node_name',
                           'target_node_name']
 
-    @declared_attr
-    def relationship_template(cls):
-        return relationship.many_to_one(cls, 'relationship_template')
-
-    @declared_attr
-    def requirement_template(cls):
-        return relationship.many_to_one(cls, 'requirement_template')
-
-    @declared_attr
-    def type(cls):
-        return relationship.many_to_one(cls, 'type')
-
-    @declared_attr
-    def target_capability(cls):
-        return relationship.one_to_one(cls, 'capability')
-
-    @declared_attr
-    def properties(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
-
-    @declared_attr
-    def interfaces(cls):
-        return relationship.one_to_many(cls, 'interface', dict_key='name')
-
-    # region orchestration
-
-    source_position = Column(Integer) # ???
-    target_position = Column(Integer) # ???
-
-    # endregion
-
     # region foreign keys
 
     @declared_attr
@@ -1106,6 +1207,63 @@ class RelationshipBase(InstanceModelMixin):
 
     # endregion
 
+    # region one_to_one relationships
+
+    @declared_attr
+    def target_capability(cls):
+        return relationship.one_to_one(cls, 'capability', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region one_to_many relationships
+
+    @declared_attr
+    def tasks(cls):
+        return relationship.one_to_many(cls, 'task')
+
+    @declared_attr
+    def interfaces(cls):
+        return relationship.one_to_many(cls, 'interface', dict_key='name')
+
+    # endregion
+
+    # region many_to_one relationships
+
+    @declared_attr
+    def source_node(cls):
+        return relationship.many_to_one(
+            cls, 'node', fk='source_node_fk', back_populates='outbound_relationships')
+
+    @declared_attr
+    def target_node(cls):
+        return relationship.many_to_one(
+            cls, 'node', fk='target_node_fk', back_populates='inbound_relationships')
+
+    @declared_attr
+    def relationship_template(cls):
+        return relationship.many_to_one(cls, 'relationship_template')
+
+    @declared_attr
+    def requirement_template(cls):
+        return relationship.many_to_one(cls, 'requirement_template')
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region many_to_many relationships
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    # endregion
+
+    source_position = Column(Integer) # ???
+    target_position = Column(Integer) # ???
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -1166,7 +1324,6 @@ class CapabilityBase(InstanceModelMixin):
     :vartype occurrences: int
     :ivar properties: Associated parameters
     :vartype properties: {basestring: :class:`Parameter`}
-
     :ivar node: Containing node
     :vartype node: :class:`Node`
     :ivar relationship: Available when we are the target of a relationship
@@ -1181,22 +1338,6 @@ class CapabilityBase(InstanceModelMixin):
                           'node_fk',
                           'capability_template_fk']
 
-    @declared_attr
-    def capability_template(cls):
-        return relationship.many_to_one(cls, 'capability_template')
-
-    @declared_attr
-    def type(cls):
-        return relationship.many_to_one(cls, 'type')
-
-    min_occurrences = Column(Integer, default=None)
-    max_occurrences = Column(Integer, default=None)
-    occurrences = Column(Integer, default=0)
-
-    @declared_attr
-    def properties(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
-
     # region foreign_keys
 
     @declared_attr
@@ -1216,6 +1357,46 @@ class CapabilityBase(InstanceModelMixin):
 
     # endregion
 
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
+
+    # endregion
+
+    # region many_to_one relationships
+
+    @declared_attr
+    def node(cls):
+        return relationship.many_to_one(cls, 'node')
+
+    @declared_attr
+    def capability_template(cls):
+        return relationship.many_to_one(cls, 'capability_template')
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region many_to_many relationships
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    # endregion
+
+    min_occurrences = Column(Integer, default=None)
+    max_occurrences = Column(Integer, default=None)
+    occurrences = Column(Integer, default=0)
+
     @property
     def has_enough_relationships(self):
         if self.min_occurrences is not None:
@@ -1274,7 +1455,6 @@ class InterfaceBase(InstanceModelMixin):
     :vartype inputs: {basestring: :class:`Parameter`}
     :ivar operations: Operations
     :vartype operations: {basestring: :class:`Operation`}
-
     :ivar node: Containing node
     :vartype node: :class:`Node`
     :ivar group: Containing group
@@ -1291,24 +1471,6 @@ class InterfaceBase(InstanceModelMixin):
                           'relationship_fk',
                           'interface_template_fk']
 
-    @declared_attr
-    def interface_template(cls):
-        return relationship.many_to_one(cls, 'interface_template')
-
-    @declared_attr
-    def type(cls):
-        return relationship.many_to_one(cls, 'type')
-
-    description = Column(Text)
-
-    @declared_attr
-    def inputs(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
-
-    @declared_attr
-    def operations(cls):
-        return relationship.one_to_many(cls, 'operation', dict_key='name')
-
     # region foreign_keys
 
     @declared_attr
@@ -1338,6 +1500,56 @@ class InterfaceBase(InstanceModelMixin):
 
     # endregion
 
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
+
+    @declared_attr
+    def operations(cls):
+        return relationship.one_to_many(cls, 'operation', dict_key='name')
+
+    # endregion
+
+    # region many_to_one relationships
+
+    @declared_attr
+    def node(cls):
+        return relationship.many_to_one(cls, 'node')
+
+    @declared_attr
+    def relationship(cls):
+        return relationship.many_to_one(cls, 'relationship')
+
+    @declared_attr
+    def group(cls):
+        return relationship.many_to_one(cls, 'group')
+
+    @declared_attr
+    def interface_template(cls):
+        return relationship.many_to_one(cls, 'interface_template')
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region many_to_many relationships
+
+    @declared_attr
+    def inputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+
+    # endregion
+
+    description = Column(Text)
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -1392,7 +1604,6 @@ class OperationBase(InstanceModelMixin):
     :vartype max_retries: int
     :ivar retry_interval: Interval between retries (in seconds)
     :vartype retry_interval: int
-
     :ivar interface: Containing interface
     :vartype interface: :class:`Interface`
     :ivar service: Containing service
@@ -1406,27 +1617,6 @@ class OperationBase(InstanceModelMixin):
                           'plugin_fk',
                           'operation_template_fk']
 
-    @declared_attr
-    def operation_template(cls):
-        return relationship.many_to_one(cls, 'operation_template')
-
-    description = Column(Text)
-
-    @declared_attr
-    def plugin_specification(cls):
-        return relationship.one_to_one(cls, 'plugin_specification')
-
-    implementation = Column(Text)
-    dependencies = Column(modeling_types.StrictList(item_cls=basestring))
-
-    @declared_attr
-    def inputs(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
-
-    executor = Column(Text)
-    max_retries = Column(Integer)
-    retry_interval = Column(Integer)
-
     # region foreign_keys
 
     @declared_attr
@@ -1451,6 +1641,56 @@ class OperationBase(InstanceModelMixin):
 
     # endregion
 
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    @declared_attr
+    def plugin_specification(cls):
+        return relationship.one_to_one(
+            cls, 'plugin_specification', back_populates=relationship.NO_BACK_POP)
+
+    # endregion
+
+    # region one_to_many relationships
+
+    # endregion
+
+    # region many_to_one relationships
+
+    @declared_attr
+    def service(cls):
+        return relationship.many_to_one(cls, 'service')
+
+    @declared_attr
+    def interface(cls):
+        return relationship.many_to_one(cls, 'interface')
+
+    @declared_attr
+    def operation_template(cls):
+        return relationship.many_to_one(cls, 'operation_template')
+
+    # endregion
+
+    # region many_to_many relationships
+
+    @declared_attr
+    def inputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+
+    # endregion
+
+    description = Column(Text)
+    implementation = Column(Text)
+    dependencies = Column(modeling_types.StrictList(item_cls=basestring))
+    executor = Column(Text)
+    max_retries = Column(Integer)
+    retry_interval = Column(Integer)
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
@@ -1517,7 +1757,6 @@ class ArtifactBase(InstanceModelMixin):
     :vartype repository_credential: {basestring: basestring}
     :ivar properties: Associated parameters
     :vartype properties: {basestring: :class:`Parameter`}
-
     :ivar node: Containing node
     :vartype node: :class:`Node`
     """
@@ -1528,24 +1767,6 @@ class ArtifactBase(InstanceModelMixin):
                           'node_fk',
                           'artifact_template_fk']
 
-    @declared_attr
-    def artifact_template(cls):
-        return relationship.many_to_one(cls, 'artifact_template')
-
-    @declared_attr
-    def type(cls):
-        return relationship.many_to_one(cls, 'type')
-
-    description = Column(Text)
-    source_path = Column(Text)
-    target_path = Column(Text)
-    repository_url = Column(Text)
-    repository_credential = Column(modeling_types.StrictDict(basestring, basestring))
-
-    @declared_attr
-    def properties(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
-
     # region foreign_keys
 
     @declared_attr
@@ -1565,6 +1786,44 @@ class ArtifactBase(InstanceModelMixin):
 
     # endregion
 
+    # region association proxies
+
+    # endregion
+
+    # region one_to_one relationships
+
+    # endregion
+
+    # region one_to_many relationships
+
+    # endregion
+
+    # region many_to_one relationships
+    @declared_attr
+    def node(cls):
+        return relationship.many_to_one(cls, 'node')
+
+    @declared_attr
+    def artifact_template(cls):
+        return relationship.many_to_one(cls, 'artifact_template')
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type', back_populates=relationship.NO_BACK_POP)
+    # endregion
+
+    # region many_to_many relationships
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+    # endregion
+
+    description = Column(Text)
+    source_path = Column(Text)
+    target_path = Column(Text)
+    repository_url = Column(Text)
+    repository_credential = Column(modeling_types.StrictDict(basestring, basestring))
+
     @property
     def as_raw(self):
         return collections.OrderedDict((
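
Throughout the file, each model class is reorganized into the same labeled regions: foreign keys, association proxies, and one_to_one/one_to_many/many_to_one/many_to_many relationships, all declared with declared_attr so they can live on the *Base mixin classes. A minimal, self-contained sketch of that layout in plain SQLAlchemy (illustrative names only, not ARIA's relationship helper module), including the association proxy that lets queries address a related object's name:

from sqlalchemy import Column, ForeignKey, Integer, Text, create_engine
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.declarative import declarative_base, declared_attr
from sqlalchemy.orm import relationship, sessionmaker

Base = declarative_base()


class Service(Base):
    __tablename__ = 'service'
    id = Column(Integer, primary_key=True)
    name = Column(Text)


class NodeMixin(object):
    """Mixin laid out in the same regions as the models above."""

    # region foreign keys

    @declared_attr
    def service_fk(cls):
        """For Service one-to-many to Node"""
        return Column(Integer, ForeignKey('service.id'))

    # endregion

    # region association proxies

    @declared_attr
    def service_name(cls):
        """Required for use by SQLAlchemy queries"""
        return association_proxy('service', 'name')

    # endregion

    # region many_to_one relationships

    @declared_attr
    def service(cls):
        return relationship(Service, backref='nodes')

    # endregion


class Node(NodeMixin, Base):
    __tablename__ = 'node'
    id = Column(Integer, primary_key=True)
    name = Column(Text)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add(Node(name='node_1', service=Service(name='service_1')))
session.commit()
# The association proxy lets a query address the related service by name
assert session.query(Node).filter(Node.service_name == 'service_1').count() == 1
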


[6/9] incubator-ariatosca git commit: ARIA-125-Filtering-returns-the-wrong-models

Posted by ra...@apache.org.
ARIA-125-Filtering-returns-the-wrong-models


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/2d834753
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/2d834753
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/2d834753

Branch: refs/heads/ARIA-48-aria-cli
Commit: 2d834753a03564d1cd1f413268b5d769d3144845
Parents: 16ae46a
Author: max-orlov <ma...@gigaspaces.com>
Authored: Sun Apr 2 14:53:46 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Sun Apr 2 15:35:13 2017 +0300

----------------------------------------------------------------------
 aria/storage/sql_mapi.py             |  50 +++--------
 tests/mock/models.py                 |  13 +--
 tests/mock/topology.py               |  15 ++--
 tests/modeling/test_model_storage.py | 102 ----------------------
 tests/modeling/test_models.py        |  12 +--
 tests/storage/test_model_storage.py  | 139 ++++++++++++++++++++++++++++++
 6 files changed, 169 insertions(+), 162 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d834753/aria/storage/sql_mapi.py
----------------------------------------------------------------------
diff --git a/aria/storage/sql_mapi.py b/aria/storage/sql_mapi.py
index 8bbec54..59e1896 100644
--- a/aria/storage/sql_mapi.py
+++ b/aria/storage/sql_mapi.py
@@ -187,55 +187,31 @@ class SQLAlchemyModelAPI(api.ModelAPI):
             # If all columns should be returned, query directly from the model
             query = self._session.query(self.model_cls)
 
-        if not self._skip_joining(joins, include):
-            for join_table in joins:
-                query = query.join(join_table)
-
+        query = query.join(*joins)
         return query
 
     @staticmethod
     def _get_joins(model_class, columns):
         """Get a list of all the tables on which we need to join
 
-        :param columns: A set of all columns involved in the query
+        :param columns: A set of all attributes involved in the query
         """
-        joins = []  # Using a list instead of a set because order is important
+
+        # Using an OrderedDict rather than a set because order is important
+        joins = OrderedDict()
         for column_name in columns:
             column = getattr(model_class, column_name)
             while not column.is_attribute:
+                join_attr = column.local_attr
+                # This is a hack, to deal with the fact that SQLA doesn't
+                # fully support doing something like: `if join_attr in joins`,
+                # because some SQLA elements have their own comparators
+                join_attr_name = str(join_attr)
+                if join_attr_name not in joins:
+                    joins[join_attr_name] = join_attr
                 column = column.remote_attr
-                if column.is_attribute:
-                    join_class = column.class_
-                else:
-                    join_class = column.local_attr.class_
-
-                # Don't add the same class more than once
-                if join_class not in joins:
-                    joins.append(join_class)
-        return joins
-
-    @staticmethod
-    def _skip_joining(joins, include):
-        """Dealing with an edge case where the only included column comes from
-        an other table. In this case, we mustn't join on the same table again
 
-        :param joins: A list of tables on which we're trying to join
-        :param include: The list of
-        :return: True if we need to skip joining
-        """
-        if not joins:
-            return True
-        join_table_names = [t.__tablename__ for t in joins]
-
-        if len(include) != 1:
-            return False
-
-        column = include[0]
-        if column.is_clause_element:
-            table_name = column.element.table.name
-        else:
-            table_name = column.class_.__tablename__
-        return table_name in join_table_names
+        return joins.values()
 
     @staticmethod
     def _sort_query(query, sort=None):
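
The new _get_joins collects the relationship attributes to join on in an OrderedDict keyed by str(), because many SQLAlchemy attributes override the comparison operators that a plain membership test on a list would rely on, while insertion order still has to be preserved. The same idiom in isolation (hypothetical helper name, not part of the ARIA API):

from collections import OrderedDict


def unique_in_order(items, key=str):
    """Drop duplicates while preserving first-seen order, comparing by key()."""
    seen = OrderedDict()
    for item in items:
        item_key = key(item)
        if item_key not in seen:
            seen[item_key] = item
    return list(seen.values())


print(unique_in_order(['b', 'a', 'b', 'c', 'a']))  # ['b', 'a', 'c']
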

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d834753/tests/mock/models.py
----------------------------------------------------------------------
diff --git a/tests/mock/models.py b/tests/mock/models.py
index 457e7cb..1d29e2d 100644
--- a/tests/mock/models.py
+++ b/tests/mock/models.py
@@ -50,10 +50,10 @@ DEPENDENT_NODE_TEMPLATE_NAME = 'dependent_node_template'
 DEPENDENT_NODE_NAME = 'dependent_node'
 
 
-def create_service_template():
+def create_service_template(name=SERVICE_TEMPLATE_NAME):
     now = datetime.now()
     return models.ServiceTemplate(
-        name=SERVICE_TEMPLATE_NAME,
+        name=name,
         description=None,
         created_at=now,
         updated_at=now,
@@ -68,10 +68,10 @@ def create_service_template():
     )
 
 
-def create_service(service_template):
+def create_service(service_template, name=SERVICE_NAME):
     now = datetime.utcnow()
     return models.Service(
-        name=SERVICE_NAME,
+        name=name,
         service_template=service_template,
         description='',
         created_at=now,
@@ -81,7 +81,7 @@ def create_service(service_template):
     )
 
 
-def create_dependency_node_template(name, service_template):
+def create_dependency_node_template(service_template, name=DEPENDENCY_NODE_TEMPLATE_NAME):
     node_type = service_template.node_types.get_descendant('test_node_type')
     capability_type = service_template.capability_types.get_descendant('test_capability_type')
 
@@ -103,7 +103,8 @@ def create_dependency_node_template(name, service_template):
     return node_template
 
 
-def create_dependent_node_template(name, service_template, dependency_node_template):
+def create_dependent_node_template(
+        service_template, dependency_node_template, name=DEPENDENT_NODE_TEMPLATE_NAME):
     the_type = service_template.node_types.get_descendant('test_node_type')
 
     requirement_template = models.RequirementTemplate(

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d834753/tests/mock/topology.py
----------------------------------------------------------------------
diff --git a/tests/mock/topology.py b/tests/mock/topology.py
index edf6b7d..e5b4e01 100644
--- a/tests/mock/topology.py
+++ b/tests/mock/topology.py
@@ -22,8 +22,7 @@ def create_simple_topology_single_node(model_storage, create_operation):
     service_template = models.create_service_template()
     service = models.create_service(service_template)
 
-    node_template = models.create_dependency_node_template(
-        models.DEPENDENCY_NODE_TEMPLATE_NAME, service_template)
+    node_template = models.create_dependency_node_template(service_template)
     interface_template = models.create_interface_template(
         service_template,
         'Standard', 'create',
@@ -55,10 +54,9 @@ def create_simple_topology_two_nodes(model_storage):
 
     # Creating a simple service with node -> node as a graph
 
-    dependency_node_template = models.create_dependency_node_template(
-        models.DEPENDENCY_NODE_TEMPLATE_NAME, service_template)
-    dependent_node_template = models.create_dependent_node_template(
-        models.DEPENDENT_NODE_TEMPLATE_NAME, service_template, dependency_node_template)
+    dependency_node_template = models.create_dependency_node_template(service_template)
+    dependent_node_template = models.create_dependent_node_template(service_template,
+                                                                    dependency_node_template)
 
     dependency_node = models.create_node(
         models.DEPENDENCY_NODE_NAME, dependency_node_template, service)
@@ -87,9 +85,8 @@ def create_simple_topology_three_nodes(model_storage):
     service_id = create_simple_topology_two_nodes(model_storage)
     service = model_storage.service.get(service_id)
     third_node_template = models.create_dependency_node_template(
-        'another_dependency_node_template', service.service_template)
-    third_node = models.create_node(
-        'another_dependency_node', third_node_template, service)
+        service.service_template, name='another_dependency_node_template')
+    third_node = models.create_node('another_dependency_node', third_node_template, service)
     new_relationship = models.create_relationship(
         source=model_storage.node.get_by_name(models.DEPENDENT_NODE_NAME),
         target=third_node,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d834753/tests/modeling/test_model_storage.py
----------------------------------------------------------------------
diff --git a/tests/modeling/test_model_storage.py b/tests/modeling/test_model_storage.py
deleted file mode 100644
index bb778d4..0000000
--- a/tests/modeling/test_model_storage.py
+++ /dev/null
@@ -1,102 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-
-from aria.storage import (
-    ModelStorage,
-    exceptions,
-    sql_mapi
-)
-from aria import (application_model_storage, modeling)
-from ..storage import (release_sqlite_storage, init_inmemory_model_storage)
-
-from . import MockModel
-
-
-@pytest.fixture
-def storage():
-    base_storage = ModelStorage(sql_mapi.SQLAlchemyModelAPI,
-                                initiator=init_inmemory_model_storage)
-    base_storage.register(MockModel)
-    yield base_storage
-    release_sqlite_storage(base_storage)
-
-
-@pytest.fixture(scope='module', autouse=True)
-def module_cleanup():
-    modeling.models.aria_declarative_base.metadata.remove(MockModel.__table__)  #pylint: disable=no-member
-
-
-def test_storage_base(storage):
-    with pytest.raises(AttributeError):
-        storage.non_existent_attribute()
-
-
-def test_model_storage(storage):
-    mock_model = MockModel(value=0, name='model_name')
-    storage.mock_model.put(mock_model)
-
-    assert storage.mock_model.get_by_name('model_name') == mock_model
-
-    assert [mm_from_storage for mm_from_storage in storage.mock_model.iter()] == [mock_model]
-    assert [mm_from_storage for mm_from_storage in storage.mock_model] == [mock_model]
-
-    storage.mock_model.delete(mock_model)
-    with pytest.raises(exceptions.StorageError):
-        storage.mock_model.get(mock_model.id)
-
-
-def test_application_storage_factory():
-    storage = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
-                                        initiator=init_inmemory_model_storage)
-
-    assert storage.service_template
-    assert storage.node_template
-    assert storage.group_template
-    assert storage.policy_template
-    assert storage.substitution_template
-    assert storage.substitution_template_mapping
-    assert storage.requirement_template
-    assert storage.relationship_template
-    assert storage.capability_template
-    assert storage.interface_template
-    assert storage.operation_template
-    assert storage.artifact_template
-
-    assert storage.service
-    assert storage.node
-    assert storage.group
-    assert storage.policy
-    assert storage.substitution
-    assert storage.substitution_mapping
-    assert storage.relationship
-    assert storage.capability
-    assert storage.interface
-    assert storage.operation
-    assert storage.artifact
-
-    assert storage.execution
-    assert storage.service_update
-    assert storage.service_update_step
-    assert storage.service_modification
-    assert storage.plugin
-    assert storage.task
-
-    assert storage.parameter
-    assert storage.type
-    assert storage.metadata
-
-    release_sqlite_storage(storage)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d834753/tests/modeling/test_models.py
----------------------------------------------------------------------
diff --git a/tests/modeling/test_models.py b/tests/modeling/test_models.py
index 35ae09f..8cd00f8 100644
--- a/tests/modeling/test_models.py
+++ b/tests/modeling/test_models.py
@@ -89,10 +89,8 @@ def _service_update_storage():
 def _node_template_storage():
     storage = _service_storage()
     service_template = storage.service_template.list()[0]
-    dependency_node_template = mock.models.create_dependency_node_template(
-        mock.models.DEPENDENCY_NODE_TEMPLATE_NAME, service_template)
-    mock.models.create_dependent_node_template(
-        mock.models.DEPENDENCY_NODE_NAME, service_template, dependency_node_template)
+    dependency_node_template = mock.models.create_dependency_node_template(service_template)
+    mock.models.create_dependent_node_template(service_template, dependency_node_template)
     storage.service_template.update(service_template)
     return storage
 
@@ -104,10 +102,8 @@ def _nodes_storage():
         mock.models.DEPENDENCY_NODE_TEMPLATE_NAME)
     mock.models.create_node(mock.models.DEPENDENCY_NODE_NAME, dependency_node_template, service)
 
-    dependent_node_template = \
-        mock.models.create_dependent_node_template(mock.models.DEPENDENT_NODE_TEMPLATE_NAME,
-                                                   service.service_template,
-                                                   dependency_node_template)
+    dependent_node_template = mock.models.create_dependent_node_template(service.service_template,
+                                                                         dependency_node_template)
 
     mock.models.create_node(mock.models.DEPENDENT_NODE_NAME, dependent_node_template, service)
     storage.service.update(service)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/2d834753/tests/storage/test_model_storage.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_model_storage.py b/tests/storage/test_model_storage.py
new file mode 100644
index 0000000..fa57e6f
--- /dev/null
+++ b/tests/storage/test_model_storage.py
@@ -0,0 +1,139 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from aria import (
+    application_model_storage,
+    modeling
+)
+from aria.storage import (
+    ModelStorage,
+    exceptions,
+    sql_mapi,
+)
+
+from tests import (
+    mock,
+    storage as tests_storage,
+    modeling as tests_modeling
+)
+
+
+@pytest.fixture
+def storage():
+    base_storage = ModelStorage(sql_mapi.SQLAlchemyModelAPI,
+                                initiator=tests_storage.init_inmemory_model_storage)
+    base_storage.register(tests_modeling.MockModel)
+    yield base_storage
+    tests_storage.release_sqlite_storage(base_storage)
+
+
+@pytest.fixture(scope='module', autouse=True)
+def module_cleanup():
+    modeling.models.aria_declarative_base.metadata.remove(tests_modeling.MockModel.__table__)  #pylint: disable=no-member
+
+
+def test_storage_base(storage):
+    with pytest.raises(AttributeError):
+        storage.non_existent_attribute()
+
+
+def test_model_storage(storage):
+    mock_model = tests_modeling.MockModel(value=0, name='model_name')
+    storage.mock_model.put(mock_model)
+
+    assert storage.mock_model.get_by_name('model_name') == mock_model
+
+    assert [mm_from_storage for mm_from_storage in storage.mock_model.iter()] == [mock_model]
+    assert [mm_from_storage for mm_from_storage in storage.mock_model] == [mock_model]
+
+    storage.mock_model.delete(mock_model)
+    with pytest.raises(exceptions.StorageError):
+        storage.mock_model.get(mock_model.id)
+
+
+def test_application_storage_factory():
+    storage = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
+                                        initiator=tests_storage.init_inmemory_model_storage)
+
+    assert storage.service_template
+    assert storage.node_template
+    assert storage.group_template
+    assert storage.policy_template
+    assert storage.substitution_template
+    assert storage.substitution_template_mapping
+    assert storage.requirement_template
+    assert storage.relationship_template
+    assert storage.capability_template
+    assert storage.interface_template
+    assert storage.operation_template
+    assert storage.artifact_template
+
+    assert storage.service
+    assert storage.node
+    assert storage.group
+    assert storage.policy
+    assert storage.substitution
+    assert storage.substitution_mapping
+    assert storage.relationship
+    assert storage.capability
+    assert storage.interface
+    assert storage.operation
+    assert storage.artifact
+
+    assert storage.execution
+    assert storage.service_update
+    assert storage.service_update_step
+    assert storage.service_modification
+    assert storage.plugin
+    assert storage.task
+
+    assert storage.parameter
+    assert storage.type
+    assert storage.metadata
+
+    tests_storage.release_sqlite_storage(storage)
+
+
+@pytest.fixture
+def context(tmpdir):
+    result = mock.context.simple(str(tmpdir))
+    yield result
+    tests_storage.release_sqlite_storage(result.model)
+
+
+def test_mapi_include(context):
+    service1 = context.model.service.list()[0]
+    service1.name = 'service1'
+    service1.service_template.name = 'service_template1'
+    context.model.service.update(service1)
+
+    service_template2 = mock.models.create_service_template('service_template2')
+    service2 = mock.models.create_service(service_template2, 'service2')
+    context.model.service.put(service2)
+
+    assert service1 != service2
+    assert service1.service_template != service2.service_template
+
+    def assert_include(service):
+        st_name = context.model.service.get(service.id, include=('service_template_name',))
+        st_name_list = context.model.service.list(filters={'id': service.id},
+                                                  include=('service_template_name', ))
+        assert len(st_name) == len(st_name_list) == 1
+        assert st_name[0] == st_name_list[0][0] == service.service_template.name
+
+    assert_include(service1)
+    assert_include(service2)
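
test_mapi_include above exercises the ARIA-125 fix: when the only included column lives on a related table, the query must still join through the relationship, otherwise the filter constrains one table while the included value is read from a cross join. A self-contained sketch of that failure mode in plain SQLAlchemy (hypothetical two-table model, not the MAPI code itself):

from sqlalchemy import Column, ForeignKey, Integer, Text, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker

Base = declarative_base()


class ServiceTemplate(Base):
    __tablename__ = 'service_template'
    id = Column(Integer, primary_key=True)
    name = Column(Text)


class Service(Base):
    __tablename__ = 'service'
    id = Column(Integer, primary_key=True)
    name = Column(Text)
    service_template_fk = Column(Integer, ForeignKey('service_template.id'))
    service_template = relationship(ServiceTemplate)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add(Service(name='service1',
                    service_template=ServiceTemplate(name='service_template1')))
session.add(Service(name='service2',
                    service_template=ServiceTemplate(name='service_template2')))
session.commit()

service2 = session.query(Service).filter_by(name='service2').one()

# No join: a cross product, so the filter on Service does not constrain
# ServiceTemplate and both template names come back
wrong = session.query(ServiceTemplate.name).filter(Service.id == service2.id).all()
assert len(wrong) == 2

# Joining through the relationship ties the included column to the filtered row
right = (session.query(ServiceTemplate.name)
         .select_from(Service)
         .join(Service.service_template)
         .filter(Service.id == service2.id)
         .all())
assert right == [('service_template2',)]
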


[9/9] incubator-ariatosca git commit: ARIA-48 cli

Posted by ra...@apache.org.
ARIA-48 cli


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/3f30fa35
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/3f30fa35
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/3f30fa35

Branch: refs/heads/ARIA-48-aria-cli
Commit: 3f30fa35daa54577c529be8277cae658a29e616e
Parents: 2d83475
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Tue Mar 28 12:17:46 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Sun Apr 2 16:04:37 2017 +0300

----------------------------------------------------------------------
 aria/__init__.py                                |   2 +-
 aria/cli/VERSION                                |   3 +
 aria/cli/args_parser.py                         | 269 ---------
 aria/cli/cli.py                                 | 113 ----
 aria/cli/cli/__init__.py                        |  14 +
 aria/cli/cli/aria.py                            | 453 +++++++++++++++
 aria/cli/cli/helptexts.py                       |  56 ++
 aria/cli/colorful_event.py                      | 152 ++++++
 aria/cli/commands.py                            | 546 -------------------
 aria/cli/commands/__init__.py                   |  14 +
 aria/cli/commands/executions.py                 | 172 ++++++
 aria/cli/commands/logs.py                       |  68 +++
 aria/cli/commands/node_templates.py             | 104 ++++
 aria/cli/commands/nodes.py                      |  96 ++++
 aria/cli/commands/plugins.py                    | 145 +++++
 aria/cli/commands/service_templates.py          | 207 +++++++
 aria/cli/commands/services.py                   | 175 ++++++
 aria/cli/commands/workflows.py                  | 107 ++++
 aria/cli/config.py                              |  46 --
 aria/cli/config/__init__.py                     |  14 +
 aria/cli/config/config.py                       |  70 +++
 aria/cli/config/config_template.yaml            |  12 +
 aria/cli/constants.py                           |  18 +
 aria/cli/csar.py                                |  11 +-
 aria/cli/dry.py                                 |  88 ---
 aria/cli/env.py                                 | 118 ++++
 aria/cli/exceptions.py                          |  54 +-
 aria/cli/inputs.py                              | 118 ++++
 aria/cli/logger.py                              | 113 ++++
 aria/cli/main.py                                |  73 +++
 aria/cli/service_template_utils.py              | 140 +++++
 aria/cli/storage.py                             |  95 ----
 aria/cli/table.py                               |  90 +++
 aria/cli/utils.py                               | 152 ++++++
 aria/core.py                                    | 116 ++++
 aria/exceptions.py                              |  16 +
 aria/modeling/__init__.py                       |   2 +
 aria/modeling/exceptions.py                     |  18 +
 aria/modeling/orchestration.py                  |  14 +-
 aria/modeling/service_changes.py                |  10 +-
 aria/modeling/service_instance.py               |  15 +-
 aria/modeling/service_template.py               |  16 +-
 aria/modeling/utils.py                          |  87 ++-
 aria/orchestrator/context/common.py             |  27 +-
 aria/orchestrator/context/workflow.py           |  19 +-
 .../execution_plugin/ctx_proxy/server.py        |   3 +-
 aria/orchestrator/runner.py                     | 101 ----
 aria/orchestrator/workflow_runner.py            | 147 +++++
 aria/orchestrator/workflows/api/task.py         |  41 +-
 aria/orchestrator/workflows/builtin/__init__.py |   1 +
 .../workflows/builtin/execute_operation.py      |  15 +-
 aria/orchestrator/workflows/builtin/utils.py    |  86 ++-
 aria/storage/core.py                            |   5 +
 aria/utils/archive.py                           |  63 +++
 aria/utils/exceptions.py                        |  11 +
 aria/utils/file.py                              |  13 +
 aria/utils/formatting.py                        |  43 ++
 aria/utils/threading.py                         |  24 +
 aria/utils/type.py                              |  57 ++
 .../simple_v1_0/modeling/__init__.py            |   3 +-
 requirements.in                                 |   5 +
 setup.py                                        |   2 +-
 tests/mock/context.py                           |   7 +-
 tests/mock/models.py                            |  13 +-
 tests/modeling/test_models.py                   |  10 +-
 tests/orchestrator/context/test_operation.py    |  45 +-
 .../context/test_resource_render.py             |   4 +-
 tests/orchestrator/context/test_serialize.py    |   4 +-
 tests/orchestrator/context/test_toolbelt.py     |  11 +-
 tests/orchestrator/context/test_workflow.py     |  10 +-
 .../orchestrator/execution_plugin/test_local.py |  10 +-
 tests/orchestrator/execution_plugin/test_ssh.py |   2 +-
 .../workflows/builtin/test_execute_operation.py |   3 +-
 .../orchestrator/workflows/core/test_engine.py  |  10 +-
 .../test_task_graph_into_exececution_graph.py   | 111 ----
 .../test_task_graph_into_execution_graph.py     | 111 ++++
 tests/utils/test_threading.py                   |  35 ++
 77 files changed, 3633 insertions(+), 1621 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/__init__.py
----------------------------------------------------------------------
diff --git a/aria/__init__.py b/aria/__init__.py
index b9251d5..df75b1e 100644
--- a/aria/__init__.py
+++ b/aria/__init__.py
@@ -84,6 +84,6 @@ def application_resource_storage(api, api_kwargs=None, initiator=None, initiator
 
     return storage.ResourceStorage(api_cls=api,
                                    api_kwargs=api_kwargs,
-                                   items=['blueprint', 'deployment', 'plugin'],
+                                   items=['service_template', 'service', 'plugin'],
                                    initiator=initiator,
                                    initiator_kwargs=initiator_kwargs)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/VERSION
----------------------------------------------------------------------
diff --git a/aria/cli/VERSION b/aria/cli/VERSION
new file mode 100644
index 0000000..6a3252e
--- /dev/null
+++ b/aria/cli/VERSION
@@ -0,0 +1,3 @@
+{
+  "version": "0.1.0"
+}

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/args_parser.py
----------------------------------------------------------------------
diff --git a/aria/cli/args_parser.py b/aria/cli/args_parser.py
deleted file mode 100644
index 81ee513..0000000
--- a/aria/cli/args_parser.py
+++ /dev/null
@@ -1,269 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Argument parsing configuration and functions
-"""
-
-import argparse
-from functools import partial
-
-from ..utils.argparse import ArgumentParser
-
-NO_VERBOSE = 0
-
-
-class SmartFormatter(argparse.HelpFormatter):
-    """
-    TODO: what is this?
-    """
-    def _split_lines(self, text, width):
-        if text.startswith('R|'):
-            return text[2:].splitlines()
-        return super(SmartFormatter, self)._split_lines(text, width)
-
-
-def sub_parser_decorator(func=None, **parser_settings):
-    """
-    Decorated for sub_parser argument definitions
-    """
-    if not func:
-        return partial(sub_parser_decorator, **parser_settings)
-
-    def _wrapper(parser):
-        sub_parser = parser.add_parser(**parser_settings)
-        sub_parser.add_argument(
-            '-v', '--verbose',
-            dest='verbosity',
-            action='count',
-            default=NO_VERBOSE,
-            help='Set verbosity level (can be passed multiple times)')
-        func(sub_parser)
-        return sub_parser
-    return _wrapper
-
-
-def config_parser(parser=None):
-    """
-    Top level argparse configuration
-    """
-    parser = parser or ArgumentParser(
-        prog='ARIA',
-        description="ARIA's Command Line Interface",
-        formatter_class=SmartFormatter)
-    parser.add_argument('-v', '--version', action='version')
-    sub_parser = parser.add_subparsers(title='Commands', dest='command')
-    add_init_parser(sub_parser)
-    add_execute_parser(sub_parser)
-    add_parse_parser(sub_parser)
-    add_workflow_parser(sub_parser)
-    add_spec_parser(sub_parser)
-    add_csar_create_parser(sub_parser)
-    add_csar_open_parser(sub_parser)
-    add_csar_validate_parser(sub_parser)
-    return parser
-
-
-@sub_parser_decorator(
-    name='parse',
-    help='Parse a blueprint',
-    formatter_class=SmartFormatter)
-def add_parse_parser(parse):
-    """
-    ``parse`` command parser configuration
-    """
-    parse.add_argument(
-        'uri',
-        help='URI or file path to service template')
-    parse.add_argument(
-        'consumer',
-        nargs='?',
-        default='validate',
-        help='"validate" (default), "presentation", "template", "types", "instance", or consumer '
-             'class name (full class path or short name)')
-    parse.add_argument(
-        '--loader-source',
-        default='aria.parser.loading.DefaultLoaderSource',
-        help='loader source class for the parser')
-    parse.add_argument(
-        '--reader-source',
-        default='aria.parser.reading.DefaultReaderSource',
-        help='reader source class for the parser')
-    parse.add_argument(
-        '--presenter-source',
-        default='aria.parser.presentation.DefaultPresenterSource',
-        help='presenter source class for the parser')
-    parse.add_argument(
-        '--presenter',
-        help='force use of this presenter class in parser')
-    parse.add_argument(
-        '--prefix', nargs='*',
-        help='prefixes for imports')
-    parse.add_flag_argument(
-        'debug',
-        help_true='print debug info',
-        help_false='don\'t print debug info')
-    parse.add_flag_argument(
-        'cached-methods',
-        help_true='enable cached methods',
-        help_false='disable cached methods',
-        default=True)
-
-
-@sub_parser_decorator(
-    name='workflow',
-    help='Run a workflow on a blueprint',
-    formatter_class=SmartFormatter)
-def add_workflow_parser(workflow):
-    """
-    ``workflow`` command parser configuration
-    """
-    workflow.add_argument(
-        'uri',
-        help='URI or file path to service template')
-    workflow.add_argument(
-        '-w', '--workflow',
-        default='install',
-        help='The workflow name')
-    workflow.add_flag_argument(
-        'dry',
-        default=True,
-        help_true='dry run',
-        help_false='wet run')
-
-
-@sub_parser_decorator(
-    name='init',
-    help='Initialize environment',
-    formatter_class=SmartFormatter)
-def add_init_parser(init):
-    """
-    ``init`` command parser configuration
-    """
-    init.add_argument(
-        '-d', '--deployment-id',
-        required=True,
-        help='A unique ID for the deployment')
-    init.add_argument(
-        '-p', '--blueprint-path',
-        dest='blueprint_path',
-        required=True,
-        help='The path to the desired blueprint')
-    init.add_argument(
-        '-i', '--inputs',
-        dest='input',
-        action='append',
-        help='R|Inputs for the local workflow creation \n'
-             '(Can be provided as wildcard based paths (*.yaml, etc..) to YAML files, \n'
-             'a JSON string or as "key1=value1;key2=value2"). \n'
-             'This argument can be used multiple times')
-    init.add_argument(
-        '-b', '--blueprint-id',
-        dest='blueprint_id',
-        required=True,
-        help='The blueprint ID'
-    )
-
-
-@sub_parser_decorator(
-    name='execute',
-    help='Execute a workflow',
-    formatter_class=SmartFormatter)
-def add_execute_parser(execute):
-    """
-    ``execute`` command parser configuration
-    """
-    execute.add_argument(
-        '-d', '--deployment-id',
-        required=True,
-        help='A unique ID for the deployment')
-    execute.add_argument(
-        '-w', '--workflow',
-        dest='workflow_id',
-        help='The workflow to execute')
-    execute.add_argument(
-        '-p', '--parameters',
-        dest='parameters',
-        action='append',
-        help='R|Parameters for the workflow execution\n'
-             '(Can be provided as wildcard based paths (*.yaml, etc..) to YAML files,\n'
-             'a JSON string or as "key1=value1;key2=value2").\n'
-             'This argument can be used multiple times.')
-    execute.add_argument(
-        '--task-retries',
-        dest='task_retries',
-        type=int,
-        help='How many times should a task be retried in case of failure')
-    execute.add_argument(
-        '--task-retry-interval',
-        dest='task_retry_interval',
-        default=1,
-        type=int,
-        help='How many seconds to wait before each task is retried')
-
-
-@sub_parser_decorator(
-    name='csar-create',
-    help='Create a CSAR file from a TOSCA service template directory',
-    formatter_class=SmartFormatter)
-def add_csar_create_parser(parse):
-    parse.add_argument(
-        'source',
-        help='Service template directory')
-    parse.add_argument(
-        'entry',
-        help='Entry definition file relative to service template directory')
-    parse.add_argument(
-        '-d', '--destination',
-        help='Output CSAR zip destination',
-        required=True)
-
-
-@sub_parser_decorator(
-    name='csar-open',
-    help='Extracts a CSAR file to a TOSCA service template directory',
-    formatter_class=SmartFormatter)
-def add_csar_open_parser(parse):
-    parse.add_argument(
-        'source',
-        help='CSAR file location')
-    parse.add_argument(
-        '-d', '--destination',
-        help='Output directory to extract the CSAR into',
-        required=True)
-
-
-@sub_parser_decorator(
-    name='csar-validate',
-    help='Validates a CSAR file',
-    formatter_class=SmartFormatter)
-def add_csar_validate_parser(parse):
-    parse.add_argument(
-        'source',
-        help='CSAR file location')
-
-
-@sub_parser_decorator(
-    name='spec',
-    help='Specification tool',
-    formatter_class=SmartFormatter)
-def add_spec_parser(spec):
-    """
-    ``spec`` command parser configuration
-    """
-    spec.add_argument(
-        '--csv',
-        action='store_true',
-        help='output as CSV')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/cli.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli.py b/aria/cli/cli.py
deleted file mode 100644
index 8d014b3..0000000
--- a/aria/cli/cli.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI Entry point
-"""
-
-import os
-import logging
-import tempfile
-
-from .. import install_aria_extensions
-from ..logger import (
-    create_logger,
-    create_console_log_handler,
-    create_file_log_handler,
-    LoggerMixin,
-)
-from ..utils.exceptions import print_exception
-from .args_parser import config_parser
-from .commands import (
-    ParseCommand,
-    WorkflowCommand,
-    InitCommand,
-    ExecuteCommand,
-    CSARCreateCommand,
-    CSAROpenCommand,
-    CSARValidateCommand,
-    SpecCommand,
-)
-
-__version__ = '0.1.0'
-
-
-class AriaCli(LoggerMixin):
-    """
-    Context manager based class that enables proper top level error handling
-    """
-
-    def __init__(self, *args, **kwargs):
-        super(AriaCli, self).__init__(*args, **kwargs)
-        self.commands = {
-            'parse': ParseCommand.with_logger(base_logger=self.logger),
-            'workflow': WorkflowCommand.with_logger(base_logger=self.logger),
-            'init': InitCommand.with_logger(base_logger=self.logger),
-            'execute': ExecuteCommand.with_logger(base_logger=self.logger),
-            'csar-create': CSARCreateCommand.with_logger(base_logger=self.logger),
-            'csar-open': CSAROpenCommand.with_logger(base_logger=self.logger),
-            'csar-validate': CSARValidateCommand.with_logger(base_logger=self.logger),
-            'spec': SpecCommand.with_logger(base_logger=self.logger),
-        }
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        """
-        Here we will handle errors
-        :param exc_type:
-        :param exc_val:
-        :param exc_tb:
-        :return:
-        """
-        # todo: error handling
-        # todo: cleanup if needed
-        # TODO: user message if needed
-        pass
-
-    def run(self):
-        """
-        Parses user arguments and run the appropriate command
-        """
-        parser = config_parser()
-        args, unknown_args = parser.parse_known_args()
-
-        command_handler = self.commands[args.command]
-        self.logger.debug('Running command: {args.command} handler: {0}'.format(
-            command_handler, args=args))
-        try:
-            command_handler(args, unknown_args)
-        except Exception as e:
-            print_exception(e)
-
-
-def main():
-    """
-    CLI entry point
-    """
-    install_aria_extensions()
-    create_logger(
-        handlers=[
-            create_console_log_handler(),
-            create_file_log_handler(file_path=os.path.join(tempfile.gettempdir(), 'aria_cli.log')),
-        ],
-        level=logging.INFO)
-    with AriaCli() as aria:
-        aria.run()
-
-
-if __name__ == '__main__':
-    main()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/cli/__init__.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli/__init__.py b/aria/cli/cli/__init__.py
new file mode 100644
index 0000000..ae1e83e
--- /dev/null
+++ b/aria/cli/cli/__init__.py
@@ -0,0 +1,14 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/cli/aria.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli/aria.py b/aria/cli/cli/aria.py
new file mode 100644
index 0000000..baa72eb
--- /dev/null
+++ b/aria/cli/cli/aria.py
@@ -0,0 +1,453 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import sys
+import difflib
+import StringIO
+import traceback
+from functools import wraps
+
+import click
+
+from ..env import env, logger
+from ..cli import helptexts
+from ..inputs import inputs_to_dict
+from ..constants import SAMPLE_SERVICE_TEMPLATE_FILENAME
+from ...utils.exceptions import get_exception_as_string
+
+
+CLICK_CONTEXT_SETTINGS = dict(
+    help_option_names=['-h', '--help'],
+    token_normalize_func=lambda param: param.lower())
+
+
+class MutuallyExclusiveOption(click.Option):
+    """Makes options mutually exclusive. The option must pass a `cls` argument
+    with this class name and a `mutually_exclusive` argument with a list of
+    argument names it is mutually exclusive with.
+
+    NOTE: All mutually exclusive options must use this. It's not enough to
+    use it in just one of the options.
+    """
+
+    def __init__(self, *args, **kwargs):
+        self.mutually_exclusive = set(kwargs.pop('mutually_exclusive', []))
+        self.mutuality_error_message = \
+            kwargs.pop('mutuality_error_message',
+                       helptexts.DEFAULT_MUTUALITY_MESSAGE)
+        self.mutuality_string = ', '.join(self.mutually_exclusive)
+        if self.mutually_exclusive:
+            help = kwargs.get('help', '')
+            kwargs['help'] = (
+                '{0}. This argument is mutually exclusive with '
+                'arguments: [{1}] ({2})'.format(
+                    help,
+                    self.mutuality_string,
+                    self.mutuality_error_message))
+        super(MutuallyExclusiveOption, self).__init__(*args, **kwargs)
+
+    def handle_parse_result(self, ctx, opts, args):
+        if self.mutually_exclusive.intersection(opts) and self.name in opts:
+            raise click.UsageError(
+                'Illegal usage: `{0}` is mutually exclusive with '
+                'arguments: [{1}] ({2}).'.format(
+                    self.name,
+                    self.mutuality_string,
+                    self.mutuality_error_message))
+        return super(MutuallyExclusiveOption, self).handle_parse_result(
+            ctx, opts, args)
+
+
+def _format_version_data(version_data,
+                         prefix=None,
+                         suffix=None,
+                         infix=None):
+    all_data = version_data.copy()
+    all_data['prefix'] = prefix or ''
+    all_data['suffix'] = suffix or ''
+    all_data['infix'] = infix or ''
+    output = StringIO.StringIO()
+    output.write('{prefix}{version}'.format(**all_data))
+    output.write('{suffix}'.format(**all_data))
+    return output.getvalue()
+
+
+def show_version(ctx, param, value):
+    if not value:
+        return
+
+    cli_version_data = env.get_version_data()
+    cli_version = _format_version_data(
+        cli_version_data,
+        prefix='ARIA CLI ',
+        infix=' ' * 5,
+        suffix='')
+
+    logger.info(cli_version)
+    ctx.exit()
+
+
+def inputs_callback(ctx, param, value):
+    """Allows any inputs provided to a command to arrive already
+    processed (as a dict), instead of having to call `inputs_to_dict`
+    inside the command.
+
+    `@aria.options.inputs` already calls this callback so that
+    every time you use the option it returns the inputs as a
+    dictionary.
+    """
+    if not value:
+        return {}
+
+    return inputs_to_dict(value)
+
+
+def set_verbosity_level(ctx, param, value):
+    if not value:
+        return
+
+    env.logging.verbosity_level = value
+
+
+def set_cli_except_hook():
+
+    def recommend(possible_solutions):
+        logger.info('Possible solutions:')
+        for solution in possible_solutions:
+            logger.info('  - {0}'.format(solution))
+
+    def new_excepthook(tpe, value, tb):
+        if env.logging.is_high_verbose_level():
+            # log error including traceback
+            logger.error(get_exception_as_string(tpe, value, tb))
+        else:
+            # write the full error to the log file
+            with open(env.logging.log_file, 'a') as log_file:
+                traceback.print_exception(
+                    etype=tpe,
+                    value=value,
+                    tb=tb,
+                    file=log_file)
+            # print only the error message
+            print value
+
+        if hasattr(value, 'possible_solutions'):
+            recommend(getattr(value, 'possible_solutions'))
+
+    sys.excepthook = new_excepthook
+
+
+def pass_logger(func):
+    """Simply passes the logger to a command.
+    """
+    # Wraps here makes sure the original docstring propagates to click
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        return func(logger=logger, *args, **kwargs)
+
+    return wrapper
+
+
+def pass_plugin_manager(func):
+    """Simply passes the plugin manager to a command.
+    """
+    # Wraps here makes sure the original docstring propagates to click
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        return func(plugin_manager=env.plugin_manager, *args, **kwargs)
+
+    return wrapper
+
+
+def pass_model_storage(func):
+    """Simply passes the model storage to a command.
+    """
+    # Wraps here makes sure the original docstring propagates to click
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        return func(model_storage=env.model_storage, *args, **kwargs)
+
+    return wrapper
+
+
+def pass_resource_storage(func):
+    """Simply passes the resource storage to a command.
+    """
+    # Wraps here makes sure the original docstring propagates to click
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        return func(resource_storage=env.resource_storage, *args, **kwargs)
+
+    return wrapper
+
+
+def pass_context(func):
+    """Make click context ARIA specific
+
+    This exists purely for aesthetic reasons; otherwise
+    some decorators would be called `@click.something` instead of
+    `@aria.something`
+    """
+    return click.pass_context(func)
+
+
+class AliasedGroup(click.Group):
+    def __init__(self, *args, **kwargs):
+        self.max_suggestions = kwargs.pop("max_suggestions", 3)
+        self.cutoff = kwargs.pop("cutoff", 0.5)
+        super(AliasedGroup, self).__init__(*args, **kwargs)
+
+    def get_command(self, ctx, cmd_name):
+        rv = click.Group.get_command(self, ctx, cmd_name)
+        if rv is not None:
+            return rv
+        matches = \
+            [x for x in self.list_commands(ctx) if x.startswith(cmd_name)]
+        if not matches:
+            return None
+        elif len(matches) == 1:
+            return click.Group.get_command(self, ctx, matches[0])
+        ctx.fail('Too many matches: {0}'.format(', '.join(sorted(matches))))
+
+    def resolve_command(self, ctx, args):
+        """Override clicks ``resolve_command`` method
+        and appends *Did you mean ...* suggestions
+        to the raised exception message.
+        """
+        try:
+            return super(AliasedGroup, self).resolve_command(ctx, args)
+        except click.exceptions.UsageError as error:
+            error_msg = str(error)
+            original_cmd_name = click.utils.make_str(args[0])
+            matches = difflib.get_close_matches(
+                original_cmd_name,
+                self.list_commands(ctx),
+                self.max_suggestions,
+                self.cutoff)
+            if matches:
+                error_msg += '\n\nDid you mean one of these?\n    {0}'.format(
+                    '\n    '.join(matches))
+            raise click.exceptions.UsageError(error_msg, error.ctx)
+
+
+def group(name):
+    """Allows creating a group with a default click context
+    and a cls for click's "did you mean" suggestions, without having
+    to repeat them for every group.
+    """
+    return click.group(
+        name=name,
+        context_settings=CLICK_CONTEXT_SETTINGS,
+        cls=AliasedGroup)
+
+
+def command(*args, **kwargs):
+    """Make Click commands ARIA specific
+
+    This exists purely for aesthetic reasons; otherwise
+    some decorators would be called `@click.something` instead of
+    `@aria.something`
+    """
+    return click.command(*args, **kwargs)
+
+
+def argument(*args, **kwargs):
+    """Make Click arguments ARIA specific
+
+    This exists purely for aesthetic reasons; otherwise
+    some decorators would be called `@click.something` instead of
+    `@aria.something`
+    """
+    return click.argument(*args, **kwargs)
+
+
+class Options(object):
+    def __init__(self):
+        """The options API is nicer when you use each option by calling
+        `@aria.options.some_option` instead of `@aria.some_option`.
+
+        Note that some options are attributes and some are static methods.
+        The reason for that is that we want to be explicit regarding how
+        a developer sees an option: if it can receive arguments, it's a
+        method; if not, it's an attribute.
+        """
+        self.version = click.option(
+            '--version',
+            is_flag=True,
+            callback=show_version,
+            expose_value=False,
+            is_eager=True,
+            help=helptexts.VERSION)
+
+        self.inputs = click.option(
+            '-i',
+            '--inputs',
+            multiple=True,
+            callback=inputs_callback,
+            help=helptexts.INPUTS)
+
+        self.json_output = click.option(
+            '--json-output',
+            is_flag=True,
+            help=helptexts.JSON_OUTPUT)
+
+        self.init_hard_reset = click.option(
+            '--hard',
+            is_flag=True,
+            help=helptexts.HARD_RESET)
+
+        self.reset_context = click.option(
+            '-r',
+            '--reset-context',
+            is_flag=True,
+            help=helptexts.RESET_CONTEXT)
+
+        self.enable_colors = click.option(
+            '--enable-colors',
+            is_flag=True,
+            default=False,
+            help=helptexts.ENABLE_COLORS)
+
+        self.node_name = click.option(
+            '-n',
+            '--node-name',
+            required=False,
+            help=helptexts.NODE_NAME)
+
+        self.descending = click.option(
+            '--descending',
+            required=False,
+            is_flag=True,
+            default=False,
+            help=helptexts.DESCENDING)
+
+    @staticmethod
+    def verbose(expose_value=False):
+        return click.option(
+            '-v',
+            '--verbose',
+            count=True,
+            callback=set_verbosity_level,
+            expose_value=expose_value,
+            is_eager=True,
+            help=helptexts.VERBOSE)
+
+    @staticmethod
+    def force(help):
+        return click.option(
+            '-f',
+            '--force',
+            is_flag=True,
+            help=help)
+
+    @staticmethod
+    def service_template_filename():
+        return click.option(
+            '-n',
+            '--service-template-filename',
+            default=SAMPLE_SERVICE_TEMPLATE_FILENAME,
+            help=helptexts.SERVICE_TEMPLATE_FILENAME)
+
+    @staticmethod
+    def workflow_id(default):
+        return click.option(
+            '-w',
+            '--workflow-id',
+            default=default,
+            help=helptexts.WORKFLOW_TO_EXECUTE.format(default))
+
+    @staticmethod
+    def task_thread_pool_size(default=1):
+        return click.option(
+            '--task-thread-pool-size',
+            type=int,
+            default=default,
+            help=helptexts.TASK_THREAD_POOL_SIZE.format(default))
+
+    @staticmethod
+    def task_max_attempts(default=1):
+        return click.option(
+            '--task-max-attempts',
+            type=int,
+            default=default,
+            help=helptexts.TASK_MAX_ATTEMPTS.format(default))
+
+    @staticmethod
+    def sort_by(default='created_at'):
+        return click.option(
+            '--sort-by',
+            required=False,
+            default=default,
+            help=helptexts.SORT_BY)
+
+    @staticmethod
+    def task_retry_interval(default=1):
+        return click.option(
+            '--task-retry-interval',
+            type=int,
+            default=default,
+            help=helptexts.TASK_RETRY_INTERVAL.format(default))
+
+    @staticmethod
+    def service_id(required=False):
+        return click.option(
+            '-s',
+            '--service-id',
+            required=required,
+            help=helptexts.SERVICE_ID)
+
+    @staticmethod
+    def execution_id(required=False):
+        return click.option(
+            '-e',
+            '--execution-id',
+            required=required,
+            help=helptexts.EXECUTION_ID)
+
+    @staticmethod
+    def service_template_id(required=False):
+        return click.option(
+            '-t',
+            '--service-template-id',
+            required=required,
+            help=helptexts.SERVICE_TEMPLATE_ID)
+
+    @staticmethod
+    def service_template_path(required=False):
+        return click.option(
+            '-p',
+            '--service-template-path',
+            required=required,
+            type=click.Path(exists=True))
+
+    @staticmethod
+    def service_name(required=False):
+        return click.option(
+            '-s',
+            '--service-name',
+            required=required,
+            help=helptexts.SERVICE_ID)
+
+    @staticmethod
+    def service_template_name(required=False):
+        return click.option(
+            '-t',
+            '--service-template-name',
+            required=required,
+            help=helptexts.SERVICE_TEMPLATE_ID)
+
+
+options = Options()
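
As a quick illustration of how the helpers above are meant to be combined (a minimal sketch, not part of this patch -- the `demo` group, option names and messages are made up), a command module would look roughly like this:

    import click

    from aria.cli.cli import aria
    from aria.cli.cli.aria import MutuallyExclusiveOption

    @aria.group(name='demo')
    @aria.options.verbose()
    def demo():
        """Hypothetical command group; it picks up the shared context
        settings and AliasedGroup's prefix matching / suggestions"""
        pass

    @demo.command(name='show')
    @click.option('--json-output', is_flag=True,
                  cls=MutuallyExclusiveOption,
                  mutually_exclusive=['plain_output'],
                  mutuality_error_message='choose a single output format')
    @click.option('--plain-output', is_flag=True,
                  cls=MutuallyExclusiveOption,
                  mutually_exclusive=['json_output'],
                  mutuality_error_message='choose a single output format')
    @aria.options.verbose()
    @aria.pass_logger
    def show(json_output, plain_output, logger):
        # passing both flags raises click.UsageError, since each option
        # names the other in its mutually_exclusive list
        logger.info('json: {0}, plain: {1}'.format(json_output, plain_output))

Because the group uses AliasedGroup, any unambiguous prefix of a command name resolves to it, and a mistyped name gets a "Did you mean ..." suggestion built with difflib.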

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/cli/helptexts.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli/helptexts.py b/aria/cli/cli/helptexts.py
new file mode 100644
index 0000000..02519cb
--- /dev/null
+++ b/aria/cli/cli/helptexts.py
@@ -0,0 +1,56 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+VERBOSE = \
+    "Show verbose output. You can supply this up to three times (e.g. -vvv)"
+VERSION = "Display the version and exit"
+
+INPUTS_PARAMS_USAGE = (
+    '(Can be provided as wildcard based paths '
+    '(*.yaml, /my_inputs/, etc..) to YAML files, a JSON string or as '
+    'key1=value1;key2=value2). This argument can be used multiple times'
+)
+WORKFLOW_TO_EXECUTE = "The workflow to execute [default: {0}]"
+
+SERVICE_TEMPLATE_PATH = "The path to the application's service template file"
+SERVICE_TEMPLATE_ID = "The unique identifier for the service template"
+
+RESET_CONTEXT = "Reset the working environment"
+HARD_RESET = "Hard reset the configuration, including coloring and loggers"
+ENABLE_COLORS = "Enable colors in logger (use --hard when working with" \
+                " an initialized environment) [default: False]"
+
+SERVICE_TEMPLATE_FILENAME = (
+    "The name of the archive's main service template file. "
+    "This is only relevant if uploading an archive")
+INPUTS = "Inputs for the service {0}".format(INPUTS_PARAMS_USAGE)
+PARAMETERS = "Parameters for the workflow {0}".format(INPUTS_PARAMS_USAGE)
+TASK_RETRY_INTERVAL = \
+    "Minimum interval (in seconds) to wait between task retry attempts [default: {0}]"
+TASK_MAX_ATTEMPTS = \
+    "How many times a task should be attempted in case of failure [default: {0}]"
+
+JSON_OUTPUT = "Output events in a consumable JSON format"
+
+SERVICE_ID = "The unique identifier for the service"
+EXECUTION_ID = "The unique identifier for the execution"
+IGNORE_RUNNING_NODES = "Delete the service even if it has running nodes"
+
+NODE_NAME = "The node's name"
+
+DEFAULT_MUTUALITY_MESSAGE = 'Cannot be used simultaneously'
+
+SORT_BY = "Key for sorting the list"
+DESCENDING = "Sort list in descending order [default: False]"
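
For reference, INPUTS_PARAMS_USAGE above describes three input forms. A rough sketch of how repeated `-i` values end up merged into one dict (inferred from this help text and from the `parse_inputs` helper removed later in this patch, not from code added here):

    # -i inputs/*.yaml            -> each matching YAML file is loaded and merged
    # -i '{"port": 8080}'         -> parsed as a JSON object
    # -i 'user=admin;debug=true'  -> split on ';' and '=' into key/value pairs
    #
    # combining all three would therefore produce something like:
    merged_inputs = {'port': 8080, 'user': 'admin', 'debug': 'true'}
    # ...plus whatever keys the YAML files define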

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/colorful_event.py
----------------------------------------------------------------------
diff --git a/aria/cli/colorful_event.py b/aria/cli/colorful_event.py
new file mode 100644
index 0000000..53e3d02
--- /dev/null
+++ b/aria/cli/colorful_event.py
@@ -0,0 +1,152 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from contextlib import contextmanager
+
+from colorama import Fore, Style
+
+from cloudify.event import Event
+
+
+def colorful_property(prop):
+    """
+    A decorator for coloring values of the parent event type properties
+    :param prop: the property to color (should be a method returning a color)
+    :return: a property which colors the value of the parent event type's
+             property with the same name
+    """
+    def _decorator(self):
+        # getting value of property from parent event type
+        val = getattr(super(ColorfulEvent, self), prop.__name__)
+        # getting the desired color
+        color = prop(self)
+        # coloring the value
+        return self._color_message(val, color)
+    return property(_decorator)
+
+
+class ColorfulEvent(Event):
+
+    RESET_COLOR = Fore.RESET + Style.RESET_ALL
+
+    TIMESTAMP_COLOR = Fore.RESET
+    LOG_TYPE_COLOR = Fore.YELLOW
+    EVENT_TYPE_COLOR = Fore.MAGENTA
+    DEPLOYMENT_ID_COLOR = Fore.CYAN
+    OPERATION_INFO_COLOR = Fore.RESET
+    NODE_ID_COLOR = Fore.BLUE
+    SOURCE_ID_COLOR = Fore.BLUE
+    TARGET_ID_COLOR = Fore.BLUE
+    OPERATION_COLOR = Fore.YELLOW
+
+    # colors entire message part according to event type
+    _message_color_by_event_type = {
+        'workflow_started': Style.BRIGHT + Fore.GREEN,
+        'workflow_succeeded': Style.BRIGHT + Fore.GREEN,
+        'workflow_failed': Style.BRIGHT + Fore.RED,
+        'workflow_cancelled': Style.BRIGHT + Fore.YELLOW,
+
+        'sending_task': Fore.RESET,
+        'task_started': Fore.RESET,
+        'task_succeeded': Fore.GREEN,
+        'task_rescheduled': Fore.YELLOW,
+        'task_failed': Fore.RED
+    }
+
+    # colors only the log level part
+    _log_level_to_color = {
+        'INFO': Fore.CYAN,
+        'WARN': Fore.YELLOW,
+        'WARNING': Fore.YELLOW,
+        'ERROR': Fore.RED,
+        'FATAL': Fore.RED
+    }
+
+    _color_context = RESET_COLOR
+
+    @property
+    def operation_info(self):
+        color = self.OPERATION_INFO_COLOR
+
+        with self._nest_colors(color):
+            op_info = super(ColorfulEvent, self).operation_info
+
+        return self._color_message(op_info, color)
+
+    @property
+    def text(self):
+        event_type = super(ColorfulEvent, self).event_type  # might be None
+        color = self._message_color_by_event_type.get(event_type)
+
+        with self._nest_colors(color):
+            msg = super(ColorfulEvent, self).text
+
+        return self._color_message(msg, color)
+
+    @property
+    def log_level(self):
+        lvl = super(ColorfulEvent, self).log_level
+        color = self._log_level_to_color.get(lvl)
+        return self._color_message(lvl, color)
+
+    @colorful_property
+    def timestamp(self):
+        return self.TIMESTAMP_COLOR
+
+    @colorful_property
+    def printable_timestamp(self):
+        return self.TIMESTAMP_COLOR
+
+    @colorful_property
+    def event_type_indicator(self):
+        return self.LOG_TYPE_COLOR if self.is_log_message else \
+            self.EVENT_TYPE_COLOR
+
+    @colorful_property
+    def operation(self):
+        return self.OPERATION_COLOR
+
+    @colorful_property
+    def node_id(self):
+        return self.NODE_ID_COLOR
+
+    @colorful_property
+    def source_id(self):
+        return self.SOURCE_ID_COLOR
+
+    @colorful_property
+    def target_id(self):
+        return self.TARGET_ID_COLOR
+
+    @colorful_property
+    def deployment_id(self):
+        return self.DEPLOYMENT_ID_COLOR
+
+    @contextmanager
+    def _nest_colors(self, nesting_color):
+        prev_color_context = self._color_context
+        if nesting_color:
+            self._color_context = nesting_color
+        yield
+        self._color_context = prev_color_context
+
+    def _color_message(self, val, color):
+        if not val or not color:
+            return val
+
+        return "{0}{1}{2}".format(
+            color,
+            val,
+            self._color_context)
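
The nesting trick above: _color_message closes each fragment with the current _color_context (a full reset by default), and _nest_colors temporarily points that context at the enclosing color, so a fragment colored inside a larger colored message falls back to the outer color instead of resetting the terminal. A rough standalone illustration (not part of this patch):

    from colorama import Fore, Style

    RESET = Fore.RESET + Style.RESET_ALL

    # with the default context, a colored fragment simply resets afterwards:
    #     Fore.YELLOW + 'install' + RESET
    #
    # but inside `with self._nest_colors(Fore.GREEN): ...` the same call yields
    #     Fore.YELLOW + 'install' + Fore.GREEN
    # so the surrounding green message keeps its color after the fragment.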

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/commands.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands.py b/aria/cli/commands.py
deleted file mode 100644
index ee329e7..0000000
--- a/aria/cli/commands.py
+++ /dev/null
@@ -1,546 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI various commands implementation
-"""
-
-import json
-import os
-import sys
-import csv
-import shutil
-import tempfile
-from glob import glob
-from importlib import import_module
-
-from ruamel import yaml # @UnresolvedImport
-
-from .. import extension
-from ..logger import LoggerMixin
-from ..parser import iter_specifications
-from ..parser.consumption import (
-    ConsumptionContext,
-    ConsumerChain,
-    Read,
-    Validate,
-    ServiceTemplate,
-    Types,
-    Inputs,
-    ServiceInstance
-)
-from ..parser.loading import LiteralLocation, UriLocation
-from ..utils.application import StorageManager
-from ..utils.caching import cachedmethod
-from ..utils.console import (puts, Colored, indent)
-from ..utils.imports import (import_fullname, import_modules)
-from ..utils.collections import OrderedDict
-from ..orchestrator import WORKFLOW_DECORATOR_RESERVED_ARGUMENTS
-from ..orchestrator.runner import Runner
-from ..orchestrator.workflows.builtin import BUILTIN_WORKFLOWS
-from .dry import convert_to_dry
-
-from .exceptions import (
-    AriaCliFormatInputsError,
-    AriaCliYAMLInputsError,
-    AriaCliInvalidInputsError
-)
-from . import csar
-
-
-class BaseCommand(LoggerMixin):
-    """
-    Base class for CLI commands.
-    """
-
-    def __repr__(self):
-        return 'AriaCli({cls.__name__})'.format(cls=self.__class__)
-
-    def __call__(self, args_namespace, unknown_args):
-        """
-        __call__ method is called when running command
-        :param args_namespace:
-        """
-        pass
-
-    def parse_inputs(self, inputs):
-        """
-        Returns a dictionary of inputs `resources` can be:
-        - A list of files.
-        - A single file
-        - A directory containing multiple input files
-        - A key1=value1;key2=value2 pairs string.
-        - Wildcard based string (e.g. *-inputs.yaml)
-        """
-
-        parsed_dict = {}
-
-        def _format_to_dict(input_string):
-            self.logger.info('Processing inputs source: {0}'.format(input_string))
-            try:
-                input_string = input_string.strip()
-                try:
-                    parsed_dict.update(json.loads(input_string))
-                except BaseException:
-                    parsed_dict.update((i.split('=')
-                                        for i in input_string.split(';')
-                                        if i))
-            except Exception as exc:
-                raise AriaCliFormatInputsError(str(exc), inputs=input_string)
-
-        def _handle_inputs_source(input_path):
-            self.logger.info('Processing inputs source: {0}'.format(input_path))
-            try:
-                with open(input_path) as input_file:
-                    content = yaml.safe_load(input_file)
-            except yaml.YAMLError as exc:
-                raise AriaCliYAMLInputsError(
-                    '"{0}" is not a valid YAML. {1}'.format(input_path, str(exc)))
-            if isinstance(content, dict):
-                parsed_dict.update(content)
-                return
-            if content is None:
-                return
-            raise AriaCliInvalidInputsError('Invalid inputs', inputs=input_path)
-
-        for input_string in inputs if isinstance(inputs, list) else [inputs]:
-            if os.path.isdir(input_string):
-                for input_file in os.listdir(input_string):
-                    _handle_inputs_source(os.path.join(input_string, input_file))
-                continue
-            input_files = glob(input_string)
-            if input_files:
-                for input_file in input_files:
-                    _handle_inputs_source(input_file)
-                continue
-            _format_to_dict(input_string)
-        return parsed_dict
-
-
-class ParseCommand(BaseCommand):
-    """
-    :code:`parse` command.
-    
-    Given a blueprint, emits information in human-readable, JSON, or YAML format from various phases
-    of the ARIA parser.
-    """
-    
-    def __call__(self, args_namespace, unknown_args):
-        super(ParseCommand, self).__call__(args_namespace, unknown_args)
-
-        if args_namespace.prefix:
-            for prefix in args_namespace.prefix:
-                extension.parser.uri_loader_prefix().append(prefix)
-
-        cachedmethod.ENABLED = args_namespace.cached_methods
-
-        context = ParseCommand.create_context_from_namespace(args_namespace)
-        context.args = unknown_args
-
-        consumer = ConsumerChain(context, (Read, Validate))
-
-        consumer_class_name = args_namespace.consumer
-        dumper = None
-        if consumer_class_name == 'validate':
-            dumper = None
-        elif consumer_class_name == 'presentation':
-            dumper = consumer.consumers[0]
-        elif consumer_class_name == 'template':
-            consumer.append(ServiceTemplate)
-        elif consumer_class_name == 'types':
-            consumer.append(ServiceTemplate, Types)
-        elif consumer_class_name == 'instance':
-            consumer.append(ServiceTemplate, Inputs, ServiceInstance)
-        else:
-            consumer.append(ServiceTemplate, Inputs, ServiceInstance)
-            consumer.append(import_fullname(consumer_class_name))
-
-        if dumper is None:
-            # Default to last consumer
-            dumper = consumer.consumers[-1]
-
-        consumer.consume()
-
-        if not context.validation.dump_issues():
-            dumper.dump()
-            exit(1)
-
-    @staticmethod
-    def create_context_from_namespace(namespace, **kwargs):
-        args = vars(namespace).copy()
-        args.update(kwargs)
-        return ParseCommand.create_context(**args)
-
-    @staticmethod
-    def create_context(uri,
-                       loader_source,
-                       reader_source,
-                       presenter_source,
-                       presenter,
-                       debug,
-                       **kwargs):
-        context = ConsumptionContext()
-        context.loading.loader_source = import_fullname(loader_source)()
-        context.reading.reader_source = import_fullname(reader_source)()
-        context.presentation.location = UriLocation(uri) if isinstance(uri, basestring) else uri
-        context.presentation.presenter_source = import_fullname(presenter_source)()
-        context.presentation.presenter_class = import_fullname(presenter)
-        context.presentation.print_exceptions = debug
-        return context
-
-
-class WorkflowCommand(BaseCommand):
-    """
-    :code:`workflow` command.
-    """
-
-    WORKFLOW_POLICY_INTERNAL_PROPERTIES = ('implementation', 'dependencies')
-    
-    def __call__(self, args_namespace, unknown_args):
-        super(WorkflowCommand, self).__call__(args_namespace, unknown_args)
-
-        context = self._parse(args_namespace.uri)
-        workflow_fn, inputs = self._get_workflow(context, args_namespace.workflow)
-        self._dry = args_namespace.dry
-        self._run(context, args_namespace.workflow, workflow_fn, inputs)
-    
-    def _parse(self, uri):
-        # Parse
-        context = ConsumptionContext()
-        context.presentation.location = UriLocation(uri)
-        consumer = ConsumerChain(context, (Read, Validate, ServiceTemplate, Inputs,
-                                           ServiceInstance))
-        consumer.consume()
-
-        if context.validation.dump_issues():
-            exit(1)
-        
-        return context
-    
-    def _get_workflow(self, context, workflow_name):
-        if workflow_name in BUILTIN_WORKFLOWS:
-            workflow_fn = import_fullname('aria.orchestrator.workflows.builtin.{0}'.format(
-                workflow_name))
-            inputs = {}
-        else:
-            workflow = context.modeling.instance.policies.get(workflow_name)
-            if workflow is None:
-                raise AttributeError('workflow policy does not exist: "{0}"'.format(workflow_name))
-            if workflow.type.role != 'workflow':
-                raise AttributeError('policy is not a workflow: "{0}"'.format(workflow_name))
-
-            sys.path.append(os.path.dirname(str(context.presentation.location)))
-    
-            workflow_fn = import_fullname(workflow.properties['implementation'].value)
-    
-            for k in workflow.properties:
-                if k in WORKFLOW_DECORATOR_RESERVED_ARGUMENTS:
-                    raise AttributeError('workflow policy "{0}" defines a reserved property: "{1}"'
-                                         .format(workflow_name, k))
-    
-            inputs = OrderedDict([
-                (k, v.value) for k, v in workflow.properties.iteritems()
-                if k not in WorkflowCommand.WORKFLOW_POLICY_INTERNAL_PROPERTIES
-            ])
-        
-        return workflow_fn, inputs
-    
-    def _run(self, context, workflow_name, workflow_fn, inputs):
-        # Storage
-        def _initialize_storage(model_storage):
-            if self._dry:
-                convert_to_dry(context.modeling.instance)
-            context.modeling.store(model_storage)
-
-        # Create runner
-        runner = Runner(workflow_name, workflow_fn, inputs, _initialize_storage,
-                        lambda: context.modeling.instance.id)
-        
-        # Run
-        runner.run()
-   
-
-class InitCommand(BaseCommand):
-    """
-    :code:`init` command.
-    
-    Broken. Currently maintained for reference.
-    """
-
-    _IN_VIRTUAL_ENV = hasattr(sys, 'real_prefix')
-
-    def __call__(self, args_namespace, unknown_args):
-        super(InitCommand, self).__call__(args_namespace, unknown_args)
-        self._workspace_setup()
-        inputs = self.parse_inputs(args_namespace.input) if args_namespace.input else None
-        plan, deployment_plan = self._parse_blueprint(args_namespace.blueprint_path, inputs)
-        self._create_storage(
-            blueprint_plan=plan,
-            blueprint_path=args_namespace.blueprint_path,
-            deployment_plan=deployment_plan,
-            blueprint_id=args_namespace.blueprint_id,
-            deployment_id=args_namespace.deployment_id,
-            main_file_name=os.path.basename(args_namespace.blueprint_path))
-        self.logger.info('Initiated {0}'.format(args_namespace.blueprint_path))
-        self.logger.info(
-            'If you make changes to the blueprint, '
-            'run `aria local init -p {0}` command again to apply them'.format(
-                args_namespace.blueprint_path))
-
-    def _workspace_setup(self):
-        try:
-            create_user_space()
-            self.logger.debug(
-                'created user space path in: {0}'.format(user_space()))
-        except IOError:
-            self.logger.debug(
-                'user space path already exist - {0}'.format(user_space()))
-        try:
-            create_local_storage()
-            self.logger.debug(
-                'created local storage path in: {0}'.format(local_storage()))
-        except IOError:
-            self.logger.debug(
-                'local storage path already exist - {0}'.format(local_storage()))
-        return local_storage()
-
-    def _parse_blueprint(self, blueprint_path, inputs=None):
-        # TODO
-        pass
-
-    @staticmethod
-    def _create_storage(
-            blueprint_path,
-            blueprint_plan,
-            deployment_plan,
-            blueprint_id,
-            deployment_id,
-            main_file_name=None):
-        resource_storage = application_resource_storage(
-            FileSystemResourceDriver(local_resource_storage()))
-        model_storage = application_model_storage(
-            FileSystemModelDriver(local_model_storage()))
-        resource_storage.setup()
-        model_storage.setup()
-        storage_manager = StorageManager(
-            model_storage=model_storage,
-            resource_storage=resource_storage,
-            blueprint_path=blueprint_path,
-            blueprint_id=blueprint_id,
-            blueprint_plan=blueprint_plan,
-            deployment_id=deployment_id,
-            deployment_plan=deployment_plan
-        )
-        storage_manager.create_blueprint_storage(
-            blueprint_path,
-            main_file_name=main_file_name
-        )
-        storage_manager.create_nodes_storage()
-        storage_manager.create_deployment_storage()
-        storage_manager.create_node_instances_storage()
-
-
-class ExecuteCommand(BaseCommand):
-    """
-    :code:`execute` command.
-
-    Broken. Currently maintained for reference.
-    """
-
-    def __call__(self, args_namespace, unknown_args):
-        super(ExecuteCommand, self).__call__(args_namespace, unknown_args)
-        parameters = (self.parse_inputs(args_namespace.parameters)
-                      if args_namespace.parameters else {})
-        resource_storage = application_resource_storage(
-            FileSystemResourceDriver(local_resource_storage()))
-        model_storage = application_model_storage(
-            FileSystemModelDriver(local_model_storage()))
-        deployment = model_storage.service_instance.get(args_namespace.deployment_id)
-
-        try:
-            workflow = deployment.workflows[args_namespace.workflow_id]
-        except KeyError:
-            raise ValueError(
-                '{0} workflow does not exist. existing workflows are: {1}'.format(
-                    args_namespace.workflow_id,
-                    deployment.workflows.keys()))
-
-        workflow_parameters = self._merge_and_validate_execution_parameters(
-            workflow,
-            args_namespace.workflow_id,
-            parameters
-        )
-        workflow_context = WorkflowContext(
-            name=args_namespace.workflow_id,
-            model_storage=model_storage,
-            resource_storage=resource_storage,
-            deployment_id=args_namespace.deployment_id,
-            workflow_id=args_namespace.workflow_id,
-            parameters=workflow_parameters,
-        )
-        workflow_function = self._load_workflow_handler(workflow['operation'])
-        tasks_graph = workflow_function(workflow_context, **workflow_context.parameters)
-        executor = ProcessExecutor()
-        workflow_engine = Engine(executor=executor,
-                                 workflow_context=workflow_context,
-                                 tasks_graph=tasks_graph)
-        workflow_engine.execute()
-        executor.close()
-
-    @staticmethod
-    def _merge_and_validate_execution_parameters(
-            workflow,
-            workflow_name,
-            execution_parameters):
-        merged_parameters = {}
-        workflow_parameters = workflow.get('parameters', {})
-        missing_mandatory_parameters = set()
-
-        for name, param in workflow_parameters.iteritems():
-            if 'default' not in param:
-                if name not in execution_parameters:
-                    missing_mandatory_parameters.add(name)
-                    continue
-                merged_parameters[name] = execution_parameters[name]
-                continue
-            merged_parameters[name] = (execution_parameters[name] if name in execution_parameters
-                                       else param['default'])
-
-        if missing_mandatory_parameters:
-            raise ValueError(
-                'Workflow "{0}" must be provided with the following '
-                'parameters to execute: {1}'.format(
-                    workflow_name, ','.join(missing_mandatory_parameters)))
-
-        custom_parameters = dict(
-            (k, v) for (k, v) in execution_parameters.iteritems()
-            if k not in workflow_parameters)
-
-        if custom_parameters:
-            raise ValueError(
-                'Workflow "{0}" does not have the following parameters declared: {1}. '
-                'Remove these parameters'.format(
-                    workflow_name, ','.join(custom_parameters.keys())))
-
-        return merged_parameters
-
-    @staticmethod
-    def _load_workflow_handler(handler_path):
-        module_name, spec_handler_name = handler_path.rsplit('.', 1)
-        try:
-            module = import_module(module_name)
-            return getattr(module, spec_handler_name)
-        except ImportError:
-            # TODO: exception handler
-            raise
-        except AttributeError:
-            # TODO: exception handler
-            raise
-
-
-class BaseCSARCommand(BaseCommand):
-    @staticmethod
-    def _parse_and_dump(reader):
-        context = ConsumptionContext()
-        context.loading.prefixes += [os.path.join(reader.destination, 'definitions')]
-        context.presentation.location = LiteralLocation(reader.entry_definitions_yaml)
-        chain = ConsumerChain(context, (Read, Validate, Model, Instance))
-        chain.consume()
-        if context.validation.dump_issues():
-            raise RuntimeError('Validation failed')
-        dumper = chain.consumers[-1]
-        dumper.dump()
-
-    def _read(self, source, destination):
-        reader = csar.read(
-            source=source,
-            destination=destination,
-            logger=self.logger)
-        self.logger.info(
-            'Path: {r.destination}\n'
-            'TOSCA meta file version: {r.meta_file_version}\n'
-            'CSAR Version: {r.csar_version}\n'
-            'Created By: {r.created_by}\n'
-            'Entry definitions: {r.entry_definitions}'
-            .format(r=reader))
-        self._parse_and_dump(reader)
-
-    def _validate(self, source):
-        workdir = tempfile.mkdtemp()
-        try:
-            self._read(
-                source=source,
-                destination=workdir)
-        finally:
-            shutil.rmtree(workdir, ignore_errors=True)
-
-
-class CSARCreateCommand(BaseCSARCommand):
-    def __call__(self, args_namespace, unknown_args):
-        super(CSARCreateCommand, self).__call__(args_namespace, unknown_args)
-        csar.write(
-            source=args_namespace.source,
-            entry=args_namespace.entry,
-            destination=args_namespace.destination,
-            logger=self.logger)
-        self._validate(args_namespace.destination)
-
-
-class CSAROpenCommand(BaseCSARCommand):
-    def __call__(self, args_namespace, unknown_args):
-        super(CSAROpenCommand, self).__call__(args_namespace, unknown_args)
-        self._read(
-            source=args_namespace.source,
-            destination=args_namespace.destination)
-
-
-class CSARValidateCommand(BaseCSARCommand):
-    def __call__(self, args_namespace, unknown_args):
-        super(CSARValidateCommand, self).__call__(args_namespace, unknown_args)
-        self._validate(args_namespace.source)
-
-
-class SpecCommand(BaseCommand):
-    """
-    :code:`spec` command.
-    
-    Emits all uses of :code:`@dsl_specification` in the codebase, in human-readable or CSV format.
-    """
-    
-    def __call__(self, args_namespace, unknown_args):
-        super(SpecCommand, self).__call__(args_namespace, unknown_args)
-
-        # Make sure that all @dsl_specification decorators are processed
-        for pkg in extension.parser.specification_package():
-            import_modules(pkg)
-
-        # TODO: scan YAML documents as well
-
-        if args_namespace.csv:
-            writer = csv.writer(sys.stdout, quoting=csv.QUOTE_ALL)
-            writer.writerow(('Specification', 'Section', 'Code', 'URL'))
-            for spec, sections in iter_specifications():
-                for section, details in sections:
-                    writer.writerow((spec, section, details['code'], details['url']))
-
-        else:
-            for spec, sections in iter_specifications():
-                puts(Colored.cyan(spec))
-                with indent(2):
-                    for section, details in sections:
-                        puts(Colored.blue(section))
-                        with indent(2):
-                            for k, v in details.iteritems():
-                                puts('%s: %s' % (Colored.magenta(k), v))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/commands/__init__.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/__init__.py b/aria/cli/commands/__init__.py
new file mode 100644
index 0000000..ae1e83e
--- /dev/null
+++ b/aria/cli/commands/__init__.py
@@ -0,0 +1,14 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/commands/executions.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/executions.py b/aria/cli/commands/executions.py
new file mode 100644
index 0000000..d767fa1
--- /dev/null
+++ b/aria/cli/commands/executions.py
@@ -0,0 +1,172 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import time
+
+from .. import utils
+from ..table import print_data
+from ..cli import aria, helptexts
+from ..exceptions import AriaCliError
+from ...modeling.models import Execution
+from ...storage.exceptions import StorageError
+from ...orchestrator.workflow_runner import WorkflowRunner
+from ...utils import formatting
+from ...utils import threading
+
+EXECUTION_COLUMNS = ['id', 'workflow_name', 'status', 'service_name',
+                     'created_at', 'error']
+
+
+@aria.group(name='executions')
+@aria.options.verbose()
+def executions():
+    """Handle workflow executions
+    """
+    pass
+
+
+@executions.command(name='show',
+                    short_help='Show execution information')
+@aria.argument('execution-id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(execution_id, model_storage, logger):
+    """Show information for a specific execution
+
+    `EXECUTION_ID` is the execution to get information on.
+    """
+    try:
+        logger.info('Showing execution {0}'.format(execution_id))
+        execution = model_storage.execution.get(execution_id)
+    except StorageError:
+        raise AriaCliError('Execution {0} not found'.format(execution_id))
+
+    print_data(EXECUTION_COLUMNS, execution.to_dict(), 'Execution:', max_width=50)
+
+    # print execution parameters
+    logger.info('Execution Inputs:')
+    if execution.inputs:
+        # TODO: check this section, it hasn't been tested yet
+        execution_inputs = dict((i.name, i.value) for i in execution.inputs.values())
+        for input_name, input_value in formatting.decode_dict(
+                execution_inputs).iteritems():
+            logger.info('\t{0}: \t{1}'.format(input_name, input_value))
+    else:
+        logger.info('\tNo inputs')
+    logger.info('')
+
+
+@executions.command(name='list',
+                    short_help='List service executions')
+@aria.options.service_name(required=False)
+@aria.options.sort_by()
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(service_name,
+         sort_by,
+         descending,
+         model_storage,
+         logger):
+    """List executions
+
+    If `SERVICE_NAME` is provided, list executions for that service.
+    Otherwise, list executions for all services.
+    """
+    if service_name:
+        logger.info('Listing executions for service {0}...'.format(
+            service_name))
+        try:
+            service = model_storage.service.get_by_name(service_name)
+            filters = dict(service=service)
+        except StorageError:
+            raise AriaCliError('Service {0} does not exist'.format(
+                service_name))
+    else:
+        logger.info('Listing all executions...')
+        filters = {}
+
+    executions = [e.to_dict() for e in model_storage.execution.list(
+        filters=filters,
+        sort=utils.storage_sort_param(sort_by, descending))]
+
+    print_data(EXECUTION_COLUMNS, executions, 'Executions:')
+
+
+@executions.command(name='start',
+                    short_help='Execute a workflow')
+@aria.argument('workflow-name')
+@aria.options.service_name(required=True)
+@aria.options.inputs
+@aria.options.task_max_attempts()
+@aria.options.task_retry_interval()
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_resource_storage
+@aria.pass_plugin_manager
+@aria.pass_logger
+def start(workflow_name,
+          service_name,
+          inputs,
+          task_max_attempts,
+          task_retry_interval,
+          model_storage,
+          resource_storage,
+          plugin_manager,
+          logger):
+    """Execute a workflow
+
+    `WORKFLOW_NAME` is the name of the workflow to execute (e.g. `uninstall`)
+    """
+    workflow_runner = \
+        WorkflowRunner(workflow_name, service_name, inputs,
+                       model_storage, resource_storage, plugin_manager,
+                       task_max_attempts, task_retry_interval)
+
+    execution_thread_name = '{0}_{1}'.format(service_name, workflow_name)
+    execution_thread = threading.ExceptionThread(target=workflow_runner.execute,
+                                                 name=execution_thread_name)
+    execution_thread.daemon = True  # allows force-cancel to exit immediately
+
+    logger.info('Starting execution. Press Ctrl+C to cancel')
+    execution_thread.start()
+    try:
+        while execution_thread.is_alive():
+            # using join without a timeout blocks and ignores KeyboardInterrupt
+            execution_thread.join(1)
+    except KeyboardInterrupt:
+        _cancel_execution(workflow_runner, execution_thread, logger)
+
+    # raise any errors from the execution thread (note these are not workflow execution errors)
+    execution_thread.raise_error_if_exists()
+
+    execution = workflow_runner.execution
+    logger.info('Execution has ended with "{0}" status'.format(execution.status))
+    if execution.status == Execution.FAILED:
+        logger.info('Execution error:\n{0}'.format(execution.error))
+
+
+def _cancel_execution(workflow_runner, execution_thread, logger):
+    logger.info('Cancelling execution. Press Ctrl+C again to force-cancel')
+    try:
+        workflow_runner.cancel()
+        while execution_thread.is_alive():
+            execution_thread.join(1)
+    except KeyboardInterrupt:
+        logger.info('Force-cancelling execution')
+        # TODO handle execution (update status etc.) and exit process
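
The start command above polls the execution thread with a one-second join() so that
KeyboardInterrupt is delivered promptly. A minimal, self-contained sketch of that
pattern (the sleeping target below is only a stand-in for workflow_runner.execute):

    import threading
    import time

    def _run():
        time.sleep(10)  # stand-in for the actual workflow execution

    worker = threading.Thread(target=_run, name='my_service_install')
    worker.daemon = True  # lets a force-cancel exit the process immediately
    worker.start()
    try:
        while worker.is_alive():
            # join() without a timeout would block and swallow KeyboardInterrupt
            worker.join(1)
    except KeyboardInterrupt:
        print 'cancel requested'  # start() calls _cancel_execution() here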

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/commands/logs.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/logs.py b/aria/cli/commands/logs.py
new file mode 100644
index 0000000..3662063
--- /dev/null
+++ b/aria/cli/commands/logs.py
@@ -0,0 +1,68 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import utils
+from ..cli import aria
+
+
+@aria.group(name='logs')
+@aria.options.verbose()
+def logs():
+    """Show logs from workflow executions
+    """
+    pass
+
+
+@logs.command(name='list',
+              short_help='List execution logs')
+@aria.argument('execution-id')
+@aria.options.json_output
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(execution_id,
+         json_output,
+         model_storage,
+         logger):
+    """Display logs for an execution
+    """
+    logger.info('Listing logs for execution id {0}'.format(execution_id))
+    # events_logger = get_events_logger(json_output)
+    logs = model_storage.log.list(filters=dict(execution_fk=execution_id),
+                                  sort=utils.storage_sort_param('created_at', False))
+    # TODO: print logs nicely
+    if logs:
+        for log in logs:
+            print log
+    else:
+        logger.info('\tNo logs')
+
+
+@logs.command(name='delete',
+              short_help='Delete execution logs')
+@aria.argument('execution-id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def delete(execution_id, model_storage, logger):
+    """Delete logs of an execution
+
+    `EXECUTION_ID` is the id of the execution whose logs to delete.
+    """
+    logger.info('Deleting logs for execution id {0}'.format(execution_id))
+    logs = model_storage.log.list(filters=dict(execution_fk=execution_id))
+    for log in logs:
+        model_storage.log.delete(log)
+    logger.info('Deleted logs for execution id {0}'.format(execution_id))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/commands/node_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/node_templates.py b/aria/cli/commands/node_templates.py
new file mode 100644
index 0000000..5614aee
--- /dev/null
+++ b/aria/cli/commands/node_templates.py
@@ -0,0 +1,104 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ..table import print_data
+from .. import utils
+from ..cli import aria
+from ..exceptions import AriaCliError
+from ...storage.exceptions import StorageError
+
+
+NODE_TEMPLATE_COLUMNS = ['id', 'name', 'description', 'service_template_name', 'type_name']
+
+
+@aria.group(name='node-templates')
+@aria.options.verbose()
+def node_templates():
+    """Handle a service template's node templates
+    """
+    pass
+
+
+@node_templates.command(name='show',
+                        short_help='Show node information')
+@aria.argument('node-template-id')
+# @aria.options.service_template_name(required=True)
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(node_template_id, model_storage, logger):
+    """Show information for a specific node of a specific service template
+
+    `NODE_TEMPLATE_ID` is the id of the node template to get information on.
+    """
+    # logger.info('Showing node template {0} for service template {1}'.format(
+    #     node_template_id, service_template_name))
+    logger.info('Showing node template {0}'.format(node_template_id))
+    try:
+        #TODO get node template of a specific service template instead?
+        node_template = model_storage.node_template.get(node_template_id)
+    except StorageError:
+        raise AriaCliError('Node template {0} was not found'.format(node_template_id))
+
+    print_data(NODE_TEMPLATE_COLUMNS, node_template.to_dict(), 'Node template:', max_width=50)
+
+    # print node template properties
+    logger.info('Node template properties:')
+    if node_template.properties:
+        logger.info(utils.get_parameter_templates_as_string(node_template.properties))
+    else:
+        logger.info('\tNo properties')
+
+    # print node IDs
+    nodes = node_template.nodes.all()
+    logger.info('Nodes:')
+    if nodes:
+        for node in nodes:
+            logger.info('\t{0}'.format(node.name))
+    else:
+        logger.info('\tNo nodes')
+
+
+@node_templates.command(name='list',
+                        short_help='List node templates for a service template')
+@aria.options.service_template_name()
+@aria.options.sort_by('service_template_name')
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(service_template_name, sort_by, descending, model_storage, logger):
+    """List node templates
+
+    If `SERVICE_TEMPLATE_NAME` is provided, list node templates for that service template.
+    Otherwise, list node templates for all service templates.
+    """
+    if service_template_name:
+        logger.info('Listing node templates for service template {0}...'.format(
+            service_template_name))
+        try:
+            service_template = model_storage.service_template.get_by_name(service_template_name)
+            filters = dict(service_template=service_template)
+        except StorageError:
+            raise AriaCliError('Service template {0} does not exist'.format(service_template_name))
+    else:
+        logger.info('Listing all node templates...')
+        filters = {}
+
+    node_templates = [nt.to_dict() for nt in model_storage.node_template.list(
+        filters=filters,
+        sort=utils.storage_sort_param(sort_by, descending))]
+
+    print_data(NODE_TEMPLATE_COLUMNS, node_templates, 'Node templates:')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/commands/nodes.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/nodes.py b/aria/cli/commands/nodes.py
new file mode 100644
index 0000000..f38c917
--- /dev/null
+++ b/aria/cli/commands/nodes.py
@@ -0,0 +1,96 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import utils
+from ..cli import aria
+from ..table import print_data
+from ..exceptions import AriaCliError
+from ...storage.exceptions import StorageError
+
+
+NODE_COLUMNS = ['id', 'name', 'service_name', 'node_template_name', 'state']
+
+
+@aria.group(name='nodes')
+@aria.options.verbose()
+def nodes():
+    """Handle a service's nodes
+    """
+    pass
+
+
+@nodes.command(name='show',
+               short_help='Show node information')
+@aria.argument('node_id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(node_id, model_storage, logger):
+    """Showing information for a specific node
+
+    `NODE_ID` is the id of the node to get information on.
+    """
+    logger.info('Showing node {0}'.format(node_id))
+    try:
+        node = model_storage.node.get(node_id)
+    except StorageError:
+        raise AriaCliError('Node {0} not found'.format(node_id))
+
+    print_data(NODE_COLUMNS, node.to_dict(), 'Node:', max_width=50)
+
+    # print node attributes
+    logger.info('Node attributes:')
+    if node.runtime_properties:
+        for prop_name, prop_value in node.runtime_properties.iteritems():
+            logger.info('\t{0}: {1}'.format(prop_name, prop_value))
+    else:
+        logger.info('\tNo attributes')
+    logger.info('')
+
+
+@nodes.command(name='list',
+               short_help='List nodes for a service')
+@aria.options.service_name(required=False)
+@aria.options.sort_by('service_name')
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(service_name,
+         sort_by,
+         descending,
+         model_storage,
+         logger):
+    """List nodes
+
+    If `SERVICE_NAME` is provided, list nodes for that service.
+    Otherwise, list nodes for all services.
+    """
+    if service_name:
+        logger.info('Listing nodes for service {0}...'.format(service_name))
+        try:
+            service = model_storage.service.get_by_name(service_name)
+            filters = dict(service=service)
+        except StorageError:
+            raise AriaCliError('Service {0} does not exist'.format(service_name))
+    else:
+        logger.info('Listing all nodes...')
+        filters = {}
+
+    nodes = [node.to_dict() for node in model_storage.node.list(
+        filters=filters,
+        sort=utils.storage_sort_param(sort_by, descending))]
+
+    print_data(NODE_COLUMNS, nodes, 'Nodes:')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/cli/commands/plugins.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/plugins.py b/aria/cli/commands/plugins.py
new file mode 100644
index 0000000..d31aa99
--- /dev/null
+++ b/aria/cli/commands/plugins.py
@@ -0,0 +1,146 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import tarfile
+
+from ..table import print_data
+from ..cli import helptexts, aria
+from ..exceptions import AriaCliError
+from ..utils import storage_sort_param
+
+
+PLUGIN_COLUMNS = ['id', 'package_name', 'package_version', 'distribution',
+                  'supported_platform', 'distribution_release', 'uploaded_at']
+EXCLUDED_COLUMNS = ['archive_name', 'distribution_version', 'excluded_wheels',
+                    'package_source', 'supported_py_versions', 'wheels']
+
+
+@aria.group(name='plugins')
+@aria.options.verbose()
+def plugins():
+    """Handle plugins
+    """
+    pass
+
+
+@plugins.command(name='validate',
+                 short_help='Validate a plugin')
+@aria.argument('plugin-path')
+@aria.options.verbose()
+@aria.pass_logger
+def validate(plugin_path, logger):
+    """Validate a plugin
+
+    This will try to validate that the plugin's archive is not corrupted.
+    A valid plugin is a wagon (http://github.com/cloudify-cosmo/wagon)
+    in the tar.gz format (the suffix may also be .wgn).
+
+    `PLUGIN_PATH` is the path to the wagon archive to validate.
+    """
+    logger.info('Validating plugin {0}...'.format(plugin_path))
+
+    if not tarfile.is_tarfile(plugin_path):
+        raise AriaCliError(
+            'Archive {0} is of an unsupported type. Only '
+            'tar.gz/wgn is allowed'.format(plugin_path))
+    with tarfile.open(plugin_path) as tar:
+        tar_members = tar.getmembers()
+        package_json_path = "{0}/{1}".format(
+            tar_members[0].name, 'package.json')
+        # TODO: Find a better way to validate a plugin.
+        try:
+            tar.getmember(package_json_path)
+        except KeyError:
+            raise AriaCliError(
+                'Failed to validate plugin {0} '
+                '(package.json was not found in archive)'.format(plugin_path))
+
+    logger.info('Plugin validated successfully')
+
+
+@plugins.command(name='delete',
+                 short_help='Delete a plugin')
+@aria.argument('plugin-id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def delete(plugin_id, model_storage, logger):
+    """Delete a plugin
+
+    `PLUGIN_ID` is the id of the plugin to delete.
+    """
+    logger.info('Deleting plugin {0}...'.format(plugin_id))
+    model_storage.plugin.delete(plugin_id=plugin_id)
+    logger.info('Plugin deleted')
+
+
+@plugins.command(name='install',
+                 short_help='Install a plugin')
+@aria.argument('plugin-path')
+@aria.options.verbose()
+@aria.pass_context
+@aria.pass_plugin_manager
+@aria.pass_logger
+def install(ctx, plugin_path, plugin_manager, logger):
+    """Install a plugin
+
+    `PLUGIN_PATH` is the path to the wagon archive to install.
+    """
+    ctx.invoke(validate, plugin_path=plugin_path)
+    logger.info('Installing plugin {0}...'.format(plugin_path))
+    plugin = plugin_manager.install(plugin_path)
+    logger.info("Plugin installed. The plugin's id is {0}".format(plugin.id))
+
+
+@plugins.command(name='show',
+                 short_help='Show plugin information')
+@aria.argument('plugin-id')
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def show(plugin_id, model_storage, logger):
+    """Show information for a specific plugin
+
+    `PLUGIN_ID` is the id of the plugin to show information on.
+    """
+    logger.info('Showing plugin {0}...'.format(plugin_id))
+    plugin = model_storage.plugin.get(plugin_id).to_dict()
+    _transform_plugin_response(plugin)
+    print_data(PLUGIN_COLUMNS, plugin, 'Plugin:')
+
+
+@plugins.command(name='list',
+                 short_help='List plugins')
+@aria.options.sort_by('uploaded_at')
+@aria.options.descending
+@aria.options.verbose()
+@aria.pass_model_storage
+@aria.pass_logger
+def list(sort_by, descending, model_storage, logger):
+    """List all plugins on the manager
+    """
+    logger.info('Listing all plugins...')
+    plugins_list = [plugin.to_dict() for plugin in model_storage.plugin.list(
+        sort=storage_sort_param(sort_by, descending))]
+    for plugin in plugins_list:
+        _transform_plugin_response(plugin)
+    print_data(PLUGIN_COLUMNS, plugins_list, 'Plugins:')
+
+
+def _transform_plugin_response(plugin):
+    """Remove any columns that shouldn't be displayed in the CLI
+    """
+    for column in EXCLUDED_COLUMNS:
+        plugin.pop(column, None)
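
For reference, a self-contained sketch of the archive layout that validate() above
expects: a gzipped tar whose first member is a top-level directory containing
package.json (the names below are made up for illustration):

    import os
    import tarfile
    import tempfile

    workdir = tempfile.mkdtemp()
    plugin_dir = os.path.join(workdir, 'example-plugin')
    os.mkdir(plugin_dir)
    open(os.path.join(plugin_dir, 'package.json'), 'w').close()

    archive_path = os.path.join(workdir, 'example-plugin.wgn')
    with tarfile.open(archive_path, 'w:gz') as wgn:
        wgn.add(plugin_dir, arcname='example-plugin')

    # the same membership check that validate() performs
    with tarfile.open(archive_path) as wgn:
        top_level = wgn.getmembers()[0].name
        wgn.getmember('{0}/{1}'.format(top_level, 'package.json'))
    print 'archive layout is valid'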



[7/9] incubator-ariatosca git commit: ARIA-48 cli

Posted by ra...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/orchestrator/context/common.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/common.py b/aria/orchestrator/context/common.py
index 127641f..f631e79 100644
--- a/aria/orchestrator/context/common.py
+++ b/aria/orchestrator/context/common.py
@@ -68,17 +68,6 @@ class BaseContext(object):
         self._workdir = workdir
         self.logger = None
 
-    def _create_execution(self):
-        now = datetime.utcnow()
-        execution = self.model.execution.model_cls(
-            service_instance=self.service_instance,
-            workflow_name=self._workflow_name,
-            created_at=now,
-            parameters=self.parameters,
-        )
-        self.model.execution.put(execution)
-        return execution.id
-
     def _register_logger(self, logger_name=None, level=None, task_id=None):
         self.logger = self.PrefixedLogger(logging.getLogger(logger_name or self.__class__.__name__),
                                           self.logging_id,
@@ -168,13 +157,13 @@ class BaseContext(object):
         Download a blueprint resource from the resource storage
         """
         try:
-            self.resource.deployment.download(entry_id=str(self.service.id),
-                                              destination=destination,
-                                              path=path)
+            self.resource.service.download(entry_id=str(self.service.id),
+                                           destination=destination,
+                                           path=path)
         except exceptions.StorageError:
-            self.resource.blueprint.download(entry_id=str(self.service_template.id),
-                                             destination=destination,
-                                             path=path)
+            self.resource.service_template.download(entry_id=str(self.service_template.id),
+                                                    destination=destination,
+                                                    path=path)
 
     def download_resource_and_render(self, destination, path=None, variables=None):
         """
@@ -193,9 +182,9 @@ class BaseContext(object):
         Read a deployment resource as string from the resource storage
         """
         try:
-            return self.resource.deployment.read(entry_id=str(self.service.id), path=path)
+            return self.resource.service.read(entry_id=str(self.service.id), path=path)
         except exceptions.StorageError:
-            return self.resource.deployment.read(entry_id=str(self.service_template.id), path=path)
+            return self.resource.service_template.read(entry_id=str(self.service_template.id), path=path)
 
     def get_resource_and_render(self, path=None, variables=None):
         """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/orchestrator/context/workflow.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/workflow.py b/aria/orchestrator/context/workflow.py
index 5f86d9d..bc9f653 100644
--- a/aria/orchestrator/context/workflow.py
+++ b/aria/orchestrator/context/workflow.py
@@ -31,11 +31,11 @@ class WorkflowContext(BaseContext):
     """
     def __init__(self,
                  workflow_name,
+                 execution_id,
                  parameters=None,
                  task_max_attempts=1,
                  task_retry_interval=0,
                  task_ignore_failure=False,
-                 execution_id=None,
                  *args, **kwargs):
         super(WorkflowContext, self).__init__(*args, **kwargs)
         self._workflow_name = workflow_name
@@ -43,28 +43,15 @@ class WorkflowContext(BaseContext):
         self._task_max_attempts = task_max_attempts
         self._task_retry_interval = task_retry_interval
         self._task_ignore_failure = task_ignore_failure
-        # TODO: execution creation should happen somewhere else
-        # should be moved there, when such logical place exists
-        self._execution_id = execution_id or self._create_execution()
+        self._execution_id = execution_id
         self._register_logger()
 
     def __repr__(self):
         return (
             '{name}(deployment_id={self._service_id}, '
-            'workflow_name={self._workflow_name}'.format(
+            'workflow_name={self._workflow_name}, execution_id={self._execution_id})'.format(
                 name=self.__class__.__name__, self=self))
 
-    def _create_execution(self):
-        now = datetime.utcnow()
-        execution = self.model.execution.model_cls(
-            service=self.service,
-            workflow_name=self._workflow_name,
-            created_at=now,
-            parameters=self.parameters,
-        )
-        self.model.execution.put(execution)
-        return execution.id
-
     @property
     def logging_id(self):
         return '{0}[{1}]'.format(self._workflow_name, self._execution_id)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/orchestrator/execution_plugin/ctx_proxy/server.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/ctx_proxy/server.py b/aria/orchestrator/execution_plugin/ctx_proxy/server.py
index 817d064..52a5312 100644
--- a/aria/orchestrator/execution_plugin/ctx_proxy/server.py
+++ b/aria/orchestrator/execution_plugin/ctx_proxy/server.py
@@ -24,6 +24,7 @@ import StringIO
 import wsgiref.simple_server
 
 import bottle
+from aria import modeling
 
 from .. import exceptions
 
@@ -111,7 +112,7 @@ class CtxProxy(object):
             result = json.dumps({
                 'type': result_type,
                 'payload': payload
-            })
+            }, cls=modeling.utils.ModelJSONEncoder)
         except Exception as e:
             traceback_out = StringIO.StringIO()
             traceback.print_exc(file=traceback_out)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/orchestrator/runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/runner.py b/aria/orchestrator/runner.py
deleted file mode 100644
index f1633fa..0000000
--- a/aria/orchestrator/runner.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Workflow runner
-"""
-
-import tempfile
-import os
-
-from .context.workflow import WorkflowContext
-from .workflows.core.engine import Engine
-from .workflows.executor.thread import ThreadExecutor
-from ..storage import (
-    sql_mapi,
-    filesystem_rapi,
-)
-from .. import (
-    application_model_storage,
-    application_resource_storage
-)
-
-
-class Runner(object):
-    """
-    Runs workflows on a deployment. By default uses temporary storage (either on disk or in memory)
-    but can also be used with existing storage.
-
-    Handles the initialization of the storage engine and provides convenience methods for
-    sub-classes to create tasks.
-
-    :param path: path to Sqlite database file; use '' (the default) to use a temporary file,
-                 and None to use an in-memory database
-    :type path: string
-    """
-
-    def __init__(self, workflow_name, workflow_fn, inputs, initialize_model_storage_fn,
-                 service_id_fn, storage_path='', is_storage_temporary=True):
-        if storage_path == '':
-            # Temporary file storage
-            the_file, storage_path = tempfile.mkstemp(suffix='.db', prefix='aria-')
-            os.close(the_file)
-
-        self._storage_path = storage_path
-        self._storage_dir = os.path.dirname(storage_path)
-        self._storage_name = os.path.basename(storage_path)
-        self._is_storage_temporary = is_storage_temporary
-
-        workflow_context = self.create_workflow_context(workflow_name, initialize_model_storage_fn,
-                                                        service_id_fn)
-
-        tasks_graph = workflow_fn(ctx=workflow_context, **inputs)
-
-        self._engine = Engine(
-            executor=ThreadExecutor(),
-            workflow_context=workflow_context,
-            tasks_graph=tasks_graph)
-
-    def run(self):
-        try:
-            self._engine.execute()
-        finally:
-            self.cleanup()
-
-    def create_workflow_context(self,
-                                workflow_name,
-                                initialize_model_storage_fn,
-                                service_id_fn):
-        self.cleanup()
-        model_storage = application_model_storage(
-            sql_mapi.SQLAlchemyModelAPI,
-            initiator_kwargs=dict(base_dir=self._storage_dir, filename=self._storage_name))
-        if initialize_model_storage_fn:
-            initialize_model_storage_fn(model_storage)
-        resource_storage = application_resource_storage(
-            filesystem_rapi.FileSystemResourceAPI, api_kwargs=dict(directory='.'))
-        return WorkflowContext(
-            name=workflow_name,
-            model_storage=model_storage,
-            resource_storage=resource_storage,
-            service_id=service_id_fn(),
-            workflow_name=self.__class__.__name__,
-            task_max_attempts=1,
-            task_retry_interval=1)
-
-    def cleanup(self):
-        if (self._is_storage_temporary and (self._storage_path is not None) and
-                os.path.isfile(self._storage_path)):
-            os.remove(self._storage_path)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/orchestrator/workflow_runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflow_runner.py b/aria/orchestrator/workflow_runner.py
new file mode 100644
index 0000000..8b6b431
--- /dev/null
+++ b/aria/orchestrator/workflow_runner.py
@@ -0,0 +1,147 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Workflow runner
+"""
+
+import sys
+from datetime import datetime
+
+from .context.workflow import WorkflowContext
+from .workflows.builtin import BUILTIN_WORKFLOWS, BUILTIN_WORKFLOWS_PATH_PREFIX
+from .workflows.core.engine import Engine
+from .workflows.executor.process import ProcessExecutor
+from ..exceptions import AriaException
+from ..modeling import utils as modeling_utils
+from ..modeling import models
+from ..utils.imports import import_fullname
+
+
+DEFAULT_TASK_MAX_ATTEMPTS = 1
+DEFAULT_TASK_RETRY_INTERVAL = 1
+# TODO move this constant somewhere in the DSL parser?
+WORKFLOW_POLICY_INTERNAL_PROPERTIES = ('implementation', 'dependencies')
+
+
+class WorkflowRunner(object):
+
+    def __init__(self, workflow_name, service_name, inputs,
+                 model_storage, resource_storage, plugin_manager,
+                 task_max_attempts=DEFAULT_TASK_MAX_ATTEMPTS,
+                 task_retry_interval=DEFAULT_TASK_RETRY_INTERVAL):
+
+        self._model_storage = model_storage
+        self._workflow_name = workflow_name
+        service = model_storage.service.get_by_name(service_name)
+        # the IDs are stored rather than the models themselves, so this module could be used
+        # by several threads without raising errors on model objects shared between threads
+        self._service_id = service.id
+
+        self._validate_workflow_exists_for_service()
+
+        workflow_fn = self._get_workflow_fn()
+
+        execution = self._create_execution_models(inputs)
+        self._execution_id = execution.id
+
+        workflow_context = WorkflowContext(
+            name=self.__class__.__name__,
+            model_storage=self._model_storage,
+            resource_storage=resource_storage,
+            service_id=service.id,
+            execution_id=execution.id,
+            workflow_name=workflow_name,
+            task_max_attempts=task_max_attempts,
+            task_retry_interval=task_retry_interval)
+
+        # transforming the execution inputs to dict, to pass them to the workflow function
+        execution_inputs_dict = {input.name: input.value for input in
+                                 self.execution.inputs.values()}
+        self._tasks_graph = workflow_fn(ctx=workflow_context, **execution_inputs_dict)
+
+        self._engine = Engine(
+            executor=ProcessExecutor(plugin_manager=plugin_manager),
+            workflow_context=workflow_context,
+            tasks_graph=self._tasks_graph)
+
+    @property
+    def execution(self):
+        return self._model_storage.execution.get(self._execution_id)
+
+    @property
+    def service(self):
+        return self._model_storage.service.get(self._service_id)
+
+    def execute(self):
+        # TODO: uncomment; this is commented out for testing purposes
+        # self._validate_no_active_executions()
+        self._engine.execute()
+
+    def cancel(self):
+        self._engine.cancel_execution()
+
+    def _create_execution_models(self, inputs):
+        execution = models.Execution(
+            created_at=datetime.utcnow(),
+            service=self.service,
+            workflow_name=self._workflow_name,
+            inputs={})
+
+        # built-in workflows don't have any inputs, and are also
+        # not a part of the service's workflows field
+        if self._workflow_name not in BUILTIN_WORKFLOWS:
+            workflow_inputs = {k: v for k, v in
+                               self.service.workflows[self._workflow_name].inputs.iteritems()
+                               if k not in WORKFLOW_POLICY_INTERNAL_PROPERTIES}
+
+            execution.inputs = modeling_utils.create_inputs(inputs, workflow_inputs)
+
+        self._model_storage.execution.put(execution)
+        return execution
+
+    def _validate_workflow_exists_for_service(self):
+        if self._workflow_name not in self.service.workflows and \
+                self._workflow_name not in BUILTIN_WORKFLOWS:
+            raise AriaException('No workflow policy {0} declared in service instance {1}'
+                                .format(self._workflow_name, self.service.name))
+
+    def _validate_no_active_executions(self):
+        active_executions_filter = dict(service=self.service,
+                                        status=models.Execution.ACTIVE_STATES)
+        active_executions = self._model_storage.execution.list(filters=active_executions_filter)
+        if active_executions:
+            raise AriaException("Can't start execution; Service {0} has a running "
+                                "execution with id {1}"
+                                .format(self.service.name, active_executions[0].id))
+
+    def _get_workflow_fn(self):
+        if self._workflow_name in BUILTIN_WORKFLOWS:
+            return import_fullname('{0}.{1}'.format(BUILTIN_WORKFLOWS_PATH_PREFIX,
+                                                    self._workflow_name))
+
+        workflow = self.service.workflows[self._workflow_name]
+
+        try:
+            # TODO: perhaps pass to import_fullname as paths instead of appending to sys path?
+            # TODO: revisit; workflow.implementation to be used instead?
+            sys.path.append(workflow.properties['implementation'].value)
+            # sys.path.append(os.path.dirname(str(context.presentation.location)))
+        except KeyError:
+            # no implementation field - a script has been provided directly
+            pass
+
+        workflow_fn = import_fullname(workflow.properties['implementation'].value)
+        return workflow_fn

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/orchestrator/workflows/api/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task.py b/aria/orchestrator/workflows/api/task.py
index f49ec2e..2ec85b9 100644
--- a/aria/orchestrator/workflows/api/task.py
+++ b/aria/orchestrator/workflows/api/task.py
@@ -19,7 +19,7 @@ Provides the tasks to be entered into the task graph
 import copy
 
 from ....modeling import models
-from ....utils.collections import OrderedDict
+from ....modeling import utils as modeling_utils
 from ....utils.uuid import generate_uuid
 from ... import context
 from .. import exceptions
@@ -63,7 +63,6 @@ class OperationTask(BaseTask):
 
     def __init__(self,
                  actor,
-                 actor_type,
                  interface_name,
                  operation_name,
                  runs_on=None,
@@ -76,6 +75,7 @@ class OperationTask(BaseTask):
         :meth:`for_relationship`.
         """
 
+        actor_type = type(actor).__name__.lower()
         assert isinstance(actor, (models.Node, models.Relationship))
         assert actor_type in ('node', 'relationship')
         assert interface_name and operation_name
@@ -93,22 +93,7 @@ class OperationTask(BaseTask):
         self.interface_name = interface_name
         self.operation_name = operation_name
 
-        # Wrap inputs
-        inputs = copy.deepcopy(inputs) if inputs else {}
-        for k, v in inputs.iteritems():
-            if not isinstance(v, models.Parameter):
-                inputs[k] = models.Parameter.wrap(k, v)
-
-        # TODO: Suggestion: these extra inputs could be stored as a separate entry in the task
-        # model, because they are different from the operation inputs. If we do this, then the two
-        # kinds of inputs should *not* be merged here.
-
-        operation = self._get_operation()
-        if operation is None:
-            raise exceptions.OperationNotFoundException(
-                'Could not find operation "{0}" on interface "{1}" for {2} "{3}"'
-                .format(self.operation_name, self.interface_name, actor_type, actor.name))
-
+        operation = self.actor.interfaces[self.interface_name].operations[self.operation_name]
         self.plugin = None
         if operation.plugin_specification:
             self.plugin = OperationTask._find_plugin(operation.plugin_specification)
@@ -117,9 +102,8 @@ class OperationTask(BaseTask):
                     'Could not find plugin of operation "{0}" on interface "{1}" for {2} "{3}"'
                     .format(self.operation_name, self.interface_name, actor_type, actor.name))
 
+        self.inputs = modeling_utils.create_inputs(inputs or {}, operation.inputs)
         self.implementation = operation.implementation
-        self.inputs = OperationTask._merge_inputs(operation.inputs, inputs)
-
         self.name = OperationTask.NAME_FORMAT.format(type=actor_type,
                                                      name=actor.name,
                                                      interface=self.interface_name,
@@ -128,14 +112,6 @@ class OperationTask(BaseTask):
     def __repr__(self):
         return self.name
 
-    def _get_operation(self):
-        interface = self.actor.interfaces.get(self.interface_name)
-        if interface:
-            return interface.operations.get(self.operation_name)
-        return None
-
-
-
     @classmethod
     def for_node(cls,
                  node,
@@ -163,7 +139,6 @@ class OperationTask(BaseTask):
         assert isinstance(node, models.Node)
         return cls(
             actor=node,
-            actor_type='node',
             interface_name=interface_name,
             operation_name=operation_name,
             max_attempts=max_attempts,
@@ -202,7 +177,6 @@ class OperationTask(BaseTask):
         assert runs_on in models.Task.RUNS_ON
         return cls(
             actor=relationship,
-            actor_type='relationship',
             interface_name=interface_name,
             operation_name=operation_name,
             runs_on=runs_on,
@@ -216,13 +190,6 @@ class OperationTask(BaseTask):
         workflow_context = context.workflow.current.get()
         return plugin_specification.find_plugin(workflow_context.model.plugin.list())
 
-    @staticmethod
-    def _merge_inputs(operation_inputs, override_inputs=None):
-        final_inputs = OrderedDict(operation_inputs)
-        if override_inputs:
-            final_inputs.update(override_inputs)
-        return final_inputs
-
 
 class WorkflowTask(BaseTask):
     """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/orchestrator/workflows/builtin/__init__.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/__init__.py b/aria/orchestrator/workflows/builtin/__init__.py
index d43a962..8b13c62 100644
--- a/aria/orchestrator/workflows/builtin/__init__.py
+++ b/aria/orchestrator/workflows/builtin/__init__.py
@@ -24,6 +24,7 @@ from .stop import stop
 
 
 BUILTIN_WORKFLOWS = ('install', 'uninstall', 'start', 'stop')
+BUILTIN_WORKFLOWS_PATH_PREFIX = 'aria.orchestrator.workflows.builtin'
 
 
 __all__ = [

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/orchestrator/workflows/builtin/execute_operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/execute_operation.py b/aria/orchestrator/workflows/builtin/execute_operation.py
index 348f47a..7ee135f 100644
--- a/aria/orchestrator/workflows/builtin/execute_operation.py
+++ b/aria/orchestrator/workflows/builtin/execute_operation.py
@@ -17,6 +17,7 @@
 Builtin execute_operation workflow
 """
 
+from . import utils
 from ..api.task import OperationTask
 from ... import workflow
 
@@ -28,7 +29,6 @@ def execute_operation(
         interface_name,
         operation_name,
         operation_kwargs,
-        allow_kwargs_override,
         run_by_dependency_order,
         type_names,
         node_template_ids,
@@ -41,7 +41,6 @@ def execute_operation(
     :param TaskGraph graph: the graph which will describe the workflow.
     :param basestring operation: the operation name to execute
     :param dict operation_kwargs:
-    :param bool allow_kwargs_override:
     :param bool run_by_dependency_order:
     :param type_names:
     :param node_template_ids:
@@ -71,8 +70,7 @@ def execute_operation(
                 node=node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                operation_kwargs=operation_kwargs,
-                allow_kwargs_override=allow_kwargs_override
+                operation_kwargs=operation_kwargs
             )
         )
 
@@ -108,21 +106,16 @@ def _create_node_task(
         node,
         interface_name,
         operation_name,
-        operation_kwargs,
-        allow_kwargs_override):
+        operation_kwargs):
     """
     A workflow which executes a single operation
     :param node: the node instance to install
     :param basestring operation: the operation name
     :param dict operation_kwargs:
-    :param bool allow_kwargs_override:
     :return:
     """
 
-    if allow_kwargs_override is not None:
-        operation_kwargs['allow_kwargs_override'] = allow_kwargs_override
-
-    return OperationTask.for_node(
+    return utils.create_node_task(
         node=node,
         interface_name=interface_name,
         operation_name=operation_name,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/orchestrator/workflows/builtin/utils.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/utils.py b/aria/orchestrator/workflows/builtin/utils.py
index d79318f..8890084 100644
--- a/aria/orchestrator/workflows/builtin/utils.py
+++ b/aria/orchestrator/workflows/builtin/utils.py
@@ -12,26 +12,31 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ..api.task import OperationTask
+
+from ..api.task import OperationTask, StubTask
 from .. import exceptions
 
 
-def create_node_task(node, interface_name, operation_name):
+def create_node_task(node, interface_name, operation_name, **kwargs):
     """
     Returns a new operation task if the operation exists in the node, otherwise returns None.
     """
 
     try:
+        if _is_empty_task(node, interface_name, operation_name):
+            return StubTask()
+
         return OperationTask.for_node(node=node,
                                       interface_name=interface_name,
-                                      operation_name=operation_name)
+                                      operation_name=operation_name,
+                                      **kwargs)
     except exceptions.OperationNotFoundException:
         # We will skip nodes which do not have the operation
         return None
 
 
 def create_relationships_tasks(
-        node, interface_name, source_operation_name=None, target_operation_name=None):
+        node, interface_name, source_operation_name=None, target_operation_name=None, **kwargs):
     """
     Creates a relationship task (source and target) for all of a node_instance relationships.
     :param basestring source_operation_name: the relationship operation name.
@@ -43,21 +48,18 @@ def create_relationships_tasks(
     """
     sub_tasks = []
     for relationship in node.outbound_relationships:
-        try:
-            relationship_operations = relationship_tasks(
-                relationship,
-                interface_name,
-                source_operation_name=source_operation_name,
-                target_operation_name=target_operation_name)
-            sub_tasks.append(relationship_operations)
-        except exceptions.OperationNotFoundException:
-            # We will skip relationships which do not have the operation
-            pass
+        relationship_operations = relationship_tasks(
+            relationship,
+            interface_name,
+            source_operation_name=source_operation_name,
+            target_operation_name=target_operation_name,
+            **kwargs)
+        sub_tasks.append(relationship_operations)
     return sub_tasks
 
 
-def relationship_tasks(
-        relationship, interface_name, source_operation_name=None, target_operation_name=None):
+def relationship_tasks(relationship, interface_name, source_operation_name=None,
+                       target_operation_name=None, **kwargs):
     """
     Creates a relationship task source and target.
     :param Relationship relationship: the relationship instance itself
@@ -68,19 +70,35 @@ def relationship_tasks(
     """
     operations = []
     if source_operation_name:
-        operations.append(
-            OperationTask.for_relationship(relationship=relationship,
-                                           interface_name=interface_name,
-                                           operation_name=source_operation_name,
-                                           runs_on='source')
-        )
+        try:
+            if _is_empty_task(relationship, interface_name, source_operation_name):
+                operations.append(StubTask())
+            else:
+                operations.append(
+                    OperationTask.for_relationship(relationship=relationship,
+                                                   interface_name=interface_name,
+                                                   operation_name=source_operation_name,
+                                                   runs_on='source',
+                                                   **kwargs)
+                )
+        except exceptions.OperationNotFoundException:
+            # We will skip relationships which do not have the operation
+            pass
     if target_operation_name:
-        operations.append(
-            OperationTask.for_relationship(relationship=relationship,
-                                           interface_name=interface_name,
-                                           operation_name=target_operation_name,
-                                           runs_on='target')
-        )
+        try:
+            if _is_empty_task(relationship, interface_name, target_operation_name):
+                operations.append(StubTask())
+            else:
+                operations.append(
+                    OperationTask.for_relationship(relationship=relationship,
+                                                   interface_name=interface_name,
+                                                   operation_name=target_operation_name,
+                                                   runs_on='target',
+                                                   **kwargs)
+                )
+        except exceptions.OperationNotFoundException:
+            # We will skip relationships which do not have the operation
+            pass
 
     return operations
 
@@ -108,3 +126,15 @@ def create_node_task_dependencies(graph, tasks_and_nodes, reverse=False):
                     graph.add_dependency(dependency, task)
             else:
                 graph.add_dependency(task, dependencies)
+
+
+def _is_empty_task(actor, interface_name, operation_name):
+    interface = actor.interfaces.get(interface_name)
+    if interface:
+        operation = interface.operations.get(operation_name)
+        if operation:
+            return operation.implementation is None
+
+    raise exceptions.OperationNotFoundException(
+        'Could not find operation "{0}" on interface "{1}" for {2} "{3}"'
+        .format(operation_name, interface_name, type(actor).__name__.lower(), actor.name))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/storage/core.py
----------------------------------------------------------------------
diff --git a/aria/storage/core.py b/aria/storage/core.py
index 8302fc9..0e900bb 100644
--- a/aria/storage/core.py
+++ b/aria/storage/core.py
@@ -37,6 +37,7 @@ API:
     * drivers - module, a pool of ARIA standard drivers.
     * StorageDriver - class, abstract model implementation.
 """
+import logging
 
 from aria.logger import LoggerMixin
 from . import sql_mapi
@@ -71,6 +72,10 @@ class Storage(LoggerMixin):
         :param kwargs:
         """
         super(Storage, self).__init__(**kwargs)
+        # Set the logger handler of any storage object to NullHandler.
+        # This is because the absence of a handler otherwise shows up when using the CLI as:
+        # `No handlers could be found for logger "aria.ResourceStorage"`.
+        self.logger.addHandler(logging.NullHandler())
         self.api = api_cls
         self.registered = {}
         self._initiator = initiator
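
A self-contained illustration of why the NullHandler is attached above: under
Python 2, the first log call on a logger with no handlers prints a warning to
stderr instead of being silently discarded.

    import logging

    logger = logging.getLogger('aria.ResourceStorage')
    logger.addHandler(logging.NullHandler())
    # Without the NullHandler, this call would print
    # 'No handlers could be found for logger "aria.ResourceStorage"' to stderr.
    logger.warning('discarded unless a real handler is configured')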

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/utils/archive.py
----------------------------------------------------------------------
diff --git a/aria/utils/archive.py b/aria/utils/archive.py
new file mode 100644
index 0000000..5077dec
--- /dev/null
+++ b/aria/utils/archive.py
@@ -0,0 +1,63 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import tarfile
+import zipfile
+import tempfile
+from contextlib import closing
+
+
+def is_archive(source):
+    return tarfile.is_tarfile(source) or zipfile.is_zipfile(source)
+
+
+def extract_archive(source):
+    if tarfile.is_tarfile(source):
+        return untar(source)
+    elif zipfile.is_zipfile(source):
+        return unzip(source)
+    raise ValueError(
+        'Unsupported archive type provided or archive is not valid: {0}.'.format(source))
+
+
+def tar(source, destination):
+    with closing(tarfile.open(destination, 'w:gz')) as tar:
+        tar.add(source, arcname=os.path.basename(source))
+
+
+def untar(archive, destination=None):
+    if not destination:
+        destination = tempfile.mkdtemp()
+    with closing(tarfile.open(name=archive)) as tar:
+        tar.extractall(path=destination, members=tar.getmembers())
+    return destination
+
+
+def zip(source, destination):
+    with closing(zipfile.ZipFile(destination, 'w')) as zip_file:
+        for root, _, files in os.walk(source):
+            for filename in files:
+                file_path = os.path.join(root, filename)
+                source_dir = os.path.dirname(source)
+                zip_file.write(
+                    file_path, os.path.relpath(file_path, source_dir))
+    return destination
+
+
+def unzip(archive, destination=None):
+    if not destination:
+        destination = tempfile.mkdtemp()
+    with closing(zipfile.ZipFile(archive, 'r')) as zip_file:
+        zip_file.extractall(destination)
+    return destination
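
A short round-trip usage sketch for the helpers above, assuming the patch is
applied and the module is importable as aria.utils.archive:

    import os
    import tempfile

    from aria.utils.archive import tar, is_archive, extract_archive

    source = tempfile.mkdtemp()
    with open(os.path.join(source, 'hello.txt'), 'w') as f:
        f.write('hello')

    destination = os.path.join(tempfile.mkdtemp(), 'src.tar.gz')
    tar(source, destination)  # creates a gzipped tar of the directory

    assert is_archive(destination)
    extracted = extract_archive(destination)  # dispatches to untar()
    print os.listdir(os.path.join(extracted, os.path.basename(source)))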

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/utils/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/utils/exceptions.py b/aria/utils/exceptions.py
index 9e3e80f..b60cee4 100644
--- a/aria/utils/exceptions.py
+++ b/aria/utils/exceptions.py
@@ -15,6 +15,7 @@
 
 import sys
 import linecache
+import StringIO
 import traceback as tb
 
 import jsonpickle
@@ -89,6 +90,16 @@ def _print_stack(frame):
                 puts(line)
 
 
+def get_exception_as_string(exc_type, exc_val, traceback):
+    s_traceback = StringIO.StringIO()
+    tb.print_exception(
+        etype=exc_type,
+        value=exc_val,
+        tb=traceback,
+        file=s_traceback)
+    return s_traceback.getvalue()
+
+
 class _WrappedException(Exception):
 
     def __init__(self, exception_type, exception_str):
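
A usage sketch for the new helper, assuming aria.utils.exceptions is importable:

    import sys

    from aria.utils.exceptions import get_exception_as_string

    try:
        1 / 0
    except ZeroDivisionError:
        # returns the fully formatted traceback as a single string
        print get_exception_as_string(*sys.exc_info())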

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/utils/file.py
----------------------------------------------------------------------
diff --git a/aria/utils/file.py b/aria/utils/file.py
index b515f70..6d1aa16 100644
--- a/aria/utils/file.py
+++ b/aria/utils/file.py
@@ -15,6 +15,7 @@
 
 import errno
 import os
+import shutil
 
 
 def makedirs(path):
@@ -26,3 +27,15 @@ def makedirs(path):
     except IOError as e:
         if e.errno != errno.EEXIST:
             raise
+
+
+def remove_if_exists(path):
+    try:
+        if os.path.isfile(path):
+            os.remove(path)
+        if os.path.isdir(path):
+            shutil.rmtree(path)
+
+    except OSError as e:
+        if e.errno != errno.ENOENT:  # errno.ENOENT = no such file or directory
+            raise  # re-raise exception if a different error occurred
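
remove_if_exists() is safe to call on paths that are already gone; a tiny usage
sketch (assuming aria.utils.file is importable):

    import tempfile

    from aria.utils.file import remove_if_exists

    path = tempfile.mkdtemp()
    remove_if_exists(path)  # removes the directory tree
    remove_if_exists(path)  # a second call on the missing path is a no-op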

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/utils/formatting.py
----------------------------------------------------------------------
diff --git a/aria/utils/formatting.py b/aria/utils/formatting.py
index 8a223e9..698393f 100644
--- a/aria/utils/formatting.py
+++ b/aria/utils/formatting.py
@@ -83,6 +83,49 @@ def full_type_name(value):
     return name if module == '__builtin__' else '%s.%s' % (module, name)
 
 
+def decode_list(data):
+    rv = []
+    for item in data:
+        if isinstance(item, unicode):
+            item = item.encode('utf-8')
+        elif isinstance(item, list):
+            item = decode_list(item)
+        elif isinstance(item, dict):
+            item = decode_dict(item)
+        rv.append(item)
+    return rv
+
+
+def decode_dict(data):
+    rv = {}
+    for key, value in data.iteritems():
+        if isinstance(key, unicode):
+            key = key.encode('utf-8')
+        if isinstance(value, unicode):
+            value = value.encode('utf-8')
+        elif isinstance(value, list):
+            value = decode_list(value)
+        elif isinstance(value, dict):
+            value = decode_dict(value)
+        rv[key] = value
+    return rv
+
+
+def try_convert_from_str(string, target_type):
+    if target_type == basestring:
+        return string
+    if target_type == bool:
+        if string.lower() == 'true':
+            return True
+        if string.lower() == 'false':
+            return False
+        return string
+    try:
+        return target_type(string)
+    except ValueError:
+        return string
+
+
 def safe_str(value):
     """
     Like :code:`str` coercion, but makes sure that Unicode strings are properly
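
A minimal sketch of the new helpers (assuming the module is importable as aria.utils.formatting; the values are illustrative):

    from aria.utils import formatting  # assumed import path

    assert formatting.try_convert_from_str('8080', int) == 8080
    assert formatting.try_convert_from_str('true', bool) is True
    assert formatting.try_convert_from_str('not-a-number', int) == 'not-a-number'

    # decode_dict()/decode_list() recursively turn unicode keys and values
    # into UTF-8 encoded byte strings
    decoded = formatting.decode_dict({u'port': u'8080', u'hosts': [u'10.0.0.1']})
    assert decoded == {'port': '8080', 'hosts': ['10.0.0.1']}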

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/utils/threading.py
----------------------------------------------------------------------
diff --git a/aria/utils/threading.py b/aria/utils/threading.py
index b99250d..f4e9c0e 100644
--- a/aria/utils/threading.py
+++ b/aria/utils/threading.py
@@ -15,6 +15,7 @@
 
 from __future__ import absolute_import  # so we can import standard 'threading'
 
+import sys
 import itertools
 import multiprocessing
 from threading import (Thread, Lock)
@@ -255,3 +256,26 @@ class LockedList(list):
 
     def __exit__(self, the_type, value, traceback):
         return self.lock.__exit__(the_type, value, traceback)
+
+
+class ExceptionThread(Thread):
+    """
+    A thread from which top level exceptions can be retrieved or reraised
+    """
+    def __init__(self, *args, **kwargs):
+        Thread.__init__(self, *args, **kwargs)
+        self.exception = None
+
+    def run(self):
+        try:
+            super(ExceptionThread, self).run()
+        except BaseException:
+            self.exception = sys.exc_info()
+
+    def is_error(self):
+        return self.exception is not None
+
+    def raise_error_if_exists(self):
+        if self.is_error():
+            t, v, tb = self.exception
+            raise t, v, tb
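
A brief sketch of retrieving (rather than re-raising) a worker failure from the calling thread (the worker function is illustrative):

    from aria.utils import threading as aria_threading  # assumed import path

    def worker():
        raise RuntimeError('worker failed')

    thread = aria_threading.ExceptionThread(target=worker)
    thread.start()
    thread.join()
    if thread.is_error():
        exc_type, exc_value, _ = thread.exception  # the sys.exc_info() tuple
        print 'worker raised {0}: {1}'.format(exc_type.__name__, exc_value)

raise_error_if_exists() re-raises the stored exception with its original traceback; the new test further below exercises that path.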

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/aria/utils/type.py
----------------------------------------------------------------------
diff --git a/aria/utils/type.py b/aria/utils/type.py
new file mode 100644
index 0000000..e427be1
--- /dev/null
+++ b/aria/utils/type.py
@@ -0,0 +1,57 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def validate_value_type(value, type_name):
+    """Supports both python and yaml type names"""
+    #TODO add timestamp type?
+
+    name_to_type = {
+        'list': list,
+        'dict': dict,
+        'str': str,
+        'unicode': str,
+        'string': str,
+        'int': int,
+        'integer': int,
+        'bool': bool,
+        'boolean': bool,
+        'float': float
+    }
+
+    value_type = name_to_type.get(type_name.lower())
+    if value_type is None:
+        raise ValueError('No supported type_name was provided')
+    try:
+        value_type(value)
+    except ValueError:
+        raise ValueError('Value {0} is not of type {1}'.format(value, type_name))
+
+
+def convert_value_to_type(str_value, type_name):
+    try:
+        if type_name.lower() in ['str', 'unicode']:
+            return str_value.decode('utf-8')
+        elif type_name.lower() == 'int':
+            return int(str_value)
+        elif type_name.lower() == 'bool':
+            return str_value.lower() == 'true'  # bool('false') would be True, so compare the text
+        elif type_name.lower() == 'float':
+            return float(str_value)
+        else:
+            raise ValueError('No supported type_name was provided')
+    except ValueError:
+        raise ValueError('Trying to convert {0} to {1} failed'.format(str_value,
+                                                                      type_name))
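
A short sketch of the two helpers (values are illustrative):

    from aria.utils import type as type_utils  # assumed import path

    type_utils.validate_value_type(8080, 'integer')  # passes silently; raises ValueError otherwise
    assert type_utils.convert_value_to_type('8080', 'int') == 8080
    assert type_utils.convert_value_to_type('3.14', 'float') == 3.14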

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
index d0a39e6..1661623 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
@@ -19,6 +19,7 @@ Creates ARIA service template models based on the TOSCA presentation.
 Relies on many helper methods in the presentation classes. 
 """
 
+import os
 import re
 from types import FunctionType
 from datetime import datetime
@@ -34,7 +35,7 @@ from ..data_types import coerce_value
 
 def create_service_template_model(context): # pylint: disable=too-many-locals,too-many-branches
     model = ServiceTemplate(created_at=datetime.now(),
-                            main_file_name=str(context.presentation.location))
+                            main_file_name=os.path.basename(str(context.presentation.location)))
 
     model.description = context.presentation.get('service_template', 'description', 'value')
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/requirements.in
----------------------------------------------------------------------
diff --git a/requirements.in b/requirements.in
index bc27479..2c7978f 100644
--- a/requirements.in
+++ b/requirements.in
@@ -25,6 +25,11 @@ SQLAlchemy>=1.1.0, <1.2  # version 1.2 dropped support of python 2.6
 wagon==0.6.0
 bottle>=0.12.0, <0.13
 Fabric>=1.13.0, <1.14
+click==4.1
+colorama==0.3.3
+PrettyTable>=0.7,<0.8
+click_didyoumean==0.0.3
+backports.shutil_get_terminal_size==1.0.0
 
 # Since the tool we are using to generate our requirements.txt, `pip-tools`,
 # does not currently support conditional dependencies (;), we're adding our original

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/setup.py
----------------------------------------------------------------------
diff --git a/setup.py b/setup.py
index 7be5275..9603f7a 100644
--- a/setup.py
+++ b/setup.py
@@ -60,7 +60,7 @@ except IOError:
     extras_require = {}
 
 
-console_scripts = ['aria = aria.cli.cli:main']
+console_scripts = ['aria = aria.cli.main:main']
 
 
 class InstallCommand(install):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/tests/mock/context.py
----------------------------------------------------------------------
diff --git a/tests/mock/context.py b/tests/mock/context.py
index f943d7e..ac0a8a7 100644
--- a/tests/mock/context.py
+++ b/tests/mock/context.py
@@ -39,12 +39,17 @@ def simple(tmpdir, inmemory=False, context_kwargs=None, topology=None):
         api_kwargs=dict(directory=os.path.join(tmpdir, 'resources'))
     )
 
+    service_id = topology(model_storage)
+    execution = models.create_execution(model_storage.service.get(service_id))
+    model_storage.execution.put(execution)
+
     final_kwargs = dict(
         name='simple_context',
         model_storage=model_storage,
         resource_storage=resource_storage,
-        service_id=topology(model_storage),
+        service_id=service_id,
         workflow_name=models.WORKFLOW_NAME,
+        execution_id=execution.id,
         task_max_attempts=models.TASK_MAX_ATTEMPTS,
         task_retry_interval=models.TASK_RETRY_INTERVAL
     )

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/tests/mock/models.py
----------------------------------------------------------------------
diff --git a/tests/mock/models.py b/tests/mock/models.py
index 1d29e2d..6b7f810 100644
--- a/tests/mock/models.py
+++ b/tests/mock/models.py
@@ -40,7 +40,6 @@ from aria.orchestrator.workflows.builtin.workflows import (
 SERVICE_NAME = 'test_service_name'
 SERVICE_TEMPLATE_NAME = 'test_service_template_name'
 WORKFLOW_NAME = 'test_workflow_name'
-EXECUTION_NAME = 'test_execution_name'
 TASK_RETRY_INTERVAL = 1
 TASK_MAX_ATTEMPTS = 1
 
@@ -168,6 +167,13 @@ def create_interface_template(service_template, interface_name, operation_name,
 def create_interface(service, interface_name, operation_name, operation_kwargs=None,
                      interface_kwargs=None):
     the_type = service.service_template.interface_types.get_descendant('test_interface_type')
+
+    if operation_kwargs and operation_kwargs.get('inputs'):
+        wrapped_inputs = {}
+        for input_name, input_value in operation_kwargs['inputs'].iteritems():
+            wrapped_inputs[input_name] = models.Parameter.wrap(input_name, input_value)
+        operation_kwargs['inputs'] = wrapped_inputs
+
     operation = models.Operation(
         name=operation_name,
         **(operation_kwargs or {})
@@ -183,10 +189,11 @@ def create_interface(service, interface_name, operation_name, operation_kwargs=N
 def create_execution(service):
     return models.Execution(
         service=service,
-        status=models.Execution.STARTED,
+        status=models.Execution.PENDING,
         workflow_name=WORKFLOW_NAME,
+        created_at=datetime.utcnow(),
         started_at=datetime.utcnow(),
-        parameters=None
+        inputs={}
     )
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/tests/modeling/test_models.py
----------------------------------------------------------------------
diff --git a/tests/modeling/test_models.py b/tests/modeling/test_models.py
index 8cd00f8..e459821 100644
--- a/tests/modeling/test_models.py
+++ b/tests/modeling/test_models.py
@@ -253,7 +253,7 @@ class TestService(object):
 class TestExecution(object):
 
     @pytest.mark.parametrize(
-        'is_valid, created_at, started_at, ended_at, error, is_system_workflow, parameters, '
+        'is_valid, created_at, started_at, ended_at, error, is_system_workflow, inputs, '
         'status, workflow_name',
         [
             (False, m_cls, now, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
@@ -268,11 +268,11 @@ class TestExecution(object):
             (True, now, None, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
             (True, now, now, None, 'error', False, {}, Execution.STARTED, 'wf_name'),
             (True, now, now, now, None, False, {}, Execution.STARTED, 'wf_name'),
-            (True, now, now, now, 'error', False, None, Execution.STARTED, 'wf_name'),
+            (True, now, now, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
         ]
     )
     def test_execution_model_creation(self, service_storage, is_valid, created_at, started_at,
-                                      ended_at, error, is_system_workflow, parameters, status,
+                                      ended_at, error, is_system_workflow, inputs, status,
                                       workflow_name):
         execution = _test_model(
             is_valid=is_valid,
@@ -285,7 +285,7 @@ class TestExecution(object):
                 ended_at=ended_at,
                 error=error,
                 is_system_workflow=is_system_workflow,
-                parameters=parameters,
+                inputs=inputs,
                 status=status,
                 workflow_name=workflow_name,
             ))
@@ -299,7 +299,7 @@ class TestExecution(object):
                 id='e_id',
                 workflow_name='w_name',
                 status=status,
-                parameters={},
+                inputs={},
                 created_at=now,
             )
             return execution

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index f55b83e..47c82dc 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -69,16 +69,17 @@ def test_node_operation_task_execution(ctx, thread_executor):
     interface_name = 'Standard'
     operation_name = 'create'
 
+    inputs = {'putput': True}
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
     interface = mock.models.create_interface(
         node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__),
+                              inputs=inputs)
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
-    inputs = {'putput': True}
 
     @workflow
     def basic_workflow(graph, **_):
@@ -124,17 +125,18 @@ def test_relationship_operation_task_execution(ctx, thread_executor):
     interface_name = 'Configure'
     operation_name = 'post_configure'
 
+    inputs = {'putput': True}
     relationship = ctx.model.relationship.list()[0]
     interface = mock.models.create_interface(
         relationship.source_node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__)),
+        operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__),
+                              inputs=inputs),
     )
 
     relationship.interfaces[interface.name] = interface
     ctx.model.relationship.update(relationship)
-    inputs = {'putput': True}
 
     @workflow
     def basic_workflow(graph, **_):
@@ -232,21 +234,21 @@ def test_plugin_workdir(ctx, thread_executor, tmpdir):
     ctx.model.plugin.put(plugin)
     plugin_specification = mock.models.create_plugin_specification()
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    filename = 'test_file'
+    content = 'file content'
+    inputs = {'filename': filename, 'content': content}
     interface = mock.models.create_interface(
         node.service,
         interface_name,
         operation_name,
         operation_kwargs=dict(
             implementation='{0}.{1}'.format(__name__, _test_plugin_workdir.__name__),
-            plugin_specification=plugin_specification)
+            plugin_specification=plugin_specification,
+            inputs=inputs)
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
 
-    filename = 'test_file'
-    content = 'file content'
-    inputs = {'filename': filename, 'content': content}
-
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(api.task.OperationTask.for_node(node=node,
@@ -278,21 +280,22 @@ def test_node_operation_logging(ctx, executor):
     interface_name, operation_name = mock.operations.NODE_OPERATIONS_INSTALL[0]
 
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+
+    inputs = {
+        'op_start': 'op_start',
+        'op_end': 'op_end',
+    }
     interface = mock.models.create_interface(
         node.service,
         interface_name,
         operation_name,
         operation_kwargs=dict(
-            implementation=op_path(logged_operation, module_path=__name__))
+            implementation=op_path(logged_operation, module_path=__name__),
+            inputs=inputs)
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
 
-    inputs = {
-        'op_start': 'op_start',
-        'op_end': 'op_end',
-    }
-
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
@@ -312,20 +315,20 @@ def test_relationship_operation_logging(ctx, executor):
     interface_name, operation_name = mock.operations.RELATIONSHIP_OPERATIONS_INSTALL[0]
 
     relationship = ctx.model.relationship.list()[0]
+    inputs = {
+        'op_start': 'op_start',
+        'op_end': 'op_end',
+    }
     interface = mock.models.create_interface(
         relationship.source_node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(logged_operation, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(logged_operation, module_path=__name__),
+                              inputs=inputs)
     )
     relationship.interfaces[interface.name] = interface
     ctx.model.relationship.update(relationship)
 
-    inputs = {
-        'op_start': 'op_start',
-        'op_end': 'op_end',
-    }
-
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/tests/orchestrator/context/test_resource_render.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_resource_render.py b/tests/orchestrator/context/test_resource_render.py
index 696e9b3..8113746 100644
--- a/tests/orchestrator/context/test_resource_render.py
+++ b/tests/orchestrator/context/test_resource_render.py
@@ -64,9 +64,9 @@ def resources(tmpdir, ctx):
     implicit_ctx_template_path.write(_IMPLICIT_CTX_TEMPLATE)
     variables_template_path = tmpdir.join(_VARIABLES_TEMPLATE_PATH)
     variables_template_path.write(_VARIABLES_TEMPLATE)
-    ctx.resource.deployment.upload(entry_id='1',
+    ctx.resource.service.upload(entry_id='1',
                                    source=str(implicit_ctx_template_path),
                                    path=_IMPLICIT_CTX_TEMPLATE_PATH)
-    ctx.resource.deployment.upload(entry_id='1',
+    ctx.resource.service.upload(entry_id='1',
                                    source=str(variables_template_path),
                                    path=_VARIABLES_TEMPLATE_PATH)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/tests/orchestrator/context/test_serialize.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_serialize.py b/tests/orchestrator/context/test_serialize.py
index db45e8e..5fdb674 100644
--- a/tests/orchestrator/context/test_serialize.py
+++ b/tests/orchestrator/context/test_serialize.py
@@ -34,7 +34,7 @@ def test_serialize_operation_context(context, executor, tmpdir):
     test_file = tmpdir.join(TEST_FILE_NAME)
     test_file.write(TEST_FILE_CONTENT)
     resource = context.resource
-    resource.blueprint.upload(TEST_FILE_ENTRY_ID, str(test_file))
+    resource.service_template.upload(TEST_FILE_ENTRY_ID, str(test_file))
     graph = _mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter
     eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
     eng.execute()
@@ -73,7 +73,7 @@ def _mock_operation(ctx):
     # a correct ctx.deployment.name tells us we kept the correct deployment_id
     assert ctx.service.name == mock.models.SERVICE_NAME
     # Here we test that the resource storage was properly re-created
-    test_file_content = ctx.resource.blueprint.read(TEST_FILE_ENTRY_ID, TEST_FILE_NAME)
+    test_file_content = ctx.resource.service_template.read(TEST_FILE_ENTRY_ID, TEST_FILE_NAME)
     assert test_file_content == TEST_FILE_CONTENT
     # a non empty plugin workdir tells us that we kept the correct base_workdir
     assert ctx.plugin_workdir is not None

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index cf82127..213d964 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -76,15 +76,16 @@ def test_host_ip(workflow_context, executor):
     interface_name = 'Standard'
     operation_name = 'create'
     _, dependency_node, _, _, _ = _get_elements(workflow_context)
+    inputs = {'putput': True}
     interface = mock.models.create_interface(
         dependency_node.service,
         interface_name=interface_name,
         operation_name=operation_name,
-        operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__),
+                              inputs=inputs)
     )
     dependency_node.interfaces[interface.name] = interface
     workflow_context.model.node.update(dependency_node)
-    inputs = {'putput': True}
 
     @workflow
     def basic_workflow(graph, **_):
@@ -106,17 +107,17 @@ def test_relationship_tool_belt(workflow_context, executor):
     interface_name = 'Configure'
     operation_name = 'post_configure'
     _, _, _, _, relationship = _get_elements(workflow_context)
+    inputs = {'putput': True}
     interface = mock.models.create_interface(
         relationship.source_node.service,
         interface_name=interface_name,
         operation_name=operation_name,
-        operation_kwargs=dict(implementation=op_path(relationship_operation, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(relationship_operation, module_path=__name__),
+                              inputs=inputs)
     )
     relationship.interfaces[interface.name] = interface
     workflow_context.model.relationship.update(relationship)
 
-    inputs = {'putput': True}
-
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/tests/orchestrator/context/test_workflow.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_workflow.py b/tests/orchestrator/context/test_workflow.py
index fa1f387..3c35435 100644
--- a/tests/orchestrator/context/test_workflow.py
+++ b/tests/orchestrator/context/test_workflow.py
@@ -35,7 +35,7 @@ class TestWorkflowContext(object):
         assert execution.service_template == storage.service_template.get_by_name(
             models.SERVICE_TEMPLATE_NAME)
         assert execution.status == storage.execution.model_cls.PENDING
-        assert execution.parameters == {}
+        assert execution.inputs == {}
         assert execution.created_at <= datetime.utcnow()
 
     def test_subsequent_workflow_context_creation_do_not_fail(self, storage):
@@ -49,11 +49,13 @@ class TestWorkflowContext(object):
         :param storage:
         :return WorkflowContext:
         """
+        service = storage.service.get_by_name(models.SERVICE_NAME)
         return context.workflow.WorkflowContext(
             name='simple_context',
             model_storage=storage,
             resource_storage=None,
-            service_id=storage.service.get_by_name(models.SERVICE_NAME).id,
+            service_id=service.id,
+            execution_id=storage.execution.list(filters=dict(service=service))[0].id,
             workflow_name=models.WORKFLOW_NAME,
             task_max_attempts=models.TASK_MAX_ATTEMPTS,
             task_retry_interval=models.TASK_RETRY_INTERVAL
@@ -66,6 +68,8 @@ def storage():
         sql_mapi.SQLAlchemyModelAPI, initiator=test_storage.init_inmemory_model_storage)
     workflow_storage.service_template.put(models.create_service_template())
     service_template = workflow_storage.service_template.get_by_name(models.SERVICE_TEMPLATE_NAME)
-    workflow_storage.service.put(models.create_service(service_template))
+    service = models.create_service(service_template)
+    workflow_storage.service.put(service)
+    workflow_storage.execution.put(models.create_execution(service))
     yield workflow_storage
     test_storage.release_sqlite_storage(workflow_storage)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/tests/orchestrator/execution_plugin/test_local.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_local.py b/tests/orchestrator/execution_plugin/test_local.py
index e3612cf..67d527c 100644
--- a/tests/orchestrator/execution_plugin/test_local.py
+++ b/tests/orchestrator/execution_plugin/test_local.py
@@ -460,14 +460,15 @@ if __name__ == '__main__':
              env_var='value',
              inputs=None):
         local_script_path = script_path
-        script_path = os.path.basename(local_script_path) if local_script_path else None
+        script_path = os.path.basename(local_script_path) if local_script_path else ''
+        inputs = inputs or {}
+        process = process or {}
         if script_path:
-            workflow_context.resource.deployment.upload(
+            workflow_context.resource.service.upload(
                 entry_id=str(workflow_context.service.id),
                 source=local_script_path,
                 path=script_path)
 
-        inputs = inputs or {}
         inputs.update({
             'script_path': script_path,
             'process': process,
@@ -483,7 +484,8 @@ if __name__ == '__main__':
                 'op',
                 operation_kwargs=dict(implementation='{0}.{1}'.format(
                     operations.__name__,
-                    operations.run_script_locally.__name__))
+                    operations.run_script_locally.__name__),
+                    inputs=inputs)
             )
             node.interfaces[interface.name] = interface
             graph.add_tasks(api.task.OperationTask.for_node(

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/tests/orchestrator/execution_plugin/test_ssh.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_ssh.py b/tests/orchestrator/execution_plugin/test_ssh.py
index dd36466..d17def1 100644
--- a/tests/orchestrator/execution_plugin/test_ssh.py
+++ b/tests/orchestrator/execution_plugin/test_ssh.py
@@ -258,7 +258,7 @@ class TestWithActualSSHServer(object):
         return collected[signal][0]['kwargs']['exception']
 
     def _upload(self, source, path):
-        self._workflow_context.resource.deployment.upload(
+        self._workflow_context.resource.service.upload(
             entry_id=str(self._workflow_context.service.id),
             source=source,
             path=path)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/tests/orchestrator/workflows/builtin/test_execute_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_execute_operation.py b/tests/orchestrator/workflows/builtin/test_execute_operation.py
index 360e17d..41e2b6b 100644
--- a/tests/orchestrator/workflows/builtin/test_execute_operation.py
+++ b/tests/orchestrator/workflows/builtin/test_execute_operation.py
@@ -34,7 +34,8 @@ def test_execute_operation(ctx):
     interface = mock.models.create_interface(
         ctx.service,
         interface_name,
-        operation_name
+        operation_name,
+        operation_kwargs={'implementation': 'stub-implementation'}
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index 6f97952..e793c49 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -61,12 +61,18 @@ class BaseTest(object):
             retry_interval=None,
             ignore_failure=None):
         node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+
+        operation_kwargs = dict(implementation='{name}.{func.__name__}'.format(
+            name=__name__, func=func))
+        if inputs:
+            # the operation has to declare its inputs before they can be passed
+            operation_kwargs['inputs'] = inputs
+
         interface = mock.models.create_interface(
             node.service,
             'aria.interfaces.lifecycle',
             'create',
-            operation_kwargs=dict(implementation='{name}.{func.__name__}'.format(name=__name__,
-                                                                                 func=func))
+            operation_kwargs=operation_kwargs
         )
         node.interfaces[interface.name] = interface
         return api.task.OperationTask.for_node(

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
deleted file mode 100644
index 0a95d43..0000000
--- a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from networkx import topological_sort, DiGraph
-
-from aria.orchestrator import context
-from aria.orchestrator.workflows import api, core
-
-from tests import mock
-from tests import storage
-
-
-def test_task_graph_into_execution_graph(tmpdir):
-    interface_name = 'Standard'
-    operation_name = 'create'
-    task_context = mock.context.simple(str(tmpdir))
-    node = task_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-    interface = mock.models.create_interface(
-        node.service,
-        interface_name,
-        operation_name
-    )
-    node.interfaces[interface.name] = interface
-    task_context.model.node.update(node)
-
-    def sub_workflow(name, **_):
-        return api.task_graph.TaskGraph(name)
-
-    with context.workflow.current.push(task_context):
-        test_task_graph = api.task.WorkflowTask(sub_workflow, name='test_task_graph')
-        simple_before_task = api.task.OperationTask.for_node(node=node,
-                                                             interface_name=interface_name,
-                                                             operation_name=operation_name)
-        simple_after_task = api.task.OperationTask.for_node(node=node,
-                                                            interface_name=interface_name,
-                                                            operation_name=operation_name)
-
-        inner_task_graph = api.task.WorkflowTask(sub_workflow, name='test_inner_task_graph')
-        inner_task = api.task.OperationTask.for_node(node=node,
-                                                     interface_name=interface_name,
-                                                     operation_name=operation_name)
-        inner_task_graph.add_tasks(inner_task)
-
-    test_task_graph.add_tasks(simple_before_task)
-    test_task_graph.add_tasks(simple_after_task)
-    test_task_graph.add_tasks(inner_task_graph)
-    test_task_graph.add_dependency(inner_task_graph, simple_before_task)
-    test_task_graph.add_dependency(simple_after_task, inner_task_graph)
-
-    # Direct check
-    execution_graph = DiGraph()
-    core.translation.build_execution_graph(task_graph=test_task_graph,
-                                           execution_graph=execution_graph)
-    execution_tasks = topological_sort(execution_graph)
-
-    assert len(execution_tasks) == 7
-
-    expected_tasks_names = [
-        '{0}-Start'.format(test_task_graph.id),
-        simple_before_task.id,
-        '{0}-Start'.format(inner_task_graph.id),
-        inner_task.id,
-        '{0}-End'.format(inner_task_graph.id),
-        simple_after_task.id,
-        '{0}-End'.format(test_task_graph.id)
-    ]
-
-    assert expected_tasks_names == execution_tasks
-
-    assert isinstance(_get_task_by_name(execution_tasks[0], execution_graph),
-                      core.task.StartWorkflowTask)
-
-    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[1], execution_graph),
-                                  simple_before_task)
-    assert isinstance(_get_task_by_name(execution_tasks[2], execution_graph),
-                      core.task.StartSubWorkflowTask)
-
-    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[3], execution_graph),
-                                  inner_task)
-    assert isinstance(_get_task_by_name(execution_tasks[4], execution_graph),
-                      core.task.EndSubWorkflowTask)
-
-    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[5], execution_graph),
-                                  simple_after_task)
-    assert isinstance(_get_task_by_name(execution_tasks[6], execution_graph),
-                      core.task.EndWorkflowTask)
-    storage.release_sqlite_storage(task_context.model)
-
-
-def _assert_execution_is_api_task(execution_task, api_task):
-    assert execution_task.id == api_task.id
-    assert execution_task.name == api_task.name
-    assert execution_task.implementation == api_task.implementation
-    assert execution_task.actor == api_task.actor
-    assert execution_task.inputs == api_task.inputs
-
-
-def _get_task_by_name(task_name, graph):
-    return graph.node[task_name]['task']

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
new file mode 100644
index 0000000..0a95d43
--- /dev/null
+++ b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
@@ -0,0 +1,111 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from networkx import topological_sort, DiGraph
+
+from aria.orchestrator import context
+from aria.orchestrator.workflows import api, core
+
+from tests import mock
+from tests import storage
+
+
+def test_task_graph_into_execution_graph(tmpdir):
+    interface_name = 'Standard'
+    operation_name = 'create'
+    task_context = mock.context.simple(str(tmpdir))
+    node = task_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    interface = mock.models.create_interface(
+        node.service,
+        interface_name,
+        operation_name
+    )
+    node.interfaces[interface.name] = interface
+    task_context.model.node.update(node)
+
+    def sub_workflow(name, **_):
+        return api.task_graph.TaskGraph(name)
+
+    with context.workflow.current.push(task_context):
+        test_task_graph = api.task.WorkflowTask(sub_workflow, name='test_task_graph')
+        simple_before_task = api.task.OperationTask.for_node(node=node,
+                                                             interface_name=interface_name,
+                                                             operation_name=operation_name)
+        simple_after_task = api.task.OperationTask.for_node(node=node,
+                                                            interface_name=interface_name,
+                                                            operation_name=operation_name)
+
+        inner_task_graph = api.task.WorkflowTask(sub_workflow, name='test_inner_task_graph')
+        inner_task = api.task.OperationTask.for_node(node=node,
+                                                     interface_name=interface_name,
+                                                     operation_name=operation_name)
+        inner_task_graph.add_tasks(inner_task)
+
+    test_task_graph.add_tasks(simple_before_task)
+    test_task_graph.add_tasks(simple_after_task)
+    test_task_graph.add_tasks(inner_task_graph)
+    test_task_graph.add_dependency(inner_task_graph, simple_before_task)
+    test_task_graph.add_dependency(simple_after_task, inner_task_graph)
+
+    # Direct check
+    execution_graph = DiGraph()
+    core.translation.build_execution_graph(task_graph=test_task_graph,
+                                           execution_graph=execution_graph)
+    execution_tasks = topological_sort(execution_graph)
+
+    assert len(execution_tasks) == 7
+
+    expected_tasks_names = [
+        '{0}-Start'.format(test_task_graph.id),
+        simple_before_task.id,
+        '{0}-Start'.format(inner_task_graph.id),
+        inner_task.id,
+        '{0}-End'.format(inner_task_graph.id),
+        simple_after_task.id,
+        '{0}-End'.format(test_task_graph.id)
+    ]
+
+    assert expected_tasks_names == execution_tasks
+
+    assert isinstance(_get_task_by_name(execution_tasks[0], execution_graph),
+                      core.task.StartWorkflowTask)
+
+    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[1], execution_graph),
+                                  simple_before_task)
+    assert isinstance(_get_task_by_name(execution_tasks[2], execution_graph),
+                      core.task.StartSubWorkflowTask)
+
+    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[3], execution_graph),
+                                  inner_task)
+    assert isinstance(_get_task_by_name(execution_tasks[4], execution_graph),
+                      core.task.EndSubWorkflowTask)
+
+    _assert_execution_is_api_task(_get_task_by_name(execution_tasks[5], execution_graph),
+                                  simple_after_task)
+    assert isinstance(_get_task_by_name(execution_tasks[6], execution_graph),
+                      core.task.EndWorkflowTask)
+    storage.release_sqlite_storage(task_context.model)
+
+
+def _assert_execution_is_api_task(execution_task, api_task):
+    assert execution_task.id == api_task.id
+    assert execution_task.name == api_task.name
+    assert execution_task.implementation == api_task.implementation
+    assert execution_task.actor == api_task.actor
+    assert execution_task.inputs == api_task.inputs
+
+
+def _get_task_by_name(task_name, graph):
+    return graph.node[task_name]['task']

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3f30fa35/tests/utils/test_threading.py
----------------------------------------------------------------------
diff --git a/tests/utils/test_threading.py b/tests/utils/test_threading.py
new file mode 100644
index 0000000..39ce717
--- /dev/null
+++ b/tests/utils/test_threading.py
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import pytest
+
+from aria.utils import threading
+
+
+class TestPluginManager(object):
+
+    def test_exception_raised_from_thread(self):
+
+        def error_raising_func():
+            raise ValueError('This is an error')
+
+        thread = threading.ExceptionThread(target=error_raising_func)
+        thread.start()
+        thread.join()
+
+        assert thread.is_error()
+        with pytest.raises(ValueError):
+            thread.raise_error_if_exists()


[5/9] incubator-ariatosca git commit: ARIA-86-Create-a-basic-Hello-World-blueprint-example

Posted by ra...@apache.org.
ARIA-86-Create-a-basic-Hello-World-blueprint-example


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/16ae46a6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/16ae46a6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/16ae46a6

Branch: refs/heads/ARIA-48-aria-cli
Commit: 16ae46a61b01acd2b654acc301e40cc29c683e9a
Parents: 4400e30
Author: max-orlov <ma...@gigaspaces.com>
Authored: Thu Mar 30 17:29:17 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Thu Mar 30 17:59:55 2017 +0300

----------------------------------------------------------------------
 examples/hello-world/helloworld.yaml      |  27 +++++++++++++
 examples/hello-world/images/aria-logo.png | Bin 0 -> 23601 bytes
 examples/hello-world/index.html           |  14 +++++++
 examples/hello-world/scripts/configure.sh |  23 +++++++++++
 examples/hello-world/scripts/start.sh     |  52 +++++++++++++++++++++++++
 examples/hello-world/scripts/stop.sh      |  15 +++++++
 6 files changed, 131 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16ae46a6/examples/hello-world/helloworld.yaml
----------------------------------------------------------------------
diff --git a/examples/hello-world/helloworld.yaml b/examples/hello-world/helloworld.yaml
new file mode 100644
index 0000000..b6efaca
--- /dev/null
+++ b/examples/hello-world/helloworld.yaml
@@ -0,0 +1,27 @@
+tosca_definitions_version: tosca_simple_profile_for_nfv_1_0
+
+node_types:
+  web_app:
+    derived_from: tosca.nodes.WebApplication
+    properties:
+      port:
+        type: integer
+        default: 8080
+
+topology_template:
+
+  node_templates:
+    web_server:
+      type: tosca.nodes.WebServer
+
+    web_app:
+      type: web_app
+      properties:
+        port: 9090
+      requirements:
+        - host: web_server
+      interfaces:
+        Standard:
+          configure: scripts/configure.sh
+          start: scripts/start.sh
+          stop: scripts/stop.sh

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16ae46a6/examples/hello-world/images/aria-logo.png
----------------------------------------------------------------------
diff --git a/examples/hello-world/images/aria-logo.png b/examples/hello-world/images/aria-logo.png
new file mode 100644
index 0000000..3505844
Binary files /dev/null and b/examples/hello-world/images/aria-logo.png differ

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16ae46a6/examples/hello-world/index.html
----------------------------------------------------------------------
diff --git a/examples/hello-world/index.html b/examples/hello-world/index.html
new file mode 100644
index 0000000..8d21c3a
--- /dev/null
+++ b/examples/hello-world/index.html
@@ -0,0 +1,14 @@
+<html>
+    <head>
+        <title>ARIA Hello World</title>
+    </head>
+<body>
+    <h1>Hello, World!</h1>
+    <p>
+        blueprint_id = {{ ctx.service_template.name }}<br/>
+        deployment_id = {{ ctx.service.name }}<br/>
+        node_id = {{ ctx.node.name }}
+    </p>
+    <img src="aria-logo.png">
+</body>
+</html>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16ae46a6/examples/hello-world/scripts/configure.sh
----------------------------------------------------------------------
diff --git a/examples/hello-world/scripts/configure.sh b/examples/hello-world/scripts/configure.sh
new file mode 100755
index 0000000..b55ec17
--- /dev/null
+++ b/examples/hello-world/scripts/configure.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+set -e
+
+TEMP_DIR="/tmp"
+PYTHON_FILE_SERVER_ROOT=${TEMP_DIR}/python-simple-http-webserver
+if [ -d ${PYTHON_FILE_SERVER_ROOT} ]; then
+	echo "Removing file server root folder ${PYTHON_FILE_SERVER_ROOT}"
+	rm -rf ${PYTHON_FILE_SERVER_ROOT}
+fi
+ctx logger info "Creating HTTP server root directory at ${PYTHON_FILE_SERVER_ROOT}"
+
+mkdir -p ${PYTHON_FILE_SERVER_ROOT}
+
+cd ${PYTHON_FILE_SERVER_ROOT}
+
+index_path="index.html"
+image_path="images/aria-logo.png"
+
+ctx logger info "Downloading blueprint resources..."
+ctx download-resource-and-render ${PYTHON_FILE_SERVER_ROOT}/index.html ${index_path}
+ctx download-resource ${PYTHON_FILE_SERVER_ROOT}/aria-logo.png ${image_path}
+

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16ae46a6/examples/hello-world/scripts/start.sh
----------------------------------------------------------------------
diff --git a/examples/hello-world/scripts/start.sh b/examples/hello-world/scripts/start.sh
new file mode 100755
index 0000000..96298c5
--- /dev/null
+++ b/examples/hello-world/scripts/start.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+
+set -e
+
+TEMP_DIR="/tmp"
+PYTHON_FILE_SERVER_ROOT=${TEMP_DIR}/python-simple-http-webserver
+PID_FILE="server.pid"
+
+ctx logger info "Starting HTTP server from ${PYTHON_FILE_SERVER_ROOT}"
+
+port=$(ctx node properties port)
+
+cd ${PYTHON_FILE_SERVER_ROOT}
+ctx logger info "Starting SimpleHTTPServer"
+nohup python -m SimpleHTTPServer ${port} > /dev/null 2>&1 &
+echo $! > ${PID_FILE}
+
+ctx logger info "Waiting for server to launch on port ${port}"
+url="http://localhost:${port}"
+
+server_is_up() {
+	if which wget >/dev/null; then
+		if wget $url >/dev/null; then
+			return 0
+		fi
+	elif which curl >/dev/null; then
+		if curl $url >/dev/null; then
+			return 0
+		fi
+	else
+		ctx logger error "Neither curl nor wget was found in PATH"
+		exit 1
+	fi
+	return 1
+}
+
+STARTED=false
+for i in $(seq 1 15)
+do
+	if server_is_up; then
+		ctx logger info "Server is up."
+		STARTED=true
+		break
+	else
+		ctx logger info "Server is not up yet. Waiting 1 second."
+		sleep 1
+	fi
+done
+if [ ${STARTED} = false ]; then
+	ctx logger error "Failed to start the web server within 15 seconds."
+	exit 1
+fi

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16ae46a6/examples/hello-world/scripts/stop.sh
----------------------------------------------------------------------
diff --git a/examples/hello-world/scripts/stop.sh b/examples/hello-world/scripts/stop.sh
new file mode 100755
index 0000000..5461caf
--- /dev/null
+++ b/examples/hello-world/scripts/stop.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+set -e
+
+TEMP_DIR="/tmp"
+PYTHON_FILE_SERVER_ROOT=${TEMP_DIR}/python-simple-http-webserver
+PID_FILE="server.pid"
+
+PID=`cat ${PYTHON_FILE_SERVER_ROOT}/${PID_FILE}`
+
+ctx logger info "Shutting down file server. pid = ${PID}"
+kill -9 ${PID} || exit $?
+
+ctx logger info "Deleting file server root directory (${PYTHON_FILE_SERVER_ROOT})"
+rm -rf ${PYTHON_FILE_SERVER_ROOT}