Posted to dev@ariatosca.apache.org by em...@apache.org on 2017/03/21 20:00:36 UTC

[01/12] incubator-ariatosca git commit: ARIA-105 Integrate parser and orchestrator models

Repository: incubator-ariatosca
Updated Branches:
  refs/heads/master 95177d0f7 -> 9841ca4ae


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/storage/test_resource_storage.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_resource_storage.py b/tests/storage/test_resource_storage.py
index 4d01a88..efacb2e 100644
--- a/tests/storage/test_resource_storage.py
+++ b/tests/storage/test_resource_storage.py
@@ -28,20 +28,20 @@ from . import TestFileSystem
 
 class TestResourceStorage(TestFileSystem):
     def _create(self, storage):
-        storage.register('blueprint')
+        storage.register('service_template')
 
     def _upload(self, storage, tmp_path, id):
         with open(tmp_path, 'w') as f:
             f.write('fake context')
 
-        storage.blueprint.upload(entry_id=id, source=tmp_path)
+        storage.service_template.upload(entry_id=id, source=tmp_path)
 
     def _upload_dir(self, storage, tmp_dir, tmp_file_name, id):
         file_source = os.path.join(tmp_dir, tmp_file_name)
         with open(file_source, 'w') as f:
             f.write('fake context')
 
-        storage.blueprint.upload(entry_id=id, source=tmp_dir)
+        storage.service_template.upload(entry_id=id, source=tmp_dir)
 
     def _create_storage(self):
         return ResourceStorage(FileSystemResourceAPI,
@@ -50,27 +50,27 @@ class TestResourceStorage(TestFileSystem):
     def test_name(self):
         api = FileSystemResourceAPI
         storage = ResourceStorage(FileSystemResourceAPI,
-                                  items=['blueprint'],
+                                  items=['service_template'],
                                   api_kwargs=dict(directory=self.path))
         assert repr(storage) == 'ResourceStorage(api={api})'.format(api=api)
         assert 'directory={resource_dir}'.format(resource_dir=self.path) in \
-               repr(storage.registered['blueprint'])
+               repr(storage.registered['service_template'])
 
     def test_create(self):
         storage = self._create_storage()
         self._create(storage)
-        assert os.path.exists(os.path.join(self.path, 'blueprint'))
+        assert os.path.exists(os.path.join(self.path, 'service_template'))
 
     def test_upload_file(self):
         storage = ResourceStorage(FileSystemResourceAPI, api_kwargs=dict(directory=self.path))
         self._create(storage)
         tmpfile_path = tempfile.mkstemp(suffix=self.__class__.__name__, dir=self.path)[1]
-        self._upload(storage, tmpfile_path, id='blueprint_id')
+        self._upload(storage, tmpfile_path, id='service_template_id')
 
         storage_path = os.path.join(
             self.path,
-            'blueprint',
-            'blueprint_id',
+            'service_template',
+            'service_template_id',
             os.path.basename(tmpfile_path))
         assert os.path.exists(storage_path)
 
@@ -82,11 +82,11 @@ class TestResourceStorage(TestFileSystem):
         self._create(storage)
         tmpfile_path = tempfile.mkstemp(suffix=self.__class__.__name__, dir=self.path)[1]
         tmpfile_name = os.path.basename(tmpfile_path)
-        self._upload(storage, tmpfile_path, 'blueprint_id')
+        self._upload(storage, tmpfile_path, 'service_template_id')
 
         temp_dir = tempfile.mkdtemp(dir=self.path)
-        storage.blueprint.download(
-            entry_id='blueprint_id',
+        storage.service_template.download(
+            entry_id='service_template_id',
             destination=temp_dir,
             path=tmpfile_name)
 
@@ -97,23 +97,23 @@ class TestResourceStorage(TestFileSystem):
         storage = self._create_storage()
         self._create(storage)
         with pytest.raises(exceptions.StorageError):
-            storage.blueprint.download(entry_id='blueprint_id', destination='', path='fake_path')
+            storage.service_template.download(entry_id='service_template_id', destination='',
+                                              path='fake_path')
 
     def test_data_non_existing_file(self):
         storage = self._create_storage()
         self._create(storage)
         with pytest.raises(exceptions.StorageError):
-            storage.blueprint.read(entry_id='blueprint_id', path='fake_path')
+            storage.service_template.read(entry_id='service_template_id', path='fake_path')
 
     def test_data_file(self):
         storage = self._create_storage()
         self._create(storage)
         tmpfile_path = tempfile.mkstemp(suffix=self.__class__.__name__, dir=self.path)[1]
-        self._upload(storage, tmpfile_path, 'blueprint_id')
+        self._upload(storage, tmpfile_path, 'service_template_id')
 
-        assert storage.blueprint.read(
-            entry_id='blueprint_id',
-            path=os.path.basename(tmpfile_path)) == 'fake context'
+        assert storage.service_template.read(entry_id='service_template_id',
+                                             path=os.path.basename(tmpfile_path)) == 'fake context'
 
     def test_upload_dir(self):
         storage = self._create_storage()
@@ -121,12 +121,12 @@ class TestResourceStorage(TestFileSystem):
         tmp_dir = tempfile.mkdtemp(suffix=self.__class__.__name__, dir=self.path)
         second_level_tmp_dir = tempfile.mkdtemp(dir=tmp_dir)
         tmp_filename = tempfile.mkstemp(dir=second_level_tmp_dir)[1]
-        self._upload_dir(storage, tmp_dir, tmp_filename, id='blueprint_id')
+        self._upload_dir(storage, tmp_dir, tmp_filename, id='service_template_id')
 
         destination = os.path.join(
             self.path,
-            'blueprint',
-            'blueprint_id',
+            'service_template',
+            'service_template_id',
             os.path.basename(second_level_tmp_dir),
             os.path.basename(tmp_filename))
 
@@ -138,21 +138,21 @@ class TestResourceStorage(TestFileSystem):
         tmp_dir = tempfile.mkdtemp(suffix=self.__class__.__name__, dir=self.path)
         second_level_tmp_dir = tempfile.mkdtemp(dir=tmp_dir)
         tmp_filename = tempfile.mkstemp(dir=second_level_tmp_dir)[1]
-        self._upload_dir(storage, tmp_dir, tmp_filename, id='blueprint_id')
+        self._upload_dir(storage, tmp_dir, tmp_filename, id='service_template_id')
 
         second_update_file = tempfile.mkstemp(dir=self.path)[1]
         with open(second_update_file, 'w') as f:
             f.write('fake context2')
 
-        storage.blueprint.upload(
-            entry_id='blueprint_id',
+        storage.service_template.upload(
+            entry_id='service_template_id',
             source=second_update_file,
             path=os.path.basename(second_level_tmp_dir))
 
         assert os.path.isfile(os.path.join(
             self.path,
-            'blueprint',
-            'blueprint_id',
+            'service_template',
+            'service_template_id',
             os.path.basename(second_level_tmp_dir),
             os.path.basename(second_update_file)))
 
@@ -162,11 +162,11 @@ class TestResourceStorage(TestFileSystem):
         tmp_dir = tempfile.mkdtemp(suffix=self.__class__.__name__, dir=self.path)
         second_level_tmp_dir = tempfile.mkdtemp(dir=tmp_dir)
         tmp_filename = tempfile.mkstemp(dir=second_level_tmp_dir)[1]
-        self._upload_dir(storage, tmp_dir, tmp_filename, id='blueprint_id')
+        self._upload_dir(storage, tmp_dir, tmp_filename, id='service_template_id')
 
         temp_destination_dir = tempfile.mkdtemp(dir=self.path)
-        storage.blueprint.download(
-            entry_id='blueprint_id',
+        storage.service_template.download(
+            entry_id='service_template_id',
             destination=temp_destination_dir)
 
         destination_file_path = os.path.join(
@@ -187,26 +187,28 @@ class TestResourceStorage(TestFileSystem):
         tempfile.mkstemp(dir=tmp_dir)
         tempfile.mkstemp(dir=tmp_dir)
 
-        storage.blueprint.upload(entry_id='blueprint_id', source=tmp_dir)
+        storage.service_template.upload(entry_id='service_template_id', source=tmp_dir)
 
         with pytest.raises(exceptions.StorageError):
-            storage.blueprint.read(entry_id='blueprint_id', path='')
+            storage.service_template.read(entry_id='service_template_id', path='')
 
     def test_delete_resource(self):
         storage = self._create_storage()
         self._create(storage)
         tmpfile_path = tempfile.mkstemp(suffix=self.__class__.__name__, dir=self.path)[1]
-        self._upload(storage, tmpfile_path, 'blueprint_id')
+        self._upload(storage, tmpfile_path, 'service_template_id')
         tmpfile2_path = tempfile.mkstemp(suffix=self.__class__.__name__, dir=self.path)[1]
-        self._upload(storage, tmpfile2_path, 'blueprint_id')
+        self._upload(storage, tmpfile2_path, 'service_template_id')
 
         # deleting the first resource and expecting an error on read
-        storage.blueprint.delete(entry_id='blueprint_id', path=os.path.basename(tmpfile_path))
+        storage.service_template.delete(entry_id='service_template_id',
+                                        path=os.path.basename(tmpfile_path))
         with pytest.raises(exceptions.StorageError):
-            storage.blueprint.read(entry_id='blueprint_id', path=os.path.basename(tmpfile_path))
+            storage.service_template.read(entry_id='service_template_id',
+                                          path=os.path.basename(tmpfile_path))
         # the second resource should still be available for reading
-        assert storage.blueprint.read(
-            entry_id='blueprint_id',
+        assert storage.service_template.read(
+            entry_id='service_template_id',
             path=os.path.basename(tmpfile2_path)) == 'fake context'
 
     def test_delete_directory(self):
@@ -217,31 +219,31 @@ class TestResourceStorage(TestFileSystem):
         tmp_dir = tempfile.mkdtemp(suffix=self.__class__.__name__, dir=self.path)
         second_level_tmp_dir = tempfile.mkdtemp(dir=tmp_dir)
         tmp_filename = tempfile.mkstemp(dir=second_level_tmp_dir)[1]
-        self._upload_dir(storage, tmp_dir, tmp_filename, id='blueprint_id')
+        self._upload_dir(storage, tmp_dir, tmp_filename, id='service_template_id')
         file_path_in_dir = os.path.join(
             os.path.basename(second_level_tmp_dir),
             os.path.basename(tmp_filename))
 
         # should be able to read the file and download the directory..
-        assert storage.blueprint.read(
-            entry_id='blueprint_id',
+        assert storage.service_template.read(
+            entry_id='service_template_id',
             path=file_path_in_dir) == 'fake context'
-        storage.blueprint.download(
-            entry_id='blueprint_id',
+        storage.service_template.download(
+            entry_id='service_template_id',
             path=os.path.basename(second_level_tmp_dir),
             destination=temp_destination_dir)
 
         # after deletion, the file and directory should both be gone
-        storage.blueprint.delete(
-            entry_id='blueprint_id',
+        storage.service_template.delete(
+            entry_id='service_template_id',
             path=os.path.basename(second_level_tmp_dir))
         with pytest.raises(exceptions.StorageError):
-            assert storage.blueprint.read(
-                entry_id='blueprint_id',
+            assert storage.service_template.read(
+                entry_id='service_template_id',
                 path=file_path_in_dir) == 'fake context'
         with pytest.raises(exceptions.StorageError):
-            storage.blueprint.download(
-                entry_id='blueprint_id',
+            storage.service_template.download(
+                entry_id='service_template_id',
                 path=os.path.basename(second_level_tmp_dir),
                 destination=temp_destination_dir)
 
@@ -253,20 +255,20 @@ class TestResourceStorage(TestFileSystem):
         tmp_dir = tempfile.mkdtemp(suffix=self.__class__.__name__, dir=self.path)
         second_level_tmp_dir = tempfile.mkdtemp(dir=tmp_dir)
         tmp_filename = tempfile.mkstemp(dir=second_level_tmp_dir)[1]
-        self._upload_dir(storage, tmp_dir, tmp_filename, id='blueprint_id')
+        self._upload_dir(storage, tmp_dir, tmp_filename, id='service_template_id')
         file_path_in_dir = os.path.join(
             os.path.basename(second_level_tmp_dir),
             os.path.basename(tmp_filename))
 
         # deleting without specifying a path - delete all resources of this entry
-        storage.blueprint.delete(entry_id='blueprint_id')
+        storage.service_template.delete(entry_id='service_template_id')
         with pytest.raises(exceptions.StorageError):
-            assert storage.blueprint.read(
-                entry_id='blueprint_id',
+            assert storage.service_template.read(
+                entry_id='service_template_id',
                 path=file_path_in_dir) == 'fake context'
         with pytest.raises(exceptions.StorageError):
-            storage.blueprint.download(
-                entry_id='blueprint_id',
+            storage.service_template.download(
+                entry_id='service_template_id',
                 path=os.path.basename(second_level_tmp_dir),
                 destination=temp_destination_dir)
 
@@ -274,4 +276,5 @@ class TestResourceStorage(TestFileSystem):
         storage = self._create_storage()
         self._create(storage)
         # deleting a nonexisting resource - no effect is expected to happen
-        assert storage.blueprint.delete(entry_id='blueprint_id', path='fake-file') is False
+        assert storage.service_template.delete(entry_id='service_template_id',
+                                               path='fake-file') is False
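
For context, the tests above exercise the storage rename from 'blueprint' to 'service_template': the resource kind is registered under the new name and all upload/read/download/delete calls go through storage.service_template. A minimal sketch of that usage follows; the import paths and the temporary directory are assumptions based on the project layout, not shown in this diff:

    import os
    import tempfile

    # Assumed import locations for ResourceStorage and FileSystemResourceAPI.
    from aria.storage import ResourceStorage
    from aria.storage.filesystem_rapi import FileSystemResourceAPI

    directory = tempfile.mkdtemp()
    storage = ResourceStorage(FileSystemResourceAPI, api_kwargs=dict(directory=directory))
    storage.register('service_template')   # previously: storage.register('blueprint')

    # Upload a file under an entry id, then read it back through the renamed API.
    source = tempfile.mkstemp(dir=directory)[1]
    with open(source, 'w') as f:
        f.write('fake context')

    storage.service_template.upload(entry_id='service_template_id', source=source)
    assert storage.service_template.read(
        entry_id='service_template_id',
        path=os.path.basename(source)) == 'fake context'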

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/storage/test_structures.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_structures.py b/tests/storage/test_structures.py
deleted file mode 100644
index 27e99d7..0000000
--- a/tests/storage/test_structures.py
+++ /dev/null
@@ -1,218 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-
-import sqlalchemy
-
-from aria.storage import (
-    ModelStorage,
-    sql_mapi,
-    exceptions,
-    modeling,
-)
-from aria.storage.modeling import type
-
-from ..storage import release_sqlite_storage, structure, init_inmemory_model_storage
-from . import MockModel
-from ..mock import (
-    models,
-    context as mock_context
-)
-
-
-@pytest.fixture
-def storage():
-    base_storage = ModelStorage(sql_mapi.SQLAlchemyModelAPI,
-                                initiator=init_inmemory_model_storage)
-    base_storage.register(MockModel)
-    yield base_storage
-    release_sqlite_storage(base_storage)
-
-
-@pytest.fixture(scope='module', autouse=True)
-def module_cleanup():
-    modeling.model.aria_declarative_base.metadata.remove(MockModel.__table__)  #pylint: disable=no-member
-
-
-@pytest.fixture
-def context(tmpdir):
-    ctx = mock_context.simple(str(tmpdir))
-    yield ctx
-    release_sqlite_storage(ctx.model)
-
-
-def test_inner_dict_update(storage):
-    inner_dict = {'inner_value': 1}
-
-    mock_model = MockModel(model_dict={'inner_dict': inner_dict, 'value': 0})
-    storage.mock_model.put(mock_model)
-
-    storage_mm = storage.mock_model.get(mock_model.id)
-    assert storage_mm == mock_model
-
-    storage_mm.model_dict['inner_dict']['inner_value'] = 2
-    storage_mm.model_dict['value'] = -1
-    storage.mock_model.update(storage_mm)
-    storage_mm = storage.mock_model.get(storage_mm.id)
-
-    assert storage_mm.model_dict['inner_dict']['inner_value'] == 2
-    assert storage_mm.model_dict['value'] == -1
-
-
-def test_inner_list_update(storage):
-    mock_model = MockModel(model_list=[0, [1]])
-    storage.mock_model.put(mock_model)
-
-    storage_mm = storage.mock_model.get(mock_model.id)
-    assert storage_mm == mock_model
-
-    storage_mm.model_list[1][0] = 'new_inner_value'
-    storage_mm.model_list[0] = 'new_value'
-    storage.mock_model.update(storage_mm)
-    storage_mm = storage.mock_model.get(storage_mm.id)
-
-    assert storage_mm.model_list[1][0] == 'new_inner_value'
-    assert storage_mm.model_list[0] == 'new_value'
-
-
-def test_model_to_dict(context):
-    service_instance = context.service_instance
-    service_instance = service_instance.to_dict()
-
-    expected_keys = [
-        'description',
-        '_metadata',
-        'created_at',
-        'permalink',
-        'policy_triggers',
-        'policy_types',
-        'scaling_groups',
-        'updated_at',
-        'workflows',
-    ]
-
-    for expected_key in expected_keys:
-        assert expected_key in service_instance
-
-
-def test_relationship_model_ordering(context):
-    service_instance = context.model.service_instance.get_by_name(models.DEPLOYMENT_NAME)
-    source_node = context.model.node.get_by_name(models.DEPENDENT_NODE_INSTANCE_NAME)
-    target_node = context.model.node.get_by_name(models.DEPENDENCY_NODE_INSTANCE_NAME)
-    new_node_template = modeling.model.NodeTemplate(
-        name='new_node',
-        type_name='test_node_type',
-        type_hierarchy=[],
-        default_instances=1,
-        min_instances=1,
-        max_instances=1,
-        service_template=service_instance.service_template
-    )
-    new_node = modeling.model.Node(
-        name='new_node_instance',
-        runtime_properties={},
-        service_instance=service_instance,
-        node_template=new_node_template,
-        state='',
-        scaling_groups=[]
-    )
-
-    source_to_new_relationship = modeling.model.Relationship(
-        target_node=new_node,
-        source_node=source_node,
-    )
-
-    new_to_target_relationship = modeling.model.Relationship(
-        source_node=new_node,
-        target_node=target_node,
-    )
-
-    context.model.node_template.put(new_node_template)
-    context.model.node.put(new_node)
-    context.model.relationship.put(source_to_new_relationship)
-    context.model.relationship.put(new_to_target_relationship)
-
-    def flip_and_assert(node, direction):
-        """
-        Reversed the order of relationships and assert effects took place.
-        :param node: the node instance to operatate on
-        :param direction: the type of relationships to flip (inbound/outbount)
-        :return:
-        """
-        assert direction in ('inbound', 'outbound')
-
-        relationships = getattr(node, direction + '_relationships').all()
-        assert len(relationships) == 2
-
-        reversed_relationship_instances = list(reversed(relationships))
-        assert relationships != reversed_relationship_instances
-
-        relationships[:] = reversed_relationship_instances
-        context.model.node.update(node)
-        assert relationships == reversed_relationship_instances
-
-    flip_and_assert(source_node, 'outbound')
-    flip_and_assert(target_node, 'inbound')
-
-
-class StrictClass(modeling.model.aria_declarative_base, structure.ModelMixin):
-    __tablename__ = 'strict_class'
-
-    strict_dict = sqlalchemy.Column(type.StrictDict(basestring, basestring))
-    strict_list = sqlalchemy.Column(type.StrictList(basestring))
-
-
-def test_strict_dict():
-
-    strict_class = StrictClass()
-
-    def assert_strict(sc):
-        with pytest.raises(exceptions.StorageError):
-            sc.strict_dict = {'key': 1}
-
-        with pytest.raises(exceptions.StorageError):
-            sc.strict_dict = {1: 'value'}
-
-        with pytest.raises(exceptions.StorageError):
-            sc.strict_dict = {1: 1}
-
-    assert_strict(strict_class)
-    strict_class.strict_dict = {'key': 'value'}
-    assert strict_class.strict_dict == {'key': 'value'}
-
-    assert_strict(strict_class)
-    with pytest.raises(exceptions.StorageError):
-        strict_class.strict_dict['key'] = 1
-    with pytest.raises(exceptions.StorageError):
-        strict_class.strict_dict[1] = 'value'
-    with pytest.raises(exceptions.StorageError):
-        strict_class.strict_dict[1] = 1
-
-
-def test_strict_list():
-    strict_class = StrictClass()
-
-    def assert_strict(sc):
-        with pytest.raises(exceptions.StorageError):
-            sc.strict_list = [1]
-
-    assert_strict(strict_class)
-    strict_class.strict_list = ['item']
-    assert strict_class.strict_list == ['item']
-
-    assert_strict(strict_class)
-    with pytest.raises(exceptions.StorageError):
-        strict_class.strict_list[0] = 1


[04/12] incubator-ariatosca git commit: ARIA-105 Integrate parser and orchestrator models

Posted by em...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/extensions/aria_extension_tosca/simple_v1_0/functions.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/functions.py b/extensions/aria_extension_tosca/simple_v1_0/functions.py
index e385817..405aa8f 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/functions.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/functions.py
@@ -19,8 +19,9 @@ from aria.utils.collections import FrozenList
 from aria.utils.formatting import as_raw, safe_repr
 from aria.parser import dsl_specification
 from aria.parser.exceptions import InvalidValueError
-from aria.parser.modeling import (Function, CannotEvaluateFunctionException)
 from aria.parser.validation import Issue
+from aria.modeling.exceptions import CannotEvaluateFunctionException
+from aria.modeling.functions import Function
 
 #
 # Intrinsic
@@ -139,8 +140,8 @@ class GetInput(Function):
             raise CannotEvaluateFunctionException()
         the_input = context.modeling.instance.inputs.get(
             self.input_property_name,
-            context.modeling.model.inputs.get(self.input_property_name))
-        return the_input.value if the_input is not None else None
+            context.modeling.template.inputs.get(self.input_property_name))
+        return as_raw(the_input.value) if the_input is not None else None
 
 @dsl_specification('4.4.2', 'tosca-simple-1.0')
 class GetProperty(Function):
@@ -209,7 +210,7 @@ class GetProperty(Function):
                         found = False
                         break
                 if found:
-                    return value
+                    return as_raw(value)
 
         raise InvalidValueError(
             'function "get_property" could not find "%s" in modelable entity "%s"' \

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
index f340954..4477732 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
@@ -13,248 +13,292 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+"""
+Creates ARIA service template models based on the TOSCA presentation.
+
+Relies on many helper methods in the presentation classes. 
+"""
+
 import re
+from types import FunctionType
+from datetime import datetime
 
-from aria.parser.modeling import (Type, RelationshipType, PolicyType, ServiceModel, NodeTemplate,
+from aria.modeling.models import (Type, ServiceTemplate, NodeTemplate,
                                   RequirementTemplate, RelationshipTemplate, CapabilityTemplate,
                                   GroupTemplate, PolicyTemplate, SubstitutionTemplate,
-                                  MappingTemplate, InterfaceTemplate, OperationTemplate,
-                                  ArtifactTemplate, Metadata, Parameter)
+                                  SubstitutionTemplateMapping, InterfaceTemplate, OperationTemplate,
+                                  ArtifactTemplate, Metadata, Parameter, PluginSpecification)
 
 from ..data_types import coerce_value
-from platform import node
 
-def create_service_model(context): # pylint: disable=too-many-locals,too-many-branches
-    model = ServiceModel()
+
+def create_service_template_model(context): # pylint: disable=too-many-locals,too-many-branches
+    model = ServiceTemplate(created_at=datetime.now(),
+                            main_file_name=str(context.presentation.location))
 
     model.description = context.presentation.get('service_template', 'description', 'value')
 
+    # Metadata
     metadata = context.presentation.get('service_template', 'metadata')
     if metadata is not None:
-        substitution_template = Metadata()
-        substitution_template.values['template_name'] = metadata.template_name
-        substitution_template.values['template_author'] = metadata.template_author
-        substitution_template.values['template_version'] = metadata.template_version
-        custom = metadata.custom
-        if custom:
-            for name, v in custom.iteritems():
-                substitution_template.values[name] = v
-        model.metadata = substitution_template
+        create_metadata_models(context, model, metadata)
 
+    # Types
+    model.node_types = Type(variant='node')
     create_types(context,
-                 context.modeling.node_types,
+                 model.node_types,
                  context.presentation.get('service_template', 'node_types'))
+    model.group_types = Type(variant='group')
     create_types(context,
-                 context.modeling.group_types,
+                 model.group_types,
                  context.presentation.get('service_template', 'group_types'))
+    model.policy_types = Type(variant='policy')
     create_types(context,
-                 context.modeling.capability_types,
-                 context.presentation.get('service_template', 'capability_types'))
-    create_types(context,
-                 context.modeling.relationship_types,
-                 context.presentation.get('service_template', 'relationship_types'),
-                 create_relationship_type)
+                 model.policy_types,
+                 context.presentation.get('service_template', 'policy_types'))
+    model.relationship_types = Type(variant='relationship')
     create_types(context,
-                 context.modeling.policy_types,
-                 context.presentation.get('service_template', 'policy_types'),
-                 create_policy_type)
+                 model.relationship_types,
+                 context.presentation.get('service_template', 'relationship_types'))
+    model.capability_types = Type(variant='capability')
     create_types(context,
-                 context.modeling.artifact_types,
-                 context.presentation.get('service_template', 'artifact_types'))
+                 model.capability_types,
+                 context.presentation.get('service_template', 'capability_types'))
+    model.interface_types = Type(variant='interface')
     create_types(context,
-                 context.modeling.interface_types,
+                 model.interface_types,
                  context.presentation.get('service_template', 'interface_types'))
+    model.artifact_types = Type(variant='artifact')
+    create_types(context,
+                 model.artifact_types,
+                 context.presentation.get('service_template', 'artifact_types'))
 
+    # Topology template
     topology_template = context.presentation.get('service_template', 'topology_template')
     if topology_template is not None:
-        create_properties_from_values(model.inputs, topology_template._get_input_values(context))
-        create_properties_from_values(model.outputs, topology_template._get_output_values(context))
+        create_parameter_models_from_values(model.inputs,
+                                            topology_template._get_input_values(context))
+        create_parameter_models_from_values(model.outputs,
+                                            topology_template._get_output_values(context))
 
+    # Plugin specifications
+    policies = context.presentation.get('service_template', 'topology_template', 'policies')
+    if policies:
+        for policy in policies.itervalues():
+            if model.policy_types.get_descendant(policy.type).role == 'plugin':
+                plugin_specification = create_plugin_specification_model(context, policy)
+                model.plugin_specifications[plugin_specification.name] = plugin_specification
+
+    # Node templates
     node_templates = context.presentation.get('service_template', 'topology_template',
                                               'node_templates')
     if node_templates:
-        for node_template_name, node_template in node_templates.iteritems():
-            model.node_templates[node_template_name] = create_node_template(context, node_template)
+        for node_template in node_templates.itervalues():
+            node_template_model = create_node_template_model(context, model, node_template)
+            model.node_templates[node_template_model.name] = node_template_model
+        for node_template in node_templates.itervalues():
+            fix_node_template_model(context, model, node_template)
 
+    # Group templates
     groups = context.presentation.get('service_template', 'topology_template', 'groups')
     if groups:
-        for group_name, group in groups.iteritems():
-            model.group_templates[group_name] = create_group_template(context, group)
+        for group in groups.itervalues():
+            group_template_model = create_group_template_model(context, model, group)
+            model.group_templates[group_template_model.name] = group_template_model
 
+    # Policy templates
     policies = context.presentation.get('service_template', 'topology_template', 'policies')
     if policies:
-        for policy_name, policy in policies.iteritems():
-            model.policy_templates[policy_name] = create_policy_template(context, policy)
+        for policy in policies.itervalues():
+            policy_template_model = create_policy_template_model(context, model, policy)
+            model.policy_templates[policy_template_model.name] = policy_template_model
 
+    # Substitution template
     substitution_mappings = context.presentation.get('service_template', 'topology_template',
                                                      'substitution_mappings')
-    if substitution_mappings is not None:
-        substitution_template = SubstitutionTemplate(substitution_mappings.node_type)
-        capabilities = substitution_mappings.capabilities
-        if capabilities:
-            for mapped_capability_name, capability in capabilities.iteritems():
-                substitution_template.capability_templates[mapped_capability_name] = \
-                    MappingTemplate(mapped_capability_name, capability.node_template,
-                                    capability.capability)
-        requirements = substitution_mappings.requirements
-        if requirements:
-            for mapped_requirement_name, requirement in requirements.iteritems():
-                substitution_template.requirement_templates[mapped_requirement_name] = \
-                    MappingTemplate(mapped_requirement_name, requirement.node_template,
-                                    requirement.requirement)
-        model.substitution_template = substitution_template
+    if substitution_mappings:
+        model.substitution_template = create_substitution_template_model(context, model,
+                                                                         substitution_mappings)
 
     return model
 
-def create_node_template(context, node_template):
+
+def create_metadata_models(context, service_template, metadata):
+    service_template.meta_data['template_name'] = Metadata(name='template_name',
+                                                           value=metadata.template_name)
+    service_template.meta_data['template_author'] = Metadata(name='template_author',
+                                                             value=metadata.template_author)
+    service_template.meta_data['template_version'] = Metadata(name='template_version',
+                                                              value=metadata.template_version)
+    custom = metadata.custom
+    if custom:
+        for name, value in custom.iteritems():
+            service_template.meta_data[name] = Metadata(name=name,
+                                                        value=value)
+
+
+def create_node_template_model(context, service_template, node_template):
     node_type = node_template._get_type(context)
-    model = NodeTemplate(name=node_template._name, type_name=node_type._name)
+    node_type = service_template.node_types.get_descendant(node_type._name)
+    model = NodeTemplate(name=node_template._name,
+                         type=node_type)
+    
+    model.default_instances = 1
+    model.min_instances = 0
 
     if node_template.description:
         model.description = node_template.description.value
 
-    create_properties_from_values(model.properties, node_template._get_property_values(context))
-    create_interface_templates(context, model.interface_templates,
-                               node_template._get_interfaces(context))
+    create_parameter_models_from_values(model.properties,
+                                        node_template._get_property_values(context))
+    create_interface_template_models(context, service_template, model.interface_templates,
+                                     node_template._get_interfaces(context))
 
     artifacts = node_template._get_artifacts(context)
     if artifacts:
         for artifact_name, artifact in artifacts.iteritems():
-            model.artifact_templates[artifact_name] = create_artifact_template(context, artifact)
-
-    requirements = node_template._get_requirements(context)
-    if requirements:
-        for _, requirement in requirements:
-            model.requirement_templates.append(create_requirement_template(context, requirement))
+            model.artifact_templates[artifact_name] = \
+                create_artifact_template_model(context, service_template, artifact)
 
     capabilities = node_template._get_capabilities(context)
     if capabilities:
         for capability_name, capability in capabilities.iteritems():
-            model.capability_templates[capability_name] = create_capability_template(context,
-                                                                                     capability)
+            model.capability_templates[capability_name] = \
+                create_capability_template_model(context, service_template, capability)
 
-    create_node_filter_constraint_lambdas(context, node_template.node_filter,
-                                          model.target_node_template_constraints)
+    if model.target_node_template_constraints:
+        model.target_node_template_constraints = []
+        create_node_filter_constraint_lambdas(context, node_template.node_filter,
+                                              model.target_node_template_constraints)
 
     return model
 
-def create_interface_template(context, interface):
-    interface_type = interface._get_type(context)
-    model = InterfaceTemplate(name=interface._name, type_name=interface_type._name)
 
-    if interface_type.description:
-        model.description = interface_type.description.value
-
-    inputs = interface.inputs
-    if inputs:
-        for input_name, the_input in inputs.iteritems():
-            model.inputs[input_name] = Parameter(the_input.value.type, the_input.value.value,
-                                                 the_input.value.description)
-
-    operations = interface.operations
-    if operations:
-        for operation_name, operation in operations.iteritems():
-            model.operation_templates[operation_name] = create_operation_template(context,
-                                                                                  operation)
+def fix_node_template_model(context, service_template, node_template):
+    # Requirements have to be created after all node templates have been created, because
+    # requirements might reference another node template
+    model = service_template.node_templates[node_template._name]
+    requirements = node_template._get_requirements(context)
+    if requirements:
+        for _, requirement in requirements:
+            model.requirement_templates.append(create_requirement_template_model(context,
+                                                                                 service_template,
+                                                                                 requirement))
 
-    return model if model.operation_templates else None
 
-def create_operation_template(context, operation): # pylint: disable=unused-argument
-    model = OperationTemplate(name=operation._name)
+def create_group_template_model(context, service_template, group):
+    group_type = group._get_type(context)
+    group_type = service_template.group_types.get_descendant(group_type._name)
+    model = GroupTemplate(name=group._name,
+                          type=group_type)
 
-    if operation.description:
-        model.description = operation.description.value
+    if group.description:
+        model.description = group.description.value
 
-    implementation = operation.implementation
-    if implementation is not None:
-        model.implementation = implementation.primary
-        dependencies = implementation.dependencies
-        if dependencies is not None:
-            model.dependencies = dependencies
+    create_parameter_models_from_values(model.properties, group._get_property_values(context))
+    create_interface_template_models(context, service_template, model.interface_templates,
+                                     group._get_interfaces(context))
 
-    inputs = operation.inputs
-    if inputs:
-        for input_name, the_input in inputs.iteritems():
-            model.inputs[input_name] = Parameter(the_input.value.type, the_input.value.value,
-                                                 the_input.value.description)
+    members = group.members
+    if members:
+        for member in members:
+            node_template = service_template.node_templates[member]
+            assert node_template
+            model.node_templates.append(node_template)
 
     return model
 
-def create_artifact_template(context, artifact):
-    model = ArtifactTemplate(name=artifact._name, type_name=artifact.type,
-                             source_path=artifact.file)
 
-    if artifact.description:
-        model.description = artifact.description.value
+def create_policy_template_model(context, service_template, policy):
+    policy_type = policy._get_type(context)
+    policy_type = service_template.policy_types.get_descendant(policy_type._name)
+    model = PolicyTemplate(name=policy._name,
+                           type=policy_type)
 
-    model.target_path = artifact.deploy_path
+    if policy.description:
+        model.description = policy.description.value
 
-    repository = artifact._get_repository(context)
-    if repository is not None:
-        model.repository_url = repository.url
-        credential = repository._get_credential(context)
-        if credential:
-            for k, v in credential.iteritems():
-                model.repository_credential[k] = v
+    create_parameter_models_from_values(model.properties, policy._get_property_values(context))
 
-    create_properties_from_values(model.properties, artifact._get_property_values(context))
+    node_templates, groups = policy._get_targets(context)
+    if node_templates:
+        for target in node_templates:
+            node_template = service_template.node_templates[target._name]
+            assert node_template
+            model.node_templates.append(node_template)
+    if groups:
+        for target in groups:
+            group_template = service_template.group_templates[target._name]
+            assert group_template
+            model.group_templates.append(group_template)
 
     return model
 
-def create_requirement_template(context, requirement):
+
+def create_requirement_template_model(context, service_template, requirement):
     model = {'name': requirement._name}
 
     node, node_variant = requirement._get_node(context)
     if node is not None:
         if node_variant == 'node_type':
-            model['target_node_type_name'] = node._name
+            node_type = service_template.node_types.get_descendant(node._name)
+            model['target_node_type'] = node_type
         else:
-            model['target_node_template_name'] = node._name
+            node_template = service_template.node_templates[node._name]
+            model['target_node_template'] = node_template
 
     capability, capability_variant = requirement._get_capability(context)
     if capability is not None:
         if capability_variant == 'capability_type':
-            model['target_capability_type_name'] = capability._name
+            capability_type = \
+                service_template.capability_types.get_descendant(capability._name)
+            model['target_capability_type'] = capability_type
         else:
             model['target_capability_name'] = capability._name
 
     model = RequirementTemplate(**model)
 
-    create_node_filter_constraint_lambdas(context, requirement.node_filter,
-                                          model.target_node_template_constraints)
+    if model.target_node_template_constraints:
+        model.target_node_template_constraints = []
+        create_node_filter_constraint_lambdas(context, requirement.node_filter,
+                                              model.target_node_template_constraints)
 
     relationship = requirement.relationship
     if relationship is not None:
-        model.relationship_template = create_relationship_template(context, relationship)
+        model.relationship_template = \
+            create_relationship_template_model(context, service_template, relationship)
+        model.relationship_template.name = requirement._name
 
     return model
 
-def create_relationship_type(context, relationship_type): # pylint: disable=unused-argument
-    return RelationshipType(relationship_type._name)
-
-def create_policy_type(context, policy_type): # pylint: disable=unused-argument
-    return PolicyType(policy_type._name)
 
-def create_relationship_template(context, relationship):
+def create_relationship_template_model(context, service_template, relationship):
     relationship_type, relationship_type_variant = relationship._get_type(context)
     if relationship_type_variant == 'relationship_type':
-        model = RelationshipTemplate(type_name=relationship_type._name)
+        relationship_type = service_template.relationship_types.get_descendant(
+            relationship_type._name)
+        model = RelationshipTemplate(type=relationship_type)
     else:
         relationship_template = relationship_type
         relationship_type = relationship_template._get_type(context)
-        model = RelationshipTemplate(type_name=relationship_type._name,
-                                     template_name=relationship_template._name)
+        relationship_type = service_template.relationship_types.get_descendant(
+            relationship_type._name)
+        model = RelationshipTemplate(type=relationship_type)
         if relationship_template.description:
             model.description = relationship_template.description.value
 
-    create_properties_from_assignments(model.properties, relationship.properties)
-    create_interface_templates(context, model.source_interface_templates, relationship.interfaces)
+    create_parameter_models_from_assignments(model.properties, relationship.properties)
+    create_interface_template_models(context, service_template, model.interface_templates,
+                                     relationship.interfaces)
 
     return model
 
-def create_capability_template(context, capability):
+
+def create_capability_template_model(context, service_template, capability):
     capability_type = capability._get_type(context)
-    model = CapabilityTemplate(name=capability._name, type_name=capability_type._name)
+    capability_type = service_template.capability_types.get_descendant(capability_type._name)
+    model = CapabilityTemplate(name=capability._name,
+                               type=capability_type)
 
     capability_definition = capability._get_definition(context)
     if capability_definition.description:
@@ -267,51 +311,154 @@ def create_capability_template(context, capability):
 
     valid_source_types = capability_definition.valid_source_types
     if valid_source_types:
-        model.valid_source_node_type_names = valid_source_types
+        for valid_source_type in valid_source_types:
+            # TODO: handle shortcut type names
+            node_type = service_template.node_types.get_descendant(valid_source_type)
+            model.valid_source_node_types.append(node_type)
 
-    create_properties_from_assignments(model.properties, capability.properties)
+    create_parameter_models_from_assignments(model.properties, capability.properties)
 
     return model
 
-def create_group_template(context, group):
-    group_type = group._get_type(context)
-    model = GroupTemplate(name=group._name, type_name=group_type._name)
 
-    if group.description:
-        model.description = group.description.value
+def create_interface_template_model(context, service_template, interface):
+    interface_type = interface._get_type(context)
+    interface_type = service_template.interface_types.get_descendant(interface_type._name)
+    model = InterfaceTemplate(name=interface._name,
+                              type=interface_type)
 
-    create_properties_from_values(model.properties, group._get_property_values(context))
-    create_interface_templates(context, model.interface_templates, group._get_interfaces(context))
+    if interface_type.description:
+        model.description = interface_type.description
 
-    members = group.members
-    if members:
-        for member in members:
-            model.member_node_template_names.append(member)
+    inputs = interface.inputs
+    if inputs:
+        for input_name, the_input in inputs.iteritems():
+            model.inputs[input_name] = Parameter(name=input_name,
+                                                 type_name=the_input.value.type,
+                                                 value=the_input.value.value,
+                                                 description=the_input.value.description)
+
+    operations = interface.operations
+    if operations:
+        for operation_name, operation in operations.iteritems():
+            model.operation_templates[operation_name] = \
+                create_operation_template_model(context, service_template, operation)
+
+    return model if model.operation_templates else None
+
+
+def create_operation_template_model(context, service_template, operation): # pylint: disable=unused-argument
+    model = OperationTemplate(name=operation._name)
+
+    if operation.description:
+        model.description = operation.description.value
+
+    implementation = operation.implementation
+    if (implementation is not None) and operation.implementation.primary:
+        model.plugin_specification, model.implementation = \
+            parse_implementation_string(context, service_template, operation.implementation.primary)
+
+        dependencies = implementation.dependencies
+        if dependencies is not None:
+            model.dependencies = dependencies
+
+    inputs = operation.inputs
+    if inputs:
+        for input_name, the_input in inputs.iteritems():
+            model.inputs[input_name] = Parameter(name=input_name,
+                                                 type_name=the_input.value.type,
+                                                 value=the_input.value.value,
+                                                 description=the_input.value.description)
 
     return model
 
-def create_policy_template(context, policy):
-    policy_type = policy._get_type(context)
-    model = PolicyTemplate(name=policy._name, type_name=policy_type._name)
 
-    if policy.description:
-        model.description = policy.description.value
+def create_artifact_template_model(context, service_template, artifact):
+    artifact_type = artifact._get_type(context)
+    artifact_type = service_template.artifact_types.get_descendant(artifact_type._name)
+    model = ArtifactTemplate(name=artifact._name,
+                             type=artifact_type,
+                             source_path=artifact.file)
+
+    if artifact.description:
+        model.description = artifact.description.value
 
-    create_properties_from_values(model.properties, policy._get_property_values(context))
+    model.target_path = artifact.deploy_path
 
-    node_templates, groups = policy._get_targets(context)
-    for node_template in node_templates:
-        model.target_node_template_names.append(node_template._name)
-    for group in groups:
-        model.target_group_template_names.append(group._name)
+    repository = artifact._get_repository(context)
+    if repository is not None:
+        model.repository_url = repository.url
+        credential = repository._get_credential(context)
+        if credential:
+            model.repository_credential = {}
+            for k, v in credential.iteritems():
+                model.repository_credential[k] = v
+
+    create_parameter_models_from_values(model.properties, artifact._get_property_values(context))
+
+    return model
+
+
+def create_substitution_template_model(context, service_template, substitution_mappings):
+    node_type = service_template.node_types.get_descendant(substitution_mappings.node_type)
+    model = SubstitutionTemplate(node_type=node_type)
+
+    capabilities = substitution_mappings.capabilities
+    if capabilities:
+        for mapped_capability_name, capability in capabilities.iteritems():
+            name = 'capability.' + mapped_capability_name
+            node_template_model = service_template.node_templates[capability.node_template]
+            capability_template_model = \
+                node_template_model.capability_templates[capability.capability]
+            model.mappings[name] = \
+                SubstitutionTemplateMapping(name=name,
+                                            node_template=node_template_model,
+                                            capability_template=capability_template_model)
+
+    requirements = substitution_mappings.requirements
+    if requirements:
+        for mapped_requirement_name, requirement in requirements.iteritems():
+            name = 'requirement.' + mapped_requirement_name
+            node_template_model = service_template.node_templates[requirement.node_template]
+            requirement_template_model = None
+            for a_model in node_template_model.requirement_templates:
+                if a_model.name == requirement.requirement:
+                    requirement_template_model = a_model
+                    break
+            model.mappings[name] = \
+                SubstitutionTemplateMapping(name=name,
+                                            node_template=node_template_model,
+                                            requirement_template=requirement_template_model)
 
     return model
 
+
+def create_plugin_specification_model(context, policy):
+    properties = policy.properties
+
+    def get(name):
+        prop = properties.get(name)
+        return prop.value if prop is not None else None
+
+    model = PluginSpecification(name=policy._name,
+                                archive_name=get('archive_name') or '',
+                                distribution=get('distribution'),
+                                distribution_release=get('distribution_release'),
+                                distribution_version=get('distribution_version'),
+                                package_name=get('package_name') or '',
+                                package_source=get('package_source'),
+                                package_version=get('package_version'),
+                                supported_platform=get('supported_platform'),
+                                supported_py_versions=get('supported_py_versions'))
+
+    return model
+
+
 #
 # Utils
 #
 
-def create_types(context, root, types, normalize=None):
+def create_types(context, root, types):
     if types is None:
         return
 
@@ -325,39 +472,49 @@ def create_types(context, root, types, normalize=None):
         for name, the_type in types.iteritems():
             if root.get_descendant(name) is None:
                 parent_type = the_type._get_parent(context)
-                if normalize:
-                    model = normalize(context, the_type)
-                else:
-                    model = Type(the_type._name)
+                model = Type(name=the_type._name,
+                             role=the_type._get_extension('role'))
                 if the_type.description:
                     model.description = the_type.description.value
-                model.role = the_type._get_extension('role')
                 if parent_type is None:
+                    model.parent = root
+                    model.variant = root.variant
                     root.children.append(model)
                 else:
                     container = root.get_descendant(parent_type._name)
                     if container is not None:
+                        model.parent = container
+                        model.variant = container.variant
                         container.children.append(model)
 
-def create_properties_from_values(properties, source_properties):
+
+def create_parameter_models_from_values(properties, source_properties):
     if source_properties:
         for property_name, prop in source_properties.iteritems():
-            properties[property_name] = Parameter(prop.type, prop.value, prop.description)
+            properties[property_name] = Parameter(name=property_name,
+                                                  type_name=prop.type,
+                                                  value=prop.value,
+                                                  description=prop.description)
 
-def create_properties_from_assignments(properties, source_properties):
+
+def create_parameter_models_from_assignments(properties, source_properties):
     if source_properties:
         for property_name, prop in source_properties.iteritems():
-            properties[property_name] = Parameter(prop.value.type, prop.value.value,
-                                                  prop.value.description)
+            properties[property_name] = Parameter(name=property_name,
+                                                  type_name=prop.value.type,
+                                                  value=prop.value.value,
+                                                  description=prop.value.description)
+
 
-def create_interface_templates(context, interfaces, source_interfaces):
+def create_interface_template_models(context, service_template, interfaces, source_interfaces):
     if source_interfaces:
         for interface_name, interface in source_interfaces.iteritems():
-            interface = create_interface_template(context, interface)
+            interface = create_interface_template_model(context, service_template, interface)
             if interface is not None:
                 interfaces[interface_name] = interface
 
-def create_node_filter_constraint_lambdas(context, node_filter, node_type_constraints):
+
+def create_node_filter_constraint_lambdas(context, node_filter, target_node_template_constraints):
     if node_filter is None:
         return
 
@@ -367,7 +524,7 @@ def create_node_filter_constraint_lambdas(context, node_filter, node_type_constr
             func = create_constraint_clause_lambda(context, node_filter, constraint_clause,
                                                    property_name, None)
             if func is not None:
-                node_type_constraints.append(func)
+                target_node_template_constraints.append(func)
 
     capabilities = node_filter.capabilities
     if capabilities is not None:
@@ -378,7 +535,8 @@ def create_node_filter_constraint_lambdas(context, node_filter, node_type_constr
                     func = create_constraint_clause_lambda(context, node_filter, constraint_clause,
                                                            property_name, capability_name)
                     if func is not None:
-                        node_type_constraints.append(func)
+                        target_node_template_constraints.append(func)
+
 
 def create_constraint_clause_lambda(context, node_filter, constraint_clause, property_name, # pylint: disable=too-many-return-statements
                                     capability_name):
@@ -497,3 +655,23 @@ def create_constraint_clause_lambda(context, node_filter, constraint_clause, pro
         return pattern
 
     return None
+
+
+def parse_implementation_string(context, service_template, implementation):
+    if not implementation:
+        return None, ''
+
+    index = implementation.find('>')
+    if index == -1:
+        return None, implementation
+    plugin_name = implementation[:index].strip()
+
+    if plugin_name == 'execution':
+        plugin_specification = None
+    else:
+        plugin_specification = service_template.plugin_specifications.get(plugin_name)
+        if plugin_specification is None:
+            raise ValueError('unknown plugin: "{0}"'.format(plugin_name))
+
+    implementation = implementation[index+1:].strip()
+    return plugin_specification, implementation
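
For reference, parse_implementation_string() above splits an implementation string of
the form '<plugin> > <path>' into a plugin-specification lookup plus the remaining
operation path, with 'execution' reserved to mean "no plugin". A minimal behavioral
sketch follows; it uses a stand-in object instead of a real ServiceTemplate model, the
plugin name and paths are illustrative only, and the unused context argument is passed
as None:

    class _FakeServiceTemplate(object):
        # stand-in: only plugin_specifications is consulted by the helper
        plugin_specifications = {'my_plugin': object()}

    fake = _FakeServiceTemplate()

    # no '>' prefix: no plugin specification, string returned unchanged
    assert parse_implementation_string(None, fake, 'scripts/install.sh') == \
        (None, 'scripts/install.sh')

    # reserved 'execution' prefix: also no plugin specification
    assert parse_implementation_string(None, fake, 'execution > scripts/install.sh') == \
        (None, 'scripts/install.sh')

    # named plugin: resolved against service_template.plugin_specifications;
    # an unknown plugin name would raise ValueError
    spec, path = parse_implementation_string(None, fake, 'my_plugin > scripts/install.sh')
    assert spec is fake.plugin_specifications['my_plugin'] and path == 'scripts/install.sh'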

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/extensions/aria_extension_tosca/simple_v1_0/presenter.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/presenter.py b/extensions/aria_extension_tosca/simple_v1_0/presenter.py
index 0809014..eee5769 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/presenter.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/presenter.py
@@ -19,7 +19,7 @@ from aria.parser.presentation import Presenter
 
 from .functions import (Concat, Token, GetInput, GetProperty, GetAttribute, GetOperationOutput,
                         GetNodesOfType, GetArtifact)
-from .modeling import create_service_model
+from .modeling import create_service_template_model
 from .templates import ServiceTemplate
 
 class ToscaSimplePresenter1_0(Presenter): # pylint: disable=invalid-name
@@ -75,5 +75,5 @@ class ToscaSimplePresenter1_0(Presenter): # pylint: disable=invalid-name
         return FrozenList(import_locations) if import_locations else EMPTY_READ_ONLY_LIST
 
     @cachedmethod
-    def _get_service_model(self, context): # pylint: disable=no-self-use
-        return create_service_model(context)
+    def _get_model(self, context): # pylint: disable=no-self-use
+        return create_service_template_model(context)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/end2end/test_orchestrator.py
----------------------------------------------------------------------
diff --git a/tests/end2end/test_orchestrator.py b/tests/end2end/test_orchestrator.py
index f930577..7b8dc97 100644
--- a/tests/end2end/test_orchestrator.py
+++ b/tests/end2end/test_orchestrator.py
@@ -17,9 +17,9 @@ import sys
 
 from aria.orchestrator.runner import Runner
 from aria.orchestrator.workflows.builtin import BUILTIN_WORKFLOWS
-from aria.parser.modeling.storage import initialize_storage
 from aria.utils.imports import import_fullname
 from aria.utils.collections import OrderedDict
+from aria.cli.dry import convert_to_dry
 
 from tests.parser.service_templates import consume_node_cellar
 
@@ -38,23 +38,25 @@ def test_custom():
 def _workflow(workflow_name):
     context, _ = consume_node_cellar()
 
+    convert_to_dry(context.modeling.instance)
+
     # TODO: this logic will eventually stabilize and be part of the ARIA API,
     # likely somewhere in aria.orchestrator.workflows
     if workflow_name in BUILTIN_WORKFLOWS:
         workflow_fn = import_fullname('aria.orchestrator.workflows.builtin.' + workflow_name)
         inputs = {}
     else:
-        policy = context.modeling.instance.policies[workflow_name]
-        sys.path.append(policy.properties['implementation'].value)
-
-        workflow_fn = import_fullname(policy.properties['function'].value)
+        workflow = context.modeling.instance.policies[workflow_name]
+        sys.path.append(workflow.properties['implementation'].value)
+        workflow_fn = import_fullname(workflow.properties['function'].value)
         inputs = OrderedDict([
-            (k, v.value) for k, v in policy.properties.iteritems()
+            (k, v.value) for k, v in workflow.properties.iteritems()
             if k not in WORKFLOW_POLICY_INTERNAL_PROPERTIES
         ])
 
     def _initialize_storage(model_storage):
-        initialize_storage(context, model_storage, 1)
+        context.modeling.store(model_storage)
 
-    runner = Runner(workflow_name, workflow_fn, inputs, _initialize_storage, 1)
+    runner = Runner(workflow_name, workflow_fn, inputs, _initialize_storage,
+                    lambda: context.modeling.instance.id)
     runner.run()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/end2end/test_tosca_simple_v1_0.py
----------------------------------------------------------------------
diff --git a/tests/end2end/test_tosca_simple_v1_0.py b/tests/end2end/test_tosca_simple_v1_0.py
index f2a41ce..4658fc3 100644
--- a/tests/end2end/test_tosca_simple_v1_0.py
+++ b/tests/end2end/test_tosca_simple_v1_0.py
@@ -71,7 +71,7 @@ def test_use_case_network_4():
 
 
 def test_use_case_webserver_dbms_1():
-    consume_use_case('webserver-dbms-1', 'model')
+    consume_use_case('webserver-dbms-1', 'template')
 
 
 def test_use_case_webserver_dbms_2():
@@ -83,7 +83,7 @@ def test_use_case_multi_tier_1():
 
 
 def test_use_case_container_1():
-    consume_use_case('container-1', 'model')
+    consume_use_case('container-1', 'template')
 
 
 # NodeCellar
@@ -101,7 +101,7 @@ def test_node_cellar_presentation():
 
 
 def test_node_cellar_model():
-    consume_node_cellar('model')
+    consume_node_cellar('template')
 
 
 def test_node_cellar_types():

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/mock/context.py
----------------------------------------------------------------------
diff --git a/tests/mock/context.py b/tests/mock/context.py
index cf1a32d..3de3133 100644
--- a/tests/mock/context.py
+++ b/tests/mock/context.py
@@ -38,13 +38,13 @@ def simple(tmpdir, inmemory=False, context_kwargs=None):
         api_kwargs=dict(directory=os.path.join(tmpdir, 'resources'))
     )
 
-    service_instance_id = create_simple_topology_two_nodes(model_storage)
+    service_id = create_simple_topology_two_nodes(model_storage)
 
     final_kwargs = dict(
         name='simple_context',
         model_storage=model_storage,
         resource_storage=resource_storage,
-        service_instance_id=service_instance_id,
+        service_id=service_id,
         workflow_name=models.WORKFLOW_NAME,
         task_max_attempts=models.TASK_MAX_ATTEMPTS,
         task_retry_interval=models.TASK_RETRY_INTERVAL

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/mock/models.py
----------------------------------------------------------------------
diff --git a/tests/mock/models.py b/tests/mock/models.py
index 301fc01..bf43a75 100644
--- a/tests/mock/models.py
+++ b/tests/mock/models.py
@@ -15,132 +15,185 @@
 
 from datetime import datetime
 
-from aria.storage.modeling import model
+from aria.modeling import models
 from . import operations
 
-DEPLOYMENT_NAME = 'test_deployment_id'
-BLUEPRINT_NAME = 'test_blueprint_id'
-WORKFLOW_NAME = 'test_workflow_id'
-EXECUTION_NAME = 'test_execution_id'
+SERVICE_NAME = 'test_service_name'
+SERVICE_TEMPLATE_NAME = 'test_service_template_name'
+WORKFLOW_NAME = 'test_workflow_name'
+EXECUTION_NAME = 'test_execution_name'
 TASK_RETRY_INTERVAL = 1
 TASK_MAX_ATTEMPTS = 1
 
+DEPENDENCY_NODE_TEMPLATE_NAME = 'dependency_node_template'
 DEPENDENCY_NODE_NAME = 'dependency_node'
-DEPENDENCY_NODE_INSTANCE_NAME = 'dependency_node_instance'
+DEPENDENT_NODE_TEMPLATE_NAME = 'dependent_node_template'
 DEPENDENT_NODE_NAME = 'dependent_node'
-DEPENDENT_NODE_INSTANCE_NAME = 'dependent_node_instance'
-RELATIONSHIP_NAME = 'relationship'
-RELATIONSHIP_INSTANCE_NAME = 'relationship_instance'
 
 
-def get_dependency_node(deployment):
-    return model.NodeTemplate(
-        name=DEPENDENCY_NODE_NAME,
-        type_name='test_node_type',
-        type_hierarchy=[],
-        default_instances=1,
-        min_instances=1,
-        max_instances=1,
-        service_template=deployment.service_template,
+def create_service_template():
+    now = datetime.now()
+    return models.ServiceTemplate(
+        name=SERVICE_TEMPLATE_NAME,
+        description=None,
+        created_at=now,
+        updated_at=now,
+        main_file_name='main_file_name',
+        node_types=models.Type(variant='node', name='test_node_type'),
+        group_types=models.Type(variant='group', name='test_group_type'),
+        policy_types=models.Type(variant='policy', name='test_policy_type'),
+        relationship_types=models.Type(variant='relationship', name='test_relationship_type'),
+        capability_types=models.Type(variant='capability', name='test_capability_type'),
+        artifact_types=models.Type(variant='artifact', name='test_artifact_type'),
+        interface_types=models.Type(variant='interface', name='test_interface_type')
     )
 
 
-def get_dependency_node_instance(dependency_node, deployment):
-    return model.Node(
-        name=DEPENDENCY_NODE_INSTANCE_NAME,
-        service_instance=deployment,
-        runtime_properties={'ip': '1.1.1.1'},
-        node_template=dependency_node,
-        state='',
-        scaling_groups=[]
+def create_service(service_template):
+    now = datetime.utcnow()
+    return models.Service(
+        name=SERVICE_NAME,
+        service_template=service_template,
+        description='',
+        created_at=now,
+        updated_at=now,
+        permalink='',
+        scaling_groups={},
     )
 
 
-def get_relationship(target):
-    requirement_template = model.RequirementTemplate(target_node_template_name=target.name)
-    capability_template = model.CapabilityTemplate()
-
-    return requirement_template, capability_template
+def create_dependency_node_template(service_template):
+    node_type = service_template.node_types.get_descendant('test_node_type')
+    capability_type = service_template.capability_types.get_descendant('test_capability_type')
 
+    capability_template = models.CapabilityTemplate(
+        name='capability',
+        type=capability_type
+    )
 
-def get_relationship_instance(source_instance, target_instance):
-    return model.Relationship(
-        target_node=target_instance,
-        source_node=source_instance,
+    node_template = models.NodeTemplate(
+        name=DEPENDENCY_NODE_TEMPLATE_NAME,
+        type=node_type,
+        capability_templates=_dictify(capability_template),
+        default_instances=1,
+        min_instances=1,
+        max_instances=1,
+        service_template=service_template
     )
+    service_template.node_templates[node_template.name] = node_template
+    return node_template
 
 
-def get_dependent_node(deployment, requirement_template, capability_template):
-    operation_templates = [model.OperationTemplate(implementation=op,
-                                                   service_template=deployment.service_template)
-                           for op in operations.NODE_OPERATIONS]
-    interface_template = model.InterfaceTemplate(operation_templates=operation_templates)
+def create_dependent_node_template(service_template, dependency_node_template):
+    the_type = service_template.node_types.get_descendant('test_node_type')
 
-    return model.NodeTemplate(
-        name=DEPENDENT_NODE_NAME,
-        type_name='test_node_type',
-        type_hierarchy=[],
+    operation_templates = dict((op, models.OperationTemplate(
+        name=op,
+        implementation='test'))
+                               for _, op in operations.NODE_OPERATIONS)
+    interface_template = models.InterfaceTemplate(
+        type=service_template.interface_types.get_descendant('test_interface_type'),
+        operation_templates=operation_templates)
+
+    requirement_template = models.RequirementTemplate(
+        name='requirement',
+        target_node_template=dependency_node_template
+    )
+
+    node_template = models.NodeTemplate(
+        name=DEPENDENT_NODE_TEMPLATE_NAME,
+        type=the_type,
         default_instances=1,
         min_instances=1,
         max_instances=1,
-        service_template=deployment.service_template,
-        interface_templates=[interface_template],
+        interface_templates=_dictify(interface_template),
         requirement_templates=[requirement_template],
-        capability_templates=[capability_template],
+        service_template=service_template
+    )
+    service_template.node_templates[node_template.name] = node_template
+    return node_template
+
+
+def create_dependency_node(dependency_node_template, service):
+    node = models.Node(
+        name=DEPENDENCY_NODE_NAME,
+        type=dependency_node_template.type,
+        runtime_properties={'ip': '1.1.1.1'},
+        version=None,
+        node_template=dependency_node_template,
+        state='',
+        scaling_groups=[],
+        service=service
     )
+    service.nodes[node.name] = node
+    return node
 
 
-def get_dependent_node_instance(dependent_node, deployment):
-    return model.Node(
-        name=DEPENDENT_NODE_INSTANCE_NAME,
-        service_instance=deployment,
+def create_dependent_node(dependent_node_template, service):
+    node = models.Node(
+        name=DEPENDENT_NODE_NAME,
+        type=dependent_node_template.type,
         runtime_properties={},
-        node_template=dependent_node,
+        version=None,
+        node_template=dependent_node_template,
         state='',
         scaling_groups=[],
+        service=service
     )
+    service.nodes[node.name] = node
+    return node
 
 
-def get_blueprint():
-    now = datetime.now()
-    return model.ServiceTemplate(
-        plan={},
-        name=BLUEPRINT_NAME,
-        description=None,
-        created_at=now,
-        updated_at=now,
-        main_file_name='main_file_name'
+def create_relationship(source, target):
+    return models.Relationship(
+        source_node=source,
+        target_node=target
     )
 
 
-def get_execution(deployment):
-    return model.Execution(
-        service_instance=deployment,
-        status=model.Execution.STARTED,
-        workflow_name=WORKFLOW_NAME,
-        started_at=datetime.utcnow(),
-        parameters=None
+def create_interface_template(service_template, interface_name, operation_name,
+                              operation_kwargs=None, interface_kwargs=None):
+    the_type = service_template.interface_types.get_descendant('test_interface_type')
+    operation_template = models.OperationTemplate(
+        name=operation_name,
+        **(operation_kwargs or {})
+    )
+    return models.InterfaceTemplate(
+        type=the_type,
+        operation_templates=_dictify(operation_template),
+        name=interface_name,
+        **(interface_kwargs or {})
     )
 
 
-def get_deployment(blueprint):
-    now = datetime.utcnow()
-    return model.ServiceInstance(
-        name=DEPLOYMENT_NAME,
-        service_template=blueprint,
-        description='',
-        created_at=now,
-        updated_at=now,
-        workflows={},
-        permalink='',
-        policy_triggers={},
-        policy_types={},
-        scaling_groups={},
+def create_interface(service, interface_name, operation_name, operation_kwargs=None,
+                     interface_kwargs=None):
+    the_type = service.service_template.interface_types.get_descendant('test_interface_type')
+    operation = models.Operation(
+        name=operation_name,
+        **(operation_kwargs or {})
+    )
+    return models.Interface(
+        type=the_type,
+        operations=_dictify(operation),
+        name=interface_name,
+        **(interface_kwargs or {})
+    )
+
+
+def create_execution(service):
+    return models.Execution(
+        service=service,
+        status=models.Execution.STARTED,
+        workflow_name=WORKFLOW_NAME,
+        started_at=datetime.utcnow(),
+        parameters=None
     )
 
 
-def get_plugin(package_name='package', package_version='0.1'):
-    return model.Plugin(
+def create_plugin(package_name='package', package_version='0.1'):
+    return models.Plugin(
+        name='test_plugin',
         archive_name='archive_name',
         distribution='distribution',
         distribution_release='dist_release',
@@ -155,26 +208,20 @@ def get_plugin(package_name='package', package_version='0.1'):
     )
 
 
-def get_interface_template(operation_name, operation_kwargs=None, interface_kwargs=None):
-    operation_template = model.OperationTemplate(
-        name=operation_name,
-        **(operation_kwargs or {})
-
-    )
-    return model.InterfaceTemplate(
-        operation_templates=[operation_template],
-        name=operation_name.rsplit('.', 1)[0],
-        **(interface_kwargs or {})
+def create_plugin_specification(package_name='package', package_version='0.1'):
+    return models.PluginSpecification(
+        name='test_plugin',
+        archive_name='archive_name',
+        distribution='distribution',
+        distribution_release='dist_release',
+        distribution_version='dist_version',
+        package_name=package_name,
+        package_source='source',
+        package_version=package_version,
+        supported_platform='any',
+        supported_py_versions=['python27']
     )
 
 
-def get_interface(operation_name,
-                  operation_kwargs=None,
-                  interface_kwargs=None,
-                  edge=None):
-    operation = model.Operation(name=operation_name, **(operation_kwargs or {}))
-    interface_name = operation_name.rsplit('.', 1)[0]
-    return model.Interface(operations=[operation],
-                           name=interface_name,
-                           edge=edge,
-                           **(interface_kwargs or {}))
+def _dictify(item):
+    return dict(((item.name, item),))
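
Taken together, the rewritten factories above are intended to be composed roughly as
follows (this mirrors their use in tests/mock/topology.py later in this patch; it is a
sketch, not additional test code):

    from tests.mock import models

    service_template = models.create_service_template()
    service = models.create_service(service_template)

    dependency_template = models.create_dependency_node_template(service_template)
    dependent_template = models.create_dependent_node_template(service_template,
                                                               dependency_template)

    dependency_node = models.create_dependency_node(dependency_template, service)
    dependent_node = models.create_dependent_node(dependent_template, service)

    # the dependent node points at the dependency node, as in the two-node topology
    dependent_node.outbound_relationships.append(
        models.create_relationship(source=dependent_node, target=dependency_node))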

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/mock/operations.py
----------------------------------------------------------------------
diff --git a/tests/mock/operations.py b/tests/mock/operations.py
index 5495f6a..c752a8e 100644
--- a/tests/mock/operations.py
+++ b/tests/mock/operations.py
@@ -14,46 +14,46 @@
 # limitations under the License.
 
 NODE_OPERATIONS_INSTALL = [
-    'tosca.interfaces.node.lifecycle.Standard.create',
-    'tosca.interfaces.node.lifecycle.Standard.configure',
-    'tosca.interfaces.node.lifecycle.Standard.start',
+    ('Standard', 'create'),
+    ('Standard', 'configure'),
+    ('Standard', 'start')
 ]
 
 NODE_OPERATIONS_UNINSTALL = [
-    'tosca.interfaces.node.lifecycle.Standard.stop',
-    'tosca.interfaces.node.lifecycle.Standard.delete',
+    ('Standard', 'stop'),
+    ('Standard', 'delete')
 ]
 
 NODE_OPERATIONS = NODE_OPERATIONS_INSTALL + NODE_OPERATIONS_UNINSTALL
 
 RELATIONSHIP_OPERATIONS_INSTALL = [
-    'tosca.interfaces.relationship.Configure.pre_configure_source',
-    'tosca.interfaces.relationship.Configure.pre_configure_target',
-    'tosca.interfaces.relationship.Configure.add_source',
-    'tosca.interfaces.relationship.Configure.add_target',
+    ('Configure', 'pre_configure_source'),
+    ('Configure', 'pre_configure_target'),
+    ('Configure', 'add_source'),
+    ('Configure', 'add_target')
 ]
 
 RELATIONSHIP_OPERATIONS_UNINSTALL = [
-    'tosca.interfaces.relationship.Configure.remove_target',
-    'tosca.interfaces.relationship.Configure.target_changed',
+    ('Configure', 'remove_target'),
+    ('Configure', 'target_changed')
 ]
 
 RELATIONSHIP_OPERATIONS = RELATIONSHIP_OPERATIONS_INSTALL + RELATIONSHIP_OPERATIONS_UNINSTALL
 
 OPERATIONS_INSTALL = [
-    'tosca.interfaces.node.lifecycle.Standard.create',
-    'tosca.interfaces.relationship.Configure.pre_configure_source',
-    'tosca.interfaces.relationship.Configure.pre_configure_target',
-    'tosca.interfaces.node.lifecycle.Standard.configure',
-    'tosca.interfaces.node.lifecycle.Standard.start',
-    'tosca.interfaces.relationship.Configure.add_source',
-    'tosca.interfaces.relationship.Configure.add_target',
-    'tosca.interfaces.relationship.Configure.target_changed',
+    ('Standard', 'create'),
+    ('Configure', 'pre_configure_source'),
+    ('Configure', 'pre_configure_target'),
+    ('Standard', 'configure'),
+    ('Standard', 'start'),
+    ('Configure', 'add_source'),
+    ('Configure', 'add_target'),
+    ('Configure', 'target_changed')
 ]
 
 OPERATIONS_UNINSTALL = [
-    'tosca.interfaces.relationship.Configure.remove_target',
-    'tosca.interfaces.relationship.Configure.target_changed',
-    'tosca.interfaces.node.lifecycle.Standard.stop',
-    'tosca.interfaces.node.lifecycle.Standard.delete',
+    ('Configure', 'remove_target'),
+    ('Configure', 'target_changed'),
+    ('Standard', 'stop'),
+    ('Standard', 'delete')
 ]

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/mock/topology.py
----------------------------------------------------------------------
diff --git a/tests/mock/topology.py b/tests/mock/topology.py
index d3e8b7b..7ccc885 100644
--- a/tests/mock/topology.py
+++ b/tests/mock/topology.py
@@ -13,74 +13,60 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from aria.storage.modeling import model
+from aria.modeling import models as aria_models
 
 from . import models
 
 
 def create_simple_topology_single_node(model_storage, create_operation):
-    service_template = models.get_blueprint()
-    model_storage.service_template.put(service_template)
-
-    service_instance = models.get_deployment(service_template)
-    model_storage.service_instance.put(service_instance)
+    service_template = models.create_service_template()
+    service = models.create_service(service_template)
 
-    node_template = models.get_dependency_node(service_instance)
-    node_template.interface_templates = [models.get_interface_template(
-        'tosca.interfaces.node.lifecycle.Standard.create',
+    node_template = models.create_dependency_node_template(service_template)
+    interface_template = models.create_interface_template(
+        service_template,
+        'Standard', 'create',
         operation_kwargs=dict(
             implementation=create_operation,
-            inputs=[model.Parameter(name='key', str_value='create', type='str'),
-                    model.Parameter(name='value', str_value=str(True), type='bool')]
-        )
-    )]
-    model_storage.node_template.put(node_template)
+            inputs={'key': aria_models.Parameter.wrap('key', 'create'),
+                    'value': aria_models.Parameter.wrap('value', True)})
+    )
+    node_template.interface_templates[interface_template.name] = interface_template                 # pylint: disable=unsubscriptable-object
 
-    node = models.get_dependency_node_instance(node_template, service_instance)
-    node.interfaces = [models.get_interface(
-        'tosca.interfaces.node.lifecycle.Standard.create',
+    node = models.create_dependency_node(node_template, service)
+    interface = models.create_interface(
+        service,
+        'Standard', 'create',
         operation_kwargs=dict(
             implementation=create_operation,
-            inputs=[model.Parameter(name='key', str_value='create', type='str'),
-                    model.Parameter(name='value', str_value=str(True), type='bool')])
-    )]
-    model_storage.node.put(node)
-
+            inputs={'key': aria_models.Parameter.wrap('key', 'create'),
+                    'value': aria_models.Parameter.wrap('value', True)})
+    )
+    node.interfaces[interface.name] = interface                                                     # pylint: disable=unsubscriptable-object
 
-def create_simple_topology_two_nodes(model_storage):
-    blueprint = models.get_blueprint()
-    model_storage.service_template.put(blueprint)
-    deployment = models.get_deployment(blueprint)
-    model_storage.service_instance.put(deployment)
+    model_storage.service_template.put(service_template)
+    model_storage.service.put(service)
 
-    #################################################################################
-    # Creating a simple deployment with node -> node as a graph
 
-    dependency_node = models.get_dependency_node(deployment)
-    model_storage.node_template.put(dependency_node)
-    storage_dependency_node = model_storage.node_template.get(dependency_node.id)
+def create_simple_topology_two_nodes(model_storage):
+    service_template = models.create_service_template()
+    service = models.create_service(service_template)
 
-    dependency_node_instance = models.get_dependency_node_instance(storage_dependency_node,
-                                                                   deployment)
-    model_storage.node.put(dependency_node_instance)
-    storage_dependency_node_instance = model_storage.node.get(dependency_node_instance.id)
+    # Creating a simple service with node -> node as a graph
 
-    req_template, cap_template = models.get_relationship(storage_dependency_node)
-    model_storage.requirement_template.put(req_template)
-    model_storage.capability_template.put(cap_template)
+    dependency_node_template = models.create_dependency_node_template(service_template)
+    dependent_node_template = models.create_dependent_node_template(service_template,
+                                                                    dependency_node_template)
 
-    dependent_node = models.get_dependent_node(deployment, req_template, cap_template)
-    model_storage.node_template.put(dependent_node)
-    storage_dependent_node = model_storage.node_template.get(dependent_node.id)
+    dependency_node = models.create_dependency_node(dependency_node_template, service)
+    dependent_node = models.create_dependent_node(dependent_node_template, service)
 
-    dependent_node_instance = models.get_dependent_node_instance(storage_dependent_node, deployment)
-    model_storage.node.put(dependent_node_instance)
-    storage_dependent_node_instance = model_storage.node.get(dependent_node_instance.id)
+    dependent_node.outbound_relationships.append(models.create_relationship(                        # pylint: disable=no-member
+        source=dependent_node,
+        target=dependency_node
+    ))
 
-    relationship_instance = models.get_relationship_instance(
-        target_instance=storage_dependency_node_instance,
-        source_instance=storage_dependent_node_instance
-    )
-    model_storage.relationship.put(relationship_instance)
+    model_storage.service_template.put(service_template)
+    model_storage.service.put(service)
 
-    return deployment.id
+    return service.id
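
As a usage sketch, the rewritten helper is consumed the way tests/mock/context.py does
above: it returns the id of the stored Service, which can be fetched back from model
storage (node names per the constants in tests/mock/models.py, and assuming nodes are
keyed by name as in the factories):

    service_id = create_simple_topology_two_nodes(model_storage)
    service = model_storage.service.get(service_id)
    assert set(service.nodes) == {models.DEPENDENCY_NODE_NAME, models.DEPENDENT_NODE_NAME}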

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/tests/modeling/__init__.py b/tests/modeling/__init__.py
new file mode 100644
index 0000000..072ef54
--- /dev/null
+++ b/tests/modeling/__init__.py
@@ -0,0 +1,34 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from sqlalchemy import (
+    Column,
+    Text,
+    Integer,
+)
+
+from aria.modeling import (
+    models,
+    types as modeling_types,
+    mixins
+)
+
+
+class MockModel(models.aria_declarative_base, mixins.ModelMixin): #pylint: disable=abstract-method
+    __tablename__ = 'mock_model'
+    model_dict = Column(modeling_types.Dict)
+    model_list = Column(modeling_types.List)
+    value = Column(Integer)
+    name = Column(Text)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/modeling/test_mixins.py
----------------------------------------------------------------------
diff --git a/tests/modeling/test_mixins.py b/tests/modeling/test_mixins.py
new file mode 100644
index 0000000..7795b57
--- /dev/null
+++ b/tests/modeling/test_mixins.py
@@ -0,0 +1,219 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+import sqlalchemy
+
+from aria.storage import (
+    ModelStorage,
+    sql_mapi
+)
+from aria import modeling
+from aria.modeling.exceptions import ValueFormatException
+
+from ..storage import (
+    release_sqlite_storage,
+    init_inmemory_model_storage
+)
+from . import MockModel
+from ..mock import (
+    models,
+    context as mock_context
+)
+
+
+@pytest.fixture
+def storage():
+    base_storage = ModelStorage(sql_mapi.SQLAlchemyModelAPI,
+                                initiator=init_inmemory_model_storage)
+    base_storage.register(MockModel)
+    yield base_storage
+    release_sqlite_storage(base_storage)
+
+
+@pytest.fixture(scope='module', autouse=True)
+def module_cleanup():
+    modeling.models.aria_declarative_base.metadata.remove(MockModel.__table__)                      # pylint: disable=no-member
+
+
+@pytest.fixture
+def context(tmpdir):
+    ctx = mock_context.simple(str(tmpdir))
+    yield ctx
+    release_sqlite_storage(ctx.model)
+
+
+def test_inner_dict_update(storage):
+    inner_dict = {'inner_value': 1}
+
+    mock_model = MockModel(model_dict={'inner_dict': inner_dict, 'value': 0})
+    storage.mock_model.put(mock_model)
+
+    storage_mm = storage.mock_model.get(mock_model.id)
+    assert storage_mm == mock_model
+
+    storage_mm.model_dict['inner_dict']['inner_value'] = 2
+    storage_mm.model_dict['value'] = -1
+    storage.mock_model.update(storage_mm)
+    storage_mm = storage.mock_model.get(storage_mm.id)
+
+    assert storage_mm.model_dict['inner_dict']['inner_value'] == 2
+    assert storage_mm.model_dict['value'] == -1
+
+
+def test_inner_list_update(storage):
+    mock_model = MockModel(model_list=[0, [1]])
+    storage.mock_model.put(mock_model)
+
+    storage_mm = storage.mock_model.get(mock_model.id)
+    assert storage_mm == mock_model
+
+    storage_mm.model_list[1][0] = 'new_inner_value'
+    storage_mm.model_list[0] = 'new_value'
+    storage.mock_model.update(storage_mm)
+    storage_mm = storage.mock_model.get(storage_mm.id)
+
+    assert storage_mm.model_list[1][0] == 'new_inner_value'
+    assert storage_mm.model_list[0] == 'new_value'
+
+
+def test_model_to_dict(context):
+    service = context.service
+    service = service.to_dict()
+
+    expected_keys = [
+        'description',
+        'created_at',
+        'permalink',
+        'scaling_groups',
+        'updated_at'
+    ]
+
+    for expected_key in expected_keys:
+        assert expected_key in service
+
+
+def test_relationship_model_ordering(context):
+    service = context.model.service.get_by_name(models.SERVICE_NAME)
+    source_node = context.model.node.get_by_name(models.DEPENDENT_NODE_NAME)
+    target_node = context.model.node.get_by_name(models.DEPENDENCY_NODE_NAME)
+
+    new_node_template = modeling.models.NodeTemplate(
+        name='new_node_template',
+        type=source_node.type,
+        default_instances=1,
+        min_instances=1,
+        max_instances=1,
+        service_template=service.service_template
+    )
+
+    new_node = modeling.models.Node(
+        name='new_node',
+        type=source_node.type,
+        runtime_properties={},
+        service=service,
+        version=None,
+        node_template=new_node_template,
+        state='',
+        scaling_groups=[]
+    )
+
+    source_node.outbound_relationships.append(modeling.models.Relationship(
+        source_node=source_node,
+        target_node=new_node,
+    ))
+
+    new_node.outbound_relationships.append(modeling.models.Relationship(                            # pylint: disable=no-member
+        source_node=new_node,
+        target_node=target_node,
+    ))
+
+    context.model.node_template.put(new_node_template)
+    context.model.node.put(new_node)
+    context.model.node.refresh(source_node)
+    context.model.node.refresh(target_node)
+
+    def flip_and_assert(node, direction):
+        """
+        Reverse the order of the given node's relationships and assert that the change took effect.
+        :param node: the node instance to operate on
+        :param direction: the type of relationships to flip (inbound/outbound)
+        :return:
+        """
+        assert direction in ('inbound', 'outbound')
+
+        relationships = getattr(node, direction + '_relationships')
+        assert len(relationships) == 2
+
+        reversed_relationship = list(reversed(relationships))
+        assert relationships != reversed_relationship
+
+        relationships[:] = reversed_relationship
+        context.model.node.update(node)
+        assert relationships == reversed_relationship
+
+    flip_and_assert(source_node, 'outbound')
+    flip_and_assert(target_node, 'inbound')
+
+
+class StrictClass(modeling.models.aria_declarative_base, modeling.mixins.ModelMixin):
+    __tablename__ = 'strict_class'
+
+    strict_dict = sqlalchemy.Column(modeling.types.StrictDict(basestring, basestring))
+    strict_list = sqlalchemy.Column(modeling.types.StrictList(basestring))
+
+
+def test_strict_dict():
+
+    strict_class = StrictClass()
+
+    def assert_strict(sc):
+        with pytest.raises(ValueFormatException):
+            sc.strict_dict = {'key': 1}
+
+        with pytest.raises(ValueFormatException):
+            sc.strict_dict = {1: 'value'}
+
+        with pytest.raises(ValueFormatException):
+            sc.strict_dict = {1: 1}
+
+    assert_strict(strict_class)
+    strict_class.strict_dict = {'key': 'value'}
+    assert strict_class.strict_dict == {'key': 'value'}
+
+    assert_strict(strict_class)
+    with pytest.raises(ValueFormatException):
+        strict_class.strict_dict['key'] = 1
+    with pytest.raises(ValueFormatException):
+        strict_class.strict_dict[1] = 'value'
+    with pytest.raises(ValueFormatException):
+        strict_class.strict_dict[1] = 1
+
+
+def test_strict_list():
+    strict_class = StrictClass()
+
+    def assert_strict(sc):
+        with pytest.raises(ValueFormatException):
+            sc.strict_list = [1]
+
+    assert_strict(strict_class)
+    strict_class.strict_list = ['item']
+    assert strict_class.strict_list == ['item']
+
+    assert_strict(strict_class)
+    with pytest.raises(ValueFormatException):
+        strict_class.strict_list[0] = 1

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/modeling/test_model_storage.py
----------------------------------------------------------------------
diff --git a/tests/modeling/test_model_storage.py b/tests/modeling/test_model_storage.py
new file mode 100644
index 0000000..bb778d4
--- /dev/null
+++ b/tests/modeling/test_model_storage.py
@@ -0,0 +1,102 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from aria.storage import (
+    ModelStorage,
+    exceptions,
+    sql_mapi
+)
+from aria import (application_model_storage, modeling)
+from ..storage import (release_sqlite_storage, init_inmemory_model_storage)
+
+from . import MockModel
+
+
+@pytest.fixture
+def storage():
+    base_storage = ModelStorage(sql_mapi.SQLAlchemyModelAPI,
+                                initiator=init_inmemory_model_storage)
+    base_storage.register(MockModel)
+    yield base_storage
+    release_sqlite_storage(base_storage)
+
+
+@pytest.fixture(scope='module', autouse=True)
+def module_cleanup():
+    modeling.models.aria_declarative_base.metadata.remove(MockModel.__table__)  #pylint: disable=no-member
+
+
+def test_storage_base(storage):
+    with pytest.raises(AttributeError):
+        storage.non_existent_attribute()
+
+
+def test_model_storage(storage):
+    mock_model = MockModel(value=0, name='model_name')
+    storage.mock_model.put(mock_model)
+
+    assert storage.mock_model.get_by_name('model_name') == mock_model
+
+    assert [mm_from_storage for mm_from_storage in storage.mock_model.iter()] == [mock_model]
+    assert [mm_from_storage for mm_from_storage in storage.mock_model] == [mock_model]
+
+    storage.mock_model.delete(mock_model)
+    with pytest.raises(exceptions.StorageError):
+        storage.mock_model.get(mock_model.id)
+
+
+def test_application_storage_factory():
+    storage = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
+                                        initiator=init_inmemory_model_storage)
+
+    assert storage.service_template
+    assert storage.node_template
+    assert storage.group_template
+    assert storage.policy_template
+    assert storage.substitution_template
+    assert storage.substitution_template_mapping
+    assert storage.requirement_template
+    assert storage.relationship_template
+    assert storage.capability_template
+    assert storage.interface_template
+    assert storage.operation_template
+    assert storage.artifact_template
+
+    assert storage.service
+    assert storage.node
+    assert storage.group
+    assert storage.policy
+    assert storage.substitution
+    assert storage.substitution_mapping
+    assert storage.relationship
+    assert storage.capability
+    assert storage.interface
+    assert storage.operation
+    assert storage.artifact
+
+    assert storage.execution
+    assert storage.service_update
+    assert storage.service_update_step
+    assert storage.service_modification
+    assert storage.plugin
+    assert storage.task
+
+    assert storage.parameter
+    assert storage.type
+    assert storage.metadata
+
+    release_sqlite_storage(storage)


[02/12] incubator-ariatosca git commit: ARIA-105 Integrate parser and orchestrator models

Posted by em...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index b2b67fc..6f97952 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -23,7 +23,7 @@ from aria.orchestrator import (
     workflow,
     operation,
 )
-from aria.storage.modeling import model
+from aria.modeling import models
 from aria.orchestrator.workflows import (
     api,
     exceptions,
@@ -60,15 +60,19 @@ class BaseTest(object):
             max_attempts=None,
             retry_interval=None,
             ignore_failure=None):
-        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-        node.interfaces = [mock.models.get_interface(
-            'aria.interfaces.lifecycle.create',
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+        interface = mock.models.create_interface(
+            node.service,
+            'aria.interfaces.lifecycle',
+            'create',
             operation_kwargs=dict(implementation='{name}.{func.__name__}'.format(name=__name__,
                                                                                  func=func))
-        )]
-        return api.task.OperationTask.node(
-            instance=node,
-            name='aria.interfaces.lifecycle.create',
+        )
+        node.interfaces[interface.name] = interface
+        return api.task.OperationTask.for_node(
+            node=node,
+            interface_name='aria.interfaces.lifecycle',
+            operation_name='create',
             inputs=inputs,
             max_attempts=max_attempts,
             retry_interval=retry_interval,
@@ -147,7 +151,7 @@ class TestEngine(BaseTest):
         execution = workflow_context.execution
         assert execution.started_at <= execution.ended_at <= datetime.utcnow()
         assert execution.error is None
-        assert execution.status == model.Execution.TERMINATED
+        assert execution.status == models.Execution.TERMINATED
 
     def test_single_task_successful_execution(self, workflow_context, executor):
         @workflow
@@ -176,7 +180,7 @@ class TestEngine(BaseTest):
         execution = workflow_context.execution
         assert execution.started_at <= execution.ended_at <= datetime.utcnow()
         assert execution.error is not None
-        assert execution.status == model.Execution.FAILED
+        assert execution.status == models.Execution.FAILED
 
     def test_two_tasks_execution_order(self, workflow_context, executor):
         @workflow
@@ -241,7 +245,7 @@ class TestCancel(BaseTest):
         execution = workflow_context.execution
         assert execution.started_at <= execution.ended_at <= datetime.utcnow()
         assert execution.error is None
-        assert execution.status == model.Execution.CANCELLED
+        assert execution.status == models.Execution.CANCELLED
 
     def test_cancel_pending_execution(self, workflow_context, executor):
         @workflow
@@ -252,7 +256,7 @@ class TestCancel(BaseTest):
                            executor=executor)
         eng.cancel_execution()
         execution = workflow_context.execution
-        assert execution.status == model.Execution.CANCELLED
+        assert execution.status == models.Execution.CANCELLED
 
 
 class TestRetries(BaseTest):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/workflows/core/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task.py b/tests/orchestrator/workflows/core/test_task.py
index 26564c5..f3ce92f 100644
--- a/tests/orchestrator/workflows/core/test_task.py
+++ b/tests/orchestrator/workflows/core/test_task.py
@@ -19,6 +19,7 @@ from datetime import (
 
 import pytest
 
+from aria.modeling import models
 from aria.orchestrator.context import workflow as workflow_context
 from aria.orchestrator.workflows import (
     api,
@@ -28,8 +29,10 @@ from aria.orchestrator.workflows import (
 
 from tests import mock, storage
 
-OP_NAME = 'tosca.interfaces.node.lifecycle.Standard.create'
-RELATIONSHIP_OP_NAME = 'tosca.interfaces.relationship.Configure.pre_configure'
+NODE_INTERFACE_NAME = 'Standard'
+NODE_OPERATION_NAME = 'create'
+RELATIONSHIP_INTERFACE_NAME = 'Configure'
+RELATIONSHIP_OPERATION_NAME = 'pre_configure'
 
 
 @pytest.fixture
@@ -37,15 +40,22 @@ def ctx(tmpdir):
     context = mock.context.simple(str(tmpdir))
 
     relationship = context.model.relationship.list()[0]
-    relationship.interfaces = [
-        mock.models.get_interface(RELATIONSHIP_OP_NAME, edge='source'),
-        mock.models.get_interface(RELATIONSHIP_OP_NAME, edge='target')
-    ]
+    interface = mock.models.create_interface(
+        relationship.source_node.service,
+        RELATIONSHIP_INTERFACE_NAME,
+        RELATIONSHIP_OPERATION_NAME
+    )
+    relationship.interfaces[interface.name] = interface
     context.model.relationship.update(relationship)
 
-    dependent_node = context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-    dependent_node.interfaces = [mock.models.get_interface(OP_NAME)]
-    context.model.node.update(dependent_node)
+    node = context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    interface = mock.models.create_interface(
+        node.service,
+        NODE_INTERFACE_NAME,
+        NODE_OPERATION_NAME
+    )
+    node.interfaces[interface.name] = interface
+    context.model.node.update(node)
 
     yield context
     storage.release_sqlite_storage(context.model)
@@ -55,39 +65,49 @@ class TestOperationTask(object):
 
     def _create_node_operation_task(self, ctx, node):
         with workflow_context.current.push(ctx):
-            api_task = api.task.OperationTask.node(
-                instance=node,
-                name='tosca.interfaces.node.lifecycle.Standard.create')
+            api_task = api.task.OperationTask.for_node(
+                node=node,
+                interface_name=NODE_INTERFACE_NAME,
+                operation_name=NODE_OPERATION_NAME)
             core_task = core.task.OperationTask(api_task=api_task)
         return api_task, core_task
 
-    def _create_relationship_operation_task(self, ctx, relationship, operation_name, edge):
+    def _create_relationship_operation_task(self, ctx, relationship, runs_on):
         with workflow_context.current.push(ctx):
-            api_task = api.task.OperationTask.relationship(
-                instance=relationship, name=operation_name, edge=edge)
+            api_task = api.task.OperationTask.for_relationship(
+                relationship=relationship,
+                interface_name=RELATIONSHIP_INTERFACE_NAME,
+                operation_name=RELATIONSHIP_OPERATION_NAME,
+                runs_on=runs_on)
             core_task = core.task.OperationTask(api_task=api_task)
         return api_task, core_task
 
     def test_node_operation_task_creation(self, ctx):
-        storage_plugin = mock.models.get_plugin(package_name='p1', package_version='0.1')
-        storage_plugin_other = mock.models.get_plugin(package_name='p0', package_version='0.0')
-        ctx.model.plugin.put(storage_plugin_other)
+        storage_plugin = mock.models.create_plugin(
+            package_name='p1', package_version='0.1')
+        storage_plugin_specification = mock.models.create_plugin_specification(
+            package_name='p1', package_version='0.1')
+        storage_plugin_specification_other = mock.models.create_plugin_specification(
+            package_name='p0', package_version='0.0')
         ctx.model.plugin.put(storage_plugin)
-        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        ctx.model.plugin_specification.put(storage_plugin_specification_other)
+        ctx.model.plugin_specification.put(storage_plugin_specification)
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
         node_template = node.node_template
-        plugin_name = 'plugin1'
-        node_template.plugins = [{'name': 'plugin1',
-                                  'package_name': 'p1',
-                                  'package_version': '0.1'}]
-        node.interfaces = [mock.models.get_interface(
-            'tosca.interfaces.node.lifecycle.Standard.create',
-            operation_kwargs=dict(plugin='plugin1')
-        )]
+        node_template.plugin_specifications[storage_plugin_specification.name] = \
+            storage_plugin_specification
+        interface = mock.models.create_interface(
+            node.service,
+            NODE_INTERFACE_NAME,
+            NODE_OPERATION_NAME,
+            operation_kwargs=dict(plugin_specification=storage_plugin_specification)
+        )
+        node.interfaces[interface.name] = interface
         ctx.model.node_template.update(node_template)
         ctx.model.node.update(node)
         api_task, core_task = self._create_node_operation_task(ctx, node)
         storage_task = ctx.model.task.get_by_name(core_task.name)
-        assert storage_task.plugin_name == plugin_name
+        assert storage_task.plugin is storage_plugin
         assert storage_task.execution_name == ctx.execution.name
         assert storage_task.runs_on == core_task.context.node
         assert core_task.model_task == storage_task
@@ -101,17 +121,17 @@ class TestOperationTask(object):
         relationship = ctx.model.relationship.list()[0]
         ctx.model.relationship.update(relationship)
         _, core_task = self._create_relationship_operation_task(
-            ctx, relationship, RELATIONSHIP_OP_NAME, 'source')
+            ctx, relationship, models.Task.RUNS_ON_SOURCE)
         assert core_task.model_task.runs_on == relationship.source_node
 
     def test_target_relationship_operation_task_creation(self, ctx):
         relationship = ctx.model.relationship.list()[0]
         _, core_task = self._create_relationship_operation_task(
-            ctx, relationship, RELATIONSHIP_OP_NAME, 'target')
+            ctx, relationship, models.Task.RUNS_ON_TARGET)
         assert core_task.model_task.runs_on == relationship.target_node
 
     def test_operation_task_edit_locked_attribute(self, ctx):
-        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
 
         _, core_task = self._create_node_operation_task(ctx, node)
         now = datetime.utcnow()
@@ -127,7 +147,7 @@ class TestOperationTask(object):
             core_task.due_at = now
 
     def test_operation_task_edit_attributes(self, ctx):
-        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
 
         _, core_task = self._create_node_operation_task(ctx, node)
         future_time = datetime.utcnow() + timedelta(seconds=3)
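
The fixtures above exercise the new keyword-based task constructors. A condensed sketch
of the API surface, using the constants defined at the top of this test module and the
node/relationship objects prepared by the ctx fixture:

    with workflow_context.current.push(ctx):
        node_task = api.task.OperationTask.for_node(
            node=node,
            interface_name=NODE_INTERFACE_NAME,           # 'Standard'
            operation_name=NODE_OPERATION_NAME)           # 'create'

        relationship_task = api.task.OperationTask.for_relationship(
            relationship=relationship,
            interface_name=RELATIONSHIP_INTERFACE_NAME,   # 'Configure'
            operation_name=RELATIONSHIP_OPERATION_NAME,   # 'pre_configure'
            runs_on=models.Task.RUNS_ON_SOURCE)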

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
index 167004e..0a95d43 100644
--- a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
+++ b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
@@ -23,10 +23,16 @@ from tests import storage
 
 
 def test_task_graph_into_execution_graph(tmpdir):
-    operation_name = 'tosca.interfaces.node.lifecycle.Standard.create'
+    interface_name = 'Standard'
+    operation_name = 'create'
     task_context = mock.context.simple(str(tmpdir))
-    node = task_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-    node.interfaces = [mock.models.get_interface(operation_name)]
+    node = task_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    interface = mock.models.create_interface(
+        node.service,
+        interface_name,
+        operation_name
+    )
+    node.interfaces[interface.name] = interface
     task_context.model.node.update(node)
 
     def sub_workflow(name, **_):
@@ -34,14 +40,17 @@ def test_task_graph_into_execution_graph(tmpdir):
 
     with context.workflow.current.push(task_context):
         test_task_graph = api.task.WorkflowTask(sub_workflow, name='test_task_graph')
-        simple_before_task = api.task.OperationTask.node(instance=node,
-                                                         name=operation_name)
-        simple_after_task = api.task.OperationTask.node(instance=node,
-                                                        name=operation_name)
+        simple_before_task = api.task.OperationTask.for_node(node=node,
+                                                             interface_name=interface_name,
+                                                             operation_name=operation_name)
+        simple_after_task = api.task.OperationTask.for_node(node=node,
+                                                            interface_name=interface_name,
+                                                            operation_name=operation_name)
 
         inner_task_graph = api.task.WorkflowTask(sub_workflow, name='test_inner_task_graph')
-        inner_task = api.task.OperationTask.node(instance=node,
-                                                 name=operation_name)
+        inner_task = api.task.OperationTask.for_node(node=node,
+                                                     interface_name=interface_name,
+                                                     operation_name=operation_name)
         inner_task_graph.add_tasks(inner_task)
 
     test_task_graph.add_tasks(simple_before_task)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/workflows/executor/test_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_executor.py b/tests/orchestrator/workflows/executor/test_executor.py
index 8da801e..0a2280d 100644
--- a/tests/orchestrator/workflows/executor/test_executor.py
+++ b/tests/orchestrator/workflows/executor/test_executor.py
@@ -28,7 +28,7 @@ except ImportError:
     _celery = None
     app = None
 
-from aria.storage.modeling import model
+from aria.modeling import models
 from aria.orchestrator import events
 from aria.orchestrator.workflows.executor import (
     thread,
@@ -43,7 +43,8 @@ def test_execute(executor):
     expected_value = 'value'
     successful_task = MockTask(mock_successful_task)
     failing_task = MockTask(mock_failing_task)
-    task_with_inputs = MockTask(mock_task_with_input, inputs=dict(input='value'))
+    task_with_inputs = MockTask(mock_task_with_input, inputs={'input': models.Parameter.wrap(
+        'input', 'value')})
 
     for task in [successful_task, failing_task, task_with_inputs]:
         executor.execute(task)
@@ -98,14 +99,15 @@ class MockContext(object):
 
 class MockTask(object):
 
-    INFINITE_RETRIES = model.Task.INFINITE_RETRIES
+    INFINITE_RETRIES = models.Task.INFINITE_RETRIES
 
     def __init__(self, func, inputs=None):
         self.states = []
         self.exception = None
         self.id = str(uuid.uuid4())
         name = func.__name__
-        implementation = 'tests.orchestrator.workflows.executor.test_executor.{name}'.format(
+        implementation = '{module}.{name}'.format(
+            module=__name__,
             name=name)
         self.implementation = implementation
         self.logger = logging.getLogger()
@@ -117,7 +119,7 @@ class MockTask(object):
         self.plugin_fk = None
         self.ignore_failure = False
 
-        for state in model.Task.STATES:
+        for state in models.Task.STATES:
             setattr(self, state.upper(), state)
 
     @contextmanager
@@ -151,6 +153,7 @@ def register_signals():
     def failure_handler(task, exception, *args, **kwargs):
         task.states.append('failure')
         task.exception = exception
+
     events.start_task_signal.connect(start_handler)
     events.on_success_task_signal.connect(success_handler)
     events.on_failure_task_signal.connect(failure_handler)
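
For reference, a minimal sketch of how plain input values are wrapped for the executor, mirroring the models.Parameter.wrap call added above; it assumes only that the wrapped Parameter exposes the original value through .value, which the updated assertions in these tests rely on.

    from aria.modeling import models


    def wrap_inputs(plain_inputs):
        # Turn {'input': 'value'} into {'input': Parameter(...)} so that tasks
        # carry Parameter models rather than raw Python values.
        return dict((name, models.Parameter.wrap(name, value))
                    for name, value in plain_inputs.items())


    inputs = wrap_inputs({'input': 'value'})
    assert inputs['input'].value == 'value'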

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/workflows/executor/test_process_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor.py b/tests/orchestrator/workflows/executor/test_process_executor.py
index 2d43261..3cd1c47 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor.py
@@ -22,8 +22,8 @@ from contextlib import contextmanager
 import pytest
 
 from aria import application_model_storage
+from aria.modeling import models as aria_models
 from aria.storage import sql_mapi
-from aria.storage.modeling import model as aria_model
 from aria.orchestrator import (
     events,
     plugin
@@ -126,7 +126,7 @@ class MockContext(object):
 
 class MockTask(object):
 
-    INFINITE_RETRIES = aria_model.Task.INFINITE_RETRIES
+    INFINITE_RETRIES = aria_models.Task.INFINITE_RETRIES
 
     def __init__(self, plugin, implementation):
         self.id = str(uuid.uuid4())
@@ -141,7 +141,7 @@ class MockTask(object):
         self.plugin = plugin
         self.ignore_failure = False
 
-        for state in aria_model.Task.STATES:
+        for state in aria_models.Task.STATES:
             setattr(self, state.upper(), state)
 
     @contextmanager
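
The only substantive change in this file is the relocation of the model definitions; a short sketch of the move, taken directly from the hunks above:

    # Pre-ARIA-105 location (removed):
    #     from aria.storage.modeling import model as aria_model
    # Post-ARIA-105 location:
    from aria.modeling import models as aria_models

    INFINITE_RETRIES = aria_models.Task.INFINITE_RETRIES
    STATES = aria_models.Task.STATES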

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
index ad3cb76..6d0eb5b 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
@@ -71,9 +71,9 @@ def _test_update_and_refresh(ctx, lock_files, key, first_value, second_value):
 
 def _test(context, executor, lock_files, func, expected_failure):
     def _node(ctx):
-        return ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        return ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
 
-    op_name = mock.operations.NODE_OPERATIONS_INSTALL[0]
+    interface_name, operation_name = mock.operations.NODE_OPERATIONS_INSTALL[0]
 
     key = 'key'
     first_value = 'value1'
@@ -86,17 +86,26 @@ def _test(context, executor, lock_files, func, expected_failure):
     }
 
     node = _node(context)
-    node.interfaces = [mock.models.get_interface(
-        op_name,
+    interface = mock.models.create_interface(
+        node.service,
+        interface_name,
+        operation_name,
         operation_kwargs=dict(implementation='{0}.{1}'.format(__name__, func.__name__))
-    )]
+    )
+    node.interfaces[interface.name] = interface
     context.model.node.update(node)
 
     @workflow
     def mock_workflow(graph, **_):
         graph.add_tasks(
-            api.task.OperationTask.node(instance=node, name=op_name, inputs=inputs),
-            api.task.OperationTask.node(instance=node, name=op_name, inputs=inputs)
+            api.task.OperationTask.for_node(node=node,
+                                            interface_name=interface_name,
+                                            operation_name=operation_name,
+                                            inputs=inputs),
+            api.task.OperationTask.for_node(node=node,
+                                            interface_name=interface_name,
+                                            operation_name=operation_name,
+                                            inputs=inputs)
         )
 
     signal = events.on_failure_task_signal
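
For reference, a minimal sketch of binding a concrete implementation to the interface and scheduling it from a workflow, following the pattern in the hunk above; the operation function here is a hypothetical stand-in, referenced only by its dotted module path.

    from aria import workflow
    from aria.orchestrator.workflows.api.task import OperationTask
    from tests import mock


    def _noop_operation(**_):
        # Hypothetical operation body; only its import path matters here.
        pass


    @workflow
    def sketch_workflow(ctx, graph, **_):
        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
        interface = mock.models.create_interface(
            node.service,
            'test_interface',
            'operation',
            operation_kwargs=dict(
                implementation='{0}.{1}'.format(__name__, _noop_operation.__name__)))
        node.interfaces[interface.name] = interface
        ctx.model.node.update(node)

        graph.add_tasks(OperationTask.for_node(node=node,
                                               interface_name='test_interface',
                                               operation_name='operation',
                                               inputs={'key': 'value'}))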

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/workflows/executor/test_process_executor_extension.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_extension.py b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
index ac95554..1c4cda6 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_extension.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
@@ -29,27 +29,34 @@ from tests import storage
 def test_decorate_extension(context, executor):
     inputs = {'input1': 1, 'input2': 2}
 
-    def get_node_instance(ctx):
-        return ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+    def get_node(ctx):
+        return ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
 
     @workflow
     def mock_workflow(ctx, graph):
-        node_instance = get_node_instance(ctx)
-        op = 'test.op'
-        node_instance.interfaces = [mock.models.get_interface(
-            op,
+        node = get_node(ctx)
+        interface_name = 'test_interface'
+        operation_name = 'operation'
+        interface = mock.models.create_interface(
+            ctx.service,
+            interface_name,
+            operation_name,
             operation_kwargs=dict(implementation='{0}.{1}'.format(__name__,
                                                                   _mock_operation.__name__))
-        )]
-        task = api.task.OperationTask.node(instance=node_instance, name=op, inputs=inputs)
+        )
+        node.interfaces[interface.name] = interface
+        task = api.task.OperationTask.for_node(node=node,
+                                               interface_name=interface_name,
+                                               operation_name=operation_name,
+                                               inputs=inputs)
         graph.add_tasks(task)
         return graph
     graph = mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter
     eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
     eng.execute()
-    out = get_node_instance(context).runtime_properties['out']
-    assert out['wrapper_inputs'] == inputs
-    assert out['function_inputs'] == inputs
+    out = get_node(context).runtime_properties['out']
+    assert out['wrapper_inputs'] == dict((k, v.value) for k, v in inputs.iteritems())
+    assert out['function_inputs'] == dict((k, v.value) for k, v in inputs.iteritems())
 
 
 @extension.process_executor

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
index d3b3300..a3957c3 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
@@ -45,7 +45,7 @@ def test_track_changes_of_failed_operation(context, executor):
 
 
 def _assert_tracked_changes_are_applied(context):
-    instance = context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+    instance = context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
     assert instance.runtime_properties == _TEST_RUNTIME_PROPERTIES
 
 
@@ -67,15 +67,15 @@ def test_apply_tracked_changes_during_an_operation(context, executor):
     }
 
     expected_initial = context.model.node.get_by_name(
-        mock.models.DEPENDENCY_NODE_INSTANCE_NAME).runtime_properties
+        mock.models.DEPENDENCY_NODE_NAME).runtime_properties
 
     out = _run_workflow(context=context, executor=executor, op_func=_mock_updating_operation,
                         inputs=inputs)
 
     expected_after_update = expected_initial.copy()
-    expected_after_update.update(inputs['committed'])
+    expected_after_update.update(inputs['committed'].value)  # pylint: disable=no-member
     expected_after_change = expected_after_update.copy()
-    expected_after_change.update(inputs['changed_but_refreshed'])
+    expected_after_change.update(inputs['changed_but_refreshed'].value)  # pylint: disable=no-member
     expected_after_refresh = expected_after_update
 
     assert out['initial'] == expected_initial
@@ -87,19 +87,27 @@ def test_apply_tracked_changes_during_an_operation(context, executor):
 def _run_workflow(context, executor, op_func, inputs=None):
     @workflow
     def mock_workflow(ctx, graph):
-        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-        node.interfaces = [mock.models.get_interface(
-            'test.op', operation_kwargs=dict(implementation=_operation_mapping(op_func)))]
-        task = api.task.OperationTask.node(instance=node,
-                                           name='test.op',
-                                           inputs=inputs or {})
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+        interface_name = 'test_interface'
+        operation_name = 'operation'
+        interface = mock.models.create_interface(
+            ctx.service,
+            interface_name,
+            operation_name,
+            operation_kwargs=dict(implementation=_operation_mapping(op_func))
+        )
+        node.interfaces[interface.name] = interface
+        task = api.task.OperationTask.for_node(node=node,
+                                               interface_name=interface_name,
+                                               operation_name=operation_name,
+                                               inputs=inputs or {})
         graph.add_tasks(task)
         return graph
     graph = mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter
     eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
     eng.execute()
     return context.model.node.get_by_name(
-        mock.models.DEPENDENCY_NODE_INSTANCE_NAME).runtime_properties.get('out')
+        mock.models.DEPENDENCY_NODE_NAME).runtime_properties.get('out')
 
 
 @operation

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/parser/utils.py
----------------------------------------------------------------------
diff --git a/tests/parser/utils.py b/tests/parser/utils.py
index 994aac6..8460de8 100644
--- a/tests/parser/utils.py
+++ b/tests/parser/utils.py
@@ -21,10 +21,10 @@ from aria.parser.consumption import (
     ConsumerChain,
     Read,
     Validate,
-    Model,
+    ServiceTemplate,
     Types,
     Inputs,
-    Instance
+    ServiceInstance
 )
 from aria.utils.imports import import_fullname
 
@@ -66,14 +66,14 @@ def create_consumer(context, consumer_class_name):
         dumper = None
     elif consumer_class_name == 'presentation':
         dumper = consumer.consumers[0]
-    elif consumer_class_name == 'model':
-        consumer.append(Model)
+    elif consumer_class_name == 'template':
+        consumer.append(ServiceTemplate)
     elif consumer_class_name == 'types':
-        consumer.append(Model, Types)
+        consumer.append(ServiceTemplate, Types)
     elif consumer_class_name == 'instance':
-        consumer.append(Model, Inputs, Instance)
+        consumer.append(ServiceTemplate, Inputs, ServiceInstance)
     else:
-        consumer.append(Model, Inputs, Instance)
+        consumer.append(ServiceTemplate, Inputs, ServiceInstance)
         consumer.append(import_fullname(consumer_class_name))
 
     if dumper is None:
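
For reference, a minimal sketch of assembling the renamed consumers into a chain; only the consumer class names and the .append() usage are taken from the hunks above, and the ConsumerChain(context, consumers) constructor signature is assumed.

    from aria.parser.consumption import (
        ConsumerChain,
        Read,
        Validate,
        ServiceTemplate,
        Inputs,
        ServiceInstance
    )


    def build_instance_chain(context):
        # Read and validate the template, then derive a service template and
        # instantiate it; this replaces the old Model/Inputs/Instance chain.
        chain = ConsumerChain(context, (Read, Validate))
        chain.append(ServiceTemplate, Inputs, ServiceInstance)
        return chain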

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/workflows.py
----------------------------------------------------------------------
diff --git a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/workflows.py b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/workflows.py
index fff78bf..94ee824 100644
--- a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/workflows.py
+++ b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/workflows.py
@@ -1,6 +1,12 @@
 
 from aria import workflow
 from aria.orchestrator.workflows.api.task import OperationTask
+from aria.orchestrator.workflows.exceptions import TaskException
+
+
+INTERFACE_NAME = 'Maintenance'
+ENABLE_OPERATION_NAME = 'enable'
+DISABLE_OPERATION_NAME = 'disable'
 
 
 @workflow
@@ -9,8 +15,11 @@ def maintenance(ctx, graph, enabled):
     Custom workflow to call the operations on the Maintenance interface.
     """
 
-    operation = 'Maintenance.enable' if enabled else 'Maintenance.disable'
-
     for node in ctx.model.node.iter():
-        for interface in node.interfaces.filter_by(name='Maintenance', type_name='Maintenance'):
-            graph.add_tasks(OperationTask.node(instance=node, name=operation))
+        try:
+            graph.add_tasks(OperationTask.for_node(node=node,
+                                                   interface_name=INTERFACE_NAME,
+                                                   operation_name=ENABLE_OPERATION_NAME if enabled
+                                                   else DISABLE_OPERATION_NAME))
+        except TaskException:
+            pass

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/storage/__init__.py
----------------------------------------------------------------------
diff --git a/tests/storage/__init__.py b/tests/storage/__init__.py
index f95bdb2..c5d7678 100644
--- a/tests/storage/__init__.py
+++ b/tests/storage/__init__.py
@@ -19,27 +19,11 @@ from tempfile import mkdtemp
 from sqlalchemy import (
     create_engine,
     orm,
-    Column,
-    Text,
-    Integer,
     pool,
     MetaData
 )
 
-
-from aria.storage.modeling import (
-    model,
-    structure,
-    type as aria_type
-)
-
-
-class MockModel(structure.ModelMixin, model.aria_declarative_base): #pylint: disable=abstract-method
-    __tablename__ = 'mock_model'
-    model_dict = Column(aria_type.Dict)
-    model_list = Column(aria_type.List)
-    value = Column(Integer)
-    name = Column(Text)
+from aria.modeling import models
 
 
 class TestFileSystem(object):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/storage/test_instrumentation.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_instrumentation.py b/tests/storage/test_instrumentation.py
index 7f0eb02..673103e 100644
--- a/tests/storage/test_instrumentation.py
+++ b/tests/storage/test_instrumentation.py
@@ -16,18 +16,19 @@
 import pytest
 from sqlalchemy import Column, Text, Integer, event
 
+from aria.modeling import (
+    mixins,
+    types as modeling_types,
+    models
+)
+from aria.modeling.exceptions import ValueFormatException
 from aria.storage import (
     ModelStorage,
     sql_mapi,
-    instrumentation,
-    exceptions,
-)
-from aria.storage.modeling import (
-    model,
-    type as aria_type,
-    structure,
+    instrumentation
 )
-from ..storage import release_sqlite_storage, init_inmemory_model_storage
+
+from . import release_sqlite_storage, init_inmemory_model_storage
 
 STUB = instrumentation._STUB
 Value = instrumentation._Value
@@ -280,8 +281,8 @@ class TestInstrumentation(object):
     def test_track_changes_to_strict_dict(self, storage):
         model_kwargs = dict(strict_dict={'key': 'value'},
                             strict_list=['item'])
-        mode_instance = StrictMockModel(**model_kwargs)
-        storage.strict_mock_model.put(mode_instance)
+        model_instance = StrictMockModel(**model_kwargs)
+        storage.strict_mock_model.put(model_instance)
 
         instrument = self._track_changes({
             StrictMockModel.strict_dict: dict,
@@ -290,28 +291,28 @@ class TestInstrumentation(object):
 
         assert not instrument.tracked_changes
 
-        storage_model_instance = storage.strict_mock_model.get(mode_instance.id)
+        storage_model_instance = storage.strict_mock_model.get(model_instance.id)
 
-        with pytest.raises(exceptions.StorageError):
+        with pytest.raises(ValueFormatException):
             storage_model_instance.strict_dict = {1: 1}
 
-        with pytest.raises(exceptions.StorageError):
+        with pytest.raises(ValueFormatException):
             storage_model_instance.strict_dict = {'hello': 1}
 
-        with pytest.raises(exceptions.StorageError):
+        with pytest.raises(ValueFormatException):
             storage_model_instance.strict_dict = {1: 'hello'}
 
         storage_model_instance.strict_dict = {'hello': 'world'}
         assert storage_model_instance.strict_dict == {'hello': 'world'}
 
-        with pytest.raises(exceptions.StorageError):
+        with pytest.raises(ValueFormatException):
             storage_model_instance.strict_list = [1]
         storage_model_instance.strict_list = ['hello']
         assert storage_model_instance.strict_list == ['hello']
 
         assert instrument.tracked_changes == {
             'strict_mock_model': {
-                mode_instance.id: {
+                model_instance.id: {
                     'strict_dict': Value(STUB, {'hello': 'world'}),
                     'strict_list': Value(STUB, ['hello']),
                 }
@@ -336,27 +337,27 @@ def storage():
     release_sqlite_storage(result)
 
 
-class _MockModel(structure.ModelMixin):
+class _MockModel(mixins.ModelMixin):
     name = Column(Text)
-    dict1 = Column(aria_type.Dict)
-    dict2 = Column(aria_type.Dict)
-    list1 = Column(aria_type.List)
-    list2 = Column(aria_type.List)
+    dict1 = Column(modeling_types.Dict)
+    dict2 = Column(modeling_types.Dict)
+    list1 = Column(modeling_types.List)
+    list2 = Column(modeling_types.List)
     int1 = Column(Integer)
     int2 = Column(Integer)
     string2 = Column(Text)
 
 
-class MockModel1(_MockModel, model.aria_declarative_base):
+class MockModel1(_MockModel, models.aria_declarative_base):
     __tablename__ = 'mock_model_1'
 
 
-class MockModel2(_MockModel, model.aria_declarative_base):
+class MockModel2(_MockModel, models.aria_declarative_base):
     __tablename__ = 'mock_model_2'
 
 
-class StrictMockModel(structure.ModelMixin, model.aria_declarative_base):
+class StrictMockModel(mixins.ModelMixin, models.aria_declarative_base):
     __tablename__ = 'strict_mock_model'
 
-    strict_dict = Column(aria_type.StrictDict(basestring, basestring))
-    strict_list = Column(aria_type.StrictList(basestring))
+    strict_dict = Column(modeling_types.StrictDict(basestring, basestring))
+    strict_list = Column(modeling_types.StrictList(basestring))
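
For reference, a minimal sketch of a strictly typed model under the relocated modeling package, mirroring the StrictMockModel definition above; the class and column types are taken from the hunks, and, as the assertions above show, assigning mismatched key or value types to such columns raises aria.modeling.exceptions.ValueFormatException.

    from sqlalchemy import Column
    from aria.modeling import mixins, models, types as modeling_types


    class StrictExample(mixins.ModelMixin, models.aria_declarative_base):
        __tablename__ = 'strict_example'

        # Keys and values must both be strings; the list must contain strings.
        strict_dict = Column(modeling_types.StrictDict(basestring, basestring))
        strict_list = Column(modeling_types.StrictList(basestring))


    example = StrictExample(strict_dict={'key': 'value'}, strict_list=['item'])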

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/storage/test_model_storage.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_model_storage.py b/tests/storage/test_model_storage.py
deleted file mode 100644
index f88080a..0000000
--- a/tests/storage/test_model_storage.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-
-from aria.storage import (
-    ModelStorage,
-    exceptions,
-    sql_mapi,
-    modeling,
-)
-from aria import application_model_storage
-from ..storage import release_sqlite_storage, init_inmemory_model_storage
-
-from . import MockModel
-
-
-@pytest.fixture
-def storage():
-    base_storage = ModelStorage(sql_mapi.SQLAlchemyModelAPI,
-                                initiator=init_inmemory_model_storage)
-    base_storage.register(MockModel)
-    yield base_storage
-    release_sqlite_storage(base_storage)
-
-
-@pytest.fixture(scope='module', autouse=True)
-def module_cleanup():
-    modeling.model.aria_declarative_base.metadata.remove(MockModel.__table__)  #pylint: disable=no-member
-
-
-def test_storage_base(storage):
-    with pytest.raises(AttributeError):
-        storage.non_existent_attribute()
-
-
-def test_model_storage(storage):
-    mock_model = MockModel(value=0, name='model_name')
-    storage.mock_model.put(mock_model)
-
-    assert storage.mock_model.get_by_name('model_name') == mock_model
-
-    assert [mm_from_storage for mm_from_storage in storage.mock_model.iter()] == [mock_model]
-    assert [mm_from_storage for mm_from_storage in storage.mock_model] == [mock_model]
-
-    storage.mock_model.delete(mock_model)
-    with pytest.raises(exceptions.StorageError):
-        storage.mock_model.get(mock_model.id)
-
-
-def test_application_storage_factory():
-    storage = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
-                                        initiator=init_inmemory_model_storage)
-
-    assert storage.parameter
-    assert storage.mapping_template
-    assert storage.substitution_template
-    assert storage.service_template
-    assert storage.node_template
-    assert storage.group_template
-    assert storage.interface_template
-    assert storage.operation_template
-    assert storage.artifact_template
-    assert storage.policy_template
-    assert storage.group_policy_template
-    assert storage.group_policy_trigger_template
-    assert storage.requirement_template
-    assert storage.capability_template
-
-    assert storage.mapping
-    assert storage.substitution
-    assert storage.service_instance
-    assert storage.node
-    assert storage.group
-    assert storage.interface
-    assert storage.operation
-    assert storage.capability
-    assert storage.artifact
-    assert storage.policy
-    assert storage.group_policy
-    assert storage.group_policy_trigger
-    assert storage.relationship
-
-    assert storage.execution
-    assert storage.service_instance_update
-    assert storage.service_instance_update_step
-    assert storage.service_instance_modification
-    assert storage.plugin
-    assert storage.task
-
-    release_sqlite_storage(storage)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/storage/test_models.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_models.py b/tests/storage/test_models.py
deleted file mode 100644
index 81b4122..0000000
--- a/tests/storage/test_models.py
+++ /dev/null
@@ -1,875 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from datetime import datetime
-from contextlib import contextmanager
-
-import pytest
-
-from aria import application_model_storage
-from aria.storage import (
-    exceptions,
-    sql_mapi,
-)
-from aria.storage.modeling.model import (
-    ServiceTemplate,
-    ServiceInstance,
-    ServiceInstanceUpdate,
-    ServiceInstanceUpdateStep,
-    ServiceInstanceModification,
-    Execution,
-    Task,
-    Plugin,
-    Relationship,
-    NodeTemplate,
-    Node,
-    Parameter
-)
-
-from tests import mock
-from ..storage import release_sqlite_storage, init_inmemory_model_storage
-
-
-@contextmanager
-def sql_storage(storage_func):
-    storage = None
-    try:
-        storage = storage_func()
-        yield storage
-    finally:
-        if storage:
-            release_sqlite_storage(storage)
-
-
-def _empty_storage():
-    return application_model_storage(sql_mapi.SQLAlchemyModelAPI,
-                                     initiator=init_inmemory_model_storage)
-
-
-def _service_template_storage():
-    storage = _empty_storage()
-    service_template = mock.models.get_blueprint()
-    storage.service_template.put(service_template)
-    return storage
-
-
-def _service_instance_storage():
-    storage = _service_template_storage()
-    service_instance = mock.models.get_deployment(
-        storage.service_template.get_by_name(mock.models.BLUEPRINT_NAME))
-    storage.service_instance.put(service_instance)
-    return storage
-
-
-def _service_instance_update_storage():
-    storage = _service_instance_storage()
-    service_instance_update = ServiceInstanceUpdate(
-        service_instance=storage.service_instance.list()[0],
-        created_at=now,
-        service_instance_plan={},
-    )
-    storage.service_instance_update.put(service_instance_update)
-    return storage
-
-
-def _node_template_storage():
-    storage = _service_instance_storage()
-    node_template = mock.models.get_dependency_node(storage.service_instance.list()[0])
-    storage.node_template.put(node_template)
-    return storage
-
-
-def _nodes_storage():
-    storage = _node_template_storage()
-    service_instance = storage.service_instance.get_by_name(mock.models.DEPLOYMENT_NAME)
-
-    dependency_node_template = storage.node_template.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-    dependency_node = mock.models.get_dependency_node_instance(dependency_node_template,
-                                                               service_instance)
-
-    req_template, cap_template = mock.models.get_relationship(dependency_node)
-    storage.requirement_template.put(req_template)
-    storage.capability_template.put(cap_template)
-
-    dependent_node_template = mock.models.get_dependent_node(service_instance,
-                                                             req_template,
-                                                             cap_template)
-    dependent_node = mock.models.get_dependent_node_instance(dependent_node_template,
-                                                             service_instance)
-    storage.node.put(dependency_node)
-    storage.node.put(dependent_node)
-    return storage
-
-
-def _execution_storage():
-    storage = _service_instance_storage()
-    execution = mock.models.get_execution(storage.service_instance.list()[0])
-    plugin = mock.models.get_plugin()
-    storage.execution.put(execution)
-    storage.plugin.put(plugin)
-    return storage
-
-
-@pytest.fixture
-def empty_storage():
-    with sql_storage(_empty_storage) as storage:
-        yield storage
-
-
-@pytest.fixture
-def service_template_storage():
-    with sql_storage(_service_template_storage) as storage:
-        yield storage
-
-
-@pytest.fixture
-def service_instance_storage():
-    with sql_storage(_service_instance_storage) as storage:
-        yield storage
-
-
-@pytest.fixture
-def service_instance_update_storage():
-    with sql_storage(_service_instance_update_storage) as storage:
-        yield storage
-
-
-@pytest.fixture
-def node_template_storage():
-    with sql_storage(_node_template_storage) as storage:
-        yield storage
-
-
-@pytest.fixture
-def nodes_storage():
-    with sql_storage(_nodes_storage) as storage:
-        yield storage
-
-
-@pytest.fixture
-def execution_storage():
-    with sql_storage(_execution_storage) as storage:
-        yield storage
-
-
-m_cls = type('MockClass')
-now = datetime.utcnow()
-
-
-def _test_model(is_valid, storage, model_cls, model_kwargs):
-    if is_valid:
-        model = model_cls(**model_kwargs)
-        getattr(storage, model_cls.__modelname__).put(model)
-        return model
-    else:
-        with pytest.raises((exceptions.StorageError, TypeError),):
-            getattr(storage, model_cls.__modelname__).put(model_cls(**model_kwargs))
-
-
-class TestServiceTemplate(object):
-
-    @pytest.mark.parametrize(
-        'is_valid, plan, description, created_at, updated_at, main_file_name',
-        [
-            (False, None, 'description', now, now, '/path'),
-            (False, {}, {}, now, now, '/path'),
-            (False, {}, 'description', 'error', now, '/path'),
-            (False, {}, 'description', now, 'error', '/path'),
-            (False, {}, 'description', now, now, {}),
-            (True, {}, 'description', now, now, '/path'),
-        ]
-    )
-    def test_blueprint_model_creation(self, empty_storage, is_valid, plan, description, created_at,
-                                      updated_at, main_file_name):
-        _test_model(is_valid=is_valid,
-                    storage=empty_storage,
-                    model_cls=ServiceTemplate,
-                    model_kwargs=dict(
-                        plan=plan,
-                        description=description,
-                        created_at=created_at,
-                        updated_at=updated_at,
-                        main_file_name=main_file_name)
-                   )
-
-
-class TestServiceInstance(object):
-
-    @pytest.mark.parametrize(
-        'is_valid, name, created_at, description, inputs, permalink, policy_triggers, '
-        'policy_types, outputs, scaling_groups, updated_at, workflows',
-        [
-            (False, m_cls, now, 'desc', [], 'perlnk', {}, {}, [], {}, now, {}),
-            (False, 'name', m_cls, 'desc', [], 'perlnk', {}, {}, [], {}, now, {}),
-            (False, 'name', now, m_cls, [], 'perlnk', {}, {}, [], {}, now, {}),
-            (False, 'name', now, 'desc', [], m_cls, {}, {}, [], {}, now, {}),
-            (False, 'name', now, 'desc', [], 'perlnk', m_cls, {}, [], {}, now, {}),
-            (False, 'name', now, 'desc', [], 'perlnk', {}, m_cls, [], {}, now, {}),
-            (False, 'name', now, 'desc', [], 'perlnk', {}, {}, m_cls, {}, now, {}),
-            (False, 'name', now, 'desc', [], 'perlnk', {}, {}, [], m_cls, now, {}),
-            (False, 'name', now, 'desc', [], 'perlnk', {}, {}, [], {}, m_cls, {}),
-            (False, 'name', now, 'desc', [], 'perlnk', {}, {}, [], {}, now, m_cls),
-
-            (True, 'name', now, 'desc', [], 'perlnk', {}, {}, [], {}, now, {}),
-            (True, None, now, 'desc', [], 'perlnk', {}, {}, [], {}, now, {}),
-            (True, 'name', now, 'desc', [], 'perlnk', {}, {}, [], {}, now, {}),
-            (True, 'name', now, None, [], 'perlnk', {}, {}, [], {}, now, {}),
-            (True, 'name', now, 'desc', [], 'perlnk', {}, {}, [], {}, now, {}),
-            (True, 'name', now, 'desc', [], None, {}, {}, [], {}, now, {}),
-            (True, 'name', now, 'desc', [], 'perlnk', None, {}, [], {}, now, {}),
-            (True, 'name', now, 'desc', [], 'perlnk', {}, None, [], {}, now, {}),
-            (True, 'name', now, 'desc', [], 'perlnk', {}, {}, [], None, now, {}),
-            (True, 'name', now, 'desc', [], 'perlnk', {}, {}, [], {}, None, {}),
-            (True, 'name', now, 'desc', [], 'perlnk', {}, {}, [], {}, now, None),
-        ]
-    )
-    def test_deployment_model_creation(self, service_instance_storage, is_valid, name, created_at,
-                                       description, inputs, permalink, policy_triggers,
-                                       policy_types, outputs, scaling_groups, updated_at,
-                                       workflows):
-        service_instance = _test_model(
-            is_valid=is_valid,
-            storage=service_instance_storage,
-            model_cls=ServiceInstance,
-            model_kwargs=dict(
-                name=name,
-                service_template=service_instance_storage.service_template.list()[0],
-                created_at=created_at,
-                description=description,
-                inputs=inputs,
-                permalink=permalink,
-                policy_triggers=policy_triggers,
-                policy_types=policy_types,
-                outputs=outputs,
-                scaling_groups=scaling_groups,
-                updated_at=updated_at,
-                workflows=workflows
-            ))
-        if is_valid:
-            assert service_instance.service_template == \
-                   service_instance_storage.service_template.list()[0]
-
-
-class TestExecution(object):
-
-    @pytest.mark.parametrize(
-        'is_valid, created_at, started_at, ended_at, error, is_system_workflow, parameters, '
-        'status, workflow_name',
-        [
-            (False, m_cls, now, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
-            (False, now, m_cls, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
-            (False, now, now, m_cls, 'error', False, {}, Execution.STARTED, 'wf_name'),
-            (False, now, now, now, m_cls, False, {}, Execution.STARTED, 'wf_name'),
-            (False, now, now, now, 'error', False, m_cls, Execution.STARTED, 'wf_name'),
-            (False, now, now, now, 'error', False, {}, m_cls, 'wf_name'),
-            (False, now, now, now, 'error', False, {}, Execution.STARTED, m_cls),
-
-            (True, now, now, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
-            (True, now, None, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
-            (True, now, now, None, 'error', False, {}, Execution.STARTED, 'wf_name'),
-            (True, now, now, now, None, False, {}, Execution.STARTED, 'wf_name'),
-            (True, now, now, now, 'error', False, None, Execution.STARTED, 'wf_name'),
-        ]
-    )
-    def test_execution_model_creation(self, service_instance_storage, is_valid, created_at,
-                                      started_at, ended_at, error, is_system_workflow, parameters,
-                                      status, workflow_name):
-        execution = _test_model(
-            is_valid=is_valid,
-            storage=service_instance_storage,
-            model_cls=Execution,
-            model_kwargs=dict(
-                service_instance=service_instance_storage.service_instance.list()[0],
-                created_at=created_at,
-                started_at=started_at,
-                ended_at=ended_at,
-                error=error,
-                is_system_workflow=is_system_workflow,
-                parameters=parameters,
-                status=status,
-                workflow_name=workflow_name,
-            ))
-        if is_valid:
-            assert execution.service_instance == service_instance_storage.service_instance.list()[0]
-            assert execution.service_template == service_instance_storage.service_template.list()[0]
-
-    def test_execution_status_transition(self):
-        def create_execution(status):
-            execution = Execution(
-                id='e_id',
-                workflow_name='w_name',
-                status=status,
-                parameters={},
-                created_at=now,
-            )
-            return execution
-
-        valid_transitions = {
-            Execution.PENDING: [Execution.STARTED,
-                                Execution.CANCELLED,
-                                Execution.PENDING],
-            Execution.STARTED: [Execution.FAILED,
-                                Execution.TERMINATED,
-                                Execution.CANCELLED,
-                                Execution.CANCELLING,
-                                Execution.STARTED],
-            Execution.CANCELLING: [Execution.FAILED,
-                                   Execution.TERMINATED,
-                                   Execution.CANCELLED,
-                                   Execution.CANCELLING],
-            Execution.FAILED: [Execution.FAILED],
-            Execution.TERMINATED: [Execution.TERMINATED],
-            Execution.CANCELLED: [Execution.CANCELLED]
-        }
-
-        invalid_transitions = {
-            Execution.PENDING: [Execution.FAILED,
-                                Execution.TERMINATED,
-                                Execution.CANCELLING],
-            Execution.STARTED: [Execution.PENDING],
-            Execution.CANCELLING: [Execution.PENDING,
-                                   Execution.STARTED],
-            Execution.FAILED: [Execution.PENDING,
-                               Execution.STARTED,
-                               Execution.TERMINATED,
-                               Execution.CANCELLED,
-                               Execution.CANCELLING],
-            Execution.TERMINATED: [Execution.PENDING,
-                                   Execution.STARTED,
-                                   Execution.FAILED,
-                                   Execution.CANCELLED,
-                                   Execution.CANCELLING],
-            Execution.CANCELLED: [Execution.PENDING,
-                                  Execution.STARTED,
-                                  Execution.FAILED,
-                                  Execution.TERMINATED,
-                                  Execution.CANCELLING],
-        }
-
-        for current_status, valid_transitioned_statues in valid_transitions.items():
-            for transitioned_status in valid_transitioned_statues:
-                execution = create_execution(current_status)
-                execution.status = transitioned_status
-
-        for current_status, invalid_transitioned_statues in invalid_transitions.items():
-            for transitioned_status in invalid_transitioned_statues:
-                execution = create_execution(current_status)
-                with pytest.raises(ValueError):
-                    execution.status = transitioned_status
-
-
-class TestServiceInstanceUpdate(object):
-    @pytest.mark.parametrize(
-        'is_valid, created_at, deployment_plan, service_instance_update_node_instances, '
-        'service_instance_update_service_instance, service_instance_update_nodes, '
-        'modified_entity_ids, state',
-        [
-            (False, m_cls, {}, {}, {}, [], {}, 'state'),
-            (False, now, m_cls, {}, {}, [], {}, 'state'),
-            (False, now, {}, m_cls, {}, [], {}, 'state'),
-            (False, now, {}, {}, m_cls, [], {}, 'state'),
-            (False, now, {}, {}, {}, m_cls, {}, 'state'),
-            (False, now, {}, {}, {}, [], m_cls, 'state'),
-            (False, now, {}, {}, {}, [], {}, m_cls),
-
-            (True, now, {}, {}, {}, [], {}, 'state'),
-            (True, now, {}, None, {}, [], {}, 'state'),
-            (True, now, {}, {}, None, [], {}, 'state'),
-            (True, now, {}, {}, {}, None, {}, 'state'),
-            (True, now, {}, {}, {}, [], None, 'state'),
-            (True, now, {}, {}, {}, [], {}, None),
-        ]
-    )
-    def test_service_instance_update_model_creation(
-            self, service_instance_storage, is_valid, created_at, deployment_plan,
-            service_instance_update_node_instances, service_instance_update_service_instance,
-            service_instance_update_nodes, modified_entity_ids, state):
-        service_instance_update = _test_model(
-            is_valid=is_valid,
-            storage=service_instance_storage,
-            model_cls=ServiceInstanceUpdate,
-            model_kwargs=dict(
-                service_instance=service_instance_storage.service_instance.list()[0],
-                created_at=created_at,
-                service_instance_plan=deployment_plan,
-                service_instance_update_node_instances=service_instance_update_node_instances,
-                service_instance_update_service_instance=service_instance_update_service_instance,
-                service_instance_update_nodes=service_instance_update_nodes,
-                modified_entity_ids=modified_entity_ids,
-                state=state,
-            ))
-        if is_valid:
-            assert service_instance_update.service_instance == \
-                   service_instance_storage.service_instance.list()[0]
-
-
-class TestServiceInstanceUpdateStep(object):
-
-    @pytest.mark.parametrize(
-        'is_valid, action, entity_id, entity_type',
-        [
-            (False, m_cls, 'id', ServiceInstanceUpdateStep.ENTITY_TYPES.NODE),
-            (False, ServiceInstanceUpdateStep.ACTION_TYPES.ADD, m_cls,
-             ServiceInstanceUpdateStep.ENTITY_TYPES.NODE),
-            (False, ServiceInstanceUpdateStep.ACTION_TYPES.ADD, 'id', m_cls),
-
-            (True, ServiceInstanceUpdateStep.ACTION_TYPES.ADD, 'id',
-             ServiceInstanceUpdateStep.ENTITY_TYPES.NODE)
-        ]
-    )
-    def test_deployment_update_step_model_creation(self, service_instance_update_storage, is_valid,
-                                                   action, entity_id, entity_type):
-        service_instance_update_step = _test_model(
-            is_valid=is_valid,
-            storage=service_instance_update_storage,
-            model_cls=ServiceInstanceUpdateStep,
-            model_kwargs=dict(
-                service_instance_update=
-                service_instance_update_storage.service_instance_update.list()[0],
-                action=action,
-                entity_id=entity_id,
-                entity_type=entity_type
-            ))
-        if is_valid:
-            assert service_instance_update_step.service_instance_update == \
-                   service_instance_update_storage.service_instance_update.list()[0]
-
-    def test_deployment_update_step_order(self):
-        add_node = ServiceInstanceUpdateStep(
-            id='add_step',
-            action='add',
-            entity_type='node',
-            entity_id='node_id')
-
-        modify_node = ServiceInstanceUpdateStep(
-            id='modify_step',
-            action='modify',
-            entity_type='node',
-            entity_id='node_id')
-
-        remove_node = ServiceInstanceUpdateStep(
-            id='remove_step',
-            action='remove',
-            entity_type='node',
-            entity_id='node_id')
-
-        for step in (add_node, modify_node, remove_node):
-            assert hash((step.id, step.entity_id)) == hash(step)
-
-        assert remove_node < modify_node < add_node
-        assert not remove_node > modify_node > add_node
-
-        add_rel = ServiceInstanceUpdateStep(
-            id='add_step',
-            action='add',
-            entity_type='relationship',
-            entity_id='relationship_id')
-
-        remove_rel = ServiceInstanceUpdateStep(
-            id='remove_step',
-            action='remove',
-            entity_type='relationship',
-            entity_id='relationship_id')
-
-        assert remove_rel < remove_node < add_node < add_rel
-        assert not add_node < None
-
-
-class TestDeploymentModification(object):
-    @pytest.mark.parametrize(
-        'is_valid, context, created_at, ended_at, modified_node_templates, nodes, status',
-        [
-            (False, m_cls, now, now, {}, {}, ServiceInstanceModification.STARTED),
-            (False, {}, m_cls, now, {}, {}, ServiceInstanceModification.STARTED),
-            (False, {}, now, m_cls, {}, {}, ServiceInstanceModification.STARTED),
-            (False, {}, now, now, m_cls, {}, ServiceInstanceModification.STARTED),
-            (False, {}, now, now, {}, m_cls, ServiceInstanceModification.STARTED),
-            (False, {}, now, now, {}, {}, m_cls),
-
-            (True, {}, now, now, {}, {}, ServiceInstanceModification.STARTED),
-            (True, {}, now, None, {}, {}, ServiceInstanceModification.STARTED),
-            (True, {}, now, now, None, {}, ServiceInstanceModification.STARTED),
-            (True, {}, now, now, {}, None, ServiceInstanceModification.STARTED),
-        ]
-    )
-    def test_deployment_modification_model_creation(
-            self, service_instance_storage, is_valid, context, created_at, ended_at,
-            modified_node_templates, nodes, status):
-        deployment_modification = _test_model(
-            is_valid=is_valid,
-            storage=service_instance_storage,
-            model_cls=ServiceInstanceModification,
-            model_kwargs=dict(
-                service_instance=service_instance_storage.service_instance.list()[0],
-                context=context,
-                created_at=created_at,
-                ended_at=ended_at,
-                modified_nodes=modified_node_templates,
-                node_instances=nodes,
-                status=status,
-            ))
-        if is_valid:
-            assert deployment_modification.service_instance == \
-                   service_instance_storage.service_instance.list()[0]
-
-
-class TestNodeTemplate(object):
-    @pytest.mark.parametrize(
-        'is_valid, name, default_instances, max_instances, min_instances, plugins, properties, '
-        'type_name, type_hierarchy',
-        [
-            (False, m_cls, 1, 1, 1, [], [], 'type', []),
-            (False, 'name', m_cls, 1, 1, [], [], 'type', []),
-            (False, 'name', 1, m_cls, 1, [], [], 'type', []),
-            (False, 'name', 1, 1, m_cls, [], [], 'type', []),
-            (False, 'name', 1, 1, 1, m_cls, [], 'type', []),
-            (False, 'name', 1, 1, 1, [], [], m_cls, []),
-            (False, 'name', 1, 1, 1, [], [], 'type', m_cls),
-            #
-            (True, 'name', 1, 1, 1, [], [], 'type', []),
-            (True, 'name', 1, 1, 1, None, [], 'type', []),
-            (True, 'name', 1, 1, 1, [], [], 'type', None),
-        ]
-    )
-    def test_node_model_creation(self, service_instance_storage, is_valid, name,
-                                 default_instances, max_instances, min_instances, plugins,
-                                 properties, type_name, type_hierarchy):
-        node_template = _test_model(
-            is_valid=is_valid,
-            storage=service_instance_storage,
-            model_cls=NodeTemplate,
-            model_kwargs=dict(
-                name=name,
-                default_instances=default_instances,
-                max_instances=max_instances,
-                min_instances=min_instances,
-                plugins=plugins,
-                properties=properties,
-                type_name=type_name,
-                type_hierarchy=type_hierarchy,
-                service_template=service_instance_storage.service_template.list()[0]
-            ))
-        if is_valid:
-            assert node_template.service_template == \
-                   service_instance_storage.service_template.list()[0]
-
-
-class TestNode(object):
-    @pytest.mark.parametrize(
-        'is_valid, name, runtime_properties, scaling_groups, state, version',
-        [
-            (False, m_cls, {}, [], 'state', 1),
-            (False, 'name', m_cls, [], 'state', 1),
-            (False, 'name', {}, m_cls, 'state', 1),
-            (False, 'name', {}, [], m_cls, 1),
-            (False, m_cls, {}, [], 'state', m_cls),
-
-            (True, 'name', {}, [], 'state', 1),
-            (True, None, {}, [], 'state', 1),
-            (True, 'name', None, [], 'state', 1),
-            (True, 'name', {}, None, 'state', 1),
-            (True, 'name', {}, [], 'state', None),
-        ]
-    )
-    def test_node_instance_model_creation(self, node_template_storage, is_valid, name,
-                                          runtime_properties, scaling_groups, state, version):
-        node_instance = _test_model(
-            is_valid=is_valid,
-            storage=node_template_storage,
-            model_cls=Node,
-            model_kwargs=dict(
-                node_template=node_template_storage.node_template.list()[0],
-                name=name,
-                runtime_properties=runtime_properties,
-                scaling_groups=scaling_groups,
-                state=state,
-                version=version,
-                service_instance=node_template_storage.service_instance.list()[0]
-            ))
-        if is_valid:
-            assert node_instance.node_template == node_template_storage.node_template.list()[0]
-            assert node_instance.service_instance == \
-                   node_template_storage.service_instance.list()[0]
-
-
-class TestNodeInstanceIP(object):
-
-    ip = '1.1.1.1'
-
-    def test_ip_on_none_hosted_node_instance(self, service_instance_storage):
-        node_template = self._node_template(service_instance_storage, ip='not considered')
-        node = self._node(service_instance_storage,
-                          node_template,
-                          is_host=False,
-                          ip='not considered')
-        assert node.ip is None
-
-    def test_property_ip_on_host_node_instance(self, service_instance_storage):
-        node_template = self._node_template(service_instance_storage, ip=self.ip)
-        node = self._node(service_instance_storage, node_template, is_host=True, ip=None)
-        assert node.ip == self.ip
-
-    def test_runtime_property_ip_on_host_node_instance(self, service_instance_storage):
-        node_template = self._node_template(service_instance_storage, ip='not considered')
-        node = self._node(service_instance_storage, node_template, is_host=True, ip=self.ip)
-        assert node.ip == self.ip
-
-    def test_no_ip_configured_on_host_node_instance(self, service_instance_storage):
-        node_template = self._node_template(service_instance_storage, ip=None)
-        node = self._node(service_instance_storage, node_template, is_host=True, ip=None)
-        assert node.ip is None
-
-    def test_runtime_property_on_hosted_node_instance(self, service_instance_storage):
-        host_node_template = self._node_template(service_instance_storage, ip=None)
-        host_node = self._node(service_instance_storage,
-                               host_node_template,
-                               is_host=True,
-                               ip=self.ip)
-        node_template = self._node_template(service_instance_storage, ip=None)
-        node = self._node(service_instance_storage,
-                          node_template,
-                          is_host=False,
-                          ip=None,
-                          host_fk=host_node.id)
-        assert node.ip == self.ip
-
-    def _node_template(self, storage, ip):
-        kwargs = dict(
-            name='node_template',
-            default_instances=1,
-            max_instances=1,
-            min_instances=1,
-            type_name='',
-            service_template=storage.service_template.list()[0]
-        )
-        if ip:
-            kwargs['properties'] = [Parameter(name='ip', type='str', str_value=str(ip))]
-        node = NodeTemplate(**kwargs)
-        storage.node_template.put(node)
-        return node
-
-    def _node(self, storage, node, is_host, ip, host_fk=None):
-        kwargs = dict(
-            name='node',
-            node_template=node,
-            runtime_properties={},
-            state='',
-            service_instance=storage.service_instance.list()[0]
-        )
-        if ip:
-            kwargs['runtime_properties']['ip'] = ip
-        if is_host:
-            kwargs['host_fk'] = 1
-        elif host_fk:
-            kwargs['host_fk'] = host_fk
-        node_instance = Node(**kwargs)
-        storage.node.put(node_instance)
-        return node_instance
-
-
-class TestRelationshipInstance(object):
-    @pytest.mark.parametrize(
-        'is_valid, source_requirement_index, target_node_id, target_capability_name, type_name, '
-        'template_name, type_hierarchy, source_position, target_position',
-        [
-            (False, m_cls, 'target_node_id', 'target_cap_name', 'type_name', 'template_name', [], 0,
-             0),
-            (False, 0, m_cls, 'target_cap_name', 'type_name', 'template_name', [], 0, 0),
-            (False, 0, 'target_node_id', m_cls, 'type_name', 'template_name', [], 0, 0),
-            (False, 0, 'target_node_id', 'target_cap_name', m_cls, 'template_name', [], 0, 0),
-            (False, 0, 'target_node_id', 'target_cap_name', 'type_name', m_cls, [], 0, 0),
-            (False, 0, 'target_node_id', 'target_cap_name', 'type_name', 'template_name', m_cls, 0,
-             0),
-            (False, 0, 'target_node_id', 'target_cap_name', 'type_name', 'template_name', [], m_cls,
-             0),
-            (False, 0, 'target_node_id', 'target_cap_name', 'type_name', 'template_name', [], 0,
-             m_cls),
-
-            (True, 0, 'target_node_id', 'target_cap_name', 'type_name', 'template_name', [], 0, 0),
-            (True, None, 'target_node_id', 'target_cap_name', 'type_name', 'template_name', [], 0,
-             0),
-            (True, 0, None, 'target_cap_name', 'type_name', 'template_name', [], 0, 0),
-            (True, 0, 'target_node_id', None, 'type_name', 'template_name', [], 0, 0),
-            (True, 0, 'target_node_id', 'target_cap_name', None, 'template_name', [], 0, 0),
-            (True, 0, 'target_node_id', 'target_cap_name', 'type_name', None, [], 0, 0),
-            (True, 0, 'target_node_id', 'target_cap_name', 'type_name', 'template_name', [], None,
-             0),
-            (True, 0, 'target_node_id', 'target_cap_name', 'type_name', 'template_name', [], 0,
-             None),
-
-        ]
-    )
-    def test_relationship(self, nodes_storage, is_valid, source_requirement_index, target_node_id,
-                          target_capability_name, type_name, template_name, type_hierarchy,
-                          source_position, target_position):
-        _test_model(is_valid=is_valid,
-                    storage=nodes_storage,
-                    model_cls=Relationship,
-                    model_kwargs=dict(
-                        source_requirement_index=source_requirement_index,
-                        target_node_id=target_node_id,
-                        target_capability_name=target_capability_name,
-                        type_name=type_name,
-                        template_name=template_name,
-                        type_hierarchy=type_hierarchy,
-                        source_position=source_position,
-                        target_position=target_position
-                    ))
-
-
-class TestPlugin(object):
-    @pytest.mark.parametrize(
-        'is_valid, archive_name, distribution, distribution_release, '
-        'distribution_version, package_name, package_source, '
-        'package_version, supported_platform, supported_py_versions, uploaded_at, wheels',
-        [
-            (False, m_cls, 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (False, 'arc_name', m_cls, 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (False, 'arc_name', 'dis_name', m_cls, 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (False, 'arc_name', 'dis_name', 'dis_rel', m_cls, 'pak_name', 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', m_cls, 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', m_cls, 'pak_ver',
-             'sup_plat', [], now, []),
-            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', m_cls,
-             'sup_plat', [], now, []),
-            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
-             'pak_ver', m_cls, [], now, []),
-            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
-             'pak_ver', 'sup_plat', m_cls, now, []),
-            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
-             'pak_ver', 'sup_plat', [], m_cls, []),
-            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
-             'pak_ver', 'sup_plat', [], now, m_cls),
-
-            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (True, 'arc_name', None, 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (True, 'arc_name', 'dis_name', None, 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (True, 'arc_name', 'dis_name', 'dis_rel', None, 'pak_name', 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
-             'pak_ver', 'sup_plat', [], now, []),
-            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', None, 'pak_ver',
-             'sup_plat', [], now, []),
-            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', None,
-             'sup_plat', [], now, []),
-            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
-             'pak_ver', None, [], now, []),
-            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
-             'pak_ver', 'sup_plat', None, now, []),
-            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
-             'pak_ver', 'sup_plat', [], now, []),
-        ]
-    )
-    def test_plugin_model_creation(self, empty_storage, is_valid, archive_name, distribution,
-                                   distribution_release, distribution_version,
-                                   package_name, package_source, package_version,
-                                   supported_platform, supported_py_versions, uploaded_at, wheels):
-        _test_model(is_valid=is_valid,
-                    storage=empty_storage,
-                    model_cls=Plugin,
-                    model_kwargs=dict(
-                        archive_name=archive_name,
-                        distribution=distribution,
-                        distribution_release=distribution_release,
-                        distribution_version=distribution_version,
-                        package_name=package_name,
-                        package_source=package_source,
-                        package_version=package_version,
-                        supported_platform=supported_platform,
-                        supported_py_versions=supported_py_versions,
-                        uploaded_at=uploaded_at,
-                        wheels=wheels,
-                    ))
-
-
-class TestTask(object):
-
-    @pytest.mark.parametrize(
-        'is_valid, status, due_at, started_at, ended_at, max_attempts, retry_count, '
-        'retry_interval, ignore_failure, name, operation_mapping, inputs, plugin_id',
-        [
-            (False, m_cls, now, now, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
-            (False, Task.STARTED, m_cls, now, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
-            (False, Task.STARTED, now, m_cls, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
-            (False, Task.STARTED, now, now, m_cls, 1, 1, 1, True, 'name', 'map', {}, '1'),
-            (False, Task.STARTED, now, now, now, m_cls, 1, 1, True, 'name', 'map', {}, '1'),
-            (False, Task.STARTED, now, now, now, 1, m_cls, 1, True, 'name', 'map', {}, '1'),
-            (False, Task.STARTED, now, now, now, 1, 1, m_cls, True, 'name', 'map', {}, '1'),
-            (False, Task.STARTED, now, now, now, 1, 1, 1, True, m_cls, 'map', {}, '1'),
-            (False, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', m_cls, {}, '1'),
-            (False, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', 'map', m_cls, '1'),
-            (False, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', 'map', {}, m_cls),
-
-            (True, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
-            (True, Task.STARTED, None, now, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
-            (True, Task.STARTED, now, None, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
-            (True, Task.STARTED, now, now, None, 1, 1, 1, True, 'name', 'map', {}, '1'),
-            (True, Task.STARTED, now, now, now, 1, None, 1, True, 'name', 'map', {}, '1'),
-            (True, Task.STARTED, now, now, now, 1, 1, None, True, 'name', 'map', {}, '1'),
-            (True, Task.STARTED, now, now, now, 1, 1, 1, None, 'name', 'map', {}, '1'),
-            (True, Task.STARTED, now, now, now, 1, 1, 1, True, None, 'map', {}, '1'),
-            (True, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', None, {}, '1'),
-            (True, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', 'map', None, '1'),
-            (True, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', 'map', {}, None),
-        ]
-    )
-    def test_task_model_creation(self, execution_storage, is_valid, status, due_at, started_at,
-                                 ended_at, max_attempts, retry_count, retry_interval,
-                                 ignore_failure, name, operation_mapping, inputs, plugin_id):
-        task = _test_model(
-            is_valid=is_valid,
-            storage=execution_storage,
-            model_cls=Task,
-            model_kwargs=dict(
-                status=status,
-                execution=execution_storage.execution.list()[0],
-                due_at=due_at,
-                started_at=started_at,
-                ended_at=ended_at,
-                max_attempts=max_attempts,
-                retry_count=retry_count,
-                retry_interval=retry_interval,
-                ignore_failure=ignore_failure,
-                name=name,
-                implementation=operation_mapping,
-                inputs=inputs,
-                plugin_fk=plugin_id,
-            ))
-        if is_valid:
-            assert task.execution == execution_storage.execution.list()[0]
-            if task.plugin:
-                assert task.plugin == execution_storage.plugin.list()[0]
-
-    def test_task_max_attempts_validation(self):
-        def create_task(max_attempts):
-            Task(execution_fk='eid',
-                 name='name',
-                 implementation='',
-                 inputs={},
-                 max_attempts=max_attempts)
-        create_task(max_attempts=1)
-        create_task(max_attempts=2)
-        create_task(max_attempts=Task.INFINITE_RETRIES)
-        with pytest.raises(ValueError):
-            create_task(max_attempts=0)
-        with pytest.raises(ValueError):
-            create_task(max_attempts=-2)
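All of the parametrized tests above funnel into a shared _test_model helper that is defined elsewhere in this test module and not shown in this hunk. As a rough sketch of the pattern they rely on (the storage lookup convention and the expected exception types are assumptions here, not the helper's actual code):

    import pytest
    from sqlalchemy.exc import SQLAlchemyError

    def _test_model(is_valid, storage, model_cls, model_kwargs):
        if is_valid:
            # Valid combinations should construct and persist cleanly; the
            # instance is returned so callers can assert on relationships.
            instance = model_cls(**model_kwargs)
            getattr(storage, model_cls.__tablename__).put(instance)
            return instance
        # Invalid combinations are expected to blow up either on construction
        # or when flushed to the database.
        with pytest.raises((ValueError, SQLAlchemyError)):
            instance = model_cls(**model_kwargs)
            getattr(storage, model_cls.__tablename__).put(instance)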


[05/12] incubator-ariatosca git commit: ARIA-105 Integrate parser and orchestrator models

Posted by em...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/storage/modeling/template_elements.py
----------------------------------------------------------------------
diff --git a/aria/storage/modeling/template_elements.py b/aria/storage/modeling/template_elements.py
deleted file mode 100644
index 4212b15..0000000
--- a/aria/storage/modeling/template_elements.py
+++ /dev/null
@@ -1,1387 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from copy import deepcopy
-from types import FunctionType
-
-from sqlalchemy import (
-    Column,
-    Text,
-    Integer,
-    DateTime,
-    Boolean,
-)
-from sqlalchemy.ext.associationproxy import association_proxy
-from sqlalchemy.ext.declarative import declared_attr
-
-from aria.parser import validation
-from aria.utils import collections, formatting, console
-
-from . import (
-    utils,
-    instance_elements,
-    structure,
-    type as aria_type
-)
-
-# pylint: disable=no-self-argument, no-member, abstract-method
-
-
-# region Element templates
-
-
-class ServiceTemplateBase(structure.ModelMixin):
-
-    __tablename__ = 'service_template'
-
-    __private_fields__ = ['substitution_template_fk']
-
-    description = Column(Text)
-    metadata = Column(Text)
-
-    # region orchestrator required columns
-
-    created_at = Column(DateTime, nullable=False, index=True)
-    main_file_name = Column(Text)
-    plan = Column(aria_type.Dict, nullable=False)
-    updated_at = Column(DateTime)
-
-    # endregion
-
-    # region foreign keys
-    @declared_attr
-    def substitution_template_fk(cls):
-        return cls.foreign_key('substitution_template', nullable=True)
-
-    # endregion
-
-    # region one-to-one relationships
-    @declared_attr
-    def substitution_template(cls):
-        return cls.one_to_one_relationship('substitution_template')
-    # endregion
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def inputs(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='inputs')
-
-    @declared_attr
-    def outputs(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='outputs')
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('description', self.description),
-            ('metadata', formatting.as_raw(self.metadata)),
-            ('node_templates', formatting.as_raw_list(self.node_templates)),
-            ('group_templates', formatting.as_raw_list(self.group_templates)),
-            ('policy_templates', formatting.as_raw_list(self.policy_templates)),
-            ('substitution_template', formatting.as_raw(self.substitution_template)),
-            ('inputs', formatting.as_raw_dict(self.inputs)),
-            ('outputs', formatting.as_raw_dict(self.outputs)),
-            ('operation_templates', formatting.as_raw_list(self.operation_templates))))
-
-    def instantiate(self, context, container):
-        service_instance = instance_elements.ServiceInstanceBase()
-        context.modeling.instance = service_instance
-
-        service_instance.description = deepcopy_with_locators(self.description)
-
-        if self.metadata is not None:
-            service_instance.metadata = self.metadata.instantiate(context, container)
-
-        for node_template in self.node_templates.itervalues():
-            for _ in range(node_template.default_instances):
-                node = node_template.instantiate(context, container)
-                service_instance.nodes[node.id] = node
-
-        utils.instantiate_dict(context, self, service_instance.groups, self.group_templates)
-        utils.instantiate_dict(context, self, service_instance.policies, self.policy_templates)
-        utils.instantiate_dict(context, self, service_instance.operations, self.operation_templates)
-
-        if self.substitution_template is not None:
-            service_instance.substitution = self.substitution_template.instantiate(context,
-                                                                                   container)
-
-        utils.instantiate_dict(context, self, service_instance.inputs, self.inputs)
-        utils.instantiate_dict(context, self, service_instance.outputs, self.outputs)
-
-        for name, the_input in context.modeling.inputs.iteritems():
-            if name not in service_instance.inputs:
-                context.validation.report('input "%s" is not supported' % name)
-            else:
-                service_instance.inputs[name].value = the_input
-
-        return service_instance
-
-    def validate(self, context):
-        if self.metadata is not None:
-            self.metadata.validate(context)
-        utils.validate_dict_values(context, self.node_templates)
-        utils.validate_dict_values(context, self.group_templates)
-        utils.validate_dict_values(context, self.policy_templates)
-        if self.substitution_template is not None:
-            self.substitution_template.validate(context)
-        utils.validate_dict_values(context, self.inputs)
-        utils.validate_dict_values(context, self.outputs)
-        utils.validate_dict_values(context, self.operation_templates)
-
-    def coerce_values(self, context, container, report_issues):
-        if self.metadata is not None:
-            self.metadata.coerce_values(context, container, report_issues)
-        utils.coerce_dict_values(context, container, self.node_templates, report_issues)
-        utils.coerce_dict_values(context, container, self.group_templates, report_issues)
-        utils.coerce_dict_values(context, container, self.policy_templates, report_issues)
-        if self.substitution_template is not None:
-            self.substitution_template.coerce_values(context, container, report_issues)
-        utils.coerce_dict_values(context, container, self.inputs, report_issues)
-        utils.coerce_dict_values(context, container, self.outputs, report_issues)
-        utils.coerce_dict_values(context, container, self.operation_templates, report_issues)
-
-    def dump(self, context):
-        if self.description is not None:
-            console.puts(context.style.meta(self.description))
-        if self.metadata is not None:
-            self.metadata.dump(context)
-        for node_template in self.node_templates.itervalues():
-            node_template.dump(context)
-        for group_template in self.group_templates.itervalues():
-            group_template.dump(context)
-        for policy_template in self.policy_templates.itervalues():
-            policy_template.dump(context)
-        if self.substitution_template is not None:
-            self.substitution_template.dump(context)
-        dump_parameters(context, self.inputs, 'Inputs')
-        dump_parameters(context, self.outputs, 'Outputs')
-        utils.dump_dict_values(context, self.operation_templates, 'Operation templates')
-
-
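deepcopy_with_locators, which the instantiate methods above and below call on every description, is presumably defined elsewhere in this package and is not visible in this hunk. Conceptually it is a deepcopy that also carries over the locator metadata the parser attaches, roughly along these lines (the attribute handling is an assumption, not the package's actual code):

    from copy import deepcopy

    def deepcopy_with_locators(value):
        # Copy the template value itself...
        result = deepcopy(value)
        # ...and keep the parser locator, if any, so validation issues raised
        # against the copy can still point back to the original YAML location.
        locator = getattr(value, '_locator', None)
        if locator is not None:
            try:
                setattr(result, '_locator', locator)
            except AttributeError:
                pass
        return result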
-class InterfaceTemplateBase(structure.ModelMixin):
-    __tablename__ = 'interface_template'
-
-    __private_fields__ = ['node_template_fk',
-                          'group_template_fk']
-
-    # region foreign keys
-
-    @declared_attr
-    def node_template_fk(cls):
-        return cls.foreign_key('node_template', nullable=True)
-
-    @declared_attr
-    def group_template_fk(cls):
-        return cls.foreign_key('group_template', nullable=True)
-
-    # endregion
-
-    description = Column(Text)
-    type_name = Column(Text)
-
-    # region many-to-one relationship
-    @declared_attr
-    def node_template(cls):
-        return cls.many_to_one_relationship('node_template')
-
-    @declared_attr
-    def group_template(cls):
-        return cls.many_to_one_relationship('group_template')
-
-    # endregion
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def inputs(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='inputs')
-
-    @declared_attr
-    def properties(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='properties')
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('inputs', formatting.as_raw_dict(self.inputs)),  # pylint: disable=no-member
-            ('operation_templates', formatting.as_raw_list(self.operation_templates))))
-
-    def instantiate(self, context, container):
-        interface = instance_elements.InterfaceBase(self.name, self.type_name)
-        interface.description = deepcopy_with_locators(self.description)
-        utils.instantiate_dict(context, container, interface.inputs, self.inputs)
-        utils.instantiate_dict(context, container, interface.operations, self.operation_templates)
-        return interface
-
-    def validate(self, context):
-        if self.type_name:
-            if context.modeling.interface_types.get_descendant(self.type_name) is None:
-                context.validation.report('interface "%s" has an unknown type: %s'
-                                          % (self.name, formatting.safe_repr(self.type_name)),
-                                          level=validation.Issue.BETWEEN_TYPES)
-
-        utils.validate_dict_values(context, self.inputs)
-        utils.validate_dict_values(context, self.operation_templates)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, container, self.inputs, report_issues)
-        utils.coerce_dict_values(context, container, self.operation_templates, report_issues)
-
-    def dump(self, context):
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Interface type: %s' % context.style.type(self.type_name))
-            dump_parameters(context, self.inputs, 'Inputs')
-            utils.dump_dict_values(context, self.operation_templates, 'Operation templates')
-
-
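The utils.instantiate_dict / utils.validate_dict_values helpers that every template class here leans on come from the sibling utils module in this package. The instantiate side is conceptually just a name-preserving map from templates to instances, something like the following sketch (not the module's actual code):

    def instantiate_dict(context, container, target_dict, source_dict):
        # Instantiate every named template into the corresponding instance
        # dict under the same name. Templates may return None when they cannot
        # be instantiated (see MappingTemplateBase further down), in which case
        # the entry is simply skipped.
        if not source_dict:
            return
        for name, template in source_dict.items():
            instance = template.instantiate(context, container)
            if instance is not None:
                target_dict[name] = instance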
-class OperationTemplateBase(structure.ModelMixin):
-    __tablename__ = 'operation_template'
-
-    __private_fields__ = ['service_template_fk',
-                          'interface_template_fk']
-
-    # region foreign keys
-
-    @declared_attr
-    def service_template_fk(cls):
-        return cls.foreign_key('service_template', nullable=True)
-
-    @declared_attr
-    def interface_template_fk(cls):
-        return cls.foreign_key('interface_template', nullable=True)
-
-    # endregion
-
-    description = Column(Text)
-    implementation = Column(Text)
-    dependencies = Column(aria_type.StrictList(item_cls=basestring))
-    executor = Column(Text)
-    max_retries = Column(Integer)
-    retry_interval = Column(Integer)
-
-    # region orchestrator required columns
-    plugin = Column(Text)
-    operation = Column(Boolean)
-
-    # endregion
-
-    # region many-to-one relationships
-    @declared_attr
-    def service_template(cls):
-        return cls.many_to_one_relationship('service_template')
-
-    @declared_attr
-    def interface_template(cls):
-        return cls.many_to_one_relationship('interface_template')
-    # endregion
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def inputs(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='inputs')
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('implementation', self.implementation),
-            ('dependencies', self.dependencies),
-            ('executor', self.executor),
-            ('max_retries', self.max_retries),
-            ('retry_interval', self.retry_interval),
-            ('inputs', formatting.as_raw_dict(self.inputs))))
-
-    def instantiate(self, context, container):
-        operation = instance_elements.OperationBase(self.name)
-        operation.description = deepcopy_with_locators(self.description)
-        operation.implementation = self.implementation
-        operation.dependencies = self.dependencies
-        operation.executor = self.executor
-        operation.max_retries = self.max_retries
-        operation.retry_interval = self.retry_interval
-        utils.instantiate_dict(context, container, operation.inputs, self.inputs)
-        return operation
-
-    def validate(self, context):
-        utils.validate_dict_values(context, self.inputs)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, container, self.inputs, report_issues)
-
-    def dump(self, context):
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            if self.implementation is not None:
-                console.puts('Implementation: %s' % context.style.literal(self.implementation))
-            if self.dependencies:
-                console.puts('Dependencies: %s' % ', '.join(
-                    (str(context.style.literal(v)) for v in self.dependencies)))
-            if self.executor is not None:
-                console.puts('Executor: %s' % context.style.literal(self.executor))
-            if self.max_retries is not None:
-                console.puts('Max retries: %s' % context.style.literal(self.max_retries))
-            if self.retry_interval is not None:
-                console.puts('Retry interval: %s' % context.style.literal(self.retry_interval))
-            dump_parameters(context, self.inputs, 'Inputs')
-
-
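dump_parameters, used by every dump method in this file, is another helper that falls outside this hunk. Its intent is a titled, indented listing of a parameter dict via the console/style utilities imported at the top of the module; a hedged sketch (the parameter.value access and exact styling are assumptions):

    def dump_parameters(context, parameters, name='Properties'):
        # Nothing to print for an empty or missing dict.
        if not parameters:
            return
        console.puts('%s:' % name)
        with context.style.indent:
            for parameter_name, parameter in parameters.items():
                console.puts('%s = %s' % (parameter_name,
                                          context.style.literal(parameter.value)))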
-class ArtifactTemplateBase(structure.ModelMixin):
-    """
-    A file associated with a :class:`NodeTemplate`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`source_path`: Source path (CSAR or repository)
-    * :code:`target_path`: Path at destination machine
-    * :code:`repository_url`: Repository URL
-    * :code:`repository_credential`: Dict of string
-    * :code:`properties`: Dict of :class:`Parameter`
-    """
-    __tablename__ = 'artifact_template'
-
-    __private_fields__ = ['node_template_fk']
-
-    # region foreign keys
-
-    @declared_attr
-    def node_template_fk(cls):
-        return cls.foreign_key('node_template')
-
-    # endregion
-
-    description = Column(Text)
-    type_name = Column(Text)
-    source_path = Column(Text)
-    target_path = Column(Text)
-    repository_url = Column(Text)
-    repository_credential = Column(aria_type.StrictDict(basestring, basestring))
-
-    # region many-to-one relationship
-    @declared_attr
-    def node_template(cls):
-        return cls.many_to_one_relationship('node_template')
-    # endregion
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def properties(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='properties')
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('source_path', self.source_path),
-            ('target_path', self.target_path),
-            ('repository_url', self.repository_url),
-            ('repository_credential', formatting.as_agnostic(self.repository_credential)),
-            ('properties', formatting.as_raw_dict(self.properties))))
-
-    def instantiate(self, context, container):
-        artifact = instance_elements.ArtifactBase(self.name, self.type_name, self.source_path)
-        artifact.description = deepcopy_with_locators(self.description)
-        artifact.target_path = self.target_path
-        artifact.repository_url = self.repository_url
-        artifact.repository_credential = self.repository_credential
-        utils.instantiate_dict(context, container, artifact.properties, self.properties)
-        return artifact
-
-    def validate(self, context):
-        if context.modeling.artifact_types.get_descendant(self.type_name) is None:
-            context.validation.report('artifact "%s" has an unknown type: %s'
-                                      % (self.name, formatting.safe_repr(self.type_name)),
-                                      level=validation.Issue.BETWEEN_TYPES)
-
-        utils.validate_dict_values(context, self.properties)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, container, self.properties, report_issues)
-
-    def dump(self, context):
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Artifact type: %s' % context.style.type(self.type_name))
-            console.puts('Source path: %s' % context.style.literal(self.source_path))
-            if self.target_path is not None:
-                console.puts('Target path: %s' % context.style.literal(self.target_path))
-            if self.repository_url is not None:
-                console.puts('Repository URL: %s' % context.style.literal(self.repository_url))
-            if self.repository_credential:
-                console.puts('Repository credential: %s'
-                             % context.style.literal(self.repository_credential))
-            dump_parameters(context, self.properties)
-
-
-class PolicyTemplateBase(structure.ModelMixin):
-    """
-    Policies can be applied to zero or more :class:`NodeTemplate` or :class:`GroupTemplate`
-    instances.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`target_node_template_names`: Must be represented in the :class:`ServiceModel`
-    * :code:`target_group_template_names`: Must be represented in the :class:`ServiceModel`
-    """
-    __tablename__ = 'policy_template'
-
-    __private_fields__ = ['service_template_fk',
-                          'group_template_fk']
-
-    # region foreign keys
-
-    @declared_attr
-    def service_template_fk(cls):
-        return cls.foreign_key('service_template')
-
-    @declared_attr
-    def group_template_fk(cls):
-        return cls.foreign_key('group_template')
-
-    # endregion
-
-    description = Column(Text)
-    type_name = Column(Text)
-    target_node_template_names = Column(aria_type.StrictList(basestring))
-    target_group_template_names = Column(aria_type.StrictList(basestring))
-
-    # region many-to-one relationship
-    @declared_attr
-    def service_template(cls):
-        return cls.many_to_one_relationship('service_template')
-
-    @declared_attr
-    def group_template(cls):
-        return cls.many_to_one_relationship('group_template')
-
-    # endregion
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def properties(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='properties')
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('properties', formatting.as_raw_dict(self.properties)),
-            ('target_node_template_names', self.target_node_template_names),
-            ('target_group_template_names', self.target_group_template_names)))
-
-    def instantiate(self, context, *args, **kwargs):
-        policy = instance_elements.PolicyBase(self.name, self.type_name)
-        utils.instantiate_dict(context, self, policy.properties, self.properties)
-        for node_template_name in self.target_node_template_names:
-            policy.target_node_ids.extend(
-                context.modeling.instance.get_node_ids(node_template_name))
-        for group_template_name in self.target_group_template_names:
-            policy.target_group_ids.extend(
-                context.modeling.instance.get_group_ids(group_template_name))
-        return policy
-
-    def validate(self, context):
-        if context.modeling.policy_types.get_descendant(self.type_name) is None:
-            context.validation.report('policy template "%s" has an unknown type: %s'
-                                      % (self.name, formatting.safe_repr(self.type_name)),
-                                      level=validation.Issue.BETWEEN_TYPES)
-
-        utils.validate_dict_values(context, self.properties)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, self, self.properties, report_issues)
-
-    def dump(self, context):
-        console.puts('Policy template: %s' % context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Type: %s' % context.style.type(self.type_name))
-            dump_parameters(context, self.properties)
-            if self.target_node_template_names:
-                console.puts('Target node templates: %s' % ', '.join(
-                    (str(context.style.node(v)) for v in self.target_node_template_names)))
-            if self.target_group_template_names:
-                console.puts('Target group templates: %s' % ', '.join(
-                    (str(context.style.node(v)) for v in self.target_group_template_names)))
-
-
-class GroupPolicyTemplateBase(structure.ModelMixin):
-    """
-    Policies applied to groups.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`triggers`: Dict of :class:`GroupPolicyTrigger`
-    """
-
-    __tablename__ = 'group_policy_template'
-
-    __private_fields__ = ['group_template_fk']
-
-    # region foreign keys
-    @declared_attr
-    def group_template_fk(cls):
-        return cls.foreign_key('group_template')
-
-    # endregion
-
-    description = Column(Text)
-    type_name = Column(Text)
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def properties(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='properties')
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('properties', formatting.as_raw_dict(self.properties)),
-            ('triggers', formatting.as_raw_list(self.triggers))))
-
-    def instantiate(self, context, container):
-        group_policy = instance_elements.GroupPolicyBase(self.name, self.type_name)
-        group_policy.description = deepcopy_with_locators(self.description)
-        utils.instantiate_dict(context, container, group_policy.properties, self.properties)
-        utils.instantiate_dict(context, container, group_policy.triggers, self.triggers)
-        return group_policy
-
-    def validate(self, context):
-        if context.modeling.policy_types.get_descendant(self.type_name) is None:
-            context.validation.report('group policy "%s" has an unknown type: %s'
-                                      % (self.name, formatting.safe_repr(self.type_name)),
-                                      level=validation.Issue.BETWEEN_TYPES)
-
-        utils.validate_dict_values(context, self.properties)
-        utils.validate_dict_values(context, self.triggers)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, container, self.properties, report_issues)
-        utils.coerce_dict_values(context, container, self.triggers, report_issues)
-
-    def dump(self, context):
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Group policy type: %s' % context.style.type(self.type_name))
-            dump_parameters(context, self.properties)
-            utils.dump_dict_values(context, self.triggers, 'Triggers')
-
-
-class GroupPolicyTriggerTemplateBase(structure.ModelMixin):
-    """
-    Triggers for :class:`GroupPolicyTemplate`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`implementation`: Implementation string (interpreted by the orchestrator)
-    * :code:`properties`: Dict of :class:`Parameter`
-    """
-    __tablename__ = 'group_policy_trigger_template'
-
-    __private_fields__ = ['group_policy_template_fk']
-
-    # region foreign keys
-
-    @declared_attr
-    def group_policy_template_fk(cls):
-        return cls.foreign_key('group_policy_template')
-
-    # endregion
-
-    description = Column(Text)
-    implementation = Column(Text)
-
-    # region many-to-one relationship
-    @declared_attr
-    def group_policy_template(cls):
-        return cls.many_to_one_relationship('group_policy_template')
-
-    # endregion
-
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def properties(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='properties')
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('implementation', self.implementation),
-            ('properties', formatting.as_raw_dict(self.properties))))
-
-    def instantiate(self, context, container):
-        group_policy_trigger = instance_elements.GroupPolicyTriggerBase(self.name,
-                                                                        self.implementation)
-        group_policy_trigger.description = deepcopy_with_locators(self.description)
-        utils.instantiate_dict(context, container, group_policy_trigger.properties,
-                               self.properties)
-        return group_policy_trigger
-
-    def validate(self, context):
-        utils.validate_dict_values(context, self.properties)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, container, self.properties, report_issues)
-
-    def dump(self, context):
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Implementation: %s' % context.style.literal(self.implementation))
-            dump_parameters(context, self.properties)
-
-
-class MappingTemplateBase(structure.ModelMixin):
-    """
-    Used by :class:`SubstitutionTemplate` to map a capability or a requirement to a node.
-
-    Properties:
-
-    * :code:`mapped_name`: Exposed capability or requirement name
-    * :code:`node_template_name`: Must be represented in the :class:`ServiceModel`
-    * :code:`name`: Name of capability or requirement at the node template
-    """
-    __tablename__ = 'mapping_template'
-
-    mapped_name = Column(Text)
-    node_template_name = Column(Text)
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('mapped_name', self.mapped_name),
-            ('node_template_name', self.node_template_name),
-            ('name', self.name)))
-
-    def instantiate(self, context, *args, **kwargs):
-        nodes = context.modeling.instance.find_nodes(self.node_template_name)
-        if len(nodes) == 0:
-            context.validation.report(
-                'mapping "%s" refers to node template "%s" but there are no '
-                'node instances' % (self.mapped_name,
-                                    self.node_template_name),
-                level=validation.Issue.BETWEEN_INSTANCES)
-            return None
-        return instance_elements.MappingBase(self.mapped_name, nodes[0].id, self.name)
-
-    def validate(self, context):
-        if self.node_template_name not in context.modeling.model.node_templates:
-            context.validation.report('mapping "%s" refers to an unknown node template: %s'
-                                      % (
-                                          self.mapped_name,
-                                          formatting.safe_repr(self.node_template_name)),
-                                      level=validation.Issue.BETWEEN_TYPES)
-
-    def dump(self, context):
-        console.puts('%s -> %s.%s' % (context.style.node(self.mapped_name),
-                                      context.style.node(self.node_template_name),
-                                      context.style.node(self.name)))
-
-
-class SubstitutionTemplateBase(structure.ModelMixin):
-    """
-    Used to substitute a single node for the entire deployment.
-
-    Properties:
-
-    * :code:`node_type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`capability_templates`: Dict of :class:`MappingTemplate`
-    * :code:`requirement_templates`: Dict of :class:`MappingTemplate`
-    """
-    __tablename__ = 'substitution_template'
-    node_type_name = Column(Text)
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def capability_templates(cls):
-        return cls.many_to_many_relationship('mapping_template',
-                                             table_prefix='capability_templates',
-                                             relationship_kwargs=dict(lazy='dynamic'))
-
-    @declared_attr
-    def requirement_templates(cls):
-        return cls.many_to_many_relationship('mapping_template',
-                                             table_prefix='requirement_templates',
-                                             relationship_kwargs=dict(lazy='dynamic'))
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('node_type_name', self.node_type_name),
-            ('capability_templates', formatting.as_raw_list(self.capability_templates)),
-            ('requirement_templates', formatting.as_raw_list(self.requirement_templates))))
-
-    def instantiate(self, context, container):
-        substitution = instance_elements.SubstitutionBase(self.node_type_name)
-        utils.instantiate_dict(context, container, substitution.capabilities,
-                               self.capability_templates)
-        utils.instantiate_dict(context, container, substitution.requirements,
-                               self.requirement_templates)
-        return substitution
-
-    def validate(self, context):
-        if context.modeling.node_types.get_descendant(self.node_type_name) is None:
-            context.validation.report('substitution template has an unknown type: %s'
-                                      % formatting.safe_repr(self.node_type_name),
-                                      level=validation.Issue.BETWEEN_TYPES)
-
-        utils.validate_dict_values(context, self.capability_templates)
-        utils.validate_dict_values(context, self.requirement_templates)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, self, self.capability_templates, report_issues)
-        utils.coerce_dict_values(context, self, self.requirement_templates, report_issues)
-
-    def dump(self, context):
-        console.puts('Substitution template:')
-        with context.style.indent:
-            console.puts('Node type: %s' % context.style.type(self.node_type_name))
-            utils.dump_dict_values(context, self.capability_templates,
-                                   'Capability template mappings')
-            utils.dump_dict_values(context, self.requirement_templates,
-                                   'Requirement template mappings')
-
-
-# endregion
-
-# region Node templates
-
-class NodeTemplateBase(structure.ModelMixin):
-    __tablename__ = 'node_template'
-
-    __private_fields__ = ['service_template_fk',
-                          'host_fk']
-
-    # region foreign_keys
-
-    @declared_attr
-    def service_template_fk(cls):
-        return cls.foreign_key('service_template')
-
-    @declared_attr
-    def host_fk(cls):
-        return cls.foreign_key('node_template', nullable=True)
-
-    # endregion
-
-    description = Column(Text)
-    type_name = Column(Text)
-    default_instances = Column(Integer, default=1)
-    min_instances = Column(Integer, default=0)
-    max_instances = Column(Integer, default=None)
-    target_node_template_constraints = Column(aria_type.StrictList(FunctionType))
-
-    # region orchestrator required columns
-
-    plugins = Column(aria_type.List)
-    type_hierarchy = Column(aria_type.List)
-
-    @declared_attr
-    def host(cls):
-        return cls.relationship_to_self('host_fk')
-
-    @declared_attr
-    def service_template_name(cls):
-        return association_proxy('service_template', cls.name_column_name())
-
-    # endregion
-
-    # region many-to-one relationship
-    @declared_attr
-    def service_template(cls):
-        return cls.many_to_one_relationship('service_template')
-
-    # endregion
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def properties(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='properties')
-
-    # endregion
-
-    def is_target_node_valid(self, target_node_template):
-        if self.target_node_template_constraints:
-            for node_type_constraint in self.target_node_template_constraints:
-                if not node_type_constraint(target_node_template, self):
-                    return False
-        return True
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('default_instances', self.default_instances),
-            ('min_instances', self.min_instances),
-            ('max_instances', self.max_instances),
-            ('properties', formatting.as_raw_dict(self.properties)),
-            ('interface_templates', formatting.as_raw_list(self.interface_templates)),
-            ('artifact_templates', formatting.as_raw_list(self.artifact_templates)),
-            ('capability_templates', formatting.as_raw_list(self.capability_templates)),
-            ('requirement_templates', formatting.as_raw_list(self.requirement_templates))))
-
-    def instantiate(self, context, *args, **kwargs):
-        node = instance_elements.NodeBase(context, self.type_name, self.name)
-        utils.instantiate_dict(context, node, node.properties, self.properties)
-        utils.instantiate_dict(context, node, node.interfaces, self.interface_templates)
-        utils.instantiate_dict(context, node, node.artifacts, self.artifact_templates)
-        utils.instantiate_dict(context, node, node.capabilities, self.capability_templates)
-        return node
-
-    def validate(self, context):
-        if context.modeling.node_types.get_descendant(self.type_name) is None:
-            context.validation.report('node template "%s" has an unknown type: %s'
-                                      % (self.name,
-                                         formatting.safe_repr(self.type_name)),
-                                      level=validation.Issue.BETWEEN_TYPES)
-
-        utils.validate_dict_values(context, self.properties)
-        utils.validate_dict_values(context, self.interface_templates)
-        utils.validate_dict_values(context, self.artifact_templates)
-        utils.validate_dict_values(context, self.capability_templates)
-        utils.validate_list_values(context, self.requirement_templates)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, self, self.properties, report_issues)
-        utils.coerce_dict_values(context, self, self.interface_templates, report_issues)
-        utils.coerce_dict_values(context, self, self.artifact_templates, report_issues)
-        utils.coerce_dict_values(context, self, self.capability_templates, report_issues)
-        utils.coerce_list_values(context, self, self.requirement_templates, report_issues)
-
-    def dump(self, context):
-        console.puts('Node template: %s' % context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Type: %s' % context.style.type(self.type_name))
-            console.puts('Instances: %d (%d%s)'
-                         % (self.default_instances,
-                            self.min_instances,
-                            (' to %d' % self.max_instances
-                             if self.max_instances is not None
-                             else ' or more')))
-            dump_parameters(context, self.properties)
-            utils.dump_interfaces(context, self.interface_templates)
-            utils.dump_dict_values(context, self.artifact_templates, 'Artifact templates')
-            utils.dump_dict_values(context, self.capability_templates, 'Capability templates')
-            utils.dump_list_values(context, self.requirement_templates, 'Requirement templates')
-
-
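Note that is_target_node_valid above treats each entry of target_node_template_constraints as a plain callable receiving the candidate target template and the source template. Purely as an illustration (real constraints are produced by the parser, not hand-written like this), a constraint could be:

    def same_service_template(target_node_template, source_node_template):
        # Accept only targets that belong to the same service template as the
        # source node template (illustrative constraint).
        return (target_node_template.service_template_name ==
                source_node_template.service_template_name)

    # node_template.target_node_template_constraints = [same_service_template]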
-class GroupTemplateBase(structure.ModelMixin):
-    """
-    A template for creating zero or more :class:`Group` instances.
-
-    Groups are logical containers for zero or more nodes that allow applying zero or more
-    :class:`GroupPolicy` instances to the nodes together.
-
-    Properties:
-
-    * :code:`name`: Name (will be used as a prefix for group IDs)
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`interface_templates`: Dict of :class:`InterfaceTemplate`
-    * :code:`policy_templates`: Dict of :class:`GroupPolicyTemplate`
-    * :code:`member_node_template_names`: Must be represented in the :class:`ServiceModel`
-    * :code:`member_group_template_names`: Must be represented in the :class:`ServiceModel`
-    """
-    __tablename__ = 'group_template'
-
-    __private_fields__ = ['service_template_fk']
-
-    # region foreign keys
-
-    @declared_attr
-    def service_template_fk(cls):
-        return cls.foreign_key('service_template')
-
-    # endregion
-
-    description = Column(Text)
-    type_name = Column(Text)
-    member_node_template_names = Column(aria_type.StrictList(basestring))
-    member_group_template_names = Column(aria_type.StrictList(basestring))
-
-    # region many-to-one relationship
-    @declared_attr
-    def service_template(cls):
-        return cls.many_to_one_relationship('service_template')
-
-    # endregion
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def properties(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='properties')
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('properties', formatting.as_raw_dict(self.properties)),
-            ('interface_templates', formatting.as_raw_list(self.interface_templates)),
-            ('policy_templates', formatting.as_raw_list(self.policy_templates)),
-            ('member_node_template_names', self.member_node_template_names),
-            ('member_group_template_names', self.member_group_template_names)))
-
-    def instantiate(self, context, *args, **kwargs):
-        group = instance_elements.GroupBase(context, self.type_name, self.name)
-        utils.instantiate_dict(context, self, group.properties, self.properties)
-        utils.instantiate_dict(context, self, group.interfaces, self.interface_templates)
-        utils.instantiate_dict(context, self, group.policies, self.policy_templates)
-        for member_node_template_name in self.member_node_template_names:
-            group.member_node_ids += \
-                context.modeling.instance.get_node_ids(member_node_template_name)
-        for member_group_template_name in self.member_group_template_names:
-            group.member_group_ids += \
-                context.modeling.instance.get_group_ids(member_group_template_name)
-        return group
-
-    def validate(self, context):
-        if context.modeling.group_types.get_descendant(self.type_name) is None:
-            context.validation.report('group template "%s" has an unknown type: %s'
-                                      % (self.name, formatting.safe_repr(self.type_name)),
-                                      level=validation.Issue.BETWEEN_TYPES)
-
-        utils.validate_dict_values(context, self.properties)
-        utils.validate_dict_values(context, self.interface_templates)
-        utils.validate_dict_values(context, self.policy_templates)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, self, self.properties, report_issues)
-        utils.coerce_dict_values(context, self, self.interface_templates, report_issues)
-        utils.coerce_dict_values(context, self, self.policy_templates, report_issues)
-
-    def dump(self, context):
-        console.puts('Group template: %s' % context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            if self.type_name:
-                console.puts('Type: %s' % context.style.type(self.type_name))
-            dump_parameters(context, self.properties)
-            utils.dump_interfaces(context, self.interface_templates)
-            utils.dump_dict_values(context, self.policy_templates, 'Policy templates')
-            if self.member_node_template_names:
-                console.puts('Member node templates: %s' % ', '.join(
-                    (str(context.style.node(v)) for v in self.member_node_template_names)))
-
-
-# endregion
-
-# region Relationship templates
-
-class RequirementTemplateBase(structure.ModelMixin):
-    """
-    A requirement for a :class:`NodeTemplate`. During instantiation it will be matched with a
-    capability of another node.
-
-    Requirements may optionally contain a :class:`RelationshipTemplate` that will be created
-    between the nodes.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`target_node_type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`target_node_template_name`: Must be represented in the :class:`ServiceModel`
-    * :code:`target_node_template_constraints`: List of :class:`FunctionType`
-    * :code:`target_capability_type_name`: Type of capability in target node
-    * :code:`target_capability_name`: Name of capability in target node
-    * :code:`relationship_template`: :class:`RelationshipTemplate`
-    """
-    __tablename__ = 'requirement_template'
-
-    __private_fields__ = ['node_template_fk']
-
-    # region foreign keys
-
-    @declared_attr
-    def node_template_fk(cls):
-        return cls.foreign_key('node_template', nullable=True)
-
-    # endregion
-
-
-    target_node_type_name = Column(Text)
-    target_node_template_name = Column(Text)
-    target_node_template_constraints = Column(aria_type.StrictList(FunctionType))
-    target_capability_type_name = Column(Text)
-    target_capability_name = Column(Text)
-    # CHECK: ???
-    relationship_template = Column(Text)  # optional
-
-    # region many-to-one relationship
-    @declared_attr
-    def node_template(cls):
-        return cls.many_to_one_relationship('node_template')
-    # endregion
-
-    def instantiate(self, context, container):
-        raise NotImplementedError
-
-    def find_target(self, context, source_node_template):
-        # We might already have a specific node template, so we'll just verify it
-        if self.target_node_template_name is not None:
-            target_node_template = \
-                context.modeling.model.node_templates.get(self.target_node_template_name)
-
-            if not source_node_template.is_target_node_valid(target_node_template):
-                context.validation.report('requirement "%s" of node template "%s" is for node '
-                                          'template "%s" but it does not match constraints'
-                                          % (self.name,
-                                             source_node_template.name,
-                                             self.target_node_template_name),
-                                          level=validation.Issue.BETWEEN_TYPES)
-                return None, None
-
-            if self.target_capability_type_name is not None \
-                    or self.target_capability_name is not None:
-                target_node_capability = self.find_target_capability(context,
-                                                                     source_node_template,
-                                                                     target_node_template)
-                if target_node_capability is None:
-                    return None, None
-            else:
-                target_node_capability = None
-
-            return target_node_template, target_node_capability
-
-        # Find first node that matches the type
-        elif self.target_node_type_name is not None:
-            for target_node_template in context.modeling.model.node_templates.itervalues():
-                if not context.modeling.node_types.is_descendant(self.target_node_type_name,
-                                                                 target_node_template.type_name):
-                    continue
-
-                if not source_node_template.is_target_node_valid(target_node_template):
-                    continue
-
-                target_node_capability = self.find_target_capability(context,
-                                                                     source_node_template,
-                                                                     target_node_template)
-                if target_node_capability is None:
-                    continue
-
-                return target_node_template, target_node_capability
-
-        return None, None
-
-    def find_target_capability(self, context, source_node_template, target_node_template):
-        for capability_template in target_node_template.capability_templates.itervalues():
-            if capability_template.satisfies_requirement(context,
-                                                         source_node_template,
-                                                         self,
-                                                         target_node_template):
-                return capability_template
-        return None
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('target_node_type_name', self.target_node_type_name),
-            ('target_node_template_name', self.target_node_template_name),
-            ('target_capability_type_name', self.target_capability_type_name),
-            ('target_capability_name', self.target_capability_name),
-            ('relationship_template', formatting.as_raw(self.relationship_template))))
-
-    def validate(self, context):
-        node_types = context.modeling.node_types
-        capability_types = context.modeling.capability_types
-        if self.target_node_type_name \
-                and node_types.get_descendant(self.target_node_type_name) is None:
-            context.validation.report('requirement "%s" refers to an unknown node type: %s'
-                                      % (self.name,
-                                         formatting.safe_repr(self.target_node_type_name)),
-                                      level=validation.Issue.BETWEEN_TYPES)
-        if self.target_capability_type_name and \
-                capability_types.get_descendant(self.target_capability_type_name) is None:
-            context.validation.report('requirement "%s" refers to an unknown capability type: %s'
-                                      % (self.name,
-                                         formatting.safe_repr(self.target_capability_type_name)),
-                                      level=validation.Issue.BETWEEN_TYPES)
-        if self.relationship_template:
-            self.relationship_template.validate(context)
-
-    def coerce_values(self, context, container, report_issues):
-        if self.relationship_template is not None:
-            self.relationship_template.coerce_values(context, container, report_issues)
-
-    def dump(self, context):
-        if self.name:
-            console.puts(context.style.node(self.name))
-        else:
-            console.puts('Requirement:')
-        with context.style.indent:
-            if self.target_node_type_name is not None:
-                console.puts('Target node type: %s'
-                             % context.style.type(self.target_node_type_name))
-            elif self.target_node_template_name is not None:
-                console.puts('Target node template: %s'
-                             % context.style.node(self.target_node_template_name))
-            if self.target_capability_type_name is not None:
-                console.puts('Target capability type: %s'
-                             % context.style.type(self.target_capability_type_name))
-            elif self.target_capability_name is not None:
-                console.puts('Target capability name: %s'
-                             % context.style.node(self.target_capability_name))
-            if self.target_node_template_constraints:
-                console.puts('Target node template constraints:')
-                with context.style.indent:
-                    for constraint in self.target_node_template_constraints:
-                        console.puts(context.style.literal(constraint))
-            if self.relationship_template:
-                console.puts('Relationship:')
-                with context.style.indent:
-                    self.relationship_template.dump(context)
-
-
-class CapabilityTemplateBase(structure.ModelMixin):
-    """
-    A capability of a :class:`NodeTemplate`. Nodes expose zero or more capabilities that can be
-    matched with :class:`Requirement` instances of other nodes.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`min_occurrences`: Minimum number of requirement matches required
-    * :code:`max_occurrences`: Maximum number of requirement matches allowed
-    * :code:`valid_source_node_type_names`: Must be represented in the :class:`ModelingContext`
-    * :code:`properties`: Dict of :class:`Parameter`
-    """
-    __tablename__ = 'capability_template'
-
-    __private_fields__ = ['node_template_fk']
-
-    # region foreign keys
-
-    @declared_attr
-    def node_template_fk(cls):
-        return cls.foreign_key('node_template', nullable=True)
-
-    # endregion
-
-    description = Column(Text)
-    type_name = Column(Text)
-    min_occurrences = Column(Integer, default=None)  # optional
-    max_occurrences = Column(Integer, default=None)  # optional
-    # CHECK: type?
-    valid_source_node_type_names = Column(Text)
-
-    # region many-to-one relationship
-    @declared_attr
-    def node_template(cls):
-        return cls.many_to_one_relationship('node_template')
-    # endregion
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def properties(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='properties')
-
-    # endregion
-
-    def satisfies_requirement(self,
-                              context,
-                              source_node_template,
-                              requirement,
-                              target_node_template):
-        # Do we match the required capability type?
-        capability_types = context.modeling.capability_types
-        if not capability_types.is_descendant(requirement.target_capability_type_name,
-                                              self.type_name):
-            return False
-
-        # Are we in valid_source_node_type_names?
-        if self.valid_source_node_type_names:
-            for valid_source_node_type_name in self.valid_source_node_type_names:
-                if not context.modeling.node_types.is_descendant(valid_source_node_type_name,
-                                                                 source_node_template.type_name):
-                    return False
-
-        # Apply requirement constraints
-        if requirement.target_node_template_constraints:
-            for node_type_constraint in requirement.target_node_template_constraints:
-                if not node_type_constraint(target_node_template, source_node_template):
-                    return False
-
-        return True
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('min_occurrences', self.min_occurrences),
-            ('max_occurrences', self.max_occurrences),
-            ('valid_source_node_type_names', self.valid_source_node_type_names),
-            ('properties', formatting.as_raw_dict(self.properties))))
-
-    def instantiate(self, context, container):
-        capability = instance_elements.CapabilityBase(self.name, self.type_name)
-        capability.min_occurrences = self.min_occurrences
-        capability.max_occurrences = self.max_occurrences
-        utils.instantiate_dict(context, container, capability.properties, self.properties)
-        return capability
-
-    def validate(self, context):
-        if context.modeling.capability_types.get_descendant(self.type_name) is None:
-            context.validation.report('capability "%s" refers to an unknown type: %s'
-                                      % (self.name,
-                                         formatting.safe_repr(self.type_name)),
-                                      level=validation.Issue.BETWEEN_TYPES)
-
-        utils.validate_dict_values(context, self.properties)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, self, self.properties, report_issues)
-
-    def dump(self, context):
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Type: %s' % context.style.type(self.type_name))
-            console.puts(
-                'Occurrences: %d%s'
-                % (self.min_occurrences or 0, (' to %d' % self.max_occurrences)
-                   if self.max_occurrences is not None else ' or more'))
-            if self.valid_source_node_type_names:
-                console.puts('Valid source node types: %s'
-                             % ', '.join((str(context.style.type(v))
-                                          for v in self.valid_source_node_type_names)))
-            dump_parameters(context, self.properties)
-
-# endregion
-
-
-def dump_parameters(context, parameters, name='Properties'):
-    if not parameters:
-        return
-    console.puts('%s:' % name)
-    with context.style.indent:
-        for parameter_name, parameter in parameters.items():
-            if parameter.type_name is not None:
-                console.puts('%s = %s (%s)' % (context.style.property(parameter_name),
-                                               context.style.literal(parameter.value),
-                                               context.style.type(parameter.type_name)))
-            else:
-                console.puts('%s = %s' % (context.style.property(parameter_name),
-                                          context.style.literal(parameter.value)))
-            if parameter.description:
-                console.puts(context.style.meta(parameter.description))
-
-
-# TODO (left for tal): Move following two methods to some place parser specific
-def deepcopy_with_locators(value):
-    """
-    Like :code:`deepcopy`, but also copies over locators.
-    """
-
-    res = deepcopy(value)
-    copy_locators(res, value)
-    return res
-
-
-def copy_locators(target, source):
-    """
-    Copies over :code:`_locator` for all elements, recursively.
-
-    Assumes that target and source have exactly the same list/dict structure.
-    """
-
-    locator = getattr(source, '_locator', None)
-    if locator is not None:
-        try:
-            setattr(target, '_locator', locator)
-        except AttributeError:
-            pass
-
-    if isinstance(target, list) and isinstance(source, list):
-        for i, _ in enumerate(target):
-            copy_locators(target[i], source[i])
-    elif isinstance(target, dict) and isinstance(source, dict):
-        for k, v in target.items():
-            copy_locators(v, source[k])

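For context on the two locator helpers removed above: they assume the target and the source have exactly the same list/dict structure, and simply re-attach the parser's '_locator' markers onto the copy. A minimal, self-contained sketch of that behavior (not part of this commit; the Element class below is only for illustration):

    class Element(object):
        def __init__(self, name):
            self.name = name

    def copy_locators(target, source):
        # Re-attach '_locator' wherever the source had one, walking lists and dicts in lockstep
        locator = getattr(source, '_locator', None)
        if locator is not None:
            try:
                setattr(target, '_locator', locator)
            except AttributeError:
                pass
        if isinstance(target, list) and isinstance(source, list):
            for i, _ in enumerate(target):
                copy_locators(target[i], source[i])
        elif isinstance(target, dict) and isinstance(source, dict):
            for key, value in target.items():
                copy_locators(value, source[key])

    source = [Element('node'), {'nested': Element('capability')}]
    source[0]._locator = 'service.yaml:12'
    source[1]['nested']._locator = 'service.yaml:30'

    target = [Element('node'), {'nested': Element('capability')}]  # same shape, no locators yet
    copy_locators(target, source)
    assert target[0]._locator == 'service.yaml:12'
    assert target[1]['nested']._locator == 'service.yaml:30'
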
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/storage/modeling/type.py
----------------------------------------------------------------------
diff --git a/aria/storage/modeling/type.py b/aria/storage/modeling/type.py
deleted file mode 100644
index 9e3de3d..0000000
--- a/aria/storage/modeling/type.py
+++ /dev/null
@@ -1,302 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-from collections import namedtuple
-
-from sqlalchemy import (
-    TypeDecorator,
-    VARCHAR,
-    event
-)
-from sqlalchemy.ext import mutable
-
-from .. import exceptions
-
-
-class _MutableType(TypeDecorator):
-    """
-    Dict representation of type.
-    """
-    @property
-    def python_type(self):
-        raise NotImplementedError
-
-    def process_literal_param(self, value, dialect):
-        pass
-
-    impl = VARCHAR
-
-    def process_bind_param(self, value, dialect):
-        if value is not None:
-            value = json.dumps(value)
-        return value
-
-    def process_result_value(self, value, dialect):
-        if value is not None:
-            value = json.loads(value)
-        return value
-
-
-class Dict(_MutableType):
-    @property
-    def python_type(self):
-        return dict
-
-
-class List(_MutableType):
-    @property
-    def python_type(self):
-        return list
-
-
-class _StrictDictMixin(object):
-
-    @classmethod
-    def coerce(cls, key, value):
-        "Convert plain dictionaries to MutableDict."
-        try:
-            if not isinstance(value, cls):
-                if isinstance(value, dict):
-                    for k, v in value.items():
-                        cls._assert_strict_key(k)
-                        cls._assert_strict_value(v)
-                    return cls(value)
-                return mutable.MutableDict.coerce(key, value)
-            else:
-                return value
-        except ValueError as e:
-            raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e)))
-
-    def __setitem__(self, key, value):
-        self._assert_strict_key(key)
-        self._assert_strict_value(value)
-        super(_StrictDictMixin, self).__setitem__(key, value)
-
-    def setdefault(self, key, value):
-        self._assert_strict_key(key)
-        self._assert_strict_value(value)
-        super(_StrictDictMixin, self).setdefault(key, value)
-
-    def update(self, *args, **kwargs):
-        for k, v in kwargs.items():
-            self._assert_strict_key(k)
-            self._assert_strict_value(v)
-        super(_StrictDictMixin, self).update(*args, **kwargs)
-
-    @classmethod
-    def _assert_strict_key(cls, key):
-        if cls._key_cls is not None and not isinstance(key, cls._key_cls):
-            raise exceptions.StorageError("Key type was set strictly to {0}, but was {1}".format(
-                cls._key_cls, type(key)
-            ))
-
-    @classmethod
-    def _assert_strict_value(cls, value):
-        if cls._value_cls is not None and not isinstance(value, cls._value_cls):
-            raise exceptions.StorageError("Value type was set strictly to {0}, but was {1}".format(
-                cls._value_cls, type(value)
-            ))
-
-
-class _MutableDict(mutable.MutableDict):
-    """
-    Enables tracking for dict values.
-    """
-
-    @classmethod
-    def coerce(cls, key, value):
-        "Convert plain dictionaries to MutableDict."
-        try:
-            return mutable.MutableDict.coerce(key, value)
-        except ValueError as e:
-            raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e)))
-
-
-class _StrictListMixin(object):
-
-    @classmethod
-    def coerce(cls, key, value):
-        "Convert plain dictionaries to MutableDict."
-        try:
-            if not isinstance(value, cls):
-                if isinstance(value, list):
-                    for item in value:
-                        cls._assert_item(item)
-                    return cls(value)
-                return mutable.MutableList.coerce(key, value)
-            else:
-                return value
-        except ValueError as e:
-            raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e)))
-
-    def __setitem__(self, index, value):
-        """Detect list set events and emit change events."""
-        self._assert_item(value)
-        super(_StrictListMixin, self).__setitem__(index, value)
-
-    def append(self, item):
-        self._assert_item(item)
-        super(_StrictListMixin, self).append(item)
-
-    def extend(self, item):
-        self._assert_item(item)
-        super(_StrictListMixin, self).extend(item)
-
-    def insert(self, index, item):
-        self._assert_item(item)
-        super(_StrictListMixin, self).insert(index, item)
-
-    @classmethod
-    def _assert_item(cls, item):
-        if cls._item_cls is not None and not isinstance(item, cls._item_cls):
-            raise exceptions.StorageError("Key type was set strictly to {0}, but was {1}".format(
-                cls._item_cls, type(item)
-            ))
-
-
-class _MutableList(mutable.MutableList):
-
-    @classmethod
-    def coerce(cls, key, value):
-        "Convert plain dictionaries to MutableDict."
-        try:
-            return mutable.MutableList.coerce(key, value)
-        except ValueError as e:
-            raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e)))
-
-_StrictDictID = namedtuple('_StrictDictID', 'key_cls, value_cls')
-_StrictValue = namedtuple('_StrictValue', 'type_cls, listener_cls')
-
-
-class _StrictDict(object):
-    """
-    This entire class functions as a factory for strict dicts and their listeners.
-    No type class, and no listener type class is created more than once. If a relevant type class
-    exists it is returned.
-    """
-    _strict_map = {}
-
-    def __call__(self, key_cls=None, value_cls=None):
-        strict_dict_map_key = _StrictDictID(key_cls=key_cls, value_cls=value_cls)
-        if strict_dict_map_key not in self._strict_map:
-            key_cls_name = getattr(key_cls, '__name__', str(key_cls))
-            value_cls_name = getattr(value_cls, '__name__', str(value_cls))
-            # Create the type class itself. This class is what gets returned (and used by the
-            # SQLAlchemy Column).
-            strict_dict_cls = type(
-                'StrictDict_{0}_{1}'.format(key_cls_name, value_cls_name),
-                (Dict, ),
-                {}
-            )
-            # Creating the type listening class.
-            # The new class inherits from both the _MutableDict class and the _StrictDictMixin,
-            # while setting the necessary _key_cls and _value_cls as class attributes.
-            listener_cls = type(
-                'StrictMutableDict_{0}_{1}'.format(key_cls_name, value_cls_name),
-                (_StrictDictMixin, _MutableDict),
-                {'_key_cls': key_cls, '_value_cls': value_cls}
-            )
-            self._strict_map[strict_dict_map_key] = _StrictValue(type_cls=strict_dict_cls,
-                                                                 listener_cls=listener_cls)
-
-        return self._strict_map[strict_dict_map_key].type_cls
-
-StrictDict = _StrictDict()
-
-
-class _StrictList(object):
-    """
-    This entire class functions as a factory for strict lists and their listeners.
-    No type class, and no listener type class is created more than once. If a relevant type class
-    exists it is returned.
-    """
-    _strict_map = {}
-
-    def __call__(self, item_cls=None):
-
-        if item_cls not in self._strict_map:
-            item_cls_name = getattr(item_cls, '__name__', str(item_cls))
-            # Create the type class itself. This class is what gets returned (and used by the
-            # SQLAlchemy Column).
-            strict_list_cls = type(
-                'StrictList_{0}'.format(item_cls_name),
-                (List, ),
-                {}
-            )
-            # Creating the type listening class.
-            # The new class inherits from both the _MutableList class and the _StrictListMixin,
-            # while setting the necessary _item_cls as class attribute.
-            listener_cls = type(
-                'StrictMutableList_{0}'.format(item_cls_name),
-                (_StrictListMixin, _MutableList),
-                {'_item_cls': item_cls}
-            )
-            self._strict_map[item_cls] = _StrictValue(type_cls=strict_list_cls,
-                                                      listener_cls=listener_cls)
-
-        return self._strict_map[item_cls].type_cls
-
-StrictList = _StrictList()
-
-
-def _mutable_association_listener(mapper, cls):
-    strict_dict_type_to_listener = \
-        dict((v.type_cls, v.listener_cls) for v in _StrictDict._strict_map.values())
-
-    strict_list_type_to_listener = \
-        dict((v.type_cls, v.listener_cls) for v in _StrictList._strict_map.values())
-
-    for prop in mapper.column_attrs:
-        column_type = prop.columns[0].type
-        # Dict Listeners
-        if type(column_type) in strict_dict_type_to_listener:                                       # pylint: disable=unidiomatic-typecheck
-            strict_dict_type_to_listener[type(column_type)].associate_with_attribute(
-                getattr(cls, prop.key))
-        elif isinstance(column_type, Dict):
-            _MutableDict.associate_with_attribute(getattr(cls, prop.key))
-
-        # List Listeners
-        if type(column_type) in strict_list_type_to_listener:                                       # pylint: disable=unidiomatic-typecheck
-            strict_list_type_to_listener[type(column_type)].associate_with_attribute(
-                getattr(cls, prop.key))
-        elif isinstance(column_type, List):
-            _MutableList.associate_with_attribute(getattr(cls, prop.key))
-_LISTENER_ARGS = (mutable.mapper, 'mapper_configured', _mutable_association_listener)
-
-
-def _register_mutable_association_listener():
-    event.listen(*_LISTENER_ARGS)
-
-
-def remove_mutable_association_listener():
-    """
-    Remove the event listener that associates ``Dict`` and ``List`` column types with
-    ``MutableDict`` and ``MutableList``, respectively.
-
-    This call must happen before any model instance is instantiated.
-    This is because once it does, that would trigger the listener we are trying to remove.
-    Once it is triggered, many other listeners will then be registered.
-    At that point, it is too late.
-
-    The reason this function exists is that the association listener interferes with ARIA's
-    change tracking instrumentation, so a way to disable it is required.
-
-    Note that the event listener this call removes is registered by default.
-    """
-    if event.contains(*_LISTENER_ARGS):
-        event.remove(*_LISTENER_ARGS)
-
-_register_mutable_association_listener()

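The StrictDict/StrictList factories removed above follow a simple caching pattern: one generated type class (plus one mutation-listener class) per key/value class combination, reused on every later call. A dependency-free sketch of just the caching half (not part of this commit; it leaves out the SQLAlchemy listener machinery):

    class _StrictDictFactory(object):
        _cache = {}  # one generated class per (key_cls, value_cls) combination

        def __call__(self, key_cls=None, value_cls=None):
            cache_key = (key_cls, value_cls)
            if cache_key not in self._cache:
                name = 'StrictDict_{0}_{1}'.format(
                    getattr(key_cls, '__name__', str(key_cls)),
                    getattr(value_cls, '__name__', str(value_cls)))
                self._cache[cache_key] = type(name, (dict,),
                                              {'_key_cls': key_cls, '_value_cls': value_cls})
            return self._cache[cache_key]

    StrictDict = _StrictDictFactory()

    assert StrictDict(str, int) is StrictDict(str, int)      # same signature, class is reused
    assert StrictDict(str, int) is not StrictDict(str, str)  # new signature, new class
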
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/storage/modeling/utils.py
----------------------------------------------------------------------
diff --git a/aria/storage/modeling/utils.py b/aria/storage/modeling/utils.py
deleted file mode 100644
index 75e34f5..0000000
--- a/aria/storage/modeling/utils.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from random import randrange
-
-from shortuuid import ShortUUID
-
-from ...utils.console import puts
-
-
-# UUID = ShortUUID() # default alphabet is base57, which is alphanumeric without visually ambiguous
-# characters; ID length is 22
-UUID = ShortUUID(alphabet='abcdefghijklmnopqrstuvwxyz0123456789')  # alphanumeric; ID length is 25
-
-
-def generate_id_string(length=None):
-    """
-    A random string with a strong guarantee of universal uniqueness (uses UUID).
-
-    The default length is 25 characters.
-    """
-
-    the_id = UUID.uuid()
-    if length is not None:
-        the_id = the_id[:length]
-    return the_id
-
-
-def generate_hex_string():
-    """
-    A random string of 5 hex digits with no guarantee of universal uniqueness.
-    """
-
-    return '%05x' % randrange(16 ** 5)
-
-
-def validate_dict_values(context, the_dict):
-    if not the_dict:
-        return
-    validate_list_values(context, the_dict.itervalues())
-
-
-def validate_list_values(context, the_list):
-    if not the_list:
-        return
-    for value in the_list:
-        value.validate(context)
-
-
-def coerce_dict_values(context, container, the_dict, report_issues=False):
-    if not the_dict:
-        return
-    coerce_list_values(context, container, the_dict.itervalues(), report_issues)
-
-
-def coerce_list_values(context, container, the_list, report_issues=False):
-    if not the_list:
-        return
-    for value in the_list:
-        value.coerce_values(context, container, report_issues)
-
-
-def instantiate_dict(context, container, the_dict, from_dict):
-    if not from_dict:
-        return
-    for name, value in from_dict.iteritems():
-        value = value.instantiate(context, container)
-        if value is not None:
-            the_dict[name] = value
-
-
-def dump_list_values(context, the_list, name):
-    if not the_list:
-        return
-    puts('%s:' % name)
-    with context.style.indent:
-        for value in the_list:
-            value.dump(context)
-
-
-def dump_dict_values(context, the_dict, name):
-    if not the_dict:
-        return
-    dump_list_values(context, the_dict.itervalues(), name)
-
-
-def dump_parameters(context, parameters, name='Properties'):
-    if not parameters:
-        return
-    puts('%s:' % name)
-    with context.style.indent:
-        for parameter_name, parameter in parameters.iteritems():
-            if parameter.type_name is not None:
-                puts('%s = %s (%s)' % (context.style.property(parameter_name),
-                                       context.style.literal(parameter.value),
-                                       context.style.type(parameter.type_name)))
-            else:
-                puts('%s = %s' % (context.style.property(parameter_name),
-                                  context.style.literal(parameter.value)))
-            if parameter.description:
-                puts(context.style.meta(parameter.description))
-
-
-def dump_interfaces(context, interfaces, name='Interfaces'):
-    if not interfaces:
-        return
-    puts('%s:' % name)
-    with context.style.indent:
-        for interface in interfaces.itervalues():
-            interface.dump(context)
-
-
-def pluralize(noun):
-    if noun.endswith('s'):
-        return '{0}es'.format(noun)
-    elif noun.endswith('y'):
-        return '{0}ies'.format(noun[:-1])
-    else:
-        return '{0}s'.format(noun)
-
-
-class classproperty(object):                                                                        # pylint: disable=invalid-name
-    def __init__(self, f):
-        self._func = f
-
-    def __get__(self, instance, owner):
-        return self._func(owner)

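Of the helpers removed above, classproperty is the least self-explanatory: it is a descriptor that exposes a computed value on the class itself, with no instance required. A minimal usage sketch (not part of this commit; the Model class below is only for illustration):

    class classproperty(object):
        def __init__(self, f):
            self._func = f

        def __get__(self, instance, owner):
            return self._func(owner)  # called with the owning class, not an instance

    class Model(object):
        @classproperty
        def table_name(cls):
            return cls.__name__.lower() + 's'

    assert Model.table_name == 'models'  # accessed directly on the class
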
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/storage_initializer.py
----------------------------------------------------------------------
diff --git a/aria/storage_initializer.py b/aria/storage_initializer.py
deleted file mode 100644
index 8c154df..0000000
--- a/aria/storage_initializer.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from datetime import datetime
-from threading import RLock
-
-from .storage.modeling import model
-from .orchestrator import operation
-from .utils.formatting import safe_repr
-from .utils.console import puts, Colored
-
-
-def initialize_storage(context, model_storage, deployment_id):
-    blueprint = _create_blueprint(context)
-    model_storage.blueprint.put(blueprint)
-
-    deployment = _create_deployment(context, blueprint, deployment_id)
-    model_storage.deployment.put(deployment)
-
-    # Create nodes and node instances
-    for node_template in context.modeling.model.node_templates.values():
-        model_storage.node_template.put(node_template)
-
-        for a_node in context.modeling.instance.find_nodes(node_template.name):
-            node = _create_node_instance(deployment, node_template, a_node)
-            model_storage.node.put(node)
-
-    # Create relationships
-    for node_template in context.modeling.model.node_templates.values():
-        for index, requirement_template in enumerate(node_template.requirement_templates):
-            # We are currently limited only to requirements for specific node templates!
-            if requirement_template.target_node_template_name:
-                model_storage.requirement_template.put(requirement_template)
-
-                for node in context.modeling.instance.find_nodes(node_template.name):
-                    for relationship_model in node.relationships:
-                        if relationship_model.source_requirement_index == index:
-                            source_instance = \
-                                model_storage.node.get_by_name(node.id)
-                            target_instance = \
-                                model_storage.node.get_by_name(
-                                    relationship_model.target_node_id)
-                            relationship = \
-                                _create_relationship_instance(source_instance, target_instance)
-                            model_storage.relationship.put(relationship)
-
-
-def _create_blueprint(context):
-    now = datetime.utcnow()
-    main_file_name = unicode(context.presentation.location)
-    try:
-        name = context.modeling.model.metadata.values.get('template_name')
-    except AttributeError:
-        name = None
-
-    return model.ServiceTemplate(
-        plan={},
-        name=name or main_file_name,
-        description=context.modeling.model.description or '',
-        created_at=now,
-        updated_at=now,
-        main_file_name=main_file_name
-    )
-
-
-def _create_deployment(context, service_template, service_instance_id):
-    now = datetime.utcnow()
-    return model.ServiceInstance(
-        name='{0}_{1}'.format(service_template.name, service_instance_id),
-        service_template=service_template,
-        description=context.modeling.instance.description or '',
-        created_at=now,
-        updated_at=now,
-        workflows={},
-        permalink='',
-        policy_triggers={},
-        scaling_groups={}
-    )
-
-
-def _create_node_instance(service_instance, node, node_model):
-    return model.Node(
-        service_instance=service_instance,
-        name=node_model.id,
-        runtime_properties={},
-        node_template=node,
-        state='',
-        scaling_groups=[]
-    )
-
-
-def _create_relationship_instance(source_instance, target_instance):
-    return model.Relationship(
-        source_node=source_instance,
-        target_node=target_instance
-    )
-
-
-_TERMINAL_LOCK = RLock()
-
-
-@operation
-def _dry_node(ctx, _plugin, _implementation, **kwargs):
-    with _TERMINAL_LOCK:
-        print '> node instance: %s' % Colored.red(ctx.node.name)
-        _dump_implementation(_plugin, _implementation)
-
-
-@operation
-def _dry_relationship(ctx, _plugin, _implementation, **kwargs):
-    with _TERMINAL_LOCK:
-        puts('> relationship instance: %s -> %s' % (
-            Colored.red(ctx.relationship.source_node_instance.name),
-            Colored.red(ctx.relationship.target_node_instance.name)))
-        _dump_implementation(_plugin, _implementation)
-
-
-def _dump_implementation(plugin, implementation):
-    if plugin:
-        print '  plugin: %s' % Colored.magenta(plugin)
-    if implementation:
-        print '  implementation: %s' % Colored.yellow(safe_repr(implementation))

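The relationship-wiring loop in the removed initializer relies on one convention: each instantiated relationship records the index of the requirement template it was created from (source_requirement_index), and that index is how the relationship is matched back to its requirement before being written to storage. A schematic, pure-data sketch of that matching step (not part of this commit; all names are illustrative):

    requirement_templates = ['host', 'db_endpoint']  # order defines the requirement indices
    relationships = [
        {'source_requirement_index': 0, 'target_node_id': 'server_1'},
        {'source_requirement_index': 1, 'target_node_id': 'db_1'},
    ]

    for index, requirement_name in enumerate(requirement_templates):
        for relationship in relationships:
            if relationship['source_requirement_index'] == index:
                print('%s -> %s' % (requirement_name, relationship['target_node_id']))
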
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/utils/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/utils/exceptions.py b/aria/utils/exceptions.py
index a19eb78..9e3e80f 100644
--- a/aria/utils/exceptions.py
+++ b/aria/utils/exceptions.py
@@ -15,11 +15,14 @@
 
 import sys
 import linecache
+import traceback as tb
 
 import jsonpickle
 
-from clint.textui import indent
-from .console import (puts, Colored)
+from .console import (puts, indent, Colored)
+
+
+ENTRY_FORMAT = 'File "{filename}", line {lineno}, in {name}'
 
 
 def print_exception(e, full=True, cause=False, traceback=None):
@@ -27,14 +30,16 @@ def print_exception(e, full=True, cause=False, traceback=None):
     Prints the exception with nice colors and such.
     """
     def format_heading(e):
-        return '%s%s: %s' % (Colored.red('Caused by ') if cause else '', Colored.red(
-            e.__class__.__name__, bold=True), Colored.red(e))
+        return '{0}{1}: {2}'.format(
+            Colored.red('Caused by ') if cause else '',
+            Colored.red(e.__class__.__name__, bold=True),
+            Colored.red(e))
 
     puts(format_heading(e))
     if full:
         if cause:
             if traceback:
-                print_traceback(traceback)
+                print_traceback(traceback, True)
         else:
             print_traceback()
     if hasattr(e, 'cause') and e.cause:
@@ -42,7 +47,7 @@ def print_exception(e, full=True, cause=False, traceback=None):
         print_exception(e.cause, full=full, cause=True, traceback=traceback)
 
 
-def print_traceback(traceback=None):
+def print_traceback(traceback=None, print_last_stack=False):
     """
     Prints the traceback with nice colors and such.
     """
@@ -51,20 +56,37 @@ def print_traceback(traceback=None):
         _, _, traceback = sys.exc_info()
     while traceback is not None:
         frame = traceback.tb_frame
-        lineno = traceback.tb_lineno
         code = frame.f_code
         filename = code.co_filename
+        lineno = traceback.tb_lineno
         name = code.co_name
         with indent(2):
-            puts('File "%s", line %s, in %s' % (Colored.blue(filename),
-                                                Colored.cyan(lineno),
-                                                Colored.cyan(name)))
+            puts(ENTRY_FORMAT.format(filename=Colored.blue(filename),
+                                     lineno=Colored.cyan(lineno),
+                                     name=Colored.cyan(name)))
             linecache.checkcache(filename)
             line = linecache.getline(filename, lineno, frame.f_globals)
             if line:
                 with indent(2):
-                    puts(Colored.black(line.strip()))
+                    puts(line.strip())
         traceback = traceback.tb_next
+        if print_last_stack and (traceback is None):
+            # Print stack of *last* traceback
+            _print_stack(frame)
+
+
+def _print_stack(frame):
+    entries = tb.extract_stack(frame)
+    if not entries:
+        return
+    puts(Colored.red('Call stack:'))
+    with indent(2):
+        for filename, lineno, name, line in entries:
+            puts(ENTRY_FORMAT.format(filename=Colored.blue(filename),
+                                     lineno=Colored.cyan(lineno),
+                                     name=Colored.cyan(name)))
+            with indent(2):
+                puts(line)
 
 
 class _WrappedException(Exception):

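The new _print_stack helper above builds on the standard library's traceback.extract_stack(frame), which yields (filename, line number, function name, source line) entries for a frame and its callers; print_traceback passes it the last frame of the traceback so the call stack leading up to the failure is printed as well. A tiny standalone sketch of that standard-library call (not part of this commit):

    import traceback

    def report_stack():
        # Same tuple shape that _print_stack consumes
        for filename, lineno, name, line in traceback.extract_stack():
            print('File "%s", line %s, in %s' % (filename, lineno, name))
            if line:
                print('    %s' % line)

    report_stack()
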
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/utils/formatting.py
----------------------------------------------------------------------
diff --git a/aria/utils/formatting.py b/aria/utils/formatting.py
index 3725bc7..8a223e9 100644
--- a/aria/utils/formatting.py
+++ b/aria/utils/formatting.py
@@ -13,14 +13,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from __future__ import absolute_import  # so we can import standard 'collections'
-
 import json
 from types import MethodType
+
 from ruamel import yaml  # @UnresolvedImport
 
 from .collections import FrozenList, FrozenDict, StrictList, StrictDict, OrderedDict
 
+
+PLURALIZE_EXCEPTIONS = {}
+
+
 # Add our types to ruamel.yaml (for round trips)
 yaml.representer.RoundTripRepresenter.add_representer(
     FrozenList, yaml.representer.RoundTripRepresenter.represent_list)
@@ -108,6 +111,18 @@ def string_list_as_string(strings):
     return ', '.join('"%s"' % safe_str(v) for v in strings)
 
 
+def pluralize(noun):
+    plural = PLURALIZE_EXCEPTIONS.get(noun)
+    if plural is not None:
+        return plural
+    elif noun.endswith('s'):
+        return '{0}es'.format(noun)
+    elif noun.endswith('y'):
+        return '{0}ies'.format(noun[:-1])
+    else:
+        return '{0}s'.format(noun)
+
+
 def as_raw(value):
     """
     Converts values using their :code:`as_raw` property, if it exists, recursively.

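The pluralize helper added above applies simple suffix rules and consults PLURALIZE_EXCEPTIONS first, so irregular nouns can be registered as overrides. A self-contained usage sketch (not part of this commit; the module itself ships with an empty exceptions dict, and the 'person' entry below is only an illustration):

    PLURALIZE_EXCEPTIONS = {'person': 'people'}

    def pluralize(noun):
        plural = PLURALIZE_EXCEPTIONS.get(noun)
        if plural is not None:
            return plural
        elif noun.endswith('s'):
            return '{0}es'.format(noun)
        elif noun.endswith('y'):
            return '{0}ies'.format(noun[:-1])
        else:
            return '{0}s'.format(noun)

    assert pluralize('node') == 'nodes'
    assert pluralize('policy') == 'policies'
    assert pluralize('process') == 'processes'
    assert pluralize('person') == 'people'
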
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/utils/uuid.py
----------------------------------------------------------------------
diff --git a/aria/utils/uuid.py b/aria/utils/uuid.py
new file mode 100644
index 0000000..1f340c6
--- /dev/null
+++ b/aria/utils/uuid.py
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import  # so we can import standard 'uuid'
+
+from random import randrange
+from uuid import uuid4
+
+from shortuuid import ShortUUID
+
+
+# Alphanumeric without visually ambiguous characters; default length is 22
+UUID_BASE57 = ShortUUID()
+
+# Lower-case alphanumeric; default length is 25
+UUID_LOWERCASE_ALPHANUMERIC = ShortUUID(alphabet='abcdefghijklmnopqrstuvwxyz0123456789')
+
+
+def generate_uuid(length=None, variant='base57'):
+    """
+    A random string with varying degrees of guarantee of universal uniqueness.
+
+    :param variant: options are:
+                    * 'base57' (the default) uses a mix of upper and lowercase alphanumerics
+                      ensuring no visually ambiguous characters; default length 22
+                    * 'alphanumeric' uses lowercase alphanumeric; default length 25
+                    * 'uuid' uses lowercase hexadecimal in the classic UUID format, including
+                      dashes; length is always 36
+                    * 'hex' uses lowercase hexadecimal characters but has no guarantee of
+                      uniqueness; default length of 5
+    """
+
+    if variant == 'base57':
+        the_id = UUID_BASE57.uuid()
+        if length is not None:
+            the_id = the_id[:length]
+
+    elif variant == 'alphanumeric':
+        the_id = UUID_LOWERCASE_ALPHANUMERIC.uuid()
+        if length is not None:
+            the_id = the_id[:length]
+
+    elif variant == 'uuid':
+        the_id = str(uuid4())
+
+    elif variant == 'hex':
+        length = length or 5
+        # See: http://stackoverflow.com/a/2782859
+        the_id = ('%0' + str(length) + 'x') % randrange(16 ** length)
+
+    else:
+        raise ValueError('unsupported UUID variant: {0}'.format(variant))
+
+    return the_id

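A short usage sketch for the new generate_uuid helper (not part of this commit; it assumes ARIA and its shortuuid dependency are installed), showing the documented defaults of each variant:

    from aria.utils.uuid import generate_uuid

    print(generate_uuid())                         # 22-character base57 short UUID
    print(generate_uuid(variant='alphanumeric'))   # 25-character lowercase alphanumeric
    print(generate_uuid(variant='uuid'))           # classic 36-character UUID with dashes
    print(generate_uuid(variant='hex', length=8))  # 8 hex digits, no uniqueness guarantee
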
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/docs/requirements.txt
----------------------------------------------------------------------
diff --git a/docs/requirements.txt b/docs/requirements.txt
index dfd5f79..72b28f1 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -10,5 +10,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-Sphinx==1.5b1
-sphinx_rtd_theme==0.1.10a0
+Sphinx==1.5.3
+sphinx_rtd_theme==0.2.4



[08/12] incubator-ariatosca git commit: ARIA-105 Integrate parser and orchestrator models

Posted by em...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/parser/modeling/elements.py
----------------------------------------------------------------------
diff --git a/aria/parser/modeling/elements.py b/aria/parser/modeling/elements.py
deleted file mode 100644
index 62bc7b8..0000000
--- a/aria/parser/modeling/elements.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ...utils.collections import StrictDict, OrderedDict
-from ...utils.console import puts
-from .utils import coerce_value
-
-
-class Function(object):
-    """
-    An intrinsic function.
-
-    Serves as a placeholder for a value that should eventually be derived
-    by calling the function.
-    """
-
-    @property
-    def as_raw(self):
-        raise NotImplementedError
-
-    def _evaluate(self, context, container):
-        raise NotImplementedError
-
-    def __deepcopy__(self, memo):
-        # Circumvent cloning in order to maintain our state
-        return self
-
-
-class Element(object):
-    """
-    Base class for :class:`ServiceInstance` elements.
-
-    All elements support validation, diagnostic dumping, and representation as
-    raw data (which can be translated into JSON or YAML) via :code:`as_raw`.
-    """
-
-    @property
-    def as_raw(self):
-        raise NotImplementedError
-
-    def validate(self, context):
-        pass
-
-    def coerce_values(self, context, container, report_issues):
-        pass
-
-    def dump(self, context):
-        pass
-
-
-class ModelElement(Element):
-    """
-    Base class for :class:`ServiceModel` elements.
-
-    All model elements can be instantiated into :class:`ServiceInstance` elements.
-    """
-
-    def instantiate(self, context, container):
-        raise NotImplementedError
-
-
-class Parameter(ModelElement):
-    """
-    Represents a typed value.
-
-    This class is used by both service model and service instance elements.
-    """
-
-    def __init__(self, type_name, value, description):
-        self.type_name = type_name
-        self.value = value
-        self.description = description
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('type_name', self.type_name),
-            ('value', self.value),
-            ('description', self.description)))
-
-    def instantiate(self, context, container):
-        return Parameter(self.type_name, self.value, self.description)
-
-    def coerce_values(self, context, container, report_issues):
-        if self.value is not None:
-            self.value = coerce_value(context, container, self.value, report_issues)
-
-
-class Metadata(ModelElement):
-    """
-    Custom values associated with the deployment template and its plans.
-
-    This class is used by both service model and service instance elements.
-
-    Properties:
-
-    * :code:`values`: Dict of custom values
-    """
-
-    def __init__(self):
-        self.values = StrictDict(key_class=basestring)
-
-    @property
-    def as_raw(self):
-        return self.values
-
-    def instantiate(self, context, container):
-        metadata = Metadata()
-        metadata.values.update(self.values)
-        return metadata
-
-    def dump(self, context):
-        puts('Metadata:')
-        with context.style.indent:
-            for name, value in self.values.iteritems():
-                puts('%s: %s' % (name, context.style.meta(value)))

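The removed Parameter element is the simplest illustration of the as_raw convention used throughout these model classes: every element can render itself as plain data, which is what makes a whole model dumpable to JSON or YAML. A standalone sketch of just that convention (not part of this commit; trimmed to the raw-data property):

    from collections import OrderedDict

    class Parameter(object):
        def __init__(self, type_name, value, description):
            self.type_name = type_name
            self.value = value
            self.description = description

        @property
        def as_raw(self):
            # Plain, ordered data that json/yaml serializers can handle directly
            return OrderedDict((('type_name', self.type_name),
                                ('value', self.value),
                                ('description', self.description)))

    port = Parameter('integer', 8080, 'HTTP port')
    print(port.as_raw)  # OrderedDict([('type_name', 'integer'), ('value', 8080), ...])
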
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/parser/modeling/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/parser/modeling/exceptions.py b/aria/parser/modeling/exceptions.py
deleted file mode 100644
index c7f08df..0000000
--- a/aria/parser/modeling/exceptions.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ...exceptions import AriaException
-
-
-class CannotEvaluateFunctionException(AriaException):
-    """
-    ARIA modeling exception: cannot evaluate the function at this time.
-    """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/parser/modeling/instance_elements.py
----------------------------------------------------------------------
diff --git a/aria/parser/modeling/instance_elements.py b/aria/parser/modeling/instance_elements.py
deleted file mode 100644
index 8310d62..0000000
--- a/aria/parser/modeling/instance_elements.py
+++ /dev/null
@@ -1,1041 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ...utils.collections import StrictList, StrictDict, FrozenList, OrderedDict
-from ...utils.formatting import as_raw, as_raw_list, as_raw_dict, as_agnostic, safe_repr
-from ...utils.console import puts, indent
-from ..validation import Issue
-from .elements import Element, Parameter
-from .utils import (validate_dict_values, validate_list_values, coerce_dict_values,
-                    coerce_list_values, dump_list_values, dump_dict_values, dump_parameters,
-                    dump_interfaces)
-
-
-class ServiceInstance(Element):
-    """
-    A service instance is an instance of a :class:`ServiceModel`.
-
-    You will usually not create it programmatically, but instead instantiate
-    it from the model.
-
-    Properties:
-
-    * :code:`description`: Human-readable description
-    * :code:`metadata`: :class:`Metadata`
-    * :code:`nodes`: Dict of :class:`Node`
-    * :code:`groups`: Dict of :class:`Group`
-    * :code:`policies`: Dict of :class:`Policy`
-    * :code:`substitution`: :class:`Substitution`
-    * :code:`inputs`: Dict of :class:`Parameter`
-    * :code:`outputs`: Dict of :class:`Parameter`
-    * :code:`operations`: Dict of :class:`Operation`
-    """
-
-    def __init__(self):
-        self.description = None
-        self.metadata = None
-        self.nodes = StrictDict(key_class=basestring, value_class=Node)
-        self.groups = StrictDict(key_class=basestring, value_class=Group)
-        self.policies = StrictDict(key_class=basestring, value_class=Policy)
-        self.substitution = None
-        self.inputs = StrictDict(key_class=basestring, value_class=Parameter)
-        self.outputs = StrictDict(key_class=basestring, value_class=Parameter)
-        self.operations = StrictDict(key_class=basestring, value_class=Operation)
-
-    def satisfy_requirements(self, context):
-        satisfied = True
-        for node in self.nodes.itervalues():
-            if not node.satisfy_requirements(context):
-                satisfied = False
-        return satisfied
-
-    def validate_capabilities(self, context):
-        satisfied = True
-        for node in self.nodes.itervalues():
-            if not node.validate_capabilities(context):
-                satisfied = False
-        return satisfied
-
-    def find_nodes(self, node_template_name):
-        nodes = []
-        for node in self.nodes.itervalues():
-            if node.template_name == node_template_name:
-                nodes.append(node)
-        return FrozenList(nodes)
-
-    def get_node_ids(self, node_template_name):
-        return FrozenList((node.id for node in self.find_nodes(node_template_name)))
-
-    def find_groups(self, group_template_name):
-        groups = []
-        for group in self.groups.itervalues():
-            if group.template_name == group_template_name:
-                groups.append(group)
-        return FrozenList(groups)
-
-    def get_group_ids(self, group_template_name):
-        return FrozenList((group.id for group in self.find_groups(group_template_name)))
-
-    def is_node_a_target(self, context, target_node):
-        for node in self.nodes.itervalues():
-            if self._is_node_a_target(context, node, target_node):
-                return True
-        return False
-
-    def _is_node_a_target(self, context, source_node, target_node):
-        if source_node.relationships:
-            for relationship in source_node.relationships:
-                if relationship.target_node_id == target_node.id:
-                    return True
-                else:
-                    node = context.modeling.instance.nodes.get(relationship.target_node_id)
-                    if node is not None:
-                        if self._is_node_a_target(context, node, target_node):
-                            return True
-        return False
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('description', self.description),
-            ('metadata', as_raw(self.metadata)),
-            ('nodes', as_raw_list(self.nodes)),
-            ('groups', as_raw_list(self.groups)),
-            ('policies', as_raw_list(self.policies)),
-            ('substitution', as_raw(self.substitution)),
-            ('inputs', as_raw_dict(self.inputs)),
-            ('outputs', as_raw_dict(self.outputs)),
-            ('operations', as_raw_list(self.operations))))
-
-    def validate(self, context):
-        if self.metadata is not None:
-            self.metadata.validate(context)
-        validate_dict_values(context, self.nodes)
-        validate_dict_values(context, self.groups)
-        validate_dict_values(context, self.policies)
-        if self.substitution is not None:
-            self.substitution.validate(context)
-        validate_dict_values(context, self.inputs)
-        validate_dict_values(context, self.outputs)
-        validate_dict_values(context, self.operations)
-
-    def coerce_values(self, context, container, report_issues):
-        if self.metadata is not None:
-            self.metadata.coerce_values(context, container, report_issues)
-        coerce_dict_values(context, container, self.nodes, report_issues)
-        coerce_dict_values(context, container, self.groups, report_issues)
-        coerce_dict_values(context, container, self.policies, report_issues)
-        if self.substitution is not None:
-            self.substitution.coerce_values(context, container, report_issues)
-        coerce_dict_values(context, container, self.inputs, report_issues)
-        coerce_dict_values(context, container, self.outputs, report_issues)
-        coerce_dict_values(context, container, self.operations, report_issues)
-
-    def dump(self, context):
-        if self.description is not None:
-            puts(context.style.meta(self.description))
-        if self.metadata is not None:
-            self.metadata.dump(context)
-        for node in self.nodes.itervalues():
-            node.dump(context)
-        for group in self.groups.itervalues():
-            group.dump(context)
-        for policy in self.policies.itervalues():
-            policy.dump(context)
-        if self.substitution is not None:
-            self.substitution.dump(context)
-        dump_parameters(context, self.inputs, 'Inputs')
-        dump_parameters(context, self.outputs, 'Outputs')
-        dump_dict_values(context, self.operations, 'Operations')
-
-    def dump_graph(self, context):
-        for node in self.nodes.itervalues():
-            if not self.is_node_a_target(context, node):
-                self._dump_graph_node(context, node)
-
-    def _dump_graph_node(self, context, node):
-        puts(context.style.node(node.id))
-        if node.relationships:
-            with context.style.indent:
-                for relationship in node.relationships:
-                    relationship_name = (context.style.node(relationship.template_name)
-                                         if relationship.template_name is not None
-                                         else context.style.type(relationship.type_name))
-                    capability_name = (context.style.node(relationship.target_capability_name)
-                                       if relationship.target_capability_name is not None
-                                       else None)
-                    if capability_name is not None:
-                        puts('-> %s %s' % (relationship_name, capability_name))
-                    else:
-                        puts('-> %s' % relationship_name)
-                    target_node = self.nodes.get(relationship.target_node_id)
-                    with indent(3):
-                        self._dump_graph_node(context, target_node)
-
-
-class Node(Element):
-    """
-    An instance of a :class:`NodeTemplate`.
-
-    Nodes may have zero or more :class:`Relationship` instances to other nodes.
-
-    Properties:
-
-    * :code:`id`: Unique ID (prefixed with the template name)
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`template_name`: Must be represented in the :class:`ServiceModel`
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`interfaces`: Dict of :class:`Interface`
-    * :code:`artifacts`: Dict of :class:`Artifact`
-    * :code:`capabilities`: Dict of :class:`Capability`
-    * :code:`relationships`: List of :class:`Relationship`
-    """
-
-    def __init__(self, context, type_name, template_name):
-        if not isinstance(type_name, basestring):
-            raise ValueError('must set type_name (string)')
-        if not isinstance(template_name, basestring):
-            raise ValueError('must set template_name (string)')
-
-        self.id = '%s_%s' % (template_name, context.modeling.generate_id())
-        self.type_name = type_name
-        self.template_name = template_name
-        self.properties = StrictDict(key_class=basestring, value_class=Parameter)
-        self.interfaces = StrictDict(key_class=basestring, value_class=Interface)
-        self.artifacts = StrictDict(key_class=basestring, value_class=Artifact)
-        self.capabilities = StrictDict(key_class=basestring, value_class=Capability)
-        self.relationships = StrictList(value_class=Relationship)
-
-    def satisfy_requirements(self, context):
-        node_template = context.modeling.model.node_templates.get(self.template_name)
-        satisfied = True
-        for i in range(len(node_template.requirement_templates)):
-            requirement_template = node_template.requirement_templates[i]
-
-            # Find target template
-            target_node_template, target_node_capability = \
-                requirement_template.find_target(context, node_template)
-            if target_node_template is not None:
-                satisfied = self._satisfy_capability(context,
-                                                     target_node_capability,
-                                                     target_node_template,
-                                                     requirement_template,
-                                                     requirement_template_index=i)
-            else:
-                context.validation.report('requirement "%s" of node "%s" has no target node '
-                                          'template' % (requirement_template.name,
-                                                        self.id),
-                                          level=Issue.BETWEEN_INSTANCES)
-                satisfied = False
-        return satisfied
-
-    def _satisfy_capability(self, context, target_node_capability, target_node_template,
-                            requirement_template, requirement_template_index):
-        # Find target nodes
-        target_nodes = context.modeling.instance.find_nodes(target_node_template.name)
-        if target_nodes:
-            target_node = None
-            target_capability = None
-
-            if target_node_capability is not None:
-                # Relate to the first target node that has capacity
-                for node in target_nodes:
-                    target_capability = node.capabilities.get(target_node_capability.name)
-                    if target_capability.relate():
-                        target_node = node
-                        break
-            else:
-                # Use first target node
-                target_node = target_nodes[0]
-
-            if target_node is not None:
-                if requirement_template.relationship_template is not None:
-                    relationship = \
-                        requirement_template.relationship_template.instantiate(context, self)
-                else:
-                    relationship = Relationship()
-                relationship.name = requirement_template.name
-                relationship.source_requirement_index = requirement_template_index
-                relationship.target_node_id = target_node.id
-                if target_capability is not None:
-                    relationship.target_capability_name = target_capability.name
-                self.relationships.append(relationship)
-            else:
-                context.validation.report('requirement "%s" of node "%s" targets node '
-                                          'template "%s" but its instantiated nodes do not '
-                                          'have enough capacity'
-                                          % (requirement_template.name,
-                                             self.id,
-                                             target_node_template.name),
-                                          level=Issue.BETWEEN_INSTANCES)
-                return False
-        else:
-            context.validation.report('requirement "%s" of node "%s" targets node template '
-                                      '"%s" but it has no instantiated nodes'
-                                      % (requirement_template.name,
-                                         self.id,
-                                         target_node_template.name),
-                                      level=Issue.BETWEEN_INSTANCES)
-            return False
-
-
-    def validate_capabilities(self, context):
-        satisfied = False
-        for capability in self.capabilities.itervalues():
-            if not capability.has_enough_relationships:
-                context.validation.report('capability "%s" of node "%s" requires at least %d '
-                                          'relationships but has %d'
-                                          % (capability.name,
-                                             self.id,
-                                             capability.min_occurrences,
-                                             capability.occurrences),
-                                          level=Issue.BETWEEN_INSTANCES)
-                satisfied = False
-        return satisfied
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('id', self.id),
-            ('type_name', self.type_name),
-            ('template_name', self.template_name),
-            ('properties', as_raw_dict(self.properties)),
-            ('interfaces', as_raw_list(self.interfaces)),
-            ('artifacts', as_raw_list(self.artifacts)),
-            ('capabilities', as_raw_list(self.capabilities)),
-            ('relationships', as_raw_list(self.relationships))))
-
-    def validate(self, context):
-        if len(self.id) > context.modeling.id_max_length:
-            context.validation.report('"%s" has an ID longer than the limit of %d characters: %d'
-                                      % (self.id,
-                                         context.modeling.id_max_length,
-                                         len(self.id)),
-                                      level=Issue.BETWEEN_INSTANCES)
-
-        # TODO: validate that node template is of type?
-
-        validate_dict_values(context, self.properties)
-        validate_dict_values(context, self.interfaces)
-        validate_dict_values(context, self.artifacts)
-        validate_dict_values(context, self.capabilities)
-        validate_list_values(context, self.relationships)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, self, self.properties, report_issues)
-        coerce_dict_values(context, self, self.interfaces, report_issues)
-        coerce_dict_values(context, self, self.artifacts, report_issues)
-        coerce_dict_values(context, self, self.capabilities, report_issues)
-        coerce_list_values(context, self, self.relationships, report_issues)
-
-    def dump(self, context):
-        puts('Node: %s' % context.style.node(self.id))
-        with context.style.indent:
-            puts('Template: %s' % context.style.node(self.template_name))
-            puts('Type: %s' % context.style.type(self.type_name))
-            dump_parameters(context, self.properties)
-            dump_interfaces(context, self.interfaces)
-            dump_dict_values(context, self.artifacts, 'Artifacts')
-            dump_dict_values(context, self.capabilities, 'Capabilities')
-            dump_list_values(context, self.relationships, 'Relationships')
-
-
-class Capability(Element):
-    """
-    A capability of a :class:`Node`.
-
-    An instance of a :class:`CapabilityTemplate`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`min_occurrences`: Minimum number of requirement matches required
-    * :code:`max_occurrences`: Maximum number of requirement matches allowed
-    * :code:`properties`: Dict of :class:`Parameter`
-    """
-
-    def __init__(self, name, type_name):
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-        if not isinstance(type_name, basestring):
-            raise ValueError('must set type_name (string)')
-
-        self.name = name
-        self.type_name = type_name
-        self.properties = StrictDict(key_class=basestring, value_class=Parameter)
-
-        self.min_occurrences = None # optional
-        self.max_occurrences = None # optional
-        self.occurrences = 0
-
-    @property
-    def has_enough_relationships(self):
-        if self.min_occurrences is not None:
-            return self.occurrences >= self.min_occurrences
-        return True
-
-    def relate(self):
-        if self.max_occurrences is not None:
-            if self.occurrences == self.max_occurrences:
-                return False
-        self.occurrences += 1
-        return True
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('type_name', self.type_name),
-            ('properties', as_raw_dict(self.properties))))
-
-    def validate(self, context):
-        if context.modeling.capability_types.get_descendant(self.type_name) is None:
-            context.validation.report('capability "%s" has an unknown type: %s'
-                                      % (self.name,
-                                         safe_repr(self.type_name)),
-                                      level=Issue.BETWEEN_TYPES)
-
-        validate_dict_values(context, self.properties)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, container, self.properties, report_issues)
-
-    def dump(self, context):
-        puts(context.style.node(self.name))
-        with context.style.indent:
-            puts('Type: %s' % context.style.type(self.type_name))
-            puts('Occurrences: %s (%s%s)'
-                 % (self.occurrences,
-                    self.min_occurrences or 0,
-                    (' to %d' % self.max_occurrences)
-                    if self.max_occurrences is not None
-                    else ' or more'))
-            dump_parameters(context, self.properties)
-
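A minimal usage sketch of the occurrence counting above (illustrative only, not part of the commit; it assumes the Capability class shown here is importable, and the capability name and type are made up):

    capability = Capability(name='host', type_name='tosca.capabilities.Container')
    capability.min_occurrences = 1
    capability.max_occurrences = 2

    assert capability.relate() is True       # occurrences == 1, minimum now met
    assert capability.relate() is True       # occurrences == 2, at the maximum
    assert capability.relate() is False      # capacity exhausted; the caller tries another node
    assert capability.has_enough_relationships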
-
-class Relationship(Element):
-    """
-    Connects a :class:`Node` to another node.
-
-    An instance of a :class:`RelationshipTemplate`.
-
-    Properties:
-
-    * :code:`name`: Name (usually the name of the requirement at the source node template)
-    * :code:`source_requirement_index`: Must be represented in the source node template
-    * :code:`target_node_id`: Must be represented in the :class:`ServiceInstance`
-    * :code:`target_capability_name`: Matches the capability at the target node
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`template_name`: Must be represented in the :class:`ServiceModel`
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`source_interfaces`: Dict of :class:`Interface`
-    * :code:`target_interfaces`: Dict of :class:`Interface`
-    """
-
-    def __init__(self, name=None,
-                 source_requirement_index=None,
-                 type_name=None,
-                 template_name=None):
-        if name is not None and not isinstance(name, basestring):
-            raise ValueError('name must be a string or None')
-        if source_requirement_index is not None and \
-                (not isinstance(source_requirement_index, int) or (source_requirement_index < 0)):
-            raise ValueError('source_requirement_index must be a non-negative int or None')
-        if type_name is not None and not isinstance(type_name, basestring):
-            raise ValueError('type_name must be a string or None')
-        if template_name is not None and not isinstance(template_name, basestring):
-            raise ValueError('template_name must be a string or None')
-
-        self.name = name
-        self.source_requirement_index = source_requirement_index
-        self.target_node_id = None
-        self.target_capability_name = None
-        self.type_name = type_name
-        self.template_name = template_name
-        self.properties = StrictDict(key_class=basestring, value_class=Parameter)
-        self.source_interfaces = StrictDict(key_class=basestring, value_class=Interface)
-        self.target_interfaces = StrictDict(key_class=basestring, value_class=Interface)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('source_requirement_index', self.source_requirement_index),
-            ('target_node_id', self.target_node_id),
-            ('target_capability_name', self.target_capability_name),
-            ('type_name', self.type_name),
-            ('template_name', self.template_name),
-            ('properties', as_raw_dict(self.properties)),
-            ('source_interfaces', as_raw_list(self.source_interfaces)),
-            ('target_interfaces', as_raw_list(self.target_interfaces))))
-
-    def validate(self, context):
-        if self.type_name:
-            if context.modeling.relationship_types.get_descendant(self.type_name) is None:
-                context.validation.report('relationship "%s" has an unknown type: %s'
-                                          % (self.name,
-                                             safe_repr(self.type_name)),
-                                          level=Issue.BETWEEN_TYPES)
-        validate_dict_values(context, self.properties)
-        validate_dict_values(context, self.source_interfaces)
-        validate_dict_values(context, self.target_interfaces)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, container, self.properties, report_issues)
-        coerce_dict_values(context, container, self.source_interfaces, report_issues)
-        coerce_dict_values(context, container, self.target_interfaces, report_issues)
-
-    def dump(self, context):
-        if self.name:
-            if self.source_requirement_index is not None:
-                puts('%s (%d) ->' % (context.style.node(self.name), self.source_requirement_index))
-            else:
-                puts('%s ->' % context.style.node(self.name))
-        else:
-            puts('->')
-        with context.style.indent:
-            puts('Node: %s' % context.style.node(self.target_node_id))
-            if self.target_capability_name is not None:
-                puts('Capability: %s' % context.style.node(self.target_capability_name))
-            if self.type_name is not None:
-                puts('Relationship type: %s' % context.style.type(self.type_name))
-            if self.template_name is not None:
-                puts('Relationship template: %s' % context.style.node(self.template_name))
-            dump_parameters(context, self.properties)
-            dump_interfaces(context, self.source_interfaces, 'Source interfaces')
-            dump_interfaces(context, self.target_interfaces, 'Target interfaces')
-
-
-class Artifact(Element):
-    """
-    A file associated with a :class:`Node`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`source_path`: Source path (CSAR or repository)
-    * :code:`target_path`: Path at destination machine
-    * :code:`repository_url`: Repository URL
-    * :code:`repository_credential`: Dict of string
-    * :code:`properties`: Dict of :class:`Parameter`
-    """
-
-    def __init__(self, name, type_name, source_path):
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-        if not isinstance(type_name, basestring):
-            raise ValueError('must set type_name (string)')
-        if not isinstance(source_path, basestring):
-            raise ValueError('must set source_path (string)')
-
-        self.name = name
-        self.description = None
-        self.type_name = type_name
-        self.source_path = source_path
-        self.target_path = None
-        self.repository_url = None
-        self.repository_credential = StrictDict(key_class=basestring, value_class=basestring)
-        self.properties = StrictDict(key_class=basestring, value_class=Parameter)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('source_path', self.source_path),
-            ('target_path', self.target_path),
-            ('repository_url', self.repository_url),
-            ('repository_credential', as_agnostic(self.repository_credential)),
-            ('properties', as_raw_dict(self.properties))))
-
-    def validate(self, context):
-        if context.modeling.artifact_types.get_descendant(self.type_name) is None:
-            context.validation.report('artifact "%s" has an unknown type: %s'
-                                      % (self.name,
-                                         safe_repr(self.type_name)),
-                                      level=Issue.BETWEEN_TYPES)
-        validate_dict_values(context, self.properties)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, container, self.properties, report_issues)
-
-    def dump(self, context):
-        puts(context.style.node(self.name))
-        if self.description:
-            puts(context.style.meta(self.description))
-        with context.style.indent:
-            puts('Artifact type: %s' % context.style.type(self.type_name))
-            puts('Source path: %s' % context.style.literal(self.source_path))
-            if self.target_path is not None:
-                puts('Target path: %s' % context.style.literal(self.target_path))
-            if self.repository_url is not None:
-                puts('Repository URL: %s' % context.style.literal(self.repository_url))
-            if self.repository_credential:
-                puts('Repository credential: %s'
-                     % context.style.literal(self.repository_credential))
-            dump_parameters(context, self.properties)
-
-
-class Group(Element):
-    """
-    An instance of a :class:`GroupTemplate`.
-
-    Properties:
-
-    * :code:`id`: Unique ID (prefixed with the template name)
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`template_name`: Must be represented in the :class:`ServiceModel`
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`interfaces`: Dict of :class:`Interface`
-    * :code:`policies`: Dict of :class:`GroupPolicy`
-    * :code:`member_node_ids`: Must be represented in the :class:`ServiceInstance`
-    * :code:`member_group_ids`: Must be represented in the :class:`ServiceInstance`
-    """
-
-    def __init__(self, context, type_name, template_name):
-        if not isinstance(template_name, basestring):
-            raise ValueError('must set template_name (string)')
-
-        self.id = '%s_%s' % (template_name, context.modeling.generate_id())
-        self.type_name = type_name
-        self.template_name = template_name
-        self.properties = StrictDict(key_class=basestring, value_class=Parameter)
-        self.interfaces = StrictDict(key_class=basestring, value_class=Interface)
-        self.policies = StrictDict(key_class=basestring, value_class=GroupPolicy)
-        self.member_node_ids = StrictList(value_class=basestring)
-        self.member_group_ids = StrictList(value_class=basestring)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('id', self.id),
-            ('type_name', self.type_name),
-            ('template_name', self.template_name),
-            ('properties', as_raw_dict(self.properties)),
-            ('interfaces', as_raw_list(self.interfaces)),
-            ('policies', as_raw_list(self.policies)),
-            ('member_node_ids', self.member_node_ids),
-            ('member_group_ids', self.member_group_ids)))
-
-    def validate(self, context):
-        if context.modeling.group_types.get_descendant(self.type_name) is None:
-            context.validation.report('group "%s" has an unknown type: %s'
-                                      % (self.name,  # pylint: disable=no-member
-                                         # TODO fix self.name reference
-                                         safe_repr(self.type_name)),
-                                      level=Issue.BETWEEN_TYPES)
-
-        validate_dict_values(context, self.properties)
-        validate_dict_values(context, self.interfaces)
-        validate_dict_values(context, self.policies)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, container, self.properties, report_issues)
-        coerce_dict_values(context, container, self.interfaces, report_issues)
-        coerce_dict_values(context, container, self.policies, report_issues)
-
-    def dump(self, context):
-        puts('Group: %s' % context.style.node(self.id))
-        with context.style.indent:
-            puts('Type: %s' % context.style.type(self.type_name))
-            puts('Template: %s' % context.style.type(self.template_name))
-            dump_parameters(context, self.properties)
-            dump_interfaces(context, self.interfaces)
-            dump_dict_values(context, self.policies, 'Policies')
-            if self.member_node_ids:
-                puts('Member nodes:')
-                with context.style.indent:
-                    for node_id in self.member_node_ids:
-                        puts(context.style.node(node_id))
-
-
-class Policy(Element):
-    """
-    An instance of a :class:`PolicyTemplate`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`target_node_ids`: Must be represented in the :class:`ServiceInstance`
-    * :code:`target_group_ids`: Must be represented in the :class:`ServiceInstance`
-    """
-
-    def __init__(self, name, type_name):
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-        if not isinstance(type_name, basestring):
-            raise ValueError('must set type_name (string)')
-
-        self.name = name
-        self.type_name = type_name
-        self.properties = StrictDict(key_class=basestring, value_class=Parameter)
-        self.target_node_ids = StrictList(value_class=basestring)
-        self.target_group_ids = StrictList(value_class=basestring)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('type_name', self.type_name),
-            ('properties', as_raw_dict(self.properties)),
-            ('target_node_ids', self.target_node_ids),
-            ('target_group_ids', self.target_group_ids)))
-
-    def validate(self, context):
-        if context.modeling.policy_types.get_descendant(self.type_name) is None:
-            context.validation.report('policy "%s" has an unknown type: %s'
-                                      % (self.name,
-                                         safe_repr(self.type_name)),
-                                      level=Issue.BETWEEN_TYPES)
-
-        validate_dict_values(context, self.properties)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, container, self.properties, report_issues)
-
-    def dump(self, context):
-        puts('Policy: %s' % context.style.node(self.name))
-        with context.style.indent:
-            puts('Type: %s' % context.style.type(self.type_name))
-            dump_parameters(context, self.properties)
-            if self.target_node_ids:
-                puts('Target nodes:')
-                with context.style.indent:
-                    for node_id in self.target_node_ids:
-                        puts(context.style.node(node_id))
-            if self.target_group_ids:
-                puts('Target groups:')
-                with context.style.indent:
-                    for group_id in self.target_group_ids:
-                        puts(context.style.node(group_id))
-
-
-class GroupPolicy(Element):
-    """
-    Policies applied to groups.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`triggers`: Dict of :class:`GroupPolicyTrigger`
-    """
-
-    def __init__(self, name, type_name):
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-        if not isinstance(type_name, basestring):
-            raise ValueError('must set type_name (string)')
-
-        self.name = name
-        self.description = None
-        self.type_name = type_name
-        self.properties = StrictDict(key_class=basestring, value_class=Parameter)
-        self.triggers = StrictDict(key_class=basestring, value_class=GroupPolicyTrigger)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('properties', as_raw_dict(self.properties)),
-            ('triggers', as_raw_list(self.triggers))))
-
-    def validate(self, context):
-        if context.modeling.policy_types.get_descendant(self.type_name) is None:
-            context.validation.report('group policy "%s" has an unknown type: %s'
-                                      % (self.name,
-                                         safe_repr(self.type_name)),
-                                      level=Issue.BETWEEN_TYPES)
-
-        validate_dict_values(context, self.properties)
-        validate_dict_values(context, self.triggers)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, container, self.properties, report_issues)
-        coerce_dict_values(context, container, self.triggers, report_issues)
-
-    def dump(self, context):
-        puts(context.style.node(self.name))
-        if self.description:
-            puts(context.style.meta(self.description))
-        with context.style.indent:
-            puts('Group policy type: %s' % context.style.type(self.type_name))
-            dump_parameters(context, self.properties)
-            dump_dict_values(context, self.triggers, 'Triggers')
-
-
-class GroupPolicyTrigger(Element):
-    """
-    Triggers for :class:`GroupPolicy`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`implementation`: Implementation string (interpreted by the orchestrator)
-    * :code:`properties`: Dict of :class:`Parameter`
-    """
-
-    def __init__(self, name, implementation):
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-        if not isinstance(implementation, basestring):
-            raise ValueError('must set implementation (string)')
-
-        self.name = name
-        self.description = None
-        self.implementation = implementation
-        self.properties = StrictDict(key_class=basestring, value_class=Parameter)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('implementation', self.implementation),
-            ('properties', as_raw_dict(self.properties))))
-
-    def validate(self, context):
-        validate_dict_values(context, self.properties)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, container, self.properties, report_issues)
-
-    def dump(self, context):
-        puts(context.style.node(self.name))
-        if self.description:
-            puts(context.style.meta(self.description))
-        with context.style.indent:
-            puts('Implementation: %s' % context.style.literal(self.implementation))
-            dump_parameters(context, self.properties)
-
-
-class Mapping(Element):
-    """
-    An instance of a :class:`MappingTemplate`.
-
-    Properties:
-
-    * :code:`mapped_name`: Exposed capability or requirement name
-    * :code:`node_id`: Must be represented in the :class:`ServiceInstance`
-    * :code:`name`: Name of capability or requirement at the node
-    """
-
-    def __init__(self, mapped_name, node_id, name):
-        if not isinstance(mapped_name, basestring):
-            raise ValueError('must set mapped_name (string)')
-        if not isinstance(node_id, basestring):
-            raise ValueError('must set node_id (string)')
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-
-        self.mapped_name = mapped_name
-        self.node_id = node_id
-        self.name = name
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('mapped_name', self.mapped_name),
-            ('node_id', self.node_id),
-            ('name', self.name)))
-
-    def dump(self, context):
-        puts('%s -> %s.%s'
-             % (context.style.node(self.mapped_name),
-                context.style.node(self.node_id),
-                context.style.node(self.name)))
-
-
-class Substitution(Element):
-    """
-    An instance of a :class:`SubstitutionTemplate`.
-
-    Properties:
-
-    * :code:`node_type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`capabilities`: Dict of :class:`Mapping`
-    * :code:`requirements`: Dict of :class:`Mapping`
-    """
-
-    def __init__(self, node_type_name):
-        if not isinstance(node_type_name, basestring):
-            raise ValueError('must set node_type_name (string)')
-
-        self.node_type_name = node_type_name
-        self.capabilities = StrictDict(key_class=basestring, value_class=Mapping)
-        self.requirements = StrictDict(key_class=basestring, value_class=Mapping)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('node_type_name', self.node_type_name),
-            ('capabilities', as_raw_list(self.capabilities)),
-            ('requirements', as_raw_list(self.requirements))))
-
-    def validate(self, context):
-        if context.modeling.node_types.get_descendant(self.node_type_name) is None:
-            context.validation.report('substitution "%s" has an unknown type: %s'
-                                      % (self.name,  # pylint: disable=no-member
-                                         # TODO fix self.name reference
-                                         safe_repr(self.node_type_name)),
-                                      level=Issue.BETWEEN_TYPES)
-
-        validate_dict_values(context, self.capabilities)
-        validate_dict_values(context, self.requirements)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, container, self.capabilities, report_issues)
-        coerce_dict_values(context, container, self.requirements, report_issues)
-
-    def dump(self, context):
-        puts('Substitution:')
-        with context.style.indent:
-            puts('Node type: %s' % context.style.type(self.node_type_name))
-            dump_dict_values(context, self.capabilities, 'Capability mappings')
-            dump_dict_values(context, self.requirements, 'Requirement mappings')
-
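To make the mapping direction concrete, a small sketch that exposes one node's capability through the substitution (illustrative only; the type and node names are made up, and only the constructors shown above are used):

    # The whole service can stand in for a single node of this type:
    substitution = Substitution('tosca.nodes.WebServer')

    # Expose the 'host' capability of node 'web_server_1' as the substituted node's 'host':
    substitution.capabilities['host'] = Mapping(mapped_name='host',
                                                node_id='web_server_1',
                                                name='host')

    substitution.dump(context)  # given a consumption context, prints: host -> web_server_1.host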
-
-class Interface(Element):
-    """
-    A typed set of :class:`Operation`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`inputs`: Dict of :class:`Parameter`
-    * :code:`operations`: Dict of :class:`Operation`
-    """
-
-    def __init__(self, name, type_name):
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-
-        self.name = name
-        self.description = None
-        self.type_name = type_name
-        self.inputs = StrictDict(key_class=basestring, value_class=Parameter)
-        self.operations = StrictDict(key_class=basestring, value_class=Operation)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('inputs', as_raw_dict(self.inputs)),
-            ('operations', as_raw_list(self.operations))))
-
-    def validate(self, context):
-        if self.type_name:
-            if context.modeling.interface_types.get_descendant(self.type_name) is None:
-                context.validation.report('interface "%s" has an unknown type: %s'
-                                          % (self.name,
-                                             safe_repr(self.type_name)),
-                                          level=Issue.BETWEEN_TYPES)
-
-        validate_dict_values(context, self.inputs)
-        validate_dict_values(context, self.operations)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, container, self.inputs, report_issues)
-        coerce_dict_values(context, container, self.operations, report_issues)
-
-    def dump(self, context):
-        puts(context.style.node(self.name))
-        if self.description:
-            puts(context.style.meta(self.description))
-        with context.style.indent:
-            puts('Interface type: %s' % context.style.type(self.type_name))
-            dump_parameters(context, self.inputs, 'Inputs')
-            dump_dict_values(context, self.operations, 'Operations')
-
-
-class Operation(Element):
-    """
-    An operation in an :class:`Interface`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`implementation`: Implementation string (interpreted by the orchestrator)
-    * :code:`dependencies`: List of strings (interpreted by the orchestrator)
-    * :code:`executor`: Executor string (interpreted by the orchestrator)
-    * :code:`max_retries`: Maximum number of retries allowed in case of failure
-    * :code:`retry_interval`: Interval between retries
-    * :code:`inputs`: Dict of :class:`Parameter`
-    """
-
-    def __init__(self, name):
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-
-        self.name = name
-        self.description = None
-        self.implementation = None
-        self.dependencies = StrictList(value_class=basestring)
-        self.executor = None
-        self.max_retries = None
-        self.retry_interval = None
-        self.inputs = StrictDict(key_class=basestring, value_class=Parameter)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('implementation', self.implementation),
-            ('dependencies', self.dependencies),
-            ('executor', self.executor),
-            ('max_retries', self.max_retries),
-            ('retry_interval', self.retry_interval),
-            ('inputs', as_raw_dict(self.inputs))))
-
-    def validate(self, context):
-        validate_dict_values(context, self.inputs)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, container, self.inputs, report_issues)
-
-    def dump(self, context):
-        puts(context.style.node(self.name))
-        if self.description:
-            puts(context.style.meta(self.description))
-        with context.style.indent:
-            if self.implementation is not None:
-                puts('Implementation: %s' % context.style.literal(self.implementation))
-            if self.dependencies:
-                puts('Dependencies: %s'
-                     % ', '.join((str(context.style.literal(v)) for v in self.dependencies)))
-            if self.executor is not None:
-                puts('Executor: %s' % context.style.literal(self.executor))
-            if self.max_retries is not None:
-                puts('Max retries: %s' % context.style.literal(self.max_retries))
-            if self.retry_interval is not None:
-                puts('Retry interval: %s' % context.style.literal(self.retry_interval))
-            dump_parameters(context, self.inputs, 'Inputs')
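
A short sketch of wiring up the orchestration knobs documented above (illustrative only; the implementation path, executor name and interval units are made up, and all of them are interpreted by the orchestrator rather than by this model):

    operation = Operation('configure')
    operation.implementation = 'scripts/configure.sh'
    operation.executor = 'host_agent'              # hypothetical executor name
    operation.max_retries = 3
    operation.retry_interval = 30                  # units are orchestrator-defined
    operation.dependencies.append('fabric')        # plain strings, also orchestrator-defined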


[11/12] incubator-ariatosca git commit: ARIA-105 Integrate parser and orchestrator models

Posted by em...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/modeling/service_instance.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py
new file mode 100644
index 0000000..b97c148
--- /dev/null
+++ b/aria/modeling/service_instance.py
@@ -0,0 +1,1564 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=too-many-lines, no-self-argument, no-member, abstract-method
+
+from sqlalchemy import (
+    Column,
+    Text,
+    Integer
+)
+from sqlalchemy import DateTime
+from sqlalchemy.ext.associationproxy import association_proxy
+from sqlalchemy.ext.declarative import declared_attr
+
+from .mixins import InstanceModelMixin
+from ..parser import validation
+from ..parser.consumption import ConsumptionContext
+from ..utils import collections, formatting, console
+from . import (
+    relationship,
+    utils,
+    types as modeling_types
+)
+
+
+class ServiceBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
+    """
+    A service is usually an instance of a :class:`ServiceTemplate`.
+
+    You will usually not create it programmatically, but instead instantiate it from a service
+    template.
+
+    :ivar name: Name (unique for this ARIA installation)
+    :vartype name: basestring
+    :ivar service_template: Template from which this service was instantiated (optional)
+    :vartype service_template: :class:`ServiceTemplate`
+    :ivar description: Human-readable description
+    :vartype description: string
+    :ivar meta_data: Custom annotations
+    :vartype meta_data: {basestring: :class:`Metadata`}
+    :ivar nodes: Nodes
+    :vartype nodes: {basestring: :class:`Node`}
+    :ivar groups: Groups of nodes
+    :vartype groups: {basestring: :class:`Group`}
+    :ivar policies: Policies
+    :vartype policies: {basestring: :class:`Policy`}
+    :ivar substitution: The entire service can appear as a node
+    :vartype substitution: :class:`Substitution`
+    :ivar inputs: Externally provided parameters
+    :vartype inputs: {basestring: :class:`Parameter`}
+    :ivar outputs: These parameters are filled in after service installation
+    :vartype outputs: {basestring: :class:`Parameter`}
+    :ivar workflows: Custom workflows that can be performed on the service
+    :vartype workflows: {basestring: :class:`Operation`}
+    :ivar plugin_specifications: Plugins required to be installed
+    :vartype plugin_specifications: {basestring: :class:`PluginSpecification`}
+    :ivar created_at: Creation timestamp
+    :vartype created_at: :class:`datetime.datetime`
+    :ivar updated_at: Update timestamp
+    :vartype updated_at: :class:`datetime.datetime`
+
+    :ivar permalink: ??
+    :vartype permalink: basestring
+    :ivar scaling_groups: ??
+    :vartype scaling_groups: {}
+
+    :ivar modifications: Modifications of this service
+    :vartype modifications: [:class:`ServiceModification`]
+    :ivar updates: Updates of this service
+    :vartype updates: [:class:`ServiceUpdate`]
+    :ivar executions: Executions on this service
+    :vartype executions: [:class:`Execution`]
+    """
+
+    __tablename__ = 'service'
+
+    __private_fields__ = ['substitution_fk',
+                          'service_template_fk',
+                          'service_template_name']
+
+    @declared_attr
+    def service_template(cls):
+        return relationship.many_to_one(cls, 'service_template')
+
+    description = Column(Text)
+
+    @declared_attr
+    def meta_data(cls):
+        # Warning! We cannot use the attr name "metadata" because it's used by SQLAlchemy!
+        return relationship.many_to_many(cls, 'metadata', dict_key='name')
+
+    @declared_attr
+    def nodes(cls):
+        return relationship.one_to_many(cls, 'node', dict_key='name')
+
+    @declared_attr
+    def groups(cls):
+        return relationship.one_to_many(cls, 'group', dict_key='name')
+
+    @declared_attr
+    def policies(cls):
+        return relationship.one_to_many(cls, 'policy', dict_key='name')
+
+    @declared_attr
+    def substitution(cls):
+        return relationship.one_to_one(cls, 'substitution')
+
+    @declared_attr
+    def inputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+
+    @declared_attr
+    def outputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='outputs', dict_key='name')
+
+    @declared_attr
+    def workflows(cls):
+        return relationship.one_to_many(cls, 'operation', dict_key='name')
+
+    @declared_attr
+    def plugin_specifications(cls):
+        return relationship.many_to_many(cls, 'plugin_specification')
+
+    created_at = Column(DateTime, nullable=False, index=True)
+    updated_at = Column(DateTime)
+
+    # region orchestration
+
+    permalink = Column(Text)
+    scaling_groups = Column(modeling_types.Dict)
+
+    # endregion
+
+    # region foreign keys
+
+    @declared_attr
+    def substitution_fk(cls):
+        """Service one-to-one to Substitution"""
+        return relationship.foreign_key('substitution', nullable=True)
+
+    @declared_attr
+    def service_template_fk(cls):
+        """For Service many-to-one to ServiceTemplate"""
+        return relationship.foreign_key('service_template', nullable=True)
+
+    # endregion
+
+    # region association proxies
+
+    @declared_attr
+    def service_template_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('service_template', 'name')
+
+    # endregion
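A sketch of what the association proxy buys in queries (illustrative only; it assumes the concrete Service and ServiceTemplate models generated from these mixins and an open SQLAlchemy session, none of which are defined in this file):

    service = session.query(Service).filter_by(name='my_service').one()
    print(service.service_template_name)   # shorthand for service.service_template.name

    # The same thing done with an explicit join on the relationship declared above:
    services = (session.query(Service)
                .join(Service.service_template)
                .filter(ServiceTemplate.name == 'my_template')
                .all())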
+
+    def satisfy_requirements(self):
+        satisfied = True
+        for node in self.nodes.itervalues():
+            if not node.satisfy_requirements():
+                satisfied = False
+        return satisfied
+
+    def validate_capabilities(self):
+        satisfied = True
+        for node in self.nodes.itervalues():
+            if not node.validate_capabilities():
+                satisfied = False
+        return satisfied
+
+    def is_node_a_target(self, target_node):
+        for node in self.nodes.itervalues():
+            if self._is_node_a_target(node, target_node):
+                return True
+        return False
+
+    def _is_node_a_target(self, source_node, target_node):
+        if source_node.outbound_relationships:
+            for the_relationship in source_node.outbound_relationships:
+                if the_relationship.target_node.name == target_node.name:
+                    return True
+                else:
+                    node = the_relationship.target_node
+                    if node is not None:
+                        if self._is_node_a_target(node, target_node):
+                            return True
+        return False
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('description', self.description),
+            ('metadata', formatting.as_raw_dict(self.meta_data)),
+            ('nodes', formatting.as_raw_list(self.nodes)),
+            ('groups', formatting.as_raw_list(self.groups)),
+            ('policies', formatting.as_raw_list(self.policies)),
+            ('substitution', formatting.as_raw(self.substitution)),
+            ('inputs', formatting.as_raw_dict(self.inputs)),
+            ('outputs', formatting.as_raw_dict(self.outputs)),
+            ('workflows', formatting.as_raw_list(self.workflows))))
+
+    def validate(self):
+        utils.validate_dict_values(self.meta_data)
+        utils.validate_dict_values(self.nodes)
+        utils.validate_dict_values(self.groups)
+        utils.validate_dict_values(self.policies)
+        if self.substitution is not None:
+            self.substitution.validate()
+        utils.validate_dict_values(self.inputs)
+        utils.validate_dict_values(self.outputs)
+        utils.validate_dict_values(self.workflows)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(container, self.meta_data, report_issues)
+        utils.coerce_dict_values(container, self.nodes, report_issues)
+        utils.coerce_dict_values(container, self.groups, report_issues)
+        utils.coerce_dict_values(container, self.policies, report_issues)
+        if self.substitution is not None:
+            self.substitution.coerce_values(container, report_issues)
+        utils.coerce_dict_values(container, self.inputs, report_issues)
+        utils.coerce_dict_values(container, self.outputs, report_issues)
+        utils.coerce_dict_values(container, self.workflows, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        if self.description is not None:
+            console.puts(context.style.meta(self.description))
+        utils.dump_dict_values(self.meta_data, 'Metadata')
+        for node in self.nodes.itervalues():
+            node.dump()
+        for group in self.groups.itervalues():
+            group.dump()
+        for policy in self.policies.itervalues():
+            policy.dump()
+        if self.substitution is not None:
+            self.substitution.dump()
+        utils.dump_dict_values(self.inputs, 'Inputs')
+        utils.dump_dict_values(self.outputs, 'Outputs')
+        utils.dump_dict_values(self.workflows, 'Workflows')
+
+    def dump_graph(self):
+        for node in self.nodes.itervalues():
+            if not self.is_node_a_target(node):
+                self._dump_graph_node(node)
+
+    def _dump_graph_node(self, node):
+        context = ConsumptionContext.get_thread_local()
+        console.puts(context.style.node(node.name))
+        if node.outbound_relationships:
+            with context.style.indent:
+                for the_relationship in node.outbound_relationships:
+                    relationship_name = context.style.property(the_relationship.name)
+                    if the_relationship.type is not None:
+                        relationship_type = context.style.type(the_relationship.type.name)
+                    else:
+                        relationship_type = None
+                    if the_relationship.target_capability is not None:
+                        capability_name = \
+                            context.style.node(the_relationship.target_capability.name)
+                    else:
+                        capability_name = None
+                    if capability_name is not None:
+                        if relationship_type is not None:
+                            console.puts('-> {0} ({1}) {2}'.format(relationship_name,
+                                                                   relationship_type,
+                                                                   capability_name))
+                        else:
+                            console.puts('-> {0} {1}'.format(relationship_name, capability_name))
+                    else:
+                        if relationship_type is not None:
+                            console.puts('-> {0} ({1})'.format(relationship_name,
+                                                               relationship_type))
+                        else:
+                            console.puts('-> {0}'.format(relationship_name))
+                    target_node = the_relationship.target_node
+                    with console.indent(3):
+                        self._dump_graph_node(target_node)
+
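Taken together, satisfy_requirements(), validate_capabilities() and dump_graph() are what a consumer calls after instantiating a service from its template. A hedged sketch (it assumes a Service instance named service and that a thread-local ConsumptionContext has already been established, since the methods above fetch it with get_thread_local()):

    if not service.satisfy_requirements():      # creates Relationship links between nodes
        print('some requirements could not be satisfied')
    elif not service.validate_capabilities():   # enforces min_occurrences on each capability
        print('some capabilities do not have enough relationships')
    else:
        service.dump_graph()                    # prints the resulting node graph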
+
+class NodeBase(InstanceModelMixin): # pylint: disable=too-many-public-methods
+    """
+    Usually an instance of a :class:`NodeTemplate`.
+
+    Nodes may have zero or more :class:`Relationship` instances to other nodes.
+
+    :ivar name: Name (unique for this service)
+    :vartype name: basestring
+    :ivar node_template: Template from which this node was instantiated (optional)
+    :vartype node_template: :class:`NodeTemplate`
+    :ivar type: Node type
+    :vartype type: :class:`Type`
+    :ivar description: Human-readable description
+    :vartype description: string
+    :ivar properties: Associated parameters
+    :vartype properties: {basestring: :class:`Parameter`}
+    :ivar interfaces: Bundles of operations
+    :vartype interfaces: {basestring: :class:`Interface`}
+    :ivar artifacts: Associated files
+    :vartype artifacts: {basestring: :class:`Artifact`}
+    :ivar capabilities: Exposed capabilities
+    :vartype capabilities: {basestring: :class:`Capability`}
+    :ivar outbound_relationships: Relationships to other nodes
+    :vartype outbound_relationships: [:class:`Relationship`]
+    :ivar inbound_relationships: Relationships from other nodes
+    :vartype inbound_relationships: [:class:`Relationship`]
+    :ivar plugin_specifications: Plugins required to be installed on the node's host
+    :vartype plugin_specifications: {basestring: :class:`PluginSpecification`}
+    :ivar host: Host node (can be self)
+    :vartype host: :class:`Node`
+
+    :ivar runtime_properties: TODO: should be replaced with attributes
+    :vartype runtime_properties: {}
+    :ivar scaling_groups: ??
+    :vartype scaling_groups: []
+    :ivar state: ??
+    :vartype state: basestring
+    :ivar version: Used by `aria.storage.instrumentation`
+    :vartype version: int
+
+    :ivar service: Containing service
+    :vartype service: :class:`Service`
+    :ivar groups: We are a member of these groups
+    :vartype groups: [:class:`Group`]
+    :ivar policies: Policies enacted on this node
+    :vartype policies: [:class:`Policy`]
+    :ivar substitution_mapping: Our contribution to service substitution
+    :vartype substitution_mapping: :class:`SubstitutionMapping`
+    :ivar tasks: Tasks on this node
+    :vartype tasks: [:class:`Task`]
+    """
+
+    __tablename__ = 'node'
+
+    __private_fields__ = ['type_fk',
+                          'host_fk',
+                          'service_fk',
+                          'node_template_fk',
+                          'service_name']
+
+    @declared_attr
+    def node_template(cls):
+        return relationship.many_to_one(cls, 'node_template')
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type')
+
+    description = Column(Text)
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    @declared_attr
+    def interfaces(cls):
+        return relationship.one_to_many(cls, 'interface', dict_key='name')
+
+    @declared_attr
+    def artifacts(cls):
+        return relationship.one_to_many(cls, 'artifact', dict_key='name')
+
+    @declared_attr
+    def capabilities(cls):
+        return relationship.one_to_many(cls, 'capability', dict_key='name')
+
+    @declared_attr
+    def outbound_relationships(cls):
+        return relationship.one_to_many(cls, 'relationship', child_fk='source_node_fk',
+                                        child_property='source_node')
+
+    @declared_attr
+    def inbound_relationships(cls):
+        return relationship.one_to_many(cls, 'relationship', child_fk='target_node_fk',
+                                        child_property='target_node')
+
+    @declared_attr
+    def plugin_specifications(cls):
+        return relationship.many_to_many(cls, 'plugin_specification', dict_key='name')
+
+    @declared_attr
+    def host(cls):
+        return relationship.one_to_one_self(cls, 'host_fk')
+
+    # region orchestration
+
+    runtime_properties = Column(modeling_types.Dict)
+    scaling_groups = Column(modeling_types.List)
+    state = Column(Text, nullable=False)
+    version = Column(Integer, default=1)
+
+    __mapper_args__ = {'version_id_col': version} # Enable SQLAlchemy automatic version counting
+
+    @property
+    def ip(self):
+        # TODO: totally broken
+        if not self.host_fk:
+            return None
+        host_node = self.host
+        if 'ip' in host_node.runtime_properties:  # pylint: disable=no-member
+            return host_node.runtime_properties['ip']  # pylint: disable=no-member
+        host_node = host_node.node_template  # pylint: disable=no-member
+        host_ip_property = host_node.properties.get('ip')
+        if host_ip_property:
+            return host_ip_property.value
+        return None
+
+    # endregion
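The version_id_col mapper argument above is standard SQLAlchemy optimistic locking: the version column is added to every UPDATE's WHERE clause and incremented on success, so a stale write raises StaleDataError. A self-contained sketch on a toy table (not the ARIA schema):

    from sqlalchemy import Column, Integer, Text, create_engine, text
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker
    from sqlalchemy.orm.exc import StaleDataError

    Base = declarative_base()

    class Item(Base):
        __tablename__ = 'item'
        id = Column(Integer, primary_key=True)
        state = Column(Text)
        version = Column(Integer, default=1)
        __mapper_args__ = {'version_id_col': version}  # same mechanism as Node.version

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    session.add(Item(id=1, state='initial'))
    session.commit()

    item = session.query(Item).get(1)       # loaded with version == 1
    # Simulate a concurrent writer bumping the version behind the ORM's back:
    session.execute(text('UPDATE item SET version = version + 1 WHERE id = 1'))
    item.state = 'started'
    try:
        session.commit()                    # UPDATE ... WHERE version = 1 matches no rows
    except StaleDataError:
        session.rollback()                  # reload and retry in a real orchestrator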
+
+    # region foreign_keys
+
+    @declared_attr
+    def type_fk(cls):
+        """For Node many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    @declared_attr
+    def host_fk(cls):
+        """For Node one-to-one to Node"""
+        return relationship.foreign_key('node', nullable=True)
+
+    @declared_attr
+    def service_fk(cls):
+        """For Service one-to-many to Node"""
+        return relationship.foreign_key('service')
+
+    @declared_attr
+    def node_template_fk(cls):
+        """For Node many-to-one to NodeTemplate"""
+        return relationship.foreign_key('node_template', nullable=True)
+
+    # endregion
+
+    # region association proxies
+
+    @declared_attr
+    def service_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('service', 'name')
+
+    # endregion
+
+    def satisfy_requirements(self):
+        node_template = self.node_template
+        satisfied = True
+        for requirement_template in node_template.requirement_templates:
+            # Find target template
+            target_node_template, target_node_capability = \
+                requirement_template.find_target(node_template)
+            if target_node_template is not None:
+                if not self._satisfy_capability(target_node_capability,
+                                                target_node_template,
+                                                requirement_template):
+                    satisfied = False
+            else:
+                context = ConsumptionContext.get_thread_local()
+                context.validation.report('requirement "{0}" of node "{1}" has no target node '
+                                          'template'.format(requirement_template.name, self.name),
+                                          level=validation.Issue.BETWEEN_INSTANCES)
+                satisfied = False
+        return satisfied
+
+    def _satisfy_capability(self, target_node_capability, target_node_template,
+                            requirement_template):
+        from . import models
+        context = ConsumptionContext.get_thread_local()
+        # Find target nodes
+        target_nodes = target_node_template.nodes.all()
+        if target_nodes:
+            target_node = None
+            target_capability = None
+
+            if target_node_capability is not None:
+                # Relate to the first target node that has capacity
+                for node in target_nodes:
+                    target_capability = node.capabilities.get(target_node_capability.name)
+                    if target_capability.relate():
+                        target_node = node
+                        break
+            else:
+                # Use first target node
+                target_node = target_nodes[0]
+
+            if target_node is not None:
+                if requirement_template.relationship_template is not None:
+                    the_relationship = \
+                        requirement_template.relationship_template.instantiate(self)
+                else:
+                    the_relationship = models.Relationship(target_capability=target_capability)
+                the_relationship.name = requirement_template.name
+                the_relationship.requirement_template = requirement_template
+                the_relationship.target_node = target_node
+                self.outbound_relationships.append(the_relationship)
+                return True
+            else:
+                context.validation.report('requirement "{0}" of node "{1}" targets node '
+                                          'template "{2}" but its instantiated nodes do not '
+                                          'have enough capacity'.format(
+                                              requirement_template.name,
+                                              self.name,
+                                              target_node_template.name),
+                                          level=validation.Issue.BETWEEN_INSTANCES)
+                return False
+        else:
+            context.validation.report('requirement "{0}" of node "{1}" targets node template '
+                                      '"{2}" but it has no instantiated nodes'.format(
+                                          requirement_template.name,
+                                          self.name,
+                                          target_node_template.name),
+                                      level=validation.Issue.BETWEEN_INSTANCES)
+            return False
+
+    def validate_capabilities(self):
+        context = ConsumptionContext.get_thread_local()
+        satisfied = True
+        for capability in self.capabilities.itervalues():
+            if not capability.has_enough_relationships:
+                context.validation.report('capability "{0}" of node "{1}" requires at least {2:d} '
+                                          'relationships but has {3:d}'.format(
+                                              capability.name,
+                                              self.name,
+                                              capability.min_occurrences,
+                                              capability.occurrences),
+                                          level=validation.Issue.BETWEEN_INSTANCES)
+                satisfied = False
+        return satisfied
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('type_name', self.type.name),
+            ('properties', formatting.as_raw_dict(self.properties)),
+            ('interfaces', formatting.as_raw_list(self.interfaces)),
+            ('artifacts', formatting.as_raw_list(self.artifacts)),
+            ('capabilities', formatting.as_raw_list(self.capabilities)),
+            ('relationships', formatting.as_raw_list(self.outbound_relationships))))
+
+    def validate(self):
+        context = ConsumptionContext.get_thread_local()
+        if len(self.name) > context.modeling.id_max_length:
+            context.validation.report('"{0}" has an ID longer than the limit of {1:d} characters: '
+                                      '{2:d}'.format(
+                                          self.name,
+                                          context.modeling.id_max_length,
+                                          len(self.name)),
+                                      level=validation.Issue.BETWEEN_INSTANCES)
+
+        # TODO: validate that node template is of type?
+
+        utils.validate_dict_values(self.properties)
+        utils.validate_dict_values(self.interfaces)
+        utils.validate_dict_values(self.artifacts)
+        utils.validate_dict_values(self.capabilities)
+        utils.validate_list_values(self.outbound_relationships)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(self, self.properties, report_issues)
+        utils.coerce_dict_values(self, self.interfaces, report_issues)
+        utils.coerce_dict_values(self, self.artifacts, report_issues)
+        utils.coerce_dict_values(self, self.capabilities, report_issues)
+        utils.coerce_list_values(self, self.outbound_relationships, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts('Node: {0}'.format(context.style.node(self.name)))
+        with context.style.indent:
+            console.puts('Type: {0}'.format(context.style.type(self.type.name)))
+            console.puts('Template: {0}'.format(context.style.node(self.node_template.name)))
+            utils.dump_dict_values(self.properties, 'Properties')
+            utils.dump_interfaces(self.interfaces)
+            utils.dump_dict_values(self.artifacts, 'Artifacts')
+            utils.dump_dict_values(self.capabilities, 'Capabilities')
+            utils.dump_list_values(self.outbound_relationships, 'Relationships')
+
+
+class GroupBase(InstanceModelMixin):
+    """
+    Usually an instance of a :class:`GroupTemplate`.
+
+    :ivar name: Name (unique for this service)
+    :vartype name: basestring
+    :ivar group_template: Template from which this group was instantiated (optional)
+    :vartype group_template: :class:`GroupTemplate`
+    :ivar type: Group type
+    :vartype type: :class:`Type`
+    :ivar description: Human-readable description
+    :vartype description: string
+    :ivar nodes: Members of this group
+    :vartype nodes: [:class:`Node`]
+    :ivar properties: Associated parameters
+    :vartype properties: {basestring: :class:`Parameter`}
+    :ivar interfaces: Bundles of operations
+    :vartype interfaces: {basestring: :class:`Interface`}
+
+    :ivar service: Containing service
+    :vartype service: :class:`Service`
+    :ivar policies: Policies enacted on this group
+    :vartype policies: [:class:`Policy`]
+    """
+
+    __tablename__ = 'group'
+
+    __private_fields__ = ['type_fk',
+                          'service_fk',
+                          'group_template_fk']
+
+    @declared_attr
+    def group_template(cls):
+        return relationship.many_to_one(cls, 'group_template')
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type')
+
+    description = Column(Text)
+
+    @declared_attr
+    def nodes(cls):
+        return relationship.many_to_many(cls, 'node')
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    @declared_attr
+    def interfaces(cls):
+        return relationship.one_to_many(cls, 'interface', dict_key='name')
+
+    # region foreign_keys
+
+    @declared_attr
+    def type_fk(cls):
+        """For Group many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    @declared_attr
+    def service_fk(cls):
+        """For Service one-to-many to Group"""
+        return relationship.foreign_key('service')
+
+    @declared_attr
+    def group_template_fk(cls):
+        """For Group many-to-one to GroupTemplate"""
+        return relationship.foreign_key('group_template', nullable=True)
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('properties', formatting.as_raw_dict(self.properties)),
+            ('interfaces', formatting.as_raw_list(self.interfaces))))
+
+    def validate(self):
+        utils.validate_dict_values(self.properties)
+        utils.validate_dict_values(self.interfaces)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(container, self.properties, report_issues)
+        utils.coerce_dict_values(container, self.interfaces, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts('Group: {0}'.format(context.style.node(self.name)))
+        with context.style.indent:
+            console.puts('Type: {0}'.format(context.style.type(self.type.name)))
+            utils.dump_dict_values(self.properties, 'Properties')
+            utils.dump_interfaces(self.interfaces)
+            if self.nodes:
+                console.puts('Member nodes:')
+                with context.style.indent:
+                    for node in self.nodes:
+                        console.puts(context.style.node(node.name))
+
+
+class PolicyBase(InstanceModelMixin):
+    """
+    Usually an instance of a :class:`PolicyTemplate`.
+
+    :ivar name: Name (unique for this service)
+    :vartype name: basestring
+    :ivar policy_template: Template from which this policy was instantiated (optional)
+    :vartype policy_template: :class:`PolicyTemplate`
+    :ivar type: Policy type
+    :vartype type: :class:`Type`
+    :ivar description: Human-readable description
+    :vartype description: string
+    :ivar nodes: Policy will be enacted on all these nodes
+    :vartype nodes: [:class:`Node`]
+    :ivar groups: Policy will be enacted on all nodes in these groups
+    :vartype groups: [:class:`Group`]
+    :ivar properties: Associated parameters
+    :vartype properties: {basestring: :class:`Parameter`}
+
+    :ivar service: Containing service
+    :vartype service: :class:`Service`
+    """
+
+    __tablename__ = 'policy'
+
+    __private_fields__ = ['type_fk',
+                          'service_fk',
+                          'policy_template_fk']
+
+    @declared_attr
+    def policy_template(cls):
+        return relationship.many_to_one(cls, 'policy_template')
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type')
+
+    description = Column(Text)
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    @declared_attr
+    def nodes(cls):
+        return relationship.many_to_many(cls, 'node')
+
+    @declared_attr
+    def groups(cls):
+        return relationship.many_to_many(cls, 'group')
+
+    # region foreign_keys
+
+    @declared_attr
+    def type_fk(cls):
+        """For Policy many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    @declared_attr
+    def service_fk(cls):
+        """For Service one-to-many to Policy"""
+        return relationship.foreign_key('service')
+
+    @declared_attr
+    def policy_template_fk(cls):
+        """For Policy many-to-one to PolicyTemplate"""
+        return relationship.foreign_key('policy_template', nullable=True)
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('type_name', self.type.name),
+            ('properties', formatting.as_raw_dict(self.properties))))
+
+    def validate(self):
+        utils.validate_dict_values(self.properties)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(container, self.properties, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts('Policy: {0}'.format(context.style.node(self.name)))
+        with context.style.indent:
+            console.puts('Type: {0}'.format(context.style.type(self.type.name)))
+            utils.dump_dict_values(self.properties, 'Properties')
+            if self.nodes:
+                console.puts('Target nodes:')
+                with context.style.indent:
+                    for node in self.nodes:
+                        console.puts(context.style.node(node.name))
+            if self.groups:
+                console.puts('Target groups:')
+                with context.style.indent:
+                    for group in self.groups:
+                        console.puts(context.style.node(group.name))
+
+
+class SubstitutionBase(InstanceModelMixin):
+    """
+    Used to expose the entire service as a single node.
+
+    Usually an instance of a :class:`SubstitutionTemplate`.
+
+    :ivar substitution_template: Template from which this substitution was instantiated (optional)
+    :vartype substitution_template: :class:`SubstitutionTemplate`
+    :ivar node_type: Exposed node type
+    :vartype node_type: :class:`Type`
+    :ivar mappings: Requirement and capability mappings
+    :vartype mappings: {basestring: :class:`SubstitutionMapping`}
+
+    :ivar service: Containing service
+    :vartype service: :class:`Service`
+    """
+
+    __tablename__ = 'substitution'
+
+    __private_fields__ = ['node_type_fk',
+                          'substitution_template_fk']
+
+    @declared_attr
+    def substitution_template(cls):
+        return relationship.many_to_one(cls, 'substitution_template')
+
+    @declared_attr
+    def node_type(cls):
+        return relationship.many_to_one(cls, 'type')
+
+    @declared_attr
+    def mappings(cls):
+        return relationship.one_to_many(cls, 'substitution_mapping', dict_key='name')
+
+    # region foreign_keys
+
+    @declared_attr
+    def node_type_fk(cls):
+        """For Substitution many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    @declared_attr
+    def substitution_template_fk(cls):
+        """For Substitution many-to-one to SubstitutionTemplate"""
+        return relationship.foreign_key('substitution_template', nullable=True)
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('node_type_name', self.node_type.name),
+            ('mappings', formatting.as_raw_dict(self.mappings))))
+
+    def validate(self):
+        utils.validate_dict_values(self.mappings)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(container, self.mappings, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts('Substitution:')
+        with context.style.indent:
+            console.puts('Node type: {0}'.format(context.style.type(self.node_type.name)))
+            utils.dump_dict_values(self.mappings, 'Mappings')
+
+
+class SubstitutionMappingBase(InstanceModelMixin):
+    """
+    Used by :class:`Substitution` to map a capability or a requirement to a node.
+
+    Only one of `capability` and `requirement_template` can be set.
+
+    Usually instantiated from a mapping in a :class:`SubstitutionTemplate`.
+
+    :ivar name: Exposed capability or requirement name
+    :vartype name: basestring
+    :ivar node: Node
+    :vartype node: :class:`Node`
+    :ivar capability: Capability in the node
+    :vartype capability: :class:`Capability`
+    :ivar requirement_template: Requirement template in the node template
+    :vartype requirement_template: :class:`RequirementTemplate`
+
+    :ivar substitution: Containing substitution
+    :vartype substitution: :class:`Substitution`
+    """
+
+    __tablename__ = 'substitution_mapping'
+
+    __private_fields__ = ['substitution_fk',
+                          'node_fk',
+                          'capability_fk',
+                          'requirement_template_fk']
+
+    @declared_attr
+    def node(cls):
+        return relationship.one_to_one(cls, 'node')
+
+    @declared_attr
+    def capability(cls):
+        return relationship.one_to_one(cls, 'capability')
+
+    @declared_attr
+    def requirement_template(cls):
+        return relationship.one_to_one(cls, 'requirement_template')
+
+    # region foreign keys
+
+    @declared_attr
+    def substitution_fk(cls):
+        """For Substitution one-to-many to SubstitutionMapping"""
+        return relationship.foreign_key('substitution')
+
+    @declared_attr
+    def node_fk(cls):
+        """For SubstitutionMapping one-to-one to Node"""
+        return relationship.foreign_key('node')
+
+    @declared_attr
+    def capability_fk(cls):
+        """For SubstitutionMapping one-to-one to Capability"""
+        return relationship.foreign_key('capability', nullable=True)
+
+    @declared_attr
+    def requirement_template_fk(cls):
+        """For SubstitutionMapping one-to-one to RequirementTemplate"""
+        return relationship.foreign_key('requirement_template', nullable=True)
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),))
+
+    def coerce_values(self, container, report_issues):
+        pass
+
+    def validate(self):
+        context = ConsumptionContext.get_thread_local()
+        if (self.capability is None) and (self.requirement_template is None):
+            context.validation.report('mapping "{0}" refers to neither a capability nor a requirement'
+                                      ' in node: {1}'.format(
+                                          self.name,
+                                          formatting.safe_repr(self.node.name)),
+                                      level=validation.Issue.BETWEEN_TYPES)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts('{0} -> {1}.{2}'.format(
+            context.style.node(self.name),
+            context.style.node(self.node.name),
+            context.style.node(self.capability.name
+                               if self.capability
+                               else self.requirement_template.name)))
+
+
+class RelationshipBase(InstanceModelMixin):
+    """
+    Connects :class:`Node` to a capability in another node.
+
+    Might be an instance of a :class:`RelationshipTemplate`.
+
+    :ivar name: Name (usually the name of the requirement at the source node template)
+    :vartype name: basestring
+    :ivar relationship_template: Template from which this relationship was instantiated (optional)
+    :vartype relationship_template: :class:`RelationshipTemplate`
+    :ivar requirement_template: Requirement template satisfied by this relationship (optional)
+    :vartype requirement_template: :class:`RequirementTemplate`
+    :ivar type: Relationship type
+    :vartype type: :class:`Type`
+    :ivar target_capability: Capability at the target node (optional)
+    :vartype target_capability: :class:`Capability`
+    :ivar properties: Associated parameters
+    :vartype properties: {basestring: :class:`Parameter`}
+    :ivar interfaces: Bundles of operations
+    :vartype interfaces: {basestring: :class:`Interface`}
+
+    :ivar source_position: ??
+    :vartype source_position: int
+    :ivar target_position: ??
+    :vartype target_position: int
+
+    :ivar source_node: Source node
+    :vartype source_node: :class:`Node`
+    :ivar target_node: Target node
+    :vartype target_node: :class:`Node`
+    :ivar tasks: Tasks on this relationship
+    :vartype tasks: [:class:`Task`]
+    """
+
+    __tablename__ = 'relationship'
+
+    __private_fields__ = ['type_fk',
+                          'source_node_fk',
+                          'target_node_fk',
+                          'target_capability_fk',
+                          'requirement_template_fk',
+                          'relationship_template_fk',
+                          'source_node_name',
+                          'target_node_name']
+
+    @declared_attr
+    def relationship_template(cls):
+        return relationship.many_to_one(cls, 'relationship_template')
+
+    @declared_attr
+    def requirement_template(cls):
+        return relationship.many_to_one(cls, 'requirement_template')
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type')
+
+    @declared_attr
+    def target_capability(cls):
+        return relationship.one_to_one(cls, 'capability')
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    @declared_attr
+    def interfaces(cls):
+        return relationship.one_to_many(cls, 'interface', dict_key='name')
+
+    # region orchestration
+
+    source_position = Column(Integer) # ???
+    target_position = Column(Integer) # ???
+
+    # endregion
+
+    # region foreign keys
+
+    @declared_attr
+    def type_fk(cls):
+        """For Relationship many-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
+
+    @declared_attr
+    def source_node_fk(cls):
+        """For Node one-to-many to Relationship"""
+        return relationship.foreign_key('node')
+
+    @declared_attr
+    def target_node_fk(cls):
+        """For Node one-to-many to Relationship"""
+        return relationship.foreign_key('node')
+
+    @declared_attr
+    def target_capability_fk(cls):
+        """For Relationship one-to-one to Capability"""
+        return relationship.foreign_key('capability', nullable=True)
+
+    @declared_attr
+    def requirement_template_fk(cls):
+        """For Relationship many-to-one to RequirementTemplate"""
+        return relationship.foreign_key('requirement_template', nullable=True)
+
+    @declared_attr
+    def relationship_template_fk(cls):
+        """For Relationship many-to-one to RelationshipTemplate"""
+        return relationship.foreign_key('relationship_template', nullable=True)
+
+    # endregion
+
+    # region association proxies
+
+    @declared_attr
+    def source_node_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('source_node', 'name')
+
+    @declared_attr
+    def target_node_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('target_node', 'name')
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('target_node_id', self.target_node.name),
+            ('type_name', self.type.name
+             if self.type is not None else None),
+            ('template_name', self.relationship_template.name
+             if self.relationship_template is not None else None),
+            ('properties', formatting.as_raw_dict(self.properties)),
+            ('interfaces', formatting.as_raw_list(self.interfaces))))
+
+    def validate(self):
+        utils.validate_dict_values(self.properties)
+        utils.validate_dict_values(self.interfaces)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(container, self.properties, report_issues)
+        utils.coerce_dict_values(container, self.interfaces, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        if self.name:
+            console.puts('{0} ->'.format(context.style.node(self.name)))
+        else:
+            console.puts('->')
+        with context.style.indent:
+            console.puts('Node: {0}'.format(context.style.node(self.target_node.name)))
+            if self.target_capability:
+                console.puts('Capability: {0}'.format(context.style.node(
+                    self.target_capability.name)))
+            if self.type is not None:
+                console.puts('Relationship type: {0}'.format(context.style.type(self.type.name)))
+            if (self.relationship_template is not None) and self.relationship_template.name:
+                console.puts('Relationship template: {0}'.format(
+                    context.style.node(self.relationship_template.name)))
+            utils.dump_dict_values(self.properties, 'Properties')
+            utils.dump_interfaces(self.interfaces, 'Interfaces')
+
+
+class CapabilityBase(InstanceModelMixin):
+    """
+    A capability of a :class:`Node`.
+
+    Usually an instance of a :class:`CapabilityTemplate`.
+
+    :ivar name: Name (unique for the node)
+    :vartype name: basestring
+    :ivar capability_template: Template from which this capability was instantiated (optional)
+    :vartype capability_template: :class:`CapabilityTemplate`
+    :ivar type: Capability type
+    :vartype type: :class:`Type`
+    :ivar min_occurrences: Minimum number of requirement matches required
+    :vartype min_occurrences: int
+    :ivar max_occurrences: Maximum number of requirement matches allowed
+    :vartype max_occurrences: int
+    :ivar occurrences: Actual number of requirement matches
+    :vartype occurrences: int
+    :ivar properties: Associated parameters
+    :vartype properties: {basestring: :class:`Parameter`}
+
+    :ivar node: Containing node
+    :vartype node: :class:`Node`
+    :ivar relationship: Available when we are the target of a relationship
+    :vartype relationship: :class:`Relationship`
+    :ivar substitution_mapping: Our contribution to service substitution
+    :vartype substitution_mapping: :class:`SubstitutionMapping`
+    """
+
+    __tablename__ = 'capability'
+
+    __private_fields__ = ['type_fk',
+                          'node_fk',
+                          'capability_template_fk']
+
+    @declared_attr
+    def capability_template(cls):
+        return relationship.many_to_one(cls, 'capability_template')
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type')
+
+    min_occurrences = Column(Integer, default=None)
+    max_occurrences = Column(Integer, default=None)
+    occurrences = Column(Integer, default=0)
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    # region foreign_keys
+
+    @declared_attr
+    def type_fk(cls):
+        """For Capability many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    @declared_attr
+    def node_fk(cls):
+        """For Node one-to-many to Capability"""
+        return relationship.foreign_key('node')
+
+    @declared_attr
+    def capability_template_fk(cls):
+        """For Capability many-to-one to CapabilityTemplate"""
+        return relationship.foreign_key('capability_template', nullable=True)
+
+    # endregion
+
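+    # Capacity bookkeeping: 'occurrences' counts how many requirements have been matched against
+    # this capability. 'has_enough_relationships' checks the declared lower bound, while 'relate'
+    # claims one more match and enforces the upper bound.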
+    @property
+    def has_enough_relationships(self):
+        if self.min_occurrences is not None:
+            return self.occurrences >= self.min_occurrences
+        return True
+
+    def relate(self):
+        if self.max_occurrences is not None:
+            if self.occurrences == self.max_occurrences:
+                return False
+        self.occurrences += 1
+        return True
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('type_name', self.type.name),
+            ('properties', formatting.as_raw_dict(self.properties))))
+
+    def validate(self):
+        utils.validate_dict_values(self.properties)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(container, self.properties, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts(context.style.node(self.name))
+        with context.style.indent:
+            console.puts('Type: {0}'.format(context.style.type(self.type.name)))
+            console.puts('Occurrences: {0:d} ({1:d}{2})'.format(
+                self.occurrences,
+                self.min_occurrences or 0,
+                ' to {0:d}'.format(self.max_occurrences)
+                if self.max_occurrences is not None
+                else ' or more'))
+            utils.dump_dict_values(self.properties, 'Properties')
+
+
+class InterfaceBase(InstanceModelMixin):
+    """
+    A typed set of :class:`Operation`.
+
+    Usually an instance of :class:`InterfaceTemplate`.
+
+    :ivar name: Name (unique for the node, group, or relationship)
+    :vartype name: basestring
+    :ivar interface_template: Template from which this interface was instantiated (optional)
+    :vartype interface_template: :class:`InterfaceTemplate`
+    :ivar type: Interface type
+    :vartype type: :class:`Type`
+    :ivar description: Human-readable description
+    :vartype description: string
+    :ivar inputs: Parameters that can be used by all operations in the interface
+    :vartype inputs: {basestring: :class:`Parameter`}
+    :ivar operations: Operations
+    :vartype operations: {basestring: :class:`Operation`}
+
+    :ivar node: Containing node
+    :vartype node: :class:`Node`
+    :ivar group: Containing group
+    :vartype group: :class:`Group`
+    :ivar relationship: Containing relationship
+    :vartype relationship: :class:`Relationship`
+    """
+
+    __tablename__ = 'interface'
+
+    __private_fields__ = ['type_fk',
+                          'node_fk',
+                          'group_fk',
+                          'relationship_fk',
+                          'interface_template_fk']
+
+    @declared_attr
+    def interface_template(cls):
+        return relationship.many_to_one(cls, 'interface_template')
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type')
+
+    description = Column(Text)
+
+    @declared_attr
+    def inputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+
+    @declared_attr
+    def operations(cls):
+        return relationship.one_to_many(cls, 'operation', dict_key='name')
+
+    # region foreign_keys
+
+    @declared_attr
+    def type_fk(cls):
+        """For Interface many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    @declared_attr
+    def node_fk(cls):
+        """For Node one-to-many to Interface"""
+        return relationship.foreign_key('node', nullable=True)
+
+    @declared_attr
+    def group_fk(cls):
+        """For Group one-to-many to Interface"""
+        return relationship.foreign_key('group', nullable=True)
+
+    @declared_attr
+    def relationship_fk(cls):
+        """For Relationship one-to-many to Interface"""
+        return relationship.foreign_key('relationship', nullable=True)
+
+    @declared_attr
+    def interface_template_fk(cls):
+        """For Interface many-to-one to InterfaceTemplate"""
+        return relationship.foreign_key('interface_template', nullable=True)
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('description', self.description),
+            ('type_name', self.type.name),
+            ('inputs', formatting.as_raw_dict(self.inputs)),
+            ('operations', formatting.as_raw_list(self.operations))))
+
+    def validate(self):
+        utils.validate_dict_values(self.inputs)
+        utils.validate_dict_values(self.operations)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(container, self.inputs, report_issues)
+        utils.coerce_dict_values(container, self.operations, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts(context.style.node(self.name))
+        if self.description:
+            console.puts(context.style.meta(self.description))
+        with context.style.indent:
+            console.puts('Interface type: {0}'.format(context.style.type(self.type.name)))
+            utils.dump_dict_values(self.inputs, 'Inputs')
+            utils.dump_dict_values(self.operations, 'Operations')
+
+
+class OperationBase(InstanceModelMixin):
+    """
+    An operation in an :class:`Interface`.
+
+    Might be an instance of :class:`OperationTemplate`.
+
+    :ivar name: Name (unique for the interface or service)
+    :vartype name: basestring
+    :ivar operation_template: Template from which this operation was instantiated (optional)
+    :vartype operation_template: :class:`OperationTemplate`
+    :ivar description: Human-readable description
+    :vartype description: string
+    :ivar plugin_specification: Associated plugin
+    :vartype plugin_specification: :class:`PluginSpecification`
+    :ivar implementation: Implementation string (interpreted by the plugin)
+    :vartype implementation: basestring
+    :ivar dependencies: Dependency strings (interpreted by the plugin)
+    :vartype dependencies: [basestring]
+    :ivar inputs: Parameters that can be used by this operation
+    :vartype inputs: {basestring: :class:`Parameter`}
+    :ivar executor: Executor name
+    :vartype executor: basestring
+    :ivar max_retries: Maximum number of retries allowed in case of failure
+    :vartype max_retries: int
+    :ivar retry_interval: Interval between retries (in seconds)
+    :vartype retry_interval: int
+
+    :ivar interface: Containing interface
+    :vartype interface: :class:`Interface`
+    :ivar service: Containing service
+    :vartype service: :class:`Service`
+    """
+
+    __tablename__ = 'operation'
+
+    __private_fields__ = ['service_fk',
+                          'interface_fk',
+                          'plugin_specification_fk',
+                          'operation_template_fk']
+
+    @declared_attr
+    def operation_template(cls):
+        return relationship.many_to_one(cls, 'operation_template')
+
+    description = Column(Text)
+
+    @declared_attr
+    def plugin_specification(cls):
+        return relationship.one_to_one(cls, 'plugin_specification')
+
+    implementation = Column(Text)
+    dependencies = Column(modeling_types.StrictList(item_cls=basestring))
+
+    @declared_attr
+    def inputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+
+    executor = Column(Text)
+    max_retries = Column(Integer)
+    retry_interval = Column(Integer)
+
+    # region foreign_keys
+
+    @declared_attr
+    def service_fk(cls):
+        """For Service one-to-many to Operation"""
+        return relationship.foreign_key('service', nullable=True)
+
+    @declared_attr
+    def interface_fk(cls):
+        """For Interface one-to-many to Operation"""
+        return relationship.foreign_key('interface', nullable=True)
+
+    @declared_attr
+    def plugin_specification_fk(cls):
+        """For Operation one-to-one to PluginSpecification"""
+        return relationship.foreign_key('plugin_specification', nullable=True)
+
+    @declared_attr
+    def operation_template_fk(cls):
+        """For Operation many-to-one to OperationTemplate"""
+        return relationship.foreign_key('operation_template', nullable=True)
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('description', self.description),
+            ('implementation', self.implementation),
+            ('dependencies', self.dependencies),
+            ('executor', self.executor),
+            ('max_retries', self.max_retries),
+            ('retry_interval', self.retry_interval),
+            ('inputs', formatting.as_raw_dict(self.inputs))))
+
+    def validate(self):
+        # TODO must be associated with interface or service
+        utils.validate_dict_values(self.inputs)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(container, self.inputs, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts(context.style.node(self.name))
+        if self.description:
+            console.puts(context.style.meta(self.description))
+        with context.style.indent:
+            if self.implementation is not None:
+                console.puts('Implementation: {0}'.format(
+                    context.style.literal(self.implementation)))
+            if self.dependencies:
+                console.puts(
+                    'Dependencies: {0}'.format(
+                        ', '.join((str(context.style.literal(v)) for v in self.dependencies))))
+            if self.executor is not None:
+                console.puts('Executor: {0}'.format(context.style.literal(self.executor)))
+            if self.max_retries is not None:
+                console.puts('Max retries: {0}'.format(context.style.literal(self.max_retries)))
+            if self.retry_interval is not None:
+                console.puts('Retry interval: {0}'.format(
+                    context.style.literal(self.retry_interval)))
+            utils.dump_dict_values(self.inputs, 'Inputs')
+
+
+class ArtifactBase(InstanceModelMixin):
+    """
+    A file associated with a :class:`Node`.
+
+    Usually an instance of :class:`ArtifactTemplate`.
+
+    :ivar name: Name (unique for the node)
+    :vartype name: basestring
+    :ivar artifact_template: Template from which this artifact was instantiated (optional)
+    :vartype artifact_template: :class:`ArtifactTemplate`
+    :ivar type: Artifact type
+    :vartype type: :class:`Type`
+    :ivar description: Human-readable description
+    :vartype description: string
+    :ivar source_path: Source path (CSAR or repository)
+    :vartype source_path: basestring
+    :ivar target_path: Path at destination machine
+    :vartype target_path: basestring
+    :ivar repository_url: Repository URL
+    :vartype repository_url: basestring
+    :ivar repository_credential: Credentials for accessing the repository
+    :vartype repository_credential: {basestring: basestring}
+    :ivar properties: Associated parameters
+    :vartype properties: {basestring: :class:`Parameter`}
+
+    :ivar node: Containing node
+    :vartype node: :class:`Node`
+    """
+
+    __tablename__ = 'artifact'
+
+    __private_fields__ = ['type_fk',
+                          'node_fk',
+                          'artifact_template_fk']
+
+    @declared_attr
+    def artifact_template(cls):
+        return relationship.many_to_one(cls, 'artifact_template')
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type')
+
+    description = Column(Text)
+    source_path = Column(Text)
+    target_path = Column(Text)
+    repository_url = Column(Text)
+    repository_credential = Column(modeling_types.StrictDict(basestring, basestring))
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    # region foreign_keys
+
+    @declared_attr
+    def type_fk(cls):
+        """For Artifact many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    @declared_attr
+    def node_fk(cls):
+        """For Node one-to-many to Artifact"""
+        return relationship.foreign_key('node')
+
+    @declared_attr
+    def artifact_template_fk(cls):
+        """For Artifact many-to-one to ArtifactTemplate"""
+        return relationship.foreign_key('artifact_template', nullable=True)
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('description', self.description),
+            ('type_name', self.type.name),
+            ('source_path', self.source_path),
+            ('target_path', self.target_path),
+            ('repository_url', self.repository_url),
+            ('repository_credential', formatting.as_agnostic(self.repository_credential)),
+            ('properties', formatting.as_raw_dict(self.properties))))
+
+    def validate(self):
+        utils.validate_dict_values(self.properties)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(container, self.properties, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts(context.style.node(self.name))
+        if self.description:
+            console.puts(context.style.meta(self.description))
+        with context.style.indent:
+            console.puts('Artifact type: {0}'.format(context.style.type(self.type.name)))
+            console.puts('Source path: {0}'.format(context.style.literal(self.source_path)))
+            if self.target_path is not None:
+                console.puts('Target path: {0}'.format(context.style.literal(self.target_path)))
+            if self.repository_url is not None:
+                console.puts('Repository URL: {0}'.format(
+                    context.style.literal(self.repository_url)))
+            if self.repository_credential:
+                console.puts('Repository credential: {0}'.format(
+                    context.style.literal(self.repository_credential)))
+            utils.dump_dict_values(self.properties, 'Properties')
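
The requirement-satisfaction logic above ultimately rests on the occurrence counting done by
Capability.relate() and Capability.has_enough_relationships. The following stand-alone sketch
(a plain class with illustrative bounds, not the SQLAlchemy model) reproduces that contract:

    class CapacityCounter(object):
        """Stand-in for the counting rules of CapabilityBase shown above."""

        def __init__(self, min_occurrences=None, max_occurrences=None):
            self.min_occurrences = min_occurrences
            self.max_occurrences = max_occurrences
            self.occurrences = 0

        @property
        def has_enough_relationships(self):
            # The lower bound only applies when min_occurrences is set
            if self.min_occurrences is not None:
                return self.occurrences >= self.min_occurrences
            return True

        def relate(self):
            # Refuse once the upper bound is reached, otherwise claim one more match
            if self.max_occurrences is not None and self.occurrences == self.max_occurrences:
                return False
            self.occurrences += 1
            return True

    counter = CapacityCounter(min_occurrences=1, max_occurrences=2)
    assert not counter.has_enough_relationships   # no requirement matched yet
    assert counter.relate() and counter.relate()  # two matches fit within the bound
    assert not counter.relate()                   # a third match is refused
    assert counter.has_enough_relationships       # the lower bound is now met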


[09/12] incubator-ariatosca git commit: ARIA-105 Integrate parser and orchestrator models

Posted by em...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/context/common.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/common.py b/aria/orchestrator/context/common.py
index bb9d839..127641f 100644
--- a/aria/orchestrator/context/common.py
+++ b/aria/orchestrator/context/common.py
@@ -12,22 +12,25 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
 """
 A common context for both workflow and operation
 """
+
 import logging
 from contextlib import contextmanager
 from datetime import datetime
 from functools import partial
-from uuid import uuid4
 
 import jinja2
 
-from aria import logger as aria_logger
-from aria.storage import (
-    exceptions,
+from aria import (
+    logger as aria_logger,
     modeling
 )
+from aria.storage import exceptions
+
+from ...utils.uuid import generate_uuid
 
 
 class BaseContext(object):
@@ -51,17 +54,17 @@ class BaseContext(object):
     def __init__(
             self,
             name,
-            service_instance_id,
+            service_id,
             model_storage,
             resource_storage,
             workdir=None,
             **kwargs):
         super(BaseContext, self).__init__(**kwargs)
         self._name = name
-        self._id = str(uuid4())
+        self._id = generate_uuid(variant='uuid')
         self._model = model_storage
         self._resource = resource_storage
-        self._service_instance_id = service_instance_id
+        self._service_id = service_id
         self._workdir = workdir
         self.logger = None
 
@@ -89,14 +92,14 @@ class BaseContext(object):
         if self._model._initiator:
             api_kwargs.update(self._model._initiator(**self._model._initiator_kwargs))
         api_kwargs.update(**self._model._api_kwargs)
-        return aria_logger.create_sqla_log_handler(log_cls=modeling.model.Log,
+        return aria_logger.create_sqla_log_handler(log_cls=modeling.models.Log,
                                                    execution_id=self._execution_id,
                                                    **api_kwargs)
 
     def __repr__(self):
         return (
             '{name}(name={self.name}, '
-            'deployment_id={self._service_instance_id}, '
+            'deployment_id={self._service_id}, '
             .format(name=self.__class__.__name__, self=self))
 
     @contextmanager
@@ -135,14 +138,14 @@ class BaseContext(object):
         """
         The blueprint model
         """
-        return self.service_instance.service_template
+        return self.service.service_template
 
     @property
-    def service_instance(self):
+    def service(self):
         """
         The deployment model
         """
-        return self.model.service_instance.get(self._service_instance_id)
+        return self.model.service.get(self._service_id)
 
     @property
     def name(self):
@@ -165,7 +168,7 @@ class BaseContext(object):
         Download a blueprint resource from the resource storage
         """
         try:
-            self.resource.deployment.download(entry_id=str(self.service_instance.id),
+            self.resource.deployment.download(entry_id=str(self.service.id),
                                               destination=destination,
                                               path=path)
         except exceptions.StorageError:
@@ -190,7 +193,7 @@ class BaseContext(object):
         Read a deployment resource as string from the resource storage
         """
         try:
-            return self.resource.deployment.read(entry_id=str(self.service_instance.id), path=path)
+            return self.resource.deployment.read(entry_id=str(self.service.id), path=path)
         except exceptions.StorageError:
             return self.resource.deployment.read(entry_id=str(self.service_template.id), path=path)
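
For callers of the context, the rename is mostly mechanical: the former service_instance accessor
is now service, and service_template still resolves through it. A minimal sketch, assuming ctx is
any BaseContext (or subclass) instance obtained elsewhere:

    def service_names(ctx):
        # 'service' replaces the old 'service_instance' property (see the diff above);
        # 'service_template' is still reached through the service.
        return ctx.service.name, ctx.service_template.name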
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/context/operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/operation.py b/aria/orchestrator/context/operation.py
index d2716e8..cbd186c 100644
--- a/aria/orchestrator/context/operation.py
+++ b/aria/orchestrator/context/operation.py
@@ -33,7 +33,7 @@ class BaseOperationContext(BaseContext):
                  name,
                  model_storage,
                  resource_storage,
-                 service_instance_id,
+                 service_id,
                  task_id,
                  actor_id,
                  execution_id,
@@ -42,7 +42,7 @@ class BaseOperationContext(BaseContext):
             name=name,
             model_storage=model_storage,
             resource_storage=resource_storage,
-            service_instance_id=service_instance_id,
+            service_id=service_id,
             **kwargs)
         self._task_id = task_id
         self._actor_id = actor_id
@@ -79,11 +79,11 @@ class BaseOperationContext(BaseContext):
         """
         A work directory that is unique to the plugin and the deployment id
         """
-        if not self.task.plugin_name:
+        if self.task.plugin is None:
             return None
         plugin_workdir = '{0}/plugins/{1}/{2}'.format(self._workdir,
-                                                      self.service_instance.id,
-                                                      self.task.plugin_name)
+                                                      self.service.id,
+                                                      self.task.plugin.name)
         file.makedirs(plugin_workdir)
         return plugin_workdir
 
@@ -92,7 +92,7 @@ class BaseOperationContext(BaseContext):
         context_cls = self.__class__
         context_dict = {
             'name': self.name,
-            'service_instance_id': self._service_instance_id,
+            'service_id': self._service_id,
             'task_id': self._task_id,
             'actor_id': self._actor_id,
             'workdir': self._workdir,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/context/workflow.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/workflow.py b/aria/orchestrator/context/workflow.py
index 5de4b51..5f86d9d 100644
--- a/aria/orchestrator/context/workflow.py
+++ b/aria/orchestrator/context/workflow.py
@@ -50,14 +50,14 @@ class WorkflowContext(BaseContext):
 
     def __repr__(self):
         return (
-            '{name}(deployment_id={self._service_instance_id}, '
+            '{name}(deployment_id={self._service_id}, '
             'workflow_name={self._workflow_name}'.format(
                 name=self.__class__.__name__, self=self))
 
     def _create_execution(self):
         now = datetime.utcnow()
         execution = self.model.execution.model_cls(
-            service_instance=self.service_instance,
+            service=self.service,
             workflow_name=self._workflow_name,
             created_at=now,
             parameters=self.parameters,
@@ -88,11 +88,11 @@ class WorkflowContext(BaseContext):
         """
         Iterator over nodes
         """
-        key = 'service_instance_{0}'.format(self.model.node_template.model_cls.name_column_name())
+        key = 'service_{0}'.format(self.model.node_template.model_cls.name_column_name())
 
         return self.model.node_template.iter(
             filters={
-                key: getattr(self.service_instance, self.service_instance.name_column_name())
+                key: getattr(self.service, self.service.name_column_name())
             }
         )
 
@@ -101,10 +101,10 @@ class WorkflowContext(BaseContext):
         """
         Iterator over node instances
         """
-        key = 'service_instance_{0}'.format(self.model.node.model_cls.name_column_name())
+        key = 'service_{0}'.format(self.model.node.model_cls.name_column_name())
         return self.model.node.iter(
             filters={
-                key: getattr(self.service_instance, self.service_instance.name_column_name())
+                key: getattr(self.service, self.service.name_column_name())
             }
         )
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/decorators.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/decorators.py b/aria/orchestrator/decorators.py
index 3ced61c..62e4a14 100644
--- a/aria/orchestrator/decorators.py
+++ b/aria/orchestrator/decorators.py
@@ -17,10 +17,10 @@
 Workflow and operation decorators
 """
 
-from uuid import uuid4
 from functools import partial, wraps
 
-from aria.utils.validation import validate_function_arguments
+from ..utils.validation import validate_function_arguments
+from ..utils.uuid import generate_uuid
 
 from . import context
 from .workflows.api import task_graph
@@ -78,4 +78,4 @@ def operation(func=None, toolbelt=False, suffix_template='', logging_handlers=No
 def _generate_name(func_name, ctx, suffix_template, **custom_kwargs):
     return '{func_name}.{suffix}'.format(
         func_name=func_name,
-        suffix=suffix_template.format(ctx=ctx, **custom_kwargs) or str(uuid4()))
+        suffix=suffix_template.format(ctx=ctx, **custom_kwargs) or generate_uuid(variant='uuid'))
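
The generate_uuid helper imported here comes from aria/utils/uuid.py, which is not part of this
hunk. Judging only from the call sites in this commit (generate_uuid(variant='uuid') replacing
str(uuid4())), a stand-in with the same observable behaviour for that variant might look like the
sketch below; this is an assumption, not the actual implementation:

    from uuid import uuid4

    def generate_uuid(variant='uuid'):
        # Only the 'uuid' variant is exercised by the call sites in this commit;
        # it is assumed to behave like a stringified uuid4.
        if variant == 'uuid':
            return str(uuid4())
        raise NotImplementedError('unsupported variant: {0}'.format(variant))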

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/runner.py b/aria/orchestrator/runner.py
index bb92d1c..f1633fa 100644
--- a/aria/orchestrator/runner.py
+++ b/aria/orchestrator/runner.py
@@ -47,7 +47,7 @@ class Runner(object):
     """
 
     def __init__(self, workflow_name, workflow_fn, inputs, initialize_model_storage_fn,
-                 service_instance_id, storage_path='', is_storage_temporary=True):
+                 service_id_fn, storage_path='', is_storage_temporary=True):
         if storage_path == '':
             # Temporary file storage
             the_file, storage_path = tempfile.mkstemp(suffix='.db', prefix='aria-')
@@ -58,8 +58,8 @@ class Runner(object):
         self._storage_name = os.path.basename(storage_path)
         self._is_storage_temporary = is_storage_temporary
 
-        workflow_context = self.create_workflow_context(workflow_name, service_instance_id,
-                                                        initialize_model_storage_fn)
+        workflow_context = self.create_workflow_context(workflow_name, initialize_model_storage_fn,
+                                                        service_id_fn)
 
         tasks_graph = workflow_fn(ctx=workflow_context, **inputs)
 
@@ -76,20 +76,21 @@ class Runner(object):
 
     def create_workflow_context(self,
                                 workflow_name,
-                                service_instance_id,
-                                initialize_model_storage_fn):
+                                initialize_model_storage_fn,
+                                service_id_fn):
         self.cleanup()
         model_storage = application_model_storage(
             sql_mapi.SQLAlchemyModelAPI,
             initiator_kwargs=dict(base_dir=self._storage_dir, filename=self._storage_name))
-        initialize_model_storage_fn(model_storage)
+        if initialize_model_storage_fn:
+            initialize_model_storage_fn(model_storage)
         resource_storage = application_resource_storage(
             filesystem_rapi.FileSystemResourceAPI, api_kwargs=dict(directory='.'))
         return WorkflowContext(
             name=workflow_name,
             model_storage=model_storage,
             resource_storage=resource_storage,
-            service_instance_id=service_instance_id,
+            service_id=service_id_fn(),
             workflow_name=self.__class__.__name__,
             task_max_attempts=1,
             task_retry_interval=1)
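
A hedged sketch of the reworked argument shape only; install and init_models are placeholder
names, and since the constructor already builds the workflow context and calls the workflow
function, real implementations would be needed for this to actually run:

    from aria.orchestrator.runner import Runner

    def init_models(model_storage):
        # Placeholder: a real initializer would populate a service (here assumed to get id 1)
        pass

    def install(ctx, **kwargs):
        # Placeholder: a real workflow function would build and return a task graph
        pass

    runner = Runner(workflow_name='install',
                    workflow_fn=install,
                    inputs={},
                    initialize_model_storage_fn=init_models,  # may now also be None, per the new guard
                    service_id_fn=lambda: 1)                  # a callable, resolved after storage setup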

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/workflows/api/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task.py b/aria/orchestrator/workflows/api/task.py
index 6a00844..9522d7a 100644
--- a/aria/orchestrator/workflows/api/task.py
+++ b/aria/orchestrator/workflows/api/task.py
@@ -16,10 +16,10 @@
 """
 Provides the tasks to be entered into the task graph
 """
-from uuid import uuid4
-
-from aria.storage.modeling import model
 
+from ....modeling import models
+from ....utils.collections import OrderedDict
+from ....utils.uuid import generate_uuid
 from ... import context
 from .. import exceptions
 
@@ -28,12 +28,13 @@ class BaseTask(object):
     """
     Abstract task_graph task
     """
+
     def __init__(self, ctx=None, **kwargs):
         if ctx is not None:
             self._workflow_context = ctx
         else:
             self._workflow_context = context.workflow.current.get()
-        self._id = str(uuid4())
+        self._id = generate_uuid(variant='uuid')
 
     @property
     def id(self):
@@ -57,33 +58,30 @@ class OperationTask(BaseTask):
     Represents an operation task in the task_graph
     """
 
-    SOURCE_OPERATION = 'source'
-    TARGET_OPERATION = 'target'
+    NAME_FORMAT = '{interface}:{operation}@{type}:{name}'
 
     def __init__(self,
-                 name,
                  actor,
-                 implementation,
+                 actor_type,
+                 interface_name,
+                 operation_name,
+                 runs_on=None,
                  max_attempts=None,
                  retry_interval=None,
                  ignore_failure=None,
-                 inputs=None,
-                 plugin=None,
-                 runs_on=None):
+                 inputs=None):
         """
-        Creates an operation task using the name, details, node instance and any additional kwargs.
-        :param name: the operation of the name.
-        :param actor: the operation host on which this operation is registered.
-        :param inputs: operation inputs.
+        Do not call this constructor directly. Instead, use :meth:`for_node` or
+        :meth:`for_relationship`.
         """
-        assert isinstance(actor, (model.Node,
-                                  model.Relationship))
+
+        assert isinstance(actor, (models.Node, models.Relationship))
+        assert actor_type in ('node', 'relationship')
+        assert interface_name and operation_name
+        assert runs_on in models.Task.RUNS_ON
         super(OperationTask, self).__init__()
+
         self.actor = actor
-        self.name = '{name}.{actor.id}'.format(name=name, actor=actor)
-        self.implementation = implementation
-        self.inputs = inputs or {}
-        self.plugin = plugin or {}
         self.max_attempts = (self.workflow_context._task_max_attempts
                              if max_attempts is None else max_attempts)
         self.retry_interval = (self.workflow_context._task_retry_interval
@@ -92,144 +90,137 @@ class OperationTask(BaseTask):
                                if ignore_failure is None else ignore_failure)
         self.runs_on = runs_on
 
-    @classmethod
-    def _merge_inputs(cls, operation_inputs, additional_inputs=None):
-        final_inputs = dict((p.name, p.as_raw['value']) for p in operation_inputs)
-        final_inputs.update(additional_inputs or {})
-        return final_inputs
+        # Wrap inputs
+        if inputs:
+            for k, v in inputs.iteritems():
+                if not isinstance(v, models.Parameter):
+                    inputs[k] = models.Parameter.wrap(k, v)
+
+        # TODO: Suggestion: these extra inputs could be stored as a separate entry in the task
+        # model, because they are different from the operation inputs. If we do this, then the two
+        # kinds of inputs should *not* be merged here.
+
+        operation = self._get_operation(interface_name, operation_name)
+        if operation is None:
+            raise exceptions.OperationNotFoundException(
+                'Could not find operation "{0}" on interface "{1}" for {2} "{3}"'
+                .format(operation_name, interface_name, actor_type, actor.name))
+
+        self.plugin = None
+        if operation.plugin_specification:
+            self.plugin = OperationTask._find_plugin(operation.plugin_specification)
+            if self.plugin is None:
+                raise exceptions.PluginNotFoundException(
+                    'Could not find plugin of operation "{0}" on interface "{1}" for {2} "{3}"'
+                    .format(operation_name, interface_name, actor_type, actor.name))
+
+        self.implementation = operation.implementation
+        self.inputs = OperationTask._merge_inputs(operation.inputs, inputs)
+
+        self.name = OperationTask.NAME_FORMAT.format(type=actor_type,
+                                                     name=actor.name,
+                                                     interface=interface_name,
+                                                     operation=operation_name)
 
     @classmethod
-    def node(cls, instance, name, inputs=None, *args, **kwargs):
+    def for_node(cls,
+                 node,
+                 interface_name,
+                 operation_name,
+                 max_attempts=None,
+                 retry_interval=None,
+                 ignore_failure=None,
+                 inputs=None):
         """
-        Represents a node based operation
-
-        :param instance: the node of which this operation belongs to.
-        :param name: the name of the operation.
+        Creates an operation on a node.
+
+        :param node: The node on which to run the operation
+        :param interface_name: The interface name
+        :param operation_name: The operation name within the interface
+        :param max_attempts: The maximum number of attempts in case the operation fails
+                             (if not specified the defaults it taken from the workflow context)
+        :param retry_interval: The interval in seconds between attempts when the operation fails
+                               (if not specified, the default is taken from the workflow context)
+        :param ignore_failure: Whether to ignore failures
+                               (if not specified, the default is taken from the workflow context)
+        :param inputs: Additional operation inputs
         """
-        assert isinstance(instance, model.Node)
-        interface_name = _get_interface_name(name)
-        interfaces = instance.interfaces.filter_by(name=interface_name)
-        if interfaces.count() > 1:
-            raise exceptions.TaskException(
-                "More than one interface with the same name `{0}` found".format(name)
-            )
-        elif interfaces.count() == 0:
-            raise exceptions.TaskException(
-                "No Interface with the name `{interface_name}` found".format(
-                    interface_name=interface_name)
-            )
-
-        operation_templates = interfaces[0].operations.filter_by(name=name)
-        if operation_templates.count() > 1:
-            raise exceptions.TaskException(
-                "More than one operation with the same name `{0}` were found".format(name)
-            )
-
-        elif operation_templates.count() == 0:
-            raise exceptions.TaskException(
-                "No interface with the name `{operation_name}` found".format(
-                    operation_name=name)
-            )
-
-        return cls._instance(
-            instance=instance,
-            name=name,
-            operation_template=operation_templates[0],
-            plugins=instance.plugins or [],
-            runs_on=model.Task.RUNS_ON_NODE_INSTANCE,
-            inputs=cls._merge_inputs(operation_templates[0].inputs, inputs),
-            *args,
-            **kwargs)
+
+        assert isinstance(node, models.Node)
+        return cls(
+            actor=node,
+            actor_type='node',
+            interface_name=interface_name,
+            operation_name=operation_name,
+            max_attempts=max_attempts,
+            retry_interval=retry_interval,
+            ignore_failure=ignore_failure,
+            inputs=inputs,
+            runs_on=models.Task.RUNS_ON_NODE)
 
     @classmethod
-    def relationship(cls, instance, name, edge, runs_on=None, inputs=None, *args,
-                     **kwargs):
+    def for_relationship(cls,
+                         relationship,
+                         interface_name,
+                         operation_name,
+                         runs_on=models.Task.RUNS_ON_SOURCE,
+                         max_attempts=None,
+                         retry_interval=None,
+                         ignore_failure=None,
+                         inputs=None):
         """
-        Represents a relationship based operation
-
-        :param instance: the relationship of which this operation belongs to.
-        :param name: the name of the operation.
-        :param edge: the edge of the interface ("source" or "target").
-        :param runs_on: where to run the operation ("source" or "target"); if None defaults to the
-                        interface edge.
-        :param inputs any additional inputs to the operation
+        Creates an operation task for a relationship.
+
+        :param relationship: The relationship on which to run the operation
+        :param interface_name: The interface name
+        :param operation_name: The operation name within the interface
+        :param runs_on: where to run the operation ("source" or "target"); defaults to "source"
+        :param max_attempts: The maximum number of attempts in case the operation fails
+                             (if not specified, the default is taken from the workflow context)
+        :param retry_interval: The interval in seconds between attempts when the operation fails
+                               (if not specified, the default is taken from the workflow context)
+        :param ignore_failure: Whether to ignore failures
+                               (if not specified, the default is taken from the workflow context)
+        :param inputs: Additional operation inputs
         """
-        assert isinstance(instance, model.Relationship)
-        interface_name = _get_interface_name(name)
-        interfaces = instance.interfaces.filter_by(name=interface_name, edge=edge)
-        count = interfaces.count()
-        if count > 1:
-            raise exceptions.TaskException(
-                "More than one interface with the same name `{interface_name}` found at `{edge}`"
-                + " edge".format(
-                    interface_name=interface_name, edge=edge)
-            )
-        elif count == 0:
-            raise exceptions.TaskException(
-                "No interface with the name `{interface_name}` found at `{edge}` edge".format(
-                    interface_name=interface_name, edge=edge)
-            )
-
-        operations = interfaces.all()[0].operations.filter_by(name=name)
-        count = operations.count()
-        if count > 1:
-            raise exceptions.TaskException(
-                "More than one operation with the same name `{0}` found".format(name)
-            )
-        elif count == 0:
-            raise exceptions.TaskException(
-                "No operation with the name `{operation_name}` found".format(
-                    operation_name=name)
-            )
-
-        if not runs_on:
-            if edge == cls.SOURCE_OPERATION:
-                runs_on = model.Task.RUNS_ON_SOURCE
-            else:
-                runs_on = model.Task.RUNS_ON_TARGET
-
-        if runs_on == model.Task.RUNS_ON_SOURCE:
-            plugins = instance.source_node.plugins
-        else:
-            plugins = instance.target_node.plugins
 
-        return cls._instance(instance=instance,
-                             name=name,
-                             operation_template=operations[0],
-                             plugins=plugins or [],
-                             runs_on=runs_on,
-                             inputs=cls._merge_inputs(operations[0].inputs, inputs),
-                             *args,
-                             **kwargs)
-
-    @classmethod
-    def _instance(cls,
-                  instance,
-                  name,
-                  operation_template,
-                  inputs,
-                  plugins,
-                  runs_on,
-                  *args,
-                  **kwargs):
-        matching_plugins = [p for p in plugins if p['name'] == operation_template.plugin]
-        # All matching plugins should have identical package_name/package_version, so it's safe to
-        # take the first found.
-        plugin = matching_plugins[0] if matching_plugins else {}
-        return cls(actor=instance,
-                   name=name,
-                   implementation=operation_template.implementation,
-                   inputs=inputs,
-                   plugin=plugin,
-                   runs_on=runs_on,
-                   *args,
-                   **kwargs)
+        assert isinstance(relationship, models.Relationship)
+        assert runs_on in models.Task.RUNS_ON
+        return cls(
+            actor=relationship,
+            actor_type='relationship',
+            interface_name=interface_name,
+            operation_name=operation_name,
+            runs_on=runs_on,
+            max_attempts=max_attempts,
+            retry_interval=retry_interval,
+            ignore_failure=ignore_failure,
+            inputs=inputs)
+
+    def _get_operation(self, interface_name, operation_name):
+        interface = self.actor.interfaces.get(interface_name)
+        if interface is not None:
+            return interface.operations.get(operation_name)
+        return None
+
+    @staticmethod
+    def _find_plugin(plugin_specification):
+        workflow_context = context.workflow.current.get()
+        return plugin_specification.find_plugin(workflow_context.model.plugin.list())
+
+    @staticmethod
+    def _merge_inputs(operation_inputs, override_inputs=None):
+        final_inputs = OrderedDict(operation_inputs)
+        if override_inputs:
+            final_inputs.update(override_inputs)
+        return final_inputs
 
 
 class WorkflowTask(BaseTask):
     """
-    Represents an workflow task in the task_graph
+    Represents a workflow task in the task graph
     """
+
     def __init__(self, workflow_func, **kwargs):
         """
         Creates a workflow based task using the workflow_func provided, and its kwargs
@@ -259,8 +250,3 @@ class StubTask(BaseTask):
     """
     Enables creating empty tasks.
     """
-    pass
-
-
-def _get_interface_name(operation_name):
-    return operation_name.rsplit('.', 1)[0]

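To make the new api-level OperationTask behaviour above easier to follow: the constructor now resolves the operation from the actor's interfaces and merges the operation's stored inputs with any caller-supplied overrides. Below is a minimal, self-contained sketch of that lookup-and-merge logic using plain dictionaries; the helper names and sample data are illustrative stand-ins, not the ARIA API itself.

    from collections import OrderedDict

    def get_operation(interfaces, interface_name, operation_name):
        # Plays the role of OperationTask._get_operation above: a two-level lookup
        # that returns None when either the interface or the operation is missing.
        interface = interfaces.get(interface_name)
        if interface is not None:
            return interface.get(operation_name)
        return None

    def merge_inputs(operation_inputs, override_inputs=None):
        # Plays the role of OperationTask._merge_inputs above: operation inputs
        # come first, caller-supplied overrides win on key clashes.
        final_inputs = OrderedDict(operation_inputs)
        if override_inputs:
            final_inputs.update(override_inputs)
        return final_inputs

    interfaces = {
        'Standard': {
            'create': {'implementation': 'scripts/create.sh',
                       'inputs': OrderedDict([('port', 8080)])},
        },
    }
    operation = get_operation(interfaces, 'Standard', 'create')
    print(merge_inputs(operation['inputs'], {'port': 9090, 'debug': True}))
    # OrderedDict([('port', 9090), ('debug', True)])
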
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/workflows/api/task_graph.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task_graph.py b/aria/orchestrator/workflows/api/task_graph.py
index c88d343..92a39d2 100644
--- a/aria/orchestrator/workflows/api/task_graph.py
+++ b/aria/orchestrator/workflows/api/task_graph.py
@@ -17,11 +17,11 @@
 Task graph. Used by users to build workflows
 """
 
-from uuid import uuid4
 from collections import Iterable
 
 from networkx import DiGraph, topological_sort
 
+from ....utils.uuid import generate_uuid
 from . import task as api_task
 
 
@@ -49,7 +49,7 @@ class TaskGraph(object):
 
     def __init__(self, name):
         self.name = name
-        self._id = str(uuid4())
+        self._id = generate_uuid(variant='uuid')
         self._graph = DiGraph()
 
     def __repr__(self):

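The task graph ID now comes from ARIA's own generate_uuid helper instead of calling uuid.uuid4 directly. Judging only from the call site above (an assumption, not a description of the helper's implementation), a rough standard-library equivalent is:

    from uuid import uuid4

    def generate_task_graph_id():
        # A random UUID rendered as a string, which is what the previous
        # str(uuid4()) produced; funnelling this through a shared helper lets the
        # codebase change its ID scheme in one place.
        return str(uuid4())

    print(generate_task_graph_id())
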
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/workflows/builtin/execute_operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/execute_operation.py b/aria/orchestrator/workflows/builtin/execute_operation.py
index 5a7f6ce..348f47a 100644
--- a/aria/orchestrator/workflows/builtin/execute_operation.py
+++ b/aria/orchestrator/workflows/builtin/execute_operation.py
@@ -25,7 +25,8 @@ from ... import workflow
 def execute_operation(
         ctx,
         graph,
-        operation,
+        interface_name,
+        operation_name,
         operation_kwargs,
         allow_kwargs_override,
         run_by_dependency_order,
@@ -50,33 +51,33 @@ def execute_operation(
     """
     subgraphs = {}
     # filtering node instances
-    filtered_nodes = list(_filter_node_instances(
+    filtered_nodes = list(_filter_nodes(
         context=ctx,
         node_template_ids=node_template_ids,
         node_ids=node_ids,
         type_names=type_names))
 
     if run_by_dependency_order:
-        filtered_node_instances_ids = set(node_instance.id
-                                          for node_instance in filtered_nodes)
-        for node in ctx.node_instances:
-            if node.id not in filtered_node_instances_ids:
+        filtered_node_ids = set(node_instance.id for node_instance in filtered_nodes)
+        for node in ctx.nodes:
+            if node.id not in filtered_node_ids:
                 subgraphs[node.id] = ctx.task_graph(
                     name='execute_operation_stub_{0}'.format(node.id))
 
     # registering actual tasks to sequences
     for node in filtered_nodes:
         graph.add_tasks(
-            _create_node_instance_task(
-                nodes=node,
-                operation=operation,
+            _create_node_task(
+                node=node,
+                interface_name=interface_name,
+                operation_name=operation_name,
                 operation_kwargs=operation_kwargs,
                 allow_kwargs_override=allow_kwargs_override
             )
         )
 
-    for _, node_instance_sub_workflow in subgraphs.items():
-        graph.add_tasks(node_instance_sub_workflow)
+    for _, node_sub_workflow in subgraphs.items():
+        graph.add_tasks(node_sub_workflow)
 
     # adding tasks dependencies if required
     if run_by_dependency_order:
@@ -86,31 +87,32 @@ def execute_operation(
                     source_task=subgraphs[node.id], after=[subgraphs[relationship.target_id]])
 
 
-def _filter_node_instances(context, node_template_ids=(), node_ids=(), type_names=()):
-    def _is_node_by_id(node_id):
-        return not node_template_ids or node_id in node_template_ids
+def _filter_nodes(context, node_template_ids=(), node_ids=(), type_names=()):
+    def _is_node_template_by_id(node_template_id):
+        return not node_template_ids or node_template_id in node_template_ids
 
-    def _is_node_instance_by_id(node_instance_id):
-        return not node_ids or node_instance_id in node_ids
+    def _is_node_by_id(node_id):
+        return not node_ids or node_id in node_ids
 
-    def _is_node_by_type(node_type_hierarchy):
-        return not type_names or node_type_hierarchy in type_names
+    def _is_node_by_type(node_type):
+        return not type_names or node_type.name in type_names
 
     for node in context.nodes:
-        if all((_is_node_by_id(node.node_template.id),
-                _is_node_instance_by_id(node.id),
-                _is_node_by_type(node.node_template.type_hierarchy))):
+        if all((_is_node_template_by_id(node.node_template.id),
+                _is_node_by_id(node.id),
+                _is_node_by_type(node.node_template.type))):
             yield node
 
 
-def _create_node_instance_task(
-        nodes,
-        operation,
+def _create_node_task(
+        node,
+        interface_name,
+        operation_name,
         operation_kwargs,
         allow_kwargs_override):
     """
     A workflow which executes a single operation
-    :param nodes: the node instance to install
+    :param node: the node on which to execute the operation
     :param basestring operation: the operation name
     :param dict operation_kwargs:
     :param bool allow_kwargs_override:
@@ -120,7 +122,8 @@ def _create_node_instance_task(
     if allow_kwargs_override is not None:
         operation_kwargs['allow_kwargs_override'] = allow_kwargs_override
 
-    return OperationTask.node(
-        instance=nodes,
-        name=operation,
+    return OperationTask.for_node(
+        node=node,
+        interface_name=interface_name,
+        operation_name=operation_name,
         inputs=operation_kwargs)

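_filter_nodes above combines three predicates, and each one is a pass-through when its filter tuple is empty. Here is a standalone sketch of the same filtering with a namedtuple standing in for the ORM-backed node model; the field names are illustrative only.

    from collections import namedtuple

    Node = namedtuple('Node', 'id node_template_id type_name')

    def filter_nodes(nodes, node_template_ids=(), node_ids=(), type_names=()):
        # An empty filter tuple means "match everything", as in _filter_nodes above.
        def by_template(node):
            return not node_template_ids or node.node_template_id in node_template_ids

        def by_id(node):
            return not node_ids or node.id in node_ids

        def by_type(node):
            return not type_names or node.type_name in type_names

        return [n for n in nodes if by_template(n) and by_id(n) and by_type(n)]

    nodes = [Node(1, 10, 'WebServer'), Node(2, 11, 'Database')]
    print(filter_nodes(nodes, type_names=('Database',)))
    # [Node(id=2, node_template_id=11, type_name='Database')]
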
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/workflows/builtin/heal.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/heal.py b/aria/orchestrator/workflows/builtin/heal.py
index 2592323..92b96ea 100644
--- a/aria/orchestrator/workflows/builtin/heal.py
+++ b/aria/orchestrator/workflows/builtin/heal.py
@@ -26,156 +26,156 @@ from ..api import task
 
 
 @workflow
-def heal(ctx, graph, node_instance_id):
+def heal(ctx, graph, node_id):
     """
     The heal workflow
 
     :param WorkflowContext ctx: the workflow context
     :param TaskGraph graph: the graph which will describe the workflow.
-    :param node_instance_id: the id of the node instance to heal
+    :param node_id: the id of the node to heal
     :return:
     """
-    failing_node = ctx.model.node.get(node_instance_id)
+    failing_node = ctx.model.node.get(node_id)
     host_node = ctx.model.node.get(failing_node.host.id)
-    failed_node_instance_subgraph = _get_contained_subgraph(ctx, host_node)
-    failed_node_instance_ids = list(n.id for n in failed_node_instance_subgraph)
+    failed_node_subgraph = _get_contained_subgraph(ctx, host_node)
+    failed_node_ids = list(n.id for n in failed_node_subgraph)
 
-    targeted_node_instances = [node_instance for node_instance in ctx.node_instances
-                               if node_instance.id not in failed_node_instance_ids]
+    targeted_nodes = [node for node in ctx.nodes
+                      if node.id not in failed_node_ids]
 
     uninstall_subgraph = task.WorkflowTask(
         heal_uninstall,
-        failing_node_instances=failed_node_instance_subgraph,
-        targeted_node_instances=targeted_node_instances
+        failing_nodes=failed_node_subgraph,
+        targeted_nodes=targeted_nodes
     )
 
     install_subgraph = task.WorkflowTask(
         heal_install,
-        failing_node_instances=failed_node_instance_subgraph,
-        targeted_node_instances=targeted_node_instances)
+        failing_nodes=failed_node_subgraph,
+        targeted_nodes=targeted_nodes)
 
     graph.sequence(uninstall_subgraph, install_subgraph)
 
 
-@workflow(suffix_template='{failing_node_instances}')
-def heal_uninstall(ctx, graph, failing_node_instances, targeted_node_instances):
+@workflow(suffix_template='{failing_nodes}')
+def heal_uninstall(ctx, graph, failing_nodes, targeted_nodes):
     """
     the uninstall part of the heal mechanism
     :param WorkflowContext ctx: the workflow context
     :param TaskGraph graph: the task graph to edit.
-    :param failing_node_instances: the failing nodes to heal.
-    :param targeted_node_instances: the targets of the relationships where the failing node are
+    :param failing_nodes: the failing nodes to heal.
+    :param targeted_nodes: the targets of the relationships where the failing nodes are the
     source
     :return:
     """
-    node_instance_sub_workflows = {}
-
-    # Create install stub workflow for each unaffected node instance
-    for node_instance in targeted_node_instances:
-        node_instance_stub = task.StubTask()
-        node_instance_sub_workflows[node_instance.id] = node_instance_stub
-        graph.add_tasks(node_instance_stub)
-
-    # create install sub workflow for every node instance
-    for node_instance in failing_node_instances:
-        node_instance_sub_workflow = task.WorkflowTask(uninstall_node,
-                                                       node_instance=node_instance)
-        node_instance_sub_workflows[node_instance.id] = node_instance_sub_workflow
-        graph.add_tasks(node_instance_sub_workflow)
-
-    # create dependencies between the node instance sub workflow
-    for node_instance in failing_node_instances:
-        node_instance_sub_workflow = node_instance_sub_workflows[node_instance.id]
-        for relationship_instance in reversed(node_instance.outbound_relationship_instances):
+    node_sub_workflows = {}
+
+    # Create a stub task for each unaffected node
+    for node in targeted_nodes:
+        node_stub = task.StubTask()
+        node_sub_workflows[node.id] = node_stub
+        graph.add_tasks(node_stub)
+
+    # create an uninstall sub workflow for every failing node
+    for node in failing_nodes:
+        node_sub_workflow = task.WorkflowTask(uninstall_node,
+                                              node=node)
+        node_sub_workflows[node.id] = node_sub_workflow
+        graph.add_tasks(node_sub_workflow)
+
+    # create dependencies between the node sub workflows
+    for node in failing_nodes:
+        node_sub_workflow = node_sub_workflows[node.id]
+        for relationship in reversed(node.outbound_relationships):
             graph.add_dependency(
-                node_instance_sub_workflows[relationship_instance.target_node_instance.id],
-                node_instance_sub_workflow)
+                node_sub_workflows[relationship.target_node.id],
+                node_sub_workflow)
 
-    # Add operations for intact nodes depending on a node instance belonging to node_instances
-    for node_instance in targeted_node_instances:
-        node_instance_sub_workflow = node_instance_sub_workflows[node_instance.id]
+    # Add operations for intact nodes that depend on a failing node
+    for node in targeted_nodes:
+        node_sub_workflow = node_sub_workflows[node.id]
 
-        for relationship_instance in reversed(node_instance.outbound_relationship_instances):
+        for relationship in reversed(node.outbound_relationships):
 
-            target_node_instance = \
-                ctx.model.node.get(relationship_instance.target_node_instance.id)
-            target_node_instance_subgraph = node_instance_sub_workflows[target_node_instance.id]
-            graph.add_dependency(target_node_instance_subgraph, node_instance_sub_workflow)
+            target_node = \
+                ctx.model.node.get(relationship.target_node.id)
+            target_node_subgraph = node_sub_workflows[target_node.id]
+            graph.add_dependency(target_node_subgraph, node_sub_workflow)
 
-            if target_node_instance in failing_node_instances:
+            if target_node in failing_nodes:
                 dependency = relationship_tasks(
-                    relationship_instance=relationship_instance,
+                    relationship=relationship,
                     operation_name='aria.interfaces.relationship_lifecycle.unlink')
                 graph.add_tasks(*dependency)
-                graph.add_dependency(node_instance_sub_workflow, dependency)
+                graph.add_dependency(node_sub_workflow, dependency)
 
 
-@workflow(suffix_template='{failing_node_instances}')
-def heal_install(ctx, graph, failing_node_instances, targeted_node_instances):
+@workflow(suffix_template='{failing_nodes}')
+def heal_install(ctx, graph, failing_nodes, targeted_nodes):
     """
     the install part of the heal mechanism
     :param WorkflowContext ctx: the workflow context
     :param TaskGraph graph: the task graph to edit.
-    :param failing_node_instances: the failing nodes to heal.
-    :param targeted_node_instances: the targets of the relationships where the failing node are
+    :param failing_nodes: the failing nodes to heal.
+    :param targeted_nodes: the targets of the relationships where the failing nodes are the
     source
     :return:
     """
-    node_instance_sub_workflows = {}
+    node_sub_workflows = {}
 
     # Create a stub task for each unaffected node
-    for node_instance in targeted_node_instances:
-        node_instance_stub = task.StubTask()
-        node_instance_sub_workflows[node_instance.id] = node_instance_stub
-        graph.add_tasks(node_instance_stub)
-
-    # create install sub workflow for every node instance
-    for node_instance in failing_node_instances:
-        node_instance_sub_workflow = task.WorkflowTask(install_node,
-                                                       node_instance=node_instance)
-        node_instance_sub_workflows[node_instance.id] = node_instance_sub_workflow
-        graph.add_tasks(node_instance_sub_workflow)
-
-    # create dependencies between the node instance sub workflow
-    for node_instance in failing_node_instances:
-        node_instance_sub_workflow = node_instance_sub_workflows[node_instance.id]
-        if node_instance.outbound_relationship_instances:
+    for node in targeted_nodes:
+        node_stub = task.StubTask()
+        node_sub_workflows[node.id] = node_stub
+        graph.add_tasks(node_stub)
+
+    # create install sub workflow for every node
+    for node in failing_nodes:
+        node_sub_workflow = task.WorkflowTask(install_node,
+                                              node=node)
+        node_sub_workflows[node.id] = node_sub_workflow
+        graph.add_tasks(node_sub_workflow)
+
+    # create dependencies between the node sub workflows
+    for node in failing_nodes:
+        node_sub_workflow = node_sub_workflows[node.id]
+        if node.outbound_relationships:
             dependencies = \
-                [node_instance_sub_workflows[relationship_instance.target_node_instance.id]
-                 for relationship_instance in node_instance.outbound_relationship_instances]
-            graph.add_dependency(node_instance_sub_workflow, dependencies)
-
-    # Add operations for intact nodes depending on a node instance
-    # belonging to node_instances
-    for node_instance in targeted_node_instances:
-        node_instance_sub_workflow = node_instance_sub_workflows[node_instance.id]
-
-        for relationship_instance in node_instance.outbound_relationship_instances:
-            target_node_instance = ctx.model.node.get(
-                relationship_instance.target_node_instance.id)
-            target_node_instance_subworkflow = node_instance_sub_workflows[target_node_instance.id]
-            graph.add_dependency(node_instance_sub_workflow, target_node_instance_subworkflow)
-
-            if target_node_instance in failing_node_instances:
+                [node_sub_workflows[relationship.target_node.id]
+                 for relationship in node.outbound_relationships]
+            graph.add_dependency(node_sub_workflow, dependencies)
+
+    # Add operations for intact nodes that depend on a failing node
+    for node in targeted_nodes:
+        node_sub_workflow = node_sub_workflows[node.id]
+
+        for relationship in node.outbound_relationships:
+            target_node = ctx.model.node.get(
+                relationship.target_node.id)
+            target_node_subworkflow = node_sub_workflows[target_node.id]
+            graph.add_dependency(node_sub_workflow, target_node_subworkflow)
+
+            if target_node in failing_nodes:
                 dependent = relationship_tasks(
-                    relationship_instance=relationship_instance,
+                    relationship=relationship,
                     operation_name='aria.interfaces.relationship_lifecycle.establish')
                 graph.add_tasks(*dependent)
-                graph.add_dependency(dependent, node_instance_sub_workflow)
+                graph.add_dependency(dependent, node_sub_workflow)
 
 
-def _get_contained_subgraph(context, host_node_instance):
-    contained_instances = [node_instance
-                           for node_instance in context.node_instances
-                           if node_instance.host_fk == host_node_instance.id and
-                           node_instance.host_fk != node_instance.id]
-    result = [host_node_instance]
+def _get_contained_subgraph(context, host_node):
+    contained_instances = [node
+                           for node in context.nodes
+                           if node.host_fk == host_node.id and
+                           node.host_fk != node.id]
+    result = [host_node]
 
     if not contained_instances:
         return result
 
     result.extend(contained_instances)
-    for node_instance in contained_instances:
-        result.extend(_get_contained_subgraph(context, node_instance))
+    for node in contained_instances:
+        result.extend(_get_contained_subgraph(context, node))
 
     return set(result)

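_get_contained_subgraph above follows host_fk links to collect every node hosted, directly or transitively, on a given host node. A compact standalone version of the same recursion, with a namedtuple standing in for the node model:

    from collections import namedtuple

    Node = namedtuple('Node', 'id host_fk')

    def contained_subgraph(nodes, host):
        # Nodes hosted directly on `host` (self-hosted entries excluded), plus
        # everything hosted on them, plus the host itself, returned as a set.
        contained = [n for n in nodes if n.host_fk == host.id and n.host_fk != n.id]
        result = {host}
        for node in contained:
            result.update(contained_subgraph(nodes, node))
        return result

    nodes = [Node(1, 1), Node(2, 1), Node(3, 2)]
    print(sorted(n.id for n in contained_subgraph(nodes, nodes[0])))  # [1, 2, 3]
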
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/workflows/builtin/utils.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/utils.py b/aria/orchestrator/workflows/builtin/utils.py
index c9dbc6b..8efa889 100644
--- a/aria/orchestrator/workflows/builtin/utils.py
+++ b/aria/orchestrator/workflows/builtin/utils.py
@@ -14,20 +14,24 @@
 # limitations under the License.
 
 from ..api.task import OperationTask
+from .. import exceptions
 
 
-def create_node_task(operation_name, node):
+def create_node_task(interface_name, operation_name, node):
     """
     Returns a new operation task if the operation exists in the node, otherwise returns None.
     """
 
-    if _has_operation(node.interfaces, operation_name):
-        return OperationTask.node(instance=node,
-                                  name=operation_name)
-    return None
+    try:
+        return OperationTask.for_node(node=node,
+                                      interface_name=interface_name,
+                                      operation_name=operation_name)
+    except exceptions.OperationNotFoundException:
+        # We will skip nodes which do not have the operation
+        return None
 
 
-def create_relationship_tasks(operation_name, runs_on, node):
+def create_relationship_tasks(interface_name, operation_name, runs_on, node):
     """
     Returns a list of operation tasks for each outbound relationship of the node if the operation
     exists there.
@@ -35,12 +39,15 @@ def create_relationship_tasks(operation_name, runs_on, node):
 
     sequence = []
     for relationship in node.outbound_relationships:
-        if _has_operation(relationship.interfaces, operation_name):
+        try:
             sequence.append(
-                OperationTask.relationship(instance=relationship,
-                                           name=operation_name,
-                                           edge='source',
-                                           runs_on=runs_on))
+                OperationTask.for_relationship(relationship=relationship,
+                                               interface_name=interface_name,
+                                               operation_name=operation_name,
+                                               runs_on=runs_on))
+        except exceptions.OperationNotFoundException:
+            # We will skip relationships which do not have the operation
+            pass
     return sequence
 
 
@@ -49,16 +56,16 @@ def create_node_task_dependencies(graph, tasks_and_nodes, reverse=False):
     Creates dependencies between tasks if there is a relationship (outbound) between their nodes.
     """
 
-    def get_task(node_id):
+    def get_task(node_name):
         for task, node in tasks_and_nodes:
-            if node.id == node_id:
+            if node.name == node_name:
                 return task
         return None
 
     for task, node in tasks_and_nodes:
         dependencies = []
         for relationship in node.outbound_relationships:
-            dependency = get_task(relationship.target_node.id)
+            dependency = get_task(relationship.target_node.name)
             if dependency:
                 dependencies.append(dependency)
         if dependencies:
@@ -67,10 +74,3 @@ def create_node_task_dependencies(graph, tasks_and_nodes, reverse=False):
                     graph.add_dependency(dependency, task)
             else:
                 graph.add_dependency(task, dependencies)
-
-
-def _has_operation(interfaces, operation_name):
-    for interface in interfaces:
-        if interface.operations.filter_by(name=operation_name).count() == 1:
-            return True
-    return False

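The helpers above drop the look-before-you-leap _has_operation check in favour of an EAFP style: attempt to build the task and skip any actor that raises OperationNotFoundException. A self-contained sketch of that pattern follows; the exception class and task factory are stand-ins, not the ARIA ones.

    class OperationNotFound(Exception):
        """Stand-in for exceptions.OperationNotFoundException."""

    def make_task(node, interface_name, operation_name):
        # Hypothetical factory playing the role of OperationTask.for_node above.
        interface = node['interfaces'].get(interface_name, {})
        if operation_name not in interface:
            raise OperationNotFound('{0}.{1}'.format(interface_name, operation_name))
        return '{0}: {1}.{2}'.format(node['name'], interface_name, operation_name)

    def tasks_for(nodes, interface_name, operation_name):
        # Try to build a task per node and silently skip nodes lacking the operation.
        sequence = []
        for node in nodes:
            try:
                sequence.append(make_task(node, interface_name, operation_name))
            except OperationNotFound:
                pass
        return sequence

    nodes = [{'name': 'web', 'interfaces': {'Standard': {'create': {}}}},
             {'name': 'db', 'interfaces': {}}]
    print(tasks_for(nodes, 'Standard', 'create'))  # ['web: Standard.create']
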
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/workflows/builtin/workflows.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/workflows.py b/aria/orchestrator/workflows/builtin/workflows.py
index 180b4e9..6065343 100644
--- a/aria/orchestrator/workflows/builtin/workflows.py
+++ b/aria/orchestrator/workflows/builtin/workflows.py
@@ -14,31 +14,32 @@
 # limitations under the License.
 
 """
-A set of builtin workflows.
+TOSCA normative lifecycle workflows.
 """
 
-from .utils import (create_node_task, create_relationship_tasks)
 from ... import workflow
+from ....modeling.models import Task
+from .utils import (create_node_task, create_relationship_tasks)
 
 
 NORMATIVE_STANDARD_INTERFACE = 'Standard' # 'tosca.interfaces.node.lifecycle.Standard'
 NORMATIVE_CONFIGURE_INTERFACE = 'Configure' # 'tosca.interfaces.relationship.Configure'
 
-NORMATIVE_CREATE = NORMATIVE_STANDARD_INTERFACE + '.create'
-NORMATIVE_START = NORMATIVE_STANDARD_INTERFACE + '.start'
-NORMATIVE_STOP = NORMATIVE_STANDARD_INTERFACE + '.stop'
-NORMATIVE_DELETE = NORMATIVE_STANDARD_INTERFACE + '.delete'
+NORMATIVE_CREATE = 'create'
+NORMATIVE_START = 'start'
+NORMATIVE_STOP = 'stop'
+NORMATIVE_DELETE = 'delete'
 
-NORMATIVE_CONFIGURE = NORMATIVE_STANDARD_INTERFACE + '.configure'
-NORMATIVE_PRE_CONFIGURE_SOURCE = NORMATIVE_CONFIGURE_INTERFACE + '.pre_configure_source'
-NORMATIVE_PRE_CONFIGURE_TARGET = NORMATIVE_CONFIGURE_INTERFACE + '.pre_configure_target'
-NORMATIVE_POST_CONFIGURE_SOURCE = NORMATIVE_CONFIGURE_INTERFACE + '.post_configure_source'
-NORMATIVE_POST_CONFIGURE_TARGET = NORMATIVE_CONFIGURE_INTERFACE + '.post_configure_target'
+NORMATIVE_CONFIGURE = 'configure'
+NORMATIVE_PRE_CONFIGURE_SOURCE = 'pre_configure_source'
+NORMATIVE_PRE_CONFIGURE_TARGET = 'pre_configure_target'
+NORMATIVE_POST_CONFIGURE_SOURCE = 'post_configure_source'
+NORMATIVE_POST_CONFIGURE_TARGET = 'post_configure_target'
 
-NORMATIVE_ADD_SOURCE = NORMATIVE_CONFIGURE_INTERFACE + '.add_source'
-NORMATIVE_ADD_TARGET = NORMATIVE_CONFIGURE_INTERFACE + '.add_target'
-NORMATIVE_REMOVE_TARGET = NORMATIVE_CONFIGURE_INTERFACE + '.remove_target'
-NORMATIVE_TARGET_CHANGED = NORMATIVE_CONFIGURE_INTERFACE + '.target_changed'
+NORMATIVE_ADD_SOURCE = 'add_source'
+NORMATIVE_ADD_TARGET = 'add_target'
+NORMATIVE_REMOVE_TARGET = 'remove_target'
+NORMATIVE_TARGET_CHANGED = 'target_changed'
 
 
 __all__ = (
@@ -64,40 +65,40 @@ __all__ = (
 )
 
 
-@workflow(suffix_template='{node.id}')
+@workflow(suffix_template='{node.name}')
 def install_node(graph, node, **kwargs):
     sequence = []
 
     # Create
     sequence.append(
         create_node_task(
-            NORMATIVE_CREATE,
+            NORMATIVE_STANDARD_INTERFACE, NORMATIVE_CREATE,
             node))
 
     # Configure
     sequence += \
         create_relationship_tasks(
-            NORMATIVE_PRE_CONFIGURE_SOURCE,
-            'source',
+            NORMATIVE_CONFIGURE_INTERFACE, NORMATIVE_PRE_CONFIGURE_SOURCE,
+            Task.RUNS_ON_SOURCE,
             node)
     sequence += \
         create_relationship_tasks(
-            NORMATIVE_PRE_CONFIGURE_TARGET,
-            'target',
+            NORMATIVE_CONFIGURE_INTERFACE, NORMATIVE_PRE_CONFIGURE_TARGET,
+            Task.RUNS_ON_TARGET,
             node)
     sequence.append(
         create_node_task(
-            NORMATIVE_CONFIGURE,
+            NORMATIVE_STANDARD_INTERFACE, NORMATIVE_CONFIGURE,
             node))
     sequence += \
         create_relationship_tasks(
-            NORMATIVE_POST_CONFIGURE_SOURCE,
-            'source',
+            NORMATIVE_CONFIGURE_INTERFACE, NORMATIVE_POST_CONFIGURE_SOURCE,
+            Task.RUNS_ON_SOURCE,
             node)
     sequence += \
         create_relationship_tasks(
-            NORMATIVE_POST_CONFIGURE_TARGET,
-            'target',
+            NORMATIVE_CONFIGURE_INTERFACE, NORMATIVE_POST_CONFIGURE_TARGET,
+            Task.RUNS_ON_TARGET,
             node)
 
     # Start
@@ -106,7 +107,7 @@ def install_node(graph, node, **kwargs):
     graph.sequence(*sequence)
 
 
-@workflow(suffix_template='{node.id}')
+@workflow(suffix_template='{node.name}')
 def uninstall_node(graph, node, **kwargs):
     # Stop
     sequence = _create_stop_tasks(node)
@@ -114,18 +115,18 @@ def uninstall_node(graph, node, **kwargs):
     # Delete
     sequence.append(
         create_node_task(
-            NORMATIVE_DELETE,
+            NORMATIVE_STANDARD_INTERFACE, NORMATIVE_DELETE,
             node))
 
     graph.sequence(*sequence)
 
 
-@workflow(suffix_template='{node.id}')
+@workflow(suffix_template='{node.name}')
 def start_node(graph, node, **kwargs):
     graph.sequence(*_create_start_tasks(node))
 
 
-@workflow(suffix_template='{node.id}')
+@workflow(suffix_template='{node.name}')
 def stop_node(graph, node, **kwargs):
     graph.sequence(*_create_stop_tasks(node))
 
@@ -134,22 +135,22 @@ def _create_start_tasks(node):
     sequence = []
     sequence.append(
         create_node_task(
-            NORMATIVE_START,
+            NORMATIVE_STANDARD_INTERFACE, NORMATIVE_START,
             node))
     sequence += \
         create_relationship_tasks(
-            NORMATIVE_ADD_SOURCE,
-            'source',
+            NORMATIVE_CONFIGURE_INTERFACE, NORMATIVE_ADD_SOURCE,
+            Task.RUNS_ON_SOURCE,
             node)
     sequence += \
         create_relationship_tasks(
-            NORMATIVE_ADD_TARGET,
-            'target',
+            NORMATIVE_CONFIGURE_INTERFACE, NORMATIVE_ADD_TARGET,
+            Task.RUNS_ON_TARGET,
             node)
     sequence += \
         create_relationship_tasks(
-            NORMATIVE_TARGET_CHANGED,
-            'target',
+            NORMATIVE_CONFIGURE_INTERFACE, NORMATIVE_TARGET_CHANGED,
+            Task.RUNS_ON_TARGET,
             node)
     return sequence
 
@@ -158,16 +159,16 @@ def _create_stop_tasks(node):
     sequence = []
     sequence += \
         create_relationship_tasks(
-            NORMATIVE_REMOVE_TARGET,
-            'target',
+            NORMATIVE_CONFIGURE_INTERFACE, NORMATIVE_REMOVE_TARGET,
+            Task.RUNS_ON_TARGET,
             node)
     sequence += \
         create_relationship_tasks(
-            NORMATIVE_TARGET_CHANGED,
-            'target',
+            NORMATIVE_CONFIGURE_INTERFACE, NORMATIVE_TARGET_CHANGED,
+            Task.RUNS_ON_TARGET,
             node)
     sequence.append(
         create_node_task(
-            NORMATIVE_STOP,
+            NORMATIVE_STANDARD_INTERFACE, NORMATIVE_STOP,
             node))
     return sequence

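For readability, here is the execution order that install_node together with _create_start_tasks above wires into the graph, expressed as (interface, operation) pairs. This is only a restatement of the sequence built by the code, not an additional API.

    # Execution order produced by install_node + _create_start_tasks above.
    INSTALL_SEQUENCE = [
        ('Standard', 'create'),
        ('Configure', 'pre_configure_source'),
        ('Configure', 'pre_configure_target'),
        ('Standard', 'configure'),
        ('Configure', 'post_configure_source'),
        ('Configure', 'post_configure_target'),
        ('Standard', 'start'),
        ('Configure', 'add_source'),
        ('Configure', 'add_target'),
        ('Configure', 'target_changed'),
    ]
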
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/workflows/core/engine.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/engine.py b/aria/orchestrator/workflows/core/engine.py
index c6ac2b3..fa4550d 100644
--- a/aria/orchestrator/workflows/core/engine.py
+++ b/aria/orchestrator/workflows/core/engine.py
@@ -23,7 +23,7 @@ from datetime import datetime
 import networkx
 
 from aria import logger
-from aria.storage.modeling import model
+from aria.modeling import models
 from aria.orchestrator import events
 
 from .. import exceptions
@@ -82,18 +82,18 @@ class Engine(logger.LoggerMixin):
         events.on_cancelling_workflow_signal.send(self._workflow_context)
 
     def _is_cancel(self):
-        return self._workflow_context.execution.status in [model.Execution.CANCELLING,
-                                                           model.Execution.CANCELLED]
+        return self._workflow_context.execution.status in [models.Execution.CANCELLING,
+                                                           models.Execution.CANCELLED]
 
     def _executable_tasks(self):
         now = datetime.utcnow()
         return (task for task in self._tasks_iter()
-                if task.status in model.Task.WAIT_STATES and
+                if task.status in models.Task.WAIT_STATES and
                 task.due_at <= now and
                 not self._task_has_dependencies(task))
 
     def _ended_tasks(self):
-        return (task for task in self._tasks_iter() if task.status in model.Task.END_STATES)
+        return (task for task in self._tasks_iter() if task.status in models.Task.END_STATES)
 
     def _task_has_dependencies(self, task):
         return len(self._execution_graph.pred.get(task.id, {})) > 0
@@ -105,19 +105,19 @@ class Engine(logger.LoggerMixin):
         for _, data in self._execution_graph.nodes_iter(data=True):
             task = data['task']
             if isinstance(task, engine_task.OperationTask):
-                if task.model_task.status not in model.Task.END_STATES:
+                if task.model_task.status not in models.Task.END_STATES:
                     self._workflow_context.model.task.refresh(task.model_task)
             yield task
 
     def _handle_executable_task(self, task):
         if isinstance(task, engine_task.StubTask):
-            task.status = model.Task.SUCCESS
+            task.status = models.Task.SUCCESS
         else:
             events.sent_task_signal.send(task)
             self._executor.execute(task)
 
     def _handle_ended_tasks(self, task):
-        if task.status == model.Task.FAILED and not task.ignore_failure:
+        if task.status == models.Task.FAILED and not task.ignore_failure:
             raise exceptions.ExecutorException('Workflow failed')
         else:
             self._execution_graph.remove_node(task.id)

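Engine._executable_tasks above gates each task on three conditions: it is in a wait state, its due time has passed, and it has no unfinished predecessors in the execution graph. A standalone sketch of that gate; the state names here are placeholders, not the actual models.Task constants.

    from collections import namedtuple
    from datetime import datetime, timedelta

    Task = namedtuple('Task', 'id status due_at')

    WAIT_STATES = ('pending', 'retrying')  # placeholder for models.Task.WAIT_STATES

    def executable_tasks(tasks, predecessors):
        # `predecessors` maps task id -> set of unfinished dependency ids, playing
        # the role of the execution graph's `pred` lookup above.
        now = datetime.utcnow()
        return [t for t in tasks
                if t.status in WAIT_STATES
                and t.due_at <= now
                and not predecessors.get(t.id)]

    tasks = [Task('a', 'pending', datetime.utcnow() - timedelta(seconds=1)),
             Task('b', 'pending', datetime.utcnow() + timedelta(hours=1)),
             Task('c', 'success', datetime.utcnow())]
    print([t.id for t in executable_tasks(tasks, {'a': set()})])  # ['a']
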
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/workflows/core/events_handler.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/events_handler.py b/aria/orchestrator/workflows/core/events_handler.py
index c973ad9..a420d2b 100644
--- a/aria/orchestrator/workflows/core/events_handler.py
+++ b/aria/orchestrator/workflows/core/events_handler.py
@@ -14,7 +14,7 @@
 # limitations under the License.
 
 """
-Aria's events Sub-Package
+ARIA's events Sub-Package
 Path: aria.events.storage_event_handler
 
 Implementation of storage handlers for workflow and operation events.

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py
index 9f63bcf..f23312d 100644
--- a/aria/orchestrator/workflows/core/task.py
+++ b/aria/orchestrator/workflows/core/task.py
@@ -16,6 +16,7 @@
 """
 Workflow tasks
 """
+
 from contextlib import contextmanager
 from datetime import datetime
 from functools import (
@@ -23,9 +24,9 @@ from functools import (
     wraps,
 )
 
-from aria.storage.modeling import model
-from aria.orchestrator.context import operation as operation_context
 
+from ....modeling import models
+from ...context import operation as operation_context
 from .. import exceptions
 
 
@@ -36,7 +37,7 @@ def _locked(func=None):
     @wraps(func)
     def _wrapper(self, value, **kwargs):
         if self._update_fields is None:
-            raise exceptions.TaskException("Task is not in update mode")
+            raise exceptions.TaskException('Task is not in update mode')
         return func(self, value, **kwargs)
     return _wrapper
 
@@ -65,66 +66,61 @@ class StubTask(BaseTask):
 
     def __init__(self, *args, **kwargs):
         super(StubTask, self).__init__(*args, **kwargs)
-        self.status = model.Task.PENDING
+        self.status = models.Task.PENDING
         self.due_at = datetime.utcnow()
 
 
 class StartWorkflowTask(StubTask):
     """
-    Tasks marking a workflow start
+    Task marking a workflow start
     """
     pass
 
 
 class EndWorkflowTask(StubTask):
     """
-    Tasks marking a workflow end
+    Task marking a workflow end
     """
     pass
 
 
 class StartSubWorkflowTask(StubTask):
     """
-    Tasks marking a subworkflow start
+    Task marking a subworkflow start
     """
     pass
 
 
 class EndSubWorkflowTask(StubTask):
     """
-    Tasks marking a subworkflow end
+    Task marking a subworkflow end
     """
     pass
 
 
 class OperationTask(BaseTask):
     """
-    Operation tasks
+    Operation task
     """
 
     def __init__(self, api_task, *args, **kwargs):
         super(OperationTask, self).__init__(id=api_task.id, **kwargs)
         self._workflow_context = api_task._workflow_context
         model_storage = api_task._workflow_context.model
+        plugin = api_task.plugin
 
         base_task_model = model_storage.task.model_cls
-        if isinstance(api_task.actor, model.Node):
+        if isinstance(api_task.actor, models.Node):
             context_cls = operation_context.NodeOperationContext
-            task_model_cls = base_task_model.as_node_instance
-        elif isinstance(api_task.actor, model.Relationship):
+            create_task_model = base_task_model.for_node
+        elif isinstance(api_task.actor, models.Relationship):
             context_cls = operation_context.RelationshipOperationContext
-            task_model_cls = base_task_model.as_relationship_instance
+            create_task_model = base_task_model.for_relationship
         else:
             raise RuntimeError('No operation context could be created for {actor.model_cls}'
                                .format(actor=api_task.actor))
-        plugin = api_task.plugin
-        plugins = model_storage.plugin.list(filters={
-            'package_name': plugin.get('package_name', ''),
-            'package_version': plugin.get('package_version', '')
-        })
-        # Validation during installation ensures that at most one plugin can exists with provided
-        # package_name and package_version
-        operation_task = task_model_cls(
+
+        task_model = create_task_model(
             name=api_task.name,
             implementation=api_task.implementation,
             instance=api_task.actor,
@@ -133,22 +129,21 @@ class OperationTask(BaseTask):
             max_attempts=api_task.max_attempts,
             retry_interval=api_task.retry_interval,
             ignore_failure=api_task.ignore_failure,
-            plugin=plugins[0] if plugins else None,
-            plugin_name=plugin.get('name'),
+            plugin=plugin,
             execution=self._workflow_context.execution,
             runs_on=api_task.runs_on
         )
-        self._workflow_context.model.task.put(operation_task)
+        self._workflow_context.model.task.put(task_model)
 
         self._ctx = context_cls(name=api_task.name,
                                 model_storage=self._workflow_context.model,
                                 resource_storage=self._workflow_context.resource,
-                                service_instance_id=self._workflow_context._service_instance_id,
-                                task_id=operation_task.id,
+                                service_id=self._workflow_context._service_id,
+                                task_id=task_model.id,
                                 actor_id=api_task.actor.id,
                                 execution_id=self._workflow_context._execution_id,
                                 workdir=self._workflow_context._workdir)
-        self._task_id = operation_task.id
+        self._task_id = task_model.id
         self._update_fields = None
 
     @contextmanager

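The core OperationTask above now branches on the actor's model type to pick both the operation context class and the task-model factory (for_node versus for_relationship). A toy sketch of that dispatch shape, using placeholder classes and returning illustrative labels rather than the real ARIA classes:

    class Node(object):
        """Placeholder for models.Node."""

    class Relationship(object):
        """Placeholder for models.Relationship."""

    def pick_context_and_factory(actor):
        # Same branching shape as the core OperationTask.__init__ above.
        if isinstance(actor, Node):
            return 'NodeOperationContext', 'for_node'
        elif isinstance(actor, Relationship):
            return 'RelationshipOperationContext', 'for_relationship'
        raise RuntimeError('No operation context could be created for {0!r}'.format(actor))

    print(pick_context_and_factory(Node()))          # ('NodeOperationContext', 'for_node')
    print(pick_context_and_factory(Relationship()))
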
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/workflows/events_logging.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/events_logging.py b/aria/orchestrator/workflows/events_logging.py
index 73d8994..e831bfe 100644
--- a/aria/orchestrator/workflows/events_logging.py
+++ b/aria/orchestrator/workflows/events_logging.py
@@ -15,7 +15,7 @@
 
 
 """
-Aria's events Sub-Package
+ARIA's events Sub-Package
 Path: aria.events.storage_event_handler
 
 Implementation of logger handlers for workflow and operation events.

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/workflows/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/exceptions.py b/aria/orchestrator/workflows/exceptions.py
index e2f5b59..4fb8dd7 100644
--- a/aria/orchestrator/workflows/exceptions.py
+++ b/aria/orchestrator/workflows/exceptions.py
@@ -68,4 +68,15 @@ class TaskException(exceptions.AriaError):
     """
     Raised by the task
     """
-    pass
+
+
+class OperationNotFoundException(TaskException):
+    """
+    Could not find an operation on the node or relationship.
+    """
+
+
+class PluginNotFoundException(TaskException):
+    """
+    Could not find a plugin matching the plugin specification.
+    """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/workflows/executor/celery.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/celery.py b/aria/orchestrator/workflows/executor/celery.py
index 8a096b5..baa0375 100644
--- a/aria/orchestrator/workflows/executor/celery.py
+++ b/aria/orchestrator/workflows/executor/celery.py
@@ -44,7 +44,7 @@ class CeleryExecutor(BaseExecutor):
 
     def execute(self, task):
         self._tasks[task.id] = task
-        inputs = task.inputs.copy()
+        inputs = dict((k, v.value) for k, v in task.inputs.iteritems())
         inputs['ctx'] = task.context
         self._results[task.id] = self._app.send_task(
             task.operation_mapping,

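All three executors (celery here, process and thread below) now unwrap the Parameter-wrapped inputs back into raw values before invoking the operation. A minimal illustration of that unwrapping, with a namedtuple standing in for models.Parameter since only its .value attribute is read here:

    from collections import namedtuple

    # Minimal stand-in for models.Parameter; the executors above only read `.value`.
    Parameter = namedtuple('Parameter', 'name value')

    def unwrap_inputs(inputs):
        # {name: Parameter} -> {name: raw value}, matching the expression used in
        # the celery, process and thread executors.
        return dict((k, v.value) for k, v in inputs.items())

    inputs = {'port': Parameter('port', 8080), 'host': Parameter('host', 'localhost')}
    print(unwrap_inputs(inputs) == {'port': 8080, 'host': 'localhost'})  # True
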
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/workflows/executor/process.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py
index d999b37..6397e88 100644
--- a/aria/orchestrator/workflows/executor/process.py
+++ b/aria/orchestrator/workflows/executor/process.py
@@ -47,7 +47,7 @@ from aria.utils import imports
 from aria.utils import exceptions
 from aria.orchestrator.workflows.executor import base
 from aria.storage import instrumentation
-from aria.storage.modeling import type as storage_type
+from aria.modeling import types as modeling_types
 
 _IS_WIN = os.name == 'nt'
 
@@ -148,7 +148,7 @@ class ProcessExecutor(base.BaseExecutor):
         return {
             'task_id': task.id,
             'implementation': task.implementation,
-            'operation_inputs': task.inputs,
+            'operation_inputs': dict((k, v.value) for k, v in task.inputs.iteritems()),
             'port': self._server_port,
             'context': task.context.serialization_dict,
         }
@@ -381,7 +381,8 @@ def _main():
 
     # This is required for the instrumentation to work properly.
     # See docstring of `remove_mutable_association_listener` for further details
-    storage_type.remove_mutable_association_listener()
+    modeling_types.remove_mutable_association_listener()
+
     with instrumentation.track_changes() as instrument:
         try:
             ctx = context_dict['context_cls'].deserialize_from_dict(**context_dict['context'])

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/workflows/executor/thread.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/thread.py b/aria/orchestrator/workflows/executor/thread.py
index 6c59986..1a49af5 100644
--- a/aria/orchestrator/workflows/executor/thread.py
+++ b/aria/orchestrator/workflows/executor/thread.py
@@ -58,7 +58,8 @@ class ThreadExecutor(BaseExecutor):
                 self._task_started(task)
                 try:
                     task_func = imports.load_attribute(task.implementation)
-                    task_func(ctx=task.context, **task.inputs)
+                    inputs = dict((k, v.value) for k, v in task.inputs.iteritems())
+                    task_func(ctx=task.context, **inputs)
                     self._task_succeeded(task)
                 except BaseException as e:
                     self._task_failed(task, exception=e)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/parser/consumption/__init__.py
----------------------------------------------------------------------
diff --git a/aria/parser/consumption/__init__.py b/aria/parser/consumption/__init__.py
index 7b7590e..7da8490 100644
--- a/aria/parser/consumption/__init__.py
+++ b/aria/parser/consumption/__init__.py
@@ -20,7 +20,7 @@ from .style import Style
 from .consumer import Consumer, ConsumerChain
 from .presentation import Read
 from .validation import Validate
-from .modeling import Model, Types, Instance
+from .modeling import ServiceTemplate, Types, ServiceInstance
 from .inputs import Inputs
 
 __all__ = (
@@ -31,7 +31,7 @@ __all__ = (
     'ConsumerChain',
     'Read',
     'Validate',
-    'Model',
+    'ServiceTemplate',
     'Types',
-    'Instance',
+    'ServiceInstance',
     'Inputs')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/parser/consumption/modeling.py
----------------------------------------------------------------------
diff --git a/aria/parser/consumption/modeling.py b/aria/parser/consumption/modeling.py
index 599c260..4847ba7 100644
--- a/aria/parser/consumption/modeling.py
+++ b/aria/parser/consumption/modeling.py
@@ -17,61 +17,65 @@ from ...utils.formatting import json_dumps, yaml_dumps
 from .consumer import Consumer, ConsumerChain
 
 
-class Derive(Consumer):
+class DeriveServiceTemplate(Consumer):
     """
-    Derives the service model.
+    Derives the service template from the presenter.
     """
 
     def consume(self):
         if self.context.presentation.presenter is None:
-            self.context.validation.report('Derive consumer: missing presenter')
+            self.context.validation.report('DeriveServiceTemplate consumer: missing presenter')
             return
 
-        if not hasattr(self.context.presentation.presenter, '_get_service_model'):
-            self.context.validation.report('Derive consumer: presenter does not support '
-                                           '"_get_service_model"')
+        if not hasattr(self.context.presentation.presenter, '_get_model'):
+            self.context.validation.report('DeriveServiceTemplate consumer: presenter does not'
+                                           ' support "_get_model"')
             return
 
-        self.context.modeling.model = \
-            self.context.presentation.presenter._get_service_model(self.context)
+        self.context.modeling.template = \
+            self.context.presentation.presenter._get_model(self.context)
 
 
-class CoerceModelValues(Consumer):
+class CoerceServiceTemplateValues(Consumer):
     """
-    Coerces values in the service model.
+    Coerces values in the service template.
     """
 
     def consume(self):
-        self.context.modeling.model.coerce_values(self.context, None, True)
+        self.context.modeling.template.coerce_values(None, True)
 
 
-class ValidateModel(Consumer):
+class ValidateServiceTemplate(Consumer):
     """
-    Validates the service model.
+    Validates the service template.
     """
 
     def consume(self):
-        self.context.modeling.model.validate(self.context)
+        self.context.modeling.template.validate()
 
-class Model(ConsumerChain):
+
+class ServiceTemplate(ConsumerChain):
     """
-    Generates the service model by deriving it from the presentation.
+    Generates the service template from the presenter.
     """
 
     def __init__(self, context):
-        super(Model, self).__init__(context, (Derive, CoerceModelValues, ValidateModel))
+        super(ServiceTemplate, self).__init__(context, (DeriveServiceTemplate,
+                                                        CoerceServiceTemplateValues,
+                                                        ValidateServiceTemplate))
 
     def dump(self):
         if self.context.has_arg_switch('yaml'):
             indent = self.context.get_arg_value_int('indent', 2)
-            raw = self.context.modeling.model_as_raw
+            raw = self.context.modeling.template_as_raw
             self.context.write(yaml_dumps(raw, indent=indent))
         elif self.context.has_arg_switch('json'):
             indent = self.context.get_arg_value_int('indent', 2)
-            raw = self.context.modeling.model_as_raw
+            raw = self.context.modeling.template_as_raw
             self.context.write(json_dumps(raw, indent=indent))
         else:
-            self.context.modeling.model.dump(self.context)
+            self.context.modeling.template.dump()
+
 
 class Types(Consumer):
     """
@@ -88,35 +92,40 @@ class Types(Consumer):
             raw = self.context.modeling.types_as_raw
             self.context.write(json_dumps(raw, indent=indent))
         else:
-            self.context.modeling.dump_types(self.context)
+            self.context.modeling.template.dump_types()
 
-class Instantiate(Consumer):
+
+class InstantiateServiceInstance(Consumer):
     """
-    Instantiates the service model.
+    Instantiates the service template into a service instance.
     """
 
     def consume(self):
-        if self.context.modeling.model is None:
-            self.context.validation.report('Instantiate consumer: missing service model')
+        if self.context.modeling.template is None:
+            self.context.validation.report('InstantiateServiceInstance consumer: missing service '
+                                           'template')
             return
 
-        self.context.modeling.model.instantiate(self.context, None)
+        self.context.modeling.template.instantiate(None)
+
 
-class CoerceInstanceValues(Consumer):
+class CoerceServiceInstanceValues(Consumer):
     """
     Coerces values in the service instance.
     """
 
     def consume(self):
-        self.context.modeling.instance.coerce_values(self.context, None, True)
+        self.context.modeling.instance.coerce_values(None, True)
 
-class ValidateInstance(Consumer):
+
+class ValidateServiceInstance(Consumer):
     """
     Validates the service instance.
     """
 
     def consume(self):
-        self.context.modeling.instance.validate(self.context)
+        self.context.modeling.instance.validate()
+
 
 class SatisfyRequirements(Consumer):
     """
@@ -124,7 +133,8 @@ class SatisfyRequirements(Consumer):
     """
 
     def consume(self):
-        self.context.modeling.instance.satisfy_requirements(self.context)
+        self.context.modeling.instance.satisfy_requirements()
+
 
 class ValidateCapabilities(Consumer):
     """
@@ -132,22 +142,27 @@ class ValidateCapabilities(Consumer):
     """
 
     def consume(self):
-        self.context.modeling.instance.validate_capabilities(self.context)
+        self.context.modeling.instance.validate_capabilities()
+
 
-class Instance(ConsumerChain):
+class ServiceInstance(ConsumerChain):
     """
-    Generates the service instance by instantiating the service model.
+    Generates the service instance by instantiating the service template.
     """
 
     def __init__(self, context):
-        super(Instance, self).__init__(context, (Instantiate, CoerceInstanceValues,
-                                                 ValidateInstance, CoerceInstanceValues,
-                                                 SatisfyRequirements, CoerceInstanceValues,
-                                                 ValidateCapabilities, CoerceInstanceValues))
+        super(ServiceInstance, self).__init__(context, (InstantiateServiceInstance,
+                                                        CoerceServiceInstanceValues,
+                                                        ValidateServiceInstance,
+                                                        CoerceServiceInstanceValues,
+                                                        SatisfyRequirements,
+                                                        CoerceServiceInstanceValues,
+                                                        ValidateCapabilities,
+                                                        CoerceServiceInstanceValues))
 
     def dump(self):
         if self.context.has_arg_switch('graph'):
-            self.context.modeling.instance.dump_graph(self.context)
+            self.context.modeling.instance.dump_graph()
         elif self.context.has_arg_switch('yaml'):
             indent = self.context.get_arg_value_int('indent', 2)
             raw = self.context.modeling.instance_as_raw
@@ -157,4 +172,4 @@ class Instance(ConsumerChain):
             raw = self.context.modeling.instance_as_raw
             self.context.write(json_dumps(raw, indent=indent))
         else:
-            self.context.modeling.instance.dump(self.context)
+            self.context.modeling.instance.dump()
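
The chain composition above is easier to follow with a minimal, standalone sketch of the consumer pattern (stand-in classes only; the real Consumer/ConsumerChain in aria.parser.consumption also handle validation reporting and dumping). It shows why CoerceServiceInstanceValues is interleaved after every step: consumers run strictly in order against the shared context.

    class Consumer(object):
        """Stand-in for aria.parser.consumption.Consumer (simplified)."""
        def __init__(self, context):
            self.context = context

        def consume(self):
            raise NotImplementedError


    class ConsumerChain(Consumer):
        """Stand-in chain: instantiates each consumer class and runs them in order."""
        def __init__(self, context, consumer_classes):
            super(ConsumerChain, self).__init__(context)
            self.consumers = [cls(context) for cls in consumer_classes]

        def consume(self):
            for consumer in self.consumers:
                consumer.consume()


    class Instantiate(Consumer):
        def consume(self):
            self.context.append('instantiate')


    class Coerce(Consumer):
        def consume(self):
            self.context.append('coerce')


    class Validate(Consumer):
        def consume(self):
            self.context.append('validate')


    # The "context" here is just a list recording execution order
    chain = ConsumerChain([], (Instantiate, Coerce, Validate, Coerce))
    chain.consume()
    print(chain.context)  # ['instantiate', 'coerce', 'validate', 'coerce']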

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/parser/consumption/style.py
----------------------------------------------------------------------
diff --git a/aria/parser/consumption/style.py b/aria/parser/consumption/style.py
index 88ad934..72892b9 100644
--- a/aria/parser/consumption/style.py
+++ b/aria/parser/consumption/style.py
@@ -43,7 +43,7 @@ class Style(object):
 
     @staticmethod
     def literal(value):
-        return Colored.yellow(safe_repr(value), bold=True)
+        return Colored.magenta(safe_repr(value))
 
     @staticmethod
     def meta(value):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/parser/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/aria/parser/modeling/__init__.py b/aria/parser/modeling/__init__.py
index cad25ca..df127cd 100644
--- a/aria/parser/modeling/__init__.py
+++ b/aria/parser/modeling/__init__.py
@@ -14,57 +14,9 @@
 # limitations under the License.
 
 from .context import IdType, ModelingContext
-from .elements import Element, ModelElement, Function, Parameter, Metadata
-from .instance_elements import (ServiceInstance, Node, Capability, Relationship, Artifact, Group,
-                                Policy, GroupPolicy, GroupPolicyTrigger, Mapping, Substitution,
-                                Interface, Operation)
-from .model_elements import (ServiceModel, NodeTemplate, RequirementTemplate, CapabilityTemplate,
-                             RelationshipTemplate, ArtifactTemplate, GroupTemplate, PolicyTemplate,
-                             GroupPolicyTemplate, GroupPolicyTriggerTemplate, MappingTemplate,
-                             SubstitutionTemplate, InterfaceTemplate, OperationTemplate)
-from .types import TypeHierarchy, Type, RelationshipType, PolicyType, PolicyTriggerType
-from .exceptions import CannotEvaluateFunctionException
 
 
 __all__ = (
     'IdType',
-    'ModelingContext',
-    'Element',
-    'ModelElement',
-    'Function',
-    'Parameter',
-    'Metadata',
-    'ServiceInstance',
-    'Node',
-    'Capability',
-    'Relationship',
-    'Artifact',
-    'Group',
-    'Policy',
-    'GroupPolicy',
-    'GroupPolicyTrigger',
-    'Mapping',
-    'Substitution',
-    'Interface',
-    'Operation',
-    'ServiceModel',
-    'NodeTemplate',
-    'RequirementTemplate',
-    'CapabilityTemplate',
-    'RelationshipTemplate',
-    'ArtifactTemplate',
-    'GroupTemplate',
-    'PolicyTemplate',
-    'GroupPolicyTemplate',
-    'GroupPolicyTriggerTemplate',
-    'MappingTemplate',
-    'SubstitutionTemplate',
-    'InterfaceTemplate',
-    'OperationTemplate',
-    'TypeHierarchy',
-    'Type',
-    'RelationshipType',
-    'PolicyType',
-    'PolicyTriggerType',
-    'CannotEvaluateFunctionException',
+    'ModelingContext'
 )

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/parser/modeling/context.py
----------------------------------------------------------------------
diff --git a/aria/parser/modeling/context.py b/aria/parser/modeling/context.py
index d47c202..dff5991 100644
--- a/aria/parser/modeling/context.py
+++ b/aria/parser/modeling/context.py
@@ -15,11 +15,8 @@
 
 import itertools
 
-from ...utils.collections import StrictDict, prune, OrderedDict
-from ...utils.formatting import as_raw
-from ...utils.console import puts
-from .types import TypeHierarchy
-from .utils import generate_id_string
+from ...utils.collections import StrictDict, prune
+from ...utils.uuid import generate_uuid
 
 
 class IdType(object):
@@ -35,7 +32,7 @@ class IdType(object):
 
     UNIVERSAL_RANDOM = 2
     """
-    Universally unique ID (UUID): 25 random safe characters.
+    Universally unique ID (UUID): 22 random safe characters.
     """
 
 
@@ -43,73 +40,58 @@ class ModelingContext(object):
     """
     Properties:
 
-    * :code:`model`: The generated service model
+    * :code:`template`: The generated service template
     * :code:`instance`: The generated service instance
+    * :code:`node_id_format`: Format for node instance IDs
     * :code:`id_type`: Type of IDs to use for instances
     * :code:`id_max_length`: Maximum allowed instance ID length
     * :code:`inputs`: Dict of inputs values
-    * :code:`node_types`: The generated hierarchy of node types
-    * :code:`group_types`: The generated hierarchy of group types
-    * :code:`capability_types`: The generated hierarchy of capability types
-    * :code:`relationship_types`: The generated hierarchy of relationship types
-    * :code:`policy_types`: The generated hierarchy of policy types
-    * :code:`policy_trigger_types`: The generated hierarchy of policy trigger types
-    * :code:`artifact_types`: The generated hierarchy of artifact types
-    * :code:`interface_types`: The generated hierarchy of interface types
     """
 
     def __init__(self):
-        self.model = None
+        self.template = None
         self.instance = None
+        self.node_id_format = '{template}_{id}'
         #self.id_type = IdType.LOCAL_SERIAL
         #self.id_type = IdType.LOCAL_RANDOM
         self.id_type = IdType.UNIVERSAL_RANDOM
         self.id_max_length = 63 # See: http://www.faqs.org/rfcs/rfc1035.html
         self.inputs = StrictDict(key_class=basestring)
-        self.node_types = TypeHierarchy()
-        self.group_types = TypeHierarchy()
-        self.capability_types = TypeHierarchy()
-        self.relationship_types = TypeHierarchy()
-        self.policy_types = TypeHierarchy()
-        self.policy_trigger_types = TypeHierarchy()
-        self.artifact_types = TypeHierarchy()
-        self.interface_types = TypeHierarchy()
 
         self._serial_id_counter = itertools.count(1)
         self._locally_unique_ids = set()
 
+    def store(self, model_storage):
+        if self.template is not None:
+            model_storage.service_template.put(self.template)
+        if self.instance is not None:
+            model_storage.service.put(self.instance)
+
+    def generate_node_id(self, template_name):
+        return self.node_id_format.format(
+            template=template_name,
+            id=self.generate_id())
+
     def generate_id(self):
         if self.id_type == IdType.LOCAL_SERIAL:
             return self._serial_id_counter.next()
 
         elif self.id_type == IdType.LOCAL_RANDOM:
-            the_id = generate_id_string(6)
+            the_id = generate_uuid(6)
             while the_id in self._locally_unique_ids:
-                the_id = generate_id_string(6)
+                the_id = generate_uuid(6)
             self._locally_unique_ids.add(the_id)
             return the_id
 
-        return generate_id_string()
+        return generate_uuid()
 
     def set_input(self, name, value):
         self.inputs[name] = value
         # TODO: coerce to validate type
 
     @property
-    def types_as_raw(self):
-        return OrderedDict((
-            ('node_types', as_raw(self.node_types)),
-            ('group_types', as_raw(self.group_types)),
-            ('capability_types', as_raw(self.capability_types)),
-            ('relationship_types', as_raw(self.relationship_types)),
-            ('policy_types', as_raw(self.policy_types)),
-            ('policy_trigger_types', as_raw(self.policy_trigger_types)),
-            ('artifact_types', as_raw(self.artifact_types)),
-            ('interface_types', as_raw(self.interface_types))))
-
-    @property
-    def model_as_raw(self):
-        raw = self.model.as_raw
+    def template_as_raw(self):
+        raw = self.template.as_raw
         prune(raw)
         return raw
 
@@ -118,29 +100,3 @@ class ModelingContext(object):
         raw = self.instance.as_raw
         prune(raw)
         return raw
-
-    def dump_types(self, context):
-        if self.node_types.children:
-            puts('Node types:')
-            self.node_types.dump(context)
-        if self.group_types.children:
-            puts('Group types:')
-            self.group_types.dump(context)
-        if self.capability_types.children:
-            puts('Capability types:')
-            self.capability_types.dump(context)
-        if self.relationship_types.children:
-            puts('Relationship types:')
-            self.relationship_types.dump(context)
-        if self.policy_types.children:
-            puts('Policy types:')
-            self.policy_types.dump(context)
-        if self.policy_trigger_types.children:
-            puts('Policy trigger types:')
-            self.policy_trigger_types.dump(context)
-        if self.artifact_types.children:
-            puts('Artifact types:')
-            self.artifact_types.dump(context)
-        if self.interface_types.children:
-            puts('Interface types:')
-            self.interface_types.dump(context)
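
To make the new node naming concrete, here is a standalone sketch of what ModelingContext.generate_node_id does with the '{template}_{id}' format above; uuid.uuid4 stands in for aria.utils.uuid.generate_uuid (which, per the docstring change, yields 22 URL-safe characters).

    import uuid

    NODE_ID_FORMAT = '{template}_{id}'

    def generate_uuid():
        # Stand-in for aria.utils.uuid.generate_uuid
        return uuid.uuid4().hex

    def generate_node_id(template_name):
        return NODE_ID_FORMAT.format(template=template_name, id=generate_uuid())

    print(generate_node_id('web_server'))  # e.g. 'web_server_5f0a3c...'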


[12/12] incubator-ariatosca git commit: ARIA-105 Integrate parser and orchestrator models

Posted by em...@apache.org.
ARIA-105 Integrate parser and orchestrator models


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/9841ca4a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/9841ca4a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/9841ca4a

Branch: refs/heads/master
Commit: 9841ca4ae8df4353a75250ce57adfaaacec3aa88
Parents: 95177d0
Author: Tal Liron <ta...@gmail.com>
Authored: Fri Feb 17 16:00:40 2017 -0600
Committer: Tal Liron <ta...@gmail.com>
Committed: Tue Mar 21 12:36:49 2017 -0500

----------------------------------------------------------------------
 aria/VERSION.py                                 |    4 +-
 aria/__init__.py                                |   44 +-
 aria/cli/args_parser.py                         |   11 +-
 aria/cli/commands.py                            |   59 +-
 aria/cli/dry.py                                 |   88 +
 aria/exceptions.py                              |    6 +-
 aria/logger.py                                  |    2 +-
 aria/modeling/__init__.py                       |   48 +
 aria/modeling/exceptions.py                     |   34 +
 aria/modeling/functions.py                      |   32 +
 aria/modeling/mixins.py                         |  142 ++
 aria/modeling/models.py                         |  286 +++
 aria/modeling/orchestration.py                  |  351 ++++
 aria/modeling/relationship.py                   |  402 +++++
 aria/modeling/service_changes.py                |  228 +++
 aria/modeling/service_common.py                 |  277 +++
 aria/modeling/service_instance.py               | 1564 ++++++++++++++++
 aria/modeling/service_template.py               | 1701 ++++++++++++++++++
 aria/modeling/types.py                          |  304 ++++
 aria/modeling/utils.py                          |  121 ++
 aria/orchestrator/__init__.py                   |    2 +-
 aria/orchestrator/context/common.py             |   31 +-
 aria/orchestrator/context/operation.py          |   12 +-
 aria/orchestrator/context/workflow.py           |   12 +-
 aria/orchestrator/decorators.py                 |    6 +-
 aria/orchestrator/runner.py                     |   15 +-
 aria/orchestrator/workflows/api/task.py         |  286 ++-
 aria/orchestrator/workflows/api/task_graph.py   |    4 +-
 .../workflows/builtin/execute_operation.py      |   59 +-
 aria/orchestrator/workflows/builtin/heal.py     |  188 +-
 aria/orchestrator/workflows/builtin/utils.py    |   42 +-
 .../orchestrator/workflows/builtin/workflows.py |   85 +-
 aria/orchestrator/workflows/core/engine.py      |   16 +-
 .../workflows/core/events_handler.py            |    2 +-
 aria/orchestrator/workflows/core/task.py        |   49 +-
 aria/orchestrator/workflows/events_logging.py   |    2 +-
 aria/orchestrator/workflows/exceptions.py       |   13 +-
 aria/orchestrator/workflows/executor/celery.py  |    2 +-
 aria/orchestrator/workflows/executor/process.py |    7 +-
 aria/orchestrator/workflows/executor/thread.py  |    3 +-
 aria/parser/consumption/__init__.py             |    6 +-
 aria/parser/consumption/modeling.py             |   95 +-
 aria/parser/consumption/style.py                |    2 +-
 aria/parser/modeling/__init__.py                |   50 +-
 aria/parser/modeling/context.py                 |   90 +-
 aria/parser/modeling/elements.py                |  128 --
 aria/parser/modeling/exceptions.py              |   22 -
 aria/parser/modeling/instance_elements.py       | 1041 -----------
 aria/parser/modeling/model_elements.py          | 1221 -------------
 aria/parser/modeling/storage.py                 |  186 --
 aria/parser/modeling/types.py                   |  146 --
 aria/parser/modeling/utils.py                   |  146 --
 aria/parser/reading/__init__.py                 |    4 +-
 aria/parser/reading/locator.py                  |   37 +-
 aria/storage/__init__.py                        |   10 +-
 aria/storage/core.py                            |   10 +-
 aria/storage/instrumentation.py                 |    9 +-
 aria/storage/modeling/__init__.py               |   35 -
 aria/storage/modeling/elements.py               |  106 --
 aria/storage/modeling/instance_elements.py      | 1288 -------------
 aria/storage/modeling/model.py                  |  223 ---
 aria/storage/modeling/orchestrator_elements.py  |  497 -----
 aria/storage/modeling/structure.py              |  320 ----
 aria/storage/modeling/template_elements.py      | 1387 --------------
 aria/storage/modeling/type.py                   |  302 ----
 aria/storage/modeling/utils.py                  |  139 --
 aria/storage_initializer.py                     |  134 --
 aria/utils/exceptions.py                        |   44 +-
 aria/utils/formatting.py                        |   19 +-
 aria/utils/uuid.py                              |   66 +
 docs/requirements.txt                           |    4 +-
 .../simple_v1_0/functions.py                    |    9 +-
 .../simple_v1_0/modeling/__init__.py            |  526 ++++--
 .../simple_v1_0/presenter.py                    |    6 +-
 tests/end2end/test_orchestrator.py              |   18 +-
 tests/end2end/test_tosca_simple_v1_0.py         |    6 +-
 tests/mock/context.py                           |    4 +-
 tests/mock/models.py                            |  253 +--
 tests/mock/operations.py                        |   46 +-
 tests/mock/topology.py                          |   88 +-
 tests/modeling/__init__.py                      |   34 +
 tests/modeling/test_mixins.py                   |  219 +++
 tests/modeling/test_model_storage.py            |  102 ++
 tests/modeling/test_models.py                   |  837 +++++++++
 tests/orchestrator/context/__init__.py          |    4 -
 tests/orchestrator/context/test_operation.py    |  203 ++-
 .../context/test_resource_render.py             |    6 +-
 tests/orchestrator/context/test_serialize.py    |   26 +-
 tests/orchestrator/context/test_toolbelt.py     |   61 +-
 tests/orchestrator/context/test_workflow.py     |   14 +-
 .../execution_plugin/test_common.py             |    4 +-
 .../orchestrator/execution_plugin/test_local.py |   23 +-
 tests/orchestrator/execution_plugin/test_ssh.py |   35 +-
 tests/orchestrator/test_runner.py               |    7 +-
 tests/orchestrator/workflows/api/test_task.py   |  189 +-
 .../workflows/builtin/test_execute_operation.py |   27 +-
 .../orchestrator/workflows/builtin/test_heal.py |   20 +-
 .../orchestrator/workflows/core/test_engine.py  |   28 +-
 tests/orchestrator/workflows/core/test_task.py  |   84 +-
 .../test_task_graph_into_exececution_graph.py   |   27 +-
 .../workflows/executor/test_executor.py         |   13 +-
 .../workflows/executor/test_process_executor.py |    6 +-
 ...process_executor_concurrent_modifications.py |   23 +-
 .../executor/test_process_executor_extension.py |   29 +-
 .../test_process_executor_tracked_changes.py    |   30 +-
 tests/parser/utils.py                           |   14 +-
 .../tosca-simple-1.0/node-cellar/workflows.py   |   17 +-
 tests/storage/__init__.py                       |   18 +-
 tests/storage/test_instrumentation.py           |   53 +-
 tests/storage/test_model_storage.py             |  103 --
 tests/storage/test_models.py                    |  875 ---------
 tests/storage/test_resource_storage.py          |  113 +-
 tests/storage/test_structures.py                |  218 ---
 113 files changed, 8704 insertions(+), 10021 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/VERSION.py
----------------------------------------------------------------------
diff --git a/aria/VERSION.py b/aria/VERSION.py
index 7e11072..9ce332c 100644
--- a/aria/VERSION.py
+++ b/aria/VERSION.py
@@ -14,8 +14,8 @@
 # limitations under the License.
 
 """
-Aria Version module:
-    * version: Aria Package version
+ARIA Version module:
+    * version: ARIA Package version
 """
 
 version = '0.1.0'  # pylint: disable=C0103

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/__init__.py
----------------------------------------------------------------------
diff --git a/aria/__init__.py b/aria/__init__.py
index 6b10501..b9251d5 100644
--- a/aria/__init__.py
+++ b/aria/__init__.py
@@ -14,7 +14,7 @@
 # limitations under the License.
 
 """
-Aria top level package
+ARIA top level package
 """
 
 import sys
@@ -27,6 +27,7 @@ from . import (
     utils,
     parser,
     storage,
+    modeling,
     orchestrator,
     cli
 )
@@ -69,48 +70,9 @@ def application_model_storage(api, api_kwargs=None, initiator=None, initiator_kw
     """
     Initiate model storage
     """
-    models_to_register = [
-        storage.modeling.model.Parameter,
-
-        storage.modeling.model.MappingTemplate,
-        storage.modeling.model.SubstitutionTemplate,
-        storage.modeling.model.ServiceTemplate,
-        storage.modeling.model.NodeTemplate,
-        storage.modeling.model.GroupTemplate,
-        storage.modeling.model.InterfaceTemplate,
-        storage.modeling.model.OperationTemplate,
-        storage.modeling.model.ArtifactTemplate,
-        storage.modeling.model.PolicyTemplate,
-        storage.modeling.model.GroupPolicyTemplate,
-        storage.modeling.model.GroupPolicyTriggerTemplate,
-        storage.modeling.model.RequirementTemplate,
-        storage.modeling.model.CapabilityTemplate,
-
-        storage.modeling.model.Mapping,
-        storage.modeling.model.Substitution,
-        storage.modeling.model.ServiceInstance,
-        storage.modeling.model.Node,
-        storage.modeling.model.Group,
-        storage.modeling.model.Interface,
-        storage.modeling.model.Operation,
-        storage.modeling.model.Capability,
-        storage.modeling.model.Artifact,
-        storage.modeling.model.Policy,
-        storage.modeling.model.GroupPolicy,
-        storage.modeling.model.GroupPolicyTrigger,
-        storage.modeling.model.Relationship,
-
-        storage.modeling.model.Execution,
-        storage.modeling.model.ServiceInstanceUpdate,
-        storage.modeling.model.ServiceInstanceUpdateStep,
-        storage.modeling.model.ServiceInstanceModification,
-        storage.modeling.model.Plugin,
-        storage.modeling.model.Task,
-        storage.modeling.model.Log
-    ]
     return storage.ModelStorage(api_cls=api,
                                 api_kwargs=api_kwargs,
-                                items=models_to_register,
+                                items=modeling.models.models_to_register,
                                 initiator=initiator,
                                 initiator_kwargs=initiator_kwargs or {})
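
A minimal, standalone sketch of the simplification above: instead of enumerating every model class at the call site, registration iterates the single models_to_register list owned by the modeling package. FakeModelStorage stands in for the registration aspect of aria.storage.ModelStorage, and the two model classes are illustrative only.

    class ServiceTemplate(object):
        __tablename__ = 'service_template'


    class Service(object):
        __tablename__ = 'service'


    # In ARIA this list is aria.modeling.models.models_to_register
    models_to_register = [ServiceTemplate, Service]


    class FakeModelStorage(object):
        """Stand-in: registers each model under its table name."""
        def __init__(self, items):
            self.registered = {}
            for model in items:
                self.registered[model.__tablename__] = model


    storage = FakeModelStorage(models_to_register)
    print(sorted(storage.registered))  # ['service', 'service_template']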
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/cli/args_parser.py
----------------------------------------------------------------------
diff --git a/aria/cli/args_parser.py b/aria/cli/args_parser.py
index 50fec39..81ee513 100644
--- a/aria/cli/args_parser.py
+++ b/aria/cli/args_parser.py
@@ -91,7 +91,7 @@ def add_parse_parser(parse):
         'consumer',
         nargs='?',
         default='validate',
-        help='"validate" (default), "presentation", "model", "types", "instance", or consumer '
+        help='"validate" (default), "presentation", "template", "types", "instance", or consumer '
              'class name (full class path or short name)')
     parse.add_argument(
         '--loader-source',
@@ -137,10 +137,11 @@ def add_workflow_parser(workflow):
         '-w', '--workflow',
         default='install',
         help='The workflow name')
-    workflow.add_argument(
-        '-i', '--service-instance-id',
-        required=False,
-        help='A unique ID for the service instance')
+    workflow.add_flag_argument(
+        'dry',
+        default=True,
+        help_true='dry run',
+        help_false='wet run')
 
 
 @sub_parser_decorator(

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/cli/commands.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands.py b/aria/cli/commands.py
index 91d748f..1eef61d 100644
--- a/aria/cli/commands.py
+++ b/aria/cli/commands.py
@@ -36,13 +36,12 @@ from ..parser.consumption import (
     ConsumerChain,
     Read,
     Validate,
-    Model,
+    ServiceTemplate,
     Types,
     Inputs,
-    Instance
+    ServiceInstance
 )
 from ..parser.loading import LiteralLocation, UriLocation
-from ..parser.modeling.storage import initialize_storage
 from ..utils.application import StorageManager
 from ..utils.caching import cachedmethod
 from ..utils.console import (puts, Colored, indent)
@@ -51,6 +50,7 @@ from ..utils.collections import OrderedDict
 from ..orchestrator import WORKFLOW_DECORATOR_RESERVED_ARGUMENTS
 from ..orchestrator.runner import Runner
 from ..orchestrator.workflows.builtin import BUILTIN_WORKFLOWS
+from .dry import convert_to_dry
 
 from .exceptions import (
     AriaCliFormatInputsError,
@@ -157,14 +157,14 @@ class ParseCommand(BaseCommand):
             dumper = None
         elif consumer_class_name == 'presentation':
             dumper = consumer.consumers[0]
-        elif consumer_class_name == 'model':
-            consumer.append(Model)
+        elif consumer_class_name == 'template':
+            consumer.append(ServiceTemplate)
         elif consumer_class_name == 'types':
-            consumer.append(Model, Types)
+            consumer.append(ServiceTemplate, Types)
         elif consumer_class_name == 'instance':
-            consumer.append(Model, Inputs, Instance)
+            consumer.append(ServiceTemplate, Inputs, ServiceInstance)
         else:
-            consumer.append(Model, Inputs, Instance)
+            consumer.append(ServiceTemplate, Inputs, ServiceInstance)
             consumer.append(import_fullname(consumer_class_name))
 
         if dumper is None:
@@ -211,16 +211,17 @@ class WorkflowCommand(BaseCommand):
     def __call__(self, args_namespace, unknown_args):
         super(WorkflowCommand, self).__call__(args_namespace, unknown_args)
 
-        service_instance_id = args_namespace.service_instance_id or 1 
         context = self._parse(args_namespace.uri)
         workflow_fn, inputs = self._get_workflow(context, args_namespace.workflow)
-        self._run(context, args_namespace.workflow, workflow_fn, inputs, service_instance_id)
+        self._dry = args_namespace.dry
+        self._run(context, args_namespace.workflow, workflow_fn, inputs)
     
     def _parse(self, uri):
         # Parse
         context = ConsumptionContext()
         context.presentation.location = UriLocation(uri)
-        consumer = ConsumerChain(context, (Read, Validate, Model, Inputs, Instance))
+        consumer = ConsumerChain(context, (Read, Validate, ServiceTemplate, Inputs,
+                                           ServiceInstance))
         consumer.consume()
 
         if context.validation.dump_issues():
@@ -230,43 +231,45 @@ class WorkflowCommand(BaseCommand):
     
     def _get_workflow(self, context, workflow_name):
         if workflow_name in BUILTIN_WORKFLOWS:
-            workflow_fn = import_fullname('aria.orchestrator.workflows.builtin.%s' % workflow_name)
+            workflow_fn = import_fullname('aria.orchestrator.workflows.builtin.{0}'.format(
+                workflow_name))
             inputs = {}
         else:
+            workflow = context.modeling.instance.policies.get(workflow_name)
+            if workflow is None:
+                raise AttributeError('workflow policy does not exist: "{0}"'.format(workflow_name))
+            if workflow.type.role != 'workflow':
+                raise AttributeError('policy is not a workflow: "{0}"'.format(workflow_name))
+
             try:
-                policy = context.modeling.instance.policies[workflow_name]
-            except KeyError:
-                raise AttributeError('workflow policy does not exist: "%s"' % workflow_name)
-            if context.modeling.policy_types.get_role(policy.type_name) != 'workflow':
-                raise AttributeError('policy is not a workflow: "%s"' % workflow_name)
-    
-            try:
-                sys.path.append(policy.properties['implementation'].value)
+                sys.path.append(workflow.properties['implementation'].value)
             except KeyError:
                 pass
     
-            workflow_fn = import_fullname(policy.properties['function'].value)
+            workflow_fn = import_fullname(workflow.properties['function'].value)
     
-            for k in policy.properties:
+            for k in workflow.properties:
                 if k in WORKFLOW_DECORATOR_RESERVED_ARGUMENTS:
-                    raise AttributeError('workflow policy "%s" defines a reserved property: "%s"' %
-                                         (workflow_name, k))
+                    raise AttributeError('workflow policy "{0}" defines a reserved property: "{1}"'
+                                         .format(workflow_name, k))
     
             inputs = OrderedDict([
-                (k, v.value) for k, v in policy.properties.iteritems()
+                (k, v.value) for k, v in workflow.properties.iteritems()
                 if k not in WorkflowCommand.WORKFLOW_POLICY_INTERNAL_PROPERTIES
             ])
         
         return workflow_fn, inputs
     
-    def _run(self, context, workflow_name, workflow_fn, inputs, service_instance_id):
+    def _run(self, context, workflow_name, workflow_fn, inputs):
         # Storage
         def _initialize_storage(model_storage):
-            initialize_storage(context, model_storage, service_instance_id)
+            if self._dry:
+                convert_to_dry(context.modeling.instance)
+            context.modeling.store(model_storage)
 
         # Create runner
         runner = Runner(workflow_name, workflow_fn, inputs, _initialize_storage,
-                        service_instance_id)
+                        lambda: context.modeling.instance.id)
         
         # Run
         runner.run()
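
A standalone sketch of how the reworked _get_workflow turns a workflow policy's properties into workflow inputs: reserved decorator arguments are rejected, internal properties are dropped, and everything else is passed through by value. The reserved/internal name sets and FakeParameter are illustrative stand-ins, not the actual ARIA values.

    from collections import OrderedDict

    # Illustrative stand-ins for the real constants
    WORKFLOW_DECORATOR_RESERVED_ARGUMENTS = set(['ctx', 'graph'])
    WORKFLOW_POLICY_INTERNAL_PROPERTIES = set(['implementation', 'function'])


    class FakeParameter(object):
        def __init__(self, value):
            self.value = value


    properties = OrderedDict([
        ('function', FakeParameter('my_package.my_workflows.scale_out')),
        ('batch_size', FakeParameter(10)),
    ])

    for name in properties:
        if name in WORKFLOW_DECORATOR_RESERVED_ARGUMENTS:
            raise AttributeError(
                'workflow policy defines a reserved property: "{0}"'.format(name))

    inputs = OrderedDict(
        (name, parameter.value) for name, parameter in properties.items()
        if name not in WORKFLOW_POLICY_INTERNAL_PROPERTIES)

    print(inputs)  # OrderedDict([('batch_size', 10)])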

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/cli/dry.py
----------------------------------------------------------------------
diff --git a/aria/cli/dry.py b/aria/cli/dry.py
new file mode 100644
index 0000000..82faf42
--- /dev/null
+++ b/aria/cli/dry.py
@@ -0,0 +1,88 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from threading import RLock
+
+from ..modeling import models
+from ..orchestrator.decorators import operation
+from ..utils.collections import OrderedDict
+from ..utils.console import puts, Colored
+from ..utils.formatting import safe_repr
+
+
+_TERMINAL_LOCK = RLock()
+
+
+def convert_to_dry(service):
+    """
+    Converts all operations on the service (on workflows, node interfaces, and relationship
+    interfaces) to run dryly.
+    """
+
+    for workflow in service.workflows:
+        convert_operation_to_dry(workflow)
+
+    for node in service.nodes.itervalues():
+        for interface in node.interfaces.itervalues():
+            for oper in interface.operations.itervalues():
+                convert_operation_to_dry(oper)
+        for relationship in node.outbound_relationships:
+            for interface in relationship.interfaces.itervalues():
+                for oper in interface.operations.itervalues():
+                    convert_operation_to_dry(oper)
+
+
+def convert_operation_to_dry(oper):
+    """
+    Converts a single :class:`Operation` to run dryly.
+    """
+
+    plugin = oper.plugin_specification.name \
+        if oper.plugin_specification is not None else None
+    if oper.inputs is None:
+        oper.inputs = OrderedDict()
+    oper.inputs['_implementation'] = models.Parameter(name='_implementation',
+                                                      type_name='string',
+                                                      value=oper.implementation)
+    oper.inputs['_plugin'] = models.Parameter(name='_plugin',
+                                              type_name='string',
+                                              value=plugin)
+    oper.implementation = '{0}.{1}'.format(__name__, 'dry_operation')
+    oper.plugin_specification = None
+
+
+@operation
+def dry_operation(ctx, _plugin, _implementation, **kwargs):
+    """
+    The dry operation simply prints out information about the operation to the console.
+    """
+
+    with _TERMINAL_LOCK:
+        print ctx.name
+        if hasattr(ctx, 'relationship'):
+            puts('> Relationship: {0} -> {1}'.format(
+                Colored.red(ctx.relationship.source_node.name),
+                Colored.red(ctx.relationship.target_node.name)))
+        else:
+            puts('> Node: {0}'.format(Colored.red(ctx.node.name)))
+        puts('  Operation: {0}'.format(Colored.green(ctx.name)))
+        _dump_implementation(_plugin, _implementation)
+
+
+def _dump_implementation(plugin, implementation):
+    if plugin:
+        puts('  Plugin: {0}'.format(Colored.magenta(plugin, bold=True)))
+    if implementation:
+        puts('  Implementation: {0}'.format(Colored.magenta(safe_repr(implementation))))
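
A standalone sketch of what convert_operation_to_dry does to one operation: the original implementation and plugin name are stashed as the '_implementation' and '_plugin' inputs, and the implementation is redirected to the dry_operation printer. FakeOperation and FakeParameter stand in for the ORM models used by the real module.

    from collections import OrderedDict


    class FakeParameter(object):
        def __init__(self, name, type_name, value):
            self.name, self.type_name, self.value = name, type_name, value


    class FakeOperation(object):
        def __init__(self, implementation, plugin_specification=None):
            self.implementation = implementation
            self.plugin_specification = plugin_specification
            self.inputs = None


    def convert_operation_to_dry(oper):
        plugin = oper.plugin_specification.name \
            if oper.plugin_specification is not None else None
        if oper.inputs is None:
            oper.inputs = OrderedDict()
        oper.inputs['_implementation'] = FakeParameter('_implementation', 'string',
                                                       oper.implementation)
        oper.inputs['_plugin'] = FakeParameter('_plugin', 'string', plugin)
        oper.implementation = 'aria.cli.dry.dry_operation'
        oper.plugin_specification = None


    oper = FakeOperation('scripts/configure.sh')
    convert_operation_to_dry(oper)
    print(oper.implementation)                   # aria.cli.dry.dry_operation
    print(oper.inputs['_implementation'].value)  # scripts/configure.sh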

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/exceptions.py b/aria/exceptions.py
index 28f8be9..a180ce1 100644
--- a/aria/exceptions.py
+++ b/aria/exceptions.py
@@ -14,8 +14,8 @@
 # limitations under the License.
 
 """
-Aria exceptions module
-Every sub-package in Aria has a module with its exceptions.
+ARIA exceptions module
+Every sub-package in ARIA has a module with its exceptions.
 aria.exceptions module conveniently collects all these exceptions for easier imports.
 """
 
@@ -43,4 +43,4 @@ class AriaException(Exception):
             if cause == e:
                 # Make sure it's our traceback
                 cause_traceback = traceback
-        self.cause_tb = cause_traceback
+        self.cause_traceback = cause_traceback

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/logger.py
----------------------------------------------------------------------
diff --git a/aria/logger.py b/aria/logger.py
index 42e3679..e3039f5 100644
--- a/aria/logger.py
+++ b/aria/logger.py
@@ -167,7 +167,7 @@ class _SQLAlchemyHandler(logging.Handler):
             task_fk=record.task_id,
             actor=record.prefix,
             level=record.levelname,
-            msg=record.msg,
+            msg=str(record.msg),
             created_at=created_at,
         )
         self._session.add(log)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/aria/modeling/__init__.py b/aria/modeling/__init__.py
new file mode 100644
index 0000000..4dfc39d
--- /dev/null
+++ b/aria/modeling/__init__.py
@@ -0,0 +1,48 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import namedtuple
+
+from . import (
+    mixins,
+    types,
+    models,
+    service_template as _service_template_bases,
+    service_instance as _service_instance_bases,
+    service_changes as _service_changes_bases,
+    service_common as _service_common_bases,
+    orchestration as _orchestration_bases
+)
+
+
+_ModelBasesCls = namedtuple('ModelBase', 'service_template,'
+                                         'service_instance,'
+                                         'service_changes,'
+                                         'service_common,'
+                                         'orchestration')
+
+model_bases = _ModelBasesCls(service_template=_service_template_bases,
+                             service_instance=_service_instance_bases,
+                             service_changes=_service_changes_bases,
+                             service_common=_service_common_bases,
+                             orchestration=_orchestration_bases)
+
+
+__all__ = (
+    'mixins',
+    'types',
+    'models',
+    'model_bases',
+)
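
A short usage sketch of the model_bases namedtuple, assuming the aria package at this commit is importable: its fields are the module groups, so callers can reach base classes such as NodeTemplateBase and ExecutionBase without importing each module directly.

    from aria.modeling import model_bases

    # Fields mirror the namedtuple definition above
    print(model_bases.service_template.NodeTemplateBase)
    print(model_bases.orchestration.ExecutionBase)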

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/modeling/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/modeling/exceptions.py b/aria/modeling/exceptions.py
new file mode 100644
index 0000000..6931c78
--- /dev/null
+++ b/aria/modeling/exceptions.py
@@ -0,0 +1,34 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ..exceptions import AriaException
+
+
+class ModelingException(AriaException):
+    """
+    ARIA modeling exception.
+    """
+
+
+class ValueFormatException(ModelingException):
+    """
+    ARIA modeling exception: the value is in the wrong format.
+    """
+
+
+class CannotEvaluateFunctionException(ModelingException):
+    """
+    ARIA modeling exception: cannot evaluate the function at this time.
+    """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/modeling/functions.py
----------------------------------------------------------------------
diff --git a/aria/modeling/functions.py b/aria/modeling/functions.py
new file mode 100644
index 0000000..02f4454
--- /dev/null
+++ b/aria/modeling/functions.py
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+class Function(object):
+    """
+    An intrinsic function.
+
+    Serves as a placeholder for a value that should eventually be derived by calling the function.
+    """
+
+    @property
+    def as_raw(self):
+        raise NotImplementedError
+
+    def _evaluate(self, context, container):
+        raise NotImplementedError
+
+    def __deepcopy__(self, memo):
+        # Circumvent cloning in order to maintain our state
+        return self
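
A standalone sketch of how an intrinsic function might build on the Function placeholder above. The base class is copied from this diff; HypotheticalGetInput is purely illustrative (not an actual ARIA class), and its context argument is just a dict of input values here.

    import copy


    class Function(object):
        """Copied from the new aria/modeling/functions.py above."""

        @property
        def as_raw(self):
            raise NotImplementedError

        def _evaluate(self, context, container):
            raise NotImplementedError

        def __deepcopy__(self, memo):
            # Circumvent cloning in order to maintain our state
            return self


    class HypotheticalGetInput(Function):
        """Illustrative subclass: resolves to a named input at evaluation time."""

        def __init__(self, input_name):
            self.input_name = input_name

        @property
        def as_raw(self):
            return {'get_input': self.input_name}

        def _evaluate(self, context, container):
            # Here `context` is simply a dict of input values
            return context[self.input_name]


    func = HypotheticalGetInput('port')
    assert copy.deepcopy(func) is func           # placeholders survive deep copies
    print(func.as_raw)                           # {'get_input': 'port'}
    print(func._evaluate({'port': 8080}, None))  # 8080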

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/modeling/mixins.py
----------------------------------------------------------------------
diff --git a/aria/modeling/mixins.py b/aria/modeling/mixins.py
new file mode 100644
index 0000000..e6db5a3
--- /dev/null
+++ b/aria/modeling/mixins.py
@@ -0,0 +1,142 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+classes:
+    * ModelMixin - abstract model implementation.
+    * ModelIDMixin - abstract model implementation with IDs.
+"""
+
+from sqlalchemy.ext import associationproxy
+from sqlalchemy import (
+    Column,
+    Integer,
+    Text
+)
+
+from . import utils
+
+
+class ModelMixin(object):
+
+    @utils.classproperty
+    def __modelname__(cls):                                                                         # pylint: disable=no-self-argument
+        return getattr(cls, '__mapiname__', cls.__tablename__)
+
+    @classmethod
+    def id_column_name(cls):
+        raise NotImplementedError
+
+    @classmethod
+    def name_column_name(cls):
+        raise NotImplementedError
+
+    def to_dict(self, fields=None, suppress_error=False):
+        """
+        Return a dict representation of the model
+
+        :param suppress_error: If set to True, sets ``None`` to attributes that it's unable to
+                               retrieve (e.g., if a relationship wasn't established yet, and so it's
+                               impossible to access a property through it)
+        """
+
+        res = dict()
+        fields = fields or self.fields()
+        for field in fields:
+            try:
+                field_value = getattr(self, field)
+            except AttributeError:
+                if suppress_error:
+                    field_value = None
+                else:
+                    raise
+            if isinstance(field_value, list):
+                field_value = list(field_value)
+            elif isinstance(field_value, dict):
+                field_value = dict(field_value)
+            elif isinstance(field_value, ModelMixin):
+                field_value = field_value.to_dict()
+            res[field] = field_value
+
+        return res
+
+    @classmethod
+    def fields(cls):
+        """
+        Return the list of field names for this table
+
+        Mostly for backwards compatibility in the code (that uses ``fields``)
+        """
+
+        fields = set(cls._iter_association_proxies())
+        fields.update(cls.__table__.columns.keys())
+        return fields - set(getattr(cls, '__private_fields__', []))
+
+    @classmethod
+    def _iter_association_proxies(cls):
+        for col, value in vars(cls).items():
+            if isinstance(value, associationproxy.AssociationProxy):
+                yield col
+
+    def __repr__(self):
+        return '<{cls} id=`{id}`>'.format(
+            cls=self.__class__.__name__,
+            id=getattr(self, self.name_column_name()))
+
+
+class ModelIDMixin(object):
+    id = Column(Integer, primary_key=True, autoincrement=True)
+    name = Column(Text, index=True)
+
+    @classmethod
+    def id_column_name(cls):
+        return 'id'
+
+    @classmethod
+    def name_column_name(cls):
+        return 'name'
+
+
+class InstanceModelMixin(ModelMixin):
+    """
+    Mixin for :class:`ServiceInstance` models.
+
+    All models support validation, diagnostic dumping, and representation as
+    raw data (which can be translated into JSON or YAML) via ``as_raw``.
+    """
+
+    @property
+    def as_raw(self):
+        raise NotImplementedError
+
+    def validate(self):
+        pass
+
+    def coerce_values(self, container, report_issues):
+        pass
+
+    def dump(self):
+        pass
+
+
+class TemplateModelMixin(InstanceModelMixin):
+    """
+    Mixin for :class:`ServiceTemplate` models.
+
+    All template models can be instantiated into :class:`ServiceInstance` models.
+    """
+
+    def instantiate(self, container):
+        raise NotImplementedError
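
A small usage sketch of ModelMixin with a throwaway mapped class, assuming SQLAlchemy and the aria package at this commit are importable; Widget and its description column are illustrative only.

    from sqlalchemy import Column, Text

    from aria.modeling.mixins import ModelMixin
    from aria.modeling.models import aria_declarative_base


    class Widget(aria_declarative_base, ModelMixin):
        """Illustrative model: the id and name columns come from ModelIDMixin via the base."""
        __tablename__ = 'widget'
        description = Column(Text)


    widget = Widget(name='example', description='hello')
    print(sorted(widget.fields()))  # ['description', 'id', 'name']
    print(widget.to_dict())         # {'description': 'hello', 'id': None, 'name': 'example'} (key order may vary)
    print(repr(widget))             # <Widget id=`example`> (repr shows the name column)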

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/modeling/models.py
----------------------------------------------------------------------
diff --git a/aria/modeling/models.py b/aria/modeling/models.py
new file mode 100644
index 0000000..a01783b
--- /dev/null
+++ b/aria/modeling/models.py
@@ -0,0 +1,286 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=abstract-method
+
+from sqlalchemy.ext.declarative import declarative_base
+
+from . import (
+    service_template,
+    service_instance,
+    service_changes,
+    service_common,
+    orchestration,
+    mixins,
+)
+
+
+aria_declarative_base = declarative_base(cls=mixins.ModelIDMixin)
+
+
+# See also models_to_register at the bottom of this file
+__all__ = (
+    'aria_declarative_base',
+    'models_to_register',
+
+    # Service template models
+    'ServiceTemplate',
+    'NodeTemplate',
+    'GroupTemplate',
+    'PolicyTemplate',
+    'SubstitutionTemplate',
+    'SubstitutionTemplateMapping',
+    'RequirementTemplate',
+    'RelationshipTemplate',
+    'CapabilityTemplate',
+    'InterfaceTemplate',
+    'OperationTemplate',
+    'ArtifactTemplate',
+
+    # Service instance models
+    'Service',
+    'Node',
+    'Group',
+    'Policy',
+    'Substitution',
+    'SubstitutionMapping',
+    'Relationship',
+    'Capability',
+    'Interface',
+    'Operation',
+    'Artifact',
+
+    # Service changes models
+    'ServiceUpdate',
+    'ServiceUpdateStep',
+    'ServiceModification',
+
+    # Common service models
+    'Parameter',
+    'Type',
+    'Metadata',
+    'PluginSpecification',
+
+    # Orchestration models
+    'Execution',
+    'Plugin',
+    'Task',
+    'Log'
+)
+
+
+# region service template models
+
+class ServiceTemplate(aria_declarative_base, service_template.ServiceTemplateBase):
+    pass
+
+
+class NodeTemplate(aria_declarative_base, service_template.NodeTemplateBase):
+    pass
+
+
+class GroupTemplate(aria_declarative_base, service_template.GroupTemplateBase):
+    pass
+
+
+class PolicyTemplate(aria_declarative_base, service_template.PolicyTemplateBase):
+    pass
+
+
+class SubstitutionTemplate(aria_declarative_base, service_template.SubstitutionTemplateBase):
+    pass
+
+
+class SubstitutionTemplateMapping(aria_declarative_base,
+                                  service_template.SubstitutionTemplateMappingBase):
+    pass
+
+
+class RequirementTemplate(aria_declarative_base, service_template.RequirementTemplateBase):
+    pass
+
+
+class RelationshipTemplate(aria_declarative_base, service_template.RelationshipTemplateBase):
+    pass
+
+
+class CapabilityTemplate(aria_declarative_base, service_template.CapabilityTemplateBase):
+    pass
+
+
+class InterfaceTemplate(aria_declarative_base, service_template.InterfaceTemplateBase):
+    pass
+
+
+class OperationTemplate(aria_declarative_base, service_template.OperationTemplateBase):
+    pass
+
+
+class ArtifactTemplate(aria_declarative_base, service_template.ArtifactTemplateBase):
+    pass
+
+# endregion
+
+
+# region service instance models
+
+class Service(aria_declarative_base, service_instance.ServiceBase):
+    pass
+
+
+class Node(aria_declarative_base, service_instance.NodeBase):
+    pass
+
+
+class Group(aria_declarative_base, service_instance.GroupBase):
+    pass
+
+
+class Policy(aria_declarative_base, service_instance.PolicyBase):
+    pass
+
+
+class Substitution(aria_declarative_base, service_instance.SubstitutionBase):
+    pass
+
+
+class SubstitutionMapping(aria_declarative_base, service_instance.SubstitutionMappingBase):
+    pass
+
+
+class Relationship(aria_declarative_base, service_instance.RelationshipBase):
+    pass
+
+
+class Capability(aria_declarative_base, service_instance.CapabilityBase):
+    pass
+
+
+class Interface(aria_declarative_base, service_instance.InterfaceBase):
+    pass
+
+
+class Operation(aria_declarative_base, service_instance.OperationBase):
+    pass
+
+
+class Artifact(aria_declarative_base, service_instance.ArtifactBase):
+    pass
+
+# endregion
+
+
+# region service changes models
+
+class ServiceUpdate(aria_declarative_base, service_changes.ServiceUpdateBase):
+    pass
+
+
+class ServiceUpdateStep(aria_declarative_base, service_changes.ServiceUpdateStepBase):
+    pass
+
+
+class ServiceModification(aria_declarative_base, service_changes.ServiceModificationBase):
+    pass
+
+# endregion
+
+
+# region common service models
+
+class Parameter(aria_declarative_base, service_common.ParameterBase):
+    pass
+
+
+class Type(aria_declarative_base, service_common.TypeBase):
+    pass
+
+
+class Metadata(aria_declarative_base, service_common.MetadataBase):
+    pass
+
+
+class PluginSpecification(aria_declarative_base, service_common.PluginSpecificationBase):
+    pass
+
+# endregion
+
+
+# region orchestration models
+
+class Execution(aria_declarative_base, orchestration.ExecutionBase):
+    pass
+
+
+class Plugin(aria_declarative_base, orchestration.PluginBase):
+    pass
+
+
+class Task(aria_declarative_base, orchestration.TaskBase):
+    pass
+
+
+class Log(aria_declarative_base, orchestration.LogBase):
+    pass
+
+# endregion
+
+
+# See also __all__ at the top of this file
+models_to_register = [
+    # Service template models
+    ServiceTemplate,
+    NodeTemplate,
+    GroupTemplate,
+    PolicyTemplate,
+    SubstitutionTemplate,
+    SubstitutionTemplateMapping,
+    RequirementTemplate,
+    RelationshipTemplate,
+    CapabilityTemplate,
+    InterfaceTemplate,
+    OperationTemplate,
+    ArtifactTemplate,
+
+    # Service instance models
+    Service,
+    Node,
+    Group,
+    Policy,
+    SubstitutionMapping,
+    Substitution,
+    Relationship,
+    Capability,
+    Interface,
+    Operation,
+    Artifact,
+
+    # Service changes models
+    ServiceUpdate,
+    ServiceUpdateStep,
+    ServiceModification,
+
+    # Common service models
+    Parameter,
+    Type,
+    Metadata,
+    PluginSpecification,
+
+    # Orchestration models
+    Execution,
+    Plugin,
+    Task,
+    Log
+]
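
A minimal, standalone SQLAlchemy sketch (no ARIA imports) of the combination pattern used throughout this file: each concrete model inherits both the shared declarative base, which contributes the id/name columns through cls=, and a plain *Base mixin class that carries that table's own columns.

    from sqlalchemy import Column, Integer, Text, create_engine
    from sqlalchemy.ext.declarative import declarative_base


    class IDMixin(object):
        """Analogous to mixins.ModelIDMixin."""
        id = Column(Integer, primary_key=True, autoincrement=True)
        name = Column(Text, index=True)


    Base = declarative_base(cls=IDMixin)  # analogous to aria_declarative_base


    class NodeBase(object):
        """Analogous to service_instance.NodeBase: holds the table's own columns."""
        __tablename__ = 'node'
        state = Column(Text)


    class Node(Base, NodeBase):  # analogous to models.Node above
        pass


    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    print(sorted(column.name for column in Node.__table__.columns))  # ['id', 'name', 'state']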

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/modeling/orchestration.py
----------------------------------------------------------------------
diff --git a/aria/modeling/orchestration.py b/aria/modeling/orchestration.py
new file mode 100644
index 0000000..0277756
--- /dev/null
+++ b/aria/modeling/orchestration.py
@@ -0,0 +1,351 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+classes:
+    * Execution - execution implementation model.
+    * Plugin - plugin implementation model.
+    * Task - a task
+"""
+
+# pylint: disable=no-self-argument, no-member, abstract-method
+
+from datetime import datetime
+
+from sqlalchemy import (
+    Column,
+    Integer,
+    Text,
+    DateTime,
+    Boolean,
+    Enum,
+    String,
+    Float,
+    orm,
+)
+from sqlalchemy.ext.associationproxy import association_proxy
+from sqlalchemy.ext.declarative import declared_attr
+
+from ..orchestrator.exceptions import (TaskAbortException, TaskRetryException)
+from .types import (List, Dict)
+from .mixins import ModelMixin
+from . import relationship
+
+
+class ExecutionBase(ModelMixin):
+    """
+    Execution model representation.
+    """
+
+    __tablename__ = 'execution'
+
+    __private_fields__ = ['service_fk',
+                          'service_name',
+                          'service_template',
+                          'service_template_name']
+
+    TERMINATED = 'terminated'
+    FAILED = 'failed'
+    CANCELLED = 'cancelled'
+    PENDING = 'pending'
+    STARTED = 'started'
+    CANCELLING = 'cancelling'
+    FORCE_CANCELLING = 'force_cancelling'
+
+    STATES = [TERMINATED, FAILED, CANCELLED, PENDING, STARTED, CANCELLING, FORCE_CANCELLING]
+    END_STATES = [TERMINATED, FAILED, CANCELLED]
+    ACTIVE_STATES = [state for state in STATES if state not in END_STATES]
+
+    VALID_TRANSITIONS = {
+        PENDING: [STARTED, CANCELLED],
+        STARTED: END_STATES + [CANCELLING],
+        CANCELLING: END_STATES + [FORCE_CANCELLING]
+    }
+
+    @orm.validates('status')
+    def validate_status(self, key, value):
+        """Validation function that verifies execution status transitions are OK"""
+        try:
+            current_status = getattr(self, key)
+        except AttributeError:
+            return
+        valid_transitions = self.VALID_TRANSITIONS.get(current_status, [])
+        if all([current_status is not None,
+                current_status != value,
+                value not in valid_transitions]):
+            raise ValueError('Cannot change execution status from {current} to {new}'.format(
+                current=current_status,
+                new=value))
+        return value
+
+    created_at = Column(DateTime, index=True)
+    started_at = Column(DateTime, nullable=True, index=True)
+    ended_at = Column(DateTime, nullable=True, index=True)
+    error = Column(Text, nullable=True)
+    is_system_workflow = Column(Boolean, nullable=False, default=False)
+    parameters = Column(Dict)
+    status = Column(Enum(*STATES, name='execution_status'), default=PENDING)
+    workflow_name = Column(Text)
+
+    @declared_attr
+    def service(cls):
+        return relationship.many_to_one(cls, 'service')
+
+    # region foreign keys
+
+    @declared_attr
+    def service_fk(cls):
+        return relationship.foreign_key('service')
+
+    # endregion
+
+    # region association proxies
+
+    @declared_attr
+    def service_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('service', cls.name_column_name())
+
+    @declared_attr
+    def service_template(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('service', 'service_template')
+
+    @declared_attr
+    def service_template_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('service', 'service_template_name')
+
+    # endregion
+
+    def __str__(self):
+        return '<{0} id=`{1}` (status={2})>'.format(
+            self.__class__.__name__,
+            getattr(self, self.name_column_name()),
+            self.status
+        )
+
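For orientation, here is a minimal usage sketch of the status transition validation above. It mirrors test_execution_status_transition in tests/modeling/test_models.py later in this commit, and assumes the concrete Execution class exposed by aria.modeling.models:

    from datetime import datetime

    from aria.modeling.models import Execution

    execution = Execution(workflow_name='install', parameters={},
                          status=Execution.PENDING, created_at=datetime.utcnow())

    execution.status = Execution.STARTED       # PENDING -> STARTED is a valid transition
    try:
        execution.status = Execution.PENDING   # STARTED -> PENDING is not
    except ValueError as error:
        print(error)
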
+
+class PluginBase(ModelMixin):
+    """
+    Plugin model representation.
+    """
+
+    __tablename__ = 'plugin'
+
+    archive_name = Column(Text, nullable=False, index=True)
+    distribution = Column(Text)
+    distribution_release = Column(Text)
+    distribution_version = Column(Text)
+    package_name = Column(Text, nullable=False, index=True)
+    package_source = Column(Text)
+    package_version = Column(Text)
+    supported_platform = Column(Text)
+    supported_py_versions = Column(List)
+    uploaded_at = Column(DateTime, nullable=False, index=True)
+    wheels = Column(List, nullable=False)
+
+
+class TaskBase(ModelMixin):
+    """
+    Task model representation.
+    """
+
+    __tablename__ = 'task'
+
+    __private_fields__ = ['node_fk',
+                          'relationship_fk',
+                          'plugin_fk',
+                          'execution_fk',
+                          'node_name',
+                          'relationship_name',
+                          'execution_name']
+
+    PENDING = 'pending'
+    RETRYING = 'retrying'
+    SENT = 'sent'
+    STARTED = 'started'
+    SUCCESS = 'success'
+    FAILED = 'failed'
+    STATES = (
+        PENDING,
+        RETRYING,
+        SENT,
+        STARTED,
+        SUCCESS,
+        FAILED,
+    )
+
+    WAIT_STATES = [PENDING, RETRYING]
+    END_STATES = [SUCCESS, FAILED]
+
+    RUNS_ON_SOURCE = 'source'
+    RUNS_ON_TARGET = 'target'
+    RUNS_ON_NODE = 'node'
+    RUNS_ON = (RUNS_ON_NODE, RUNS_ON_SOURCE, RUNS_ON_TARGET)
+
+    INFINITE_RETRIES = -1
+
+    @declared_attr
+    def node(cls):
+        return relationship.many_to_one(cls, 'node')
+
+    @declared_attr
+    def relationship(cls):
+        return relationship.many_to_one(cls, 'relationship')
+
+    @declared_attr
+    def plugin(cls):
+        return relationship.many_to_one(cls, 'plugin')
+
+    @declared_attr
+    def execution(cls):
+        return relationship.many_to_one(cls, 'execution')
+
+    @declared_attr
+    def inputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+
+    status = Column(Enum(*STATES, name='status'), default=PENDING)
+
+    # the default must be the callable itself so it is evaluated per row, not once at import time
+    due_at = Column(DateTime, nullable=False, index=True, default=datetime.utcnow)
+    started_at = Column(DateTime, default=None)
+    ended_at = Column(DateTime, default=None)
+    max_attempts = Column(Integer, default=1)
+    retry_count = Column(Integer, default=0)
+    retry_interval = Column(Float, default=0)
+    ignore_failure = Column(Boolean, default=False)
+
+    # Operation specific fields
+    implementation = Column(String)
+    _runs_on = Column(Enum(*RUNS_ON, name='runs_on'), name='runs_on')
+
+    @property
+    def runs_on(self):
+        if self._runs_on == self.RUNS_ON_NODE:
+            return self.node
+        elif self._runs_on == self.RUNS_ON_SOURCE:
+            return self.relationship.source_node  # pylint: disable=no-member
+        elif self._runs_on == self.RUNS_ON_TARGET:
+            return self.relationship.target_node  # pylint: disable=no-member
+        return None
+
+    @property
+    def actor(self):
+        """
+        Return the actor of the task: the node for node tasks, or the relationship for
+        relationship tasks
+        """
+        return self.node or self.relationship
+
+    @orm.validates('max_attempts')
+    def validate_max_attempts(self, _, value):                                  # pylint: disable=no-self-use
+        """Validates that max attempts is either -1 or a positive number"""
+        if value < 1 and value != TaskBase.INFINITE_RETRIES:
+            raise ValueError('Max attempts can be either -1 (infinite) or any positive number. '
+                             'Got {value}'.format(value=value))
+        return value
+
+    # region foreign keys
+
+    @declared_attr
+    def node_fk(cls):
+        return relationship.foreign_key('node', nullable=True)
+
+    @declared_attr
+    def relationship_fk(cls):
+        return relationship.foreign_key('relationship', nullable=True)
+
+    @declared_attr
+    def plugin_fk(cls):
+        return relationship.foreign_key('plugin', nullable=True)
+
+    @declared_attr
+    def execution_fk(cls):
+        return relationship.foreign_key('execution', nullable=True)
+
+    # endregion
+
+    # region association proxies
+
+    @declared_attr
+    def node_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('node', cls.name_column_name())
+
+    @declared_attr
+    def relationship_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('relationship', cls.name_column_name())
+
+    @declared_attr
+    def execution_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('execution', cls.name_column_name())
+
+    # endregion
+
+    @classmethod
+    def for_node(cls, instance, runs_on, **kwargs):
+        return cls(node=instance, _runs_on=runs_on, **kwargs)
+
+    @classmethod
+    def for_relationship(cls, instance, runs_on, **kwargs):
+        return cls(relationship=instance, _runs_on=runs_on, **kwargs)
+
+    @staticmethod
+    def abort(message=None):
+        raise TaskAbortException(message)
+
+    @staticmethod
+    def retry(message=None, retry_interval=None):
+        raise TaskRetryException(message, retry_interval=retry_interval)
+
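As a small illustration of the max_attempts validation and the INFINITE_RETRIES sentinel above (a sketch only, assuming the concrete Task class exposed by aria.modeling.models):

    from aria.modeling.models import Task

    task = Task(max_attempts=Task.INFINITE_RETRIES)   # -1 means retry forever
    task.max_attempts = 5                             # any positive number is accepted

    try:
        task.max_attempts = 0                         # neither -1 nor positive
    except ValueError as error:
        print(error)
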
+
+class LogBase(ModelMixin):
+    """
+    Log model representation.
+    """
+
+    __tablename__ = 'log'
+
+    __private_fields__ = ['execution_fk',
+                          'task_fk']
+
+    @declared_attr
+    def execution(cls):
+        return relationship.many_to_one(cls, 'execution')
+
+    @declared_attr
+    def task(cls):
+        return relationship.many_to_one(cls, 'task')
+
+    level = Column(String)
+    msg = Column(String)
+    created_at = Column(DateTime, index=True)
+    actor = Column(String)
+
+    # region foreign keys
+
+    @declared_attr
+    def execution_fk(cls):
+        return relationship.foreign_key('execution')
+
+    @declared_attr
+    def task_fk(cls):
+        return relationship.foreign_key('task', nullable=True)
+
+    # endregion
+
+    def __repr__(self):
+        return "<{self.created_at}: [{self.level}] @{self.actor}> {msg}".format(
+            self=self, msg=self.msg[:50])

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/modeling/relationship.py
----------------------------------------------------------------------
diff --git a/aria/modeling/relationship.py b/aria/modeling/relationship.py
new file mode 100644
index 0000000..bed1599
--- /dev/null
+++ b/aria/modeling/relationship.py
@@ -0,0 +1,402 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=invalid-name, redefined-outer-name
+
+from sqlalchemy.orm import relationship, backref
+from sqlalchemy.orm.collections import attribute_mapped_collection
+from sqlalchemy import (
+    Column,
+    ForeignKey,
+    Integer,
+    Table
+)
+
+from ..utils import formatting
+
+
+def foreign_key(other_table,
+                nullable=False):
+    """
+    Declare a foreign key property, which will also create a foreign key column in the table with
+    the name of the property. By convention the property name should end in "_fk".
+
+    You are required to explicitly create foreign keys in order to allow for one-to-one,
+    one-to-many, and many-to-one relationships (but not for many-to-many relationships). If you do
+    not do so, SQLAlchemy will fail to create the relationship property and raise an exception with
+    a clear error message.
+
+    You should normally not have to access this property directly, but instead use the associated
+    relationship properties.
+
+    *This utility method should only be used during class creation.*
+
+    :param other_table: Other table name
+    :type other_table: basestring
+    :param nullable: True to allow null values (meaning that there is no relationship)
+    :type nullable: bool
+    """
+
+    return Column(Integer,
+                  ForeignKey('{table}.id'.format(table=other_table), ondelete='CASCADE'),
+                  nullable=nullable)
+
+
+def one_to_one_self(model_class,
+                    fk,
+                    relationship_kwargs=None):
+    """
+    Declare a one-to-one relationship property. The property value would be an instance of the same
+    model.
+
+    You will need an associated foreign key to our own table.
+
+    *This utility method should only be used during class creation.*
+
+    :param model_class: The class in which this relationship will be declared
+    :type model_class: type
+    :param fk: Foreign key name
+    :type fk: basestring
+    :param relationship_kwargs: Extra kwargs for SQLAlchemy ``relationship``
+    :type relationship_kwargs: {}
+    """
+
+    relationship_kwargs = relationship_kwargs or {}
+
+    remote_side = '{model_class}.{remote_column}'.format(
+        model_class=model_class.__name__,
+        remote_column=model_class.id_column_name()
+    )
+
+    primaryjoin = '{remote_side} == {model_class}.{column}'.format(
+        remote_side=remote_side,
+        model_class=model_class.__name__,
+        column=fk
+    )
+
+    return relationship(
+        _get_class_for_table(model_class, model_class.__tablename__).__name__,
+        primaryjoin=primaryjoin,
+        remote_side=remote_side,
+        post_update=True,
+        **relationship_kwargs
+    )
+
+
+def one_to_many_self(model_class,
+                     fk,
+                     dict_key=None,
+                     relationship_kwargs=None):
+    """
+    Declare a one-to-many relationship property. The property value would be a list or dict of
+    instances of the same model.
+
+    You will need an associated foreign key to our own table.
+
+    *This utility method should only be used during class creation.*
+
+    :param model_class: The class in which this relationship will be declared
+    :type model_class: type
+    :param fk: Foreign key name
+    :type fk: basestring
+    :param dict_key: If set the value will be a dict with this key as the dict key; otherwise will
+                     be a list
+    :type dict_key: basestring
+    :param relationship_kwargs: Extra kwargs for SQLAlchemy ``relationship``
+    :type relationship_kwargs: {}
+    """
+
+    relationship_kwargs = relationship_kwargs or {}
+
+    relationship_kwargs.setdefault('remote_side', '{model_class}.{remote_column}'.format(
+        model_class=model_class.__name__,
+        remote_column=fk
+    ))
+
+    return _relationship(model_class, model_class.__tablename__, None, relationship_kwargs,
+                         other_property=False, dict_key=dict_key)
+
+
+def one_to_one(model_class,
+               other_table,
+               fk=None,
+               other_fk=None,
+               other_property=None,
+               relationship_kwargs=None,
+               backref_kwargs=None):
+    """
+    Declare a one-to-one relationship property. The property value would be an instance of the other
+    table's model.
+
+    You have two options for the foreign key. Either this table can have an associated foreign key
+    to the other table (use the ``fk`` argument) or the other table can have an associated foreign
+    key to our table (use the ``other_fk`` argument).
+
+    *This utility method should only be used during class creation.*
+
+    :param model_class: The class in which this relationship will be declared
+    :type model_class: type
+    :param other_table: Other table name
+    :type other_table: basestring
+    :param fk: Foreign key name at our table (no need to specify if there's no ambiguity)
+    :type fk: basestring
+    :param other_fk: Foreign key name at the other table (no need to specify if there's no
+                     ambiguity)
+    :type other_fk: basestring
+    :param other_property: Override name of matching one-to-one property at other table; set to
+                           false to disable
+    :type other_property: basestring|bool
+    :param relationship_kwargs: Extra kwargs for SQLAlchemy ``relationship``
+    :type relationship_kwargs: {}
+    :param backref_kwargs: Extra kwargs for SQLAlchemy ``backref``
+    :type backref_kwargs: {}
+    """
+
+    backref_kwargs = backref_kwargs or {}
+    backref_kwargs.setdefault('uselist', False)
+
+    return _relationship(model_class, other_table, backref_kwargs, relationship_kwargs,
+                         other_property, fk=fk, other_fk=other_fk)
+
+
+def one_to_many(model_class,
+                child_table,
+                child_fk=None,
+                dict_key=None,
+                child_property=None,
+                relationship_kwargs=None,
+                backref_kwargs=None):
+    """
+    Declare a one-to-many relationship property. The property value would be a list or dict of
+    instances of the child table's model.
+
+    The child table will need an associated foreign key to our table.
+
+    The declaration will automatically create a matching many-to-one property at the child model,
+    named after our table name. Use the ``child_property`` argument to override this name.
+
+    *This utility method should only be used during class creation.*
+
+    :param model_class: The class in which this relationship will be declared
+    :type model_class: type
+    :param child_table: Child table name
+    :type child_table: basestring
+    :param child_fk: Foreign key name at the child table (no need to specify if there's no
+                     ambiguity)
+    :type child_fk: basestring
+    :param dict_key: If set the value will be a dict with this key as the dict key; otherwise will
+                     be a list
+    :type dict_key: basestring
+    :param child_property: Override name of matching many-to-one property at child table; set to
+                           false to disable
+    :type child_property: basestring|bool
+    :param relationship_kwargs: Extra kwargs for SQLAlchemy ``relationship``
+    :type relationship_kwargs: {}
+    :param backref_kwargs: Extra kwargs for SQLAlchemy ``backref``
+    :type backref_kwargs: {}
+    """
+
+    backref_kwargs = backref_kwargs or {}
+    backref_kwargs.setdefault('uselist', False)
+
+    return _relationship(model_class, child_table, backref_kwargs, relationship_kwargs,
+                         child_property, other_fk=child_fk, dict_key=dict_key)
+
+
+def many_to_one(model_class,
+                parent_table,
+                fk=None,
+                parent_fk=None,
+                parent_property=None,
+                relationship_kwargs=None,
+                backref_kwargs=None):
+    """
+    Declare a many-to-one relationship property. The property value would be an instance of the
+    parent table's model.
+
+    You will need an associated foreign key to the parent table.
+
+    The declaration will automatically create a matching one-to-many property at the parent model,
+    named after the plural form of our table name. Use the ``parent_property`` argument to override
+    this name. Note: the automatic property will always be a SQLAlchemy query object; if you need a
+    Python collection then use :meth:`one_to_many` at that model.
+
+    *This utility method should only be used during class creation.*
+
+    :param model_class: The class in which this relationship will be declared
+    :type model_class: type
+    :param parent_table: Parent table name
+    :type parent_table: basestring
+    :param fk: Foreign key name at our table (no need to specify if there's no ambiguity)
+    :type fk: basestring
+    :param parent_fk: Foreign key name at the parent table (no need to specify if there's no
+                      ambiguity)
+    :type parent_fk: basestring
+    :param parent_property: Override name of matching one-to-many property at parent table; set to
+                            false to disable
+    :type parent_property: basestring|bool
+    :param relationship_kwargs: Extra kwargs for SQLAlchemy ``relationship``
+    :type relationship_kwargs: {}
+    :param backref_kwargs: Extra kwargs for SQLAlchemy ``backref``
+    :type backref_kwargs: {}
+    """
+
+    if parent_property is None:
+        parent_property = formatting.pluralize(model_class.__tablename__)
+
+    backref_kwargs = backref_kwargs or {}
+    backref_kwargs.setdefault('uselist', True)
+    backref_kwargs.setdefault('lazy', 'dynamic')
+    backref_kwargs.setdefault('cascade', 'all') # delete children when parent is deleted
+
+    return _relationship(model_class, parent_table, backref_kwargs, relationship_kwargs,
+                         parent_property, fk=fk, other_fk=parent_fk)
+
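For readers less familiar with these helpers, here is a rough plain-SQLAlchemy sketch of what a typical foreign_key()/many_to_one() pair (such as ExecutionBase.service_fk and ExecutionBase.service above) boils down to. The Service/Execution classes below are illustrative stand-ins only, not the real ARIA models, which also go through ModelMixin and declared_attr:

    from sqlalchemy import Column, ForeignKey, Integer, Text
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import backref, relationship

    Base = declarative_base()


    class Service(Base):
        __tablename__ = 'service'
        id = Column(Integer, primary_key=True)
        name = Column(Text)


    class Execution(Base):
        __tablename__ = 'execution'
        id = Column(Integer, primary_key=True)

        # foreign_key('service') produces an integer FK column with ON DELETE CASCADE:
        service_fk = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)

        # many_to_one(cls, 'service') produces the scalar relationship plus a dynamic,
        # cascading one-to-many backref named after the plural of our table name:
        service = relationship(
            Service,
            backref=backref('executions', uselist=True, lazy='dynamic', cascade='all'))
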
+
+def many_to_many(model_class,
+                 other_table,
+                 prefix=None,
+                 dict_key=None,
+                 other_property=None,
+                 relationship_kwargs=None,
+                 backref_kwargs=None):
+    """
+    Declare a many-to-many relationship property. The property value would be a list or dict of
+    instances of the other table's model.
+
+    You do not need associated foreign keys for this relationship. Instead, an extra table will be
+    created for you.
+
+    The declaration will automatically create a matching many-to-many property at the other model,
+    named after the plural form of our table name. Use the ``other_property`` argument to override
+    this name. Note: the automatic property will always be a SQLAlchemy query object; if you need a
+    Python collection then use :meth:`many_to_many` again at that model.
+
+    *This utility method should only be used during class creation.*
+
+    :param model_class: The class in which this relationship will be declared
+    :type model_class: type
+    :param other_table: Other table name
+    :type other_table: basestring
+    :param prefix: Optional prefix for extra table name as well as for ``other_property``
+    :type prefix: basestring
+    :param dict_key: If set the value will be a dict with this key as the dict key; otherwise will
+                     be a list
+    :type dict_key: basestring
+    :param other_property: Override name of matching many-to-many property at other table; set to
+                           false to disable
+    :type other_property: basestring|bool
+    :param relationship_kwargs: Extra kwargs for SQLAlchemy ``relationship``
+    :type relationship_kwargs: {}
+    :param backref_kwargs: Extra kwargs for SQLAlchemy ``backref``
+    :type backref_kwargs: {}
+    """
+
+    this_table = model_class.__tablename__
+    this_column_name = '{0}_id'.format(this_table)
+    this_foreign_key = '{0}.id'.format(this_table)
+
+    other_column_name = '{0}_id'.format(other_table)
+    other_foreign_key = '{0}.id'.format(other_table)
+
+    secondary_table = '{0}_{1}'.format(this_table, other_table)
+
+    if other_property is None:
+        other_property = formatting.pluralize(this_table)
+        if prefix is not None:
+            secondary_table = '{0}_{1}'.format(prefix, secondary_table)
+            other_property = '{0}_{1}'.format(prefix, other_property)
+
+    backref_kwargs = backref_kwargs or {}
+    backref_kwargs.setdefault('uselist', True)
+
+    relationship_kwargs = relationship_kwargs or {}
+    relationship_kwargs.setdefault('secondary', _get_secondary_table(
+        model_class.metadata,
+        secondary_table,
+        this_column_name,
+        other_column_name,
+        this_foreign_key,
+        other_foreign_key
+    ))
+
+    return _relationship(model_class, other_table, backref_kwargs, relationship_kwargs,
+                         other_property, dict_key=dict_key)
+
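To make the naming rules concrete: TaskBase.inputs above calls this helper as many_to_many(cls, 'parameter', prefix='inputs', dict_key='name'). The following small sketch just reproduces the name derivation (it assumes formatting.pluralize('task') yields 'tasks'):

    this_table, other_table, prefix = 'task', 'parameter', 'inputs'

    secondary_table = '{0}_{1}'.format(this_table, other_table)       # 'task_parameter'
    other_property = 'tasks'                                          # formatting.pluralize(this_table)
    if prefix is not None:
        secondary_table = '{0}_{1}'.format(prefix, secondary_table)   # 'inputs_task_parameter'
        other_property = '{0}_{1}'.format(prefix, other_property)     # 'inputs_tasks'

    # So the association table is named 'inputs_task_parameter', Task gets an 'inputs'
    # dict keyed by parameter name, and Parameter gets a matching 'inputs_tasks' property.
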
+
+def _relationship(model_class, other_table, backref_kwargs, relationship_kwargs, other_property,
+                  fk=None, other_fk=None, dict_key=None):
+    relationship_kwargs = relationship_kwargs or {}
+
+    if fk:
+        relationship_kwargs.setdefault('foreign_keys',
+                                       lambda: getattr(
+                                           _get_class_for_table(
+                                               model_class,
+                                               model_class.__tablename__),
+                                           fk))
+
+    elif other_fk:
+        relationship_kwargs.setdefault('foreign_keys',
+                                       lambda: getattr(
+                                           _get_class_for_table(
+                                               model_class,
+                                               other_table),
+                                           other_fk))
+
+    if dict_key:
+        relationship_kwargs.setdefault('collection_class',
+                                       attribute_mapped_collection(dict_key))
+
+    if other_property is False:
+        # No backref
+        return relationship(
+            lambda: _get_class_for_table(model_class, other_table),
+            **relationship_kwargs
+        )
+    else:
+        if other_property is None:
+            other_property = model_class.__tablename__
+        backref_kwargs = backref_kwargs or {}
+        return relationship(
+            lambda: _get_class_for_table(model_class, other_table),
+            backref=backref(other_property, **backref_kwargs),
+            **relationship_kwargs
+        )
+
+
+def _get_class_for_table(model_class, tablename):
+    if tablename in (model_class.__name__, model_class.__tablename__):
+        return model_class
+
+    for table_cls in model_class._decl_class_registry.values():
+        if tablename == getattr(table_cls, '__tablename__', None):
+            return table_cls
+
+    raise ValueError('unknown table: {0}'.format(tablename))
+
+
+def _get_secondary_table(metadata,
+                         name,
+                         first_column,
+                         second_column,
+                         first_foreign_key,
+                         second_foreign_key):
+    return Table(
+        name,
+        metadata,
+        Column(
+            first_column,
+            Integer,
+            ForeignKey(first_foreign_key)
+        ),
+        Column(
+            second_column,
+            Integer,
+            ForeignKey(second_foreign_key)
+        )
+    )

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/modeling/service_changes.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_changes.py b/aria/modeling/service_changes.py
new file mode 100644
index 0000000..a33e6ae
--- /dev/null
+++ b/aria/modeling/service_changes.py
@@ -0,0 +1,228 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+classes:
+    * ServiceUpdate - service update implementation model.
+    * ServiceUpdateStep - service update step implementation model.
+    * ServiceModification - service modification implementation model.
+"""
+
+# pylint: disable=no-self-argument, no-member, abstract-method
+
+from collections import namedtuple
+
+from sqlalchemy import (
+    Column,
+    Text,
+    DateTime,
+    Enum,
+)
+from sqlalchemy.ext.associationproxy import association_proxy
+from sqlalchemy.ext.declarative import declared_attr
+
+from .types import (List, Dict)
+from .mixins import ModelMixin
+from . import relationship
+
+
+class ServiceUpdateBase(ModelMixin):
+    """
+    Service update model representation.
+    """
+
+    steps = None  # overridden by the 'steps' backref from ServiceUpdateStepBase.service_update
+
+    __tablename__ = 'service_update'
+
+    __private_fields__ = ['service_fk',
+                          'execution_fk',
+                          'execution_name',
+                          'service_name']
+
+    created_at = Column(DateTime, nullable=False, index=True)
+    service_plan = Column(Dict, nullable=False)
+    service_update_nodes = Column(Dict)
+    service_update_service = Column(Dict)
+    service_update_node_templates = Column(List)
+    modified_entity_ids = Column(Dict)
+    state = Column(Text)
+
+    @declared_attr
+    def execution(cls):
+        return relationship.many_to_one(cls, 'execution')
+
+    @declared_attr
+    def service(cls):
+        return relationship.many_to_one(cls, 'service', parent_property='updates')
+
+    # region foreign keys
+
+    @declared_attr
+    def execution_fk(cls):
+        return relationship.foreign_key('execution', nullable=True)
+
+    @declared_attr
+    def service_fk(cls):
+        return relationship.foreign_key('service')
+
+    # endregion
+
+    # region association proxies
+
+    @declared_attr
+    def execution_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('execution', cls.name_column_name())
+
+    @declared_attr
+    def service_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('service', cls.name_column_name())
+
+    # endregion
+
+    def to_dict(self, suppress_error=False, **kwargs):
+        dep_update_dict = super(ServiceUpdateBase, self).to_dict(suppress_error)     #pylint: disable=no-member
+        # The update steps are model instances themselves, so they are serialized explicitly
+        dep_update_dict['steps'] = [step.to_dict() for step in self.steps]
+        return dep_update_dict
+
+
+class ServiceUpdateStepBase(ModelMixin):
+    """
+    Service update step model representation.
+    """
+
+    __tablename__ = 'service_update_step'
+
+    __private_fields__ = ['service_update_fk',
+                          'service_update_name']
+
+    _action_types = namedtuple('ACTION_TYPES', 'ADD, REMOVE, MODIFY')
+    ACTION_TYPES = _action_types(ADD='add', REMOVE='remove', MODIFY='modify')
+
+    _entity_types = namedtuple(
+        'ENTITY_TYPES',
+        'NODE, RELATIONSHIP, PROPERTY, OPERATION, WORKFLOW, OUTPUT, DESCRIPTION, GROUP, PLUGIN')
+    ENTITY_TYPES = _entity_types(
+        NODE='node',
+        RELATIONSHIP='relationship',
+        PROPERTY='property',
+        OPERATION='operation',
+        WORKFLOW='workflow',
+        OUTPUT='output',
+        DESCRIPTION='description',
+        GROUP='group',
+        PLUGIN='plugin'
+    )
+
+    action = Column(Enum(*ACTION_TYPES, name='action_type'), nullable=False)
+    entity_id = Column(Text, nullable=False)
+    entity_type = Column(Enum(*ENTITY_TYPES, name='entity_type'), nullable=False)
+
+    @declared_attr
+    def service_update(cls):
+        return relationship.many_to_one(cls, 'service_update', parent_property='steps')
+
+    # region foreign keys
+
+    @declared_attr
+    def service_update_fk(cls):
+        return relationship.foreign_key('service_update')
+
+    # endregion
+
+    # region association proxies
+
+    @declared_attr
+    def service_update_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('service_update', cls.name_column_name())
+
+    # endregion
+
+    def __hash__(self):
+        return hash((getattr(self, self.id_column_name()), self.entity_id))
+
+    def __lt__(self, other):
+        """
+        Sort order: 'remove' < 'modify' < 'add'; within the same action, nodes are added before
+        relationships and relationships are removed before nodes.
+        """
+        if not isinstance(other, self.__class__):
+            return not self >= other
+
+        if self.action != other.action:
+            if self.action == 'remove':
+                return_value = True
+            elif self.action == 'add':
+                return_value = False
+            else:
+                return_value = other.action == 'add'
+            return return_value
+
+        if self.action == 'add':
+            return self.entity_type == 'node' and other.entity_type == 'relationship'
+        if self.action == 'remove':
+            return self.entity_type == 'relationship' and other.entity_type == 'node'
+        return False
+
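A minimal sketch of the resulting sort order, mirroring test_service_update_step_order in tests/modeling/test_models.py later in this commit (it assumes the concrete ServiceUpdateStep class exposed by aria.modeling.models):

    from aria.modeling.models import ServiceUpdateStep

    add_node = ServiceUpdateStep(action='add', entity_type='node', entity_id='node_id')
    modify_node = ServiceUpdateStep(action='modify', entity_type='node', entity_id='node_id')
    remove_node = ServiceUpdateStep(action='remove', entity_type='node', entity_id='node_id')

    # nodes are removed first, then modified, then added
    assert sorted([add_node, remove_node, modify_node]) == [remove_node, modify_node, add_node]
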
+
+class ServiceModificationBase(ModelMixin):
+    """
+    Service modification model representation.
+    """
+
+    __tablename__ = 'service_modification'
+
+    __private_fields__ = ['service_fk',
+                          'service_name']
+
+    STARTED = 'started'
+    FINISHED = 'finished'
+    ROLLEDBACK = 'rolledback'
+
+    STATES = [STARTED, FINISHED, ROLLEDBACK]
+    END_STATES = [FINISHED, ROLLEDBACK]
+
+    context = Column(Dict)
+    created_at = Column(DateTime, nullable=False, index=True)
+    ended_at = Column(DateTime, index=True)
+    modified_node_templates = Column(Dict)
+    nodes = Column(Dict)
+    status = Column(Enum(*STATES, name='service_modification_status'))
+
+    @declared_attr
+    def service(cls):
+        return relationship.many_to_one(cls, 'service', parent_property='modifications')
+
+    # region foreign keys
+
+    @declared_attr
+    def service_fk(cls):
+        return relationship.foreign_key('service')
+
+    # endregion
+
+    # region association proxies
+
+    @declared_attr
+    def service_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('service', cls.name_column_name())
+
+    # endregion

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/modeling/service_common.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_common.py b/aria/modeling/service_common.py
new file mode 100644
index 0000000..dfe4674
--- /dev/null
+++ b/aria/modeling/service_common.py
@@ -0,0 +1,277 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=no-self-argument, no-member, abstract-method
+
+from sqlalchemy import (
+    Column,
+    Text,
+    PickleType
+)
+from sqlalchemy.ext.declarative import declared_attr
+
+from ..parser.consumption import ConsumptionContext
+from ..utils import collections, formatting, console
+from .mixins import InstanceModelMixin, TemplateModelMixin
+from .types import List
+from . import (
+    relationship,
+    utils
+)
+
+
+class ParameterBase(TemplateModelMixin):
+    """
+    Represents a typed value.
+
+    This model is used by both service template and service instance elements.
+
+    :ivar name: Name
+    :ivar type_name: Type name
+    :ivar value: Value
+    :ivar description: Description
+    """
+
+    __tablename__ = 'parameter'
+
+    name = Column(Text)
+    type_name = Column(Text)
+    value = Column(PickleType)
+    description = Column(Text)
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('type_name', self.type_name),
+            ('value', self.value),
+            ('description', self.description)))
+
+    def instantiate(self, container):
+        from . import models
+        return models.Parameter(name=self.name,
+                                type_name=self.type_name,
+                                value=self.value,
+                                description=self.description)
+
+    def coerce_values(self, container, report_issues):
+        if self.value is not None:
+            self.value = utils.coerce_value(container, self.value,
+                                            report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        if self.type_name is not None:
+            console.puts('{0}: {1} ({2})'.format(
+                context.style.property(self.name),
+                context.style.literal(self.value),
+                context.style.type(self.type_name)))
+        else:
+            console.puts('{0}: {1}'.format(
+                context.style.property(self.name),
+                context.style.literal(self.value)))
+        if self.description:
+            console.puts(context.style.meta(self.description))
+
+    @classmethod
+    def wrap(cls, name, value, description=None):
+        """
+        Wraps an arbitrary value as a parameter. The type will be guessed via introspection.
+
+        :param name: Parameter name
+        :type name: basestring
+        :param value: Parameter value
+        :param description: Description (optional)
+        :type description: basestring
+        """
+
+        from . import models
+        return models.Parameter(name=name,
+                                type_name=formatting.full_type_name(value),
+                                value=value,
+                                description=description)
+
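A small usage sketch of the wrap() helper above (assuming the concrete Parameter class exposed by aria.modeling.models); the exact type_name string comes from formatting.full_type_name:

    from aria.modeling.models import Parameter

    timeout = Parameter.wrap('timeout', 30, description='seconds to wait before giving up')

    assert timeout.name == 'timeout'
    assert timeout.value == 30
    print(timeout.type_name)   # type guessed via introspection, e.g. 'int'
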
+
+class TypeBase(InstanceModelMixin):
+    """
+    Represents a type and its children.
+    """
+
+    __tablename__ = 'type'
+
+    __private_fields__ = ['parent_type_fk']
+
+    variant = Column(Text, nullable=False)
+    description = Column(Text)
+    _role = Column(Text, name='role')
+
+    @declared_attr
+    def parent(cls):
+        return relationship.one_to_one_self(cls, 'parent_type_fk')
+
+    @declared_attr
+    def children(cls):
+        return relationship.one_to_many_self(cls, 'parent_type_fk')
+
+    # region foreign keys
+
+    @declared_attr
+    def parent_type_fk(cls):
+        """For Type one-to-many to Type"""
+        return relationship.foreign_key('type', nullable=True)
+
+    # endregion
+
+    @property
+    def role(self):
+        def get_role(the_type):
+            if the_type is None:
+                return None
+            elif the_type._role is None:
+                return get_role(the_type.parent)
+            return the_type._role
+
+        return get_role(self)
+
+    @role.setter
+    def role(self, value):
+        self._role = value
+
+    def is_descendant(self, base_name, name):
+        base = self.get_descendant(base_name)
+        if base is not None:
+            if base.get_descendant(name) is not None:
+                return True
+        return False
+
+    def get_descendant(self, name):
+        if self.name == name:
+            return self
+        for child in self.children:
+            found = child.get_descendant(name)
+            if found is not None:
+                return found
+        return None
+
+    def iter_descendants(self):
+        for child in self.children:
+            yield child
+            for descendant in child.iter_descendants():
+                yield descendant
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('description', self.description),
+            ('role', self.role)))
+
+    @property
+    def as_raw_all(self):
+        types = []
+        self._append_raw_children(types)
+        return types
+
+    def coerce_values(self, container, report_issues):
+        pass
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        if self.name:
+            console.puts(context.style.type(self.name))
+        with context.style.indent:
+            for child in self.children:
+                child.dump()
+
+    def _append_raw_children(self, types):
+        for child in self.children:
+            raw_child = formatting.as_raw(child)
+            raw_child['parent'] = self.name
+            types.append(raw_child)
+            child._append_raw_children(types)
+
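A minimal in-memory sketch of the traversal and role helpers above (assuming the concrete Type class exposed by aria.modeling.models); since the children relationship has no backref, both sides of the hierarchy are wired by hand here:

    from aria.modeling.models import Type

    root = Type(name='root', variant='node_type')
    child = Type(name='child', variant='node_type')
    root.children.append(child)   # populates the one-to-many side only
    child.parent = root           # role lookup walks the parent chain

    root.role = 'host'
    assert root.get_descendant('child') is child
    assert root.is_descendant('root', 'child')
    assert list(root.iter_descendants()) == [child]
    assert child.role == 'host'   # inherited from the parent, since child._role is None
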
+
+class MetadataBase(TemplateModelMixin):
+    """
+    Custom values associated with the service.
+
+    This model is used by both service template and service instance elements.
+
+    :ivar name: Name
+    :ivar value: Value
+    """
+
+    __tablename__ = 'metadata'
+
+    value = Column(Text)
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('value', self.value)))
+
+    def coerce_values(self, container, report_issues):
+        pass
+
+    def instantiate(self, container):
+        from . import models
+        return models.Metadata(name=self.name,
+                               value=self.value)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts('{0}: {1}'.format(
+            context.style.property(self.name),
+            context.style.literal(self.value)))
+
+
+class PluginSpecificationBase(InstanceModelMixin):
+    """
+    Plugin specification model representation.
+    """
+
+    __tablename__ = 'plugin_specification'
+
+    __private_fields__ = ['service_template_fk']
+
+    archive_name = Column(Text, nullable=False, index=True)
+    distribution = Column(Text)
+    distribution_release = Column(Text)
+    distribution_version = Column(Text)
+    package_name = Column(Text, nullable=False, index=True)
+    package_source = Column(Text)
+    package_version = Column(Text)
+    supported_platform = Column(Text)
+    supported_py_versions = Column(List)
+
+    # region foreign keys
+
+    @declared_attr
+    def service_template_fk(cls):
+        """For ServiceTemplate one-to-many to PluginSpecification"""
+        return relationship.foreign_key('service_template', nullable=True)
+
+    # endregion
+
+    def coerce_values(self, container, report_issues):
+        pass
+
+    def find_plugin(self, plugins):
+        # TODO: this should check versions/distribution and other specification
+        for plugin in plugins:
+            if plugin.name == self.name:
+                return plugin
+        return None


[03/12] incubator-ariatosca git commit: ARIA-105 Integrate parser and orchestrator models

Posted by em...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/modeling/test_models.py
----------------------------------------------------------------------
diff --git a/tests/modeling/test_models.py b/tests/modeling/test_models.py
new file mode 100644
index 0000000..5266d79
--- /dev/null
+++ b/tests/modeling/test_models.py
@@ -0,0 +1,837 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from datetime import datetime
+from contextlib import contextmanager
+
+import pytest
+
+from aria import application_model_storage
+from aria.storage import (
+    sql_mapi,
+)
+from aria.storage.exceptions import StorageError
+from aria.modeling.exceptions import ValueFormatException
+from aria.modeling.models import (
+    ServiceTemplate,
+    Service,
+    ServiceUpdate,
+    ServiceUpdateStep,
+    ServiceModification,
+    Execution,
+    Task,
+    Plugin,
+    Relationship,
+    NodeTemplate,
+    Node,
+    Parameter
+)
+
+from tests import mock
+from ..storage import release_sqlite_storage, init_inmemory_model_storage
+
+
+@contextmanager
+def sql_storage(storage_func):
+    storage = None
+    try:
+        storage = storage_func()
+        yield storage
+    finally:
+        if storage:
+            release_sqlite_storage(storage)
+
+
+def _empty_storage():
+    return application_model_storage(sql_mapi.SQLAlchemyModelAPI,
+                                     initiator=init_inmemory_model_storage)
+
+
+def _service_template_storage():
+    storage = _empty_storage()
+    service_template = mock.models.create_service_template()
+    storage.service_template.put(service_template)
+    return storage
+
+
+def _service_storage():
+    storage = _service_template_storage()
+    service = mock.models.create_service(
+        storage.service_template.get_by_name(mock.models.SERVICE_TEMPLATE_NAME))
+    storage.service.put(service)
+    return storage
+
+
+def _service_update_storage():
+    storage = _service_storage()
+    service_update = ServiceUpdate(
+        service=storage.service.list()[0],
+        created_at=now,
+        service_plan={},
+    )
+    storage.service_update.put(service_update)
+    return storage
+
+
+def _node_template_storage():
+    storage = _service_storage()
+    service_template = storage.service_template.list()[0]
+    dependency_node_template = mock.models.create_dependency_node_template(service_template)
+    mock.models.create_dependent_node_template(service_template, dependency_node_template)
+    storage.service_template.update(service_template)
+    return storage
+
+
+def _node_storage():
+    storage = _node_template_storage()
+    service = storage.service.get_by_name(mock.models.SERVICE_NAME)
+    dependency_node_template = storage.node_template.get_by_name(
+        mock.models.DEPENDENCY_NODE_TEMPLATE_NAME)
+    dependent_node_template = storage.node_template.get_by_name(
+        mock.models.DEPENDENT_NODE_TEMPLATE_NAME)
+    mock.models.create_dependency_node(dependency_node_template, service)
+    mock.models.create_dependent_node(dependent_node_template, service)
+    storage.service.update(service)
+    return storage
+
+
+def _execution_storage():
+    storage = _service_storage()
+    execution = mock.models.create_execution(storage.service.list()[0])
+    plugin = mock.models.create_plugin()
+    storage.execution.put(execution)
+    storage.plugin.put(plugin)
+    return storage
+
+
+@pytest.fixture
+def empty_storage():
+    with sql_storage(_empty_storage) as storage:
+        yield storage
+
+
+@pytest.fixture
+def service_template_storage():
+    with sql_storage(_service_template_storage) as storage:
+        yield storage
+
+
+@pytest.fixture
+def service_storage():
+    with sql_storage(_service_storage) as storage:
+        yield storage
+
+
+@pytest.fixture
+def service_update_storage():
+    with sql_storage(_service_update_storage) as storage:
+        yield storage
+
+
+@pytest.fixture
+def node_template_storage():
+    with sql_storage(_node_template_storage) as storage:
+        yield storage
+
+
+@pytest.fixture
+def node_storage():
+    with sql_storage(_node_storage) as storage:
+        yield storage
+
+
+@pytest.fixture
+def execution_storage():
+    with sql_storage(_execution_storage) as storage:
+        yield storage
+
+
+m_cls = type('MockClass')
+now = datetime.utcnow()
+
+
+def _test_model(is_valid, storage, model_cls, model_kwargs):
+    if is_valid:
+        model = model_cls(**model_kwargs)
+        getattr(storage, model_cls.__modelname__).put(model)
+        return model
+    else:
+        with pytest.raises((ValueFormatException, StorageError, TypeError)):
+            getattr(storage, model_cls.__modelname__).put(model_cls(**model_kwargs))
+
+
+class TestServiceTemplate(object):
+
+    @pytest.mark.parametrize(
+        'is_valid, description, created_at, updated_at, main_file_name',
+        [
+            (False, {}, now, now, '/path'),
+            (False, 'description', 'error', now, '/path'),
+            (False, 'description', now, 'error', '/path'),
+            (False, 'description', now, now, {}),
+
+            (True, 'description', now, now, '/path'),
+        ]
+    )
+    def test_service_template_model_creation(self, empty_storage, is_valid, description, created_at,
+                                             updated_at, main_file_name):
+        _test_model(is_valid=is_valid,
+                    storage=empty_storage,
+                    model_cls=ServiceTemplate,
+                    model_kwargs=dict(
+                        description=description,
+                        created_at=created_at,
+                        updated_at=updated_at,
+                        main_file_name=main_file_name)
+                   )
+
+
+class TestService(object):
+
+    @pytest.mark.parametrize(
+        'is_valid, name, created_at, description, inputs, permalink, '
+        'outputs, scaling_groups, updated_at',
+        [
+            (False, m_cls, now, 'desc', {}, 'perlnk', {}, {}, now),
+            (False, 'name', m_cls, 'desc', {}, 'perlnk', {}, {}, now),
+            (False, 'name', now, m_cls, {}, 'perlnk', {}, {}, now),
+            (False, 'name', now, 'desc', {}, m_cls, {}, {}, now),
+            (False, 'name', now, 'desc', {}, 'perlnk', m_cls, {}, now),
+            (False, 'name', now, 'desc', {}, 'perlnk', {}, m_cls, now),
+            (False, 'name', now, 'desc', {}, 'perlnk', {}, {}, m_cls),
+
+            (True, 'name', now, 'desc', {}, 'perlnk', {}, {}, now),
+            (True, None, now, 'desc', {}, 'perlnk', {}, {}, now),
+            (True, 'name', now, 'desc', {}, 'perlnk', {}, {}, now),
+            (True, 'name', now, None, {}, 'perlnk', {}, {}, now),
+            (True, 'name', now, 'desc', {}, 'perlnk', {}, {}, now),
+            (True, 'name', now, 'desc', {}, None, {}, {}, now),
+            (True, 'name', now, 'desc', {}, 'perlnk', {}, {}, now),
+            (True, 'name', now, 'desc', {}, 'perlnk', {}, None, now),
+            (True, 'name', now, 'desc', {}, 'perlnk', {}, {}, None),
+            (True, 'name', now, 'desc', {}, 'perlnk', {}, {}, now),
+        ]
+    )
+    def test_service_model_creation(self, service_storage, is_valid, name, created_at, description,
+                                    inputs, permalink, outputs, scaling_groups, updated_at):
+        service = _test_model(
+            is_valid=is_valid,
+            storage=service_storage,
+            model_cls=Service,
+            model_kwargs=dict(
+                name=name,
+                service_template=service_storage.service_template.list()[0],
+                created_at=created_at,
+                description=description,
+                inputs=inputs,
+                permalink=permalink,
+                outputs=outputs,
+                scaling_groups=scaling_groups,
+                updated_at=updated_at
+            ))
+        if is_valid:
+            assert service.service_template == \
+                   service_storage.service_template.list()[0]
+
+
+class TestExecution(object):
+
+    @pytest.mark.parametrize(
+        'is_valid, created_at, started_at, ended_at, error, is_system_workflow, parameters, '
+        'status, workflow_name',
+        [
+            (False, m_cls, now, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
+            (False, now, m_cls, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
+            (False, now, now, m_cls, 'error', False, {}, Execution.STARTED, 'wf_name'),
+            (False, now, now, now, m_cls, False, {}, Execution.STARTED, 'wf_name'),
+            (False, now, now, now, 'error', False, m_cls, Execution.STARTED, 'wf_name'),
+            (False, now, now, now, 'error', False, {}, m_cls, 'wf_name'),
+            (False, now, now, now, 'error', False, {}, Execution.STARTED, m_cls),
+
+            (True, now, now, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
+            (True, now, None, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
+            (True, now, now, None, 'error', False, {}, Execution.STARTED, 'wf_name'),
+            (True, now, now, now, None, False, {}, Execution.STARTED, 'wf_name'),
+            (True, now, now, now, 'error', False, None, Execution.STARTED, 'wf_name'),
+        ]
+    )
+    def test_execution_model_creation(self, service_storage, is_valid, created_at, started_at,
+                                      ended_at, error, is_system_workflow, parameters, status,
+                                      workflow_name):
+        execution = _test_model(
+            is_valid=is_valid,
+            storage=service_storage,
+            model_cls=Execution,
+            model_kwargs=dict(
+                service=service_storage.service.list()[0],
+                created_at=created_at,
+                started_at=started_at,
+                ended_at=ended_at,
+                error=error,
+                is_system_workflow=is_system_workflow,
+                parameters=parameters,
+                status=status,
+                workflow_name=workflow_name,
+            ))
+        if is_valid:
+            assert execution.service == service_storage.service.list()[0]
+            assert execution.service_template == service_storage.service_template.list()[0]
+
+    def test_execution_status_transition(self):
+        def create_execution(status):
+            execution = Execution(
+                id='e_id',
+                workflow_name='w_name',
+                status=status,
+                parameters={},
+                created_at=now,
+            )
+            return execution
+
+        valid_transitions = {
+            Execution.PENDING: [Execution.STARTED,
+                                Execution.CANCELLED,
+                                Execution.PENDING],
+            Execution.STARTED: [Execution.FAILED,
+                                Execution.TERMINATED,
+                                Execution.CANCELLED,
+                                Execution.CANCELLING,
+                                Execution.STARTED],
+            Execution.CANCELLING: [Execution.FAILED,
+                                   Execution.TERMINATED,
+                                   Execution.CANCELLED,
+                                   Execution.CANCELLING],
+            Execution.FAILED: [Execution.FAILED],
+            Execution.TERMINATED: [Execution.TERMINATED],
+            Execution.CANCELLED: [Execution.CANCELLED]
+        }
+
+        invalid_transitions = {
+            Execution.PENDING: [Execution.FAILED,
+                                Execution.TERMINATED,
+                                Execution.CANCELLING],
+            Execution.STARTED: [Execution.PENDING],
+            Execution.CANCELLING: [Execution.PENDING,
+                                   Execution.STARTED],
+            Execution.FAILED: [Execution.PENDING,
+                               Execution.STARTED,
+                               Execution.TERMINATED,
+                               Execution.CANCELLED,
+                               Execution.CANCELLING],
+            Execution.TERMINATED: [Execution.PENDING,
+                                   Execution.STARTED,
+                                   Execution.FAILED,
+                                   Execution.CANCELLED,
+                                   Execution.CANCELLING],
+            Execution.CANCELLED: [Execution.PENDING,
+                                  Execution.STARTED,
+                                  Execution.FAILED,
+                                  Execution.TERMINATED,
+                                  Execution.CANCELLING],
+        }
+
+        for current_status, valid_transitioned_statues in valid_transitions.items():
+            for transitioned_status in valid_transitioned_statues:
+                execution = create_execution(current_status)
+                execution.status = transitioned_status
+
+        for current_status, invalid_transitioned_statues in invalid_transitions.items():
+            for transitioned_status in invalid_transitioned_statues:
+                execution = create_execution(current_status)
+                with pytest.raises(ValueError):
+                    execution.status = transitioned_status
+
+
+class TestServiceUpdate(object):
+    @pytest.mark.parametrize(
+        'is_valid, created_at, service_plan, service_update_nodes, '
+        'service_update_service, service_update_node_templates, '
+        'modified_entity_ids, state',
+        [
+            (False, m_cls, {}, {}, {}, [], {}, 'state'),
+            (False, now, m_cls, {}, {}, [], {}, 'state'),
+            (False, now, {}, m_cls, {}, [], {}, 'state'),
+            (False, now, {}, {}, m_cls, [], {}, 'state'),
+            (False, now, {}, {}, {}, m_cls, {}, 'state'),
+            (False, now, {}, {}, {}, [], m_cls, 'state'),
+            (False, now, {}, {}, {}, [], {}, m_cls),
+
+            (True, now, {}, {}, {}, [], {}, 'state'),
+            (True, now, {}, None, {}, [], {}, 'state'),
+            (True, now, {}, {}, None, [], {}, 'state'),
+            (True, now, {}, {}, {}, None, {}, 'state'),
+            (True, now, {}, {}, {}, [], None, 'state'),
+            (True, now, {}, {}, {}, [], {}, None),
+        ]
+    )
+    def test_service_update_model_creation(self, service_storage, is_valid, created_at,
+                                           service_plan, service_update_nodes,
+                                           service_update_service, service_update_node_templates,
+                                           modified_entity_ids, state):
+        service_update = _test_model(
+            is_valid=is_valid,
+            storage=service_storage,
+            model_cls=ServiceUpdate,
+            model_kwargs=dict(
+                service=service_storage.service.list()[0],
+                created_at=created_at,
+                service_plan=service_plan,
+                service_update_nodes=service_update_nodes,
+                service_update_service=service_update_service,
+                service_update_node_templates=service_update_node_templates,
+                modified_entity_ids=modified_entity_ids,
+                state=state
+            ))
+        if is_valid:
+            assert service_update.service == \
+                   service_storage.service.list()[0]
+
+
+class TestServiceUpdateStep(object):
+
+    @pytest.mark.parametrize(
+        'is_valid, action, entity_id, entity_type',
+        [
+            (False, m_cls, 'id', ServiceUpdateStep.ENTITY_TYPES.NODE),
+            (False, ServiceUpdateStep.ACTION_TYPES.ADD, m_cls,
+             ServiceUpdateStep.ENTITY_TYPES.NODE),
+            (False, ServiceUpdateStep.ACTION_TYPES.ADD, 'id', m_cls),
+
+            (True, ServiceUpdateStep.ACTION_TYPES.ADD, 'id',
+             ServiceUpdateStep.ENTITY_TYPES.NODE)
+        ]
+    )
+    def test_service_update_step_model_creation(self, service_update_storage, is_valid, action,
+                                                entity_id, entity_type):
+        service_update_step = _test_model(
+            is_valid=is_valid,
+            storage=service_update_storage,
+            model_cls=ServiceUpdateStep,
+            model_kwargs=dict(
+                service_update=service_update_storage.service_update.list()[0],
+                action=action,
+                entity_id=entity_id,
+                entity_type=entity_type
+            ))
+        if is_valid:
+            assert service_update_step.service_update == \
+                   service_update_storage.service_update.list()[0]
+
+    def test_service_update_step_order(self):
+        add_node = ServiceUpdateStep(
+            id='add_step',
+            action='add',
+            entity_type='node',
+            entity_id='node_id')
+
+        modify_node = ServiceUpdateStep(
+            id='modify_step',
+            action='modify',
+            entity_type='node',
+            entity_id='node_id')
+
+        remove_node = ServiceUpdateStep(
+            id='remove_step',
+            action='remove',
+            entity_type='node',
+            entity_id='node_id')
+
+        for step in (add_node, modify_node, remove_node):
+            assert hash((step.id, step.entity_id)) == hash(step)
+
+        assert remove_node < modify_node < add_node
+        assert not remove_node > modify_node > add_node
+
+        add_rel = ServiceUpdateStep(
+            id='add_step',
+            action='add',
+            entity_type='relationship',
+            entity_id='relationship_id')
+
+        remove_rel = ServiceUpdateStep(
+            id='remove_step',
+            action='remove',
+            entity_type='relationship',
+            entity_id='relationship_id')
+
+        assert remove_rel < remove_node < add_node < add_rel
+        assert not add_node < None
+
+
+class TestServiceModification(object):
+    @pytest.mark.parametrize(
+        'is_valid, context, created_at, ended_at, modified_node_templates, nodes, status',
+        [
+            (False, m_cls, now, now, {}, {}, ServiceModification.STARTED),
+            (False, {}, m_cls, now, {}, {}, ServiceModification.STARTED),
+            (False, {}, now, m_cls, {}, {}, ServiceModification.STARTED),
+            (False, {}, now, now, m_cls, {}, ServiceModification.STARTED),
+            (False, {}, now, now, {}, m_cls, ServiceModification.STARTED),
+            (False, {}, now, now, {}, {}, m_cls),
+
+            (True, {}, now, now, {}, {}, ServiceModification.STARTED),
+            (True, {}, now, None, {}, {}, ServiceModification.STARTED),
+            (True, {}, now, now, None, {}, ServiceModification.STARTED),
+            (True, {}, now, now, {}, None, ServiceModification.STARTED),
+        ]
+    )
+    def test_service_modification_model_creation(self, service_storage, is_valid, context,
+                                                 created_at, ended_at, modified_node_templates,
+                                                 nodes, status):
+        service_modification = _test_model(
+            is_valid=is_valid,
+            storage=service_storage,
+            model_cls=ServiceModification,
+            model_kwargs=dict(
+                service=service_storage.service.list()[0],
+                context=context,
+                created_at=created_at,
+                ended_at=ended_at,
+                modified_node_templates=modified_node_templates,
+                nodes=nodes,
+                status=status,
+            ))
+        if is_valid:
+            assert service_modification.service == \
+                   service_storage.service.list()[0]
+
+
+class TestNodeTemplate(object):
+    @pytest.mark.parametrize(
+        'is_valid, name, default_instances, max_instances, min_instances, plugin_specifications, '
+        'properties',
+        [
+            (False, m_cls, 1, 1, 1, {}, {}),
+            (False, 'name', m_cls, 1, 1, {}, {}),
+            (False, 'name', 1, m_cls, 1, {}, {}),
+            (False, 'name', 1, 1, m_cls, {}, {}),
+            (False, 'name', 1, 1, 1, m_cls, {}),
+            (False, 'name', 1, 1, 1, None, {}),
+
+            (True, 'name', 1, 1, 1, {}, {}),
+        ]
+    )
+    def test_node_template_model_creation(self, service_storage, is_valid, name, default_instances,
+                                          max_instances, min_instances, plugin_specifications,
+                                          properties):
+        node_template = _test_model(
+            is_valid=is_valid,
+            storage=service_storage,
+            model_cls=NodeTemplate,
+            model_kwargs=dict(
+                name=name,
+                type=service_storage.type.list()[0],
+                default_instances=default_instances,
+                max_instances=max_instances,
+                min_instances=min_instances,
+                plugin_specifications=plugin_specifications,
+                properties=properties,
+                service_template=service_storage.service_template.list()[0]
+            ))
+        if is_valid:
+            assert node_template.service_template == \
+                   service_storage.service_template.list()[0]
+
+
+class TestNode(object):
+    @pytest.mark.parametrize(
+        'is_valid, name, runtime_properties, scaling_groups, state, version',
+        [
+            (False, m_cls, {}, [], 'state', 1),
+            (False, 'name', m_cls, [], 'state', 1),
+            (False, 'name', {}, m_cls, 'state', 1),
+            (False, 'name', {}, [], m_cls, 1),
+            (False, m_cls, {}, [], 'state', m_cls),
+
+            (True, 'name', {}, [], 'state', 1),
+            (True, None, {}, [], 'state', 1),
+            (True, 'name', None, [], 'state', 1),
+            (True, 'name', {}, None, 'state', 1),
+            (True, 'name', {}, [], 'state', None),
+        ]
+    )
+    def test_node_model_creation(self, node_template_storage, is_valid, name, runtime_properties,
+                                 scaling_groups, state, version):
+        node = _test_model(
+            is_valid=is_valid,
+            storage=node_template_storage,
+            model_cls=Node,
+            model_kwargs=dict(
+                node_template=node_template_storage.node_template.list()[0],
+                type=node_template_storage.type.list()[0],
+                name=name,
+                runtime_properties=runtime_properties,
+                scaling_groups=scaling_groups,
+                state=state,
+                version=version,
+                service=node_template_storage.service.list()[0]
+            ))
+        if is_valid:
+            assert node.node_template == node_template_storage.node_template.list()[0]
+            assert node.service == \
+                   node_template_storage.service.list()[0]
+
+
+class TestNodeIP(object):
+
+    ip = '1.1.1.1'
+
+    def test_ip_on_none_hosted_node(self, service_storage):
+        node_template = self._node_template(service_storage, ip='not considered')
+        node = self._node(service_storage,
+                          node_template,
+                          is_host=False,
+                          ip='not considered')
+        assert node.ip is None
+
+    def test_property_ip_on_host_node(self, service_storage):
+        node_template = self._node_template(service_storage, ip=self.ip)
+        node = self._node(service_storage, node_template, is_host=True, ip=None)
+        assert node.ip == self.ip
+
+    def test_runtime_property_ip_on_host_node(self, service_storage):
+        node_template = self._node_template(service_storage, ip='not considered')
+        node = self._node(service_storage, node_template, is_host=True, ip=self.ip)
+        assert node.ip == self.ip
+
+    def test_no_ip_configured_on_host_node(self, service_storage):
+        node_template = self._node_template(service_storage, ip=None)
+        node = self._node(service_storage, node_template, is_host=True, ip=None)
+        assert node.ip is None
+
+    def test_runtime_property_on_hosted_node(self, service_storage):
+        host_node_template = self._node_template(service_storage, ip=None)
+        host_node = self._node(service_storage,
+                               host_node_template,
+                               is_host=True,
+                               ip=self.ip)
+        node_template = self._node_template(service_storage, ip=None)
+        node = self._node(service_storage,
+                          node_template,
+                          is_host=False,
+                          ip=None,
+                          host_fk=host_node.id)
+        assert node.ip == self.ip
+
+    def _node_template(self, storage, ip):
+        kwargs = dict(
+            name='node_template',
+            type=storage.type.list()[0],
+            default_instances=1,
+            max_instances=1,
+            min_instances=1,
+            service_template=storage.service_template.list()[0]
+        )
+        if ip:
+            kwargs['properties'] = {'ip': Parameter.wrap('ip', ip)}
+        node = NodeTemplate(**kwargs)
+        storage.node_template.put(node)
+        return node
+
+    def _node(self, storage, node, is_host, ip, host_fk=None):
+        kwargs = dict(
+            name='node',
+            node_template=node,
+            type=storage.type.list()[0],
+            runtime_properties={},
+            state='',
+            service=storage.service.list()[0]
+        )
+        if ip:
+            kwargs['runtime_properties']['ip'] = ip
+        if is_host:
+            kwargs['host_fk'] = 1
+        elif host_fk:
+            kwargs['host_fk'] = host_fk
+        node = Node(**kwargs)
+        storage.node.put(node)
+        return node
+
+
+class TestRelationship(object):
+    @pytest.mark.parametrize(
+        'is_valid, source_position, target_position',
+        [
+            (False, m_cls, 0),
+            (False, 0, m_cls),
+
+            (True, 0, 0),
+            (True, None, 0),
+            (True, 0, None),
+        ]
+    )
+    def test_relationship_model_creation(self, node_storage, is_valid, source_position,
+                                         target_position):
+        nodes = node_storage.node
+        source_node = nodes.get_by_name(mock.models.DEPENDENT_NODE_NAME)
+        target_node = nodes.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+        _test_model(is_valid=is_valid,
+                    storage=node_storage,
+                    model_cls=Relationship,
+                    model_kwargs=dict(
+                        source_node=source_node,
+                        target_node=target_node,
+                        source_position=source_position,
+                        target_position=target_position
+                    ))
+
+
+class TestPlugin(object):
+    @pytest.mark.parametrize(
+        'is_valid, archive_name, distribution, distribution_release, '
+        'distribution_version, package_name, package_source, '
+        'package_version, supported_platform, supported_py_versions, uploaded_at, wheels',
+        [
+            (False, m_cls, 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
+             'sup_plat', [], now, []),
+            (False, 'arc_name', m_cls, 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
+             'sup_plat', [], now, []),
+            (False, 'arc_name', 'dis_name', m_cls, 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
+             'sup_plat', [], now, []),
+            (False, 'arc_name', 'dis_name', 'dis_rel', m_cls, 'pak_name', 'pak_src', 'pak_ver',
+             'sup_plat', [], now, []),
+            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', m_cls, 'pak_src', 'pak_ver',
+             'sup_plat', [], now, []),
+            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', m_cls, 'pak_ver',
+             'sup_plat', [], now, []),
+            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', m_cls,
+             'sup_plat', [], now, []),
+            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
+             'pak_ver', m_cls, [], now, []),
+            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
+             'pak_ver', 'sup_plat', m_cls, now, []),
+            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
+             'pak_ver', 'sup_plat', [], m_cls, []),
+            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
+             'pak_ver', 'sup_plat', [], now, m_cls),
+
+            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
+             'sup_plat', [], now, []),
+            (True, 'arc_name', None, 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
+             'sup_plat', [], now, []),
+            (True, 'arc_name', 'dis_name', None, 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
+             'sup_plat', [], now, []),
+            (True, 'arc_name', 'dis_name', 'dis_rel', None, 'pak_name', 'pak_src', 'pak_ver',
+             'sup_plat', [], now, []),
+            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
+             'pak_ver', 'sup_plat', [], now, []),
+            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', None, 'pak_ver',
+             'sup_plat', [], now, []),
+            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', None,
+             'sup_plat', [], now, []),
+            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
+             'pak_ver', None, [], now, []),
+            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
+             'pak_ver', 'sup_plat', None, now, []),
+            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
+             'pak_ver', 'sup_plat', [], now, []),
+        ]
+    )
+    def test_plugin_model_creation(self, empty_storage, is_valid, archive_name, distribution,
+                                   distribution_release, distribution_version, package_name,
+                                   package_source, package_version, supported_platform,
+                                   supported_py_versions, uploaded_at, wheels):
+        _test_model(is_valid=is_valid,
+                    storage=empty_storage,
+                    model_cls=Plugin,
+                    model_kwargs=dict(
+                        archive_name=archive_name,
+                        distribution=distribution,
+                        distribution_release=distribution_release,
+                        distribution_version=distribution_version,
+                        package_name=package_name,
+                        package_source=package_source,
+                        package_version=package_version,
+                        supported_platform=supported_platform,
+                        supported_py_versions=supported_py_versions,
+                        uploaded_at=uploaded_at,
+                        wheels=wheels,
+                    ))
+
+
+class TestTask(object):
+
+    @pytest.mark.parametrize(
+        'is_valid, status, due_at, started_at, ended_at, max_attempts, retry_count, '
+        'retry_interval, ignore_failure, name, operation_mapping, inputs, plugin_id',
+        [
+            (False, m_cls, now, now, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
+            (False, Task.STARTED, m_cls, now, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
+            (False, Task.STARTED, now, m_cls, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
+            (False, Task.STARTED, now, now, m_cls, 1, 1, 1, True, 'name', 'map', {}, '1'),
+            (False, Task.STARTED, now, now, now, m_cls, 1, 1, True, 'name', 'map', {}, '1'),
+            (False, Task.STARTED, now, now, now, 1, m_cls, 1, True, 'name', 'map', {}, '1'),
+            (False, Task.STARTED, now, now, now, 1, 1, m_cls, True, 'name', 'map', {}, '1'),
+            (False, Task.STARTED, now, now, now, 1, 1, 1, True, m_cls, 'map', {}, '1'),
+            (False, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', m_cls, {}, '1'),
+            (False, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', 'map', m_cls, '1'),
+            (False, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', 'map', {}, m_cls),
+            (False, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', 'map', None, '1'),
+
+            (True, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
+            (True, Task.STARTED, None, now, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
+            (True, Task.STARTED, now, None, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
+            (True, Task.STARTED, now, now, None, 1, 1, 1, True, 'name', 'map', {}, '1'),
+            (True, Task.STARTED, now, now, now, 1, None, 1, True, 'name', 'map', {}, '1'),
+            (True, Task.STARTED, now, now, now, 1, 1, None, True, 'name', 'map', {}, '1'),
+            (True, Task.STARTED, now, now, now, 1, 1, 1, None, 'name', 'map', {}, '1'),
+            (True, Task.STARTED, now, now, now, 1, 1, 1, True, None, 'map', {}, '1'),
+            (True, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', None, {}, '1'),
+            (True, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', 'map', {}, None),
+        ]
+    )
+    def test_task_model_creation(self, execution_storage, is_valid, status, due_at, started_at,
+                                 ended_at, max_attempts, retry_count, retry_interval,
+                                 ignore_failure, name, operation_mapping, inputs, plugin_id):
+        task = _test_model(
+            is_valid=is_valid,
+            storage=execution_storage,
+            model_cls=Task,
+            model_kwargs=dict(
+                status=status,
+                execution=execution_storage.execution.list()[0],
+                due_at=due_at,
+                started_at=started_at,
+                ended_at=ended_at,
+                max_attempts=max_attempts,
+                retry_count=retry_count,
+                retry_interval=retry_interval,
+                ignore_failure=ignore_failure,
+                name=name,
+                implementation=operation_mapping,
+                inputs=inputs,
+                plugin_fk=plugin_id,
+            ))
+        if is_valid:
+            assert task.execution == execution_storage.execution.list()[0]
+            if task.plugin:
+                assert task.plugin == execution_storage.plugin.list()[0]
+
+    def test_task_max_attempts_validation(self):
+        def create_task(max_attempts):
+            Task(execution_fk='eid',
+                 name='name',
+                 implementation='',
+                 inputs={},
+                 max_attempts=max_attempts)
+        create_task(max_attempts=1)
+        create_task(max_attempts=2)
+        create_task(max_attempts=Task.INFINITE_RETRIES)
+        with pytest.raises(ValueError):
+            create_task(max_attempts=0)
+        with pytest.raises(ValueError):
+            create_task(max_attempts=-2)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/context/__init__.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/__init__.py b/tests/orchestrator/context/__init__.py
index ea0fea9..4fde0a7 100644
--- a/tests/orchestrator/context/__init__.py
+++ b/tests/orchestrator/context/__init__.py
@@ -23,10 +23,6 @@ def op_path(func, module_path=None):
     return '{0}.{1}'.format(module_path, func.__name__)
 
 
-def op_name(actor, operation_name):
-    return '{name}.{actor.id}'.format(name=operation_name, actor=actor)
-
-
 def execute(workflow_func, workflow_context, executor):
     graph = workflow_func(ctx=workflow_context)
     eng = engine.Engine(executor=executor, workflow_context=workflow_context, tasks_graph=graph)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index b49b1cb..05c9656 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -31,7 +31,6 @@ import tests
 from tests import mock, storage
 from . import (
     op_path,
-    op_name,
     execute,
 )
 
@@ -67,40 +66,54 @@ def thread_executor():
 
 
 def test_node_operation_task_execution(ctx, thread_executor):
-    operation_name = 'aria.interfaces.lifecycle.create'
+    interface_name = 'Standard'
+    operation_name = 'create'
 
-    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-    interface = mock.models.get_interface(
+    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    interface = mock.models.create_interface(
+        node.service,
+        interface_name,
         operation_name,
         operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__))
     )
-    node.interfaces = [interface]
+    node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
     inputs = {'putput': True}
 
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
-            api.task.OperationTask.node(
-                name=operation_name,
-                instance=node,
+            api.task.OperationTask.for_node(
+                interface_name=interface_name,
+                operation_name=operation_name,
+                node=node,
                 inputs=inputs
             )
         )
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
 
-    operation_context = global_test_holder[op_name(node, operation_name)]
+    operation_context = global_test_holder[api.task.OperationTask.NAME_FORMAT.format(
+        type='node',
+        name=node.name,
+        interface=interface_name,
+        operation=operation_name
+    )]
 
     assert isinstance(operation_context, context.operation.NodeOperationContext)
 
     # Task bases assertions
     assert operation_context.task.actor == node
-    assert operation_context.task.name == op_name(node, operation_name)
-    operations = interface.operations.filter_by(name=operation_name)                                # pylint: disable=no-member
-    assert operations.count() == 1
-    assert operation_context.task.implementation == operations[0].implementation
-    assert operation_context.task.inputs == inputs
+    assert operation_context.task.name == api.task.OperationTask.NAME_FORMAT.format(
+        type='node',
+        name=node.name,
+        interface=interface_name,
+        operation=operation_name
+    )
+    operations = interface.operations
+    assert len(operations) == 1
+    assert operation_context.task.implementation == operations.values()[0].implementation           # pylint: disable=no-member
+    assert operation_context.task.inputs['putput'].value is True
 
     # Context based attributes (sugaring)
     assert operation_context.node_template == node.node_template
@@ -108,49 +121,57 @@ def test_node_operation_task_execution(ctx, thread_executor):
 
 
 def test_relationship_operation_task_execution(ctx, thread_executor):
-    operation_name = 'aria.interfaces.relationship_lifecycle.post_configure'
-    relationship = ctx.model.relationship.list()[0]
+    interface_name = 'Configure'
+    operation_name = 'post_configure'
 
-    interface = mock.models.get_interface(
-        operation_name=operation_name,
+    relationship = ctx.model.relationship.list()[0]
+    interface = mock.models.create_interface(
+        relationship.source_node.service,
+        interface_name,
+        operation_name,
         operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__)),
-        edge='source'
     )
 
-    relationship.interfaces = [interface]
+    relationship.interfaces[interface.name] = interface
     ctx.model.relationship.update(relationship)
     inputs = {'putput': True}
 
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
-            api.task.OperationTask.relationship(
-                instance=relationship,
-                name=operation_name,
-                inputs=inputs,
-                edge='source'
+            api.task.OperationTask.for_relationship(
+                relationship=relationship,
+                interface_name=interface_name,
+                operation_name=operation_name,
+                inputs=inputs
             )
         )
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
 
-    operation_context = global_test_holder[op_name(relationship,
-                                                   operation_name)]
+    operation_context = global_test_holder[api.task.OperationTask.NAME_FORMAT.format(
+        type='relationship',
+        name=relationship.name,
+        interface=interface_name,
+        operation=operation_name
+    )]
 
     assert isinstance(operation_context, context.operation.RelationshipOperationContext)
 
     # Task bases assertions
     assert operation_context.task.actor == relationship
-    assert operation_context.task.name.startswith(operation_name)
-    operation = interface.operations.filter_by(name=operation_name)                                 # pylint: disable=no-member
-    assert operation_context.task.implementation == operation.all()[0].implementation
-    assert operation_context.task.inputs == inputs
+    assert interface_name in operation_context.task.name
+    operations = interface.operations
+    assert operation_context.task.implementation == operations.values()[0].implementation           # pylint: disable=no-member
+    assert operation_context.task.inputs['putput'].value is True
 
     # Context based attributes (sugaring)
-    dependency_node_template = ctx.model.node_template.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-    dependency_node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-    dependent_node_template = ctx.model.node_template.get_by_name(mock.models.DEPENDENT_NODE_NAME)
-    dependent_node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_INSTANCE_NAME)
+    dependency_node_template = ctx.model.node_template.get_by_name(
+        mock.models.DEPENDENCY_NODE_TEMPLATE_NAME)
+    dependency_node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    dependent_node_template = ctx.model.node_template.get_by_name(
+        mock.models.DEPENDENT_NODE_TEMPLATE_NAME)
+    dependent_node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
 
     assert operation_context.target_node_template == dependency_node_template
     assert operation_context.target_node == dependency_node
@@ -162,47 +183,65 @@ def test_relationship_operation_task_execution(ctx, thread_executor):
 def test_invalid_task_operation_id(ctx, thread_executor):
     """
     Checks that the right id is used. The task created with id == 1, thus running the task on
-    node_instance with id == 2. will check that indeed the node_instance uses the correct id.
+    node with id == 2, and verifies that the node indeed uses the correct id.
     :param ctx:
     :param thread_executor:
     :return:
     """
-    operation_name = 'aria.interfaces.lifecycle.create'
+    interface_name = 'Standard'
+    operation_name = 'create'
+
     other_node, node = ctx.model.node.list()
     assert other_node.id == 1
     assert node.id == 2
 
-    interface = mock.models.get_interface(
+    interface = mock.models.create_interface(
+        node.service,
+        interface_name=interface_name,
         operation_name=operation_name,
-        operation_kwargs=dict(implementation=op_path(get_node_instance_id, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(get_node_id, module_path=__name__))
     )
-    node.interfaces = [interface]
+    node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
 
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
-            api.task.OperationTask.node(name=operation_name, instance=node)
+            api.task.OperationTask.for_node(node=node,
+                                            interface_name=interface_name,
+                                            operation_name=operation_name)
         )
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
 
-    op_node_instance_id = global_test_holder[op_name(node, operation_name)]
-    assert op_node_instance_id == node.id
-    assert op_node_instance_id != other_node.id
+    op_node_id = global_test_holder[api.task.OperationTask.NAME_FORMAT.format(
+        type='node',
+        name=node.name,
+        interface=interface_name,
+        operation=operation_name
+    )]
+    assert op_node_id == node.id
+    assert op_node_id != other_node.id
 
 
 def test_plugin_workdir(ctx, thread_executor, tmpdir):
-    op = 'test.op'
-    plugin_name = 'mock_plugin'
-    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-    node.interfaces = [mock.models.get_interface(
-        op,
+    interface_name = 'Standard'
+    operation_name = 'create'
+
+    plugin = mock.models.create_plugin()
+    ctx.model.plugin.put(plugin)
+    plugin_specification = mock.models.create_plugin_specification()
+    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    interface = mock.models.create_interface(
+        node.service,
+        interface_name,
+        operation_name,
         operation_kwargs=dict(
             implementation='{0}.{1}'.format(__name__, _test_plugin_workdir.__name__),
-            plugin=plugin_name)
-    )]
-    node.plugins = [{'name': plugin_name}]
+            plugin_specification=plugin_specification)
+    )
+    node.interfaces[interface.name] = interface
+    node.plugin_specifications[plugin_specification.name] = plugin_specification
     ctx.model.node.update(node)
 
     filename = 'test_file'
@@ -211,12 +250,14 @@ def test_plugin_workdir(ctx, thread_executor, tmpdir):
 
     @workflow
     def basic_workflow(graph, **_):
-        graph.add_tasks(api.task.OperationTask.node(
-            name=op, instance=node, inputs=inputs))
+        graph.add_tasks(api.task.OperationTask.for_node(node=node,
+                                                        interface_name=interface_name,
+                                                        operation_name=operation_name,
+                                                        inputs=inputs))
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
-    expected_file = tmpdir.join('workdir', 'plugins', str(ctx.service_instance.id),
-                                plugin_name,
+    expected_file = tmpdir.join('workdir', 'plugins', str(ctx.service.id),
+                                plugin.name,
                                 filename)
     assert expected_file.read() == content
 
@@ -235,14 +276,17 @@ def executor(request):
 
 
 def test_node_operation_logging(ctx, executor):
-    operation_name = mock.operations.NODE_OPERATIONS_INSTALL[0]
+    interface_name, operation_name = mock.operations.NODE_OPERATIONS_INSTALL[0]
 
-    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-    interface = mock.models.get_interface(
+    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    interface = mock.models.create_interface(
+        node.service,
+        interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(logged_operation, module_path=__name__))
+        operation_kwargs=dict(
+            implementation=op_path(logged_operation, module_path=__name__))
     )
-    node.interfaces = [interface]
+    node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
 
     inputs = {
@@ -253,9 +297,10 @@ def test_node_operation_logging(ctx, executor):
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
-            api.task.OperationTask.node(
-                name=operation_name,
-                instance=node,
+            api.task.OperationTask.for_node(
+                interface_name=interface_name,
+                operation_name=operation_name,
+                node=node,
                 inputs=inputs
             )
         )
@@ -265,14 +310,16 @@ def test_node_operation_logging(ctx, executor):
 
 
 def test_relationship_operation_logging(ctx, executor):
-    operation_name = mock.operations.RELATIONSHIP_OPERATIONS_INSTALL[0].rsplit('_', 1)[0]
+    interface_name, operation_name = mock.operations.RELATIONSHIP_OPERATIONS_INSTALL[0]
 
     relationship = ctx.model.relationship.list()[0]
-    relationship.interfaces = [mock.models.get_interface(
+    interface = mock.models.create_interface(
+        relationship.source_node.service,
+        interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(logged_operation, module_path=__name__)),
-        edge='source'
-    )]
+        operation_kwargs=dict(implementation=op_path(logged_operation, module_path=__name__))
+    )
+    relationship.interfaces[interface.name] = interface
     ctx.model.relationship.update(relationship)
 
     inputs = {
@@ -283,11 +330,11 @@ def test_relationship_operation_logging(ctx, executor):
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
-            api.task.OperationTask.relationship(
-                name=operation_name,
-                instance=relationship,
-                inputs=inputs,
-                edge='source',
+            api.task.OperationTask.for_relationship(
+                interface_name=interface_name,
+                operation_name=operation_name,
+                relationship=relationship,
+                inputs=inputs
             )
         )
 
@@ -316,11 +363,13 @@ def _assert_loggins(ctx, inputs):
     assert all(l.execution == execution for l in logs)
     assert all(l in logs and l.task == task for l in task.logs)
 
-    op_start_log = [l for l in logs if inputs['op_start'] in l.msg and l.level.lower() == 'info']
+    op_start_log = [l for l in logs if
+                    inputs['op_start'].value in l.msg and l.level.lower() == 'info']
     assert len(op_start_log) == 1
     op_start_log = op_start_log[0]
 
-    op_end_log = [l for l in logs if inputs['op_end'] in l.msg and l.level.lower() == 'debug']
+    op_end_log = [l for l in logs
+                  if inputs['op_end'].value in l.msg and l.level.lower() == 'debug']
     assert len(op_end_log) == 1
     op_end_log = op_end_log[0]
 
@@ -341,7 +390,7 @@ def basic_operation(ctx, **_):
 
 
 @operation
-def get_node_instance_id(ctx, **_):
+def get_node_id(ctx, **_):
     global_test_holder[ctx.name] = ctx.node.id
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/context/test_resource_render.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_resource_render.py b/tests/orchestrator/context/test_resource_render.py
index aef439c..696e9b3 100644
--- a/tests/orchestrator/context/test_resource_render.py
+++ b/tests/orchestrator/context/test_resource_render.py
@@ -17,7 +17,7 @@ import pytest
 
 from tests import mock, storage
 
-_IMPLICIT_CTX_TEMPLATE = '{{ctx.service_instance.name}}'
+_IMPLICIT_CTX_TEMPLATE = '{{ctx.service.name}}'
 _IMPLICIT_CTX_TEMPLATE_PATH = 'implicit-ctx.template'
 _VARIABLES_TEMPLATE = '{{variable}}'
 _VARIABLES_TEMPLATE_PATH = 'variables.template'
@@ -25,7 +25,7 @@ _VARIABLES_TEMPLATE_PATH = 'variables.template'
 
 def test_get_resource_and_render_implicit_ctx_no_variables(ctx):
     content = ctx.get_resource_and_render(_IMPLICIT_CTX_TEMPLATE_PATH)
-    assert content == mock.models.DEPLOYMENT_NAME
+    assert content == mock.models.SERVICE_NAME
 
 
 def test_get_resource_and_render_provided_variables(ctx):
@@ -39,7 +39,7 @@ def test_download_resource_and_render_implicit_ctx_no_variables(tmpdir, ctx):
     destination = tmpdir.join('destination')
     ctx.download_resource_and_render(destination=str(destination),
                                      path=_IMPLICIT_CTX_TEMPLATE_PATH)
-    assert destination.read() == mock.models.DEPLOYMENT_NAME
+    assert destination.read() == mock.models.SERVICE_NAME
 
 
 def test_download_resource_and_render_provided_variables(tmpdir, ctx):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/context/test_serialize.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_serialize.py b/tests/orchestrator/context/test_serialize.py
index 03f9529..9a1250e 100644
--- a/tests/orchestrator/context/test_serialize.py
+++ b/tests/orchestrator/context/test_serialize.py
@@ -42,14 +42,20 @@ def test_serialize_operation_context(context, executor, tmpdir):
 
 @workflow
 def _mock_workflow(ctx, graph):
-    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-    plugin_name = 'mock_plugin'
-    node.interfaces = [mock.models.get_interface(
-        'test.op',
-        operation_kwargs=dict(implementation=_operation_mapping(), plugin=plugin_name)
-    )]
-    node.plugins = [{'name': plugin_name}]
-    task = api.task.OperationTask.node(instance=node, name='test.op')
+    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    plugin = mock.models.create_plugin()
+    ctx.model.plugin.put(plugin)
+    plugin_specification = mock.models.create_plugin_specification()
+    interface = mock.models.create_interface(
+        node.service,
+        'test',
+        'op',
+        operation_kwargs=dict(implementation=_operation_mapping(),
+                              plugin_specification=plugin_specification)
+    )
+    node.interfaces[interface.name] = interface
+    node.plugin_specifications[plugin_specification.name] = plugin_specification
+    task = api.task.OperationTask.for_node(node=node, interface_name='test', operation_name='op')
     graph.add_tasks(task)
     return graph
 
@@ -61,12 +67,12 @@ def _mock_operation(ctx):
     # a correct ctx.task.implementation tells us we kept the correct task_id
     assert ctx.task.implementation == _operation_mapping()
     # a correct ctx.node.name tells us we kept the correct actor_id
-    assert ctx.node.name == mock.models.DEPENDENCY_NODE_INSTANCE_NAME
+    assert ctx.node.name == mock.models.DEPENDENCY_NODE_NAME
     # a correct ctx.name tells us we kept the correct name
     assert ctx.name is not None
     assert ctx.name == ctx.task.name
     # a correct ctx.deployment.name tells us we kept the correct deployment_id
-    assert ctx.service_instance.name == mock.models.DEPLOYMENT_NAME
+    assert ctx.service.name == mock.models.SERVICE_NAME
     # Here we test that the resource storage was properly re-created
     test_file_content = ctx.resource.blueprint.read(TEST_FILE_ENTRY_ID, TEST_FILE_NAME)
     assert test_file_content == TEST_FILE_CONTENT

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index 28bd3d3..cf82127 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -24,7 +24,6 @@ from aria.orchestrator.context.toolbelt import RelationshipToolBelt
 from tests import mock, storage
 from . import (
     op_path,
-    op_name,
     execute,
 )
 
@@ -49,22 +48,22 @@ def executor():
 
 def _get_elements(workflow_context):
     dependency_node_template = workflow_context.model.node_template.get_by_name(
-        mock.models.DEPENDENCY_NODE_NAME)
+        mock.models.DEPENDENCY_NODE_TEMPLATE_NAME)
     dependency_node_template.host = dependency_node_template
     workflow_context.model.node.update(dependency_node_template)
 
     dependency_node = workflow_context.model.node.get_by_name(
-        mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        mock.models.DEPENDENCY_NODE_NAME)
     dependency_node.host_fk = dependency_node.id
     workflow_context.model.node.update(dependency_node)
 
     dependent_node_template = workflow_context.model.node_template.get_by_name(
-        mock.models.DEPENDENT_NODE_NAME)
+        mock.models.DEPENDENT_NODE_TEMPLATE_NAME)
     dependent_node_template.host = dependency_node_template
     workflow_context.model.node_template.update(dependent_node_template)
 
     dependent_node = workflow_context.model.node.get_by_name(
-        mock.models.DEPENDENT_NODE_INSTANCE_NAME)
+        mock.models.DEPENDENT_NODE_NAME)
     dependent_node.host = dependent_node
     workflow_context.model.node.update(dependent_node)
 
@@ -74,21 +73,26 @@ def _get_elements(workflow_context):
 
 
 def test_host_ip(workflow_context, executor):
-    operation_name = 'aria.interfaces.lifecycle.create'
+    interface_name = 'Standard'
+    operation_name = 'create'
     _, dependency_node, _, _, _ = _get_elements(workflow_context)
-    dependency_node.interfaces = [mock.models.get_interface(
-        operation_name,
+    interface = mock.models.create_interface(
+        dependency_node.service,
+        interface_name=interface_name,
+        operation_name=operation_name,
         operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__))
-    )]
+    )
+    dependency_node.interfaces[interface.name] = interface
     workflow_context.model.node.update(dependency_node)
     inputs = {'putput': True}
 
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
-            api.task.OperationTask.node(
-                instance=dependency_node,
-                name=operation_name,
+            api.task.OperationTask.for_node(
+                node=dependency_node,
+                interface_name=interface_name,
+                operation_name=operation_name,
                 inputs=inputs
             )
         )
@@ -99,15 +103,16 @@ def test_host_ip(workflow_context, executor):
 
 
 def test_relationship_tool_belt(workflow_context, executor):
-    operation_name = 'aria.interfaces.relationship_lifecycle.post_configure'
+    interface_name = 'Configure'
+    operation_name = 'post_configure'
     _, _, _, _, relationship = _get_elements(workflow_context)
-    relationship.interfaces = [
-        mock.models.get_interface(
-            operation_name,
-            operation_kwargs=dict(
-                implementation=op_path(relationship_operation, module_path=__name__)),
-            edge='source')
-    ]
+    interface = mock.models.create_interface(
+        relationship.source_node.service,
+        interface_name=interface_name,
+        operation_name=operation_name,
+        operation_kwargs=dict(implementation=op_path(relationship_operation, module_path=__name__))
+    )
+    relationship.interfaces[interface.name] = interface
     workflow_context.model.relationship.update(relationship)
 
     inputs = {'putput': True}
@@ -115,18 +120,22 @@ def test_relationship_tool_belt(workflow_context, executor):
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
-            api.task.OperationTask.relationship(
-                instance=relationship,
-                name=operation_name,
-                edge='source',
+            api.task.OperationTask.for_relationship(
+                relationship=relationship,
+                interface_name=interface_name,
+                operation_name=operation_name,
                 inputs=inputs
             )
         )
 
     execute(workflow_func=basic_workflow, workflow_context=workflow_context, executor=executor)
 
-    assert isinstance(global_test_holder.get(op_name(relationship, operation_name)),
-                      RelationshipToolBelt)
+    assert isinstance(global_test_holder.get(api.task.OperationTask.NAME_FORMAT.format(
+        type='relationship',
+        name=relationship.name,
+        interface=interface_name,
+        operation=operation_name
+    )), RelationshipToolBelt)
 
 
 def test_wrong_model_toolbelt():

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/context/test_workflow.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_workflow.py b/tests/orchestrator/context/test_workflow.py
index 4e8eca4..fa1f387 100644
--- a/tests/orchestrator/context/test_workflow.py
+++ b/tests/orchestrator/context/test_workflow.py
@@ -29,11 +29,11 @@ class TestWorkflowContext(object):
     def test_execution_creation_on_workflow_context_creation(self, storage):
         ctx = self._create_ctx(storage)
         execution = storage.execution.get(ctx.execution.id)             # pylint: disable=no-member
-        assert execution.service_instance == storage.service_instance.get_by_name(
-            models.DEPLOYMENT_NAME)
+        assert execution.service == storage.service.get_by_name(
+            models.SERVICE_NAME)
         assert execution.workflow_name == models.WORKFLOW_NAME
         assert execution.service_template == storage.service_template.get_by_name(
-            models.BLUEPRINT_NAME)
+            models.SERVICE_TEMPLATE_NAME)
         assert execution.status == storage.execution.model_cls.PENDING
         assert execution.parameters == {}
         assert execution.created_at <= datetime.utcnow()
@@ -53,7 +53,7 @@ class TestWorkflowContext(object):
             name='simple_context',
             model_storage=storage,
             resource_storage=None,
-            service_instance_id=storage.service_instance.get_by_name(models.DEPLOYMENT_NAME).id,
+            service_id=storage.service.get_by_name(models.SERVICE_NAME).id,
             workflow_name=models.WORKFLOW_NAME,
             task_max_attempts=models.TASK_MAX_ATTEMPTS,
             task_retry_interval=models.TASK_RETRY_INTERVAL
@@ -64,8 +64,8 @@ class TestWorkflowContext(object):
 def storage():
     workflow_storage = application_model_storage(
         sql_mapi.SQLAlchemyModelAPI, initiator=test_storage.init_inmemory_model_storage)
-    workflow_storage.service_template.put(models.get_blueprint())
-    blueprint = workflow_storage.service_template.get_by_name(models.BLUEPRINT_NAME)
-    workflow_storage.service_instance.put(models.get_deployment(blueprint))
+    workflow_storage.service_template.put(models.create_service_template())
+    service_template = workflow_storage.service_template.get_by_name(models.SERVICE_TEMPLATE_NAME)
+    workflow_storage.service.put(models.create_service(service_template))
     yield workflow_storage
     test_storage.release_sqlite_storage(workflow_storage)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/execution_plugin/test_common.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_common.py b/tests/orchestrator/execution_plugin/test_common.py
index 151b996..dd1e9fb 100644
--- a/tests/orchestrator/execution_plugin/test_common.py
+++ b/tests/orchestrator/execution_plugin/test_common.py
@@ -18,7 +18,7 @@ from collections import namedtuple
 import requests
 import pytest
 
-from aria.storage.modeling import model
+from aria.modeling import models
 from aria.orchestrator import exceptions
 from aria.orchestrator.execution_plugin import common
 
@@ -35,7 +35,7 @@ class TestDownloadScript(object):
 
     def _test_url(self, url):
         class Ctx(object):
-            task = model.Task
+            task = models.Task
 
         script_path = url
         result = common.download_script(Ctx, script_path)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/execution_plugin/test_local.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_local.py b/tests/orchestrator/execution_plugin/test_local.py
index a94fc83..e3612cf 100644
--- a/tests/orchestrator/execution_plugin/test_local.py
+++ b/tests/orchestrator/execution_plugin/test_local.py
@@ -463,7 +463,7 @@ if __name__ == '__main__':
         script_path = os.path.basename(local_script_path) if local_script_path else None
         if script_path:
             workflow_context.resource.deployment.upload(
-                entry_id=str(workflow_context.service_instance.id),
+                entry_id=str(workflow_context.service.id),
                 source=local_script_path,
                 path=script_path)
 
@@ -476,17 +476,20 @@ if __name__ == '__main__':
 
         @workflow
         def mock_workflow(ctx, graph):
-            op = 'test.op'
-            node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-            node.interfaces = [mock.models.get_interface(
-                op,
+            node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+            interface = mock.models.create_interface(
+                node.service,
+                'test',
+                'op',
                 operation_kwargs=dict(implementation='{0}.{1}'.format(
                     operations.__name__,
                     operations.run_script_locally.__name__))
-            )]
-            graph.add_tasks(api.task.OperationTask.node(
-                instance=node,
-                name=op,
+            )
+            node.interfaces[interface.name] = interface
+            graph.add_tasks(api.task.OperationTask.for_node(
+                node=node,
+                interface_name='test',
+                operation_name='op',
                 inputs=inputs))
             return graph
         tasks_graph = mock_workflow(ctx=workflow_context)  # pylint: disable=no-value-for-parameter
@@ -496,7 +499,7 @@ if __name__ == '__main__':
             tasks_graph=tasks_graph)
         eng.execute()
         return workflow_context.model.node.get_by_name(
-            mock.models.DEPENDENCY_NODE_INSTANCE_NAME).runtime_properties
+            mock.models.DEPENDENCY_NODE_NAME).runtime_properties
 
     @pytest.fixture
     def executor(self):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/execution_plugin/test_ssh.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_ssh.py b/tests/orchestrator/execution_plugin/test_ssh.py
index ad577f0..dd36466 100644
--- a/tests/orchestrator/execution_plugin/test_ssh.py
+++ b/tests/orchestrator/execution_plugin/test_ssh.py
@@ -24,7 +24,7 @@ import fabric.api
 from fabric.contrib import files
 from fabric import context_managers
 
-from aria.storage.modeling import model
+from aria.modeling import models
 from aria.orchestrator import events
 from aria.orchestrator import workflow
 from aria.orchestrator.workflows import api
@@ -124,10 +124,10 @@ class TestWithActualSSHServer(object):
 
     def test_run_script_download_resource_and_render(self, tmpdir):
         resource = tmpdir.join('resource')
-        resource.write('{{ctx.service_instance.name}}')
+        resource.write('{{ctx.service.name}}')
         self._upload(str(resource), 'test_resource')
         props = self._execute()
-        assert props['test_value'] == self._workflow_context.service_instance.name
+        assert props['test_value'] == self._workflow_context.service.name
 
     @pytest.mark.parametrize('value', ['string-value', [1, 2, 3], {'key': 'value'}])
     def test_run_script_inputs_as_env_variables_no_override(self, value):
@@ -216,15 +216,20 @@ class TestWithActualSSHServer(object):
 
         @workflow
         def mock_workflow(ctx, graph):
-            op = 'test.op'
-            node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-            node.interfaces = [mock.models.get_interface(
-                op,
-                dict(implementation='{0}.{1}'.format(operations.__name__, operation.__name__))
-            )]
-            graph.sequence(*[api.task.OperationTask.node(
-                instance=node,
-                name=op,
+            node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+            interface = mock.models.create_interface(
+                node.service,
+                'test',
+                'op',
+                operation_kwargs=dict(implementation='{0}.{1}'.format(
+                    operations.__name__,
+                    operation.__name__))
+            )
+            node.interfaces[interface.name] = interface
+            graph.sequence(*[api.task.OperationTask.for_node(
+                node=node,
+                interface_name='test',
+                operation_name='op',
                 inputs={
                     'script_path': script_path,
                     'fabric_env': _FABRIC_ENV,
@@ -243,7 +248,7 @@ class TestWithActualSSHServer(object):
             tasks_graph=tasks_graph)
         eng.execute()
         return self._workflow_context.model.node.get_by_name(
-            mock.models.DEPENDENCY_NODE_INSTANCE_NAME).runtime_properties
+            mock.models.DEPENDENCY_NODE_NAME).runtime_properties
 
     def _execute_and_get_task_exception(self, *args, **kwargs):
         signal = events.on_failure_task_signal
@@ -254,7 +259,7 @@ class TestWithActualSSHServer(object):
 
     def _upload(self, source, path):
         self._workflow_context.resource.deployment.upload(
-            entry_id=str(self._workflow_context.service_instance.id),
+            entry_id=str(self._workflow_context.service.id),
             source=source,
             path=path)
 
@@ -407,7 +412,7 @@ class TestFabricEnvHideGroupsAndRunCommands(object):
         class Stub(object):
             @staticmethod
             def abort(message=None):
-                model.Task.abort(message)
+                models.Task.abort(message)
             ip = None
         task = Stub
         task.runs_on = Stub

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/test_runner.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/test_runner.py b/tests/orchestrator/test_runner.py
index 17230df..74e98ad 100644
--- a/tests/orchestrator/test_runner.py
+++ b/tests/orchestrator/test_runner.py
@@ -49,8 +49,9 @@ def test_runner_tasks():
     def workflow_fn(ctx, graph):
         for node in ctx.model.node:
             graph.add_tasks(
-                OperationTask.node(instance=node,
-                                   name='tosca.interfaces.node.lifecycle.Standard.create'))
+                OperationTask.for_node(node=node,
+                                       interface_name='Standard',
+                                       operation_name='create'))
 
     _test_runner(workflow_fn)
 
@@ -69,5 +70,5 @@ def _test_runner(workflow_fn):
                     workflow_fn=workflow_fn,
                     inputs={},
                     initialize_model_storage_fn=_initialize_model_storage_fn,
-                    service_instance_id=1)
+                    service_id_fn=lambda: 1)
     runner.run()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/workflows/api/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/api/test_task.py b/tests/orchestrator/workflows/api/test_task.py
index 79618c1..b635a88 100644
--- a/tests/orchestrator/workflows/api/test_task.py
+++ b/tests/orchestrator/workflows/api/test_task.py
@@ -18,7 +18,8 @@ import pytest
 
 from aria.orchestrator import context
 from aria.orchestrator.workflows import api
-from aria.storage.modeling import model
+from aria.modeling import models
+
 from tests import mock, storage
 
 
@@ -30,7 +31,7 @@ def ctx(tmpdir):
     :return:
     """
     simple_context = mock.context.simple(str(tmpdir), inmemory=False)
-    simple_context.model.execution.put(mock.models.get_execution(simple_context.service_instance))
+    simple_context.model.execution.put(mock.models.create_execution(simple_context.service))
     yield simple_context
     storage.release_sqlite_storage(simple_context.model)
 
@@ -38,133 +39,183 @@ def ctx(tmpdir):
 class TestOperationTask(object):
 
     def test_node_operation_task_creation(self, ctx):
-        operation_name = 'aria.interfaces.lifecycle.create'
-        interface = mock.models.get_interface(
+        interface_name = 'test_interface'
+        operation_name = 'create'
+
+        plugin = mock.models.create_plugin('package', '0.1')
+        ctx.model.node.update(plugin)
+
+        plugin_specification = mock.models.create_plugin_specification('package', '0.1')
+
+        interface = mock.models.create_interface(
+            ctx.service,
+            interface_name,
             operation_name,
-            operation_kwargs=dict(plugin='plugin', implementation='op_path'))
-
-        node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_INSTANCE_NAME)
-        node.interfaces = [interface]
-        node.plugins = [{'name': 'plugin',
-                         'package_name': 'package',
-                         'package_version': '0.1'}]
-        ctx.model.node_template.update(node)
-        inputs = {'name': True}
+            operation_kwargs=dict(plugin_specification=plugin_specification,
+                                  implementation='op_path'))
+
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
+        node.interfaces[interface_name] = interface
+        node.plugin_specifications[plugin_specification.name] = plugin_specification
+        ctx.model.node.update(node)
+        inputs = {'test_input': True}
         max_attempts = 10
         retry_interval = 10
         ignore_failure = True
 
         with context.workflow.current.push(ctx):
-            api_task = api.task.OperationTask.node(
-                name=operation_name,
-                instance=node,
+            api_task = api.task.OperationTask.for_node(
+                node=node,
+                interface_name=interface_name,
+                operation_name=operation_name,
                 inputs=inputs,
                 max_attempts=max_attempts,
                 retry_interval=retry_interval,
                 ignore_failure=ignore_failure)
 
-        assert api_task.name == '{0}.{1}'.format(operation_name, node.id)
+        assert api_task.name == api.task.OperationTask.NAME_FORMAT.format(
+            type='node',
+            name=node.name,
+            interface=interface_name,
+            operation=operation_name
+        )
         assert api_task.implementation == 'op_path'
         assert api_task.actor == node
-        assert api_task.inputs == inputs
+        assert api_task.inputs['test_input'].value is True
         assert api_task.retry_interval == retry_interval
         assert api_task.max_attempts == max_attempts
         assert api_task.ignore_failure == ignore_failure
-        assert api_task.plugin == {'name': 'plugin',
-                                   'package_name': 'package',
-                                   'package_version': '0.1'}
-        assert api_task.runs_on == model.Task.RUNS_ON_NODE_INSTANCE
+        assert api_task.plugin.name == 'test_plugin'
+        assert api_task.runs_on == models.Task.RUNS_ON_NODE
 
     def test_source_relationship_operation_task_creation(self, ctx):
-        operation_name = 'aria.interfaces.relationship_lifecycle.preconfigure'
+        interface_name = 'test_interface'
+        operation_name = 'preconfigure'
 
-        interface = mock.models.get_interface(
+        plugin = mock.models.create_plugin('package', '0.1')
+        ctx.model.node.update(plugin)
+
+        plugin_specification = mock.models.create_plugin_specification('package', '0.1')
+
+        interface = mock.models.create_interface(
+            ctx.service,
+            interface_name,
             operation_name,
-            operation_kwargs=dict(implementation='op_path', plugin='plugin'),
-            edge='source'
+            operation_kwargs=dict(plugin_specification=plugin_specification,
+                                  implementation='op_path')
         )
 
         relationship = ctx.model.relationship.list()[0]
-        relationship.interfaces = [interface]
-        relationship.source_node.plugins = [{'name': 'plugin',
-                                             'package_name': 'package',
-                                             'package_version': '0.1'}]
-        inputs = {'name': True}
+        relationship.interfaces[interface.name] = interface
+        relationship.source_node.plugin_specifications[plugin_specification.name] = \
+            plugin_specification
+        inputs = {'test_input': True}
         max_attempts = 10
         retry_interval = 10
 
         with context.workflow.current.push(ctx):
-            api_task = api.task.OperationTask.relationship(
-                name=operation_name,
-                instance=relationship,
-                edge='source',
+            api_task = api.task.OperationTask.for_relationship(
+                relationship=relationship,
+                interface_name=interface_name,
+                operation_name=operation_name,
                 inputs=inputs,
                 max_attempts=max_attempts,
                 retry_interval=retry_interval)
 
-        assert api_task.name == '{0}.{1}'.format(operation_name, relationship.id)
+        assert api_task.name == api.task.OperationTask.NAME_FORMAT.format(
+            type='relationship',
+            name=relationship.name,
+            interface=interface_name,
+            operation=operation_name
+        )
         assert api_task.implementation == 'op_path'
         assert api_task.actor == relationship
-        assert api_task.inputs == inputs
+        assert api_task.inputs['test_input'].value is True
         assert api_task.retry_interval == retry_interval
         assert api_task.max_attempts == max_attempts
-        assert api_task.plugin == {'name': 'plugin',
-                                   'package_name': 'package',
-                                   'package_version': '0.1'}
-        assert api_task.runs_on == model.Task.RUNS_ON_SOURCE
+        assert api_task.plugin.name == 'test_plugin'
+        assert api_task.runs_on == models.Task.RUNS_ON_SOURCE
 
     def test_target_relationship_operation_task_creation(self, ctx):
-        operation_name = 'aria.interfaces.relationship_lifecycle.preconfigure'
-        interface = mock.models.get_interface(
+        interface_name = 'test_interface'
+        operation_name = 'preconfigure'
+
+        plugin = mock.models.create_plugin('package', '0.1')
+        ctx.model.node.update(plugin)
+
+        plugin_specification = mock.models.create_plugin_specification('package', '0.1')
+
+        interface = mock.models.create_interface(
+            ctx.service,
+            interface_name,
             operation_name,
-            operation_kwargs=dict(implementation='op_path', plugin='plugin'),
-            edge='target'
+            operation_kwargs=dict(plugin_specification=plugin_specification,
+                                  implementation='op_path')
         )
 
         relationship = ctx.model.relationship.list()[0]
-        relationship.interfaces = [interface]
-        relationship.target_node.plugins = [{'name': 'plugin',
-                                             'package_name': 'package',
-                                             'package_version': '0.1'}]
-        inputs = {'name': True}
+        relationship.interfaces[interface.name] = interface
+        relationship.target_node.plugin_specifications[plugin_specification.name] = \
+            plugin_specification
+        inputs = {'test_input': True}
         max_attempts = 10
         retry_interval = 10
 
         with context.workflow.current.push(ctx):
-            api_task = api.task.OperationTask.relationship(
-                name=operation_name,
-                instance=relationship,
-                edge='target',
+            api_task = api.task.OperationTask.for_relationship(
+                relationship=relationship,
+                interface_name=interface_name,
+                operation_name=operation_name,
                 inputs=inputs,
                 max_attempts=max_attempts,
-                retry_interval=retry_interval)
+                retry_interval=retry_interval,
+                runs_on=models.Task.RUNS_ON_TARGET)
 
-        assert api_task.name == '{0}.{1}'.format(operation_name, relationship.id)
+        assert api_task.name == api.task.OperationTask.NAME_FORMAT.format(
+            type='relationship',
+            name=relationship.name,
+            interface=interface_name,
+            operation=operation_name
+        )
         assert api_task.implementation == 'op_path'
         assert api_task.actor == relationship
-        assert api_task.inputs == inputs
+        assert api_task.inputs['test_input'].value is True
         assert api_task.retry_interval == retry_interval
         assert api_task.max_attempts == max_attempts
-        assert api_task.plugin == {'name': 'plugin',
-                                   'package_name': 'package',
-                                   'package_version': '0.1'}
-        assert api_task.runs_on == model.Task.RUNS_ON_TARGET
+        assert api_task.plugin.name == 'test_plugin'
+        assert api_task.runs_on == models.Task.RUNS_ON_TARGET
 
     def test_operation_task_default_values(self, ctx):
-        dependency_node_instance = ctx.model.node.get_by_name(
-            mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        interface_name = 'test_interface'
+        operation_name = 'create'
+
+        plugin = mock.models.create_plugin('package', '0.1')
+        ctx.model.node.update(plugin)
+
+        plugin_specification = mock.models.create_plugin_specification('package', '0.1')
+
+        dependency_node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+
+        interface = mock.models.create_interface(
+            ctx.service,
+            interface_name,
+            operation_name,
+            operation_kwargs=dict(plugin_specification=plugin_specification,
+                                  implementation='op_path'))
+        dependency_node.interfaces[interface_name] = interface
+
         with context.workflow.current.push(ctx):
-            task = api.task.OperationTask(
-                name='stub',
-                implementation='',
-                actor=dependency_node_instance)
+            task = api.task.OperationTask.for_node(
+                node=dependency_node,
+                interface_name=interface_name,
+                operation_name=operation_name)
 
         assert task.inputs == {}
         assert task.retry_interval == ctx._task_retry_interval
         assert task.max_attempts == ctx._task_max_attempts
         assert task.ignore_failure == ctx._task_ignore_failure
-        assert task.plugin == {}
-        assert task.runs_on is None
+        assert task.plugin is plugin
+        assert task.runs_on == models.Task.RUNS_ON_NODE
 
 
 class TestWorkflowTask(object):

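Taken together, the rewritten assertions in this file describe the new task-creation flow end to end. The following condensed sketch is illustrative only and not part of the commit; it reuses the tests/mock helpers and the ctx fixture exercised above, and folds the setup and the key assertions into one function:

from aria.modeling import models
from aria.orchestrator import context
from aria.orchestrator.workflows import api
from tests import mock


def make_create_task(ctx, interface_name='test_interface', operation_name='create'):
    # Plugins are first-class models now, referenced via a PluginSpecification
    # rather than a plain dict stored on the node.
    plugin = mock.models.create_plugin('package', '0.1')
    ctx.model.node.update(plugin)  # stored the same way the test above does
    plugin_specification = mock.models.create_plugin_specification('package', '0.1')

    # Interfaces are modeled objects created against the service and keyed by name.
    interface = mock.models.create_interface(
        ctx.service, interface_name, operation_name,
        operation_kwargs=dict(plugin_specification=plugin_specification,
                              implementation='op_path'))

    node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
    node.interfaces[interface_name] = interface
    node.plugin_specifications[plugin_specification.name] = plugin_specification
    ctx.model.node.update(node)

    with context.workflow.current.push(ctx):
        api_task = api.task.OperationTask.for_node(node=node,
                                                   interface_name=interface_name,
                                                   operation_name=operation_name)

    # Names follow OperationTask.NAME_FORMAT instead of '<operation>.<node id>',
    # and node tasks run on models.Task.RUNS_ON_NODE by default.
    assert api_task.name == api.task.OperationTask.NAME_FORMAT.format(
        type='node', name=node.name,
        interface=interface_name, operation=operation_name)
    assert api_task.runs_on == models.Task.RUNS_ON_NODE
    return api_task
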
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/workflows/builtin/test_execute_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_execute_operation.py b/tests/orchestrator/workflows/builtin/test_execute_operation.py
index cc9a8a8..360e17d 100644
--- a/tests/orchestrator/workflows/builtin/test_execute_operation.py
+++ b/tests/orchestrator/workflows/builtin/test_execute_operation.py
@@ -18,28 +18,33 @@ import pytest
 from aria.orchestrator.workflows.api import task
 from aria.orchestrator.workflows.builtin.execute_operation import execute_operation
 
-from tests import mock
-from tests import storage
+from tests import mock, storage
 
 
 @pytest.fixture
 def ctx(tmpdir):
-    context = mock.context.simple(str(tmpdir))
+    context = mock.context.simple(str(tmpdir), inmemory=False)
     yield context
     storage.release_sqlite_storage(context.model)
 
 
 def test_execute_operation(ctx):
-    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-    node.interfaces = [mock.models.get_interface(mock.operations.NODE_OPERATIONS_INSTALL[0])]
+    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    interface_name, operation_name = mock.operations.NODE_OPERATIONS_INSTALL[0]
+    interface = mock.models.create_interface(
+        ctx.service,
+        interface_name,
+        operation_name
+    )
+    node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
-    operation_name = mock.operations.NODE_OPERATIONS_INSTALL[0]
 
     execute_tasks = list(
         task.WorkflowTask(
             execute_operation,
             ctx=ctx,
-            operation=operation_name,
+            interface_name=interface_name,
+            operation_name=operation_name,
             operation_kwargs={},
             allow_kwargs_override=False,
             run_by_dependency_order=False,
@@ -50,8 +55,12 @@ def test_execute_operation(ctx):
     )
 
     assert len(execute_tasks) == 1
-    assert execute_tasks[0].name == '{0}.{1}'.format(operation_name, node.id)
-
+    assert execute_tasks[0].name == task.OperationTask.NAME_FORMAT.format(
+        type='node',
+        name=node.name,
+        interface=interface_name,
+        operation=operation_name
+    )
 
 
 # TODO: add more scenarios

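A quick, illustrative recap of what this test now sets up (not part of the commit; ctx is the fixture above): mock.operations.NODE_OPERATIONS_INSTALL holds (interface_name, operation_name) pairs rather than dotted names, interfaces are attached to the node as modeled objects, and the expected task name is built from the shared NAME_FORMAT template:

from aria.orchestrator.workflows.api import task
from tests import mock


def wire_install_operation(ctx):
    # NODE_OPERATIONS_INSTALL entries are now (interface_name, operation_name) pairs.
    interface_name, operation_name = mock.operations.NODE_OPERATIONS_INSTALL[0]
    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
    interface = mock.models.create_interface(ctx.service, interface_name, operation_name)
    node.interfaces[interface.name] = interface
    ctx.model.node.update(node)
    # Task names come from the shared template rather than hand-built
    # '<operation>.<node id>' strings.
    return task.OperationTask.NAME_FORMAT.format(type='node',
                                                 name=node.name,
                                                 interface=interface_name,
                                                 operation=operation_name)
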
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/tests/orchestrator/workflows/builtin/test_heal.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_heal.py b/tests/orchestrator/workflows/builtin/test_heal.py
index b553049..92fa7ea 100644
--- a/tests/orchestrator/workflows/builtin/test_heal.py
+++ b/tests/orchestrator/workflows/builtin/test_heal.py
@@ -33,11 +33,11 @@ def ctx(tmpdir):
 
 @pytest.mark.skip(reason='heal is not implemented for now')
 def test_heal_dependent_node(ctx):
-    dependent_node_instance = \
-        ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_INSTANCE_NAME)
-    dependent_node_instance.host_fk = dependent_node_instance.id
-    ctx.model.node.update(dependent_node_instance)
-    heal_graph = task.WorkflowTask(heal, ctx=ctx, node_instance_id=dependent_node_instance.id)
+    dependent_node = \
+        ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
+    dependent_node.host_fk = dependent_node.id
+    ctx.model.node.update(dependent_node)
+    heal_graph = task.WorkflowTask(heal, ctx=ctx, node_id=dependent_node.id)
 
     assert len(list(heal_graph.tasks)) == 2
     uninstall_subgraph, install_subgraph = list(heal_graph.topological_order(reverse=True))
@@ -63,11 +63,11 @@ def test_heal_dependent_node(ctx):
 
 @pytest.mark.skip(reason='heal is not implemented for now')
 def test_heal_dependency_node(ctx):
-    dependency_node_instance = \
-        ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-    dependency_node_instance.host_fk = dependency_node_instance.id
-    ctx.model.node.update(dependency_node_instance)
-    heal_graph = task.WorkflowTask(heal, ctx=ctx, node_instance_id=dependency_node_instance.id)
+    dependency_node = \
+        ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    dependency_node.host_fk = dependency_node.id
+    ctx.model.node.update(dependency_node)
+    heal_graph = task.WorkflowTask(heal, ctx=ctx, node_id=dependency_node.id)
     # both subgraphs should contain un\install for both the dependent and the dependency
     assert len(list(heal_graph.tasks)) == 2
     uninstall_subgraph, install_subgraph = list(heal_graph.topological_order(reverse=True))

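The heal tests only rename identifiers, but the rename is worth spelling out: the orchestrator's "node instance" terminology is folded into the Node model, so the heal workflow now takes node_id instead of node_instance_id. Illustrative call shape only (heal is still skipped as unimplemented; the import path is assumed by analogy with the execute_operation import above):

from aria.orchestrator.workflows.api import task
from aria.orchestrator.workflows.builtin.heal import heal  # path assumed, not shown in the hunk


def build_heal_graph(ctx, node):
    # Keyword renamed from node_instance_id to node_id.
    return task.WorkflowTask(heal, ctx=ctx, node_id=node.id)
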


[07/12] incubator-ariatosca git commit: ARIA-105 Integrate parser and orchestrator models

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/parser/modeling/model_elements.py
----------------------------------------------------------------------
diff --git a/aria/parser/modeling/model_elements.py b/aria/parser/modeling/model_elements.py
deleted file mode 100644
index 69da60e..0000000
--- a/aria/parser/modeling/model_elements.py
+++ /dev/null
@@ -1,1221 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import  # so we can import standard 'types'
-
-from types import FunctionType
-
-from ...utils.collections import StrictList, StrictDict, deepcopy_with_locators, OrderedDict
-from ...utils.formatting import as_raw, as_raw_list, as_raw_dict, as_agnostic, safe_repr
-from ...utils.console import puts
-from ..validation import Issue
-from .elements import ModelElement, Parameter
-from .instance_elements import (ServiceInstance, Node, Capability, Relationship, Artifact, Group,
-                                Policy, GroupPolicy, GroupPolicyTrigger, Mapping, Substitution,
-                                Interface, Operation)
-from .utils import (validate_dict_values, validate_list_values, coerce_dict_values,
-                    coerce_list_values, instantiate_dict, dump_list_values, dump_dict_values,
-                    dump_parameters, dump_interfaces)
-
-
-class ServiceModel(ModelElement):
-    """
-    A service model is a normalized blueprint from which :class:`ServiceInstance` instances
-    can be created.
-
-    It is usually created by various DSL parsers, such as ARIA's TOSCA extension. However, it
-    can also be created programmatically.
-
-    Properties:
-
-    * :code:`description`: Human-readable description
-    * :code:`metadata`: :class:`Metadata`
-    * :code:`node_templates`: Dict of :class:`NodeTemplate`
-    * :code:`group_templates`: Dict of :class:`GroupTemplate`
-    * :code:`policy_templates`: Dict of :class:`PolicyTemplate`
-    * :code:`substitution_template`: :class:`SubstitutionTemplate`
-    * :code:`inputs`: Dict of :class:`Parameter`
-    * :code:`outputs`: Dict of :class:`Parameter`
-    * :code:`operation_templates`: Dict of :class:`Operation`
-    """
-
-    def __init__(self):
-        self.description = None
-        self.metadata = None
-        self.node_templates = StrictDict(key_class=basestring, value_class=NodeTemplate)
-        self.group_templates = StrictDict(key_class=basestring, value_class=GroupTemplate)
-        self.policy_templates = StrictDict(key_class=basestring, value_class=PolicyTemplate)
-        self.substitution_template = None
-        self.inputs = StrictDict(key_class=basestring, value_class=Parameter)
-        self.outputs = StrictDict(key_class=basestring, value_class=Parameter)
-        self.operation_templates = StrictDict(key_class=basestring, value_class=OperationTemplate)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('description', self.description),
-            ('metadata', as_raw(self.metadata)),
-            ('node_templates', as_raw_list(self.node_templates)),
-            ('group_templates', as_raw_list(self.group_templates)),
-            ('policy_templates', as_raw_list(self.policy_templates)),
-            ('substitution_template', as_raw(self.substitution_template)),
-            ('inputs', as_raw_dict(self.inputs)),
-            ('outputs', as_raw_dict(self.outputs)),
-            ('operation_templates', as_raw_list(self.operation_templates))))
-
-    def instantiate(self, context, container):
-        service_instance = ServiceInstance()
-        context.modeling.instance = service_instance
-
-        service_instance.description = deepcopy_with_locators(self.description)
-
-        if self.metadata is not None:
-            service_instance.metadata = self.metadata.instantiate(context, container)
-
-        for node_template in self.node_templates.itervalues():
-            for _ in range(node_template.default_instances):
-                node = node_template.instantiate(context, container)
-                service_instance.nodes[node.id] = node
-
-        instantiate_dict(context, self, service_instance.groups, self.group_templates)
-        instantiate_dict(context, self, service_instance.policies, self.policy_templates)
-        instantiate_dict(context, self, service_instance.operations, self.operation_templates)
-
-        if self.substitution_template is not None:
-            service_instance.substitution = self.substitution_template.instantiate(context,
-                                                                                   container)
-
-        instantiate_dict(context, self, service_instance.inputs, self.inputs)
-        instantiate_dict(context, self, service_instance.outputs, self.outputs)
-
-        for name, the_input in context.modeling.inputs.iteritems():
-            if name not in service_instance.inputs:
-                context.validation.report('input "%s" is not supported' % name)
-            else:
-                service_instance.inputs[name].value = the_input
-
-        return service_instance
-
-    def validate(self, context):
-        if self.metadata is not None:
-            self.metadata.validate(context)
-        validate_dict_values(context, self.node_templates)
-        validate_dict_values(context, self.group_templates)
-        validate_dict_values(context, self.policy_templates)
-        if self.substitution_template is not None:
-            self.substitution_template.validate(context)
-        validate_dict_values(context, self.inputs)
-        validate_dict_values(context, self.outputs)
-        validate_dict_values(context, self.operation_templates)
-
-    def coerce_values(self, context, container, report_issues):
-        if self.metadata is not None:
-            self.metadata.coerce_values(context, container, report_issues)
-        coerce_dict_values(context, container, self.node_templates, report_issues)
-        coerce_dict_values(context, container, self.group_templates, report_issues)
-        coerce_dict_values(context, container, self.policy_templates, report_issues)
-        if self.substitution_template is not None:
-            self.substitution_template.coerce_values(context, container, report_issues)
-        coerce_dict_values(context, container, self.inputs, report_issues)
-        coerce_dict_values(context, container, self.outputs, report_issues)
-        coerce_dict_values(context, container, self.operation_templates, report_issues)
-
-    def dump(self, context):
-        if self.description is not None:
-            puts(context.style.meta(self.description))
-        if self.metadata is not None:
-            self.metadata.dump(context)
-        for node_template in self.node_templates.itervalues():
-            node_template.dump(context)
-        for group_template in self.group_templates.itervalues():
-            group_template.dump(context)
-        for policy_template in self.policy_templates.itervalues():
-            policy_template.dump(context)
-        if self.substitution_template is not None:
-            self.substitution_template.dump(context)
-        dump_parameters(context, self.inputs, 'Inputs')
-        dump_parameters(context, self.outputs, 'Outputs')
-        dump_dict_values(context, self.operation_templates, 'Operation templates')
-
-
-class NodeTemplate(ModelElement):
-    """
-    A template for creating zero or more :class:`Node` instances.
-
-    Properties:
-
-    * :code:`name`: Name (will be used as a prefix for node IDs)
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`default_instances`: Default number of nodes that will appear in the deployment plan
-    * :code:`min_instances`: Minimum number of nodes that will appear in the deployment plan
-    * :code:`max_instances`: Maximum number of nodes that will appear in the deployment plan
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`interface_templates`: Dict of :class:`InterfaceTemplate`
-    * :code:`artifact_templates`: Dict of :class:`ArtifactTemplate`
-    * :code:`capability_templates`: Dict of :class:`CapabilityTemplate`
-    * :code:`requirement_templates`: List of :class:`RequirementTemplate`
-    * :code:`target_node_template_constraints`: List of :class:`FunctionType`
-    """
-
-    def __init__(self, name, type_name):
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-        if not isinstance(type_name, basestring):
-            raise ValueError('must set type_name (string)')
-
-        self.name = name
-        self.description = None
-        self.type_name = type_name
-        self.default_instances = 1
-        self.min_instances = 0
-        self.max_instances = None
-        self.properties = StrictDict(key_class=basestring, value_class=Parameter)
-        self.interface_templates = StrictDict(key_class=basestring, value_class=InterfaceTemplate)
-        self.artifact_templates = StrictDict(key_class=basestring, value_class=ArtifactTemplate)
-        self.capability_templates = StrictDict(key_class=basestring, value_class=CapabilityTemplate)
-        self.requirement_templates = StrictList(value_class=RequirementTemplate)
-        self.target_node_template_constraints = StrictList(value_class=FunctionType)
-
-    def is_target_node_valid(self, target_node_template):
-        if self.target_node_template_constraints:
-            for node_type_constraint in self.target_node_template_constraints:
-                if not node_type_constraint(target_node_template, self):
-                    return False
-        return True
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('default_instances', self.default_instances),
-            ('min_instances', self.min_instances),
-            ('max_instances', self.max_instances),
-            ('properties', as_raw_dict(self.properties)),
-            ('interface_templates', as_raw_list(self.interface_templates)),
-            ('artifact_templates', as_raw_list(self.artifact_templates)),
-            ('capability_templates', as_raw_list(self.capability_templates)),
-            ('requirement_templates', as_raw_list(self.requirement_templates))))
-
-    def instantiate(self, context, container):
-        node = Node(context, self.type_name, self.name)
-        instantiate_dict(context, node, node.properties, self.properties)
-        instantiate_dict(context, node, node.interfaces, self.interface_templates)
-        instantiate_dict(context, node, node.artifacts, self.artifact_templates)
-        instantiate_dict(context, node, node.capabilities, self.capability_templates)
-        return node
-
-    def validate(self, context):
-        if context.modeling.node_types.get_descendant(self.type_name) is None:
-            context.validation.report('node template "%s" has an unknown type: %s'
-                                      % (self.name,
-                                         safe_repr(self.type_name)),
-                                      level=Issue.BETWEEN_TYPES)
-
-        validate_dict_values(context, self.properties)
-        validate_dict_values(context, self.interface_templates)
-        validate_dict_values(context, self.artifact_templates)
-        validate_dict_values(context, self.capability_templates)
-        validate_list_values(context, self.requirement_templates)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, self, self.properties, report_issues)
-        coerce_dict_values(context, self, self.interface_templates, report_issues)
-        coerce_dict_values(context, self, self.artifact_templates, report_issues)
-        coerce_dict_values(context, self, self.capability_templates, report_issues)
-        coerce_list_values(context, self, self.requirement_templates, report_issues)
-
-    def dump(self, context):
-        puts('Node template: %s' % context.style.node(self.name))
-        if self.description:
-            puts(context.style.meta(self.description))
-        with context.style.indent:
-            puts('Type: %s' % context.style.type(self.type_name))
-            puts('Instances: %d (%d%s)'
-                 % (self.default_instances,
-                    self.min_instances,
-                    (' to %d' % self.max_instances
-                     if self.max_instances is not None
-                     else ' or more')))
-            dump_parameters(context, self.properties)
-            dump_interfaces(context, self.interface_templates)
-            dump_dict_values(context, self.artifact_templates, 'Artifact templates')
-            dump_dict_values(context, self.capability_templates, 'Capability templates')
-            dump_list_values(context, self.requirement_templates, 'Requirement templates')
-
-
-class RequirementTemplate(ModelElement):
-    """
-    A requirement for a :class:`NodeTemplate`. During instantiation it will be matched with a
-    capability of another
-    node.
-
-    Requirements may optionally contain a :class:`RelationshipTemplate` that will be created between
-    the nodes.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`target_node_type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`target_node_template_name`: Must be represented in the :class:`ServiceModel`
-    * :code:`target_node_template_constraints`: List of :class:`FunctionType`
-    * :code:`target_capability_type_name`: Type of capability in target node
-    * :code:`target_capability_name`: Name of capability in target node
-    * :code:`relationship_template`: :class:`RelationshipTemplate`
-    """
-
-    def __init__(self, name=None,
-                 target_node_type_name=None,
-                 target_node_template_name=None,
-                 target_capability_type_name=None,
-                 target_capability_name=None):
-        if name is not None and not isinstance(name, basestring):
-            raise ValueError('name must be a string or None')
-        if target_node_type_name is not None and not isinstance(target_node_type_name, basestring):
-            raise ValueError('target_node_type_name must be a string or None')
-        if target_node_template_name is not None and not isinstance(target_node_template_name,
-                                                                    basestring):
-            raise ValueError('target_node_template_name must be a string or None')
-        if target_capability_type_name is not None and not isinstance(target_capability_type_name,
-                                                                      basestring):
-            raise ValueError('target_capability_type_name must be a string or None')
-        if target_capability_name is not None and not isinstance(target_capability_name,
-                                                                 basestring):
-            raise ValueError('target_capability_name must be a string or None')
-        if target_node_type_name is not None and target_node_template_name is not None:
-            raise ValueError('can set either target_node_type_name or target_node_template_name')
-        if target_capability_type_name is not None and target_capability_name is not None:
-            raise ValueError('can set either target_capability_type_name or target_capability_name')
-
-        self.name = name
-        self.target_node_type_name = target_node_type_name
-        self.target_node_template_name = target_node_template_name
-        self.target_node_template_constraints = StrictList(value_class=FunctionType)
-        self.target_capability_type_name = target_capability_type_name
-        self.target_capability_name = target_capability_name
-        self.relationship_template = None  # optional
-
-    def instantiate(self, context, container):
-        raise NotImplementedError
-
-    def find_target(self, context, source_node_template):
-        # We might already have a specific node template, so we'll just verify it
-        if self.target_node_template_name is not None:
-            target_node_template = \
-                context.modeling.model.node_templates.get(self.target_node_template_name)
-
-            if not source_node_template.is_target_node_valid(target_node_template):
-                context.validation.report('requirement "%s" of node template "%s" is for node '
-                                          'template "%s" but it does not match constraints'
-                                          % (self.name,
-                                             self.target_node_template_name,
-                                             source_node_template.name),
-                                          level=Issue.BETWEEN_TYPES)
-                return None, None
-
-            if self.target_capability_type_name is not None \
-                    or self.target_capability_name is not None:
-                target_node_capability = self.find_target_capability(context,
-                                                                     source_node_template,
-                                                                     target_node_template)
-                if target_node_capability is None:
-                    return None, None
-            else:
-                target_node_capability = None
-
-            return target_node_template, target_node_capability
-
-        # Find first node that matches the type
-        elif self.target_node_type_name is not None:
-            for target_node_template in context.modeling.model.node_templates.itervalues():
-                if not context.modeling.node_types.is_descendant(self.target_node_type_name,
-                                                                 target_node_template.type_name):
-                    continue
-
-                if not source_node_template.is_target_node_valid(target_node_template):
-                    continue
-
-                target_node_capability = self.find_target_capability(context,
-                                                                     source_node_template,
-                                                                     target_node_template)
-                if target_node_capability is None:
-                    continue
-
-                return target_node_template, target_node_capability
-
-        return None, None
-
-    def find_target_capability(self, context, source_node_template, target_node_template):
-        for capability_template in target_node_template.capability_templates.itervalues():
-            if capability_template.satisfies_requirement(context,
-                                                         source_node_template,
-                                                         self,
-                                                         target_node_template):
-                return capability_template
-        return None
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('target_node_type_name', self.target_node_type_name),
-            ('target_node_template_name', self.target_node_template_name),
-            ('target_capability_type_name', self.target_capability_type_name),
-            ('target_capability_name', self.target_capability_name),
-            ('relationship_template', as_raw(self.relationship_template))))
-
-    def validate(self, context):
-        node_types = context.modeling.node_types
-        capability_types = context.modeling.capability_types
-        if self.target_node_type_name \
-                and node_types.get_descendant(self.target_node_type_name) is None:
-            context.validation.report('requirement "%s" refers to an unknown node type: %s'
-                                      % (self.name, safe_repr(self.target_node_type_name)),
-                                      level=Issue.BETWEEN_TYPES)
-        if self.target_capability_type_name and \
-                capability_types.get_descendant(self.target_capability_type_name is None):
-            context.validation.report('requirement "%s" refers to an unknown capability type: %s'
-                                      % (self.name, safe_repr(self.target_capability_type_name)),
-                                      level=Issue.BETWEEN_TYPES)
-        if self.relationship_template:
-            self.relationship_template.validate(context)
-
-    def coerce_values(self, context, container, report_issues):
-        if self.relationship_template is not None:
-            self.relationship_template.coerce_values(context, container, report_issues)
-
-    def dump(self, context):
-        if self.name:
-            puts(context.style.node(self.name))
-        else:
-            puts('Requirement:')
-        with context.style.indent:
-            if self.target_node_type_name is not None:
-                puts('Target node type: %s'
-                     % context.style.type(self.target_node_type_name))
-            elif self.target_node_template_name is not None:
-                puts('Target node template: %s'
-                     % context.style.node(self.target_node_template_name))
-            if self.target_capability_type_name is not None:
-                puts('Target capability type: %s'
-                     % context.style.type(self.target_capability_type_name))
-            elif self.target_capability_name is not None:
-                puts('Target capability name: %s'
-                     % context.style.node(self.target_capability_name))
-            if self.target_node_template_constraints:
-                puts('Target node template constraints:')
-                with context.style.indent:
-                    for constraint in self.target_node_template_constraints:
-                        puts(context.style.literal(constraint))
-            if self.relationship_template:
-                puts('Relationship:')
-                with context.style.indent:
-                    self.relationship_template.dump(context)
-
-
-class CapabilityTemplate(ModelElement):
-    """
-    A capability of a :class:`NodeTemplate`. Nodes expose zero or more capabilities that can be
-    matched with :class:`Requirement` instances of other nodes.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`min_occurrences`: Minimum number of requirement matches required
-    * :code:`max_occurrences`: Maximum number of requirement matches allowed
-    * :code:`valid_source_node_type_names`: Must be represented in the :class:`ModelingContext`
-    * :code:`properties`: Dict of :class:`Parameter`
-    """
-
-    def __init__(self, name, type_name, valid_source_node_type_names=None):
-        if not isinstance(name, basestring):
-            raise ValueError('name must be a string or None')
-        if not isinstance(type_name, basestring):
-            raise ValueError('type_name must be a string or None')
-
-        self.name = name
-        self.description = None
-        self.type_name = type_name
-        self.min_occurrences = None  # optional
-        self.max_occurrences = None  # optional
-        self.valid_source_node_type_names = valid_source_node_type_names
-        self.properties = StrictDict(key_class=basestring, value_class=Parameter)
-
-    def satisfies_requirement(self,
-                              context,
-                              source_node_template,
-                              requirement,
-                              target_node_template):
-        # Do we match the required capability type?
-        capability_types = context.modeling.capability_types
-        if not capability_types.is_descendant(requirement.target_capability_type_name,
-                                              self.type_name):
-            return False
-
-        # Are we in valid_source_node_type_names?
-        if self.valid_source_node_type_names:
-            for valid_source_node_type_name in self.valid_source_node_type_names:
-                if not context.modeling.node_types.is_descendant(valid_source_node_type_name,
-                                                                 source_node_template.type_name):
-                    return False
-
-        # Apply requirement constraints
-        if requirement.target_node_template_constraints:
-            for node_type_constraint in requirement.target_node_template_constraints:
-                if not node_type_constraint(target_node_template, source_node_template):
-                    return False
-
-        return True
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('min_occurrences', self.min_occurrences),
-            ('max_occurrences', self.max_occurrences),
-            ('valid_source_node_type_names', self.valid_source_node_type_names),
-            ('properties', as_raw_dict(self.properties))))
-
-    def instantiate(self, context, container):
-        capability = Capability(self.name, self.type_name)
-        capability.min_occurrences = self.min_occurrences
-        capability.max_occurrences = self.max_occurrences
-        instantiate_dict(context, container, capability.properties, self.properties)
-        return capability
-
-    def validate(self, context):
-        if context.modeling.capability_types.get_descendant(self.type_name) is None:
-            context.validation.report(
-                'capability "%s" refers to an unknown type: %s'
-                % (self.name, safe_repr(self.type)),  # pylint: disable=no-member
-                # TODO fix self.type reference
-                level=Issue.BETWEEN_TYPES)
-
-        validate_dict_values(context, self.properties)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, self, self.properties, report_issues)
-
-    def dump(self, context):
-        puts(context.style.node(self.name))
-        if self.description:
-            puts(context.style.meta(self.description))
-        with context.style.indent:
-            puts('Type: %s' % context.style.type(self.type_name))
-            puts('Occurrences: %d%s'
-                 % (self.min_occurrences or 0, (' to %d' % self.max_occurrences)
-                    if self.max_occurrences is not None
-                    else ' or more'))
-            if self.valid_source_node_type_names:
-                puts('Valid source node types: %s'
-                     % ', '.join((str(context.style.type(v))
-                                  for v in self.valid_source_node_type_names)))
-            dump_parameters(context, self.properties)
-
-
-class RelationshipTemplate(ModelElement):
-    """
-    Optional addition to a :class:`Requirement` in :class:`NodeTemplate` that can be applied when
-    the requirement is matched with a capability.
-
-    Properties:
-
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`template_name`: Must be represented in the :class:`ServiceModel`
-    * :code:`description`: Description
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`source_interface_templates`: Dict of :class:`InterfaceTemplate`
-    * :code:`target_interface_templates`: Dict of :class:`InterfaceTemplate`
-    """
-
-    def __init__(self, type_name=None, template_name=None):
-        if (type_name is not None) and (not isinstance(type_name, basestring)):
-            raise ValueError('type_name must be a string or None')
-        if (template_name is not None) and (not isinstance(template_name, basestring)):
-            raise ValueError('template_name must be a string or None')
-        if (type_name is None) and (template_name is None):
-            raise ValueError('must set either type_name or template_name')
-
-        self.type_name = type_name
-        self.template_name = template_name
-        self.description = None
-        self.properties = StrictDict(key_class=basestring, value_class=Parameter)
-        self.source_interface_templates = StrictDict(key_class=basestring,
-                                                     value_class=InterfaceTemplate)
-        self.target_interface_templates = StrictDict(key_class=basestring,
-                                                     value_class=InterfaceTemplate)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('type_name', self.type_name),
-            ('template_name', self.template_name),
-            ('description', self.description),
-            ('properties', as_raw_dict(self.properties)),
-            ('source_interface_templates', as_raw_list(self.source_interface_templates)),
-            ('target_interface_templates', as_raw_list(self.target_interface_templates))))
-
-    def instantiate(self, context, container):
-        relationship = Relationship(name=self.template_name, type_name=self.type_name)
-        instantiate_dict(context, container,
-                         relationship.properties, self.properties)
-        instantiate_dict(context, container,
-                         relationship.source_interfaces, self.source_interface_templates)
-        instantiate_dict(context, container,
-                         relationship.target_interfaces, self.target_interface_templates)
-        return relationship
-
-    def validate(self, context):
-        if context.modeling.relationship_types.get_descendant(self.type_name) is None:
-            context.validation.report(
-                'relationship template "%s" has an unknown type: %s'
-                % (self.name, safe_repr(self.type_name)),  # pylint: disable=no-member
-                # TODO fix self.name reference
-                level=Issue.BETWEEN_TYPES)
-
-        validate_dict_values(context, self.properties)
-        validate_dict_values(context, self.source_interface_templates)
-        validate_dict_values(context, self.target_interface_templates)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, self, self.properties, report_issues)
-        coerce_dict_values(context, self, self.source_interface_templates, report_issues)
-        coerce_dict_values(context, self, self.target_interface_templates, report_issues)
-
-    def dump(self, context):
-        if self.type_name is not None:
-            puts('Relationship type: %s' % context.style.type(self.type_name))
-        else:
-            puts('Relationship template: %s' % context.style.node(self.template_name))
-        if self.description:
-            puts(context.style.meta(self.description))
-        with context.style.indent:
-            dump_parameters(context, self.properties)
-            dump_interfaces(context, self.source_interface_templates, 'Source interface templates')
-            dump_interfaces(context, self.target_interface_templates, 'Target interface templates')
-
-
-class ArtifactTemplate(ModelElement):
-    """
-    A file associated with a :class:`NodeTemplate`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`source_path`: Source path (CSAR or repository)
-    * :code:`target_path`: Path at destination machine
-    * :code:`repository_url`: Repository URL
-    * :code:`repository_credential`: Dict of string
-    * :code:`properties`: Dict of :class:`Parameter`
-    """
-
-    def __init__(self, name, type_name, source_path):
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-        if not isinstance(type_name, basestring):
-            raise ValueError('must set type_name (string)')
-        if not isinstance(source_path, basestring):
-            raise ValueError('must set source_path (string)')
-
-        self.name = name
-        self.description = None
-        self.type_name = type_name
-        self.source_path = source_path
-        self.target_path = None
-        self.repository_url = None
-        self.repository_credential = StrictDict(key_class=basestring, value_class=basestring)
-        self.properties = StrictDict(key_class=basestring, value_class=Parameter)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('source_path', self.source_path),
-            ('target_path', self.target_path),
-            ('repository_url', self.repository_url),
-            ('repository_credential', as_agnostic(self.repository_credential)),
-            ('properties', as_raw_dict(self.properties.iteritems()))))
-
-    def instantiate(self, context, container):
-        artifact = Artifact(self.name, self.type_name, self.source_path)
-        artifact.description = deepcopy_with_locators(self.description)
-        artifact.target_path = self.target_path
-        artifact.repository_url = self.repository_url
-        artifact.repository_credential = self.repository_credential
-        instantiate_dict(context, container, artifact.properties, self.properties)
-        return artifact
-
-    def validate(self, context):
-        if context.modeling.artifact_types.get_descendant(self.type_name) is None:
-            context.validation.report('artifact "%s" has an unknown type: %s'
-                                      % (self.name, safe_repr(self.type_name)),
-                                      level=Issue.BETWEEN_TYPES)
-
-        validate_dict_values(context, self.properties)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, container, self.properties, report_issues)
-
-    def dump(self, context):
-        puts(context.style.node(self.name))
-        if self.description:
-            puts(context.style.meta(self.description))
-        with context.style.indent:
-            puts('Artifact type: %s' % context.style.type(self.type_name))
-            puts('Source path: %s' % context.style.literal(self.source_path))
-            if self.target_path is not None:
-                puts('Target path: %s' % context.style.literal(self.target_path))
-            if self.repository_url is not None:
-                puts('Repository URL: %s' % context.style.literal(self.repository_url))
-            if self.repository_credential:
-                puts('Repository credential: %s'
-                     % context.style.literal(self.repository_credential))
-            dump_parameters(context, self.properties)
-
-
-class GroupTemplate(ModelElement):
-    """
-    A template for creating zero or more :class:`Group` instances.
-
-    Groups are logical containers for zero or more nodes that allow applying zero or more
-    :class:`GroupPolicy` instances to the nodes together.
-
-    Properties:
-
-    * :code:`name`: Name (will be used as a prefix for group IDs)
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`interface_templates`: Dict of :class:`InterfaceTemplate`
-    * :code:`policy_templates`: Dict of :class:`GroupPolicyTemplate`
-    * :code:`member_node_template_names`: Must be represented in the :class:`ServiceModel`
-    * :code:`member_group_template_names`: Must be represented in the :class:`ServiceModel`
-    """
-
-    def __init__(self, name, type_name=None):
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-        if (type_name is not None) and (not isinstance(type_name, basestring)):
-            raise ValueError('type_name must be a string or None')
-
-        self.name = name
-        self.description = None
-        self.type_name = type_name
-        self.properties = StrictDict(key_class=basestring, value_class=Parameter)
-        self.interface_templates = StrictDict(key_class=basestring, value_class=InterfaceTemplate)
-        self.policy_templates = StrictDict(key_class=basestring, value_class=GroupPolicyTemplate)
-        self.member_node_template_names = StrictList(value_class=basestring)
-        self.member_group_template_names = StrictList(value_class=basestring)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('properties', as_raw_dict(self.properties)),
-            ('interface_templates', as_raw_list(self.interface_templates)),
-            ('policy_templates', as_raw_list(self.policy_templates)),
-            ('member_node_template_names', self.member_node_template_names),
-            ('member_group_template_names', self.member_group_template_names)))
-
-    def instantiate(self, context, container):
-        group = Group(context, self.type_name, self.name)
-        instantiate_dict(context, self, group.properties, self.properties)
-        instantiate_dict(context, self, group.interfaces, self.interface_templates)
-        instantiate_dict(context, self, group.policies, self.policy_templates)
-        for member_node_template_name in self.member_node_template_names:
-            group.member_node_ids += \
-                context.modeling.instance.get_node_ids(member_node_template_name)
-        for member_group_template_name in self.member_group_template_names:
-            group.member_group_ids += \
-                context.modeling.instance.get_group_ids(member_group_template_name)
-        return group
-
-    def validate(self, context):
-        if context.modeling.group_types.get_descendant(self.type_name) is None:
-            context.validation.report('group template "%s" has an unknown type: %s'
-                                      % (self.name, safe_repr(self.type_name)),
-                                      level=Issue.BETWEEN_TYPES)
-
-        validate_dict_values(context, self.properties)
-        validate_dict_values(context, self.interface_templates)
-        validate_dict_values(context, self.policy_templates)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, self, self.properties, report_issues)
-        coerce_dict_values(context, self, self.interface_templates, report_issues)
-        coerce_dict_values(context, self, self.policy_templates, report_issues)
-
-    def dump(self, context):
-        puts('Group template: %s' % context.style.node(self.name))
-        if self.description:
-            puts(context.style.meta(self.description))
-        with context.style.indent:
-            if self.type_name:
-                puts('Type: %s' % context.style.type(self.type_name))
-            dump_parameters(context, self.properties)
-            dump_interfaces(context, self.interface_templates)
-            dump_dict_values(context, self.policy_templates, 'Policy templates')
-            if self.member_node_template_names:
-                puts('Member node templates: %s' % ', '.join(
-                    (str(context.style.node(v)) for v in self.member_node_template_names)))
-
-
-class PolicyTemplate(ModelElement):
-    """
-    Policies can be applied to zero or more :class:`NodeTemplate` or :class:`GroupTemplate`
-    instances.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`target_node_template_names`: Must be represented in the :class:`ServiceModel`
-    * :code:`target_group_template_names`: Must be represented in the :class:`ServiceModel`
-    """
-
-    def __init__(self, name, type_name):
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-        if not isinstance(type_name, basestring):
-            raise ValueError('must set type_name (string)')
-
-        self.name = name
-        self.description = None
-        self.type_name = type_name
-        self.properties = StrictDict(key_class=basestring, value_class=Parameter)
-        self.target_node_template_names = StrictList(value_class=basestring)
-        self.target_group_template_names = StrictList(value_class=basestring)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('properties', as_raw_dict(self.properties)),
-            ('target_node_template_names', self.target_node_template_names),
-            ('target_group_template_names', self.target_group_template_names)))
-
-    def instantiate(self, context, container):
-        policy = Policy(self.name, self.type_name)
-        instantiate_dict(context, self, policy.properties, self.properties)
-        for node_template_name in self.target_node_template_names:
-            policy.target_node_ids.extend(
-                context.modeling.instance.get_node_ids(node_template_name))
-        for group_template_name in self.target_group_template_names:
-            policy.target_group_ids.extend(
-                context.modeling.instance.get_group_ids(group_template_name))
-        return policy
-
-    def validate(self, context):
-        if context.modeling.policy_types.get_descendant(self.type_name) is None:
-            context.validation.report('policy template "%s" has an unknown type: %s'
-                                      % (self.name, safe_repr(self.type_name)),
-                                      level=Issue.BETWEEN_TYPES)
-
-        validate_dict_values(context, self.properties)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, self, self.properties, report_issues)
-
-    def dump(self, context):
-        puts('Policy template: %s' % context.style.node(self.name))
-        if self.description:
-            puts(context.style.meta(self.description))
-        with context.style.indent:
-            puts('Type: %s' % context.style.type(self.type_name))
-            dump_parameters(context, self.properties)
-            if self.target_node_template_names:
-                puts('Target node templates: %s' % ', '.join(
-                    (str(context.style.node(v)) for v in self.target_node_template_names)))
-            if self.target_group_template_names:
-                puts('Target group templates: %s' % ', '.join(
-                    (str(context.style.node(v)) for v in self.target_group_template_names)))
-
-
-class GroupPolicyTemplate(ModelElement):
-    """
-    Policies applied to groups.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`triggers`: Dict of :class:`GroupPolicyTrigger`
-    """
-
-    def __init__(self, name, type_name):
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-        if not isinstance(type_name, basestring):
-            raise ValueError('must set type_name (string)')
-
-        self.name = name
-        self.description = None
-        self.type_name = type_name
-        self.properties = StrictDict(key_class=basestring, value_class=Parameter)
-        self.triggers = StrictDict(key_class=basestring, value_class=GroupPolicyTriggerTemplate)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('properties', as_raw_dict(self.properties)),
-            ('triggers', as_raw_list(self.triggers))))
-
-    def instantiate(self, context, container):
-        group_policy = GroupPolicy(self.name, self.type_name)
-        group_policy.description = deepcopy_with_locators(self.description)
-        instantiate_dict(context, container, group_policy.properties, self.properties)
-        instantiate_dict(context, container, group_policy.triggers, self.triggers)
-        return group_policy
-
-    def validate(self, context):
-        if context.modeling.policy_types.get_descendant(self.type_name) is None:
-            context.validation.report('group policy "%s" has an unknown type: %s'
-                                      % (self.name, safe_repr(self.type_name)),
-                                      level=Issue.BETWEEN_TYPES)
-
-        validate_dict_values(context, self.properties)
-        validate_dict_values(context, self.triggers)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, container, self.properties, report_issues)
-        coerce_dict_values(context, container, self.triggers, report_issues)
-
-    def dump(self, context):
-        puts(context.style.node(self.name))
-        if self.description:
-            puts(context.style.meta(self.description))
-        with context.style.indent:
-            puts('Group policy type: %s' % context.style.type(self.type_name))
-            dump_parameters(context, self.properties)
-            dump_dict_values(context, self.triggers, 'Triggers')
-
-
-class GroupPolicyTriggerTemplate(ModelElement):
-    """
-    Triggers for :class:`GroupPolicyTemplate`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`implementation`: Implementation string (interpreted by the orchestrator)
-    * :code:`properties`: Dict of :class:`Parameter`
-    """
-
-    def __init__(self, name, implementation):
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-        if not isinstance(implementation, basestring):
-            raise ValueError('must set implementation (string)')
-
-        self.name = name
-        self.description = None
-        self.implementation = implementation
-        self.properties = StrictDict(key_class=basestring, value_class=Parameter)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('implementation', self.implementation),
-            ('properties', as_raw_dict(self.properties))))
-
-    def instantiate(self, context, container):
-        group_policy_trigger = GroupPolicyTrigger(self.name, self.implementation)
-        group_policy_trigger.description = deepcopy_with_locators(self.description)
-        instantiate_dict(context, container, group_policy_trigger.properties, self.properties)
-        return group_policy_trigger
-
-    def validate(self, context):
-        validate_dict_values(context, self.properties)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, container, self.properties, report_issues)
-
-    def dump(self, context):
-        puts(context.style.node(self.name))
-        if self.description:
-            puts(context.style.meta(self.description))
-        with context.style.indent:
-            puts('Implementation: %s' % context.style.literal(self.implementation))
-            dump_parameters(context, self.properties)
-
-
-class MappingTemplate(ModelElement):
-    """
-    Used by :class:`SubstitutionTemplate` to map a capability or a requirement to a node.
-
-    Properties:
-
-    * :code:`mapped_name`: Exposed capability or requirement name
-    * :code:`node_template_name`: Must be represented in the :class:`ServiceModel`
-    * :code:`name`: Name of capability or requirement at the node template
-    """
-
-    def __init__(self, mapped_name, node_template_name, name):
-        if not isinstance(mapped_name, basestring):
-            raise ValueError('must set mapped_name (string)')
-        if not isinstance(node_template_name, basestring):
-            raise ValueError('must set node_template_name (string)')
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-
-        self.mapped_name = mapped_name
-        self.node_template_name = node_template_name
-        self.name = name
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('mapped_name', self.mapped_name),
-            ('node_template_name', self.node_template_name),
-            ('name', self.name)))
-
-    def instantiate(self, context, container):
-        nodes = context.modeling.instance.find_nodes(self.node_template_name)
-        if len(nodes) == 0:
-            context.validation.report('mapping "%s" refers to node template "%s" but there are '
-                                      'no node instances' % (self.mapped_name,
-                                                             self.node_template_name),
-                                      level=Issue.BETWEEN_INSTANCES)
-            return None
-        return Mapping(self.mapped_name, nodes[0].id, self.name)
-
-    def validate(self, context):
-        if self.node_template_name not in context.modeling.model.node_templates:
-            context.validation.report('mapping "%s" refers to an unknown node template: %s'
-                                      % (self.mapped_name, safe_repr(self.node_template_name)),
-                                      level=Issue.BETWEEN_TYPES)
-
-    def dump(self, context):
-        puts('%s -> %s.%s' % (context.style.node(self.mapped_name),
-                              context.style.node(self.node_template_name),
-                              context.style.node(self.name)))
-
-
-class SubstitutionTemplate(ModelElement):
-    """
-    Used to substitute a single node for the entire deployment.
-
-    Properties:
-
-    * :code:`node_type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`capability_templates`: Dict of :class:`MappingTemplate`
-    * :code:`requirement_templates`: Dict of :class:`MappingTemplate`
-    """
-
-    def __init__(self, node_type_name):
-        if not isinstance(node_type_name, basestring):
-            raise ValueError('must set node_type_name (string)')
-
-        self.node_type_name = node_type_name
-        self.capability_templates = StrictDict(key_class=basestring, value_class=MappingTemplate)
-        self.requirement_templates = StrictDict(key_class=basestring, value_class=MappingTemplate)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('node_type_name', self.node_type_name),
-            ('capability_templates', as_raw_list(self.capability_templates)),
-            ('requirement_templates', as_raw_list(self.requirement_templates))))
-
-    def instantiate(self, context, container):
-        substitution = Substitution(self.node_type_name)
-        instantiate_dict(context, container, substitution.capabilities, self.capability_templates)
-        instantiate_dict(context, container, substitution.requirements, self.requirement_templates)
-        return substitution
-
-    def validate(self, context):
-        if context.modeling.node_types.get_descendant(self.node_type_name) is None:
-            context.validation.report('substitution template has an unknown type: %s'
-                                      % safe_repr(self.node_type_name),
-                                      level=Issue.BETWEEN_TYPES)
-
-        validate_dict_values(context, self.capability_templates)
-        validate_dict_values(context, self.requirement_templates)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, self, self.capability_templates, report_issues)
-        coerce_dict_values(context, self, self.requirement_templates, report_issues)
-
-    def dump(self, context):
-        puts('Substitution template:')
-        with context.style.indent:
-            puts('Node type: %s' % context.style.type(self.node_type_name))
-            dump_dict_values(context, self.capability_templates, 'Capability template mappings')
-            dump_dict_values(context, self.requirement_templates, 'Requirement template mappings')
-
-
-class InterfaceTemplate(ModelElement):
-    """
-    A typed set of :class:`OperationTemplate`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`inputs`: Dict of :class:`Parameter`
-    * :code:`operation_templates`: Dict of :class:`OperationTemplate`
-    """
-
-    def __init__(self, name, type_name):
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-
-        self.name = name
-        self.description = None
-        self.type_name = type_name
-        self.inputs = StrictDict(key_class=basestring, value_class=Parameter)
-        self.operation_templates = StrictDict(key_class=basestring, value_class=OperationTemplate)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('inputs', as_raw_dict(self.inputs)),
-            ('operation_templates', as_raw_list(self.operation_templates))))
-
-    def instantiate(self, context, container):
-        interface = Interface(self.name, self.type_name)
-        interface.description = deepcopy_with_locators(self.description)
-        instantiate_dict(context, container, interface.inputs, self.inputs)
-        instantiate_dict(context, container, interface.operations, self.operation_templates)
-        return interface
-
-    def validate(self, context):
-        if self.type_name:
-            if context.modeling.interface_types.get_descendant(self.type_name) is None:
-                context.validation.report('interface "%s" has an unknown type: %s'
-                                          % (self.name, safe_repr(self.type_name)),
-                                          level=Issue.BETWEEN_TYPES)
-
-        validate_dict_values(context, self.inputs)
-        validate_dict_values(context, self.operation_templates)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, container, self.inputs, report_issues)
-        coerce_dict_values(context, container, self.operation_templates, report_issues)
-
-    def dump(self, context):
-        puts(context.style.node(self.name))
-        if self.description:
-            puts(context.style.meta(self.description))
-        with context.style.indent:
-            puts('Interface type: %s' % context.style.type(self.type_name))
-            dump_parameters(context, self.inputs, 'Inputs')
-            dump_dict_values(context, self.operation_templates, 'Operation templates')
-
-
-class OperationTemplate(ModelElement):
-    """
-    An operation in a :class:`InterfaceTemplate`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`implementation`: Implementation string (interpreted by the orchestrator)
-    * :code:`dependencies`: List of strings (interpreted by the orchestrator)
-    * :code:`executor`: Executor string (interpreted by the orchestrator)
-    * :code:`max_retries`: Maximum number of retries allowed in case of failure
-    * :code:`retry_interval`: Interval between retries
-    * :code:`inputs`: Dict of :class:`Parameter`
-    """
-
-    def __init__(self, name):
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-
-        self.name = name
-        self.description = None
-        self.implementation = None
-        self.dependencies = StrictList(value_class=basestring)
-        self.executor = None
-        self.max_retries = None
-        self.retry_interval = None
-        self.inputs = StrictDict(key_class=basestring, value_class=Parameter)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('implementation', self.implementation),
-            ('dependencies', self.dependencies),
-            ('executor', self.executor),
-            ('max_retries', self.max_retries),
-            ('retry_interval', self.retry_interval),
-            ('inputs', as_raw_dict(self.inputs))))
-
-    def instantiate(self, context, container):
-        operation = Operation(self.name)
-        operation.description = deepcopy_with_locators(self.description)
-        operation.implementation = self.implementation
-        operation.dependencies = self.dependencies
-        operation.executor = self.executor
-        operation.max_retries = self.max_retries
-        operation.retry_interval = self.retry_interval
-        instantiate_dict(context, container, operation.inputs, self.inputs)
-        return operation
-
-    def validate(self, context):
-        validate_dict_values(context, self.inputs)
-
-    def coerce_values(self, context, container, report_issues):
-        coerce_dict_values(context, container, self.inputs, report_issues)
-
-    def dump(self, context):
-        puts(context.style.node(self.name))
-        if self.description:
-            puts(context.style.meta(self.description))
-        with context.style.indent:
-            if self.implementation is not None:
-                puts('Implementation: %s' % context.style.literal(self.implementation))
-            if self.dependencies:
-                puts('Dependencies: %s' % ', '.join(
-                    (str(context.style.literal(v)) for v in self.dependencies)))
-            if self.executor is not None:
-                puts('Executor: %s' % context.style.literal(self.executor))
-            if self.max_retries is not None:
-                puts('Max retries: %s' % context.style.literal(self.max_retries))
-            if self.retry_interval is not None:
-                puts('Retry interval: %s' % context.style.literal(self.retry_interval))
-            dump_parameters(context, self.inputs, 'Inputs')

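For orientation, the template elements deleted above were plain Python classes that the parser
instantiated directly. A minimal sketch of how a policy template was assembled and serialized;
the import of PolicyTemplate is assumed to come from the parser's (now removed) modeling
package, whose exact module path is not shown in this diff:

    # Assumes PolicyTemplate has been imported from the parser's removed modeling package.
    policy_template = PolicyTemplate(name='scaling_policy', type_name='aria.Scaling')
    policy_template.description = 'Scale the web tier'
    policy_template.target_node_template_names.append('web_server')

    # as_raw flattens the element into an OrderedDict suitable for serialization.
    raw = policy_template.as_raw
    assert raw['type_name'] == 'aria.Scaling'
    assert raw['target_node_template_names'] == ['web_server']
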
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/parser/modeling/storage.py
----------------------------------------------------------------------
diff --git a/aria/parser/modeling/storage.py b/aria/parser/modeling/storage.py
deleted file mode 100644
index ff1e536..0000000
--- a/aria/parser/modeling/storage.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-This solution is temporary, as we plan to combine aria.parser.modeling and aria.storage.modeling
-into one package (aria.modeling?).
-"""
-
-from datetime import datetime
-from threading import RLock
-
-from ...storage.modeling import model
-from ...orchestrator.decorators import operation
-from ...utils.console import puts, Colored
-from ...utils.formatting import safe_repr
-
-
-def initialize_storage(context, model_storage, service_instance_id):
-    s_service_template = create_service_template(context)
-    model_storage.service_template.put(s_service_template)
-
-    s_service_instance = create_service_instance(context, s_service_template, service_instance_id)
-    model_storage.service_instance.put(s_service_instance)
-
-    # Create node templates and nodes
-    for node_template in context.modeling.model.node_templates.itervalues():
-        s_node_template = create_node_template(s_service_template, node_template)
-        model_storage.node_template.put(s_node_template)
-
-        for node in context.modeling.instance.find_nodes(node_template.name):
-            s_node = create_node(s_service_instance, s_node_template, node)
-            model_storage.node.put(s_node)
-            create_interfaces(context, model_storage, node.interfaces,
-                              s_node, 'node', None, '_dry_node')
-
-    # Create relationships between nodes
-    for source_node in context.modeling.instance.nodes.itervalues():
-        for relationship in source_node.relationships:
-            s_source_node = model_storage.node.get_by_name(source_node.id)
-            s_target_node = model_storage.node.get_by_name(relationship.target_node_id)
-            s_relationship = create_relationship(s_source_node, s_target_node)
-            model_storage.relationship.put(s_relationship)
-            # TOSCA always uses the "source" edge
-            create_interfaces(context, model_storage, relationship.source_interfaces,
-                              s_relationship, 'relationship', 'source', '_dry_relationship')
-
-
-def create_service_template(context):
-    now = datetime.utcnow()
-    main_file_name = unicode(context.presentation.location)
-    try:
-        name = context.modeling.model.metadata.values.get('template_name')
-    except AttributeError:
-        name = None
-    return model.ServiceTemplate(
-        name=name or main_file_name,
-        description=context.modeling.model.description or '',
-        created_at=now,
-        updated_at=now,
-        main_file_name=main_file_name,
-        plan={}
-    )
-
-
-def create_service_instance(context, service_template, service_instance_id):
-    now = datetime.utcnow()
-    return model.ServiceInstance(
-        name='{0}_{1}'.format(service_template.name, service_instance_id),
-        service_template=service_template,
-        description=context.modeling.instance.description or '',
-        created_at=now,
-        updated_at=now)
-
-
-def create_node_template(service_template, node_template):
-    return model.NodeTemplate(
-        name=node_template.name,
-        type_name=node_template.type_name,
-        default_instances=node_template.default_instances,
-        min_instances=node_template.min_instances,
-        max_instances=node_template.max_instances or 100,
-        service_template=service_template)
-
-
-def create_node(service_instance, node_template, node):
-    return model.Node(
-        name=node.id,
-        state='',
-        node_template=node_template,
-        service_instance=service_instance)
-
-
-def create_relationship(source_node, target_node):
-    return model.Relationship(
-        source_node=source_node,
-        target_node=target_node)
-
-
-def create_interfaces(context, model_storage, interfaces, node_or_relationship, type_name, edge,
-                      fn_name):
-    for interface_name, interface in interfaces.iteritems():
-        s_interface = model.Interface(name=interface_name,
-                                      type_name=interface.type_name,
-                                      edge=edge)
-        setattr(s_interface, type_name, node_or_relationship)
-        model_storage.interface.put(s_interface)
-        for operation_name, oper in interface.operations.iteritems():
-            operation_name = '{0}.{1}'.format(interface_name, operation_name)
-            s_operation = model.Operation(name=operation_name,
-                                          implementation='{0}.{1}'.format(__name__, fn_name),
-                                          interface=s_interface)
-            plugin, implementation = _parse_implementation(context, oper.implementation)
-            # TODO: operation's user inputs
-            s_operation.inputs.append(model.Parameter(name='_plugin', # pylint: disable=no-member
-                                                      str_value=str(plugin),
-                                                      type='str'))
-            s_operation.inputs.append(model.Parameter(name='_implementation', # pylint: disable=no-member
-                                                      str_value=str(implementation),
-                                                      type='str'))
-            model_storage.operation.put(s_operation)
-
-
-def _parse_implementation(context, implementation):
-    if not implementation:
-        return '', ''
-
-    index = implementation.find('>')
-    if index == -1:
-        return 'execution', implementation
-    plugin = implementation[:index].strip()
-
-    # TODO: validation should happen in parser
-    if (plugin != 'execution') and (_get_plugin(context, plugin) is None):
-        raise ValueError('unknown plugin: "%s"' % plugin)
-
-    implementation = implementation[index+1:].strip()
-    return plugin, implementation
-
-
-def _get_plugin(context, plugin_name):
-    def is_plugin(type_name):
-        return context.modeling.policy_types.get_role(type_name) == 'plugin'
-
-    for policy in context.modeling.instance.policies.itervalues():
-        if (policy.name == plugin_name) and is_plugin(policy.type_name):
-            return policy
-
-    return None
-
-
-_TERMINAL_LOCK = RLock()
-
-
-@operation
-def _dry_node(ctx, _plugin, _implementation, **kwargs):
-    with _TERMINAL_LOCK:
-        print '> node instance: %s' % Colored.red(ctx.node.name)
-        _dump_implementation(_plugin, _implementation)
-
-
-@operation
-def _dry_relationship(ctx, _plugin, _implementation, **kwargs):
-    with _TERMINAL_LOCK:
-        puts('> relationship instance: %s -> %s' % (
-            Colored.red(ctx.relationship.source_node.name),
-            Colored.red(ctx.relationship.target_node.name)))
-        _dump_implementation(_plugin, _implementation)
-
-
-def _dump_implementation(plugin, implementation):
-    if plugin:
-        print '  plugin: %s' % Colored.magenta(plugin)
-    if implementation:
-        print '  implementation: %s' % Colored.yellow(safe_repr(implementation))

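The deleted _parse_implementation helper above splits an operation's implementation string on
'>' to separate an optional plugin name from the implementation path, defaulting to the
'execution' plugin when no separator is present. A standalone restatement of that parsing rule
for illustration (the plugin-existence check against the modeling context is omitted here):

    def parse_implementation(implementation):
        # Empty implementation: no plugin, no path.
        if not implementation:
            return '', ''
        index = implementation.find('>')
        if index == -1:
            # No '>' separator: the default 'execution' plugin runs the string.
            return 'execution', implementation
        # 'my_plugin > package.module.task' -> ('my_plugin', 'package.module.task')
        return implementation[:index].strip(), implementation[index + 1:].strip()

    assert parse_implementation('scripts/configure.sh') == ('execution', 'scripts/configure.sh')
    assert parse_implementation('my_plugin > package.module.task') == \
        ('my_plugin', 'package.module.task')
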
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/parser/modeling/types.py
----------------------------------------------------------------------
diff --git a/aria/parser/modeling/types.py b/aria/parser/modeling/types.py
deleted file mode 100644
index 0a232fc..0000000
--- a/aria/parser/modeling/types.py
+++ /dev/null
@@ -1,146 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ...utils.collections import StrictList, StrictDict, OrderedDict
-from ...utils.formatting import as_raw
-from ...utils.console import puts
-
-
-class Type(object):
-    """
-    Represents a type and its children.
-    """
-
-    def __init__(self, name):
-        if not isinstance(name, basestring):
-            raise ValueError('must set name (string)')
-
-        self.name = name
-        self.description = None
-        self.role = None
-        self.children = StrictList(value_class=Type)
-
-    def get_parent(self, name):
-        for child in self.children:
-            if child.name == name:
-                return self
-            parent = child.get_parent(name)
-            if parent is not None:
-                return parent
-        return None
-
-    def is_descendant(self, base_name, name):
-        base = self.get_descendant(base_name)
-        if base is not None:
-            if base.get_descendant(name) is not None:
-                return True
-        return False
-
-    def get_descendant(self, name):
-        if self.name == name:
-            return self
-        for child in self.children:
-            found = child.get_descendant(name)
-            if found is not None:
-                return found
-        return None
-
-    def iter_descendants(self):
-        for child in self.children:
-            yield child
-            for descendant in child.iter_descendants():
-                yield descendant
-
-    def get_role(self, name):
-        def _get_role(the_type):
-            if the_type is None:
-                return None
-            elif the_type.role is None:
-                return _get_role(self.get_parent(the_type.name))
-            return the_type.role
-
-        return _get_role(self.get_descendant(name))
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('role', self.role)))
-
-    def dump(self, context):
-        if self.name:
-            puts(context.style.type(self.name))
-        with context.style.indent:
-            for child in self.children:
-                child.dump(context)
-
-    def append_raw_children(self, types):
-        for child in self.children:
-            raw_child = as_raw(child)
-            raw_child['parent'] = self.name
-            types.append(raw_child)
-            child.append_raw_children(types)
-
-
-class RelationshipType(Type):
-    def __init__(self, name):
-        super(RelationshipType, self).__init__(name)
-
-        self.properties = StrictDict(key_class=basestring)
-        self.source_interfaces = StrictDict(key_class=basestring)
-        self.target_interfaces = StrictDict(key_class=basestring)
-
-
-class PolicyType(Type):
-    def __init__(self, name):
-        super(PolicyType, self).__init__(name)
-
-        self.implementation = None
-        self.properties = StrictDict(key_class=basestring)
-
-
-class PolicyTriggerType(Type):
-    def __init__(self, name):
-        super(PolicyTriggerType, self).__init__(name)
-
-        self.implementation = None
-        self.properties = StrictDict(key_class=basestring)
-
-
-class TypeHierarchy(Type):
-    """
-    Represents a single-parent derivation :class:`Type` hierarchy.
-    """
-
-    def __init__(self):
-        super(TypeHierarchy, self).__init__(name='')
-        self.name = None  # TODO Calling the super __init__ with name='' and then setting name to
-        # None is an ugly workaround that we need to improve. The reason for the current state:
-        # this module defines a class named `Type` whose `__init__` takes a `name` argument and
-        # raises an exception if `name` is not an instance of `basestring`. Several classes
-        # inherit from `Type` (RelationshipType, PolicyType, PolicyTriggerType), and so does
-        # `TypeHierarchy`, but a hierarchy has no name of its own, so `name` is set to None here.
-        # Calling the super __init__ with name=None raises an exception, while skipping the super
-        # call makes pylint complain. We tried modifying the Type class hierarchy, but it was not
-        # that simple. Calling with name='' without resetting `name` to None afterwards raises
-        # parsing validation issues.
-        self.children = StrictList(value_class=Type)
-
-    @property
-    def as_raw(self):
-        types = []
-        self.append_raw_children(types)
-        return types

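The deleted types.py above models types as a recursive tree: get_descendant walks the whole
subtree, is_descendant checks ancestry between two type names, and get_role climbs back up until
an ancestor declares a role. A small sketch against those classes as they stood before removal
(the import path is the path of the deleted module itself):

    from aria.parser.modeling.types import Type, TypeHierarchy  # path of the deleted module

    hierarchy = TypeHierarchy()
    root = Type('tosca.policies.Root')
    plugin = Type('aria.Plugin')
    plugin.role = 'plugin'
    root.children.append(plugin)
    hierarchy.children.append(root)

    assert hierarchy.get_descendant('aria.Plugin') is plugin
    assert hierarchy.is_descendant('tosca.policies.Root', 'aria.Plugin')
    # get_role falls back to the nearest ancestor that declares a role.
    assert hierarchy.get_role('aria.Plugin') == 'plugin'
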
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/parser/modeling/utils.py
----------------------------------------------------------------------
diff --git a/aria/parser/modeling/utils.py b/aria/parser/modeling/utils.py
deleted file mode 100644
index 21db433..0000000
--- a/aria/parser/modeling/utils.py
+++ /dev/null
@@ -1,146 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from random import randrange
-
-from shortuuid import ShortUUID
-
-from ...utils.collections import OrderedDict
-from ...utils.console import puts
-from ..exceptions import InvalidValueError
-from ..presentation import Value
-from .exceptions import CannotEvaluateFunctionException
-
-# UUID = ShortUUID() # default alphabet is base57, which is alphanumeric without visually ambiguous
-# characters; ID length is 22
-UUID = ShortUUID(alphabet='abcdefghijklmnopqrstuvwxyz0123456789')  # alphanumeric; ID length is 25
-
-
-def generate_id_string(length=None):
-    """
-    A random string with a strong guarantee of universal uniqueness (uses UUID).
-
-    The default length is 25 characters.
-    """
-
-    the_id = UUID.uuid()
-    if length is not None:
-        the_id = the_id[:length]
-    return the_id
-
-
-def generate_hex_string():
-    """
-    A random string of 5 hex digits with no guarantee of universal uniqueness.
-    """
-
-    return '%05x' % randrange(16 ** 5)
-
-
-def coerce_value(context, container, value, report_issues=False):
-    if isinstance(value, Value):
-        value = value.value
-
-    if isinstance(value, list):
-        return [coerce_value(context, container, v, report_issues) for v in value]
-    elif isinstance(value, dict):
-        return OrderedDict((k, coerce_value(context, container, v, report_issues))
-                           for k, v in value.items())
-    elif hasattr(value, '_evaluate'):
-        try:
-            value = value._evaluate(context, container)
-            value = coerce_value(context, container, value, report_issues)
-        except CannotEvaluateFunctionException:
-            pass
-        except InvalidValueError as e:
-            if report_issues:
-                context.validation.report(e.issue)
-    return value
-
-
-def validate_dict_values(context, the_dict):
-    if not the_dict:
-        return
-    validate_list_values(context, the_dict.values())
-
-
-def validate_list_values(context, the_list):
-    if not the_list:
-        return
-    for value in the_list:
-        value.validate(context)
-
-
-def coerce_dict_values(context, container, the_dict, report_issues=False):
-    if not the_dict:
-        return
-    coerce_list_values(context, container, the_dict.itervalues(), report_issues)
-
-
-def coerce_list_values(context, container, the_list, report_issues=False):
-    if not the_list:
-        return
-    for value in the_list:
-        value.coerce_values(context, container, report_issues)
-
-
-def instantiate_dict(context, container, the_dict, from_dict):
-    if not from_dict:
-        return
-    for name, value in from_dict.iteritems():
-        value = value.instantiate(context, container)
-        if value is not None:
-            the_dict[name] = value
-
-
-def dump_list_values(context, the_list, name):
-    if not the_list:
-        return
-    puts('%s:' % name)
-    with context.style.indent:
-        for value in the_list:
-            value.dump(context)
-
-
-def dump_dict_values(context, the_dict, name):
-    if not the_dict:
-        return
-    dump_list_values(context, the_dict.itervalues(), name)
-
-
-def dump_parameters(context, parameters, name='Properties'):
-    if not parameters:
-        return
-    puts('%s:' % name)
-    with context.style.indent:
-        for parameter_name, parameter in parameters.iteritems():
-            if parameter.type_name is not None:
-                puts('%s = %s (%s)' % (context.style.property(parameter_name),
-                                       context.style.literal(parameter.value),
-                                       context.style.type(parameter.type_name)))
-            else:
-                puts('%s = %s' % (context.style.property(parameter_name),
-                                  context.style.literal(parameter.value)))
-            if parameter.description:
-                puts(context.style.meta(parameter.description))
-
-
-def dump_interfaces(context, interfaces, name='Interfaces'):
-    if not interfaces:
-        return
-    puts('%s:' % name)
-    with context.style.indent:
-        for interface in interfaces.itervalues():
-            interface.dump(context)

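Besides the dump and coercion helpers, the deleted utils.py above carried the two ID generators
used when instantiating models. A short behavior sketch (shortuuid is the third-party dependency
the module imported; the call sites that consume these IDs are outside this diff):

    from random import randrange
    from shortuuid import ShortUUID

    # Lowercase alphanumeric alphabet, as in the deleted module; full IDs are
    # 25 characters and universally unique.
    UUID = ShortUUID(alphabet='abcdefghijklmnopqrstuvwxyz0123456789')

    full_id = UUID.uuid()
    truncated_id = UUID.uuid()[:8]   # shorter, but weakens the uniqueness guarantee

    # The hex helper gives 5 hex digits with no uniqueness guarantee at all,
    # e.g. for short human-readable suffixes.
    suffix = '%05x' % randrange(16 ** 5)
    assert len(suffix) == 5
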
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/parser/reading/__init__.py
----------------------------------------------------------------------
diff --git a/aria/parser/reading/__init__.py b/aria/parser/reading/__init__.py
index 32aa5b5..b5c0709 100644
--- a/aria/parser/reading/__init__.py
+++ b/aria/parser/reading/__init__.py
@@ -13,7 +13,7 @@
 from .raw import RawReader
 from .reader import Reader
 from .yaml import YamlReader
-from .locator import Locator
+from .locator import (Locator, deepcopy_with_locators, copy_locators)
 from .json import JsonReader
 from .jinja import JinjaReader
 from .context import ReadingContext
@@ -34,6 +34,8 @@ __all__ = (
     'ReadingContext',
     'RawReader',
     'Locator',
+    'deepcopy_with_locators',
+    'copy_locators',
     'YamlReader',
     'JsonReader',
     'JinjaReader')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/parser/reading/locator.py
----------------------------------------------------------------------
diff --git a/aria/parser/reading/locator.py b/aria/parser/reading/locator.py
index a1cfa9c..4142ee7 100644
--- a/aria/parser/reading/locator.py
+++ b/aria/parser/reading/locator.py
@@ -10,12 +10,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from copy import deepcopy
+
+
 from ...utils.console import puts, Colored, indent
 
+
 # We are inheriting the primitive types in order to add the ability to set
 # an attribute (_locator) on them.
 
-
 class LocatableString(unicode):
     pass
 
@@ -117,3 +120,35 @@ class Locator(object):
     def __str__(self):
         # Should be in same format as Issue.locator_as_str
         return '"%s":%d:%d' % (self.location, self.line, self.column)
+
+
+def deepcopy_with_locators(value):
+    """
+    Like :code:`deepcopy`, but also copies over locators.
+    """
+
+    res = deepcopy(value)
+    copy_locators(res, value)
+    return res
+
+
+def copy_locators(target, source):
+    """
+    Copies over :code:`_locator` for all elements, recursively.
+
+    Assumes that target and source have exactly the same list/dict structure.
+    """
+
+    locator = getattr(source, '_locator', None)
+    if locator is not None:
+        try:
+            setattr(target, '_locator', locator)
+        except AttributeError:
+            pass
+
+    if isinstance(target, list) and isinstance(source, list):
+        for i, _ in enumerate(target):
+            copy_locators(target[i], source[i])
+    elif isinstance(target, dict) and isinstance(source, dict):
+        for k, v in target.items():
+            copy_locators(v, source[k])

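The new deepcopy_with_locators/copy_locators helpers above re-attach the private _locator
attribute after a deepcopy, walking lists and dicts recursively. A minimal usage sketch with a
stand-in locator value (a real Locator instance would normally be attached by the reader):

    from aria.parser.reading import deepcopy_with_locators  # re-exported by this change

    class LocatableList(list):
        # A list that can carry a _locator attribute, like the Locatable*
        # wrappers defined in locator.py.
        pass

    source = LocatableList(['a', 'b'])
    source._locator = '"blueprint.yaml":12:3'  # stand-in for a Locator instance

    copied = deepcopy_with_locators(source)
    assert copied == ['a', 'b']
    assert copied._locator == source._locator
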
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/storage/__init__.py
----------------------------------------------------------------------
diff --git a/aria/storage/__init__.py b/aria/storage/__init__.py
index 45af1be..bd7c8c1 100644
--- a/aria/storage/__init__.py
+++ b/aria/storage/__init__.py
@@ -14,7 +14,7 @@
 # limitations under the License.
 
 """
-Aria's storage Sub-Package
+ARIA's storage Sub-Package
 Path: aria.storage
 
 Storage package is a generic abstraction over different storage types.
@@ -26,15 +26,15 @@ We define this abstraction with the following components:
 4. field: defines a field/item in the model.
 
 API:
-    * application_storage_factory - function, default Aria storage factory.
+    * application_storage_factory - function, default ARIA storage factory.
     * Storage - class, simple storage mapi.
-    * models - module, default Aria standard models.
-    * structures - module, default Aria structures - holds the base model,
+    * models - module, default ARIA standard models.
+    * structures - module, default ARIA structures - holds the base model,
                    and different fields types.
     * Model - class, abstract model implementation.
     * Field - class, base field implementation.
     * IterField - class, base iterable field implementation.
-    * drivers - module, a pool of Aria standard drivers.
+    * drivers - module, a pool of ARIA standard drivers.
     * StorageDriver - class, abstract model implementation.
 """
 from .core import (

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/storage/core.py
----------------------------------------------------------------------
diff --git a/aria/storage/core.py b/aria/storage/core.py
index 883f708..8302fc9 100644
--- a/aria/storage/core.py
+++ b/aria/storage/core.py
@@ -14,7 +14,7 @@
 # limitations under the License.
 
 """
-Aria's storage Sub-Package
+ARIA's storage Sub-Package
 Path: aria.storage
 
 Storage package is a generic abstraction over different storage types.
@@ -26,15 +26,15 @@ We define this abstraction with the following components:
 4. field: defines a field/item in the model.
 
 API:
-    * application_storage_factory - function, default Aria storage factory.
+    * application_storage_factory - function, default ARIA storage factory.
     * Storage - class, simple storage mapi.
-    * models - module, default Aria standard models.
-    * structures - module, default Aria structures - holds the base model,
+    * models - module, default ARIA standard models.
+    * structures - module, default ARIA structures - holds the base model,
                    and different fields types.
     * Model - class, abstract model implementation.
     * Field - class, base field implementation.
     * IterField - class, base iterable field implementation.
-    * drivers - module, a pool of Aria standard drivers.
+    * drivers - module, a pool of ARIA standard drivers.
     * StorageDriver - class, abstract model implementation.
 """
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/storage/instrumentation.py
----------------------------------------------------------------------
diff --git a/aria/storage/instrumentation.py b/aria/storage/instrumentation.py
index 8fb9d82..fb95fcf 100644
--- a/aria/storage/instrumentation.py
+++ b/aria/storage/instrumentation.py
@@ -16,17 +16,16 @@
 import copy
 import json
 
-import sqlalchemy
 import sqlalchemy.event
 
-from . import exceptions
+from ..modeling import models as _models
+from ..storage.exceptions import StorageError
 
-from .modeling import model as _model
 
 _VERSION_ID_COL = 'version'
 _STUB = object()
 _INSTRUMENTED = {
-    _model.Node.runtime_properties: dict
+    _models.Node.runtime_properties: dict
 }
 
 
@@ -207,7 +206,7 @@ def _validate_version_id(instance, mapi):
     if version_id and getattr(instance, _VERSION_ID_COL) != version_id:
         object_version_id = getattr(instance, _VERSION_ID_COL)
         mapi._session.rollback()
-        raise exceptions.StorageError(
+        raise StorageError(
             'Version conflict: committed and object {0} differ '
             '[committed {0}={1}, object {0}={2}]'
             .format(_VERSION_ID_COL,

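The hunk above only changes where the models and StorageError come from, but the surrounding
_validate_version_id implements optimistic locking: the version captured when tracking began
must still match the instance at commit time, otherwise the session is rolled back. A generic
sketch of that pattern, independent of SQLAlchemy and not the ARIA implementation itself:

    class VersionConflict(Exception):
        pass

    def validate_version(tracked_version, current_version):
        # Allow the commit only if no one bumped the version since it was read.
        if tracked_version is not None and tracked_version != current_version:
            raise VersionConflict('committed version %s differs from object version %s'
                                  % (tracked_version, current_version))

    validate_version(3, 3)      # unchanged since it was read: OK
    try:
        validate_version(3, 4)  # concurrently modified: conflict
    except VersionConflict:
        pass
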
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/storage/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/aria/storage/modeling/__init__.py b/aria/storage/modeling/__init__.py
deleted file mode 100644
index 697ed09..0000000
--- a/aria/storage/modeling/__init__.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from collections import namedtuple
-
-from . import (
-    model,
-    instance_elements as _instance_base,
-    orchestrator_elements as _orchestrator_base,
-    template_elements as _template_base,
-)
-
-_ModelBaseCls = namedtuple('ModelBase', 'instance_elements,'
-                                        'orchestrator_elements,'
-                                        'template_elements')
-model_base = _ModelBaseCls(instance_elements=_instance_base,
-                           orchestrator_elements=_orchestrator_base,
-                           template_elements=_template_base)
-
-__all__ = (
-    'model',
-    'model_base',
-)


[06/12] incubator-ariatosca git commit: ARIA-105 Integrate parser and orchestrator models

Posted by em...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/storage/modeling/elements.py
----------------------------------------------------------------------
diff --git a/aria/storage/modeling/elements.py b/aria/storage/modeling/elements.py
deleted file mode 100644
index 8c720b9..0000000
--- a/aria/storage/modeling/elements.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from sqlalchemy import (
-    Column,
-    Text
-)
-
-from ...parser.modeling import utils
-from ...utils.collections import OrderedDict
-from ...utils.console import puts
-from .. import exceptions
-
-from . import structure
-from . import type
-
-# pylint: disable=no-self-argument, no-member, abstract-method
-
-
-class ParameterBase(structure.ModelMixin):
-    """
-    Represents a typed value.
-
-    This class is used by both service model and service instance elements.
-    """
-    __tablename__ = 'parameter'
-    name = Column(Text, nullable=False)
-    type = Column(Text, nullable=False)
-
-    # Check: value type
-    str_value = Column(Text)
-    description = Column(Text)
-
-    @property
-    def as_raw(self):
-        return OrderedDict((
-            ('name', self.name),
-            ('type_name', self.type),
-            ('value', self.value),
-            ('description', self.description)))
-
-    @property
-    def value(self):
-        if self.type is None:
-            return
-        try:
-            if self.type.lower() in ['str', 'unicode']:
-                return self.str_value.decode('utf-8')
-            elif self.type.lower() == 'int':
-                return int(self.str_value)
-            elif self.type.lower() == 'bool':
-                return bool(self.str_value)
-            elif self.type.lower() == 'float':
-                return float(self.str_value)
-            else:
-                raise exceptions.StorageError('No supported type_name was provided')
-        except ValueError:
-            raise exceptions.StorageError('Trying to cast {0} to {1} failed'.format(self.str_value,
-                                                                                    self.type))
-
-    def instantiate(self, context, container):
-        return ParameterBase(self.type, self.str_value, self.description)
-
-    def coerce_values(self, context, container, report_issues):
-        if self.str_value is not None:
-            self.str_value = utils.coerce_value(context, container, self.str_value, report_issues)
-
-
-class MetadataBase(structure.ModelMixin):
-    """
-    Custom values associated with the deployment template and its plans.
-
-    This class is used by both service model and service instance elements.
-
-    Properties:
-
-    * :code:`values`: Dict of custom values
-    """
-    values = Column(type.StrictDict(key_cls=basestring))
-
-    @property
-    def as_raw(self):
-        return self.values
-
-    def instantiate(self, context, container):
-        metadata = MetadataBase()
-        metadata.values.update(self.values)
-        return metadata
-
-    def dump(self, context):
-        puts('Metadata:')
-        with context.style.indent:
-            for name, value in self.values.iteritems():
-                puts('%s: %s' % (name, context.style.meta(value)))

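The deleted ParameterBase above persists every value as text (str_value) plus a type column, and
its value property casts the text back on read. A standalone restatement of that casting rule
for illustration (not the SQLAlchemy model itself):

    def cast_parameter(type_name, str_value):
        # Mirrors ParameterBase.value: convert the stored text according to
        # the declared type name.
        if type_name is None:
            return None
        type_name = type_name.lower()
        if type_name in ('str', 'unicode'):
            return str_value
        elif type_name == 'int':
            return int(str_value)
        elif type_name == 'float':
            return float(str_value)
        elif type_name == 'bool':
            # bool() of any non-empty string is True, so 'False' comes back
            # truthy; this mirrors the deleted code's behavior.
            return bool(str_value)
        raise ValueError('unsupported type name: %s' % type_name)

    assert cast_parameter('int', '42') == 42
    assert cast_parameter('bool', 'False') is True
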
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/storage/modeling/instance_elements.py
----------------------------------------------------------------------
diff --git a/aria/storage/modeling/instance_elements.py b/aria/storage/modeling/instance_elements.py
deleted file mode 100644
index 86e2ea6..0000000
--- a/aria/storage/modeling/instance_elements.py
+++ /dev/null
@@ -1,1288 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from sqlalchemy import (
-    Column,
-    Text,
-    Integer,
-    Boolean,
-)
-from sqlalchemy import DateTime
-from sqlalchemy.ext.associationproxy import association_proxy
-from sqlalchemy.ext.declarative import declared_attr
-from sqlalchemy.ext.orderinglist import ordering_list
-
-from aria.parser import validation
-from aria.utils import collections, formatting, console
-
-from . import (
-    utils,
-    structure,
-    type as aria_types
-)
-
-# pylint: disable=no-self-argument, no-member, abstract-method
-
-# region Element instances
-
-
-class ServiceInstanceBase(structure.ModelMixin):
-    __tablename__ = 'service_instance'
-
-    __private_fields__ = ['substitution_fk',
-                          'service_template_fk']
-
-    description = Column(Text)
-    _metadata = Column(Text)
-
-    # region orchestrator required columns
-
-    created_at = Column(DateTime, nullable=False, index=True)
-    permalink = Column(Text)
-    policy_triggers = Column(aria_types.Dict)
-    policy_types = Column(aria_types.Dict)
-    scaling_groups = Column(aria_types.Dict)
-    updated_at = Column(DateTime)
-    workflows = Column(aria_types.Dict)
-
-    @declared_attr
-    def service_template_name(cls):
-        return association_proxy('service_template', 'name')
-
-    # endregion
-
-    # region foreign keys
-    @declared_attr
-    def substitution_fk(cls):
-        return cls.foreign_key('substitution', nullable=True)
-
-    @declared_attr
-    def service_template_fk(cls):
-        return cls.foreign_key('service_template')
-
-    # endregion
-
-    # region one-to-one relationships
-    @declared_attr
-    def substitution(cls):
-        return cls.one_to_one_relationship('substitution')
-    # endregion
-
-    # region many-to-one relationships
-    @declared_attr
-    def service_template(cls):
-        return cls.many_to_one_relationship('service_template')
-
-    # endregion
-
-    # region many-to-many relationships
-    @declared_attr
-    def inputs(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='inputs')
-
-    @declared_attr
-    def outputs(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='outputs')
-
-    # endregion
-
-    # association proxies
-
-    def satisfy_requirements(self, context):
-        satisfied = True
-        for node in self.nodes.all():
-            if not node.satisfy_requirements(context):
-                satisfied = False
-        return satisfied
-
-    def validate_capabilities(self, context):
-        satisfied = True
-        for node in self.nodes.all():
-            if not node.validate_capabilities(context):
-                satisfied = False
-        return satisfied
-
-    def find_nodes(self, node_template_name):
-        nodes = []
-        for node in self.nodes.all():
-            if node.template_name == node_template_name:
-                nodes.append(node)
-        return collections.FrozenList(nodes)
-
-    def get_node_ids(self, node_template_name):
-        return collections.FrozenList((node.id for node in self.find_nodes(node_template_name)))
-
-    def find_groups(self, group_template_name):
-        groups = []
-        for group in self.groups.all():
-            if group.template_name == group_template_name:
-                groups.append(group)
-        return collections.FrozenList(groups)
-
-    def get_group_ids(self, group_template_name):
-        return collections.FrozenList((group.id for group in self.find_groups(group_template_name)))
-
-    def is_node_a_target(self, context, target_node):
-        for node in self.nodes.all():
-            if self._is_node_a_target(context, node, target_node):
-                return True
-        return False
-
-    def _is_node_a_target(self, context, source_node, target_node):
-        if source_node.relationships:
-            for relationship in source_node.relationships:
-                if relationship.target_node_id == target_node.id:
-                    return True
-                else:
-                    node = context.modeling.instance.nodes.get(relationship.target_node_id)
-                    if node is not None:
-                        if self._is_node_a_target(context, node, target_node):
-                            return True
-        return False
-
-
-class OperationBase(structure.ModelMixin):
-    """
-    An operation in a :class:`Interface`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`implementation`: Implementation string (interpreted by the orchestrator)
-    * :code:`dependencies`: List of strings (interpreted by the orchestrator)
-    * :code:`executor`: Executor string (interpreted by the orchestrator)
-    * :code:`max_retries`: Maximum number of retries allowed in case of failure
-    * :code:`retry_interval`: Interval between retries
-    * :code:`inputs`: Dict of :class:`Parameter`
-    """
-    __tablename__ = 'operation'
-
-    __private_fields__ = ['service_instance_fk',
-                          'interface_instance_fk']
-
-    # region foreign_keys
-
-    @declared_attr
-    def service_instance_fk(cls):
-        return cls.foreign_key('service_instance', nullable=True)
-
-    @declared_attr
-    def interface_instance_fk(cls):
-        return cls.foreign_key('interface', nullable=True)
-
-    # endregion
-    description = Column(Text)
-    implementation = Column(Text)
-    dependencies = Column(aria_types.StrictList(item_cls=basestring))
-
-    executor = Column(Text)
-    max_retries = Column(Integer, default=None)
-    retry_interval = Column(Integer, default=None)
-    plugin = Column(Text)
-    operation = Column(Boolean)
-
-    # region many-to-one relationships
-    @declared_attr
-    def service_instance(cls):
-        return cls.many_to_one_relationship('service_instance')
-
-    @declared_attr
-    def interface(cls):
-        return cls.many_to_one_relationship('interface')
-    # region many-to-many relationships
-
-    @declared_attr
-    def inputs(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='inputs')
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('implementation', self.implementation),
-            ('dependencies', self.dependencies),
-            ('executor', self.executor),
-            ('max_retries', self.max_retries),
-            ('retry_interval', self.retry_interval),
-            ('inputs', formatting.as_raw_dict(self.inputs))))
-
-    def validate(self, context):
-        utils.validate_dict_values(context, self.inputs)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, container, self.inputs, report_issues)
-
-    def dump(self, context):
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            if self.implementation is not None:
-                console.puts('Implementation: %s' % context.style.literal(self.implementation))
-            if self.dependencies:
-                console.puts(
-                    'Dependencies: %s'
-                    % ', '.join((str(context.style.literal(v)) for v in self.dependencies)))
-            if self.executor is not None:
-                console.puts('Executor: %s' % context.style.literal(self.executor))
-            if self.max_retries is not None:
-                console.puts('Max retries: %s' % context.style.literal(self.max_retries))
-            if self.retry_interval is not None:
-                console.puts('Retry interval: %s' % context.style.literal(self.retry_interval))
-            utils.dump_parameters(context, self.inputs, 'Inputs')
-
-
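
The max_retries and retry_interval columns above are declared as "interpreted by the orchestrator" rather than enforced by the model itself. As a rough illustration of that contract (a hypothetical retry loop, not ARIA's actual executor code), the two columns might be consumed like this:

    import time

    def run_with_retries(invoke, max_retries=None, retry_interval=None):
        """Call `invoke` until it succeeds, honoring the operation's retry columns.

        `invoke` stands in for whatever callable dispatches the operation's
        `implementation`; both retry columns may be None, treated here as
        "no retries" / "no delay".
        """
        attempts = 1 + (max_retries or 0)
        last_error = None
        for attempt in range(attempts):
            try:
                return invoke()
            except Exception as error:  # a real executor would be more selective
                last_error = error
                if attempt + 1 < attempts and retry_interval:
                    time.sleep(retry_interval)
        raise last_error
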
-class InterfaceBase(structure.ModelMixin):
-    """
-    A typed set of :class:`Operation`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`inputs`: Dict of :class:`Parameter`
-    * :code:`operations`: Dict of :class:`Operation`
-    """
-    __tablename__ = 'interface'
-
-    __private_fields__ = ['group_fk',
-                          'node_fk',
-                          'relationship_fk']
-
-
-    # region foreign_keys
-    @declared_attr
-    def group_fk(cls):
-        return cls.foreign_key('group', nullable=True)
-
-    @declared_attr
-    def node_fk(cls):
-        return cls.foreign_key('node', nullable=True)
-
-    @declared_attr
-    def relationship_fk(cls):
-        return cls.foreign_key('relationship', nullable=True)
-
-    # endregion
-
-    description = Column(Text)
-    type_name = Column(Text)
-    edge = Column(Text)
-
-    # region many-to-one relationships
-
-    @declared_attr
-    def node(cls):
-        return cls.many_to_one_relationship('node')
-
-    @declared_attr
-    def relationship(cls):
-        return cls.many_to_one_relationship('relationship')
-
-    @declared_attr
-    def group(cls):
-        return cls.many_to_one_relationship('group')
-
-    # endregion
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def inputs(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='inputs')
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('inputs', formatting.as_raw_dict(self.inputs)),
-            ('operations', formatting.as_raw_list(self.operations))))
-
-    def validate(self, context):
-        if self.type_name:
-            if context.modeling.interface_types.get_descendant(self.type_name) is None:
-                context.validation.report('interface "%s" has an unknown type: %s'
-                                          % (self.name,
-                                             formatting.safe_repr(self.type_name)),
-                                          level=validation.Issue.BETWEEN_TYPES)
-
-        utils.validate_dict_values(context, self.inputs)
-        utils.validate_dict_values(context, self.operations)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, container, self.inputs, report_issues)
-        utils.coerce_dict_values(context, container, self.operations, report_issues)
-
-    def dump(self, context):
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Interface type: %s' % context.style.type(self.type_name))
-            utils.dump_parameters(context, self.inputs, 'Inputs')
-            utils.dump_dict_values(context, self.operations, 'Operations')
-
-
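
InterfaceBase.validate only asks the modeling context's type hierarchy whether type_name resolves via get_descendant. A minimal stand-in for that lookup (a simple recursive tree, not ARIA's real type registry) shows what the None check above relies on:

    class TypeNode(object):
        """A minimal stand-in for a node in a type hierarchy."""
        def __init__(self, name, children=None):
            self.name = name
            self.children = children or []

        def get_descendant(self, name):
            """Return the type named `name` in this subtree (self included), or None."""
            if self.name == name:
                return self
            for child in self.children:
                found = child.get_descendant(name)
                if found is not None:
                    return found
            return None

    # An unknown type name yields None, which is what triggers the validation issue.
    root = TypeNode('tosca.interfaces.Root',
                    [TypeNode('tosca.interfaces.node.lifecycle.Standard')])
    assert root.get_descendant('tosca.interfaces.node.lifecycle.Standard') is not None
    assert root.get_descendant('my.unknown.Interface') is None
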
-class CapabilityBase(structure.ModelMixin):
-    """
-    A capability of a :class:`Node`.
-
-    An instance of a :class:`CapabilityTemplate`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`min_occurrences`: Minimum number of requirement matches required
-    * :code:`max_occurrences`: Maximum number of requirement matches allowed
-    * :code:`properties`: Dict of :class:`Parameter`
-    """
-    __tablename__ = 'capability'
-
-    __private_fields__ = ['node_fk']
-
-    # region foreign_keys
-    @declared_attr
-    def node_fk(cls):
-        return cls.foreign_key('node')
-
-    # endregion
-    type_name = Column(Text)
-
-    min_occurrences = Column(Integer, default=None) # optional
-    max_occurrences = Column(Integer, default=None) # optional
-    occurrences = Column(Integer, default=0)
-
-    # region many-to-one relationships
-    @declared_attr
-    def node(cls):
-        return cls.many_to_one_relationship('node')
-
-    # endregion
-
-
-    # region many-to-many relationships
-    @declared_attr
-    def properties(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='properties')
-
-    # endregion
-
-    @property
-    def has_enough_relationships(self):
-        if self.min_occurrences is not None:
-            return self.occurrences >= self.min_occurrences
-        return True
-
-    def relate(self):
-        if self.max_occurrences is not None:
-            if self.occurrences == self.max_occurrences:
-                return False
-        self.occurrences += 1
-        return True
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('type_name', self.type_name),
-            ('properties', formatting.as_raw_dict(self.properties))))
-
-    def validate(self, context):
-        if context.modeling.capability_types.get_descendant(self.type_name) is None:
-            context.validation.report('capability "%s" has an unknown type: %s'
-                                      % (self.name,
-                                         formatting.safe_repr(self.type_name)),
-                                      level=validation.Issue.BETWEEN_TYPES)
-
-        utils.validate_dict_values(context, self.properties)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, container, self.properties, report_issues)
-
-    def dump(self, context):
-        console.puts(context.style.node(self.name))
-        with context.style.indent:
-            console.puts('Type: %s' % context.style.type(self.type_name))
-            console.puts('Occurrences: %s (%s%s)'
-                         % (self.occurrences,
-                            self.min_occurrences or 0,
-                            (' to %d' % self.max_occurrences)
-                            if self.max_occurrences is not None
-                            else ' or more'))
-            utils.dump_parameters(context, self.properties)
-
-
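
The occurrence bookkeeping above (relate and has_enough_relationships) is self-contained, so it can be illustrated without SQLAlchemy. A plain-Python mirror of the same logic:

    class CapacityCounter(object):
        """Plain-Python mirror of the occurrence bookkeeping in CapabilityBase."""
        def __init__(self, min_occurrences=None, max_occurrences=None):
            self.min_occurrences = min_occurrences
            self.max_occurrences = max_occurrences
            self.occurrences = 0

        def relate(self):
            # Refuse the relationship once the maximum is reached.
            if self.max_occurrences is not None and self.occurrences == self.max_occurrences:
                return False
            self.occurrences += 1
            return True

        @property
        def has_enough_relationships(self):
            if self.min_occurrences is not None:
                return self.occurrences >= self.min_occurrences
            return True

    capability = CapacityCounter(min_occurrences=1, max_occurrences=2)
    assert not capability.has_enough_relationships   # 0 < 1
    assert capability.relate() and capability.relate()
    assert not capability.relate()                   # maximum of 2 reached
    assert capability.has_enough_relationships
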
-class ArtifactBase(structure.ModelMixin):
-    """
-    A file associated with a :class:`Node`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`source_path`: Source path (CSAR or repository)
-    * :code:`target_path`: Path at destination machine
-    * :code:`repository_url`: Repository URL
-    * :code:`repository_credential`: Dict of string
-    * :code:`properties`: Dict of :class:`Parameter`
-    """
-    __tablename__ = 'artifact'
-
-    __private_fields__ = ['node_fk']
-
-    # region foreign_keys
-
-    @declared_attr
-    def node_fk(cls):
-        return cls.foreign_key('node')
-
-    # endregion
-
-    description = Column(Text)
-    type_name = Column(Text)
-    source_path = Column(Text)
-    target_path = Column(Text)
-    repository_url = Column(Text)
-    repository_credential = Column(aria_types.StrictDict(basestring, basestring))
-
-    # region many-to-one relationships
-    @declared_attr
-    def node(cls):
-        return cls.many_to_one_relationship('node')
-
-    # endregion
-
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def properties(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='properties')
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('source_path', self.source_path),
-            ('target_path', self.target_path),
-            ('repository_url', self.repository_url),
-            ('repository_credential', formatting.as_agnostic(self.repository_credential)),
-            ('properties', formatting.as_raw_dict(self.properties))))
-
-    def validate(self, context):
-        if context.modeling.artifact_types.get_descendant(self.type_name) is None:
-            context.validation.report('artifact "%s" has an unknown type: %s'
-                                      % (self.name,
-                                         formatting.safe_repr(self.type_name)),
-                                      level=validation.Issue.BETWEEN_TYPES)
-        utils.validate_dict_values(context, self.properties)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, container, self.properties, report_issues)
-
-    def dump(self, context):
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Artifact type: %s' % context.style.type(self.type_name))
-            console.puts('Source path: %s' % context.style.literal(self.source_path))
-            if self.target_path is not None:
-                console.puts('Target path: %s' % context.style.literal(self.target_path))
-            if self.repository_url is not None:
-                console.puts('Repository URL: %s' % context.style.literal(self.repository_url))
-            if self.repository_credential:
-                console.puts('Repository credential: %s'
-                             % context.style.literal(self.repository_credential))
-            utils.dump_parameters(context, self.properties)
-
-
-class PolicyBase(structure.ModelMixin):
-    """
-    An instance of a :class:`PolicyTemplate`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`target_node_ids`: Must be represented in the :class:`ServiceInstance`
-    * :code:`target_group_ids`: Must be represented in the :class:`ServiceInstance`
-    """
-    __tablename__ = 'policy'
-
-    __private_fields__ = ['service_instance_fk']
-
-    # region foreign_keys
-
-    @declared_attr
-    def service_instance_fk(cls):
-        return cls.foreign_key('service_instance')
-
-    # endregion
-    type_name = Column(Text)
-    target_node_ids = Column(aria_types.StrictList(basestring))
-    target_group_ids = Column(aria_types.StrictList(basestring))
-
-    # region many-to-one relationships
-    @declared_attr
-    def service_instance(cls):
-        return cls.many_to_one_relationship('service_instance')
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def properties(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='properties')
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('type_name', self.type_name),
-            ('properties', formatting.as_raw_dict(self.properties)),
-            ('target_node_ids', self.target_node_ids),
-            ('target_group_ids', self.target_group_ids)))
-
-    def validate(self, context):
-        if context.modeling.policy_types.get_descendant(self.type_name) is None:
-            context.validation.report('policy "%s" has an unknown type: %s'
-                                      % (self.name, formatting.safe_repr(self.type_name)),
-                                      level=validation.Issue.BETWEEN_TYPES)
-
-        utils.validate_dict_values(context, self.properties)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, container, self.properties, report_issues)
-
-    def dump(self, context):
-        console.puts('Policy: %s' % context.style.node(self.name))
-        with context.style.indent:
-            console.puts('Type: %s' % context.style.type(self.type_name))
-            utils.dump_parameters(context, self.properties)
-            if self.target_node_ids:
-                console.puts('Target nodes:')
-                with context.style.indent:
-                    for node_id in self.target_node_ids:
-                        console.puts(context.style.node(node_id))
-            if self.target_group_ids:
-                console.puts('Target groups:')
-                with context.style.indent:
-                    for group_id in self.target_group_ids:
-                        console.puts(context.style.node(group_id))
-
-
-class GroupPolicyBase(structure.ModelMixin):
-    """
-    Policies applied to groups.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`triggers`: Dict of :class:`GroupPolicyTrigger`
-    """
-    __tablename__ = 'group_policy'
-
-    __private_fields__ = ['group_fk']
-
-    # region foreign_keys
-
-    @declared_attr
-    def group_fk(cls):
-        return cls.foreign_key('group')
-
-    # endregion
-
-    description = Column(Text)
-    type_name = Column(Text)
-
-    # region many-to-one relationships
-    @declared_attr
-    def group(cls):
-        return cls.many_to_one_relationship('group')
-
-    # endregion
-
-    # region many-to-many relationships
-    @declared_attr
-    def properties(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='properties')
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('type_name', self.type_name),
-            ('properties', formatting.as_raw_dict(self.properties)),
-            ('triggers', formatting.as_raw_list(self.triggers))))
-
-    def validate(self, context):
-        if context.modeling.policy_types.get_descendant(self.type_name) is None:
-            context.validation.report(
-                'group policy "%s" has an unknown type: %s'
-                % (self.name,
-                   formatting.safe_repr(self.type_name)),
-                level=validation.Issue.BETWEEN_TYPES)
-
-        utils.validate_dict_values(context, self.properties)
-        utils.validate_dict_values(context, self.triggers)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, container, self.properties, report_issues)
-        utils.coerce_dict_values(context, container, self.triggers, report_issues)
-
-    def dump(self, context):
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Group policy type: %s' % context.style.type(self.type_name))
-            utils.dump_parameters(context, self.properties)
-            utils.dump_dict_values(context, self.triggers, 'Triggers')
-
-
-class GroupPolicyTriggerBase(structure.ModelMixin):
-    """
-    Triggers for :class:`GroupPolicy`.
-
-    Properties:
-
-    * :code:`name`: Name
-    * :code:`description`: Description
-    * :code:`implementation`: Implementation string (interpreted by the orchestrator)
-    * :code:`properties`: Dict of :class:`Parameter`
-    """
-    __tablename__ = 'group_policy_trigger'
-
-    __private_fields__ = ['group_policy_fk']
-
-    # region foreign keys
-
-    @declared_attr
-    def group_policy_fk(cls):
-        return cls.foreign_key('group_policy')
-
-    # endregion
-
-    description = Column(Text)
-    implementation = Column(Text)
-
-    # region many-to-one relationships
-
-    @declared_attr
-    def group_policy(cls):
-        return cls.many_to_one_relationship('group_policy')
-
-    # endregion
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def properties(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='properties')
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('description', self.description),
-            ('implementation', self.implementation),
-            ('properties', formatting.as_raw_dict(self.properties))))
-
-    def validate(self, context):
-        utils.validate_dict_values(context, self.properties)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, container, self.properties, report_issues)
-
-    def dump(self, context):
-        console.puts(context.style.node(self.name))
-        if self.description:
-            console.puts(context.style.meta(self.description))
-        with context.style.indent:
-            console.puts('Implementation: %s' % context.style.literal(self.implementation))
-            utils.dump_parameters(context, self.properties)
-
-
-class MappingBase(structure.ModelMixin):
-    """
-    An instance of a :class:`MappingTemplate`.
-
-    Properties:
-
-    * :code:`mapped_name`: Exposed capability or requirement name
-    * :code:`node_id`: Must be represented in the :class:`ServiceInstance`
-    * :code:`name`: Name of capability or requirement at the node
-    """
-    __tablename__ = 'mapping'
-
-    mapped_name = Column(Text)
-    node_id = Column(Text)
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('mapped_name', self.mapped_name),
-            ('node_id', self.node_id),
-            ('name', self.name)))
-
-    def dump(self, context):
-        console.puts('%s -> %s.%s'
-                     % (context.style.node(self.mapped_name),
-                        context.style.node(self.node_id),
-                        context.style.node(self.name)))
-
-
-class SubstitutionBase(structure.ModelMixin):
-    """
-    An instance of a :class:`SubstitutionTemplate`.
-
-    Properties:
-
-    * :code:`node_type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`capabilities`: Dict of :class:`Mapping`
-    * :code:`requirements`: Dict of :class:`Mapping`
-    """
-    __tablename__ = 'substitution'
-
-    node_type_name = Column(Text)
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def capabilities(cls):
-        return cls.many_to_many_relationship('mapping', table_prefix='capabilities')
-
-    @declared_attr
-    def requirements(cls):
-        return cls.many_to_many_relationship('mapping',
-                                             table_prefix='requirements',
-                                             relationship_kwargs=dict(lazy='dynamic'))
-
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('node_type_name', self.node_type_name),
-            ('capabilities', formatting.as_raw_list(self.capabilities)),
-            ('requirements', formatting.as_raw_list(self.requirements))))
-
-    def validate(self, context):
-        if context.modeling.node_types.get_descendant(self.node_type_name) is None:
-            context.validation.report('substitution "%s" has an unknown type: %s'
-                                      % (self.name,  # pylint: disable=no-member
-                                         # TODO fix self.name reference
-                                         formatting.safe_repr(self.node_type_name)),
-                                      level=validation.Issue.BETWEEN_TYPES)
-
-        utils.validate_dict_values(context, self.capabilities)
-        utils.validate_dict_values(context, self.requirements)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, container, self.capabilities, report_issues)
-        utils.coerce_dict_values(context, container, self.requirements, report_issues)
-
-    def dump(self, context):
-        console.puts('Substitution:')
-        with context.style.indent:
-            console.puts('Node type: %s' % context.style.type(self.node_type_name))
-            utils.dump_dict_values(context, self.capabilities, 'Capability mappings')
-            utils.dump_dict_values(context, self.requirements, 'Requirement mappings')
-
-
-# endregion
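
MappingBase and SubstitutionBase together expose selected capabilities and requirements of internal nodes under new names. A dictionary-based sketch of what each mapping row encodes (the node and capability names below are made up for illustration):

    # Each mapping row ties an exposed name to (node_id, local name).
    capability_mappings = {
        'host': ('web_server_1', 'host'),            # exposed name -> (node, capability)
    }
    requirement_mappings = {
        'database_endpoint': ('app_1', 'database'),  # exposed name -> (node, requirement)
    }

    def resolve_exposed_capability(mapped_name):
        """Return the (node_id, capability_name) pair behind an exposed capability."""
        try:
            return capability_mappings[mapped_name]
        except KeyError:
            raise KeyError('substitution does not expose capability %r' % mapped_name)

    print(resolve_exposed_capability('host'))  # ('web_server_1', 'host')
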
-
-# region Node instances
-
-class NodeBase(structure.ModelMixin):
-    """
-    An instance of a :class:`NodeTemplate`.
-
-    Nodes may have zero or more :class:`Relationship` instances to other nodes.
-
-    Properties:
-
-    * :code:`id`: Unique ID (prefixed with the template name)
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`template_name`: Must be represented in the :class:`ServiceModel`
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`interfaces`: Dict of :class:`Interface`
-    * :code:`artifacts`: Dict of :class:`Artifact`
-    * :code:`capabilities`: Dict of :class:`CapabilityTemplate`
-    * :code:`relationships`: List of :class:`Relationship`
-    """
-    __tablename__ = 'node'
-    version = Column(Integer, nullable=False)
-    __mapper_args__ = {'version_id_col': version}
-
-    __private_fields__ = ['service_instance_fk',
-                          'host_fk',
-                          'node_template_fk']
-
-    # region foreign_keys
-    @declared_attr
-    def service_instance_fk(cls):
-        return cls.foreign_key('service_instance')
-
-    @declared_attr
-    def host_fk(cls):
-        return cls.foreign_key('node', nullable=True)
-
-    @declared_attr
-    def node_template_fk(cls):
-        return cls.foreign_key('node_template')
-
-    # endregion
-
-    type_name = Column(Text)
-    template_name = Column(Text)
-
-    # region orchestrator required columns
-    runtime_properties = Column(aria_types.Dict)
-    scaling_groups = Column(aria_types.List)
-    state = Column(Text, nullable=False)
-
-    @declared_attr
-    def plugins(cls):
-        return association_proxy('node_template', 'plugins')
-
-    @declared_attr
-    def host(cls):
-        return cls.relationship_to_self('host_fk')
-
-    @declared_attr
-    def service_instance_name(cls):
-        return association_proxy('service_instance', 'name')
-
-    @property
-    def ip(self):
-        if not self.host_fk:
-            return None
-        host_node = self.host
-        if 'ip' in host_node.runtime_properties:  # pylint: disable=no-member
-            return host_node.runtime_properties['ip']  # pylint: disable=no-member
-        host_node_template = host_node.node_template  # pylint: disable=no-member
-        host_ip_property = [prop for prop in host_node_template.properties if prop.name == 'ip']
-        if host_ip_property:
-            return host_ip_property[0].value
-        return None
-
-    @declared_attr
-    def node_template(cls):
-        return cls.many_to_one_relationship('node_template')
-
-    @declared_attr
-    def service_template(cls):
-        return association_proxy('service_instance', 'service_template')
-    # endregion
-
-    # region many-to-one relationships
-    @declared_attr
-    def service_instance(cls):
-        return cls.many_to_one_relationship('service_instance')
-
-    # endregion
-
-    # region many-to-many relationships
-
-    @declared_attr
-    def properties(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='properties')
-
-    # endregion
-
-    def satisfy_requirements(self, context):
-        node_template = context.modeling.model.node_templates.get(self.template_name)
-        satisfied = True
-        for i, requirement_template in enumerate(node_template.requirement_templates):
-
-            # Find target template
-            target_node_template, target_node_capability = \
-                requirement_template.find_target(context, node_template)
-            if target_node_template is not None:
-                if not self._satisfy_capability(context,
-                                                target_node_capability,
-                                                target_node_template,
-                                                requirement_template,
-                                                requirement_template_index=i):
-                    satisfied = False
-            else:
-                context.validation.report('requirement "%s" of node "%s" has no target node '
-                                          'template' % (requirement_template.name,
-                                                        self.id),
-                                          level=validation.Issue.BETWEEN_INSTANCES)
-                satisfied = False
-        return satisfied
-
-    def _satisfy_capability(self, context, target_node_capability, target_node_template,
-                            requirement_template, requirement_template_index):
-        # Find target nodes
-        target_nodes = context.modeling.instance.find_nodes(target_node_template.name)
-        if target_nodes:
-            target_node = None
-            target_capability = None
-
-            if target_node_capability is not None:
-                # Relate to the first target node that has capacity
-                for node in target_nodes:
-                    target_capability = node.capabilities.get(target_node_capability.name)
-                    if target_capability.relate():
-                        target_node = node
-                        break
-            else:
-                # Use first target node
-                target_node = target_nodes[0]
-
-            if target_node is not None:
-                relationship = RelationshipBase(
-                    name=requirement_template.name,
-                    source_requirement_index=requirement_template_index,
-                    target_node_id=target_node.id,
-                    target_capability_name=(target_capability.name
-                                            if target_capability is not None
-                                            else None)
-                )
-                self.relationships.append(relationship)
-                return True
-            else:
-                context.validation.report('requirement "%s" of node "%s" targets node '
-                                          'template "%s" but its instantiated nodes do not '
-                                          'have enough capacity'
-                                          % (requirement_template.name,
-                                             self.id,
-                                             target_node_template.name),
-                                          level=validation.Issue.BETWEEN_INSTANCES)
-                return False
-        else:
-            context.validation.report('requirement "%s" of node "%s" targets node template '
-                                      '"%s" but it has no instantiated nodes'
-                                      % (requirement_template.name,
-                                         self.id,
-                                         target_node_template.name),
-                                      level=validation.Issue.BETWEEN_INSTANCES)
-            return False
-
-    def validate_capabilities(self, context):
-        satisfied = True
-        for capability in self.capabilities.itervalues():
-            if not capability.has_enough_relationships:
-                context.validation.report('capability "%s" of node "%s" requires at least %d '
-                                          'relationships but has %d'
-                                          % (capability.name,
-                                             self.id,
-                                             capability.min_occurrences,
-                                             capability.occurrences),
-                                          level=validation.Issue.BETWEEN_INSTANCES)
-                satisfied = False
-        return satisfied
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('id', self.id),
-            ('type_name', self.type_name),
-            ('template_name', self.template_name),
-            ('properties', formatting.as_raw_dict(self.properties)),
-            ('interfaces', formatting.as_raw_list(self.interfaces)),
-            ('artifacts', formatting.as_raw_list(self.artifacts)),
-            ('capabilities', formatting.as_raw_list(self.capabilities)),
-            ('relationships', formatting.as_raw_list(self.relationships))))
-
-    def validate(self, context):
-        if len(self.id) > context.modeling.id_max_length:
-            context.validation.report('"%s" has an ID longer than the limit of %d characters: %d'
-                                      % (self.id,
-                                         context.modeling.id_max_length,
-                                         len(self.id)),
-                                      level=validation.Issue.BETWEEN_INSTANCES)
-
-        # TODO: validate that node template is of type?
-
-        utils.validate_dict_values(context, self.properties)
-        utils.validate_dict_values(context, self.interfaces)
-        utils.validate_dict_values(context, self.artifacts)
-        utils.validate_dict_values(context, self.capabilities)
-        utils.validate_list_values(context, self.relationships)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, self, self.properties, report_issues)
-        utils.coerce_dict_values(context, self, self.interfaces, report_issues)
-        utils.coerce_dict_values(context, self, self.artifacts, report_issues)
-        utils.coerce_dict_values(context, self, self.capabilities, report_issues)
-        utils.coerce_list_values(context, self, self.relationships, report_issues)
-
-    def dump(self, context):
-        console.puts('Node: %s' % context.style.node(self.id))
-        with context.style.indent:
-            console.puts('Template: %s' % context.style.node(self.template_name))
-            console.puts('Type: %s' % context.style.type(self.type_name))
-            utils.dump_parameters(context, self.properties)
-            utils.dump_interfaces(context, self.interfaces)
-            utils.dump_dict_values(context, self.artifacts, 'Artifacts')
-            utils.dump_dict_values(context, self.capabilities, 'Capabilities')
-            utils.dump_list_values(context, self.relationships, 'Relationships')
-
-
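
The capacity-aware matching in _satisfy_capability reduces to "pick the first instantiated target node whose named capability still accepts another relationship". A stripped-down sketch of that selection rule with stand-in objects (not the mapped models):

    def pick_target(candidate_nodes, capability_name):
        """Return (node, capability) for the first candidate with remaining capacity.

        `candidate_nodes` is assumed to be a list of objects with a `capabilities`
        dict of objects exposing relate(); this mirrors the loop in
        _satisfy_capability without any of the validation reporting.
        """
        for node in candidate_nodes:
            capability = node.capabilities.get(capability_name)
            if capability is not None and capability.relate():
                return node, capability
        return None, None

    class _FakeCapability(object):
        def __init__(self, capacity):
            self.capacity = capacity
        def relate(self):
            if self.capacity <= 0:
                return False
            self.capacity -= 1
            return True

    class _FakeNode(object):
        def __init__(self, name, capacity):
            self.name = name
            self.capabilities = {'host': _FakeCapability(capacity)}

    nodes = [_FakeNode('full', 0), _FakeNode('free', 1)]
    target, _ = pick_target(nodes, 'host')
    print(target.name)  # 'free' -- the first node with remaining capacity
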
-class GroupBase(structure.ModelMixin):
-    """
-    An instance of a :class:`GroupTemplate`.
-
-    Properties:
-
-    * :code:`id`: Unique ID (prefixed with the template name)
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`template_name`: Must be represented in the :class:`ServiceModel`
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`interfaces`: Dict of :class:`Interface`
-    * :code:`policies`: Dict of :class:`GroupPolicy`
-    * :code:`member_node_ids`: Must be represented in the :class:`ServiceInstance`
-    * :code:`member_group_ids`: Must be represented in the :class:`ServiceInstance`
-    """
-    __tablename__ = 'group'
-
-    __private_fields__ = ['service_instance_fk']
-
-    # region foreign_keys
-
-    @declared_attr
-    def service_instance_fk(cls):
-        return cls.foreign_key('service_instance')
-
-    # endregion
-
-    type_name = Column(Text)
-    template_name = Column(Text)
-    member_node_ids = Column(aria_types.StrictList(basestring))
-    member_group_ids = Column(aria_types.StrictList(basestring))
-
-    # region many-to-one relationships
-    @declared_attr
-    def service_instance(cls):
-        return cls.many_to_one_relationship('service_instance')
-
-    # region many-to-many relationships
-    @declared_attr
-    def properties(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='properties')
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('id', self.id),
-            ('type_name', self.type_name),
-            ('template_name', self.template_name),
-            ('properties', formatting.as_raw_dict(self.properties)),
-            ('interfaces', formatting.as_raw_list(self.interfaces)),
-            ('policies', formatting.as_raw_list(self.policies)),
-            ('member_node_ids', self.member_node_ids),
-            ('member_group_ids', self.member_group_ids)))
-
-    def validate(self, context):
-        if context.modeling.group_types.get_descendant(self.type_name) is None:
-            context.validation.report('group "%s" has an unknown type: %s'
-                                      % (self.name,  # pylint: disable=no-member
-                                         # TODO fix self.name reference
-                                         formatting.safe_repr(self.type_name)),
-                                      level=validation.Issue.BETWEEN_TYPES)
-
-        utils.validate_dict_values(context, self.properties)
-        utils.validate_dict_values(context, self.interfaces)
-        utils.validate_dict_values(context, self.policies)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, container, self.properties, report_issues)
-        utils.coerce_dict_values(context, container, self.interfaces, report_issues)
-        utils.coerce_dict_values(context, container, self.policies, report_issues)
-
-    def dump(self, context):
-        console.puts('Group: %s' % context.style.node(self.id))
-        with context.style.indent:
-            console.puts('Type: %s' % context.style.type(self.type_name))
-            console.puts('Template: %s' % context.style.type(self.template_name))
-            utils.dump_parameters(context, self.properties)
-            utils.dump_interfaces(context, self.interfaces)
-            utils.dump_dict_values(context, self.policies, 'Policies')
-            if self.member_node_ids:
-                console.puts('Member nodes:')
-                with context.style.indent:
-                    for node_id in self.member_node_ids:
-                        console.puts(context.style.node(node_id))
-
-# endregion
-
-# region Relationship instances
-
-
-class RelationshipBase(structure.ModelMixin):
-    """
-    Connects :class:`Node` to another node.
-
-    An instance of a :class:`RelationshipTemplate`.
-
-    Properties:
-
-    * :code:`name`: Name (usually the name of the requirement at the source node template)
-    * :code:`source_requirement_index`: Must be represented in the source node template
-    * :code:`target_node_id`: Must be represented in the :class:`ServiceInstance`
-    * :code:`target_capability_name`: Matches the capability at the target node
-    * :code:`type_name`: Must be represented in the :class:`ModelingContext`
-    * :code:`template_name`: Must be represented in the :class:`ServiceModel`
-    * :code:`properties`: Dict of :class:`Parameter`
-    * :code:`source_interfaces`: Dict of :class:`Interface`
-    * :code:`target_interfaces`: Dict of :class:`Interface`
-    """
-    __tablename__ = 'relationship'
-
-    __private_fields__ = ['source_node_fk',
-                          'target_node_fk']
-
-    source_requirement_index = Column(Integer)
-    target_node_id = Column(Text)
-    target_capability_name = Column(Text)
-    type_name = Column(Text)
-    template_name = Column(Text)
-    type_hierarchy = Column(aria_types.List)
-
-    # region orchestrator required columns
-    source_position = Column(Integer)
-    target_position = Column(Integer)
-
-    @declared_attr
-    def source_node_fk(cls):
-        return cls.foreign_key('node', nullable=True)
-
-    @declared_attr
-    def source_node(cls):
-        return cls.many_to_one_relationship(
-            'node',
-            'source_node_fk',
-            backreference='outbound_relationships',
-            backref_kwargs=dict(
-                order_by=cls.source_position,
-                collection_class=ordering_list('source_position', count_from=0),
-            )
-        )
-
-    @declared_attr
-    def source_node_name(cls):
-        return association_proxy('source_node', cls.name_column_name())
-
-    @declared_attr
-    def target_node_fk(cls):
-        return cls.foreign_key('node', nullable=True)
-
-    @declared_attr
-    def target_node(cls):
-        return cls.many_to_one_relationship(
-            'node',
-            'target_node_fk',
-            backreference='inbound_relationships',
-            backref_kwargs=dict(
-                order_by=cls.target_position,
-                collection_class=ordering_list('target_position', count_from=0),
-            )
-        )
-
-    @declared_attr
-    def target_node_name(cls):
-        return association_proxy('target_node', cls.name_column_name())
-    # endregion
-
-    # region many-to-many relationship
-
-    @declared_attr
-    def properties(cls):
-        return cls.many_to_many_relationship('parameter', table_prefix='properties')
-
-    # endregion
-
-    @property
-    def as_raw(self):
-        return collections.OrderedDict((
-            ('name', self.name),
-            ('source_requirement_index', self.source_requirement_index),
-            ('target_node_id', self.target_node_id),
-            ('target_capability_name', self.target_capability_name),
-            ('type_name', self.type_name),
-            ('template_name', self.template_name),
-            ('properties', formatting.as_raw_dict(self.properties)),
-            ('source_interfaces', formatting.as_raw_list(self.source_interfaces)),
-            ('target_interfaces', formatting.as_raw_list(self.target_interfaces))))
-
-    def validate(self, context):
-        if self.type_name:
-            if context.modeling.relationship_types.get_descendant(self.type_name) is None:
-                context.validation.report('relationship "%s" has an unknown type: %s'
-                                          % (self.name,
-                                             formatting.safe_repr(self.type_name)),
-                                          level=validation.Issue.BETWEEN_TYPES)
-        utils.validate_dict_values(context, self.properties)
-        utils.validate_dict_values(context, self.source_interfaces)
-        utils.validate_dict_values(context, self.target_interfaces)
-
-    def coerce_values(self, context, container, report_issues):
-        utils.coerce_dict_values(context, container, self.properties, report_issues)
-        utils.coerce_dict_values(context, container, self.source_interfaces, report_issues)
-        utils.coerce_dict_values(context, container, self.target_interfaces, report_issues)
-
-    def dump(self, context):
-        if self.name:
-            if self.source_requirement_index is not None:
-                console.puts('%s (%d) ->' % (
-                    context.style.node(self.name),
-                    self.source_requirement_index))
-            else:
-                console.puts('%s ->' % context.style.node(self.name))
-        else:
-            console.puts('->')
-        with context.style.indent:
-            console.puts('Node: %s' % context.style.node(self.target_node_id))
-            if self.target_capability_name is not None:
-                console.puts('Capability: %s' % context.style.node(self.target_capability_name))
-            if self.type_name is not None:
-                console.puts('Relationship type: %s' % context.style.type(self.type_name))
-            if self.template_name is not None:
-                console.puts('Relationship template: %s' % context.style.node(self.template_name))
-            utils.dump_parameters(context, self.properties)
-            utils.dump_interfaces(context, self.source_interfaces, 'Source interfaces')
-            utils.dump_interfaces(context, self.target_interfaces, 'Target interfaces')
-
-# endregion
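
The source_position and target_position columns are maintained by SQLAlchemy's ordering_list collection, so the index of a relationship inside a node's outbound or inbound list is persisted automatically. A minimal standalone example of the same pattern (table and attribute names here are illustrative, not ARIA's):

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.ext.orderinglist import ordering_list
    from sqlalchemy.orm import relationship, sessionmaker

    Base = declarative_base()

    class Node(Base):
        __tablename__ = 'node'
        id = Column(Integer, primary_key=True)
        name = Column(String)
        outbound_relationships = relationship(
            'Relationship',
            order_by='Relationship.source_position',
            collection_class=ordering_list('source_position', count_from=0))

    class Relationship(Base):
        __tablename__ = 'relationship'
        id = Column(Integer, primary_key=True)
        source_node_id = Column(Integer, ForeignKey('node.id'))
        source_position = Column(Integer)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    node = Node(name='n1')
    node.outbound_relationships.append(Relationship())
    node.outbound_relationships.append(Relationship())
    session.add(node)
    session.commit()
    print([r.source_position for r in node.outbound_relationships])  # [0, 1]
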

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/storage/modeling/model.py
----------------------------------------------------------------------
diff --git a/aria/storage/modeling/model.py b/aria/storage/modeling/model.py
deleted file mode 100644
index cf7d933..0000000
--- a/aria/storage/modeling/model.py
+++ /dev/null
@@ -1,223 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from sqlalchemy.ext.declarative import declarative_base
-
-from . import (
-    template_elements,
-    instance_elements,
-    orchestrator_elements,
-    elements,
-    structure,
-)
-
-__all__ = (
-    'aria_declarative_base',
-
-    'Parameter',
-
-    'MappingTemplate',
-    'InterfaceTemplate',
-    'OperationTemplate',
-    'ServiceTemplate',
-    'NodeTemplate',
-    'GroupTemplate',
-    'ArtifactTemplate',
-    'PolicyTemplate',
-    'GroupPolicyTemplate',
-    'GroupPolicyTriggerTemplate',
-    'RequirementTemplate',
-    'CapabilityTemplate',
-
-    'Mapping',
-    'Substitution',
-    'ServiceInstance',
-    'Node',
-    'Relationship',
-    'Artifact',
-    'Group',
-    'Interface',
-    'Operation',
-    'Capability',
-    'Policy',
-    'GroupPolicy',
-    'GroupPolicyTrigger',
-
-    'Execution',
-    'ServiceInstanceUpdate',
-    'ServiceInstanceUpdateStep',
-    'ServiceInstanceModification',
-    'Plugin',
-    'Task'
-)
-
-aria_declarative_base = declarative_base(cls=structure.ModelIDMixin)
-
-# pylint: disable=abstract-method
-
-# region elements
-
-
-class Parameter(aria_declarative_base, elements.ParameterBase):
-    pass
-
-# endregion
-
-# region template models
-
-
-class MappingTemplate(aria_declarative_base, template_elements.MappingTemplateBase):
-    pass
-
-
-class SubstitutionTemplate(aria_declarative_base, template_elements.SubstitutionTemplateBase):
-    pass
-
-
-class InterfaceTemplate(aria_declarative_base, template_elements.InterfaceTemplateBase):
-    pass
-
-
-class OperationTemplate(aria_declarative_base, template_elements.OperationTemplateBase):
-    pass
-
-
-class ServiceTemplate(aria_declarative_base, template_elements.ServiceTemplateBase):
-    pass
-
-
-class NodeTemplate(aria_declarative_base, template_elements.NodeTemplateBase):
-    pass
-
-
-class GroupTemplate(aria_declarative_base, template_elements.GroupTemplateBase):
-    pass
-
-
-class ArtifactTemplate(aria_declarative_base, template_elements.ArtifactTemplateBase):
-    pass
-
-
-class PolicyTemplate(aria_declarative_base, template_elements.PolicyTemplateBase):
-    pass
-
-
-class GroupPolicyTemplate(aria_declarative_base, template_elements.GroupPolicyTemplateBase):
-    pass
-
-
-class GroupPolicyTriggerTemplate(aria_declarative_base,
-                                 template_elements.GroupPolicyTriggerTemplateBase):
-    pass
-
-
-class RequirementTemplate(aria_declarative_base, template_elements.RequirementTemplateBase):
-    pass
-
-
-class CapabilityTemplate(aria_declarative_base, template_elements.CapabilityTemplateBase):
-    pass
-
-
-# endregion
-
-# region instance models
-
-class Mapping(aria_declarative_base, instance_elements.MappingBase):
-    pass
-
-
-class Substitution(aria_declarative_base, instance_elements.SubstitutionBase):
-    pass
-
-
-class ServiceInstance(aria_declarative_base, instance_elements.ServiceInstanceBase):
-    pass
-
-
-class Node(aria_declarative_base, instance_elements.NodeBase):
-    pass
-
-
-class Relationship(aria_declarative_base, instance_elements.RelationshipBase):
-    pass
-
-
-class Artifact(aria_declarative_base, instance_elements.ArtifactBase):
-    pass
-
-
-class Group(aria_declarative_base, instance_elements.GroupBase):
-    pass
-
-
-class Interface(aria_declarative_base, instance_elements.InterfaceBase):
-    pass
-
-
-class Operation(aria_declarative_base, instance_elements.OperationBase):
-    pass
-
-
-class Capability(aria_declarative_base, instance_elements.CapabilityBase):
-    pass
-
-
-class Policy(aria_declarative_base, instance_elements.PolicyBase):
-    pass
-
-
-class GroupPolicy(aria_declarative_base, instance_elements.GroupPolicyBase):
-    pass
-
-
-class GroupPolicyTrigger(aria_declarative_base, instance_elements.GroupPolicyTriggerBase):
-    pass
-
-
-# endregion
-
-# region orchestrator models
-
-class Execution(aria_declarative_base, orchestrator_elements.Execution):
-    pass
-
-
-class ServiceInstanceUpdate(aria_declarative_base,
-                            orchestrator_elements.ServiceInstanceUpdateBase):
-    pass
-
-
-class ServiceInstanceUpdateStep(aria_declarative_base,
-                                orchestrator_elements.ServiceInstanceUpdateStepBase):
-    pass
-
-
-class ServiceInstanceModification(aria_declarative_base,
-                                  orchestrator_elements.ServiceInstanceModificationBase):
-    pass
-
-
-class Plugin(aria_declarative_base, orchestrator_elements.PluginBase):
-    pass
-
-
-class Task(aria_declarative_base, orchestrator_elements.TaskBase):
-    pass
-
-
-class Log(aria_declarative_base, orchestrator_elements.LogBase):
-    pass
-# endregion
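
Every concrete class above is just "declarative base plus behaviour mixin". The underlying pattern is plain SQLAlchemy: declarative_base(cls=...) injects a shared mixin into every model, and each mapped class multiply-inherits that base together with its own *Base mixin. A self-contained sketch of the layering (names are illustrative, not ARIA's actual mixins):

    from sqlalchemy import Column, Integer, Text, create_engine
    from sqlalchemy.ext.declarative import declarative_base, declared_attr

    class IDMixin(object):
        """Plays the role of structure.ModelIDMixin: a shared surrogate key."""
        id = Column(Integer, primary_key=True, autoincrement=True)

        @declared_attr
        def __tablename__(cls):
            return cls.__name__.lower()

    Base = declarative_base(cls=IDMixin)

    class ParameterBase(object):
        """Plays the role of a *Base mixin: columns and behaviour, no table yet."""
        name = Column(Text)
        value = Column(Text)

        def as_raw(self):
            return {'name': self.name, 'value': self.value}

    class Parameter(Base, ParameterBase):
        """The concrete, mapped model: table 'parameter', all columns present."""
        pass

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    print(Parameter.__tablename__)  # 'parameter'
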

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/storage/modeling/orchestrator_elements.py
----------------------------------------------------------------------
diff --git a/aria/storage/modeling/orchestrator_elements.py b/aria/storage/modeling/orchestrator_elements.py
deleted file mode 100644
index ef773ed..0000000
--- a/aria/storage/modeling/orchestrator_elements.py
+++ /dev/null
@@ -1,497 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Aria's storage.models module
-Path: aria.storage.models
-
-models module holds aria's models.
-
-classes:
-    * Field - represents a single field.
-    * IterField - represents an iterable field.
-    * Model - abstract model implementation.
-    * Snapshot - snapshots implementation model.
-    * Deployment - deployment implementation model.
-    * DeploymentUpdateStep - deployment update step implementation model.
-    * DeploymentUpdate - deployment update implementation model.
-    * DeploymentModification - deployment modification implementation model.
-    * Execution - execution implementation model.
-    * Node - node implementation model.
-    * Relationship - relationship implementation model.
-    * NodeInstance - node instance implementation model.
-    * RelationshipInstance - relationship instance implementation model.
-    * Plugin - plugin implementation model.
-"""
-from collections import namedtuple
-from datetime import datetime
-
-from sqlalchemy import (
-    Column,
-    Integer,
-    Text,
-    DateTime,
-    Boolean,
-    Enum,
-    String,
-    Float,
-    orm,
-)
-from sqlalchemy.ext.associationproxy import association_proxy
-from sqlalchemy.ext.declarative import declared_attr
-
-from aria.orchestrator.exceptions import TaskAbortException, TaskRetryException
-
-from .type import List, Dict
-from .structure import ModelMixin
-
-__all__ = (
-    'ServiceInstanceUpdateStepBase',
-    'ServiceInstanceUpdateBase',
-    'ServiceInstanceModificationBase',
-    'Execution',
-    'PluginBase',
-    'TaskBase'
-)
-
-# pylint: disable=no-self-argument, no-member, abstract-method
-
-
-class Execution(ModelMixin):
-    """
-    Execution model representation.
-    """
-    # Needed only for pylint. The id will be populated by SQLAlchemy and the proper column.
-    __tablename__ = 'execution'
-
-    __private_fields__ = ['service_instance_fk']
-
-    TERMINATED = 'terminated'
-    FAILED = 'failed'
-    CANCELLED = 'cancelled'
-    PENDING = 'pending'
-    STARTED = 'started'
-    CANCELLING = 'cancelling'
-    FORCE_CANCELLING = 'force_cancelling'
-
-    STATES = [TERMINATED, FAILED, CANCELLED, PENDING, STARTED, CANCELLING, FORCE_CANCELLING]
-    END_STATES = [TERMINATED, FAILED, CANCELLED]
-    ACTIVE_STATES = [state for state in STATES if state not in END_STATES]
-
-    VALID_TRANSITIONS = {
-        PENDING: [STARTED, CANCELLED],
-        STARTED: END_STATES + [CANCELLING],
-        CANCELLING: END_STATES + [FORCE_CANCELLING]
-    }
-
-    @orm.validates('status')
-    def validate_status(self, key, value):
-        """Validation function that verifies execution status transitions are OK"""
-        try:
-            current_status = getattr(self, key)
-        except AttributeError:
-            return value
-        valid_transitions = self.VALID_TRANSITIONS.get(current_status, [])
-        if all([current_status is not None,
-                current_status != value,
-                value not in valid_transitions]):
-            raise ValueError('Cannot change execution status from {current} to {new}'.format(
-                current=current_status,
-                new=value))
-        return value
-
-    created_at = Column(DateTime, index=True)
-    started_at = Column(DateTime, nullable=True, index=True)
-    ended_at = Column(DateTime, nullable=True, index=True)
-    error = Column(Text, nullable=True)
-    is_system_workflow = Column(Boolean, nullable=False, default=False)
-    parameters = Column(Dict)
-    status = Column(Enum(*STATES, name='execution_status'), default=PENDING)
-    workflow_name = Column(Text)
-
-    @declared_attr
-    def service_template(cls):
-        return association_proxy('service_instance', 'service_template')
-
-    @declared_attr
-    def service_instance_fk(cls):
-        return cls.foreign_key('service_instance')
-
-    @declared_attr
-    def service_instance(cls):
-        return cls.many_to_one_relationship('service_instance')
-
-    @declared_attr
-    def service_instance_name(cls):
-        return association_proxy('service_instance', cls.name_column_name())
-
-    @declared_attr
-    def service_template_name(cls):
-        return association_proxy('service_instance', 'service_template_name')
-
-    def __str__(self):
-        return '<{0} id=`{1}` (status={2})>'.format(
-            self.__class__.__name__,
-            getattr(self, self.name_column_name()),
-            self.status
-        )
-
-
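
validate_status enforces a small state machine over VALID_TRANSITIONS. Stripped of the ORM plumbing, the rule is a dictionary lookup plus two allowances (initial assignment and no-op transitions); a plain-Python sketch of the same check, using the status constants shown above:

    VALID_TRANSITIONS = {
        'pending': ['started', 'cancelled'],
        'started': ['terminated', 'failed', 'cancelled', 'cancelling'],
        'cancelling': ['terminated', 'failed', 'cancelled', 'force_cancelling'],
    }

    def check_transition(current_status, new_status):
        """Raise ValueError on a transition that VALID_TRANSITIONS does not allow."""
        if current_status is None or current_status == new_status:
            return new_status  # initial assignment and no-ops are always allowed
        if new_status not in VALID_TRANSITIONS.get(current_status, []):
            raise ValueError('Cannot change execution status from {current} to {new}'.format(
                current=current_status, new=new_status))
        return new_status

    check_transition(None, 'pending')        # fine: initial assignment
    check_transition('pending', 'started')   # fine: declared transition
    try:
        check_transition('terminated', 'started')  # end states allow no transitions
    except ValueError as error:
        print(error)
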
-class ServiceInstanceUpdateBase(ModelMixin):
-    """
-    Service instance update model representation.
-    """
-    # Needed only for pylint. The id will be populated by SQLAlchemy and the proper column.
-    steps = None
-
-    __tablename__ = 'service_instance_update'
-    __private_fields__ = ['service_instance_fk',
-                          'execution_fk']
-
-    _private_fields = ['execution_fk', 'service_instance_fk']
-
-    created_at = Column(DateTime, nullable=False, index=True)
-    service_instance_plan = Column(Dict, nullable=False)
-    service_instance_update_node_instances = Column(Dict)
-    service_instance_update_service_instance = Column(Dict)
-    service_instance_update_nodes = Column(List)
-    modified_entity_ids = Column(Dict)
-    state = Column(Text)
-
-    @declared_attr
-    def execution_fk(cls):
-        return cls.foreign_key('execution', nullable=True)
-
-    @declared_attr
-    def execution(cls):
-        return cls.many_to_one_relationship('execution')
-
-    @declared_attr
-    def execution_name(cls):
-        return association_proxy('execution', cls.name_column_name())
-
-    @declared_attr
-    def service_instance_fk(cls):
-        return cls.foreign_key('service_instance')
-
-    @declared_attr
-    def service_instance(cls):
-        return cls.many_to_one_relationship('service_instance')
-
-    @declared_attr
-    def service_instance_name(cls):
-        return association_proxy('service_instance', cls.name_column_name())
-
-    def to_dict(self, suppress_error=False, **kwargs):
-        dep_update_dict = super(ServiceInstanceUpdateBase, self).to_dict(suppress_error)     #pylint: disable=no-member
-        # The update steps are models themselves, so serialize each one explicitly
-        dep_update_dict['steps'] = [step.to_dict() for step in self.steps]
-        return dep_update_dict
-
-
-class ServiceInstanceUpdateStepBase(ModelMixin):
-    """
-    Service instance update step model representation.
-    """
-    # Needed only for pylint. The id will be populated by SQLAlchemy and the proper column.
-    __tablename__ = 'service_instance_update_step'
-    __private_fields__ = ['service_instance_update_fk']
-
-    _action_types = namedtuple('ACTION_TYPES', 'ADD, REMOVE, MODIFY')
-    ACTION_TYPES = _action_types(ADD='add', REMOVE='remove', MODIFY='modify')
-    _entity_types = namedtuple(
-        'ENTITY_TYPES',
-        'NODE, RELATIONSHIP, PROPERTY, OPERATION, WORKFLOW, OUTPUT, DESCRIPTION, GROUP, '
-        'POLICY_TYPE, POLICY_TRIGGER, PLUGIN')
-    ENTITY_TYPES = _entity_types(
-        NODE='node',
-        RELATIONSHIP='relationship',
-        PROPERTY='property',
-        OPERATION='operation',
-        WORKFLOW='workflow',
-        OUTPUT='output',
-        DESCRIPTION='description',
-        GROUP='group',
-        POLICY_TYPE='policy_type',
-        POLICY_TRIGGER='policy_trigger',
-        PLUGIN='plugin'
-    )
-
-    action = Column(Enum(*ACTION_TYPES, name='action_type'), nullable=False)
-    entity_id = Column(Text, nullable=False)
-    entity_type = Column(Enum(*ENTITY_TYPES, name='entity_type'), nullable=False)
-
-    @declared_attr
-    def service_instance_update_fk(cls):
-        return cls.foreign_key('service_instance_update')
-
-    @declared_attr
-    def service_instance_update(cls):
-        return cls.many_to_one_relationship('service_instance_update',
-                                            backreference='steps')
-
-    @declared_attr
-    def service_instance_update_name(cls):
-        return association_proxy('service_instance_update', cls.name_column_name())
-
-    def __hash__(self):
-        return hash((getattr(self, self.id_column_name()), self.entity_id))
-
-    def __lt__(self, other):
-        """
-        The ordering is 'remove' < 'modify' < 'add'.
-        :param other: The step to compare against
-        :return: Whether this step should be ordered before `other`
-        """
-        if not isinstance(other, self.__class__):
-            return not self >= other
-
-        if self.action != other.action:
-            if self.action == 'remove':
-                return_value = True
-            elif self.action == 'add':
-                return_value = False
-            else:
-                return_value = other.action == 'add'
-            return return_value
-
-        if self.action == 'add':
-            return self.entity_type == 'node' and other.entity_type == 'relationship'
-        if self.action == 'remove':
-            return self.entity_type == 'relationship' and other.entity_type == 'node'
-        return False
-
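The comparison above orders update steps for processing: 'remove' steps sort first, then 'modify', then 'add', with the node/relationship checks breaking ties within 'add' and 'remove'. A minimal sketch of the top-level action ordering, using a plain key function rather than the ORM class:

    # Sketch of the ordering encoded by __lt__ (not the ORM model itself)
    order = {'remove': 0, 'modify': 1, 'add': 2}
    actions = ['add', 'remove', 'modify']
    print(sorted(actions, key=order.get))   # ['remove', 'modify', 'add']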
-
-class ServiceInstanceModificationBase(ModelMixin):
-    """
-    Deployment modification model representation.
-    """
-    __tablename__ = 'service_instance_modification'
-    __private_fields__ = ['service_instance_fk']
-
-    STARTED = 'started'
-    FINISHED = 'finished'
-    ROLLEDBACK = 'rolledback'
-
-    STATES = [STARTED, FINISHED, ROLLEDBACK]
-    END_STATES = [FINISHED, ROLLEDBACK]
-
-    context = Column(Dict)
-    created_at = Column(DateTime, nullable=False, index=True)
-    ended_at = Column(DateTime, index=True)
-    modified_nodes = Column(Dict)
-    node_instances = Column(Dict)
-    status = Column(Enum(*STATES, name='deployment_modification_status'))
-
-    @declared_attr
-    def service_instance_fk(cls):
-        return cls.foreign_key('service_instance')
-
-    @declared_attr
-    def service_instance(cls):
-        return cls.many_to_one_relationship('service_instance',
-                                            backreference='modifications')
-
-    @declared_attr
-    def service_instance_name(cls):
-        return association_proxy('service_instance', cls.name_column_name())
-
-
-class PluginBase(ModelMixin):
-    """
-    Plugin model representation.
-    """
-    __tablename__ = 'plugin'
-
-    archive_name = Column(Text, nullable=False, index=True)
-    distribution = Column(Text)
-    distribution_release = Column(Text)
-    distribution_version = Column(Text)
-    package_name = Column(Text, nullable=False, index=True)
-    package_source = Column(Text)
-    package_version = Column(Text)
-    supported_platform = Column(Text)
-    supported_py_versions = Column(List)
-    uploaded_at = Column(DateTime, nullable=False, index=True)
-    wheels = Column(List, nullable=False)
-
-
-class TaskBase(ModelMixin):
-    """
-    Task model representation.
-    """
-    __tablename__ = 'task'
-    __private_fields__ = ['node_fk',
-                          'relationship_fk',
-                          'execution_fk',
-                          'plugin_fk']
-
-    @declared_attr
-    def node_fk(cls):
-        return cls.foreign_key('node', nullable=True)
-
-    @declared_attr
-    def node_name(cls):
-        return association_proxy('node', cls.name_column_name())
-
-    @declared_attr
-    def node(cls):
-        return cls.many_to_one_relationship('node')
-
-    @declared_attr
-    def relationship_fk(cls):
-        return cls.foreign_key('relationship', nullable=True)
-
-    @declared_attr
-    def relationship_name(cls):
-        return association_proxy('relationship', cls.name_column_name())
-
-    @declared_attr
-    def relationship(cls):
-        return cls.many_to_one_relationship('relationship')
-
-    @declared_attr
-    def plugin_fk(cls):
-        return cls.foreign_key('plugin', nullable=True)
-
-    @declared_attr
-    def plugin(cls):
-        return cls.many_to_one_relationship('plugin')
-
-    @declared_attr
-    def execution_fk(cls):
-        return cls.foreign_key('execution', nullable=True)
-
-    @declared_attr
-    def execution(cls):
-        return cls.many_to_one_relationship('execution')
-
-    @declared_attr
-    def execution_name(cls):
-        return association_proxy('execution', cls.name_column_name())
-
-    PENDING = 'pending'
-    RETRYING = 'retrying'
-    SENT = 'sent'
-    STARTED = 'started'
-    SUCCESS = 'success'
-    FAILED = 'failed'
-    STATES = (
-        PENDING,
-        RETRYING,
-        SENT,
-        STARTED,
-        SUCCESS,
-        FAILED,
-    )
-
-    WAIT_STATES = [PENDING, RETRYING]
-    END_STATES = [SUCCESS, FAILED]
-
-    RUNS_ON_SOURCE = 'source'
-    RUNS_ON_TARGET = 'target'
-    RUNS_ON_NODE_INSTANCE = 'node_instance'
-    RUNS_ON = (RUNS_ON_NODE_INSTANCE, RUNS_ON_SOURCE, RUNS_ON_TARGET)
-
-    @orm.validates('max_attempts')
-    def validate_max_attempts(self, _, value):                                  # pylint: disable=no-self-use
-        """Validates that max attempts is either -1 or a positive number"""
-        if value < 1 and value != TaskBase.INFINITE_RETRIES:
-            raise ValueError('Max attempts can be either -1 (infinite) or any positive number. '
-                             'Got {value}'.format(value=value))
-        return value
-
-    INFINITE_RETRIES = -1
-
-    status = Column(Enum(*STATES, name='status'), default=PENDING)
-
-    due_at = Column(DateTime, default=datetime.utcnow)
-    started_at = Column(DateTime, default=None)
-    ended_at = Column(DateTime, default=None)
-    max_attempts = Column(Integer, default=1)
-    retry_count = Column(Integer, default=0)
-    retry_interval = Column(Float, default=0)
-    ignore_failure = Column(Boolean, default=False)
-
-    # Operation specific fields
-    implementation = Column(String)
-    inputs = Column(Dict)
-    # This is unrelated to the task's `plugin` relationship; this field holds the plugin name
-    # as received from the blueprint.
-    plugin_name = Column(String)
-    _runs_on = Column(Enum(*RUNS_ON, name='runs_on'), name='runs_on')
-
-    @property
-    def runs_on(self):
-        if self._runs_on == self.RUNS_ON_NODE_INSTANCE:
-            return self.node
-        elif self._runs_on == self.RUNS_ON_SOURCE:
-            return self.relationship.source_node  # pylint: disable=no-member
-        elif self._runs_on == self.RUNS_ON_TARGET:
-            return self.relationship.target_node  # pylint: disable=no-member
-        return None
-
-    @property
-    def actor(self):
-        """
-        Return the actor of the task.
-        :return: The node or the relationship the task operates on
-        """
-        return self.node or self.relationship
-
-    @classmethod
-    def as_node_instance(cls, instance, runs_on, **kwargs):
-        return cls(node=instance, _runs_on=runs_on, **kwargs)
-
-    @classmethod
-    def as_relationship_instance(cls, instance, runs_on, **kwargs):
-        return cls(relationship=instance, _runs_on=runs_on, **kwargs)
-
-    @staticmethod
-    def abort(message=None):
-        raise TaskAbortException(message)
-
-    @staticmethod
-    def retry(message=None, retry_interval=None):
-        raise TaskRetryException(message, retry_interval=retry_interval)
-
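The class methods above are the intended entry points for building tasks against a node or a relationship. A rough usage sketch, assuming a concrete `Task` model derived from `TaskBase` and an existing `node` instance (both names are illustrative):

    # Illustrative only; `Task` and `node` are assumed to exist elsewhere
    task = Task.as_node_instance(instance=node,
                                 runs_on=Task.RUNS_ON_NODE_INSTANCE,
                                 max_attempts=3,
                                 retry_interval=30)
    assert task.actor is node     # actor falls back to the node or the relationship
    assert task.runs_on is node   # RUNS_ON_NODE_INSTANCE resolves to the node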
-
-class LogBase(ModelMixin):
-    __tablename__ = 'log'
-
-    @declared_attr
-    def execution_fk(cls):
-        return cls.foreign_key('execution')
-
-    @declared_attr
-    def execution(cls):
-        return cls.many_to_one_relationship('execution')
-
-    @declared_attr
-    def task_fk(cls):
-        return cls.foreign_key('task', nullable=True)
-
-    @declared_attr
-    def task(cls):
-        return cls.many_to_one_relationship('task')
-
-    level = Column(String)
-    msg = Column(String)
-    created_at = Column(DateTime, index=True)
-    actor = Column(String)
-
-    def __repr__(self):
-        return "<{self.created_at}: [{self.level}] @{self.actor}> {msg}".format(
-            self=self, msg=self.msg[:50])

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/storage/modeling/structure.py
----------------------------------------------------------------------
diff --git a/aria/storage/modeling/structure.py b/aria/storage/modeling/structure.py
deleted file mode 100644
index eacdb44..0000000
--- a/aria/storage/modeling/structure.py
+++ /dev/null
@@ -1,320 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-ARIA's storage.modeling.structure module
-Path: aria.storage.modeling.structure
-
-This module holds the base structures for ARIA's storage models.
-
-classes:
-    * Function - placeholder for an intrinsic function value.
-    * ElementBase - base class for service instance elements.
-    * ModelElementBase - base class for service model elements.
-    * ModelMixin - SQLAlchemy helpers shared by all models.
-    * ModelIDMixin - provides the `id` and `name` columns.
-"""
-
-from sqlalchemy.orm import relationship, backref
-from sqlalchemy.ext import associationproxy
-from sqlalchemy import (
-    Column,
-    ForeignKey,
-    Integer,
-    Text,
-    Table,
-)
-
-from . import utils
-
-
-class Function(object):
-    """
-    An intrinsic function.
-
-    Serves as a placeholder for a value that should eventually be derived
-    by calling the function.
-    """
-
-    @property
-    def as_raw(self):
-        raise NotImplementedError
-
-    def _evaluate(self, context, container):
-        raise NotImplementedError
-
-    def __deepcopy__(self, memo):
-        # Circumvent cloning in order to maintain our state
-        return self
-
-
-class ElementBase(object):
-    """
-    Base class for :class:`ServiceInstance` elements.
-
-    All elements support validation, diagnostic dumping, and representation as
-    raw data (which can be translated into JSON or YAML) via :code:`as_raw`.
-    """
-
-    @property
-    def as_raw(self):
-        raise NotImplementedError
-
-    def validate(self, context):
-        pass
-
-    def coerce_values(self, context, container, report_issues):
-        pass
-
-    def dump(self, context):
-        pass
-
-
-class ModelElementBase(ElementBase):
-    """
-    Base class for :class:`ServiceModel` elements.
-
-    All model elements can be instantiated into :class:`ServiceInstance` elements.
-    """
-
-    def instantiate(self, context, container):
-        raise NotImplementedError
-
-
-class ModelMixin(ModelElementBase):
-
-    @utils.classproperty
-    def __modelname__(cls):                                                                         # pylint: disable=no-self-argument
-        return getattr(cls, '__mapiname__', cls.__tablename__)
-
-    @classmethod
-    def id_column_name(cls):
-        raise NotImplementedError
-
-    @classmethod
-    def name_column_name(cls):
-        raise NotImplementedError
-
-    @classmethod
-    def _get_cls_by_tablename(cls, tablename):
-        """Return class reference mapped to table.
-
-         :param tablename: String with name of table.
-         :return: Class reference or None.
-         """
-        if tablename in (cls.__name__, cls.__tablename__):
-            return cls
-
-        for table_cls in cls._decl_class_registry.values():
-            if tablename == getattr(table_cls, '__tablename__', None):
-                return table_cls
-
-    @classmethod
-    def foreign_key(cls, table_name, nullable=False):
-        """Return a ForeignKey object with the relevant
-
-        :param table: Unique id column in the parent table
-        :param nullable: Should the column be allowed to remain empty
-        """
-        return Column(Integer,
-                      ForeignKey('{tablename}.id'.format(tablename=table_name), ondelete='CASCADE'),
-                      nullable=nullable)
-
-    @classmethod
-    def one_to_one_relationship(cls, table_name, backreference=None):
-        return relationship(lambda: cls._get_cls_by_tablename(table_name),
-                            backref=backref(backreference or cls.__tablename__, uselist=False))
-
-    @classmethod
-    def many_to_one_relationship(cls,
-                                 parent_table_name,
-                                 foreign_key_column=None,
-                                 backreference=None,
-                                 backref_kwargs=None,
-                                 **kwargs):
-        """Return a one-to-many SQL relationship object
-        Meant to be used from inside the *child* object
-
-        :param parent_class: Class of the parent table
-        :param cls: Class of the child table
-        :param foreign_key_column: The column of the foreign key (from the child table)
-        :param backreference: The name to give to the reference to the child (on the parent table)
-        """
-        relationship_kwargs = kwargs
-        if foreign_key_column:
-            relationship_kwargs.setdefault('foreign_keys', getattr(cls, foreign_key_column))
-
-        backref_kwargs = backref_kwargs or {}
-        backref_kwargs.setdefault('lazy', 'dynamic')
-        # The following line makes sure that when the *parent* is
-        # deleted, all of its connected children are deleted as well
-        backref_kwargs.setdefault('cascade', 'all')
-
-        return relationship(lambda: cls._get_cls_by_tablename(parent_table_name),
-                            backref=backref(backreference or utils.pluralize(cls.__tablename__),
-                                            **backref_kwargs or {}),
-                            **relationship_kwargs)
-
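This helper, together with foreign_key(), is the pattern the model classes above follow: the child declares both the foreign-key column and the relationship. A condensed sketch (the `ChildBase` name is illustrative; `declared_attr` comes from sqlalchemy.ext.declarative, as in the model modules):

    class ChildBase(ModelMixin):
        __tablename__ = 'child'

        @declared_attr
        def execution_fk(cls):
            return cls.foreign_key('execution', nullable=True)

        @declared_attr
        def execution(cls):
            return cls.many_to_one_relationship('execution')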
-    @classmethod
-    def relationship_to_self(cls, local_column):
-
-        remote_side_str = '{cls.__name__}.{remote_column}'.format(
-            cls=cls,
-            remote_column=cls.id_column_name()
-        )
-        primaryjoin_str = '{remote_side_str} == {cls.__name__}.{local_column}'.format(
-            remote_side_str=remote_side_str,
-            cls=cls,
-            local_column=local_column)
-        return relationship(cls._get_cls_by_tablename(cls.__tablename__).__name__,
-                            primaryjoin=primaryjoin_str,
-                            remote_side=remote_side_str,
-                            post_update=True)
-
-    @classmethod
-    def many_to_many_relationship(cls, other_table_name, table_prefix, relationship_kwargs=None):
-        """Return a many-to-many SQL relationship object
-
-        Notes:
-        1. The backreference name is the current table's name, prefixed with table_prefix if given
-        2. This method creates a new helper table in the DB
-
-        :param other_table_name: The name of the table we're connecting to
-        :param table_prefix: Custom prefix for the helper table name and the
-        backreference name
-        """
-        current_table_name = cls.__tablename__
-        current_column_name = '{0}_id'.format(current_table_name)
-        current_foreign_key = '{0}.id'.format(current_table_name)
-
-        other_column_name = '{0}_id'.format(other_table_name)
-        other_foreign_key = '{0}.id'.format(other_table_name)
-
-        helper_table_name = '{0}_{1}'.format(current_table_name, other_table_name)
-
-        backref_name = current_table_name
-        if table_prefix:
-            helper_table_name = '{0}_{1}'.format(table_prefix, helper_table_name)
-            backref_name = '{0}_{1}'.format(table_prefix, backref_name)
-
-        secondary_table = cls.get_secondary_table(
-            cls.metadata,
-            helper_table_name,
-            current_column_name,
-            other_column_name,
-            current_foreign_key,
-            other_foreign_key
-        )
-
-        return relationship(
-            lambda: cls._get_cls_by_tablename(other_table_name),
-            secondary=secondary_table,
-            backref=backref(backref_name),
-            **(relationship_kwargs or {})
-        )
-
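Following the naming logic above, a prefixed many-to-many declared from a class whose table is 'service_template' against the 'parameter' table yields a helper table named 'inputs_service_template_parameter' and a backreference named 'inputs_service_template'. A sketch of such a declaration (illustrative):

    @declared_attr
    def inputs(cls):
        return cls.many_to_many_relationship('parameter', table_prefix='inputs')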
-    @staticmethod
-    def get_secondary_table(metadata,
-                            helper_table_name,
-                            first_column_name,
-                            second_column_name,
-                            first_foreign_key,
-                            second_foreign_key):
-        """Create a helper table for a many-to-many relationship
-
-        :param helper_table_name: The name of the table
-        :param first_column_name: The name of the first column in the table
-        :param second_column_name: The name of the second column in the table
-        :param first_foreign_key: The string representing the first foreign key,
-        for example `blueprint.storage_id`, or `tenants.id`
-        :param second_foreign_key: The string representing the second foreign key
-        :return: A Table object
-        """
-        return Table(
-            helper_table_name,
-            metadata,
-            Column(
-                first_column_name,
-                Integer,
-                ForeignKey(first_foreign_key)
-            ),
-            Column(
-                second_column_name,
-                Integer,
-                ForeignKey(second_foreign_key)
-            )
-        )
-
-    def to_dict(self, fields=None, suppress_error=False):
-        """Return a dict representation of the model
-
-        :param fields: Names of the fields to include (defaults to all public fields)
-        :param suppress_error: If set to True, attributes that cannot be retrieved are set to
-        `None` (e.g., when a relationship has not yet been established, so a property cannot
-        be accessed through it)
-        """
-        res = dict()
-        fields = fields or self.fields()
-        for field in fields:
-            try:
-                field_value = getattr(self, field)
-            except AttributeError:
-                if suppress_error:
-                    field_value = None
-                else:
-                    raise
-            if isinstance(field_value, list):
-                field_value = list(field_value)
-            elif isinstance(field_value, dict):
-                field_value = dict(field_value)
-            elif isinstance(field_value, ModelMixin):
-                field_value = field_value.to_dict()
-            res[field] = field_value
-
-        return res
-
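A short usage sketch, assuming `plugin` is an instance of a model derived from ModelMixin (such as the plugin model above):

    data = plugin.to_dict()                              # all public fields
    subset = plugin.to_dict(fields=['package_name', 'package_version'])
    safe = plugin.to_dict(suppress_error=True)           # unreachable relationship-backed
                                                         # attributes become None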
-    @classmethod
-    def _association_proxies(cls):
-        for col, value in vars(cls).items():
-            if isinstance(value, associationproxy.AssociationProxy):
-                yield col
-
-    @classmethod
-    def fields(cls):
-        """Return the list of field names for this table
-
-        Mostly for backwards compatibility in the code (that uses `fields`)
-        """
-        fields = set(cls._association_proxies())
-        fields.update(cls.__table__.columns.keys())
-        return fields - set(getattr(cls, '__private_fields__', []))
-
-    def __repr__(self):
-        return '<{__class__.__name__} id=`{id}`>'.format(
-            __class__=self.__class__,
-            id=getattr(self, self.name_column_name()))
-
-
-class ModelIDMixin(object):
-    id = Column(Integer, primary_key=True, autoincrement=True)
-    name = Column(Text, nullable=True, index=True)
-
-    @classmethod
-    def id_column_name(cls):
-        return 'id'
-
-    @classmethod
-    def name_column_name(cls):
-        return 'name'


[10/12] incubator-ariatosca git commit: ARIA-105 Integrate parser and orchestrator models

Posted by em...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
new file mode 100644
index 0000000..5d667e3
--- /dev/null
+++ b/aria/modeling/service_template.py
@@ -0,0 +1,1701 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=too-many-lines, no-self-argument, no-member, abstract-method
+
+from __future__ import absolute_import  # so we can import standard 'types'
+
+from types import FunctionType
+from datetime import datetime
+
+from sqlalchemy import (
+    Column,
+    Text,
+    Integer,
+    DateTime
+)
+from sqlalchemy.ext.declarative import declared_attr
+from sqlalchemy.ext.associationproxy import association_proxy
+
+from ..parser import validation
+from ..parser.consumption import ConsumptionContext
+from ..parser.reading import deepcopy_with_locators
+from ..utils import collections, formatting, console
+from .mixins import TemplateModelMixin
+from . import (
+    relationship,
+    utils,
+    types as modeling_types
+)
+
+
+class ServiceTemplateBase(TemplateModelMixin): # pylint: disable=too-many-public-methods
+    """
+    A service template is a source for creating :class:`Service` instances.
+
+    It is usually created by various DSL parsers, such as ARIA's TOSCA extension. However, it can
+    also be created programmatically.
+
+    :ivar name: Name (unique for this ARIA installation)
+    :vartype name: basestring
+    :ivar description: Human-readable description
+    :vartype description: basestring
+    :ivar main_file_name: Filename of CSAR or YAML file from which this service template was parsed
+    :vartype main_file_name: basestring
+    :ivar meta_data: Custom annotations
+    :vartype meta_data: {basestring: :class:`Metadata`}
+    :ivar node_templates: Templates for creating nodes
+    :vartype node_templates: {basestring: :class:`NodeTemplate`}
+    :ivar group_templates: Templates for creating groups
+    :vartype group_templates: {basestring: :class:`GroupTemplate`}
+    :ivar policy_templates: Templates for creating policies
+    :vartype policy_templates: {basestring: :class:`PolicyTemplate`}
+    :ivar substitution_template: The entire service can appear as a node
+    :vartype substitution_template: :class:`SubstitutionTemplate`
+    :ivar inputs: Externally provided parameters
+    :vartype inputs: {basestring: :class:`Parameter`}
+    :ivar outputs: These parameters are filled in after service installation
+    :vartype outputs: {basestring: :class:`Parameter`}
+    :ivar workflow_templates: Custom workflows that can be performed on the service
+    :vartype workflow_templates: {basestring: :class:`OperationTemplate`}
+    :ivar plugin_specifications: Plugins required by services
+    :vartype plugin_specifications: {basestring: :class:`PluginSpecification`}
+    :ivar node_types: Base for the node type hierarchy
+    :vartype node_types: :class:`Type`
+    :ivar group_types: Base for the group type hierarchy
+    :vartype group_types: :class:`Type`
+    :ivar policy_types: Base for the policy type hierarchy
+    :vartype policy_types: :class:`Type`
+    :ivar relationship_types: Base for the relationship type hierarchy
+    :vartype relationship_types: :class:`Type`
+    :ivar capability_types: Base for the capability type hierarchy
+    :vartype capability_types: :class:`Type`
+    :ivar interface_types: Base for the interface type hierarchy
+    :vartype interface_types: :class:`Type`
+    :ivar artifact_types: Base for the artifact type hierarchy
+    :vartype artifact_types: :class:`Type`
+    :ivar created_at: Creation timestamp
+    :vartype created_at: :class:`datetime.datetime`
+    :ivar updated_at: Update timestamp
+    :vartype updated_at: :class:`datetime.datetime`
+
+    :ivar services: Instantiated services
+    :vartype services: [:class:`Service`]
+    """
+
+    __tablename__ = 'service_template'
+
+    __private_fields__ = ['substitution_template_fk',
+                          'node_type_fk',
+                          'group_type_fk',
+                          'policy_type_fk',
+                          'relationship_type_fk',
+                          'capability_type_fk',
+                          'interface_type_fk',
+                          'artifact_type_fk']
+
+    description = Column(Text)
+    main_file_name = Column(Text)
+
+    @declared_attr
+    def meta_data(cls):
+        # Warning! We cannot use the attr name "metadata" because it's used by SQLAlchemy!
+        return relationship.many_to_many(cls, 'metadata', dict_key='name')
+
+    @declared_attr
+    def node_templates(cls):
+        return relationship.one_to_many(cls, 'node_template', dict_key='name')
+
+    @declared_attr
+    def group_templates(cls):
+        return relationship.one_to_many(cls, 'group_template', dict_key='name')
+
+    @declared_attr
+    def policy_templates(cls):
+        return relationship.one_to_many(cls, 'policy_template', dict_key='name')
+
+    @declared_attr
+    def substitution_template(cls):
+        return relationship.one_to_one(cls, 'substitution_template')
+
+    @declared_attr
+    def inputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+
+    @declared_attr
+    def outputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='outputs', dict_key='name')
+
+    @declared_attr
+    def workflow_templates(cls):
+        return relationship.one_to_many(cls, 'operation_template', dict_key='name')
+
+    @declared_attr
+    def plugin_specifications(cls):
+        return relationship.one_to_many(cls, 'plugin_specification', dict_key='name')
+
+    @declared_attr
+    def node_types(cls):
+        return relationship.one_to_one(cls, 'type', fk='node_type_fk', other_property=False)
+
+    @declared_attr
+    def group_types(cls):
+        return relationship.one_to_one(cls, 'type', fk='group_type_fk', other_property=False)
+
+    @declared_attr
+    def policy_types(cls):
+        return relationship.one_to_one(cls, 'type', fk='policy_type_fk', other_property=False)
+
+    @declared_attr
+    def relationship_types(cls):
+        return relationship.one_to_one(cls, 'type', fk='relationship_type_fk',
+                                       other_property=False)
+
+    @declared_attr
+    def capability_types(cls):
+        return relationship.one_to_one(cls, 'type', fk='capability_type_fk', other_property=False)
+
+    @declared_attr
+    def interface_types(cls):
+        return relationship.one_to_one(cls, 'type', fk='interface_type_fk', other_property=False)
+
+    @declared_attr
+    def artifact_types(cls):
+        return relationship.one_to_one(cls, 'type', fk='artifact_type_fk', other_property=False)
+
+    # region orchestration
+
+    created_at = Column(DateTime, nullable=False, index=True)
+    updated_at = Column(DateTime)
+
+    # endregion
+
+    # region foreign keys
+
+    @declared_attr
+    def substitution_template_fk(cls):
+        """For ServiceTemplate one-to-one to SubstitutionTemplate"""
+        return relationship.foreign_key('substitution_template', nullable=True)
+
+    @declared_attr
+    def node_type_fk(cls):
+        """For ServiceTemplate one-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
+
+    @declared_attr
+    def group_type_fk(cls):
+        """For ServiceTemplate one-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
+
+    @declared_attr
+    def policy_type_fk(cls):
+        """For ServiceTemplate one-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
+
+    @declared_attr
+    def relationship_type_fk(cls):
+        """For ServiceTemplate one-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
+
+    @declared_attr
+    def capability_type_fk(cls):
+        """For ServiceTemplate one-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
+
+    @declared_attr
+    def interface_type_fk(cls):
+        """For ServiceTemplate one-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
+
+    @declared_attr
+    def artifact_type_fk(cls):
+        """For ServiceTemplate one-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('description', self.description),
+            ('metadata', formatting.as_raw_dict(self.meta_data)),
+            ('node_templates', formatting.as_raw_list(self.node_templates)),
+            ('group_templates', formatting.as_raw_list(self.group_templates)),
+            ('policy_templates', formatting.as_raw_list(self.policy_templates)),
+            ('substitution_template', formatting.as_raw(self.substitution_template)),
+            ('inputs', formatting.as_raw_dict(self.inputs)),
+            ('outputs', formatting.as_raw_dict(self.outputs)),
+            ('workflow_templates', formatting.as_raw_list(self.workflow_templates))))
+
+    @property
+    def types_as_raw(self):
+        return collections.OrderedDict((
+            ('node_types', formatting.as_raw(self.node_types)),
+            ('group_types', formatting.as_raw(self.group_types)),
+            ('policy_types', formatting.as_raw(self.policy_types)),
+            ('relationship_types', formatting.as_raw(self.relationship_types)),
+            ('capability_types', formatting.as_raw(self.capability_types)),
+            ('interface_types', formatting.as_raw(self.interface_types)),
+            ('artifact_types', formatting.as_raw(self.artifact_types))))
+
+    def instantiate(self, container):
+        from . import models
+        context = ConsumptionContext.get_thread_local()
+        now = datetime.now()
+        service = models.Service(created_at=now,
+                                 updated_at=now,
+                                 description=deepcopy_with_locators(self.description),
+                                 service_template=self)
+        #service.name = '{0}_{1}'.format(self.name, service.id)
+
+        context.modeling.instance = service
+
+        utils.instantiate_dict(self, service.meta_data, self.meta_data)
+
+        for node_template in self.node_templates.itervalues():
+            for _ in range(node_template.default_instances):
+                node = node_template.instantiate(container)
+                service.nodes[node.name] = node
+
+        utils.instantiate_dict(self, service.groups, self.group_templates)
+        utils.instantiate_dict(self, service.policies, self.policy_templates)
+        utils.instantiate_dict(self, service.workflows, self.workflow_templates)
+
+        if self.substitution_template is not None:
+            service.substitution = self.substitution_template.instantiate(container)
+
+        utils.instantiate_dict(self, service.inputs, self.inputs)
+        utils.instantiate_dict(self, service.outputs, self.outputs)
+
+        for name, the_input in context.modeling.inputs.iteritems():
+            if name not in service.inputs:
+                context.validation.report('input "{0}" is not supported'.format(name))
+            else:
+                service.inputs[name].value = the_input
+
+        return service
+
+    def validate(self):
+        utils.validate_dict_values(self.meta_data)
+        utils.validate_dict_values(self.node_templates)
+        utils.validate_dict_values(self.group_templates)
+        utils.validate_dict_values(self.policy_templates)
+        if self.substitution_template is not None:
+            self.substitution_template.validate()
+        utils.validate_dict_values(self.inputs)
+        utils.validate_dict_values(self.outputs)
+        utils.validate_dict_values(self.workflow_templates)
+        if self.node_types is not None:
+            self.node_types.validate()
+        if self.group_types is not None:
+            self.group_types.validate()
+        if self.policy_types is not None:
+            self.policy_types.validate()
+        if self.relationship_types is not None:
+            self.relationship_types.validate()
+        if self.capability_types is not None:
+            self.capability_types.validate()
+        if self.interface_types is not None:
+            self.interface_types.validate()
+        if self.artifact_types is not None:
+            self.artifact_types.validate()
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(container, self.meta_data, report_issues)
+        utils.coerce_dict_values(container, self.node_templates, report_issues)
+        utils.coerce_dict_values(container, self.group_templates, report_issues)
+        utils.coerce_dict_values(container, self.policy_templates, report_issues)
+        if self.substitution_template is not None:
+            self.substitution_template.coerce_values(container, report_issues)
+        utils.coerce_dict_values(container, self.inputs, report_issues)
+        utils.coerce_dict_values(container, self.outputs, report_issues)
+        utils.coerce_dict_values(container, self.workflow_templates, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        if self.description is not None:
+            console.puts(context.style.meta(self.description))
+        utils.dump_dict_values(self.meta_data, 'Metadata')
+        for node_template in self.node_templates.itervalues():
+            node_template.dump()
+        for group_template in self.group_templates.itervalues():
+            group_template.dump()
+        for policy_template in self.policy_templates.itervalues():
+            policy_template.dump()
+        if self.substitution_template is not None:
+            self.substitution_template.dump()
+        utils.dump_dict_values(self.inputs, 'Inputs')
+        utils.dump_dict_values(self.outputs, 'Outputs')
+        utils.dump_dict_values(self.workflow_templates, 'Workflow templates')
+
+    def dump_types(self):
+        if self.node_types.children:
+            console.puts('Node types:')
+            self.node_types.dump()
+        if self.group_types.children:
+            console.puts('Group types:')
+            self.group_types.dump()
+        if self.capability_types.children:
+            console.puts('Capability types:')
+            self.capability_types.dump()
+        if self.relationship_types.children:
+            console.puts('Relationship types:')
+            self.relationship_types.dump()
+        if self.policy_types.children:
+            console.puts('Policy types:')
+            self.policy_types.dump()
+        if self.artifact_types.children:
+            console.puts('Artifact types:')
+            self.artifact_types.dump()
+        if self.interface_types.children:
+            console.puts('Interface types:')
+            self.interface_types.dump()
+
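As the docstring notes, a service template can also be assembled programmatically rather than parsed from TOSCA. A rough sketch, assuming the concrete classes in aria.modeling.models and an active thread-local ConsumptionContext (the local variable names and the type object are illustrative):

    from datetime import datetime
    from aria.modeling import models

    template = models.ServiceTemplate(name='my-service-template',
                                      main_file_name='service-template.yaml',
                                      created_at=datetime.now(),
                                      updated_at=datetime.now())
    template.node_templates['web_server'] = models.NodeTemplate(
        name='web_server',
        type=web_server_type,        # an existing models.Type instance
        default_instances=1)
    # With a ConsumptionContext in place:
    # service = template.instantiate(container=None)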
+
+class NodeTemplateBase(TemplateModelMixin):
+    """
+    A template for creating zero or more :class:`Node` instances.
+
+    :ivar name: Name (unique for this service template; will usually be used as a prefix for node
+                names)
+    :vartype name: basestring
+    :ivar type: Node type
+    :vartype type: :class:`Type`
+    :ivar description: Human-readable description
+    :vartype description: basestring
+    :ivar default_instances: Default number of nodes that will appear in the service
+    :vartype default_instances: int
+    :ivar min_instances: Minimum number of nodes that will appear in the service
+    :vartype min_instances: int
+    :ivar max_instances: Maximum number of nodes that will appear in the service
+    :vartype max_instances: int
+    :ivar properties: Associated parameters
+    :vartype properties: {basestring: :class:`Parameter`}
+    :ivar interface_templates: Bundles of operations
+    :vartype interface_templates: {basestring: :class:`InterfaceTemplate`}
+    :ivar artifact_templates: Associated files
+    :vartype artifact_templates: {basestring: :class:`ArtifactTemplate`}
+    :ivar capability_templates: Exposed capabilities
+    :vartype capability_templates: {basestring: :class:`CapabilityTemplate`}
+    :ivar requirement_templates: Potential relationships with other nodes
+    :vartype requirement_templates: [:class:`RequirementTemplate`]
+    :ivar target_node_template_constraints: Constraints for filtering relationship targets
+    :vartype target_node_template_constraints: [:class:`FunctionType`]
+    :ivar plugin_specifications: Plugins required to be installed on the node's host
+    :vartype plugin_specifications: {basestring: :class:`PluginSpecification`}
+
+    :ivar service_template: Containing service template
+    :vartype service_template: :class:`ServiceTemplate`
+    :ivar group_templates: We are a member of these groups
+    :vartype group_templates: [:class:`GroupTemplate`]
+    :ivar policy_templates: Policy templates enacted on this node
+    :vartype policy_templates: [:class:`PolicyTemplate`]
+    :ivar substitution_template_mapping: Our contribution to service substitution
+    :vartype substitution_template_mapping: :class:`SubstitutionTemplateMapping`
+    :ivar nodes: Instantiated nodes
+    :vartype nodes: [:class:`Node`]
+    """
+
+    __tablename__ = 'node_template'
+
+    __private_fields__ = ['type_fk',
+                          'service_template_fk',
+                          'service_template_name']
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type')
+
+    description = Column(Text)
+    default_instances = Column(Integer, default=1)
+    min_instances = Column(Integer, default=0)
+    max_instances = Column(Integer, default=None)
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    @declared_attr
+    def interface_templates(cls):
+        return relationship.one_to_many(cls, 'interface_template', dict_key='name')
+
+    @declared_attr
+    def artifact_templates(cls):
+        return relationship.one_to_many(cls, 'artifact_template', dict_key='name')
+
+    @declared_attr
+    def capability_templates(cls):
+        return relationship.one_to_many(cls, 'capability_template', dict_key='name')
+
+    @declared_attr
+    def requirement_templates(cls):
+        return relationship.one_to_many(cls, 'requirement_template', child_fk='node_template_fk',
+                                        child_property='node_template')
+
+    target_node_template_constraints = Column(modeling_types.StrictList(FunctionType))
+
+    @declared_attr
+    def plugin_specifications(cls):
+        return relationship.many_to_many(cls, 'plugin_specification', dict_key='name')
+
+    # region foreign_keys
+
+    @declared_attr
+    def type_fk(cls):
+        """For NodeTemplate many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    @declared_attr
+    def service_template_fk(cls):
+        """For ServiceTemplate one-to-many to NodeTemplate"""
+        return relationship.foreign_key('service_template')
+
+    # endregion
+
+    # region association proxies
+
+    @declared_attr
+    def service_template_name(cls):
+        """Required for use by SQLAlchemy queries"""
+        return association_proxy('service_template', 'name')
+
+    # endregion
+
+    def is_target_node_valid(self, target_node_template):
+        if self.target_node_template_constraints:
+            for node_type_constraint in self.target_node_template_constraints:
+                if not node_type_constraint(target_node_template, self):
+                    return False
+        return True
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('description', self.description),
+            ('type_name', self.type.name),
+            ('default_instances', self.default_instances),
+            ('min_instances', self.min_instances),
+            ('max_instances', self.max_instances),
+            ('properties', formatting.as_raw_dict(self.properties)),
+            ('interface_templates', formatting.as_raw_list(self.interface_templates)),
+            ('artifact_templates', formatting.as_raw_list(self.artifact_templates)),
+            ('capability_templates', formatting.as_raw_list(self.capability_templates)),
+            ('requirement_templates', formatting.as_raw_list(self.requirement_templates))))
+
+    def instantiate(self, container):
+        context = ConsumptionContext.get_thread_local()
+        from . import models
+        name = context.modeling.generate_node_id(self.name)
+        node = models.Node(name=name,
+                           type=self.type,
+                           description=deepcopy_with_locators(self.description),
+                           state='',
+                           node_template=self)
+        utils.instantiate_dict(node, node.properties, self.properties)
+        utils.instantiate_dict(node, node.interfaces, self.interface_templates)
+        utils.instantiate_dict(node, node.artifacts, self.artifact_templates)
+        utils.instantiate_dict(node, node.capabilities, self.capability_templates)
+        return node
+
+    def validate(self):
+        utils.validate_dict_values(self.properties)
+        utils.validate_dict_values(self.interface_templates)
+        utils.validate_dict_values(self.artifact_templates)
+        utils.validate_dict_values(self.capability_templates)
+        utils.validate_list_values(self.requirement_templates)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(self, self.properties, report_issues)
+        utils.coerce_dict_values(self, self.interface_templates, report_issues)
+        utils.coerce_dict_values(self, self.artifact_templates, report_issues)
+        utils.coerce_dict_values(self, self.capability_templates, report_issues)
+        utils.coerce_list_values(self, self.requirement_templates, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts('Node template: {0}'.format(context.style.node(self.name)))
+        if self.description:
+            console.puts(context.style.meta(self.description))
+        with context.style.indent:
+            console.puts('Type: {0}'.format(context.style.type(self.type.name)))
+            console.puts('Instances: {0:d} ({1:d}{2})'.format(
+                self.default_instances,
+                self.min_instances,
+                ' to {0:d}'.format(self.max_instances)
+                if self.max_instances is not None
+                else ' or more'))
+            utils.dump_dict_values(self.properties, 'Properties')
+            utils.dump_interfaces(self.interface_templates)
+            utils.dump_dict_values(self.artifact_templates, 'Artifact templates')
+            utils.dump_dict_values(self.capability_templates, 'Capability templates')
+            utils.dump_list_values(self.requirement_templates, 'Requirement templates')
+
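A target-node constraint is any callable that takes the candidate target template and the source template and returns a boolean; is_target_node_valid() applies each one in turn. A sketch (the predicate and the two template variables are illustrative):

    def _same_service_template(target_template, source_template):
        return target_template.service_template is source_template.service_template

    node_template.target_node_template_constraints = [_same_service_template]
    node_template.is_target_node_valid(candidate_template)   # True or False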
+
+class GroupTemplateBase(TemplateModelMixin):
+    """
+    A template for creating a :class:`Group` instance.
+
+    Groups are logical containers for zero or more nodes.
+
+    :ivar name: Name (unique for this service template)
+    :vartype name: basestring
+    :ivar type: Group type
+    :vartype type: :class:`Type`
+    :ivar description: Human-readable description
+    :vartype description: basestring
+    :ivar node_templates: All nodes instantiated by these templates will be members of the group
+    :vartype node_templates: [:class:`NodeTemplate`]
+    :ivar properties: Associated parameters
+    :vartype properties: {basestring: :class:`Parameter`}
+    :ivar interface_templates: Bundles of operations
+    :vartype interface_templates: {basestring: :class:`InterfaceTemplate`}
+
+    :ivar service_template: Containing service template
+    :vartype service_template: :class:`ServiceTemplate`
+    :ivar policy_templates: Policy templates enacted on this group
+    :vartype policy_templates: [:class:`PolicyTemplate`]
+    :ivar groups: Instantiated groups
+    :vartype groups: [:class:`Group`]
+    """
+
+    __tablename__ = 'group_template'
+
+    __private_fields__ = ['type_fk',
+                          'service_template_fk']
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type')
+
+    description = Column(Text)
+
+    @declared_attr
+    def node_templates(cls):
+        return relationship.many_to_many(cls, 'node_template')
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    @declared_attr
+    def interface_templates(cls):
+        return relationship.one_to_many(cls, 'interface_template', dict_key='name')
+
+    # region foreign keys
+
+    @declared_attr
+    def type_fk(cls):
+        """For GroupTemplate many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    @declared_attr
+    def service_template_fk(cls):
+        """For ServiceTemplate one-to-many to GroupTemplate"""
+        return relationship.foreign_key('service_template')
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('description', self.description),
+            ('type_name', self.type.name),
+            ('properties', formatting.as_raw_dict(self.properties)),
+            ('interface_templates', formatting.as_raw_list(self.interface_templates))))
+
+    def instantiate(self, container):
+        from . import models
+        group = models.Group(name=self.name,
+                             type=self.type,
+                             description=deepcopy_with_locators(self.description),
+                             group_template=self)
+        utils.instantiate_dict(self, group.properties, self.properties)
+        utils.instantiate_dict(self, group.interfaces, self.interface_templates)
+        if self.node_templates:
+            for node_template in self.node_templates:
+                group.nodes += node_template.nodes.all()
+        return group
+
+    def validate(self):
+        utils.validate_dict_values(self.properties)
+        utils.validate_dict_values(self.interface_templates)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(self, self.properties, report_issues)
+        utils.coerce_dict_values(self, self.interface_templates, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts('Group template: {0}'.format(context.style.node(self.name)))
+        if self.description:
+            console.puts(context.style.meta(self.description))
+        with context.style.indent:
+            console.puts('Type: {0}'.format(context.style.type(self.type.name)))
+            utils.dump_dict_values(self.properties, 'Properties')
+            utils.dump_interfaces(self.interface_templates)
+            if self.node_templates:
+                console.puts('Member node templates: {0}'.format(', '.join(
+                    (str(context.style.node(v.name)) for v in self.node_templates))))
+
+
+class PolicyTemplateBase(TemplateModelMixin):
+    """
+    Policies can be applied to zero or more :class:`NodeTemplate` or :class:`GroupTemplate`
+    instances.
+
+    :ivar name: Name (unique for this service template)
+    :vartype name: basestring
+    :ivar type: Policy type
+    :vartype type: :class:`Type`
+    :ivar description: Human-readable description
+    :vartype description: basestring
+    :ivar node_templates: Policy will be enacted on all nodes instantiated by these templates
+    :vartype node_templates: [:class:`NodeTemplate`]
+    :ivar group_templates: Policy will be enacted on all nodes in these groups
+    :vartype group_templates: [:class:`GroupTemplate`]
+    :ivar properties: Associated parameters
+    :vartype properties: {basestring: :class:`Parameter`}
+
+    :ivar service_template: Containing service template
+    :vartype service_template: :class:`ServiceTemplate`
+    :ivar policies: Instantiated policies
+    :vartype policies: [:class:`Policy`]
+    """
+
+    __tablename__ = 'policy_template'
+
+    __private_fields__ = ['type_fk',
+                          'service_template_fk']
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type')
+
+    description = Column(Text)
+
+    @declared_attr
+    def node_templates(cls):
+        return relationship.many_to_many(cls, 'node_template')
+
+    @declared_attr
+    def group_templates(cls):
+        return relationship.many_to_many(cls, 'group_template')
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    # region foreign keys
+
+    @declared_attr
+    def type_fk(cls):
+        """For PolicyTemplate many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    @declared_attr
+    def service_template_fk(cls):
+        """For ServiceTemplate one-to-many to PolicyTemplate"""
+        return relationship.foreign_key('service_template')
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('description', self.description),
+            ('type_name', self.type.name),
+            ('properties', formatting.as_raw_dict(self.properties))))
+
+    def instantiate(self, container):
+        from . import models
+        policy = models.Policy(name=self.name,
+                               type=self.type,
+                               description=deepcopy_with_locators(self.description),
+                               policy_template=self)
+        utils.instantiate_dict(self, policy.properties, self.properties)
+        if self.node_templates:
+            for node_template in self.node_templates:
+                policy.nodes += node_template.nodes.all()
+        if self.group_templates:
+            for group_template in self.group_templates:
+                policy.groups += group_template.groups.all()
+        return policy
+
+    def validate(self):
+        utils.validate_dict_values(self.properties)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(self, self.properties, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts('Policy template: {0}'.format(context.style.node(self.name)))
+        if self.description:
+            console.puts(context.style.meta(self.description))
+        with context.style.indent:
+            console.puts('Type: {0}'.format(context.style.type(self.type.name)))
+            utils.dump_dict_values(self.properties, 'Properties')
+            if self.node_templates:
+                console.puts('Target node templates: {0}'.format(', '.join(
+                    (str(context.style.node(v.name)) for v in self.node_templates))))
+            if self.group_templates:
+                console.puts('Target group templates: {0}'.format(', '.join(
+                    (str(context.style.node(v.name)) for v in self.group_templates))))
+
+
+class SubstitutionTemplateBase(TemplateModelMixin):
+    """
+    Used to expose the entire service as a single node (for service substitution).
+
+    :ivar node_type: Exposed node type
+    :vartype node_type: :class:`Type`
+    :ivar mappings: Requirement and capability mappings
+    :vartype mappings: {basestring: :class:`SubstitutionTemplateMapping`}
+
+    :ivar service_template: Containing service template
+    :vartype service_template: :class:`ServiceTemplate`
+    :ivar substitutions: Instantiated substitutions
+    :vartype substitutions: [:class:`Substitution`]
+    """
+
+    __tablename__ = 'substitution_template'
+
+    __private_fields__ = ['node_type_fk']
+
+    @declared_attr
+    def node_type(cls):
+        return relationship.many_to_one(cls, 'type')
+
+    @declared_attr
+    def mappings(cls):
+        return relationship.one_to_many(cls, 'substitution_template_mapping', dict_key='name')
+
+    # region foreign keys
+
+    @declared_attr
+    def node_type_fk(cls):
+        """For SubstitutionTemplate many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('node_type_name', self.node_type.name),
+            ('mappings', formatting.as_raw_dict(self.mappings))))
+
+    def instantiate(self, container):
+        from . import models
+        substitution = models.Substitution(node_type=self.node_type,
+                                           substitution_template=self)
+        utils.instantiate_dict(container, substitution.mappings, self.mappings)
+        return substitution
+
+    def validate(self):
+        utils.validate_dict_values(self.mappings)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(self, self.mappings, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts('Substitution template:')
+        with context.style.indent:
+            console.puts('Node type: {0}'.format(context.style.type(self.node_type.name)))
+            utils.dump_dict_values(self.mappings, 'Mappings')
+
+
+class SubstitutionTemplateMappingBase(TemplateModelMixin):
+    """
+    Used by :class:`SubstitutionTemplate` to map a capability or a requirement to a node.
+
+    Only one of `capability_template` and `requirement_template` can be set.
+
+    :ivar name: Exposed capability or requirement name
+    :vartype name: basestring
+    :ivar node_template: Node template
+    :vartype node_template: :class:`NodeTemplate`
+    :ivar capability_template: Capability template in the node template
+    :vartype capability_template: :class:`CapabilityTemplate`
+    :ivar requirement_template: Requirement template in the node template
+    :vartype requirement_template: :class:`RequirementTemplate`
+
+    :ivar substitution_template: Containing substitution template
+    :vartype substitution_template: :class:`SubstitutionTemplate`
+    """
+
+    __tablename__ = 'substitution_template_mapping'
+
+    __private_fields__ = ['substitution_template_fk',
+                          'node_template_fk',
+                          'capability_template_fk',
+                          'requirement_template_fk']
+
+    @declared_attr
+    def node_template(cls):
+        return relationship.one_to_one(cls, 'node_template')
+
+    @declared_attr
+    def capability_template(cls):
+        return relationship.one_to_one(cls, 'capability_template')
+
+    @declared_attr
+    def requirement_template(cls):
+        return relationship.one_to_one(cls, 'requirement_template')
+
+    # region foreign keys
+
+    @declared_attr
+    def substitution_template_fk(cls):
+        """For SubstitutionTemplate one-to-many to SubstitutionTemplateMapping"""
+        return relationship.foreign_key('substitution_template')
+
+    @declared_attr
+    def node_template_fk(cls):
+        """For SubstitutionTemplate one-to-one to NodeTemplate"""
+        return relationship.foreign_key('node_template')
+
+    @declared_attr
+    def capability_template_fk(cls):
+        """For SubstitutionTemplate one-to-one to CapabilityTemplate"""
+        return relationship.foreign_key('capability_template', nullable=True)
+
+    @declared_attr
+    def requirement_template_fk(cls):
+        """For SubstitutionTemplate one-to-one to RequirementTemplate"""
+        return relationship.foreign_key('requirement_template', nullable=True)
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),))
+
+    def coerce_values(self, container, report_issues):
+        pass
+
+    def instantiate(self, container):
+        from . import models
+        context = ConsumptionContext.get_thread_local()
+        nodes = self.node_template.nodes.all()
+        if len(nodes) == 0:
+            context.validation.report(
+                'mapping "{0}" refers to node template "{1}" but there are no '
+                'node instances'.format(self.name, self.node_template.name),
+                level=validation.Issue.BETWEEN_INSTANCES)
+            return None
+        # The TOSCA spec does not provide a way to choose the node,
+        # so we will just pick the first one
+        node = nodes[0]
+        capability = None
+        if self.capability_template:
+            for a_capability in node.capabilities.itervalues():
+                if a_capability.capability_template.name == self.capability_template.name:
+                    capability = a_capability
+        return models.SubstitutionMapping(name=self.name,
+                                          node=node,
+                                          capability=capability,
+                                          requirement_template=self.requirement_template)
+
+    def validate(self):
+        context = ConsumptionContext.get_thread_local()
+        if (self.capability_template is None) and (self.requirement_template is None):
+            context.validation.report('mapping "{0}" refers to neither a capability nor a '
+                                      'requirement in node template: {1}'.format(
+                                          self.name,
+                                          formatting.safe_repr(self.node_template.name)),
+                                      level=validation.Issue.BETWEEN_TYPES)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts('{0} -> {1}.{2}'.format(
+            context.style.node(self.name),
+            context.style.node(self.node_template.name),
+            context.style.node(self.capability_template.name
+                               if self.capability_template
+                               else self.requirement_template.name)))
+
+
+class RequirementTemplateBase(TemplateModelMixin):
+    """
+    A requirement for a :class:`NodeTemplate`. During instantiation, it will be matched with a
+    capability of another node.
+
+    Requirements may optionally contain a :class:`RelationshipTemplate` that will be created between
+    the nodes.
+
+    :ivar name: Name (a node template can have multiple requirements with the same name)
+    :vartype name: basestring
+    :ivar target_node_type: Required node type (optional)
+    :vartype target_node_type: :class:`Type`
+    :ivar target_node_template: Required node template (optional)
+    :vartype target_node_template: :class:`NodeTemplate`
+    :ivar target_capability_type: Required capability type (optional)
+    :vartype target_capability_type: :class:`Type`
+    :ivar target_capability_name: Name of capability in target node (optional)
+    :vartype target_capability_name: basestring
+    :ivar target_node_template_constraints: Constraints for filtering relationship targets
+    :vartype target_node_template_constraints: [:class:`FunctionType`]
+    :ivar relationship_template: Template for relationships (optional)
+    :vartype relationship_template: :class:`RelationshipTemplate`
+
+    :ivar node_template: Containing node template
+    :vartype node_template: :class:`NodeTemplate`
+    :ivar substitution_template_mapping: Our contribution to service substitution
+    :vartype substitution_template_mapping: :class:`SubstitutionTemplateMapping`
+    :ivar substitution_mapping: Our contribution to service substitution
+    :vartype substitution_mapping: :class:`SubstitutionMapping`
+    """
+
+    __tablename__ = 'requirement_template'
+
+    __private_fields__ = ['target_node_type_fk',
+                          'target_node_template_fk',
+                          'target_capability_type_fk',
+                          'node_template_fk',
+                          'relationship_template_fk']
+
+    @declared_attr
+    def target_node_type(cls):
+        return relationship.many_to_one(cls, 'type', fk='target_node_type_fk',
+                                        parent_property=False)
+
+    @declared_attr
+    def target_node_template(cls):
+        return relationship.one_to_one(cls, 'node_template', fk='target_node_template_fk',
+                                       other_property=False)
+
+    @declared_attr
+    def target_capability_type(cls):
+        return relationship.one_to_one(cls, 'type', fk='target_capability_type_fk',
+                                       other_property=False)
+
+    target_capability_name = Column(Text)
+    target_node_template_constraints = Column(modeling_types.StrictList(FunctionType))
+
+    @declared_attr
+    def relationship_template(cls):
+        return relationship.one_to_one(cls, 'relationship_template')
+
+    # region foreign keys
+
+    @declared_attr
+    def target_node_type_fk(cls):
+        """For RequirementTemplate many-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
+
+    @declared_attr
+    def target_node_template_fk(cls):
+        """For RequirementTemplate one-to-one to NodeTemplate"""
+        return relationship.foreign_key('node_template', nullable=True)
+
+    @declared_attr
+    def target_capability_type_fk(cls):
+        """For RequirementTemplate one-to-one to NodeTemplate"""
+        return relationship.foreign_key('type', nullable=True)
+
+    @declared_attr
+    def node_template_fk(cls):
+        """For NodeTemplate one-to-many to RequirementTemplate"""
+        return relationship.foreign_key('node_template')
+
+    @declared_attr
+    def relationship_template_fk(cls):
+        """For RequirementTemplate one-to-one to RelationshipTemplate"""
+        return relationship.foreign_key('relationship_template', nullable=True)
+
+    # endregion
+
+    def find_target(self, source_node_template):
+        context = ConsumptionContext.get_thread_local()
+
+        # We might already have a specific node template, so we'll just verify it
+        if self.target_node_template is not None:
+            if not source_node_template.is_target_node_valid(self.target_node_template):
+                context.validation.report('requirement "{0}" of node template "{1}" is for node '
+                                          'template "{2}" but it does not match constraints'.format(
+                                              self.name,
+                                              source_node_template.name,
+                                              self.target_node_template.name),
+                                          level=validation.Issue.BETWEEN_TYPES)
+            if (self.target_capability_type is not None) \
+                or (self.target_capability_name is not None):
+                target_node_capability = self.find_target_capability(source_node_template,
+                                                                     self.target_node_template)
+                if target_node_capability is None:
+                    return None, None
+            else:
+                target_node_capability = None
+
+            return self.target_node_template, target_node_capability
+
+        # Find first node that matches the type
+        elif self.target_node_type is not None:
+            for target_node_template in context.modeling.template.node_templates.itervalues():
+                if self.target_node_type.get_descendant(target_node_template.type.name) is None:
+                    continue
+
+                if not source_node_template.is_target_node_valid(target_node_template):
+                    continue
+
+                target_node_capability = self.find_target_capability(source_node_template,
+                                                                     target_node_template)
+                if target_node_capability is None:
+                    continue
+
+                return target_node_template, target_node_capability
+
+        return None, None
+
+    def find_target_capability(self, source_node_template, target_node_template):
+        for capability_template in target_node_template.capability_templates.itervalues():
+            if capability_template.satisfies_requirement(source_node_template,
+                                                         self,
+                                                         target_node_template):
+                return capability_template
+        return None
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('target_node_type_name', self.target_node_type.name
+             if self.target_node_type is not None else None),
+            ('target_node_template_name', self.target_node_template.name
+             if self.target_node_template is not None else None),
+            ('target_capability_type_name', self.target_capability_type.name
+             if self.target_capability_type is not None else None),
+            ('target_capability_name', self.target_capability_name),
+            ('relationship_template', formatting.as_raw(self.relationship_template))))
+
+    def validate(self):
+        if self.relationship_template:
+            self.relationship_template.validate()
+
+    def coerce_values(self, container, report_issues):
+        if self.relationship_template is not None:
+            self.relationship_template.coerce_values(container, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        if self.name:
+            console.puts(context.style.node(self.name))
+        else:
+            console.puts('Requirement:')
+        with context.style.indent:
+            if self.target_node_type is not None:
+                console.puts('Target node type: {0}'.format(
+                    context.style.type(self.target_node_type.name)))
+            elif self.target_node_template is not None:
+                console.puts('Target node template: {0}'.format(
+                    context.style.node(self.target_node_template.name)))
+            if self.target_capability_type is not None:
+                console.puts('Target capability type: {0}'.format(
+                    context.style.type(self.target_capability_type.name)))
+            elif self.target_capability_name is not None:
+                console.puts('Target capability name: {0}'.format(
+                    context.style.node(self.target_capability_name)))
+            if self.target_node_template_constraints:
+                console.puts('Target node template constraints:')
+                with context.style.indent:
+                    for constraint in self.target_node_template_constraints:
+                        console.puts(context.style.literal(constraint))
+            if self.relationship_template:
+                console.puts('Relationship:')
+                with context.style.indent:
+                    self.relationship_template.dump()
+
+
+class RelationshipTemplateBase(TemplateModelMixin):
+    """
+    Optional addition to a :class:`RequirementTemplate` in :class:`NodeTemplate` that can be applied
+    when the requirement is matched with a capability.
+
+    Note that a relationship template here is not equivalent to a relationship template entity in
+    TOSCA. For example, a TOSCA requirement specifying a relationship type instead of a template
+    would still be represented here as a relationship template.
+
+    :ivar name: Name (optional; if present is unique for this service template)
+    :vartype name: basestring
+    :ivar type: Relationship type
+    :vartype type: :class:`Type`
+    :ivar description: Human-readable description
+    :vartype description: basestring
+    :ivar properties: Associated parameters
+    :vartype properties: {basestring: :class:`Parameter`}
+    :ivar interface_templates: Bundles of operations
+    :vartype interface_templates: {basestring: :class:`InterfaceTemplate`}
+
+    :ivar requirement_template: Containing requirement template
+    :vartype requirement_template: :class:`RequirementTemplate`
+    :ivar relationships: Instantiated relationships
+    :vartype relationships: [:class:`Relationship`]
+    """
+
+    __tablename__ = 'relationship_template'
+
+    __private_fields__ = ['type_fk']
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type')
+
+    description = Column(Text)
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    @declared_attr
+    def interface_templates(cls):
+        return relationship.one_to_many(cls, 'interface_template', dict_key='name')
+
+    # region foreign keys
+
+    @declared_attr
+    def type_fk(cls):
+        """For RelationshipTemplate many-to-one to Type"""
+        return relationship.foreign_key('type', nullable=True)
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('type_name', self.type.name if self.type is not None else None),
+            ('name', self.name),
+            ('description', self.description),
+            ('properties', formatting.as_raw_dict(self.properties)),
+            ('interface_templates', formatting.as_raw_list(self.interface_templates))))
+
+    def instantiate(self, container):
+        from . import models
+        relationship_model = models.Relationship(name=self.name,
+                                                 type=self.type,
+                                                 relationship_template=self)
+        utils.instantiate_dict(container, relationship_model.properties, self.properties)
+        utils.instantiate_dict(container, relationship_model.interfaces, self.interface_templates)
+        return relationship_model
+
+    def validate(self):
+        # TODO: either type or name must be set
+        utils.validate_dict_values(self.properties)
+        utils.validate_dict_values(self.interface_templates)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(self, self.properties, report_issues)
+        utils.coerce_dict_values(self, self.interface_templates, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        if self.type is not None:
+            console.puts('Relationship type: {0}'.format(context.style.type(self.type.name)))
+        else:
+            console.puts('Relationship template: {0}'.format(
+                context.style.node(self.name)))
+        if self.description:
+            console.puts(context.style.meta(self.description))
+        with context.style.indent:
+            utils.dump_dict_values(self.properties, 'Properties')
+            utils.dump_interfaces(self.interface_templates, 'Interface templates')
+
+
+class CapabilityTemplateBase(TemplateModelMixin):
+    """
+    A capability of a :class:`NodeTemplate`. Nodes expose zero or more capabilities that can be
+    matched with the requirements (:class:`RequirementTemplate`) of other node templates.
+
+    :ivar name: Name (unique for the node template)
+    :vartype name: basestring
+    :ivar type: Capability type
+    :vartype type: :class:`Type`
+    :ivar description: Human-readable description
+    :vartype description: basestring
+    :ivar valid_source_node_types: Reject requirements that are not from these node types (optional)
+    :vartype valid_source_node_types: [:class:`Type`]
+    :ivar min_occurrences: Minimum number of requirement matches required
+    :vartype min_occurrences: int
+    :ivar max_occurrences: Maximum number of requirement matches allowed
+    :vartype max_occurrences: int
+    :ivar properties: Associated parameters
+    :vartype properties: {basestring: :class:`Parameter`}
+
+    :ivar node_template: Containing node template
+    :vartype node_template: :class:`NodeTemplate`
+    :ivar substitution_template_mapping: Our contribution to service substitution
+    :vartype substitution_template_mapping: :class:`SubstitutionTemplateMapping`
+    :ivar capabilities: Instantiated capabilities
+    :vartype capabilities: [:class:`Capability`]
+    """
+
+    __tablename__ = 'capability_template'
+
+    __private_fields__ = ['type_fk',
+                          'node_template_fk']
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type')
+
+    description = Column(Text)
+    min_occurrences = Column(Integer, default=None)  # optional
+    max_occurrences = Column(Integer, default=None)  # optional
+
+    @declared_attr
+    def valid_source_node_types(cls):
+        return relationship.many_to_many(cls, 'type', prefix='valid_sources')
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    # region foreign keys
+
+    @declared_attr
+    def type_fk(cls):
+        """For CapabilityTemplate many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    @declared_attr
+    def node_template_fk(cls):
+        """For NodeTemplate one-to-many to CapabilityTemplate"""
+        return relationship.foreign_key('node_template')
+
+    # endregion
+
+    def satisfies_requirement(self,
+                              source_node_template,
+                              requirement,
+                              target_node_template):
+        # Do we match the required capability type?
+        if requirement.target_capability_type and \
+                requirement.target_capability_type.get_descendant(self.type.name) is None:
+            return False
+
+        # Are we in valid_source_node_types?
+        if self.valid_source_node_types:
+            for valid_source_node_type in self.valid_source_node_types:
+                if valid_source_node_type.get_descendant(source_node_template.type.name) is None:
+                    return False
+
+        # Apply requirement constraints
+        if requirement.target_node_template_constraints:
+            for node_type_constraint in requirement.target_node_template_constraints:
+                if not node_type_constraint(target_node_template, source_node_template):
+                    return False
+
+        return True
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('description', self.description),
+            ('type_name', self.type.name),
+            ('min_occurrences', self.min_occurrences),
+            ('max_occurrences', self.max_occurrences),
+            ('valid_source_node_types', [v.name for v in self.valid_source_node_types]),
+            ('properties', formatting.as_raw_dict(self.properties))))
+
+    def instantiate(self, container):
+        from . import models
+        capability = models.Capability(name=self.name,
+                                       type=self.type,
+                                       min_occurrences=self.min_occurrences,
+                                       max_occurrences=self.max_occurrences,
+                                       occurrences=0,
+                                       capability_template=self)
+        utils.instantiate_dict(container, capability.properties, self.properties)
+        return capability
+
+    def validate(self):
+        utils.validate_dict_values(self.properties)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(self, self.properties, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts(context.style.node(self.name))
+        if self.description:
+            console.puts(context.style.meta(self.description))
+        with context.style.indent:
+            console.puts('Type: {0}'.format(context.style.type(self.type.name)))
+            console.puts(
+                'Occurrences: {0:d}{1}'.format(
+                    self.min_occurrences or 0,
+                    ' to {0:d}'.format(self.max_occurrences)
+                    if self.max_occurrences is not None
+                    else ' or more'))
+            if self.valid_source_node_types:
+                console.puts('Valid source node types: {0}'.format(
+                    ', '.join((str(context.style.type(v.name))
+                               for v in self.valid_source_node_types))))
+            utils.dump_dict_values(self.properties, 'Properties')
+
+
+class InterfaceTemplateBase(TemplateModelMixin):
+    """
+    A typed set of :class:`OperationTemplate`.
+
+    :ivar name: Name (unique for the node, group, or relationship template)
+    :vartype name: basestring
+    :ivar type: Interface type
+    :vartype type: :class:`Type`
+    :ivar description: Human-readable description
+    :vartype description: basestring
+    :ivar inputs: Parameters that can be used by all operations in the interface
+    :vartype inputs: {basestring: :class:`Parameter`}
+    :ivar operation_templates: Operations
+    :vartype operation_templates: {basestring: :class:`OperationTemplate`}
+
+    :ivar node_template: Containing node template
+    :vartype node_template: :class:`NodeTemplate`
+    :ivar group_template: Containing group template
+    :vartype group_template: :class:`GroupTemplate`
+    :ivar relationship_template: Containing relationship template
+    :vartype relationship_template: :class:`RelationshipTemplate`
+    :ivar interfaces: Instantiated interfaces
+    :vartype interfaces: [:class:`Interface`]
+    """
+
+    __tablename__ = 'interface_template'
+
+    __private_fields__ = ['type_fk',
+                          'node_template_fk',
+                          'group_template_fk',
+                          'relationship_template_fk']
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type')
+
+    description = Column(Text)
+
+    @declared_attr
+    def inputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+
+    @declared_attr
+    def operation_templates(cls):
+        return relationship.one_to_many(cls, 'operation_template', dict_key='name')
+
+    # region foreign keys
+
+    @declared_attr
+    def type_fk(cls):
+        """For InterfaceTemplate many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    @declared_attr
+    def node_template_fk(cls):
+        """For NodeTemplate one-to-many to InterfaceTemplate"""
+        return relationship.foreign_key('node_template', nullable=True)
+
+    @declared_attr
+    def group_template_fk(cls):
+        """For GroupTemplate one-to-many to InterfaceTemplate"""
+        return relationship.foreign_key('group_template', nullable=True)
+
+    @declared_attr
+    def relationship_template_fk(cls):
+        """For RelationshipTemplate one-to-many to InterfaceTemplate"""
+        return relationship.foreign_key('relationship_template', nullable=True)
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('description', self.description),
+            ('type_name', self.type.name),
+            ('inputs', formatting.as_raw_dict(self.inputs)),  # pylint: disable=no-member
+            # TODO fix self.properties reference
+            ('operation_templates', formatting.as_raw_list(self.operation_templates))))
+
+    def instantiate(self, container):
+        from . import models
+        interface = models.Interface(name=self.name,
+                                     type=self.type,
+                                     description=deepcopy_with_locators(self.description),
+                                     interface_template=self)
+        utils.instantiate_dict(container, interface.inputs, self.inputs)
+        utils.instantiate_dict(container, interface.operations, self.operation_templates)
+        return interface
+
+    def validate(self):
+        utils.validate_dict_values(self.inputs)
+        utils.validate_dict_values(self.operation_templates)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(container, self.inputs, report_issues)
+        utils.coerce_dict_values(container, self.operation_templates, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts(context.style.node(self.name))
+        if self.description:
+            console.puts(context.style.meta(self.description))
+        with context.style.indent:
+            console.puts('Interface type: {0}'.format(context.style.type(self.type.name)))
+            utils.dump_dict_values(self.inputs, 'Inputs')
+            utils.dump_dict_values(self.operation_templates, 'Operation templates')
+
+
+class OperationTemplateBase(TemplateModelMixin):
+    """
+    An operation in a :class:`InterfaceTemplate`.
+
+    Operations are executed by an executor, optionally using a plugin selected via an associated
+    :class:`PluginSpecification`.
+
+    :ivar name: Name (unique for the interface or service template)
+    :vartype name: basestring
+    :ivar description: Human-readable description
+    :vartype description: basestring
+    :ivar plugin_specification: Associated plugin
+    :vartype plugin_specification: :class:`PluginSpecification`
+    :ivar implementation: Implementation string (interpreted by the plugin)
+    :vartype implementation: basestring
+    :ivar dependencies: Dependency strings (interpreted by the plugin)
+    :vartype dependencies: [basestring]
+    :ivar inputs: Parameters that can be used by this operation
+    :vartype inputs: {basestring: :class:`Parameter`}
+    :ivar executor: Executor name
+    :vartype executor: basestring
+    :ivar max_retries: Maximum number of retries allowed in case of failure
+    :vartype max_retries: int
+    :ivar retry_interval: Interval between retries (in seconds)
+    :vartype retry_interval: int
+
+    :ivar interface_template: Containing interface template
+    :vartype interface_template: :class:`InterfaceTemplate`
+    :ivar service_template: Containing service template
+    :vartype service_template: :class:`ServiceTemplate`
+    :ivar operations: Instantiated operations
+    :vartype operations: [:class:`Operation`]
+    """
+
+    __tablename__ = 'operation_template'
+
+    __private_fields__ = ['service_template_fk',
+                          'interface_template_fk',
+                          'plugin_specification_fk']
+
+    description = Column(Text)
+
+    @declared_attr
+    def plugin_specification(cls):
+        return relationship.one_to_one(cls, 'plugin_specification')
+
+    implementation = Column(Text)
+    dependencies = Column(modeling_types.StrictList(item_cls=basestring))
+
+    @declared_attr
+    def inputs(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+
+    executor = Column(Text)
+    max_retries = Column(Integer)
+    retry_interval = Column(Integer)
+
+    # region foreign keys
+
+    @declared_attr
+    def service_template_fk(cls):
+        """For ServiceTemplate one-to-many to OperationTemplate"""
+        return relationship.foreign_key('service_template', nullable=True)
+
+    @declared_attr
+    def interface_template_fk(cls):
+        """For InterfaceTemplate one-to-many to OperationTemplate"""
+        return relationship.foreign_key('interface_template', nullable=True)
+
+    @declared_attr
+    def plugin_specification_fk(cls):
+        """For OperationTemplate one-to-one to PluginSpecification"""
+        return relationship.foreign_key('plugin_specification', nullable=True)
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('description', self.description),
+            ('implementation', self.implementation),
+            ('dependencies', self.dependencies),
+            ('executor', self.executor),
+            ('max_retries', self.max_retries),
+            ('retry_interval', self.retry_interval),
+            ('inputs', formatting.as_raw_dict(self.inputs))))
+
+    def instantiate(self, container):
+        from . import models
+        operation = models.Operation(name=self.name,
+                                     description=deepcopy_with_locators(self.description),
+                                     implementation=self.implementation,
+                                     dependencies=self.dependencies,
+                                     plugin_specification=self.plugin_specification,
+                                     executor=self.executor,
+                                     max_retries=self.max_retries,
+                                     retry_interval=self.retry_interval,
+                                     operation_template=self)
+        utils.instantiate_dict(container, operation.inputs, self.inputs)
+        return operation
+
+    def validate(self):
+        utils.validate_dict_values(self.inputs)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(container, self.inputs, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts(context.style.node(self.name))
+        if self.description:
+            console.puts(context.style.meta(self.description))
+        with context.style.indent:
+            if self.implementation is not None:
+                console.puts('Implementation: {0}'.format(
+                    context.style.literal(self.implementation)))
+            if self.dependencies:
+                console.puts('Dependencies: {0}'.format(
+                    ', '.join((str(context.style.literal(v)) for v in self.dependencies))))
+            if self.executor is not None:
+                console.puts('Executor: {0}'.format(context.style.literal(self.executor)))
+            if self.max_retries is not None:
+                console.puts('Max retries: {0}'.format(context.style.literal(self.max_retries)))
+            if self.retry_interval is not None:
+                console.puts('Retry interval: {0}'.format(
+                    context.style.literal(self.retry_interval)))
+            utils.dump_dict_values(self.inputs, 'Inputs')
+
+
+class ArtifactTemplateBase(TemplateModelMixin):
+    """
+    A file associated with a :class:`NodeTemplate`.
+
+    :ivar name: Name (unique for the node template)
+    :vartype name: basestring
+    :ivar type: Artifact type
+    :vartype type: :class:`Type`
+    :ivar description: Human-readable description
+    :vartype description: basestring
+    :ivar source_path: Source path (CSAR or repository)
+    :vartype source_path: basestring
+    :ivar target_path: Path at destination machine
+    :vartype target_path: basestring
+    :ivar repository_url: Repository URL
+    :vartype repository_url: basestring
+    :ivar repository_credential: Credentials for accessing the repository
+    :vartype repository_credential: {basestring: basestring}
+    :ivar properties: Associated parameters
+    :vartype properties: {basestring: :class:`Parameter`}
+
+    :ivar node_template: Containing node template
+    :vartype node_template: :class:`NodeTemplate`
+    :ivar artifacts: Instantiated artifacts
+    :vartype artifacts: [:class:`Artifact`]
+    """
+
+    __tablename__ = 'artifact_template'
+
+    __private_fields__ = ['type_fk',
+                          'node_template_fk']
+
+    @declared_attr
+    def type(cls):
+        return relationship.many_to_one(cls, 'type')
+
+    description = Column(Text)
+    source_path = Column(Text)
+    target_path = Column(Text)
+    repository_url = Column(Text)
+    repository_credential = Column(modeling_types.StrictDict(basestring, basestring))
+
+    @declared_attr
+    def properties(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='properties', dict_key='name')
+
+    # region foreign keys
+
+    @declared_attr
+    def type_fk(cls):
+        """For ArtifactTemplate many-to-one to Type"""
+        return relationship.foreign_key('type')
+
+    @declared_attr
+    def node_template_fk(cls):
+        """For NodeTemplate one-to-many to ArtifactTemplate"""
+        return relationship.foreign_key('node_template')
+
+    # endregion
+
+    @property
+    def as_raw(self):
+        return collections.OrderedDict((
+            ('name', self.name),
+            ('description', self.description),
+            ('type_name', self.type.name),
+            ('source_path', self.source_path),
+            ('target_path', self.target_path),
+            ('repository_url', self.repository_url),
+            ('repository_credential', formatting.as_agnostic(self.repository_credential)),
+            ('properties', formatting.as_raw_dict(self.properties))))
+
+    def instantiate(self, container):
+        from . import models
+        artifact = models.Artifact(name=self.name,
+                                   type=self.type,
+                                   description=deepcopy_with_locators(self.description),
+                                   source_path=self.source_path,
+                                   target_path=self.target_path,
+                                   repository_url=self.repository_url,
+                                   repository_credential=self.repository_credential,
+                                   artifact_template=self)
+        utils.instantiate_dict(container, artifact.properties, self.properties)
+        return artifact
+
+    def validate(self):
+        utils.validate_dict_values(self.properties)
+
+    def coerce_values(self, container, report_issues):
+        utils.coerce_dict_values(container, self.properties, report_issues)
+
+    def dump(self):
+        context = ConsumptionContext.get_thread_local()
+        console.puts(context.style.node(self.name))
+        if self.description:
+            console.puts(context.style.meta(self.description))
+        with context.style.indent:
+            console.puts('Artifact type: {0}'.format(context.style.type(self.type.name)))
+            console.puts('Source path: {0}'.format(context.style.literal(self.source_path)))
+            if self.target_path is not None:
+                console.puts('Target path: {0}'.format(context.style.literal(self.target_path)))
+            if self.repository_url is not None:
+                console.puts('Repository URL: {0}'.format(
+                    context.style.literal(self.repository_url)))
+            if self.repository_credential:
+                console.puts('Repository credential: {0}'.format(
+                    context.style.literal(self.repository_credential)))
+            utils.dump_dict_values(self.properties, 'Properties')

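The template models above all expose an as_raw property built the same way. The following
standalone sketch (not part of the commit; ExampleTemplate is a hypothetical stand-in) shows the
convention: OrderedDict is fed an iterable of (key, value) pairs, so a single pair still needs a
trailing comma to form a one-element tuple.

    from collections import OrderedDict

    class ExampleTemplate(object):
        """Hypothetical stand-in for a TemplateModelMixin subclass."""

        def __init__(self, name, description=None):
            self.name = name
            self.description = description

        @property
        def as_raw(self):
            return OrderedDict((
                ('name', self.name),
                ('description', self.description)))

    single = OrderedDict((('name', 'only-key'),))  # trailing comma: a tuple holding one pair
    assert ExampleTemplate('web').as_raw['name'] == 'web'
    assert list(single.keys()) == ['name']
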
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/modeling/types.py
----------------------------------------------------------------------
diff --git a/aria/modeling/types.py b/aria/modeling/types.py
new file mode 100644
index 0000000..06f171c
--- /dev/null
+++ b/aria/modeling/types.py
@@ -0,0 +1,304 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+from collections import namedtuple
+
+from sqlalchemy import (
+    TypeDecorator,
+    VARCHAR,
+    event
+)
+from sqlalchemy.ext import mutable
+
+from . import exceptions
+
+
+class _MutableType(TypeDecorator):
+    """
+    Base class for column types stored as JSON-serialized text.
+    """
+    @property
+    def python_type(self):
+        raise NotImplementedError
+
+    def process_literal_param(self, value, dialect):
+        pass
+
+    impl = VARCHAR
+
+    def process_bind_param(self, value, dialect):
+        if value is not None:
+            value = json.dumps(value)
+        return value
+
+    def process_result_value(self, value, dialect):
+        if value is not None:
+            value = json.loads(value)
+        return value
+
+
+class Dict(_MutableType):
+    @property
+    def python_type(self):
+        return dict
+
+
+class List(_MutableType):
+    @property
+    def python_type(self):
+        return list
+
+
+class _StrictDictMixin(object):
+
+    @classmethod
+    def coerce(cls, key, value):
+        "Convert plain dictionaries to MutableDict."
+        try:
+            if not isinstance(value, cls):
+                if isinstance(value, dict):
+                    for k, v in value.items():
+                        cls._assert_strict_key(k)
+                        cls._assert_strict_value(v)
+                    return cls(value)
+                return mutable.MutableDict.coerce(key, value)
+            else:
+                return value
+        except ValueError as e:
+            raise exceptions.ValueFormatException('could not coerce to MutableDict', cause=e)
+
+    def __setitem__(self, key, value):
+        self._assert_strict_key(key)
+        self._assert_strict_value(value)
+        super(_StrictDictMixin, self).__setitem__(key, value)
+
+    def setdefault(self, key, value):
+        self._assert_strict_key(key)
+        self._assert_strict_value(value)
+        return super(_StrictDictMixin, self).setdefault(key, value)
+
+    def update(self, *args, **kwargs):
+        if args:
+            for k, v in dict(args[0]).items():
+                self._assert_strict_key(k)
+                self._assert_strict_value(v)
+        for k, v in kwargs.items():
+            self._assert_strict_key(k)
+            self._assert_strict_value(v)
+        super(_StrictDictMixin, self).update(*args, **kwargs)
+
+    @classmethod
+    def _assert_strict_key(cls, key):
+        if cls._key_cls is not None and not isinstance(key, cls._key_cls):
+            raise exceptions.ValueFormatException('key type was set strictly to {0}, but was {1}'
+                                                  .format(cls._key_cls, type(key)))
+
+    @classmethod
+    def _assert_strict_value(cls, value):
+        if cls._value_cls is not None and not isinstance(value, cls._value_cls):
+            raise exceptions.ValueFormatException('value type was set strictly to {0}, but was {1}'
+                                                  .format(cls._value_cls, type(value)))
+
+
+class _MutableDict(mutable.MutableDict):
+    """
+    Enables tracking for dict values.
+    """
+
+    @classmethod
+    def coerce(cls, key, value):
+        "Convert plain dictionaries to MutableDict."
+        try:
+            return mutable.MutableDict.coerce(key, value)
+        except ValueError as e:
+            raise exceptions.ValueFormatException('could not coerce value', cause=e)
+
+
+class _StrictListMixin(object):
+
+    @classmethod
+    def coerce(cls, key, value):
+        "Convert plain dictionaries to MutableDict."
+        try:
+            if not isinstance(value, cls):
+                if isinstance(value, list):
+                    for item in value:
+                        cls._assert_item(item)
+                    return cls(value)
+                return mutable.MutableList.coerce(key, value)
+            else:
+                return value
+        except ValueError as e:
+            raise exceptions.ValueFormatException('could not coerce to MutableList', cause=e)
+
+    def __setitem__(self, index, value):
+        """Detect list set events and emit change events."""
+        self._assert_item(value)
+        super(_StrictListMixin, self).__setitem__(index, value)
+
+    def append(self, item):
+        self._assert_item(item)
+        super(_StrictListMixin, self).append(item)
+
+    def extend(self, iterable):
+        items = list(iterable)
+        for item in items:
+            self._assert_item(item)
+        super(_StrictListMixin, self).extend(items)
+
+    def insert(self, index, item):
+        self._assert_item(item)
+        super(_StrictListMixin, self).insert(index, item)
+
+    @classmethod
+    def _assert_item(cls, item):
+        if cls._item_cls is not None and not isinstance(item, cls._item_cls):
+            raise exceptions.ValueFormatException('item type was set strictly to {0}, but was {1}'
+                                                  .format(cls._item_cls, type(item)))
+
+
+class _MutableList(mutable.MutableList):
+
+    @classmethod
+    def coerce(cls, key, value):
+        "Convert plain dictionaries to MutableDict."
+        try:
+            return mutable.MutableList.coerce(key, value)
+        except ValueError as e:
+            raise exceptions.ValueFormatException('could not coerce to MutableList', cause=e)
+
+
+_StrictDictID = namedtuple('_StrictDictID', 'key_cls, value_cls')
+_StrictValue = namedtuple('_StrictValue', 'type_cls, listener_cls')
+
+class _StrictDict(object):
+    """
+    A factory for strict dict column types and their mutation listeners.
+    Each (key_cls, value_cls) combination is created at most once; later calls return the
+    cached type class.
+    """
+    _strict_map = {}
+
+    def __call__(self, key_cls=None, value_cls=None):
+        strict_dict_map_key = _StrictDictID(key_cls=key_cls, value_cls=value_cls)
+        if strict_dict_map_key not in self._strict_map:
+            key_cls_name = getattr(key_cls, '__name__', str(key_cls))
+            value_cls_name = getattr(value_cls, '__name__', str(value_cls))
+            # Create the type class itself. This is the class that is returned (and used by the
+            # SQLAlchemy Column).
+            strict_dict_cls = type(
+                'StrictDict_{0}_{1}'.format(key_cls_name, value_cls_name),
+                (Dict, ),
+                {}
+            )
+            # Creating the type listening class.
+            # The new class inherits from both the _MutableDict class and the _StrictDictMixin,
+            # while setting the necessary _key_cls and _value_cls as class attributes.
+            listener_cls = type(
+                'StrictMutableDict_{0}_{1}'.format(key_cls_name, value_cls_name),
+                (_StrictDictMixin, _MutableDict),
+                {'_key_cls': key_cls, '_value_cls': value_cls}
+            )
+            self._strict_map[strict_dict_map_key] = _StrictValue(type_cls=strict_dict_cls,
+                                                                 listener_cls=listener_cls)
+
+        return self._strict_map[strict_dict_map_key].type_cls
+
+
+StrictDict = _StrictDict()
+
+
+class _StrictList(object):
+    """
+    A factory for strict list column types and their mutation listeners.
+    Each item_cls is created at most once; later calls return the cached type class.
+    """
+    _strict_map = {}
+
+    def __call__(self, item_cls=None):
+
+        if item_cls not in self._strict_map:
+            item_cls_name = getattr(item_cls, '__name__', str(item_cls))
+            # Create the type class itself. This is the class that is returned (and used by the
+            # SQLAlchemy Column).
+            strict_list_cls = type(
+                'StrictList_{0}'.format(item_cls_name),
+                (List, ),
+                {}
+            )
+            # Creating the type listening class.
+            # The new class inherits from both the _MutableList class and the _StrictListMixin,
+            # while setting the necessary _item_cls as class attribute.
+            listener_cls = type(
+                'StrictMutableList_{0}'.format(item_cls_name),
+                (_StrictListMixin, _MutableList),
+                {'_item_cls': item_cls}
+            )
+            self._strict_map[item_cls] = _StrictValue(type_cls=strict_list_cls,
+                                                      listener_cls=listener_cls)
+
+        return self._strict_map[item_cls].type_cls
+
+
+StrictList = _StrictList()
+
+
+def _mutable_association_listener(mapper, cls):
+    strict_dict_type_to_listener = \
+        dict((v.type_cls, v.listener_cls) for v in _StrictDict._strict_map.values())
+
+    strict_list_type_to_listener = \
+        dict((v.type_cls, v.listener_cls) for v in _StrictList._strict_map.values())
+
+    for prop in mapper.column_attrs:
+        column_type = prop.columns[0].type
+        # Dict Listeners
+        if type(column_type) in strict_dict_type_to_listener:                                       # pylint: disable=unidiomatic-typecheck
+            strict_dict_type_to_listener[type(column_type)].associate_with_attribute(
+                getattr(cls, prop.key))
+        elif isinstance(column_type, Dict):
+            _MutableDict.associate_with_attribute(getattr(cls, prop.key))
+
+        # List Listeners
+        if type(column_type) in strict_list_type_to_listener:                                       # pylint: disable=unidiomatic-typecheck
+            strict_list_type_to_listener[type(column_type)].associate_with_attribute(
+                getattr(cls, prop.key))
+        elif isinstance(column_type, List):
+            _MutableList.associate_with_attribute(getattr(cls, prop.key))
+
+
+_LISTENER_ARGS = (mutable.mapper, 'mapper_configured', _mutable_association_listener)
+
+
+def _register_mutable_association_listener():
+    event.listen(*_LISTENER_ARGS)
+
+
+def remove_mutable_association_listener():
+    """
+    Remove the event listener that associates ``Dict`` and ``List`` column types with
+    ``MutableDict`` and ``MutableList``, respectively.
+
+    This call must happen before any model instance is created: creating an instance triggers
+    the listener, which in turn registers further listeners that cannot be undone afterwards.
+
+    This function exists because the association listener interferes with ARIA's change-tracking
+    instrumentation, so a way to disable it is required.
+
+    Note that the event listener this call removes is registered by default.
+    """
+    if event.contains(*_LISTENER_ARGS):
+        event.remove(*_LISTENER_ARGS)
+
+
+_register_mutable_association_listener()

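A usage sketch (not from the commit) for the StrictDict and StrictList factories defined above in
aria/modeling/types.py. The Example model, its column names, and the in-memory sqlite engine are
hypothetical; the sketch assumes the default mutable-association listener registered at import
time and a Python 2 environment (basestring), matching the rest of this code.

    from sqlalchemy import Column, Integer, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    from aria.modeling import types as modeling_types

    Base = declarative_base()

    class Example(Base):
        __tablename__ = 'example'
        id = Column(Integer, primary_key=True)
        labels = Column(modeling_types.StrictDict(key_cls=basestring, value_cls=basestring))
        ports = Column(modeling_types.StrictList(item_cls=int))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    instance = Example(labels={'tier': 'web'}, ports=[8080])
    instance.labels['zone'] = 'a'   # in-place mutation is tracked by the generated listener class
    instance.ports.append(8443)
    session.add(instance)
    session.commit()

    # A wrongly typed key, value or item is rejected by the strict listener:
    # instance.ports.append('not-an-int')  # raises ValueFormatException

    # To opt out (e.g. when ARIA's own instrumentation is used instead), call
    # modeling_types.remove_mutable_association_listener() before creating any model instance.
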
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/modeling/utils.py
----------------------------------------------------------------------
diff --git a/aria/modeling/utils.py b/aria/modeling/utils.py
new file mode 100644
index 0000000..0b4015c
--- /dev/null
+++ b/aria/modeling/utils.py
@@ -0,0 +1,121 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ..parser.consumption import ConsumptionContext
+from ..parser.exceptions import InvalidValueError
+from ..parser.presentation import Value
+from ..utils.collections import OrderedDict
+from ..utils.console import puts
+from .exceptions import CannotEvaluateFunctionException
+
+
+def coerce_value(container, value, report_issues=False):
+    if isinstance(value, Value):
+        value = value.value
+
+    if isinstance(value, list):
+        return [coerce_value(container, v, report_issues) for v in value]
+    elif isinstance(value, dict):
+        return OrderedDict((k, coerce_value(container, v, report_issues))
+                           for k, v in value.iteritems())
+    elif hasattr(value, '_evaluate'):
+        context = ConsumptionContext.get_thread_local()
+        try:
+            value = value._evaluate(context, container)
+            value = coerce_value(container, value, report_issues)
+        except CannotEvaluateFunctionException:
+            pass
+        except InvalidValueError as e:
+            if report_issues:
+                context.validation.report(e.issue)
+    return value
+
+
+def coerce_dict_values(container, the_dict, report_issues=False):
+    if not the_dict:
+        return
+    coerce_list_values(container, the_dict.itervalues(), report_issues)
+
+
+def coerce_list_values(container, the_list, report_issues=False):
+    if not the_list:
+        return
+    for value in the_list:
+        value.coerce_values(container, report_issues)
+
+
+def validate_dict_values(the_dict):
+    if not the_dict:
+        return
+    validate_list_values(the_dict.itervalues())
+
+
+def validate_list_values(the_list):
+    if not the_list:
+        return
+    for value in the_list:
+        value.validate()
+
+
+def instantiate_dict(container, the_dict, from_dict):
+    if not from_dict:
+        return
+    for name, value in from_dict.iteritems():
+        value = value.instantiate(container)
+        if value is not None:
+            the_dict[name] = value
+
+
+def instantiate_list(container, the_list, from_list):
+    if not from_list:
+        return
+    for value in from_list:
+        value = value.instantiate(container)
+        if value is not None:
+            the_list.append(value)
+
+
+def dump_list_values(the_list, name):
+    if not the_list:
+        return
+    puts('%s:' % name)
+    context = ConsumptionContext.get_thread_local()
+    with context.style.indent:
+        for value in the_list:
+            value.dump()
+
+
+def dump_dict_values(the_dict, name):
+    if not the_dict:
+        return
+    dump_list_values(the_dict.itervalues(), name)
+
+
+def dump_interfaces(interfaces, name='Interfaces'):
+    if not interfaces:
+        return
+    puts('%s:' % name)
+    context = ConsumptionContext.get_thread_local()
+    with context.style.indent:
+        for interface in interfaces.itervalues():
+            interface.dump()
+
+
+class classproperty(object):                                                                        # pylint: disable=invalid-name
+    def __init__(self, f):
+        self._func = f
+
+    def __get__(self, instance, owner):
+        return self._func(owner)

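A small, hypothetical sketch of how the helpers in aria/modeling/utils.py above compose.
GreetingTemplate and Widget are illustrative stand-ins, not classes from the commit; real
template models additionally report issues through the consumption context.

    from aria.modeling import utils

    class GreetingTemplate(object):
        """Toy object exposing the validate()/instantiate() protocol the helpers expect."""

        def __init__(self, name):
            self.name = name

        def validate(self):
            pass  # a real template would report issues via ConsumptionContext

        def instantiate(self, container):
            return 'Hello, {0}!'.format(self.name)

    templates = {'greeting': GreetingTemplate('ARIA')}
    instances = {}

    utils.validate_dict_values(templates)                # calls validate() on every value
    utils.instantiate_dict(None, instances, templates)   # fills instances from the templates
    assert instances == {'greeting': 'Hello, ARIA!'}

    class Widget(object):
        @utils.classproperty
        def kind(cls):  # pylint: disable=no-self-argument
            return cls.__name__.lower()

    assert Widget.kind == 'widget'
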
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9841ca4a/aria/orchestrator/__init__.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/__init__.py b/aria/orchestrator/__init__.py
index 097ee1d..0b57e4b 100644
--- a/aria/orchestrator/__init__.py
+++ b/aria/orchestrator/__init__.py
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-Aria orchestrator
+ARIA orchestrator
 """
 from .decorators import workflow, operation, WORKFLOW_DECORATOR_RESERVED_ARGUMENTS