Posted to dev@ariatosca.apache.org by em...@apache.org on 2017/05/26 18:10:12 UTC

[1/7] incubator-ariatosca git commit: NullPool logging messages appear during execution [Forced Update!]

Repository: incubator-ariatosca
Updated Branches:
  refs/heads/ARIA-149-functions-in-operation-configuration c063b4097 -> 888c5cd6f (forced update)


NullPool logging messages appear during execution


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/0c986842
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/0c986842
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/0c986842

Branch: refs/heads/ARIA-149-functions-in-operation-configuration
Commit: 0c986842d52eca823ab92442dd9d77267e369ae8
Parents: 3d22d36
Author: max-orlov <ma...@gigaspaces.com>
Authored: Mon May 22 18:28:12 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Wed May 24 12:15:13 2017 +0300

----------------------------------------------------------------------
 aria/orchestrator/context/common.py             |  1 -
 aria/orchestrator/context/operation.py          | 13 +++++--
 .../execution_plugin/ctx_proxy/client.py        | 22 ++++++------
 .../execution_plugin/ctx_proxy/server.py        | 37 +++++++++++++-------
 aria/orchestrator/workflows/executor/process.py |  2 +-
 .../execution_plugin/test_ctx_proxy_server.py   |  4 ++-
 .../orchestrator/workflows/executor/__init__.py |  2 +-
 7 files changed, 52 insertions(+), 29 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0c986842/aria/orchestrator/context/common.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/common.py b/aria/orchestrator/context/common.py
index 0854a27..c98e026 100644
--- a/aria/orchestrator/context/common.py
+++ b/aria/orchestrator/context/common.py
@@ -16,7 +16,6 @@
 """
 A common context for both workflow and operation
 """
-
 import logging
 from contextlib import contextmanager
 from functools import partial

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0c986842/aria/orchestrator/context/operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/operation.py b/aria/orchestrator/context/operation.py
index 68a02aa..0ce790f 100644
--- a/aria/orchestrator/context/operation.py
+++ b/aria/orchestrator/context/operation.py
@@ -33,6 +33,7 @@ class BaseOperationContext(BaseContext):
         self._task_id = task_id
         self._actor_id = actor_id
         self._thread_local = threading.local()
+        self._destroy_session = kwargs.pop('destroy_session', False)
         logger_level = kwargs.pop('logger_level', None)
         super(BaseOperationContext, self).__init__(**kwargs)
         self._register_logger(task_id=self.task.id, level=logger_level)
@@ -90,13 +91,21 @@ class BaseOperationContext(BaseContext):
         }
 
     @classmethod
-    def deserialize_from_dict(cls, model_storage=None, resource_storage=None, **kwargs):
+    def instantiate_from_dict(cls, model_storage=None, resource_storage=None, **kwargs):
         if model_storage:
             model_storage = aria.application_model_storage(**model_storage)
         if resource_storage:
             resource_storage = aria.application_resource_storage(**resource_storage)
 
-        return cls(model_storage=model_storage, resource_storage=resource_storage, **kwargs)
+        return cls(model_storage=model_storage,
+                   resource_storage=resource_storage,
+                   destroy_session=True,
+                   **kwargs)
+
+    def close(self):
+        if self._destroy_session:
+            self.model.log._session.remove()
+            self.model.log._engine.dispose()
 
 
 class NodeOperationContext(BaseOperationContext):
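
The destroy_session flag is only set by instantiate_from_dict, i.e. when the
context is rebuilt inside a subprocess; close() then removes the scoped
session and disposes the engine, which is what stops the stray NullPool log
lines named in the commit title. A minimal, self-contained sketch of that
same teardown, assuming SQLAlchemy and a throwaway SQLite file (all names
below are illustrative, not ARIA API):

    from sqlalchemy import create_engine, text
    from sqlalchemy.orm import scoped_session, sessionmaker
    from sqlalchemy.pool import NullPool

    # NullPool opens and closes a connection per checkout; it is the pool
    # whose log messages this commit is cleaning up after.
    engine = create_engine('sqlite:///example.db', poolclass=NullPool)
    session = scoped_session(sessionmaker(bind=engine))

    try:
        session.execute(text('SELECT 1'))  # stand-in for real task work
    finally:
        session.remove()   # mirrors self.model.log._session.remove()
        engine.dispose()   # mirrors self.model.log._engine.dispose()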

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0c986842/aria/orchestrator/execution_plugin/ctx_proxy/client.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/ctx_proxy/client.py b/aria/orchestrator/execution_plugin/ctx_proxy/client.py
index d965a5e..f7f56aa 100644
--- a/aria/orchestrator/execution_plugin/ctx_proxy/client.py
+++ b/aria/orchestrator/execution_plugin/ctx_proxy/client.py
@@ -34,22 +34,25 @@ class _RequestError(RuntimeError):
         self.ex_traceback = ex_traceback
 
 
-def _http_request(socket_url, request, timeout):
-    response = urllib2.urlopen(
-        url=socket_url,
-        data=json.dumps(request),
-        timeout=timeout)
+def _http_request(socket_url, request, method, timeout):
+    opener = urllib2.build_opener(urllib2.HTTPHandler)
+    request = urllib2.Request(socket_url, data=json.dumps(request))
+    request.get_method = lambda: method
+    response = opener.open(request, timeout=timeout)
+
     if response.code != 200:
         raise RuntimeError('Request failed: {0}'.format(response))
     return json.loads(response.read())
 
 
-def _client_request(socket_url, args, timeout):
+def _client_request(socket_url, args, timeout, method='POST'):
     response = _http_request(
         socket_url=socket_url,
         request={'args': args},
-        timeout=timeout)
-    payload = response['payload']
+        method=method,
+        timeout=timeout
+    )
+    payload = response.get('payload')
     response_type = response.get('type')
     if response_type == 'error':
         ex_type = payload['type']
@@ -89,7 +92,7 @@ def _process_args(json_prefix, args):
 def main(args=None):
     args = _parse_args(args)
     response = _client_request(
-        socket_url=args.socket_url,
+        args.socket_url,
         args=_process_args(args.json_arg_prefix, args.args),
         timeout=args.timeout)
     if args.json_output:
@@ -100,6 +103,5 @@ def main(args=None):
         response = str(response)
     sys.stdout.write(response)
 
-
 if __name__ == '__main__':
     main()
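
urllib2.Request picks GET or POST purely from whether data is present, so the
patched _http_request overrides get_method to force an arbitrary verb. The
same trick in isolation (Python 2, matching the module above; the URL in the
usage comment is a placeholder):

    import json
    import urllib2

    def http_json_request(url, payload, method='POST', timeout=5):
        # Overriding get_method lets a data-carrying request use any verb
        # (PUT, DELETE, ...), not just POST.
        opener = urllib2.build_opener(urllib2.HTTPHandler)
        request = urllib2.Request(url, data=json.dumps(payload))
        request.get_method = lambda: method
        response = opener.open(request, timeout=timeout)
        if response.code != 200:
            raise RuntimeError('Request failed: {0}'.format(response))
        return json.loads(response.read())

    # e.g. http_json_request('http://localhost:8080', {'args': []}, 'PUT')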

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0c986842/aria/orchestrator/execution_plugin/ctx_proxy/server.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/ctx_proxy/server.py b/aria/orchestrator/execution_plugin/ctx_proxy/server.py
index 52a5312..1ce0e08 100644
--- a/aria/orchestrator/execution_plugin/ctx_proxy/server.py
+++ b/aria/orchestrator/execution_plugin/ctx_proxy/server.py
@@ -42,16 +42,31 @@ class CtxProxy(object):
         self._started.get(timeout=5)
 
     def _start_server(self):
-        proxy = self
 
         class BottleServerAdapter(bottle.ServerAdapter):
+            proxy = self
+
+            def close_session(self):
+                self.proxy.ctx.model.log._session.remove()
+
             def run(self, app):
+
                 class Server(wsgiref.simple_server.WSGIServer):
                     allow_reuse_address = True
+                    bottle_server = self
 
                     def handle_error(self, request, client_address):
                         pass
 
+                    def serve_forever(self, poll_interval=0.5):
+                        try:
+                            wsgiref.simple_server.WSGIServer.serve_forever(self, poll_interval)
+                        finally:
+                            # Once shutdown is called, we need to close the session.
+                            # If the session is not closed properly, it might raise warnings,
+                            # or even lock the database.
+                            self.bottle_server.close_session()
+
                 class Handler(wsgiref.simple_server.WSGIRequestHandler):
                     def address_string(self):
                         return self.client_address[0]
@@ -66,8 +81,8 @@ class CtxProxy(object):
                     app=app,
                     server_class=Server,
                     handler_class=Handler)
-                proxy.server = server
-                proxy._started.put(True)
+                self.proxy.server = server
+                self.proxy._started.put(True)
                 server.serve_forever(poll_interval=0.1)
 
         def serve():
@@ -96,9 +111,10 @@ class CtxProxy(object):
         request = bottle.request.body.read()  # pylint: disable=no-member
         response = self._process(request)
         return bottle.LocalResponse(
-            body=response,
+            body=json.dumps(response, cls=modeling.utils.ModelJSONEncoder),
             status=200,
-            headers={'content-type': 'application/json'})
+            headers={'content-type': 'application/json'}
+        )
 
     def _process(self, request):
         try:
@@ -109,10 +125,7 @@ class CtxProxy(object):
             if isinstance(payload, exceptions.ScriptException):
                 payload = dict(message=str(payload))
                 result_type = 'stop_operation'
-            result = json.dumps({
-                'type': result_type,
-                'payload': payload
-            }, cls=modeling.utils.ModelJSONEncoder)
+            result = {'type': result_type, 'payload': payload}
         except Exception as e:
             traceback_out = StringIO.StringIO()
             traceback.print_exc(file=traceback_out)
@@ -121,10 +134,8 @@ class CtxProxy(object):
                 'message': str(e),
                 'traceback': traceback_out.getvalue()
             }
-            result = json.dumps({
-                'type': 'error',
-                'payload': payload
-            })
+            result = {'type': 'error', 'payload': payload}
+
         return result
 
     def __enter__(self):
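
The serve_forever override is the usual try/finally idiom for running cleanup
exactly once when shutdown() ends the serving loop. The bare pattern, with a
stand-in hook where the diff removes the SQLAlchemy session:

    import wsgiref.simple_server

    class CleanShutdownServer(wsgiref.simple_server.WSGIServer):
        allow_reuse_address = True

        def serve_forever(self, poll_interval=0.5):
            try:
                wsgiref.simple_server.WSGIServer.serve_forever(self, poll_interval)
            finally:
                # Reached once, after shutdown() stops the loop; the real
                # server calls close_session() here so the database is not
                # left locked and no warnings are raised.
                self.on_shutdown()

        def on_shutdown(self):  # hypothetical hook, not part of wsgiref
            pass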

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0c986842/aria/orchestrator/workflows/executor/process.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py
index 824c4e1..da6bbb2 100644
--- a/aria/orchestrator/workflows/executor/process.py
+++ b/aria/orchestrator/workflows/executor/process.py
@@ -373,7 +373,7 @@ def _main():
     # See docstring of `remove_mutable_association_listener` for further details
     modeling_types.remove_mutable_association_listener()
     try:
-        ctx = context_dict['context_cls'].deserialize_from_dict(**context_dict['context'])
+        ctx = context_dict['context_cls'].instantiate_from_dict(**context_dict['context'])
     except BaseException as e:
         messenger.failed(exception=e, tracked_changes=None, new_instances=None)
         return

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0c986842/tests/orchestrator/execution_plugin/test_ctx_proxy_server.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_ctx_proxy_server.py b/tests/orchestrator/execution_plugin/test_ctx_proxy_server.py
index 98ceff9..1b19fd9 100644
--- a/tests/orchestrator/execution_plugin/test_ctx_proxy_server.py
+++ b/tests/orchestrator/execution_plugin/test_ctx_proxy_server.py
@@ -136,7 +136,7 @@ class TestCtxProxy(object):
             kwargs=kwargs)
 
     @pytest.fixture
-    def ctx(self):
+    def ctx(self, mocker):
         class MockCtx(object):
             pass
         ctx = MockCtx()
@@ -160,11 +160,13 @@ class TestCtxProxy(object):
         ctx.stub_args = self.stub_args
         ctx.stub_attr = self.StubAttribute()
         ctx.node = self.NodeAttribute(properties)
+        ctx.model = mocker.MagicMock()
         return ctx
 
     @pytest.fixture
     def server(self, ctx):
         result = ctx_proxy.server.CtxProxy(ctx)
+        result._close_session = lambda *args, **kwargs: {}
         yield result
         result.close()
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/0c986842/tests/orchestrator/workflows/executor/__init__.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/__init__.py b/tests/orchestrator/workflows/executor/__init__.py
index 8ad8edb..375c44e 100644
--- a/tests/orchestrator/workflows/executor/__init__.py
+++ b/tests/orchestrator/workflows/executor/__init__.py
@@ -69,7 +69,7 @@ class MockContext(object):
         return None
 
     @classmethod
-    def deserialize_from_dict(cls, **kwargs):
+    def instantiate_from_dict(cls, **kwargs):
         if kwargs:
             return cls(storage=aria.application_model_storage(**kwargs))
         else:


[3/7] incubator-ariatosca git commit: ARIA-258 Convert runtime_properties to attributes

Posted by em...@apache.org.
ARIA-258 Convert runtime_properties to attributes


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/50b997e3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/50b997e3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/50b997e3

Branch: refs/heads/ARIA-149-functions-in-operation-configuration
Commit: 50b997e3bfbaf26df5e66327d30fe8a015b92dd7
Parents: 0c98684
Author: max-orlov <ma...@gigaspaces.com>
Authored: Sun May 14 22:38:39 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Thu May 25 18:30:21 2017 +0300

----------------------------------------------------------------------
 aria/cli/commands/nodes.py                      |   6 +-
 aria/modeling/service_common.py                 |   9 +-
 aria/modeling/service_instance.py               |   8 +-
 aria/modeling/service_template.py               |   1 -
 aria/modeling/types.py                          |  20 -
 .../context/collection_instrumentation.py       | 242 ++++++++++++
 aria/orchestrator/context/operation.py          |  13 +-
 aria/orchestrator/context/toolbelt.py           |   5 +-
 .../execution_plugin/ctx_proxy/server.py        |   1 -
 aria/orchestrator/workflows/core/engine.py      |   1 -
 aria/orchestrator/workflows/executor/process.py | 125 +-----
 aria/storage/instrumentation.py                 | 282 -------------
 tests/helpers.py                                |  10 +
 tests/mock/models.py                            |   7 +-
 tests/modeling/test_mixins.py                   |   1 -
 tests/modeling/test_models.py                   |  28 +-
 .../context/test_collection_instrumentation.py  | 253 ++++++++++++
 tests/orchestrator/context/test_operation.py    |  90 ++++-
 tests/orchestrator/context/test_toolbelt.py     |   5 +-
 .../orchestrator/execution_plugin/test_local.py |  66 ++--
 tests/orchestrator/execution_plugin/test_ssh.py |  36 +-
 tests/orchestrator/workflows/core/test_task.py  |   2 +-
 .../orchestrator/workflows/executor/__init__.py |   4 +
 ...process_executor_concurrent_modifications.py |  67 ++--
 .../executor/test_process_executor_extension.py |   6 +-
 .../test_process_executor_tracked_changes.py    |  56 ++-
 tests/resources/scripts/test_ssh.sh             |  30 +-
 tests/storage/test_instrumentation.py           | 396 -------------------
 28 files changed, 786 insertions(+), 984 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/aria/cli/commands/nodes.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/nodes.py b/aria/cli/commands/nodes.py
index e43493f..1bbefe6 100644
--- a/aria/cli/commands/nodes.py
+++ b/aria/cli/commands/nodes.py
@@ -47,9 +47,9 @@ def show(node_id, model_storage, logger):
 
     # print node attributes
     logger.info('Node attributes:')
-    if node.runtime_properties:
-        for prop_name, prop_value in node.runtime_properties.iteritems():
-            logger.info('\t{0}: {1}'.format(prop_name, prop_value))
+    if node.attributes:
+        for param_name, param in node.attributes.iteritems():
+            logger.info('\t{0}: {1}'.format(param_name, param.value))
     else:
         logger.info('\tNo attributes')
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/aria/modeling/service_common.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_common.py b/aria/modeling/service_common.py
index e9c96a4..ef19c8e 100644
--- a/aria/modeling/service_common.py
+++ b/aria/modeling/service_common.py
@@ -218,14 +218,13 @@ class ParameterBase(TemplateModelMixin, caching.HasCachedMethods):
         :type description: basestring
         """
 
-        from . import models
         type_name = canonical_type_name(value)
         if type_name is None:
             type_name = full_type_name(value)
-        return models.Parameter(name=name, # pylint: disable=unexpected-keyword-arg
-                                type_name=type_name,
-                                value=value,
-                                description=description)
+        return cls(name=name, # pylint: disable=unexpected-keyword-arg
+                   type_name=type_name,
+                   value=value,
+                   description=description)
 
 
 class TypeBase(InstanceModelMixin):
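
Replacing models.Parameter(...) with cls(...) makes the factory honor
subclassing: calling it through a subclass now yields that subclass rather
than always a Parameter. A self-contained illustration of the difference
(the method name here is illustrative):

    class Parameter(object):
        def __init__(self, name, value):
            self.name, self.value = name, value

        @classmethod
        def wrap(cls, name, value):
            return cls(name, value)  # cls, not Parameter: subclasses
                                     # construct instances of themselves

    class Attribute(Parameter):
        pass

    assert isinstance(Attribute.wrap('ip', '10.0.0.1'), Attribute)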

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/aria/modeling/service_instance.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py
index 41a388d..7058969 100644
--- a/aria/modeling/service_instance.py
+++ b/aria/modeling/service_instance.py
@@ -333,8 +333,6 @@ class NodeBase(InstanceModelMixin):
     :vartype inbound_relationships: [:class:`Relationship`]
     :ivar host: Host node (can be self)
     :vartype host: :class:`Node`
-    :ivar runtime_properties: TODO: should be replaced with attributes
-    :vartype runtime_properties: {}
     :ivar state: The state of the node, according to the TOSCA-defined node states
     :vartype state: string
     :ivar version: Used by `aria.storage.instrumentation`
@@ -520,7 +518,6 @@ class NodeBase(InstanceModelMixin):
     # endregion
 
     description = Column(Text)
-    runtime_properties = Column(modeling_types.Dict)
     state = Column(Enum(*STATES, name='node_state'), nullable=False, default=INITIAL)
     version = Column(Integer, default=1)
 
@@ -528,8 +525,9 @@ class NodeBase(InstanceModelMixin):
 
     @property
     def host_address(self):
-        if self.host and self.host.runtime_properties:
-            return self.host.runtime_properties.get('ip')
+        if self.host and self.host.attributes:
+            attribute = self.host.attributes.get('ip')
+            return attribute.value if attribute else None
         return None
 
     def satisfy_requirements(self):
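
After ARIA-258, node attributes are Parameter rows rather than a plain dict,
so reads must unwrap .value, as host_address now does. The access pattern in
miniature (stand-in class, since models.Parameter needs storage wiring):

    class _Param(object):  # stand-in for models.Parameter
        def __init__(self, value):
            self.value = value

    attributes = {'ip': _Param('10.0.0.1')}  # shape of node.attributes
    attribute = attributes.get('ip')
    print(attribute.value if attribute else None)  # -> 10.0.0.1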

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
index 12195a1..3110248 100644
--- a/aria/modeling/service_template.py
+++ b/aria/modeling/service_template.py
@@ -562,7 +562,6 @@ class NodeTemplateBase(TemplateModelMixin):
                            type=self.type,
                            description=deepcopy_with_locators(self.description),
                            state=models.Node.INITIAL,
-                           runtime_properties={},
                            node_template=self)
         utils.instantiate_dict(node, node.properties, self.properties)
         utils.instantiate_dict(node, node.attributes, self.attributes)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/aria/modeling/types.py
----------------------------------------------------------------------
diff --git a/aria/modeling/types.py b/aria/modeling/types.py
index 7460f47..920a0c2 100644
--- a/aria/modeling/types.py
+++ b/aria/modeling/types.py
@@ -286,24 +286,4 @@ _LISTENER_ARGS = (mutable.mapper, 'mapper_configured', _mutable_association_list
 def _register_mutable_association_listener():
     event.listen(*_LISTENER_ARGS)
 
-
-def remove_mutable_association_listener():
-    """
-    Remove the event listener that associates ``Dict`` and ``List`` column types with
-    ``MutableDict`` and ``MutableList``, respectively.
-
-    This call must happen before any model instance is instantiated.
-    This is because once it does, that would trigger the listener we are trying to remove.
-    Once it is triggered, many other listeners will then be registered.
-    At that point, it is too late.
-
-    The reason this function exists is that the association listener, interferes with ARIA change
-    tracking instrumentation, so a way to disable it is required.
-
-    Note that the event listener this call removes is registered by default.
-    """
-    if event.contains(*_LISTENER_ARGS):
-        event.remove(*_LISTENER_ARGS)
-
-
 _register_mutable_association_listener()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/aria/orchestrator/context/collection_instrumentation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/collection_instrumentation.py b/aria/orchestrator/context/collection_instrumentation.py
new file mode 100644
index 0000000..91cfd35
--- /dev/null
+++ b/aria/orchestrator/context/collection_instrumentation.py
@@ -0,0 +1,242 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from functools import partial
+
+from aria.modeling import models
+
+
+class _InstrumentedCollection(object):
+
+    def __init__(self,
+                 model,
+                 parent,
+                 field_name,
+                 seq=None,
+                 is_top_level=True,
+                 **kwargs):
+        self._model = model
+        self._parent = parent
+        self._field_name = field_name
+        self._is_top_level = is_top_level
+        self._load(seq, **kwargs)
+
+    @property
+    def _raw(self):
+        raise NotImplementedError
+
+    def _load(self, seq, **kwargs):
+        """
+        Instantiates the object from existing seq.
+
+        :param seq: the original sequence to load from
+        :return:
+        """
+        raise NotImplementedError
+
+    def _set(self, key, value):
+        """
+        set the changes for the current object (not in the db)
+
+        :param key:
+        :param value:
+        :return:
+        """
+        raise NotImplementedError
+
+    def _del(self, collection, key):
+        raise NotImplementedError
+
+    def _instrument(self, key, value):
+        """
+        Instruments any collection to track changes (and ease of access)
+        :param key:
+        :param value:
+        :return:
+        """
+        if isinstance(value, _InstrumentedCollection):
+            return value
+        elif isinstance(value, dict):
+            instrumentation_cls = _InstrumentedDict
+        elif isinstance(value, list):
+            instrumentation_cls = _InstrumentedList
+        else:
+            return value
+
+        return instrumentation_cls(self._model, self, key, value, False)
+
+    @staticmethod
+    def _raw_value(value):
+        """
+        Get the raw value.
+        :param value:
+        :return:
+        """
+        if isinstance(value, models.Parameter):
+            return value.value
+        return value
+
+    @staticmethod
+    def _encapsulate_value(key, value):
+        """
+        Create a new item cls if needed.
+        :param key:
+        :param value:
+        :return:
+        """
+        if isinstance(value, models.Parameter):
+            return value
+        # If it is not wrapped
+        return models.Parameter.wrap(key, value)
+
+    def __setitem__(self, key, value):
+        """
+        Update the values in both the local and the db locations.
+        :param key:
+        :param value:
+        :return:
+        """
+        self._set(key, value)
+        if self._is_top_level:
+            # We are at the top level
+            field = getattr(self._parent, self._field_name)
+            mapi = getattr(self._model, models.Parameter.__modelname__)
+            value = self._set_field(field,
+                                    key,
+                                    value if key in field else self._encapsulate_value(key, value))
+            mapi.update(value)
+        else:
+            # We are not at the top level
+            self._set_field(self._parent, self._field_name, self)
+
+    def _set_field(self, collection, key, value):
+        """
+        enables updating the current change in the ancestors
+        :param collection: the collection to change
+        :param key: the key for the specific field
+        :param value: the new value
+        :return:
+        """
+        if isinstance(value, _InstrumentedCollection):
+            value = value._raw
+        if key in collection and isinstance(collection[key], models.Parameter):
+            if isinstance(collection[key], _InstrumentedCollection):
+                self._del(collection, key)
+            collection[key].value = value
+        else:
+            collection[key] = value
+        return collection[key]
+
+    def __deepcopy__(self, *args, **kwargs):
+        return self._raw
+
+
+class _InstrumentedDict(_InstrumentedCollection, dict):
+
+    def _load(self, dict_=None, **kwargs):
+        dict.__init__(
+            self,
+            tuple((key, self._raw_value(value)) for key, value in (dict_ or {}).items()),
+            **kwargs)
+
+    def update(self, dict_=None, **kwargs):
+        dict_ = dict_ or {}
+        for key, value in dict_.items():
+            self[key] = value
+        for key, value in kwargs.items():
+            self[key] = value
+
+    def __getitem__(self, key):
+        return self._instrument(key, dict.__getitem__(self, key))
+
+    def _set(self, key, value):
+        dict.__setitem__(self, key, self._raw_value(value))
+
+    @property
+    def _raw(self):
+        return dict(self)
+
+    def _del(self, collection, key):
+        del collection[key]
+
+
+class _InstrumentedList(_InstrumentedCollection, list):
+
+    def _load(self, list_=None, **kwargs):
+        list.__init__(self, list(item for item in list_ or []))
+
+    def append(self, value):
+        self.insert(len(self), value)
+
+    def insert(self, index, value):
+        list.insert(self, index, self._raw_value(value))
+        if self._is_top_level:
+            field = getattr(self._parent, self._field_name)
+            field.insert(index, self._encapsulate_value(index, value))
+        else:
+            self._parent[self._field_name] = self
+
+    def __getitem__(self, key):
+        return self._instrument(key, list.__getitem__(self, key))
+
+    def _set(self, key, value):
+        list.__setitem__(self, key, value)
+
+    def _del(self, collection, key):
+        del collection[key]
+
+    @property
+    def _raw(self):
+        return list(self)
+
+
+class _InstrumentedModel(object):
+
+    def __init__(self, field_name, original_model, model_storage):
+        super(_InstrumentedModel, self).__init__()
+        self._field_name = field_name
+        self._model_storage = model_storage
+        self._original_model = original_model
+        self._apply_instrumentation()
+
+    def __getattr__(self, item):
+        return getattr(self._original_model, item)
+
+    def _apply_instrumentation(self):
+
+        field = getattr(self._original_model, self._field_name)
+
+        # Preserve the original value. e.g. original attributes would be located under
+        # _attributes
+        setattr(self, '_{0}'.format(self._field_name), field)
+
+        # set instrumented value
+        setattr(self, self._field_name, _InstrumentedDict(self._model_storage,
+                                                          self._original_model,
+                                                          self._field_name,
+                                                          field))
+
+
+def instrument_collection(field_name, func=None):
+    if func is None:
+        return partial(instrument_collection, field_name)
+
+    def _wrapper(*args, **kwargs):
+        original_model = func(*args, **kwargs)
+        return type('Instrumented{0}'.format(original_model.__class__.__name__),
+                    (_InstrumentedModel, ),
+                    {})(field_name, original_model, args[0].model)
+
+    return _wrapper
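
instrument_collection is a property decorator: _wrapper invokes the original
getter, then rebinds the returned model inside a dynamically created
Instrumented<Class> whose named field becomes an _InstrumentedDict that
writes through to the Parameter mapi (note that it reads the model storage
from args[0].model, i.e. from the context the property is bound to). The
application shape, as in the operation.py diff that follows (the context
class here is a hypothetical stand-in):

    from aria.orchestrator.context import collection_instrumentation

    class MyOperationContext(object):  # hypothetical, not the real context
        def __init__(self, model_storage, node):
            self.model = model_storage  # _wrapper reads args[0].model
            self._node = node

        @property
        @collection_instrumentation.instrument_collection('attributes')
        def node(self):
            # Returned wrapped: ctx.node.attributes['key'] = 'value' is
            # persisted as a Parameter instead of mutating a raw dict.
            return self._node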

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/aria/orchestrator/context/operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/operation.py b/aria/orchestrator/context/operation.py
index 0ce790f..7c21351 100644
--- a/aria/orchestrator/context/operation.py
+++ b/aria/orchestrator/context/operation.py
@@ -21,10 +21,13 @@ import threading
 
 import aria
 from aria.utils import file
-from .common import BaseContext
+from . import (
+    common,
+    collection_instrumentation
+)
 
 
-class BaseOperationContext(BaseContext):
+class BaseOperationContext(common.BaseContext):
     """
     Context object used during operation creation and execution
     """
@@ -114,6 +117,7 @@ class NodeOperationContext(BaseOperationContext):
     """
 
     @property
+    @collection_instrumentation.instrument_collection('attributes')
     def node_template(self):
         """
         the node of the current operation
@@ -122,6 +126,7 @@ class NodeOperationContext(BaseOperationContext):
         return self.node.node_template
 
     @property
+    @collection_instrumentation.instrument_collection('attributes')
     def node(self):
         """
         The node instance of the current operation
@@ -136,6 +141,7 @@ class RelationshipOperationContext(BaseOperationContext):
     """
 
     @property
+    @collection_instrumentation.instrument_collection('attributes')
     def source_node_template(self):
         """
         The source node
@@ -144,6 +150,7 @@ class RelationshipOperationContext(BaseOperationContext):
         return self.source_node.node_template
 
     @property
+    @collection_instrumentation.instrument_collection('attributes')
     def source_node(self):
         """
         The source node instance
@@ -152,6 +159,7 @@ class RelationshipOperationContext(BaseOperationContext):
         return self.relationship.source_node
 
     @property
+    @collection_instrumentation.instrument_collection('attributes')
     def target_node_template(self):
         """
         The target node
@@ -160,6 +168,7 @@ class RelationshipOperationContext(BaseOperationContext):
         return self.target_node.node_template
 
     @property
+    @collection_instrumentation.instrument_collection('attributes')
     def target_node(self):
         """
         The target node instance

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/aria/orchestrator/context/toolbelt.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/toolbelt.py b/aria/orchestrator/context/toolbelt.py
index def7d42..5788ee7 100644
--- a/aria/orchestrator/context/toolbelt.py
+++ b/aria/orchestrator/context/toolbelt.py
@@ -34,7 +34,10 @@ class NodeToolBelt(object):
         """
         assert isinstance(self._op_context, operation.NodeOperationContext)
         host = self._op_context.node.host
-        return host.runtime_properties.get('ip')
+        ip = host.attributes.get('ip')
+        if ip:
+            return ip.value
+
 
 
 class RelationshipToolBelt(object):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/aria/orchestrator/execution_plugin/ctx_proxy/server.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/ctx_proxy/server.py b/aria/orchestrator/execution_plugin/ctx_proxy/server.py
index 1ce0e08..102ff9a 100644
--- a/aria/orchestrator/execution_plugin/ctx_proxy/server.py
+++ b/aria/orchestrator/execution_plugin/ctx_proxy/server.py
@@ -98,7 +98,6 @@ class CtxProxy(object):
                 quiet=True,
                 server=BottleServerAdapter)
         thread = threading.Thread(target=serve)
-        thread.daemon = True
         thread.start()
         return thread
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/aria/orchestrator/workflows/core/engine.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/engine.py b/aria/orchestrator/workflows/core/engine.py
index 561265c..3a96804 100644
--- a/aria/orchestrator/workflows/core/engine.py
+++ b/aria/orchestrator/workflows/core/engine.py
@@ -69,7 +69,6 @@ class Engine(logger.LoggerMixin):
             else:
                 events.on_success_workflow_signal.send(self._workflow_context)
         except BaseException as e:
-
             events.on_failure_workflow_signal.send(self._workflow_context, exception=e)
             raise
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/aria/orchestrator/workflows/executor/process.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py
index da6bbb2..f02e0a6 100644
--- a/aria/orchestrator/workflows/executor/process.py
+++ b/aria/orchestrator/workflows/executor/process.py
@@ -43,14 +43,12 @@ import jsonpickle
 
 import aria
 from aria.orchestrator.workflows.executor import base
-from aria.storage import instrumentation
 from aria.extension import process_executor
 from aria.utils import (
     imports,
     exceptions,
     process as process_utils
 )
-from aria.modeling import types as modeling_types
 
 
 _INT_FMT = 'I'
@@ -82,7 +80,6 @@ class ProcessExecutor(base.BaseExecutor):
             'started': self._handle_task_started_request,
             'succeeded': self._handle_task_succeeded_request,
             'failed': self._handle_task_failed_request,
-            'apply_tracked_changes': self._handle_apply_tracked_changes_request
         }
 
         # Server socket used to accept task status messages from subprocesses
@@ -196,41 +193,13 @@ class ProcessExecutor(base.BaseExecutor):
     def _handle_task_started_request(self, task_id, **kwargs):
         self._task_started(self._tasks[task_id])
 
-    def _handle_task_succeeded_request(self, task_id, request, **kwargs):
+    def _handle_task_succeeded_request(self, task_id, **kwargs):
         task = self._remove_task(task_id)
-        try:
-            self._apply_tracked_changes(task, request)
-        except BaseException as e:
-            e.message += UPDATE_TRACKED_CHANGES_FAILED_STR
-            self._task_failed(task, exception=e)
-        else:
-            self._task_succeeded(task)
+        self._task_succeeded(task)
 
     def _handle_task_failed_request(self, task_id, request, **kwargs):
         task = self._remove_task(task_id)
-        try:
-            self._apply_tracked_changes(task, request)
-        except BaseException as e:
-            e.message += 'Task failed due to {0}.'.format(request['exception']) + \
-                         UPDATE_TRACKED_CHANGES_FAILED_STR
-            self._task_failed(
-                task, exception=e, traceback=exceptions.get_exception_as_string(*sys.exc_info()))
-        else:
-            self._task_failed(task, exception=request['exception'], traceback=request['traceback'])
-
-    def _handle_apply_tracked_changes_request(self, task_id, request, response):
-        task = self._tasks[task_id]
-        try:
-            self._apply_tracked_changes(task, request)
-        except BaseException as e:
-            response['exception'] = exceptions.wrap_if_needed(e)
-
-    @staticmethod
-    def _apply_tracked_changes(task, request):
-        instrumentation.apply_tracked_changes(
-            tracked_changes=request['tracked_changes'],
-            new_instances=request['new_instances'],
-            model=task.context.model)
+        self._task_failed(task, exception=request['exception'], traceback=request['traceback'])
 
 
 def _send_message(connection, message):
@@ -278,28 +247,19 @@ class _Messenger(object):
         """Task started message"""
         self._send_message(type='started')
 
-    def succeeded(self, tracked_changes, new_instances):
+    def succeeded(self):
         """Task succeeded message"""
-        self._send_message(
-            type='succeeded', tracked_changes=tracked_changes, new_instances=new_instances)
+        self._send_message(type='succeeded')
 
-    def failed(self, tracked_changes, new_instances, exception):
+    def failed(self, exception):
         """Task failed message"""
-        self._send_message(type='failed',
-                           tracked_changes=tracked_changes,
-                           new_instances=new_instances,
-                           exception=exception)
-
-    def apply_tracked_changes(self, tracked_changes, new_instances):
-        self._send_message(type='apply_tracked_changes',
-                           tracked_changes=tracked_changes,
-                           new_instances=new_instances)
+        self._send_message(type='failed', exception=exception)
 
     def closed(self):
         """Executor closed message"""
         self._send_message(type='closed')
 
-    def _send_message(self, type, tracked_changes=None, new_instances=None, exception=None):
+    def _send_message(self, type, exception=None):
         sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         sock.connect(('localhost', self.port))
         try:
@@ -308,8 +268,6 @@ class _Messenger(object):
                 'task_id': self.task_id,
                 'exception': exceptions.wrap_if_needed(exception),
                 'traceback': exceptions.get_exception_as_string(*sys.exc_info()),
-                'tracked_changes': tracked_changes or {},
-                'new_instances': new_instances or {}
             })
             response = _recv_message(sock)
             response_exception = response.get('exception')
@@ -319,39 +277,6 @@ class _Messenger(object):
             sock.close()
 
 
-def _patch_ctx(ctx, messenger, instrument):
-    # model will be None only in tests that test the executor component directly
-    if not ctx.model:
-        return
-
-    # We arbitrarily select the ``node`` mapi to extract the session from it.
-    # could have been any other mapi just as well
-    session = ctx.model.node._session
-    original_refresh = session.refresh
-
-    def patched_refresh(target):
-        instrument.clear(target)
-        original_refresh(target)
-
-    def patched_commit():
-        messenger.apply_tracked_changes(instrument.tracked_changes, instrument.new_instances)
-        instrument.expunge_session()
-        instrument.clear()
-
-    def patched_rollback():
-        # Rollback is performed on parent process when commit fails
-        instrument.expunge_session()
-
-    # when autoflush is set to true (the default), refreshing an object will trigger
-    # an auto flush by sqlalchemy, this autoflush will attempt to commit changes made so
-    # far on the session. this is not the desired behavior in the subprocess
-    session.autoflush = False
-
-    session.commit = patched_commit
-    session.rollback = patched_rollback
-    session.refresh = patched_refresh
-
-
 def _main():
     arguments_json_path = sys.argv[1]
     with open(arguments_json_path) as f:
@@ -369,32 +294,24 @@ def _main():
     operation_inputs = arguments['operation_inputs']
     context_dict = arguments['context']
 
-    # This is required for the instrumentation work properly.
-    # See docstring of `remove_mutable_association_listener` for further details
-    modeling_types.remove_mutable_association_listener()
     try:
         ctx = context_dict['context_cls'].instantiate_from_dict(**context_dict['context'])
     except BaseException as e:
-        messenger.failed(exception=e, tracked_changes=None, new_instances=None)
+        messenger.failed(e)
         return
 
-    with instrumentation.track_changes(ctx.model) as instrument:
-        try:
-            messenger.started()
-            _patch_ctx(ctx=ctx, messenger=messenger, instrument=instrument)
-            task_func = imports.load_attribute(implementation)
-            aria.install_aria_extensions()
-            for decorate in process_executor.decorate():
-                task_func = decorate(task_func)
-            task_func(ctx=ctx, **operation_inputs)
-            messenger.succeeded(tracked_changes=instrument.tracked_changes,
-                                new_instances=instrument.new_instances)
-        except BaseException as e:
-            messenger.failed(exception=e,
-                             tracked_changes=instrument.tracked_changes,
-                             new_instances=instrument.new_instances)
-        finally:
-            instrument.expunge_session()
+    try:
+        messenger.started()
+        task_func = imports.load_attribute(implementation)
+        aria.install_aria_extensions()
+        for decorate in process_executor.decorate():
+            task_func = decorate(task_func)
+        task_func(ctx=ctx, **operation_inputs)
+        ctx.close()
+        messenger.succeeded()
+    except BaseException as e:
+        ctx.close()
+        messenger.failed(e)
 
 if __name__ == '__main__':
     _main()
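
With change tracking gone, the status message a subprocess sends back to the
executor reduces to the four fields assembled in _send_message above.
Roughly (values illustrative):

    message = {
        'type': 'succeeded',  # one of: started / succeeded / failed / closed
        'task_id': 'task-1',  # example id
        'exception': None,    # wrapped exception when type is 'failed'
        'traceback': None,    # exception string when type is 'failed'
    }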

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/aria/storage/instrumentation.py
----------------------------------------------------------------------
diff --git a/aria/storage/instrumentation.py b/aria/storage/instrumentation.py
deleted file mode 100644
index 390f933..0000000
--- a/aria/storage/instrumentation.py
+++ /dev/null
@@ -1,282 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import copy
-import json
-import os
-
-import sqlalchemy.event
-
-from ..modeling import models as _models
-from ..storage.exceptions import StorageError
-
-
-_VERSION_ID_COL = 'version'
-_STUB = object()
-_INSTRUMENTED = {
-    'modified': {
-        _models.Node.runtime_properties: dict,
-        _models.Node.state: str,
-        _models.Task.status: str,
-    },
-    'new': (_models.Log, )
-
-}
-
-_NEW_INSTANCE = 'NEW_INSTANCE'
-
-
-def track_changes(model=None, instrumented=None):
-    """Track changes in the specified model columns
-
-    This call will register event listeners using sqlalchemy's event mechanism. The listeners
-    instrument all returned objects such that the attributes specified in ``instrumented``, will
-    be replaced with a value that is stored in the returned instrumentation context
-    ``tracked_changes`` property.
-
-    Why should this be implemented when sqlalchemy already does a fantastic job at tracking changes
-    you ask? Well, when sqlalchemy is used with sqlite, due to how sqlite works, only one process
-    can hold a write lock to the database. This does not work well when ARIA runs tasks in
-    subprocesses (by the process executor) and these tasks wish to change some state as well. These
-    tasks certainly deserve a chance to do so!
-
-    To enable this, the subprocess calls ``track_changes()`` before any state changes are made.
-    At the end of the subprocess execution, it should return the ``tracked_changes`` attribute of
-    the instrumentation context returned from this call, to the parent process. The parent process
-    will then call ``apply_tracked_changes()`` that resides in this module as well.
-    At that point, the changes will actually be written back to the database.
-
-    :param model: the model storage. it should hold a mapi for each model. the session of each mapi
-    is needed to setup events
-    :param instrumented: A dict from model columns to their python native type
-    :return: The instrumentation context
-    """
-    return _Instrumentation(model, instrumented or _INSTRUMENTED)
-
-
-class _Instrumentation(object):
-
-    def __init__(self, model, instrumented):
-        self.tracked_changes = {}
-        self.new_instances = {}
-        self.listeners = []
-        self._instances_to_expunge = []
-        self._model = model
-        self._track_changes(instrumented)
-
-    @property
-    def _new_instance_id(self):
-        return '{prefix}_{index}'.format(prefix=_NEW_INSTANCE,
-                                         index=len(self._instances_to_expunge))
-
-    def expunge_session(self):
-        for new_instance in self._instances_to_expunge:
-            self._get_session_from_model(new_instance.__tablename__).expunge(new_instance)
-
-    def _get_session_from_model(self, tablename):
-        mapi = getattr(self._model, tablename, None)
-        if mapi:
-            return mapi._session
-        raise StorageError("Could not retrieve session for {0}".format(tablename))
-
-    def _track_changes(self, instrumented):
-        instrumented_attribute_classes = {}
-        # Track any newly-set attributes.
-        for instrumented_attribute, attribute_type in instrumented.get('modified', {}).items():
-            self._register_set_attribute_listener(
-                instrumented_attribute=instrumented_attribute,
-                attribute_type=attribute_type)
-            instrumented_class = instrumented_attribute.parent.entity
-            instrumented_class_attributes = instrumented_attribute_classes.setdefault(
-                instrumented_class, {})
-            instrumented_class_attributes[instrumented_attribute.key] = attribute_type
-
-        # Track any global instance update such as 'refresh' or 'load'
-        for instrumented_class, instrumented_attributes in instrumented_attribute_classes.items():
-            self._register_instance_listeners(instrumented_class=instrumented_class,
-                                              instrumented_attributes=instrumented_attributes)
-
-        # Track any newly created instances.
-        for instrumented_class in instrumented.get('new', {}):
-            self._register_new_instance_listener(instrumented_class)
-
-    def _register_new_instance_listener(self, instrumented_class):
-        if self._model is None:
-            raise StorageError("In order to keep track of new instances, a ctx is needed")
-
-        def listener(_, instance):
-            if not isinstance(instance, instrumented_class):
-                return
-            self._instances_to_expunge.append(instance)
-            tracked_instances = self.new_instances.setdefault(instance.__modelname__, {})
-            tracked_attributes = tracked_instances.setdefault(self._new_instance_id, {})
-            instance_as_dict = instance.to_dict()
-            instance_as_dict.update((k, getattr(instance, k))
-                                    for k in getattr(instance, '__private_fields__', []))
-            tracked_attributes.update(instance_as_dict)
-        session = self._get_session_from_model(instrumented_class.__tablename__)
-        listener_args = (session, 'after_attach', listener)
-        sqlalchemy.event.listen(*listener_args)
-        self.listeners.append(listener_args)
-
-    def _register_set_attribute_listener(self, instrumented_attribute, attribute_type):
-        def listener(target, value, *_):
-            mapi_name = target.__modelname__
-            tracked_instances = self.tracked_changes.setdefault(mapi_name, {})
-            tracked_attributes = tracked_instances.setdefault(target.id, {})
-            if value is None:
-                current = None
-            else:
-                current = copy.deepcopy(attribute_type(value))
-            tracked_attributes[instrumented_attribute.key] = _Value(_STUB, current)
-            return current
-        listener_args = (instrumented_attribute, 'set', listener)
-        sqlalchemy.event.listen(*listener_args, retval=True)
-        self.listeners.append(listener_args)
-
-    def _register_instance_listeners(self, instrumented_class, instrumented_attributes):
-        def listener(target, *_):
-            mapi_name = instrumented_class.__modelname__
-            tracked_instances = self.tracked_changes.setdefault(mapi_name, {})
-            tracked_attributes = tracked_instances.setdefault(target.id, {})
-            if hasattr(target, _VERSION_ID_COL):
-                # We want to keep track of the initial version id so it can be compared
-                # with the committed version id when the tracked changes are applied
-                tracked_attributes.setdefault(_VERSION_ID_COL,
-                                              _Value(_STUB, getattr(target, _VERSION_ID_COL)))
-            for attribute_name, attribute_type in instrumented_attributes.items():
-                if attribute_name not in tracked_attributes:
-                    initial = getattr(target, attribute_name)
-                    if initial is None:
-                        current = None
-                    else:
-                        current = copy.deepcopy(attribute_type(initial))
-                    tracked_attributes[attribute_name] = _Value(initial, current)
-                target.__dict__[attribute_name] = tracked_attributes[attribute_name].current
-        for listener_args in ((instrumented_class, 'load', listener),
-                              (instrumented_class, 'refresh', listener),
-                              (instrumented_class, 'refresh_flush', listener)):
-            sqlalchemy.event.listen(*listener_args)
-            self.listeners.append(listener_args)
-
-    def clear(self, target=None):
-        if target:
-            mapi_name = target.__modelname__
-            tracked_instances = self.tracked_changes.setdefault(mapi_name, {})
-            tracked_instances.pop(target.id, None)
-        else:
-            self.tracked_changes.clear()
-
-        self.new_instances.clear()
-        self._instances_to_expunge = []
-
-    def restore(self):
-        """Remove all listeners registered by this instrumentation"""
-        for listener_args in self.listeners:
-            if sqlalchemy.event.contains(*listener_args):
-                sqlalchemy.event.remove(*listener_args)
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self.restore()
-
-
-class _Value(object):
-    # You may wonder why is this a full blown class and not a named tuple. The reason is that
-    # jsonpickle that is used to serialize the tracked_changes, does not handle named tuples very
-    # well. At the very least, I could not get it to behave.
-
-    def __init__(self, initial, current):
-        self.initial = initial
-        self.current = current
-
-    def __eq__(self, other):
-        if not isinstance(other, _Value):
-            return False
-        return self.initial == other.initial and self.current == other.current
-
-    def __hash__(self):
-        return hash((self.initial, self.current))
-
-    @property
-    def dict(self):
-        return {'initial': self.initial, 'current': self.current}.copy()
-
-
-def apply_tracked_changes(tracked_changes, new_instances, model):
-    """Write tracked changes back to the database using provided model storage
-
-    :param tracked_changes: The ``tracked_changes`` attribute of the instrumentation context
-                            returned by calling ``track_changes()``
-    :param model: The model storage used to actually apply the changes
-    """
-    successfully_updated_changes = dict()
-    try:
-        # handle instance updates
-        for mapi_name, tracked_instances in tracked_changes.items():
-            successfully_updated_changes[mapi_name] = dict()
-            mapi = getattr(model, mapi_name)
-            for instance_id, tracked_attributes in tracked_instances.items():
-                successfully_updated_changes[mapi_name][instance_id] = dict()
-                instance = None
-                for attribute_name, value in tracked_attributes.items():
-                    if value.initial != value.current:
-                        instance = instance or mapi.get(instance_id)
-                        setattr(instance, attribute_name, value.current)
-                if instance:
-                    _validate_version_id(instance, mapi)
-                    mapi.update(instance)
-                    successfully_updated_changes[mapi_name][instance_id] = [
-                        v.dict for v in tracked_attributes.values()]
-
-        # Handle new instances
-        for mapi_name, new_instance in new_instances.items():
-            successfully_updated_changes[mapi_name] = dict()
-            mapi = getattr(model, mapi_name)
-            for new_instance_kwargs in new_instance.values():
-                instance = mapi.model_cls(**new_instance_kwargs)
-                mapi.put(instance)
-                successfully_updated_changes[mapi_name][instance.id] = new_instance_kwargs
-    except BaseException:
-        for key, value in successfully_updated_changes.items():
-            if not value:
-                del successfully_updated_changes[key]
-        # TODO: if the successful has _STUB, the logging fails because it can't serialize the object
-        model.logger.error(
-            'Registering all the changes to the storage has failed. {0}'
-            'The successful updates were: {0} '
-            '{1}'.format(os.linesep, json.dumps(successfully_updated_changes, indent=4)))
-
-        raise
-
-
-def _validate_version_id(instance, mapi):
-    version_id = sqlalchemy.inspect(instance).committed_state.get(_VERSION_ID_COL)
-    # There are two version conflict code paths:
-    # 1. The instance committed state loaded already holds a newer version,
-    #    in this case, we manually raise the error
-    # 2. The UPDATE statement is executed with version validation and sqlalchemy
-    #    will raise a StateDataError if there is a version mismatch.
-    if version_id and getattr(instance, _VERSION_ID_COL) != version_id:
-        object_version_id = getattr(instance, _VERSION_ID_COL)
-        mapi._session.rollback()
-        raise StorageError(
-            'Version conflict: committed and object {0} differ '
-            '[committed {0}={1}, object {0}={2}]'
-            .format(_VERSION_ID_COL,
-                    version_id,
-                    object_version_id))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/tests/helpers.py
----------------------------------------------------------------------
diff --git a/tests/helpers.py b/tests/helpers.py
index 3c3efc9..4c3194b 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -47,6 +47,9 @@ class FilesystemDataHolder(object):
         with open(self._path, 'w') as f:
             return json.dump(value, f)
 
+    def __contains__(self, item):
+        return item in self._load()
+
     def __setitem__(self, key, value):
         dict_ = self._load()
         dict_[key] = value
@@ -67,6 +70,13 @@ class FilesystemDataHolder(object):
         self._dump(dict_)
         return return_value
 
+    def update(self, dict_=None, **kwargs):
+        current_dict = self._load()
+        if dict_:
+            current_dict.update(dict_)
+        current_dict.update(**kwargs)
+        self._dump(current_dict)
+
     @property
     def path(self):
         return self._path
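
With __contains__ and update() added, FilesystemDataHolder behaves like a small dict persisted to a JSON file, which is what the new attribute-consumption tests below lean on. A quick usage sketch (the path is illustrative):

    from tests.helpers import FilesystemDataHolder

    holder = FilesystemDataHolder('/tmp/holder.json')  # illustrative path
    holder['invocations'] = 1                          # each access loads and re-dumps the file
    holder.update({'key': 'value'}, key2='value2')     # merged into the backing JSON
    assert 'key' in holder and 'key2' in holder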

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/tests/mock/models.py
----------------------------------------------------------------------
diff --git a/tests/mock/models.py b/tests/mock/models.py
index f066551..98703d5 100644
--- a/tests/mock/models.py
+++ b/tests/mock/models.py
@@ -120,7 +120,7 @@ def create_node_with_dependencies(include_attribute=False):
     node_template.service_template.services[0] = create_service(node_template.service_template)
     node = create_node(node_template, node_template.service_template.services[0])
     if include_attribute:
-        node.runtime_properties = {'attribute1': 'value1'}
+        node.attributes['attribute1'] = models.Parameter.wrap('attribute1', 'value1')               # pylint: disable=unsubscriptable-object
     return node
 
 
@@ -184,13 +184,10 @@ def create_dependent_node_template(
     )
 
 
-def create_node(dependency_node_template, service, name=NODE_NAME, state=models.Node.INITIAL,
-                runtime_properties=None):
-    runtime_properties = runtime_properties or {}
+def create_node(dependency_node_template, service, name=NODE_NAME, state=models.Node.INITIAL):
     node = models.Node(
         name=name,
         type=dependency_node_template.type,
-        runtime_properties=runtime_properties,
         version=None,
         node_template=dependency_node_template,
         state=state,
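
The recurring pattern in these conversions: node attributes are no longer a bare dict, so plain values go in wrapped as Parameter models and come back out through .value. In outline:

    from aria.modeling.models import Parameter

    param = Parameter.wrap('attribute1', 'value1')  # wrap a plain value in a Parameter
    assert param.value == 'value1'                  # read it back via .value
    node.attributes['attribute1'] = param           # node as built by create_node() above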

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/tests/modeling/test_mixins.py
----------------------------------------------------------------------
diff --git a/tests/modeling/test_mixins.py b/tests/modeling/test_mixins.py
index a18a04e..2c91a4b 100644
--- a/tests/modeling/test_mixins.py
+++ b/tests/modeling/test_mixins.py
@@ -121,7 +121,6 @@ def test_relationship_model_ordering(context):
     new_node = modeling.models.Node(
         name='new_node',
         type=source_node.type,
-        runtime_properties={},
         service=service,
         version=None,
         node_template=new_node_template,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/tests/modeling/test_models.py
----------------------------------------------------------------------
diff --git a/tests/modeling/test_models.py b/tests/modeling/test_models.py
index 61034bd..57511dd 100644
--- a/tests/modeling/test_models.py
+++ b/tests/modeling/test_models.py
@@ -538,22 +538,20 @@ class TestNodeTemplate(object):
 
 class TestNode(object):
     @pytest.mark.parametrize(
-        'is_valid, name, runtime_properties, state, version',
+        'is_valid, name, state, version',
         [
-            (False, m_cls, {}, 'state', 1),
-            (False, 'name', m_cls, 'state', 1),
-            (False, 'name', {}, 'state', 1),
-            (False, 'name', {}, m_cls, 1),
-            (False, m_cls, {}, 'state', m_cls),
-
-            (True, 'name', {}, 'initial', 1),
-            (True, None, {}, 'initial', 1),
-            (True, 'name', None, 'initial', 1),
-            (True, 'name', {}, 'initial', None),
+            (False, m_cls, 'state', 1),
+            (False, 'name', 'state', 1),
+            (False, 'name', m_cls, 1),
+            (False, m_cls, 'state', m_cls),
+
+            (True, 'name', 'initial', 1),
+            (True, None, 'initial', 1),
+            (True, 'name', 'initial', 1),
+            (True, 'name', 'initial', None),
         ]
     )
-    def test_node_model_creation(self, node_template_storage, is_valid, name, runtime_properties,
-                                 state, version):
+    def test_node_model_creation(self, node_template_storage, is_valid, name, state, version):
         node = _test_model(
             is_valid=is_valid,
             storage=node_template_storage,
@@ -562,7 +560,6 @@ class TestNode(object):
                 node_template=node_template_storage.node_template.list()[0],
                 type=node_template_storage.type.list()[0],
                 name=name,
-                runtime_properties=runtime_properties,
                 state=state,
                 version=version,
                 service=node_template_storage.service.list()[0]
@@ -635,7 +632,6 @@ class TestNodeHostAddress(object):
             name='node',
             node_template=node_template,
             type=storage.type.list()[0],
-            runtime_properties={},
             state='initial',
             service=storage.service.list()[0]
         )
@@ -644,7 +640,7 @@ class TestNodeHostAddress(object):
             if host_address is not None:
                 host_address = host_address.value
         if host_address:
-            kwargs['runtime_properties']['ip'] = host_address
+            kwargs.setdefault('attributes', {})['ip'] = Parameter.wrap('ip', host_address)
         if is_host:
             kwargs['host_fk'] = 1
         elif host_fk:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/tests/orchestrator/context/test_collection_instrumentation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_collection_instrumentation.py b/tests/orchestrator/context/test_collection_instrumentation.py
new file mode 100644
index 0000000..3ee5a44
--- /dev/null
+++ b/tests/orchestrator/context/test_collection_instrumentation.py
@@ -0,0 +1,253 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from aria.modeling.models import Parameter
+from aria.orchestrator.context import collection_instrumentation
+
+
+class MockActor(object):
+    def __init__(self):
+        self.dict_ = {}
+        self.list_ = []
+
+
+class MockModel(object):
+
+    def __init__(self):
+        self.parameter = type('MockModel', (object, ), {'model_cls': Parameter,
+                                                        'put': lambda *args, **kwargs: None,
+                                                        'update': lambda *args, **kwargs: None})()
+
+
+class CollectionInstrumentation(object):
+
+    @pytest.fixture
+    def actor(self):
+        return MockActor()
+
+    @pytest.fixture
+    def model(self):
+        return MockModel()
+
+    @pytest.fixture
+    def dict_(self, actor, model):
+        return collection_instrumentation._InstrumentedDict(model, actor, 'dict_')
+
+    @pytest.fixture
+    def list_(self, actor, model):
+        return collection_instrumentation._InstrumentedList(model, actor, 'list_')
+
+
+class TestDict(CollectionInstrumentation):
+
+    def test_keys(self, actor, dict_):
+        dict_.update(
+            {
+                'key1': Parameter.wrap('key1', 'value1'),
+                'key2': Parameter.wrap('key2', 'value2')
+            }
+        )
+        assert sorted(dict_.keys()) == sorted(['key1', 'key2']) == sorted(actor.dict_.keys())
+
+    def test_values(self, actor, dict_):
+        dict_.update({
+            'key1': Parameter.wrap('key1', 'value1'),
+            'key2': Parameter.wrap('key2', 'value2')
+        })
+        assert (sorted(dict_.values()) ==
+                sorted(['value1', 'value2']) ==
+                sorted(v.value for v in actor.dict_.values()))
+
+    def test_items(self, dict_):
+        dict_.update({
+            'key1': Parameter.wrap('key1', 'value1'),
+            'key2': Parameter.wrap('key2', 'value2')
+        })
+        assert sorted(dict_.items()) == sorted([('key1', 'value1'), ('key2', 'value2')])
+
+    def test_iter(self, actor, dict_):
+        dict_.update({
+            'key1': Parameter.wrap('key1', 'value1'),
+            'key2': Parameter.wrap('key2', 'value2')
+        })
+        assert sorted(list(dict_)) == sorted(['key1', 'key2']) == sorted(actor.dict_.keys())
+
+    def test_bool(self, dict_):
+        assert not dict_
+        dict_.update({
+            'key1': Parameter.wrap('key1', 'value1'),
+            'key2': Parameter.wrap('key2', 'value2')
+        })
+        assert dict_
+
+    def test_set_item(self, actor, dict_):
+        dict_['key1'] = Parameter.wrap('key1', 'value1')
+        assert dict_['key1'] == 'value1' == actor.dict_['key1'].value
+        assert isinstance(actor.dict_['key1'], Parameter)
+
+    def test_nested(self, actor, dict_):
+        dict_['key'] = {}
+        assert isinstance(actor.dict_['key'], Parameter)
+        assert dict_['key'] == actor.dict_['key'].value == {}
+
+        dict_['key']['inner_key'] = 'value'
+
+        assert len(dict_) == 1
+        assert 'inner_key' in dict_['key']
+        assert dict_['key']['inner_key'] == 'value'
+        assert dict_['key'].keys() == ['inner_key']
+        assert dict_['key'].values() == ['value']
+        assert dict_['key'].items() == [('inner_key', 'value')]
+        assert isinstance(actor.dict_['key'], Parameter)
+        assert isinstance(dict_['key'], collection_instrumentation._InstrumentedDict)
+
+        dict_['key'].update({'updated_key': 'updated_value'})
+        assert len(dict_) == 1
+        assert 'updated_key' in dict_['key']
+        assert dict_['key']['updated_key'] == 'updated_value'
+        assert sorted(dict_['key'].keys()) == sorted(['inner_key', 'updated_key'])
+        assert sorted(dict_['key'].values()) == sorted(['value', 'updated_value'])
+        assert sorted(dict_['key'].items()) == sorted([('inner_key', 'value'),
+                                                       ('updated_key', 'updated_value')])
+        assert isinstance(actor.dict_['key'], Parameter)
+        assert isinstance(dict_['key'], collection_instrumentation._InstrumentedDict)
+
+        dict_.update({'key': 'override_value'})
+        assert len(dict_) == 1
+        assert 'key' in dict_
+        assert dict_['key'] == 'override_value'
+        assert len(actor.dict_) == 1
+        assert isinstance(actor.dict_['key'], Parameter)
+        assert actor.dict_['key'].value == 'override_value'
+
+    def test_get_item(self, actor, dict_):
+        dict_['key1'] = Parameter.wrap('key1', 'value1')
+        assert isinstance(actor.dict_['key1'], Parameter)
+
+    def test_update(self, actor, dict_):
+        dict_['key1'] = 'value1'
+
+        new_dict = {'key2': 'value2'}
+        dict_.update(new_dict)
+        assert len(dict_) == 2
+        assert dict_['key2'] == 'value2'
+        assert isinstance(actor.dict_['key2'], Parameter)
+
+        new_dict = {}
+        new_dict.update(dict_)
+        assert new_dict['key1'] == dict_['key1']
+
+    def test_copy(self, dict_):
+        dict_['key1'] = 'value1'
+
+        new_dict = dict_.copy()
+        assert new_dict is not dict_
+        assert new_dict == dict_
+
+        dict_['key1'] = 'value2'
+        assert new_dict['key1'] == 'value1'
+        assert dict_['key1'] == 'value2'
+
+    def test_clear(self, dict_):
+        dict_['key1'] = 'value1'
+        dict_.clear()
+
+        assert len(dict_) == 0
+
+
+class TestList(CollectionInstrumentation):
+
+    def test_append(self, actor, list_):
+        list_.append(Parameter.wrap('name', 'value1'))
+        list_.append('value2')
+        assert len(actor.list_) == 2
+        assert len(list_) == 2
+        assert isinstance(actor.list_[0], Parameter)
+        assert list_[0] == 'value1'
+
+        assert isinstance(actor.list_[1], Parameter)
+        assert list_[1] == 'value2'
+
+        list_[0] = 'new_value1'
+        list_[1] = 'new_value2'
+        assert isinstance(actor.list_[0], Parameter)
+        assert isinstance(actor.list_[1], Parameter)
+        assert list_[0] == 'new_value1'
+        assert list_[1] == 'new_value2'
+
+    def test_iter(self, list_):
+        list_.append('value1')
+        list_.append('value2')
+        assert sorted(list_) == sorted(['value1', 'value2'])
+
+    def test_insert(self, actor, list_):
+        list_.append('value1')
+        list_.insert(0, 'value2')
+        list_.insert(2, 'value3')
+        list_.insert(10, 'value4')
+        assert sorted(list_) == sorted(['value1', 'value2', 'value3', 'value4'])
+        assert len(actor.list_) == 4
+
+    def test_set(self, list_):
+        list_.append('value1')
+        list_.append('value2')
+
+        list_[1] = 'value3'
+        assert len(list_) == 2
+        assert sorted(list_) == sorted(['value1', 'value3'])
+
+    def test_insert_into_nested(self, actor, list_):
+        list_.append([])
+
+        list_[0].append('inner_item')
+        assert isinstance(actor.list_[0], Parameter)
+        assert len(list_) == 1
+        assert list_[0][0] == 'inner_item'
+
+        list_[0].append('new_item')
+        assert isinstance(actor.list_[0], Parameter)
+        assert len(list_) == 1
+        assert list_[0][1] == 'new_item'
+
+        assert list_[0] == ['inner_item', 'new_item']
+        assert ['inner_item', 'new_item'] == list_[0]
+
+
+class TestDictList(CollectionInstrumentation):
+    def test_dict_in_list(self, actor, list_):
+        list_.append({})
+        assert len(list_) == 1
+        assert isinstance(actor.list_[0], Parameter)
+        assert actor.list_[0].value == {}
+
+        list_[0]['key'] = 'value'
+        assert list_[0]['key'] == 'value'
+        assert len(actor.list_) == 1
+        assert isinstance(actor.list_[0], Parameter)
+        assert actor.list_[0].value['key'] == 'value'
+
+    def test_list_in_dict(self, actor, dict_):
+        dict_['key'] = []
+        assert len(dict_) == 1
+        assert isinstance(actor.dict_['key'], Parameter)
+        assert actor.dict_['key'].value == []
+
+        dict_['key'].append('value')
+        assert dict_['key'][0] == 'value'
+        assert len(actor.dict_) == 1
+        assert isinstance(actor.dict_['key'], Parameter)
+        assert actor.dict_['key'].value[0] == 'value'
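
What the new module pins down, condensed to one round trip: an _InstrumentedDict presents plain values to the caller while transparently storing Parameter models on the actor:

    from aria.modeling.models import Parameter
    from aria.orchestrator.context import collection_instrumentation

    actor, model = MockActor(), MockModel()  # the mocks defined at the top of this module
    dict_ = collection_instrumentation._InstrumentedDict(model, actor, 'dict_')

    dict_['key'] = 'value'                            # write a plain value...
    assert isinstance(actor.dict_['key'], Parameter)  # ...stored as a Parameter on the actor
    assert dict_['key'] == 'value'                    # ...and read back unwrapped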

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index cdeb5fa..5d193bc 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -343,6 +343,74 @@ def test_relationship_operation_logging(ctx, executor):
     _assert_loggins(ctx, inputs)
 
 
+def test_attribute_consumption(ctx, executor, dataholder):
+    # region Updating node operation
+    node_int_name, node_op_name = mock.operations.NODE_OPERATIONS_INSTALL[0]
+
+    source_node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
+
+    inputs = {'dict_': {'key': 'value'},
+              'set_test_dict': {'key2': 'value2'}}
+    interface = mock.models.create_interface(
+        source_node.service,
+        node_int_name,
+        node_op_name,
+        operation_kwargs=dict(
+            implementation=op_path(attribute_altering_operation, module_path=__name__),
+            inputs=inputs)
+    )
+    source_node.interfaces[interface.name] = interface
+    ctx.model.node.update(source_node)
+    # endregion
+
+    # region updating relationship operation
+    rel_int_name, rel_op_name = mock.operations.RELATIONSHIP_OPERATIONS_INSTALL[2]
+
+    relationship = ctx.model.relationship.list()[0]
+    interface = mock.models.create_interface(
+        relationship.source_node.service,
+        rel_int_name,
+        rel_op_name,
+        operation_kwargs=dict(
+            implementation=op_path(attribute_consuming_operation, module_path=__name__),
+            inputs={'holder_path': dataholder.path}
+        )
+    )
+    relationship.interfaces[interface.name] = interface
+    ctx.model.relationship.update(relationship)
+    # endregion
+
+    @workflow
+    def basic_workflow(graph, **_):
+        graph.sequence(
+            api.task.OperationTask(
+                source_node,
+                interface_name=node_int_name,
+                operation_name=node_op_name,
+                inputs=inputs
+            ),
+            api.task.OperationTask(
+                relationship,
+                interface_name=rel_int_name,
+                operation_name=rel_op_name,
+            )
+        )
+
+    execute(workflow_func=basic_workflow, workflow_context=ctx, executor=executor)
+    target_node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+
+    assert len(source_node.attributes) == len(target_node.attributes) == 2
+    assert source_node.attributes['key'] != target_node.attributes['key']
+    assert source_node.attributes['key'].value == \
+           target_node.attributes['key'].value == \
+           dataholder['key'] == 'value'
+
+    assert source_node.attributes['key2'] != target_node.attributes['key2']
+    assert source_node.attributes['key2'].value == \
+           target_node.attributes['key2'].value == \
+           dataholder['key2'] == 'value2'
+
+
 def _assert_loggins(ctx, inputs):
 
     # The logs should contain the following: Workflow Start, Operation Start, custom operation
@@ -377,10 +445,10 @@ def _assert_loggins(ctx, inputs):
 
 @operation
 def logged_operation(ctx, **_):
-    ctx.logger.info(ctx.task.inputs['op_start'])
+    ctx.logger.info(ctx.task.inputs['op_start'].value)
     # sleep so the created_at fields of the two log entries can be compared reliably
     time.sleep(1)
-    ctx.logger.debug(ctx.task.inputs['op_end'])
+    ctx.logger.debug(ctx.task.inputs['op_end'].value)
 
 
 @operation
@@ -422,3 +490,21 @@ def get_node_id(ctx, holder_path, **_):
 def _test_plugin_workdir(ctx, filename, content):
     with open(os.path.join(ctx.plugin_workdir, filename), 'w') as f:
         f.write(content)
+
+
+@operation
+def attribute_altering_operation(ctx, dict_, set_test_dict, **_):
+    ctx.node.attributes.update(dict_)
+
+    for key, value in set_test_dict.items():
+        ctx.node.attributes[key] = value
+
+
+@operation
+def attribute_consuming_operation(ctx, holder_path, **_):
+    holder = helpers.FilesystemDataHolder(holder_path)
+    ctx.target_node.attributes.update(ctx.source_node.attributes)
+    holder.update(**ctx.target_node.attributes)
+
+    ctx.target_node.attributes['key2'] = ctx.source_node.attributes['key2']
+    holder['key2'] = ctx.target_node.attributes['key2']

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index d199954..fc34907 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -16,6 +16,7 @@
 import pytest
 
 from aria import workflow, operation
+from aria.modeling import models
 from aria.orchestrator import context
 from aria.orchestrator.workflows import api
 from aria.orchestrator.workflows.executor import thread
@@ -93,7 +94,7 @@ def test_host_ip(workflow_context, executor, dataholder):
         operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__), inputs=inputs)
     )
     dependency_node.interfaces[interface.name] = interface
-    dependency_node.runtime_properties['ip'] = '1.1.1.1'
+    dependency_node.attributes['ip'] = models.Parameter.wrap('ip', '1.1.1.1')
 
     workflow_context.model.node.update(dependency_node)
 
@@ -110,7 +111,7 @@ def test_host_ip(workflow_context, executor, dataholder):
 
     execute(workflow_func=basic_workflow, workflow_context=workflow_context, executor=executor)
 
-    assert dataholder.get('host_ip') == dependency_node.runtime_properties.get('ip')
+    assert dataholder.get('host_ip') == dependency_node.attributes.get('ip').value
 
 
 def test_relationship_tool_belt(workflow_context, executor, dataholder):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/tests/orchestrator/execution_plugin/test_local.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_local.py b/tests/orchestrator/execution_plugin/test_local.py
index 09d0499..d9115e1 100644
--- a/tests/orchestrator/execution_plugin/test_local.py
+++ b/tests/orchestrator/execution_plugin/test_local.py
@@ -43,26 +43,26 @@ class TestLocalRunScript(object):
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node runtime-properties map.key value
+            ctx node attributes map.key value
             ''',
             windows_script='''
-            ctx node runtime-properties map.key value
+            ctx node attributes map.key value
         ''')
         props = self._run(
             executor, workflow_context,
             script_path=script_path)
-        assert props['map']['key'] == 'value'
+        assert props['map'].value['key'] == 'value'
 
     def test_process_env(self, executor, workflow_context, tmpdir):
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node runtime-properties map.key1 $key1
-            ctx node runtime-properties map.key2 $key2
+            ctx node attributes map.key1 $key1
+            ctx node attributes map.key2 $key2
             ''',
             windows_script='''
-            ctx node runtime-properties map.key1 %key1%
-            ctx node runtime-properties map.key2 %key2%
+            ctx node attributes map.key1 %key1%
+            ctx node attributes map.key2 %key2%
         ''')
         props = self._run(
             executor, workflow_context,
@@ -73,7 +73,7 @@ class TestLocalRunScript(object):
                     'key2': 'value2'
                 }
             })
-        p_map = props['map']
+        p_map = props['map'].value
         assert p_map['key1'] == 'value1'
         assert p_map['key2'] == 'value2'
 
@@ -81,10 +81,10 @@ class TestLocalRunScript(object):
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node runtime-properties map.cwd $PWD
+            ctx node attributes map.cwd $PWD
             ''',
             windows_script='''
-            ctx node runtime-properties map.cwd %CD%
+            ctx node attributes map.cwd %CD%
             ''')
         tmpdir = str(tmpdir)
         props = self._run(
@@ -93,11 +93,11 @@ class TestLocalRunScript(object):
             process={
                 'cwd': tmpdir
             })
-        p_map = props['map']
+        p_map = props['map'].value
         assert p_map['cwd'] == tmpdir
 
     def test_process_command_prefix(self, executor, workflow_context, tmpdir):
-        use_ctx = 'ctx node runtime-properties map.key value'
+        use_ctx = 'ctx node attributes map.key value'
         python_script = ['import subprocess',
                          'subprocess.Popen("{0}".split(' ')).communicate()[0]'.format(use_ctx)]
         python_script = '\n'.join(python_script)
@@ -114,19 +114,19 @@ class TestLocalRunScript(object):
                 'env': {'TEST_KEY': 'value'},
                 'command_prefix': 'python'
             })
-        p_map = props['map']
+        p_map = props['map'].value
         assert p_map['key'] == 'value'
 
     def test_process_args(self, executor, workflow_context, tmpdir):
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node runtime-properties map.arg1 "$1"
-            ctx node runtime-properties map.arg2 $2
+            ctx node attributes map.arg1 "$1"
+            ctx node attributes map.arg2 $2
             ''',
             windows_script='''
-            ctx node runtime-properties map.arg1 %1
-            ctx node runtime-properties map.arg2 %2
+            ctx node attributes map.arg1 %1
+            ctx node attributes map.arg2 %2
             ''')
         props = self._run(
             executor, workflow_context,
@@ -134,8 +134,8 @@ class TestLocalRunScript(object):
             process={
                 'args': ['"arg with spaces"', 'arg2']
             })
-        assert props['map']['arg1'] == 'arg with spaces'
-        assert props['map']['arg2'] == 'arg2'
+        assert props['map'].value['arg1'] == 'arg with spaces'
+        assert props['map'].value['arg2'] == 'arg2'
 
     def test_no_script_path(self, executor, workflow_context):
         exception = self._run_and_get_task_exception(
@@ -187,7 +187,7 @@ class TestLocalRunScript(object):
         script = '''
 from aria.orchestrator.execution_plugin import ctx, inputs
 if __name__ == '__main__':
-    ctx.node.runtime_properties['key'] = inputs['key']
+    ctx.node.attributes['key'] = inputs['key']
 '''
         suffix = '.py'
         script_path = self._create_script(
@@ -200,7 +200,7 @@ if __name__ == '__main__':
             executor, workflow_context,
             script_path=script_path,
             inputs={'key': 'value'})
-        assert props['key'] == 'value'
+        assert props['key'].value == 'value'
 
     @pytest.mark.parametrize(
         'value', ['string-value', [1, 2, 3], 999, 3.14, False,
@@ -209,16 +209,17 @@ if __name__ == '__main__':
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node runtime-properties key "${input_as_env_var}"
+            ctx node attributes key "${input_as_env_var}"
             ''',
             windows_script='''
-            ctx node runtime-properties key "%input_as_env_var%"
+            ctx node attributes key "%input_as_env_var%"
         ''')
         props = self._run(
             executor, workflow_context,
             script_path=script_path,
             env_var=value)
-        expected = props['key'] if isinstance(value, basestring) else json.loads(props['key'])
+        return_value = props['key'].value
+        expected = return_value if isinstance(value, basestring) else json.loads(return_value)
         assert expected == value
 
     @pytest.mark.parametrize('value', ['override', {'key': 'value'}])
@@ -227,10 +228,10 @@ if __name__ == '__main__':
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node runtime-properties key "${input_as_env_var}"
+            ctx node attributes key "${input_as_env_var}"
             ''',
             windows_script='''
-            ctx node runtime-properties key "%input_as_env_var%"
+            ctx node attributes key "%input_as_env_var%"
         ''')
 
         props = self._run(
@@ -242,17 +243,18 @@ if __name__ == '__main__':
                     'input_as_env_var': value
                 }
             })
-        expected = props['key'] if isinstance(value, basestring) else json.loads(props['key'])
+        return_value = props['key'].value
+        expected = return_value if isinstance(value, basestring) else json.loads(return_value)
         assert expected == value
 
     def test_get_nonexistent_runtime_property(self, executor, workflow_context, tmpdir):
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node runtime-properties nonexistent
+            ctx node attributes nonexistent
             ''',
             windows_script='''
-            ctx node runtime-properties nonexistent
+            ctx node attributes nonexistent
         ''')
         exception = self._run_and_get_task_exception(
             executor, workflow_context,
@@ -266,10 +268,10 @@ if __name__ == '__main__':
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx -j instance runtime-properties nonexistent
+            ctx -j instance attributes nonexistent
             ''',
             windows_script='''
-            ctx -j instance runtime-properties nonexistent
+            ctx -j instance attributes nonexistent
             ''')
         exception = self._run_and_get_task_exception(
             executor, workflow_context,
@@ -502,7 +504,7 @@ if __name__ == '__main__':
             tasks_graph=tasks_graph)
         eng.execute()
         return workflow_context.model.node.get_by_name(
-            mock.models.DEPENDENCY_NODE_NAME).runtime_properties
+            mock.models.DEPENDENCY_NODE_NAME).attributes
 
     @pytest.fixture
     def executor(self):
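
The rename is symmetrical across both scripting surfaces: shell scripts now call 'ctx node attributes ...' where they previously called 'ctx node runtime-properties ...', and Python scripts address the same collection as ctx.node.attributes. Either way the stored entry is a Parameter model, hence the .value dereferences added to the assertions above, in the spirit of:

    node = workflow_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
    assert node.attributes['key'].value == 'value'  # a Parameter model, not a bare string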

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/tests/orchestrator/execution_plugin/test_ssh.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_ssh.py b/tests/orchestrator/execution_plugin/test_ssh.py
index a9dc5e8..92d250e 100644
--- a/tests/orchestrator/execution_plugin/test_ssh.py
+++ b/tests/orchestrator/execution_plugin/test_ssh.py
@@ -53,9 +53,9 @@ _FABRIC_ENV = {
 class TestWithActualSSHServer(object):
 
     def test_run_script_basic(self):
-        expected_runtime_property_value = 'some_value'
-        props = self._execute(env={'test_value': expected_runtime_property_value})
-        assert props['test_value'] == expected_runtime_property_value
+        expected_attribute_value = 'some_value'
+        props = self._execute(env={'test_value': expected_attribute_value})
+        assert props['test_value'].value == expected_attribute_value
 
     @pytest.mark.skip(reason='sudo privileges are required')
     def test_run_script_as_sudo(self):
@@ -66,7 +66,7 @@ class TestWithActualSSHServer(object):
 
     def test_run_script_default_base_dir(self):
         props = self._execute()
-        assert props['work_dir'] == '{0}/work'.format(constants.DEFAULT_BASE_DIR)
+        assert props['work_dir'].value == '{0}/work'.format(constants.DEFAULT_BASE_DIR)
 
     @pytest.mark.skip(reason='Re-enable once output from process executor can be captured')
     @pytest.mark.parametrize('hide_groups', [[], ['everything']])
@@ -93,16 +93,16 @@ class TestWithActualSSHServer(object):
                 'cwd': expected_cwd,
                 'base_dir': expected_base_dir
             })
-        assert props['env_value'] == expected_env_value
-        assert len(props['bash_version']) > 0
-        assert props['arg1_value'] == expected_arg1_value
-        assert props['arg2_value'] == expected_arg2_value
-        assert props['cwd'] == expected_cwd
-        assert props['ctx_path'] == '{0}/ctx'.format(expected_base_dir)
+        assert props['env_value'].value == expected_env_value
+        assert len(props['bash_version'].value) > 0
+        assert props['arg1_value'].value == expected_arg1_value
+        assert props['arg2_value'].value == expected_arg2_value
+        assert props['cwd'].value == expected_cwd
+        assert props['ctx_path'].value == '{0}/ctx'.format(expected_base_dir)
 
     def test_run_script_command_prefix(self):
         props = self._execute(process={'command_prefix': 'bash -i'})
-        assert 'i' in props['dollar_dash']
+        assert 'i' in props['dollar_dash'].value
 
     def test_run_script_reuse_existing_ctx(self):
         expected_test_value_1 = 'test_value_1'
@@ -112,27 +112,27 @@ class TestWithActualSSHServer(object):
                              '{0}_2'.format(self.test_name)],
             env={'test_value1': expected_test_value_1,
                  'test_value2': expected_test_value_2})
-        assert props['test_value1'] == expected_test_value_1
-        assert props['test_value2'] == expected_test_value_2
+        assert props['test_value1'].value == expected_test_value_1
+        assert props['test_value2'].value == expected_test_value_2
 
     def test_run_script_download_resource_plain(self, tmpdir):
         resource = tmpdir.join('resource')
         resource.write('content')
         self._upload(str(resource), 'test_resource')
         props = self._execute()
-        assert props['test_value'] == 'content'
+        assert props['test_value'].value == 'content'
 
     def test_run_script_download_resource_and_render(self, tmpdir):
         resource = tmpdir.join('resource')
         resource.write('{{ctx.service.name}}')
         self._upload(str(resource), 'test_resource')
         props = self._execute()
-        assert props['test_value'] == self._workflow_context.service.name
+        assert props['test_value'].value == self._workflow_context.service.name
 
     @pytest.mark.parametrize('value', ['string-value', [1, 2, 3], {'key': 'value'}])
     def test_run_script_inputs_as_env_variables_no_override(self, value):
         props = self._execute(custom_input=value)
-        return_value = props['test_value']
+        return_value = props['test_value'].value
         expected = return_value if isinstance(value, basestring) else json.loads(return_value)
         assert value == expected
 
@@ -140,7 +140,7 @@ class TestWithActualSSHServer(object):
     def test_run_script_inputs_as_env_variables_process_env_override(self, value):
         props = self._execute(custom_input='custom-input-value',
                               env={'custom_env_var': value})
-        return_value = props['test_value']
+        return_value = props['test_value'].value
         expected = return_value if isinstance(value, basestring) else json.loads(return_value)
         assert value == expected
 
@@ -260,7 +260,7 @@ class TestWithActualSSHServer(object):
             tasks_graph=tasks_graph)
         eng.execute()
         return self._workflow_context.model.node.get_by_name(
-            mock.models.DEPENDENCY_NODE_NAME).runtime_properties
+            mock.models.DEPENDENCY_NODE_NAME).attributes
 
     def _execute_and_get_task_exception(self, *args, **kwargs):
         signal = events.on_failure_task_signal

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/tests/orchestrator/workflows/core/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task.py b/tests/orchestrator/workflows/core/test_task.py
index 50ca7f5..e488933 100644
--- a/tests/orchestrator/workflows/core/test_task.py
+++ b/tests/orchestrator/workflows/core/test_task.py
@@ -100,7 +100,7 @@ class TestOperationTask(object):
         storage_task = ctx.model.task.get_by_name(core_task.name)
         assert storage_task.plugin is storage_plugin
         assert storage_task.execution_name == ctx.execution.name
-        assert storage_task.actor == core_task.context.node
+        assert storage_task.actor == core_task.context.node._original_model
         assert core_task.model_task == storage_task
         assert core_task.name == api_task.name
         assert core_task.implementation == api_task.implementation

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/tests/orchestrator/workflows/executor/__init__.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/__init__.py b/tests/orchestrator/workflows/executor/__init__.py
index 375c44e..41c4b2e 100644
--- a/tests/orchestrator/workflows/executor/__init__.py
+++ b/tests/orchestrator/workflows/executor/__init__.py
@@ -74,3 +74,7 @@ class MockContext(object):
             return cls(storage=aria.application_model_storage(**kwargs))
         else:
             return cls()
+
+    @staticmethod
+    def close():
+        pass

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
index 1dbfae1..92f0fc4 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
@@ -17,7 +17,6 @@ import time
 import fasteners
 import pytest
 
-from aria.storage.exceptions import StorageError
 from aria.orchestrator import events
 from aria.orchestrator.workflows.exceptions import ExecutorException
 from aria.orchestrator.workflows import api
@@ -29,47 +28,37 @@ from tests.orchestrator.context import execute as execute_workflow
 from tests.orchestrator.workflows.helpers import events_collector
 from tests import mock
 from tests import storage
+from tests import helpers
 
 
-def test_concurrent_modification_on_task_succeeded(context, executor, lock_files):
-    _test(context, executor, lock_files, _test_task_succeeded, expected_failure=True)
-
-
-@operation
-def _test_task_succeeded(ctx, lock_files, key, first_value, second_value):
-    _concurrent_update(lock_files, ctx.node, key, first_value, second_value)
+@pytest.fixture
+def dataholder(tmpdir):
+    dataholder_path = str(tmpdir.join('dataholder'))
+    holder = helpers.FilesystemDataHolder(dataholder_path)
+    return holder
 
 
-def test_concurrent_modification_on_task_failed(context, executor, lock_files):
-    _test(context, executor, lock_files, _test_task_failed, expected_failure=True)
+def test_concurrent_modification_on_task_succeeded(context, executor, lock_files, dataholder):
+    _test(context, executor, lock_files, _test_task_succeeded, dataholder, expected_failure=False)
 
 
 @operation
-def _test_task_failed(ctx, lock_files, key, first_value, second_value):
-    first = _concurrent_update(lock_files, ctx.node, key, first_value, second_value)
-    if not first:
-        raise RuntimeError('MESSAGE')
+def _test_task_succeeded(ctx, lock_files, key, first_value, second_value, holder_path):
+    _concurrent_update(lock_files, ctx.node, key, first_value, second_value, holder_path)
 
 
-def test_concurrent_modification_on_update_and_refresh(context, executor, lock_files):
-    _test(context, executor, lock_files, _test_update_and_refresh, expected_failure=False)
+def test_concurrent_modification_on_task_failed(context, executor, lock_files, dataholder):
+    _test(context, executor, lock_files, _test_task_failed, dataholder, expected_failure=True)
 
 
 @operation
-def _test_update_and_refresh(ctx, lock_files, key, first_value, second_value):
-    node = ctx.node
-    first = _concurrent_update(lock_files, node, key, first_value, second_value)
+def _test_task_failed(ctx, lock_files, key, first_value, second_value, holder_path):
+    first = _concurrent_update(lock_files, ctx.node, key, first_value, second_value, holder_path)
     if not first:
-        try:
-            ctx.model.node.update(node)
-        except StorageError as e:
-            assert 'Version conflict' in str(e)
-            ctx.model.node.refresh(node)
-        else:
-            raise RuntimeError('Unexpected')
+        raise RuntimeError('MESSAGE')
 
 
-def _test(context, executor, lock_files, func, expected_failure):
+def _test(context, executor, lock_files, func, dataholder, expected_failure):
     def _node(ctx):
         return ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
 
@@ -82,7 +71,8 @@ def _test(context, executor, lock_files, func, expected_failure):
         'lock_files': lock_files,
         'key': key,
         'first_value': first_value,
-        'second_value': second_value
+        'second_value': second_value,
+        'holder_path': dataholder.path
     }
 
     node = _node(context)
@@ -118,17 +108,13 @@ def _test(context, executor, lock_files, func, expected_failure):
         except ExecutorException:
             pass
 
-    props = _node(context).runtime_properties
-    assert props[key] == first_value
+    props = _node(context).attributes
+    assert dataholder['invocations'] == 2
+    assert props[key].value == dataholder[key]
 
     exceptions = [event['kwargs']['exception'] for event in collected.get(signal, [])]
     if expected_failure:
         assert exceptions
-        exception = exceptions[-1]
-        assert isinstance(exception, StorageError)
-        assert 'Version conflict' in str(exception)
-    else:
-        assert not exceptions
 
 
 @pytest.fixture
@@ -150,8 +136,8 @@ def lock_files(tmpdir):
     return str(tmpdir.join('first_lock_file')), str(tmpdir.join('second_lock_file'))
 
 
-def _concurrent_update(lock_files, node, key, first_value, second_value):
-
+def _concurrent_update(lock_files, node, key, first_value, second_value, holder_path):
+    holder = helpers.FilesystemDataHolder(holder_path)
     locker1 = fasteners.InterProcessLock(lock_files[0])
     locker2 = fasteners.InterProcessLock(lock_files[1])
 
@@ -161,11 +147,14 @@ def _concurrent_update(lock_files, node, key, first_value, second_value):
         # Give chance for both processes to acquire locks
         while locker2.acquire(blocking=False):
             locker2.release()
-            time.sleep(0.01)
+            time.sleep(0.1)
     else:
         locker2.acquire()
 
-    node.runtime_properties[key] = first_value if first else second_value
+    node.attributes[key] = first_value if first else second_value
+    holder[key] = first_value if first else second_value
+    holder.setdefault('invocations', 0)
+    holder['invocations'] += 1
 
     if first:
         locker1.release()
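
For readers skimming the lock choreography in _concurrent_update: each process takes its own fasteners.InterProcessLock, and whichever wins the first lock spins until the second lock is held, so the two operations are guaranteed to overlap before either writes. Stripped to its skeleton (illustrative, not the test itself):

    import time
    import fasteners

    def overlap(lock_files, write):
        locker1 = fasteners.InterProcessLock(lock_files[0])
        locker2 = fasteners.InterProcessLock(lock_files[1])
        first = locker1.acquire(blocking=False)  # only one process wins lock 1
        if first:
            # spin until the other process is holding lock 2
            while locker2.acquire(blocking=False):
                locker2.release()
                time.sleep(0.1)
        else:
            locker2.acquire()
        write(first)  # both processes are now inside the overlapping window
        (locker1 if first else locker2).release()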

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/tests/orchestrator/workflows/executor/test_process_executor_extension.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_extension.py b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
index 878ac24..30b23ed 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_extension.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
@@ -56,7 +56,7 @@ def test_decorate_extension(context, executor):
     graph = mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter
     eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
     eng.execute()
-    out = get_node(context).runtime_properties['out']
+    out = get_node(context).attributes.get('out').value
     assert out['wrapper_inputs'] == inputs
     assert out['function_inputs'] == inputs
 
@@ -67,7 +67,7 @@ class MockProcessExecutorExtension(object):
     def decorate(self):
         def decorator(function):
             def wrapper(ctx, **operation_inputs):
-                ctx.node.runtime_properties['out'] = {'wrapper_inputs': operation_inputs}
+                ctx.node.attributes['out'] = {'wrapper_inputs': operation_inputs}
                 function(ctx=ctx, **operation_inputs)
             return wrapper
         return decorator
@@ -75,7 +75,7 @@ class MockProcessExecutorExtension(object):
 
 @operation
 def _mock_operation(ctx, **operation_inputs):
-    ctx.node.runtime_properties['out']['function_inputs'] = operation_inputs
+    ctx.node.attributes['out']['function_inputs'] = operation_inputs
 
 
 @pytest.fixture


[2/7] incubator-ariatosca git commit: ARIA-258 Convert runtime_properties to attributes

Posted by em...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
index 4fbe9c1..2b628a0 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
@@ -28,7 +28,7 @@ from tests import mock
 from tests import storage
 
 
-_TEST_RUNTIME_PROPERTIES = {
+_TEST_ATTRIBUTES = {
     'some': 'values', 'that': 'are', 'most': 'likely', 'only': 'set', 'here': 'yo'
 }
 
@@ -46,17 +46,18 @@ def test_track_changes_of_failed_operation(context, executor):
 
 def _assert_tracked_changes_are_applied(context):
     instance = context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-    assert instance.runtime_properties == _TEST_RUNTIME_PROPERTIES
+    assert all(instance.attributes[key].value == value
+               for key, value in _TEST_ATTRIBUTES.items())
 
 
-def _update_runtime_properties(context):
-    context.node.runtime_properties.clear()
-    context.node.runtime_properties.update(_TEST_RUNTIME_PROPERTIES)
+def _update_attributes(context):
+    context.node.attributes.clear()
+    context.node.attributes.update(_TEST_ATTRIBUTES)
 
 
 def test_refresh_state_of_tracked_attributes(context, executor):
     out = _run_workflow(context=context, executor=executor, op_func=_mock_refreshing_operation)
-    assert out['initial'] == out['after_refresh']
+    assert out['after_refresh'] == out['after_change']
     assert out['initial'] != out['after_change']
 
 
@@ -66,22 +67,19 @@ def test_apply_tracked_changes_during_an_operation(context, executor):
         'changed_but_refreshed': {'some': 'newer', 'properties': 'right there'}
     }
 
-    expected_initial = context.model.node.get_by_name(
-        mock.models.DEPENDENCY_NODE_NAME).runtime_properties
-
-    out = _run_workflow(context=context, executor=executor, op_func=_mock_updating_operation,
-                        inputs=inputs)
+    expected_initial = context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME).attributes
+    out = _run_workflow(
+        context=context, executor=executor, op_func=_mock_updating_operation, inputs=inputs)
 
     expected_after_update = expected_initial.copy()
     expected_after_update.update(inputs['committed']) # pylint: disable=no-member
     expected_after_change = expected_after_update.copy()
     expected_after_change.update(inputs['changed_but_refreshed']) # pylint: disable=no-member
-    expected_after_refresh = expected_after_update
 
     assert out['initial'] == expected_initial
     assert out['after_update'] == expected_after_update
     assert out['after_change'] == expected_after_change
-    assert out['after_refresh'] == expected_after_refresh
+    assert out['after_refresh'] == expected_after_change
 
 
 def _run_workflow(context, executor, op_func, inputs=None):
@@ -109,42 +107,42 @@ def _run_workflow(context, executor, op_func, inputs=None):
     graph = mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter
     eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
     eng.execute()
-    return context.model.node.get_by_name(
-        mock.models.DEPENDENCY_NODE_NAME).runtime_properties.get('out')
+    out = context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME).attributes.get('out')
+    return out.value if out else None
 
 
 @operation
 def _mock_success_operation(ctx):
-    _update_runtime_properties(ctx)
+    _update_attributes(ctx)
 
 
 @operation
 def _mock_fail_operation(ctx):
-    _update_runtime_properties(ctx)
+    _update_attributes(ctx)
     raise RuntimeError
 
 
 @operation
 def _mock_refreshing_operation(ctx):
-    out = {'initial': copy.deepcopy(ctx.node.runtime_properties)}
-    ctx.node.runtime_properties.update({'some': 'new', 'properties': 'right here'})
-    out['after_change'] = copy.deepcopy(ctx.node.runtime_properties)
+    out = {'initial': copy.deepcopy(ctx.node.attributes)}
+    ctx.node.attributes.update({'some': 'new', 'properties': 'right here'})
+    out['after_change'] = copy.deepcopy(ctx.node.attributes)
     ctx.model.node.refresh(ctx.node)
-    out['after_refresh'] = copy.deepcopy(ctx.node.runtime_properties)
-    ctx.node.runtime_properties['out'] = out
+    out['after_refresh'] = copy.deepcopy(ctx.node.attributes)
+    ctx.node.attributes['out'] = out
 
 
 @operation
 def _mock_updating_operation(ctx, committed, changed_but_refreshed):
-    out = {'initial': copy.deepcopy(ctx.node.runtime_properties)}
-    ctx.node.runtime_properties.update(committed)
+    out = {'initial': copy.deepcopy(ctx.node.attributes)}
+    ctx.node.attributes.update(committed)
     ctx.model.node.update(ctx.node)
-    out['after_update'] = copy.deepcopy(ctx.node.runtime_properties)
-    ctx.node.runtime_properties.update(changed_but_refreshed)
-    out['after_change'] = copy.deepcopy(ctx.node.runtime_properties)
+    out['after_update'] = copy.deepcopy(ctx.node.attributes)
+    ctx.node.attributes.update(changed_but_refreshed)
+    out['after_change'] = copy.deepcopy(ctx.node.attributes)
     ctx.model.node.refresh(ctx.node)
-    out['after_refresh'] = copy.deepcopy(ctx.node.runtime_properties)
-    ctx.node.runtime_properties['out'] = out
+    out['after_refresh'] = copy.deepcopy(ctx.node.attributes)
+    ctx.node.attributes['out'] = out
 
 
 def _operation_mapping(func):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/tests/resources/scripts/test_ssh.sh
----------------------------------------------------------------------
diff --git a/tests/resources/scripts/test_ssh.sh b/tests/resources/scripts/test_ssh.sh
index 90202c7..bbdf773 100644
--- a/tests/resources/scripts/test_ssh.sh
+++ b/tests/resources/scripts/test_ssh.sh
@@ -4,7 +4,7 @@ set -u
 set -e
 
 test_run_script_basic() {
-    ctx node runtime-properties test_value $test_value
+    ctx node attributes test_value $test_value
 }
 
 test_run_script_as_sudo() {
@@ -12,7 +12,7 @@ test_run_script_as_sudo() {
 }
 
 test_run_script_default_base_dir() {
-    ctx node runtime-properties work_dir $PWD
+    ctx node attributes work_dir $PWD
 }
 
 test_run_script_with_hide() {
@@ -20,44 +20,44 @@ test_run_script_with_hide() {
 }
 
 test_run_script_process_config() {
-    ctx node runtime-properties env_value $test_value_env
-    ctx node runtime-properties bash_version $BASH_VERSION
-    ctx node runtime-properties arg1_value $1
-    ctx node runtime-properties arg2_value $2
-    ctx node runtime-properties cwd $PWD
-    ctx node runtime-properties ctx_path $(which ctx)
+    ctx node attributes env_value $test_value_env
+    ctx node attributes bash_version $BASH_VERSION
+    ctx node attributes arg1_value $1
+    ctx node attributes arg2_value $2
+    ctx node attributes cwd $PWD
+    ctx node attributes ctx_path $(which ctx)
 }
 
 test_run_script_command_prefix() {
-    ctx node runtime-properties dollar_dash $-
+    ctx node attributes dollar_dash $-
 }
 
 test_run_script_reuse_existing_ctx_1() {
-    ctx node runtime-properties test_value1 $test_value1
+    ctx node attributes test_value1 $test_value1
 }
 
 test_run_script_reuse_existing_ctx_2() {
-    ctx node runtime-properties test_value2 $test_value2
+    ctx node attributes test_value2 $test_value2
 }
 
 test_run_script_download_resource_plain() {
     local destination=$(mktemp)
     ctx download-resource ${destination} test_resource
-    ctx node runtime-properties test_value "$(cat ${destination})"
+    ctx node attributes test_value "$(cat ${destination})"
 }
 
 test_run_script_download_resource_and_render() {
     local destination=$(mktemp)
     ctx download-resource-and-render ${destination} test_resource
-    ctx node runtime-properties test_value "$(cat ${destination})"
+    ctx node attributes test_value "$(cat ${destination})"
 }
 
 test_run_script_inputs_as_env_variables_no_override() {
-    ctx node runtime-properties test_value "$custom_env_var"
+    ctx node attributes test_value "$custom_env_var"
 }
 
 test_run_script_inputs_as_env_variables_process_env_override() {
-    ctx node runtime-properties test_value "$custom_env_var"
+    ctx node attributes test_value "$custom_env_var"
 }
 
 test_run_script_error_in_script() {

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/50b997e3/tests/storage/test_instrumentation.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_instrumentation.py b/tests/storage/test_instrumentation.py
deleted file mode 100644
index bdbb17e..0000000
--- a/tests/storage/test_instrumentation.py
+++ /dev/null
@@ -1,396 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-from sqlalchemy import Column, Text, Integer, event
-
-from aria.modeling import (
-    mixins,
-    types as modeling_types,
-    models
-)
-from aria.modeling.exceptions import ValueFormatException
-from aria.storage import (
-    ModelStorage,
-    sql_mapi,
-    instrumentation
-)
-
-from . import release_sqlite_storage, init_inmemory_model_storage
-
-STUB = instrumentation._STUB
-Value = instrumentation._Value
-instruments_holder = []
-
-
-class TestInstrumentation(object):
-
-    def test_track_changes(self, storage):
-        model_kwargs = dict(
-            name='name',
-            dict1={'initial': 'value'},
-            dict2={'initial': 'value'},
-            list1=['initial'],
-            list2=['initial'],
-            int1=0,
-            int2=0,
-            string2='string')
-        model1_instance = MockModel1(**model_kwargs)
-        model2_instance = MockModel2(**model_kwargs)
-        storage.mock_model_1.put(model1_instance)
-        storage.mock_model_2.put(model2_instance)
-
-        instrument = self._track_changes({
-            MockModel1.dict1: dict,
-            MockModel1.list1: list,
-            MockModel1.int1: int,
-            MockModel1.string2: str,
-            MockModel2.dict2: dict,
-            MockModel2.list2: list,
-            MockModel2.int2: int,
-            MockModel2.name: str
-        })
-
-        assert not instrument.tracked_changes
-
-        storage_model1_instance = storage.mock_model_1.get(model1_instance.id)
-        storage_model2_instance = storage.mock_model_2.get(model2_instance.id)
-
-        storage_model1_instance.dict1 = {'hello': 'world'}
-        storage_model1_instance.dict2 = {'should': 'not track'}
-        storage_model1_instance.list1 = ['hello']
-        storage_model1_instance.list2 = ['should not track']
-        storage_model1_instance.int1 = 100
-        storage_model1_instance.int2 = 20000
-        storage_model1_instance.name = 'should not track'
-        storage_model1_instance.string2 = 'new_string'
-
-        storage_model2_instance.dict1.update({'should': 'not track'})
-        storage_model2_instance.dict2.update({'hello': 'world'})
-        storage_model2_instance.list1.append('should not track')
-        storage_model2_instance.list2.append('hello')
-        storage_model2_instance.int1 = 100
-        storage_model2_instance.int2 = 20000
-        storage_model2_instance.name = 'new_name'
-        storage_model2_instance.string2 = 'should not track'
-
-        assert instrument.tracked_changes == {
-            'mock_model_1': {
-                model1_instance.id: {
-                    'dict1': Value(STUB, {'hello': 'world'}),
-                    'list1': Value(STUB, ['hello']),
-                    'int1': Value(STUB, 100),
-                    'string2': Value(STUB, 'new_string')
-                }
-            },
-            'mock_model_2': {
-                model2_instance.id: {
-                    'dict2': Value({'initial': 'value'}, {'hello': 'world', 'initial': 'value'}),
-                    'list2': Value(['initial'], ['initial', 'hello']),
-                    'int2': Value(STUB, 20000),
-                    'name': Value(STUB, 'new_name'),
-                }
-            }
-        }
-
-    def test_attribute_initial_none_value(self, storage):
-        instance1 = MockModel1(name='name1', dict1=None)
-        instance2 = MockModel1(name='name2', dict1=None)
-        storage.mock_model_1.put(instance1)
-        storage.mock_model_1.put(instance2)
-        instrument = self._track_changes({MockModel1.dict1: dict})
-        instance1 = storage.mock_model_1.get(instance1.id)
-        instance2 = storage.mock_model_1.get(instance2.id)
-        instance1.dict1 = {'new': 'value'}
-        assert instrument.tracked_changes == {
-            'mock_model_1': {
-                instance1.id: {'dict1': Value(STUB, {'new': 'value'})},
-                instance2.id: {'dict1': Value(None, None)},
-            }
-        }
-
-    def test_attribute_set_none_value(self, storage):
-        instance = MockModel1(name='name')
-        storage.mock_model_1.put(instance)
-        instrument = self._track_changes({
-            MockModel1.dict1: dict,
-            MockModel1.list1: list,
-            MockModel1.string2: str,
-            MockModel1.int1: int
-        })
-        instance = storage.mock_model_1.get(instance.id)
-        instance.dict1 = None
-        instance.list1 = None
-        instance.string2 = None
-        instance.int1 = None
-        assert instrument.tracked_changes == {
-            'mock_model_1': {
-                instance.id: {
-                    'dict1': Value(STUB, None),
-                    'list1': Value(STUB, None),
-                    'string2': Value(STUB, None),
-                    'int1': Value(STUB, None)
-                }
-            }
-        }
-
-    def test_restore(self):
-        instrument = self._track_changes({MockModel1.dict1: dict})
-        # set instance attribute, load instance, refresh instance and flush_refresh listeners
-        assert len(instrument.listeners) == 4
-        for listener_args in instrument.listeners:
-            assert event.contains(*listener_args)
-        instrument.restore()
-        assert len(instrument.listeners) == 4
-        for listener_args in instrument.listeners:
-            assert not event.contains(*listener_args)
-        return instrument
-
-    def test_restore_twice(self):
-        instrument = self.test_restore()
-        instrument.restore()
-
-    def test_instrumentation_context_manager(self, storage):
-        instance = MockModel1(name='name')
-        storage.mock_model_1.put(instance)
-        with self._track_changes({MockModel1.dict1: dict}) as instrument:
-            instance = storage.mock_model_1.get(instance.id)
-            instance.dict1 = {'new': 'value'}
-            assert instrument.tracked_changes == {
-                'mock_model_1': {instance.id: {'dict1': Value(STUB, {'new': 'value'})}}
-            }
-            assert len(instrument.listeners) == 4
-            for listener_args in instrument.listeners:
-                assert event.contains(*listener_args)
-        for listener_args in instrument.listeners:
-            assert not event.contains(*listener_args)
-
-    def test_apply_tracked_changes(self, storage):
-        initial_values = {'dict1': {'initial': 'value'}, 'list1': ['initial']}
-        instance1_1 = MockModel1(name='instance1_1', **initial_values)
-        instance1_2 = MockModel1(name='instance1_2', **initial_values)
-        instance2_1 = MockModel2(name='instance2_1', **initial_values)
-        instance2_2 = MockModel2(name='instance2_2', **initial_values)
-        storage.mock_model_1.put(instance1_1)
-        storage.mock_model_1.put(instance1_2)
-        storage.mock_model_2.put(instance2_1)
-        storage.mock_model_2.put(instance2_2)
-
-        instrument = self._track_changes({
-            MockModel1.dict1: dict,
-            MockModel1.list1: list,
-            MockModel2.dict1: dict,
-            MockModel2.list1: list
-        })
-
-        def get_instances():
-            return (storage.mock_model_1.get(instance1_1.id),
-                    storage.mock_model_1.get(instance1_2.id),
-                    storage.mock_model_2.get(instance2_1.id),
-                    storage.mock_model_2.get(instance2_2.id))
-
-        instance1_1, instance1_2, instance2_1, instance2_2 = get_instances()
-        instance1_1.dict1 = {'new': 'value'}
-        instance1_2.list1 = ['new_value']
-        instance2_1.dict1.update({'new': 'value'})
-        instance2_2.list1.append('new_value')
-
-        instrument.restore()
-        storage.mock_model_1._session.expire_all()
-
-        instance1_1, instance1_2, instance2_1, instance2_2 = get_instances()
-        instance1_1.dict1 = {'overriding': 'value'}
-        instance1_2.list1 = ['overriding_value']
-        instance2_1.dict1 = {'overriding': 'value'}
-        instance2_2.list1 = ['overriding_value']
-        storage.mock_model_1.put(instance1_1)
-        storage.mock_model_1.put(instance1_2)
-        storage.mock_model_2.put(instance2_1)
-        storage.mock_model_2.put(instance2_2)
-        instance1_1, instance1_2, instance2_1, instance2_2 = get_instances()
-        assert instance1_1.dict1 == {'overriding': 'value'}
-        assert instance1_2.list1 == ['overriding_value']
-        assert instance2_1.dict1 == {'overriding': 'value'}
-        assert instance2_2.list1 == ['overriding_value']
-
-        instrumentation.apply_tracked_changes(
-            tracked_changes=instrument.tracked_changes,
-            new_instances={},
-            model=storage)
-
-        instance1_1, instance1_2, instance2_1, instance2_2 = get_instances()
-        assert instance1_1.dict1 == {'new': 'value'}
-        assert instance1_2.list1 == ['new_value']
-        assert instance2_1.dict1 == {'initial': 'value', 'new': 'value'}
-        assert instance2_2.list1 == ['initial', 'new_value']
-
-    def test_clear_instance(self, storage):
-        instance1 = MockModel1(name='name1')
-        instance2 = MockModel1(name='name2')
-        for instance in [instance1, instance2]:
-            storage.mock_model_1.put(instance)
-        instrument = self._track_changes({MockModel1.dict1: dict})
-        instance1.dict1 = {'new': 'value'}
-        instance2.dict1 = {'new2': 'value2'}
-        assert instrument.tracked_changes == {
-            'mock_model_1': {
-                instance1.id: {'dict1': Value(STUB, {'new': 'value'})},
-                instance2.id: {'dict1': Value(STUB, {'new2': 'value2'})}
-            }
-        }
-        instrument.clear(instance1)
-        assert instrument.tracked_changes == {
-            'mock_model_1': {
-                instance2.id: {'dict1': Value(STUB, {'new2': 'value2'})}
-            }
-        }
-
-    def test_clear_all(self, storage):
-        instance1 = MockModel1(name='name1')
-        instance2 = MockModel1(name='name2')
-        for instance in [instance1, instance2]:
-            storage.mock_model_1.put(instance)
-        instrument = self._track_changes({MockModel1.dict1: dict})
-        instance1.dict1 = {'new': 'value'}
-        instance2.dict1 = {'new2': 'value2'}
-        assert instrument.tracked_changes == {
-            'mock_model_1': {
-                instance1.id: {'dict1': Value(STUB, {'new': 'value'})},
-                instance2.id: {'dict1': Value(STUB, {'new2': 'value2'})}
-            }
-        }
-        instrument.clear()
-        assert instrument.tracked_changes == {}
-
-    def test_new_instances(self, storage):
-        model_kwargs = dict(
-            name='name',
-            dict1={'initial': 'value'},
-            dict2={'initial': 'value'},
-            list1=['initial'],
-            list2=['initial'],
-            int1=0,
-            int2=0,
-            string2='string')
-        model_instance_1 = MockModel1(**model_kwargs)
-        model_instance_2 = MockModel2(**model_kwargs)
-
-        instrument = self._track_changes(model=storage, instrumented_new=(MockModel1,))
-        assert not instrument.tracked_changes
-
-        storage.mock_model_1.put(model_instance_1)
-        storage.mock_model_2.put(model_instance_2)
-        # Assert all models made it to storage
-        assert len(storage.mock_model_1.list()) == len(storage.mock_model_2.list()) == 1
-
-        # Assert only one model was tracked
-        assert len(instrument.new_instances) == 1
-
-        mock_model_1 = instrument.new_instances[MockModel1.__tablename__].values()[0]
-        storage_model1_instance = storage.mock_model_1.get(model_instance_1.id)
-
-        for key in model_kwargs:
-            assert mock_model_1[key] == model_kwargs[key] == getattr(storage_model1_instance, key)
-
-    def _track_changes(self, instrumented_modified=None, model=None, instrumented_new=None):
-        instrument = instrumentation.track_changes(
-            model=model,
-            instrumented={'modified': instrumented_modified or {}, 'new': instrumented_new or {}})
-        instruments_holder.append(instrument)
-        return instrument
-
-    def test_track_changes_to_strict_dict(self, storage):
-        model_kwargs = dict(strict_dict={'key': 'value'},
-                            strict_list=['item'])
-        model_instance = StrictMockModel(**model_kwargs)
-        storage.strict_mock_model.put(model_instance)
-
-        instrument = self._track_changes({
-            StrictMockModel.strict_dict: dict,
-            StrictMockModel.strict_list: list,
-        })
-
-        assert not instrument.tracked_changes
-
-        storage_model_instance = storage.strict_mock_model.get(model_instance.id)
-
-        with pytest.raises(ValueFormatException):
-            storage_model_instance.strict_dict = {1: 1}
-
-        with pytest.raises(ValueFormatException):
-            storage_model_instance.strict_dict = {'hello': 1}
-
-        with pytest.raises(ValueFormatException):
-            storage_model_instance.strict_dict = {1: 'hello'}
-
-        storage_model_instance.strict_dict = {'hello': 'world'}
-        assert storage_model_instance.strict_dict == {'hello': 'world'}
-
-        with pytest.raises(ValueFormatException):
-            storage_model_instance.strict_list = [1]
-        storage_model_instance.strict_list = ['hello']
-        assert storage_model_instance.strict_list == ['hello']
-
-        assert instrument.tracked_changes == {
-            'strict_mock_model': {
-                model_instance.id: {
-                    'strict_dict': Value(STUB, {'hello': 'world'}),
-                    'strict_list': Value(STUB, ['hello']),
-                }
-            },
-        }
-
-
-@pytest.fixture(autouse=True)
-def restore_instrumentation():
-    yield
-    for instrument in instruments_holder:
-        instrument.restore()
-    del instruments_holder[:]
-
-
-@pytest.fixture
-def storage():
-    result = ModelStorage(api_cls=sql_mapi.SQLAlchemyModelAPI,
-                          items=(MockModel1, MockModel2, StrictMockModel),
-                          initiator=init_inmemory_model_storage)
-    yield result
-    release_sqlite_storage(result)
-
-
-class _MockModel(mixins.ModelMixin):
-    name = Column(Text)
-    dict1 = Column(modeling_types.Dict)
-    dict2 = Column(modeling_types.Dict)
-    list1 = Column(modeling_types.List)
-    list2 = Column(modeling_types.List)
-    int1 = Column(Integer)
-    int2 = Column(Integer)
-    string2 = Column(Text)
-
-
-class MockModel1(_MockModel, models.aria_declarative_base):
-    __tablename__ = 'mock_model_1'
-
-
-class MockModel2(_MockModel, models.aria_declarative_base):
-    __tablename__ = 'mock_model_2'
-
-
-class StrictMockModel(mixins.ModelMixin, models.aria_declarative_base):
-    __tablename__ = 'strict_mock_model'
-
-    strict_dict = Column(modeling_types.StrictDict(basestring, basestring))
-    strict_list = Column(modeling_types.StrictList(basestring))


[6/7] incubator-ariatosca git commit: ARIA-149 Enhance operation configuration

Posted by em...@apache.org.
ARIA-149 Enhance operation configuration

* Parse special "dependencies" configuration parameters as YAML and
  treat them as Parameter models, allowing them full use of intrinsic
  functions, type coercions, and validations
* Rename various functions that process "properties" to more generically
  process "parameters" (properties, inputs, attributes, arguments, etc.)
* The "configuration" field in OperationTemplate and Operation models
  is now now a dict of Parameter models
* Added "function" and "arguments" fields to Operation model to preserve
  user data (in "implementation" and "inputs") and to clearly demarcate
  orchestration data from user data
* Some cleanup of parser code touched by this commit
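
A minimal sketch of the resulting split between user data and
orchestration data (editor's illustration, not part of the commit; the
class below is a simplified stand-in for the ARIA Operation model):

    # Simplified stand-in for the ARIA Operation model.
    # "implementation" and "inputs" preserve what the user declared;
    # "function" and "arguments" hold what the orchestrator will run.
    class Operation(object):
        def __init__(self, implementation, inputs):
            self.implementation = implementation  # user data
            self.inputs = dict(inputs)            # user data
            self.function = None                  # orchestration data
            self.arguments = {}                   # orchestration data

        def configure(self):
            # Mirrors OperationBase.configure: the execution plugin
            # derives the callable, and inputs are then sent as extra
            # arguments, overriding arguments with the same names.
            self.function = ('aria.orchestrator.execution_plugin.'
                             'operations.run_script_locally')
            self.arguments['script_path'] = self.implementation
            self.arguments.update(self.inputs)

    operation = Operation('scripts/create.sh', {'port': 8080})
    operation.configure()
    assert operation.arguments['script_path'] == 'scripts/create.sh'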


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/f6ee65a9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/f6ee65a9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/f6ee65a9

Branch: refs/heads/ARIA-149-functions-in-operation-configuration
Commit: f6ee65a9eaa8eb252c4431152327635a43dff425
Parents: adf7607
Author: Tal Liron <ta...@gmail.com>
Authored: Thu Apr 20 17:54:47 2017 -0500
Committer: Tal Liron <ta...@gmail.com>
Committed: Fri May 26 12:41:30 2017 -0500

----------------------------------------------------------------------
 aria/cli/commands/services.py                   |   2 +-
 aria/core.py                                    |  14 +-
 aria/modeling/constraints.py                    |  28 +++
 aria/modeling/contraints.py                     |  28 ---
 aria/modeling/exceptions.py                     |  16 +-
 aria/modeling/service_common.py                 |   4 +
 aria/modeling/service_instance.py               |  69 +++---
 aria/modeling/service_template.py               |  54 ++---
 aria/modeling/utils.py                          | 105 ++++-----
 .../execution_plugin/instantiation.py           | 133 +++++++-----
 aria/orchestrator/workflow_runner.py            |   2 +-
 aria/orchestrator/workflows/api/task.py         |   4 +-
 aria/orchestrator/workflows/core/task.py        |   1 -
 .../profiles/aria-1.0/aria-1.0.yaml             |  16 +-
 .../simple_v1_0/assignments.py                  |   4 +-
 .../simple_v1_0/modeling/__init__.py            |  65 ++++--
 .../simple_v1_0/modeling/artifacts.py           |   2 +-
 .../simple_v1_0/modeling/capabilities.py        |  24 ++-
 .../simple_v1_0/modeling/constraints.py         |   2 +-
 .../simple_v1_0/modeling/data_types.py          |  16 ++
 .../simple_v1_0/modeling/interfaces.py          |  34 ++-
 .../simple_v1_0/modeling/parameters.py          | 211 +++++++++++++++++++
 .../simple_v1_0/modeling/policies.py            |   2 +
 .../simple_v1_0/modeling/properties.py          | 202 ------------------
 .../simple_v1_0/modeling/requirements.py        |  20 +-
 .../modeling/substitution_mappings.py           |   4 +
 .../simple_v1_0/templates.py                    |  13 +-
 .../aria_extension_tosca/simple_v1_0/types.py   |  24 +--
 tests/cli/test_services.py                      |  14 +-
 tests/mock/models.py                            |  10 +-
 tests/mock/topology.py                          |  12 +-
 tests/orchestrator/context/test_operation.py    |  53 +++--
 tests/orchestrator/context/test_serialize.py    |   2 +-
 tests/orchestrator/context/test_toolbelt.py     |  14 +-
 .../orchestrator/execution_plugin/test_local.py |  10 +-
 tests/orchestrator/execution_plugin/test_ssh.py |  12 +-
 tests/orchestrator/test_workflow_runner.py      |   8 +-
 tests/orchestrator/workflows/api/test_task.py   |  26 +--
 .../workflows/builtin/test_execute_operation.py |   2 +-
 .../orchestrator/workflows/core/test_engine.py  |   6 +-
 .../orchestrator/workflows/core/test_events.py  |   3 +-
 tests/orchestrator/workflows/core/test_task.py  |   6 +-
 .../test_task_graph_into_execution_graph.py     |   2 +-
 ...process_executor_concurrent_modifications.py |  10 +-
 .../executor/test_process_executor_extension.py |  13 +-
 .../test_process_executor_tracked_changes.py    |   8 +-
 .../node-cellar/node-cellar.yaml                |  24 ++-
 47 files changed, 737 insertions(+), 597 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/aria/cli/commands/services.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/services.py b/aria/cli/commands/services.py
index 24de7c5..476387c 100644
--- a/aria/cli/commands/services.py
+++ b/aria/cli/commands/services.py
@@ -151,7 +151,7 @@ def create(service_template_name,
     except storage_exceptions.StorageError as e:
         utils.check_overriding_storage_exceptions(e, 'service', service_name)
         raise
-    except modeling_exceptions.InputsException:
+    except modeling_exceptions.ParameterException:
         service_templates.print_service_template_inputs(model_storage, service_template_name,
                                                         logger)
         raise

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/aria/core.py
----------------------------------------------------------------------
diff --git a/aria/core.py b/aria/core.py
index cc943ef..f660167 100644
--- a/aria/core.py
+++ b/aria/core.py
@@ -56,7 +56,8 @@ class Core(object):
         service_template = self.model_storage.service_template.get(service_template_id)
         if service_template.services:
             raise exceptions.DependentServicesError(
-                "Can't delete service template {0} - Service template has existing services")
+                'Can\'t delete service template `{0}` - service template has existing services'
+                .format(service_template.name))
 
         self.model_storage.service_template.delete(service_template)
         self.resource_storage.service_template.delete(entry_id=str(service_template.id))
@@ -87,7 +88,8 @@ class Core(object):
                     consumption.CoerceServiceInstanceValues
                 )).consume()
             if context.validation.dump_issues():
-                raise exceptions.InstantiationError('Failed to instantiate service template')
+                raise exceptions.InstantiationError('Failed to instantiate service template `{0}`'
+                                                    .format(service_template.name))
 
         storage_session.flush()  # flushing so service.id would auto-populate
         service.name = service_name or '{0}_{1}'.format(service_template.name, service.id)
@@ -100,15 +102,15 @@ class Core(object):
         active_executions = [e for e in service.executions if e.is_active()]
         if active_executions:
             raise exceptions.DependentActiveExecutionsError(
-                "Can't delete service {0} - there is an active execution for this service. "
-                "Active execution id: {1}".format(service.name, active_executions[0].id))
+                'Can\'t delete service `{0}` - there is an active execution for this service. '
+                'Active execution ID: {1}'.format(service.name, active_executions[0].id))
 
         if not force:
             available_nodes = [str(n.id) for n in service.nodes.values() if n.is_available()]
             if available_nodes:
                 raise exceptions.DependentAvailableNodesError(
-                    "Can't delete service {0} - there are available nodes for this service. "
-                    "Available node ids: {1}".format(service.name, ', '.join(available_nodes)))
+                    'Can\'t delete service `{0}` - there are available nodes for this service. '
+                    'Available node IDs: {1}'.format(service.name, ', '.join(available_nodes)))
 
         self.model_storage.service.delete(service)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/aria/modeling/constraints.py
----------------------------------------------------------------------
diff --git a/aria/modeling/constraints.py b/aria/modeling/constraints.py
new file mode 100644
index 0000000..107b010
--- /dev/null
+++ b/aria/modeling/constraints.py
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class NodeTemplateConstraint(object):
+    """
+    Used to constrain requirements for node templates.
+
+    Must be serializable.
+    """
+
+    def matches(self, source_node_template, target_node_template):
+        """
+        Returns true if the target matches the constraint for the source.
+        """
+        raise NotImplementedError

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/aria/modeling/contraints.py
----------------------------------------------------------------------
diff --git a/aria/modeling/contraints.py b/aria/modeling/contraints.py
deleted file mode 100644
index 107b010..0000000
--- a/aria/modeling/contraints.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class NodeTemplateConstraint(object):
-    """
-    Used to constrain requirements for node templates.
-
-    Must be serializable.
-    """
-
-    def matches(self, source_node_template, target_node_template):
-        """
-        Returns true is the target matches the constraint for the source.
-        """
-        raise NotImplementedError

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/aria/modeling/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/modeling/exceptions.py b/aria/modeling/exceptions.py
index 19fd942..d0e3e22 100644
--- a/aria/modeling/exceptions.py
+++ b/aria/modeling/exceptions.py
@@ -22,9 +22,9 @@ class ModelingException(AriaException):
     """
 
 
-class InputsException(ModelingException):
+class ParameterException(ModelingException):
     """
-    ARIA inputs exception.
+    ARIA parameter exception.
     """
     pass
 
@@ -41,19 +41,19 @@ class CannotEvaluateFunctionException(ModelingException):
     """
 
 
-class MissingRequiredInputsException(InputsException):
+class MissingRequiredParametersException(ParameterException):
     """
-    ARIA modeling exception: Required inputs have been omitted.
+    ARIA modeling exception: Required parameters have been omitted.
     """
 
 
-class InputsOfWrongTypeException(InputsException):
+class ParametersOfWrongTypeException(ParameterException):
     """
-    ARIA modeling exception: Inputs of the wrong types have been provided.
+    ARIA modeling exception: Parameters of the wrong types have been provided.
     """
 
 
-class UndeclaredInputsException(InputsException):
+class UndeclaredParametersException(ParameterException):
     """
-    ARIA modeling exception: Undeclared inputs have been provided.
+    ARIA modeling exception: Undeclared parameters have been provided.
     """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/aria/modeling/service_common.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_common.py b/aria/modeling/service_common.py
index ef19c8e..3bae490 100644
--- a/aria/modeling/service_common.py
+++ b/aria/modeling/service_common.py
@@ -211,6 +211,10 @@ class ParameterBase(TemplateModelMixin, caching.HasCachedMethods):
         """
         Wraps an arbitrary value as a parameter. The type will be guessed via introspection.
 
+        For primitive types, we will prefer their TOSCA aliases. See the `TOSCA Simple Profile v1.0
+        cos01 specification <http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01
+        /TOSCA-Simple-Profile-YAML-v1.0-cos01.html#_Toc373867862>`__
+
         :param name: Parameter name
         :type name: basestring
         :param value: Parameter value
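
The introspection rule described in the added docstring can be sketched
standalone (editor's illustration; this is not ARIA's implementation,
only the TOSCA primitive aliases the docstring refers to):

    # Guess a TOSCA type alias for a primitive Python value.
    def guess_type_name(value):
        if isinstance(value, basestring):
            return 'string'
        if isinstance(value, bool):  # must precede the integer check
            return 'boolean'
        if isinstance(value, (int, long)):
            return 'integer'
        if isinstance(value, float):
            return 'float'
        if isinstance(value, dict):
            return 'map'
        if isinstance(value, list):
            return 'list'
        return None

    assert guess_type_name(u'hello') == 'string'
    assert guess_type_name(True) == 'boolean'
    assert guess_type_name(3) == 'integer'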

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/aria/modeling/service_instance.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py
index 7058969..31f7212 100644
--- a/aria/modeling/service_instance.py
+++ b/aria/modeling/service_instance.py
@@ -1631,20 +1631,24 @@ class OperationBase(InstanceModelMixin):
     :vartype operation_template: :class:`OperationTemplate`
     :ivar description: Human-readable description
     :vartype description: string
-    :ivar plugin: Associated plugin
-    :vartype plugin: :class:`Plugin`
     :ivar relationship_edge: When true specified that the operation is on the relationship's
                              target edge instead of its source (only used by relationship
                              operations)
     :vartype relationship_edge: bool
     :ivar implementation: Implementation (interpreted by the plugin)
     :vartype implementation: basestring
-    :ivar configuration: Configuration (interpreted by the plugin)
-    :vartype configuration: {basestring, object}
     :ivar dependencies: Dependency strings (interpreted by the plugin)
     :vartype dependencies: [basestring]
     :ivar inputs: Parameters that can be used by this operation
     :vartype inputs: {basestring: :class:`Parameter`}
+    :ivar plugin: Associated plugin
+    :vartype plugin: :class:`Plugin`
+    :ivar configuration: Configuration (interpreted by the plugin)
+    :vartype configuration: {basestring, :class:`Parameter`}
+    :ivar function: Name of the operation function
+    :vartype function: basestring
+    :ivar arguments: Arguments to send to the operation function
+    :vartype arguments: {basestring: :class:`Parameter`}
     :ivar executor: Name of executor to run the operation with
     :vartype executor: basestring
     :ivar max_attempts: Maximum number of attempts allowed in case of failure
@@ -1726,34 +1730,41 @@ class OperationBase(InstanceModelMixin):
     def inputs(cls):
         return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
 
+    @declared_attr
+    def configuration(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='configuration', dict_key='name')
+
+    @declared_attr
+    def arguments(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='arguments', dict_key='name')
+
     # endregion
 
     description = Column(Text)
     relationship_edge = Column(Boolean)
     implementation = Column(Text)
-    configuration = Column(modeling_types.StrictDict(key_cls=basestring))
     dependencies = Column(modeling_types.StrictList(item_cls=basestring))
+    function = Column(Text)
     executor = Column(Text)
     max_attempts = Column(Integer)
     retry_interval = Column(Integer)
 
     def configure(self):
-        from . import models
-        # Note: for workflows (operations attached directly to the service) "interface" will be None
-        if (self.implementation is None) or (self.interface is None):
+        if (self.implementation is None) and (self.function is None):
             return
 
-        if self.plugin is None:
-            arguments = execution_plugin.instantiation.configure_operation(self)
+        if (self.plugin is None) and (self.interface is not None):
+            # Default to execution plugin ("interface" is None for workflow operations)
+            execution_plugin.instantiation.configure_operation(self)
         else:
             # In the future plugins may be able to add their own "configure_operation" hook that
-            # can validate the configuration and otherwise return specially derived arguments
-            arguments = self.configuration
+            # can validate the configuration and otherwise create specially derived arguments. For
+            # now, we just send all configuration parameters as arguments
+            utils.instantiate_dict(self, self.arguments, self.configuration)
 
-        # Note: the arguments will *override* operation inputs of the same name
-        if arguments:
-            for k, v in arguments.iteritems():
-                self.inputs[k] = models.Parameter.wrap(k, v)
+        # Send all inputs as extra arguments. Note that they will override existing arguments
+        # with the same names.
+        utils.instantiate_dict(self, self.arguments, self.inputs)
 
     @property
     def as_raw(self):
@@ -1762,17 +1773,18 @@ class OperationBase(InstanceModelMixin):
             ('description', self.description),
             ('implementation', self.implementation),
             ('dependencies', self.dependencies),
-            ('executor', self.executor),
-            ('max_attempts', self.max_attempts),
-            ('retry_interval', self.retry_interval),
             ('inputs', formatting.as_raw_dict(self.inputs))))
 
     def validate(self):
-        # TODO must be associated with interface or service
+        # TODO must be associated with either interface or service
         utils.validate_dict_values(self.inputs)
+        utils.validate_dict_values(self.configuration)
+        utils.validate_dict_values(self.arguments)
 
     def coerce_values(self, report_issues):
         utils.coerce_dict_values(self.inputs, report_issues)
+        utils.coerce_dict_values(self.configuration, report_issues)
+        utils.coerce_dict_values(self.arguments, report_issues)
 
     def dump(self):
         context = ConsumptionContext.get_thread_local()
@@ -1780,21 +1792,14 @@ class OperationBase(InstanceModelMixin):
         if self.description:
             console.puts(context.style.meta(self.description))
         with context.style.indent:
-            if self.plugin is not None:
-                console.puts('Plugin: {0}'.format(
-                    context.style.literal(self.plugin.name)))
             if self.implementation is not None:
                 console.puts('Implementation: {0}'.format(
                     context.style.literal(self.implementation)))
-            if self.configuration:
-                with context.style.indent:
-                    for k, v in self.configuration.iteritems():
-                        console.puts('{0}: {1}'.format(context.style.property(k),
-                                                       context.style.literal(v)))
             if self.dependencies:
                 console.puts(
                     'Dependencies: {0}'.format(
                         ', '.join((str(context.style.literal(v)) for v in self.dependencies))))
+            utils.dump_dict_values(self.inputs, 'Inputs')
             if self.executor is not None:
                 console.puts('Executor: {0}'.format(context.style.literal(self.executor)))
             if self.max_attempts is not None:
@@ -1802,7 +1807,13 @@ class OperationBase(InstanceModelMixin):
             if self.retry_interval is not None:
                 console.puts('Retry interval: {0}'.format(
                     context.style.literal(self.retry_interval)))
-            utils.dump_dict_values(self.inputs, 'Inputs')
+            if self.plugin is not None:
+                console.puts('Plugin: {0}'.format(
+                    context.style.literal(self.plugin.name)))
+            utils.dump_dict_values(self.configuration, 'Configuration')
+            if self.function is not None:
+                console.puts('Function: {0}'.format(context.style.literal(self.function)))
+            utils.dump_dict_values(self.arguments, 'Arguments')
 
 
 class ArtifactBase(InstanceModelMixin):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
index 3110248..b4a54ca 100644
--- a/aria/modeling/service_template.py
+++ b/aria/modeling/service_template.py
@@ -287,7 +287,7 @@ class ServiceTemplateBase(TemplateModelMixin):
                                  service_template=self)
         context.modeling.instance = service
 
-        service.inputs = utils.create_inputs(inputs or {}, self.inputs)
+        service.inputs = utils.create_parameters(inputs or {}, self.inputs)
         # TODO: now that we have inputs, we should scan properties and inputs and evaluate functions
 
         for plugin_specification in self.plugin_specifications.itervalues():
@@ -1762,20 +1762,22 @@ class OperationTemplateBase(TemplateModelMixin):
     :vartype name: basestring
     :ivar description: Human-readable description
     :vartype description: basestring
-    :ivar plugin_specification: Associated plugin
-    :vartype plugin_specification: :class:`PluginSpecification`
     :ivar relationship_edge: When true specified that the operation is on the relationship's
                              target edge instead of its source (only used by relationship
                              operations)
     :vartype relationship_edge: bool
     :ivar implementation: Implementation (interpreted by the plugin)
     :vartype implementation: basestring
-    :ivar configuration: Configuration (interpreted by the plugin)
-    :vartype configuration: {basestring, object}
     :ivar dependencies: Dependency strings (interpreted by the plugin)
     :vartype dependencies: [basestring]
     :ivar inputs: Parameters that can be used by this operation
     :vartype inputs: {basestring: :class:`Parameter`}
+    :ivar plugin_specification: Associated plugin
+    :vartype plugin_specification: :class:`PluginSpecification`
+    :ivar configuration: Configuration (interpreted by the plugin)
+    :vartype configuration: {basestring, :class:`Parameter`}
+    :ivar function: Name of the operation function
+    :vartype function: basestring
     :ivar executor: Name of executor to run the operation with
     :vartype executor: basestring
     :ivar max_attempts: Maximum number of attempts allowed in case of failure
@@ -1855,13 +1857,17 @@ class OperationTemplateBase(TemplateModelMixin):
     def inputs(cls):
         return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
 
+    @declared_attr
+    def configuration(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='configuration', dict_key='name')
+
     # endregion
 
     description = Column(Text)
     relationship_edge = Column(Boolean)
     implementation = Column(Text)
-    configuration = Column(modeling_types.StrictDict(key_cls=basestring))
     dependencies = Column(modeling_types.StrictList(item_cls=basestring))
+    function = Column(Text)
     executor = Column(Text)
     max_attempts = Column(Integer)
     retry_interval = Column(Integer)
@@ -1873,9 +1879,6 @@ class OperationTemplateBase(TemplateModelMixin):
             ('description', self.description),
             ('implementation', self.implementation),
             ('dependencies', self.dependencies),
-            ('executor', self.executor),
-            ('max_attempts', self.max_attempts),
-            ('retry_interval', self.retry_interval),
             ('inputs', formatting.as_raw_dict(self.inputs))))
 
     def instantiate(self, container):
@@ -1883,38 +1886,41 @@ class OperationTemplateBase(TemplateModelMixin):
         if self.plugin_specification:
             if self.plugin_specification.enabled:
                 plugin = self.plugin_specification.plugin
-                implementation = self.implementation if plugin is not None else None
+                function = self.function if plugin is not None else None
                 # "plugin" would be none if a match was not found. In that case, a validation error
                 # should already have been reported in ServiceTemplateBase.instantiate, so we will
                 # continue silently here
             else:
                 # If the plugin is disabled, the operation should be disabled, too
                 plugin = None
-                implementation = None
+                function = None
         else:
-            # Using the execution plugin
+            # Using the default execution plugin (plugin=None)
             plugin = None
-            implementation = self.implementation
+            function = self.function
 
         operation = models.Operation(name=self.name,
                                      description=deepcopy_with_locators(self.description),
                                      relationship_edge=self.relationship_edge,
-                                     plugin=plugin,
-                                     implementation=implementation,
-                                     configuration=self.configuration,
+                                     implementation=self.implementation,
                                      dependencies=self.dependencies,
                                      executor=self.executor,
+                                     plugin=plugin,
+                                     function=function,
                                      max_attempts=self.max_attempts,
                                      retry_interval=self.retry_interval,
                                      operation_template=self)
         utils.instantiate_dict(container, operation.inputs, self.inputs)
+        utils.instantiate_dict(container, operation.configuration, self.configuration)
         return operation
 
     def validate(self):
         utils.validate_dict_values(self.inputs)
+        utils.validate_dict_values(self.configuration)
 
     def coerce_values(self, report_issues):
         utils.coerce_dict_values(self.inputs, report_issues)
+        utils.coerce_dict_values(self.configuration, report_issues)
 
     def dump(self):
         context = ConsumptionContext.get_thread_local()
@@ -1922,20 +1928,13 @@ class OperationTemplateBase(TemplateModelMixin):
         if self.description:
             console.puts(context.style.meta(self.description))
         with context.style.indent:
-            if self.plugin_specification is not None:
-                console.puts('Plugin specification: {0}'.format(
-                    context.style.literal(self.plugin_specification.name)))
             if self.implementation is not None:
                 console.puts('Implementation: {0}'.format(
                     context.style.literal(self.implementation)))
-            if self.configuration:
-                with context.style.indent:
-                    for k, v in self.configuration.iteritems():
-                        console.puts('{0}: {1}'.format(context.style.property(k),
-                                                       context.style.literal(v)))
             if self.dependencies:
                 console.puts('Dependencies: {0}'.format(
                     ', '.join((str(context.style.literal(v)) for v in self.dependencies))))
+            utils.dump_dict_values(self.inputs, 'Inputs')
             if self.executor is not None:
                 console.puts('Executor: {0}'.format(context.style.literal(self.executor)))
             if self.max_attempts is not None:
@@ -1943,7 +1942,12 @@ class OperationTemplateBase(TemplateModelMixin):
             if self.retry_interval is not None:
                 console.puts('Retry interval: {0}'.format(
                     context.style.literal(self.retry_interval)))
-            utils.dump_dict_values(self.inputs, 'Inputs')
+            if self.plugin_specification is not None:
+                console.puts('Plugin specification: {0}'.format(
+                    context.style.literal(self.plugin_specification.name)))
+            utils.dump_dict_values(self.configuration, 'Configuration')
+            if self.function is not None:
+                console.puts('Function: {0}'.format(context.style.literal(self.function)))
 
 
 class ArtifactTemplateBase(TemplateModelMixin):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/aria/modeling/utils.py
----------------------------------------------------------------------
diff --git a/aria/modeling/utils.py b/aria/modeling/utils.py
index 0404fe4..6f4022c 100644
--- a/aria/modeling/utils.py
+++ b/aria/modeling/utils.py
@@ -21,6 +21,7 @@ from . import exceptions
 from ..parser.consumption import ConsumptionContext
 from ..utils.console import puts
 from ..utils.type import validate_value_type
+from ..utils.collections import OrderedDict
 
 
 class ModelJSONEncoder(JSONEncoder):
@@ -39,7 +40,7 @@ class ModelJSONEncoder(JSONEncoder):
 class NodeTemplateContainerHolder(object):
     """
     Wrapper that allows using a :class:`aria.modeling.models.NodeTemplate` model directly as the
-    ``container_holder`` argument for :func:`aria.modeling.functions.evaluate`.
+    ``container_holder`` input for :func:`aria.modeling.functions.evaluate`.
     """
 
     def __init__(self, node_template):
@@ -51,74 +52,84 @@ class NodeTemplateContainerHolder(object):
         return self.container.service_template
 
 
-def create_inputs(inputs, template_inputs):
+def create_parameters(parameters, declared_parameters):
     """
-    :param inputs: key-value dict
-    :param template_inputs: parameter name to parameter object dict
-    :return: dict of parameter name to Parameter models
+    Validates, merges, and wraps parameter values according to those declared by a type.
+
+    Exceptions will be raised for validation errors:
+
+    * :class:`aria.modeling.exceptions.UndeclaredParametersException` if a key in ``parameters``
+      does not exist in ``declared_parameters``
+    * :class:`aria.modeling.exceptions.MissingRequiredParametersException` if a key in
+      ``declared_parameters`` does not exist in ``parameters`` and also has no default value
+    * :class:`aria.modeling.exceptions.ParametersOfWrongTypeException` if a value in ``parameters``
+      does not match its type in ``declared_parameters``
+
+    :param parameters: Provided parameter values
+    :type parameters: {basestring, object}
+    :param declared_parameters: Declared parameters
+    :type declared_parameters: {basestring, :class:`aria.modeling.models.Parameter`}
+    :return: The merged parameters
+    :rtype: {basestring, :class:`aria.modeling.models.Parameter`}
     """
-    merged_inputs = _merge_and_validate_inputs(inputs, template_inputs)
+
+    merged_parameters = _merge_and_validate_parameters(parameters, declared_parameters)
 
     from . import models
-    input_models = []
-    for input_name, input_val in merged_inputs.iteritems():
+    parameters_models = OrderedDict()
+    for parameter_name, parameter_value in merged_parameters.iteritems():
         parameter = models.Parameter( # pylint: disable=unexpected-keyword-arg
-            name=input_name,
-            type_name=template_inputs[input_name].type_name,
-            description=template_inputs[input_name].description,
-            value=input_val)
-        input_models.append(parameter)
+            name=parameter_name,
+            type_name=declared_parameters[parameter_name].type_name,
+            description=declared_parameters[parameter_name].description,
+            value=parameter_value)
+        parameters_models[parameter.name] = parameter
 
-    return dict((inp.name, inp) for inp in input_models)
+    return parameters_models
 
 
-def _merge_and_validate_inputs(inputs, template_inputs):
-    """
-    :param inputs: key-value dict
-    :param template_inputs: parameter name to parameter object dict
-    :return:
-    """
-    merged_inputs = inputs.copy()
-
-    missing_inputs = []
-    wrong_type_inputs = {}
-    for input_name, input_template in template_inputs.iteritems():
-        if input_name not in inputs:
-            if input_template.value is not None:
-                merged_inputs[input_name] = input_template.value  # apply default value
+def _merge_and_validate_parameters(parameters, declared_parameters):
+    merged_parameters = OrderedDict(parameters)
+
+    missing_parameters = []
+    wrong_type_parameters = OrderedDict()
+    for parameter_name, declared_parameter in declared_parameters.iteritems():
+        if parameter_name not in parameters:
+            if declared_parameter.value is not None:
+                merged_parameters[parameter_name] = declared_parameter.value  # apply default value
             else:
-                missing_inputs.append(input_name)
+                missing_parameters.append(parameter_name)
         else:
-            # Validate input type
+            # Validate parameter type
             try:
-                validate_value_type(inputs[input_name], input_template.type_name)
+                validate_value_type(parameters[parameter_name], declared_parameter.type_name)
             except ValueError:
-                wrong_type_inputs[input_name] = input_template.type_name
+                wrong_type_parameters[parameter_name] = declared_parameter.type_name
             except RuntimeError:
                 # TODO: This error shouldn't be raised (or caught), but right now we lack support
                 # for custom data_types, which will raise this error. Skipping their validation.
                 pass
 
-    if missing_inputs:
-        raise exceptions.MissingRequiredInputsException(
-            'Required inputs {0} have not been specified - expected inputs: {1}'
-            .format(missing_inputs, template_inputs.keys()))
+    if missing_parameters:
+        raise exceptions.MissingRequiredParametersException(
+            'Required parameters {0} have not been specified; Expected parameters: {1}'
+            .format(missing_parameters, declared_parameters.keys()))
 
-    if wrong_type_inputs:
+    if wrong_type_parameters:
         error_message = StringIO()
-        for param_name, param_type in wrong_type_inputs.iteritems():
-            error_message.write('Input "{0}" must be of type {1}{2}'
+        for param_name, param_type in wrong_type_parameters.iteritems():
+            error_message.write('Parameter "{0}" must be of type {1}{2}'
                                 .format(param_name, param_type, os.linesep))
-        raise exceptions.InputsOfWrongTypeException(error_message.getvalue())
+        raise exceptions.ParametersOfWrongTypeException(error_message.getvalue())
 
-    undeclared_inputs = [input_name for input_name in inputs.keys()
-                         if input_name not in template_inputs]
-    if undeclared_inputs:
-        raise exceptions.UndeclaredInputsException(
-            'Undeclared inputs have been specified: {0}; Expected inputs: {1}'
-            .format(undeclared_inputs, template_inputs.keys()))
+    undeclared_parameters = [parameter_name for parameter_name in parameters.keys()
+                             if parameter_name not in declared_parameters]
+    if undeclared_parameters:
+        raise exceptions.UndeclaredParametersException(
+            'Undeclared parameters have been specified: {0}; Expected parameters: {1}'
+            .format(undeclared_parameters, declared_parameters.keys()))
 
-    return merged_inputs
+    return merged_parameters
 
 
 def coerce_dict_values(the_dict, report_issues=False):
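
The merge rules implemented above condense into a standalone sketch
(editor's illustration; DeclaredParameter is a stand-in for
aria.modeling.models.Parameter, and per-type validation is elided):

    from collections import OrderedDict, namedtuple

    DeclaredParameter = namedtuple('DeclaredParameter', 'type_name value')

    def merge_parameters(provided, declared):
        # A declared parameter with no default value is required
        missing = [name for name, decl in declared.iteritems()
                   if name not in provided and decl.value is None]
        undeclared = [name for name in provided
                      if name not in declared]
        if missing or undeclared:
            raise ValueError('missing: {0}; undeclared: {1}'
                             .format(missing, undeclared))
        merged = OrderedDict(provided)
        for name, decl in declared.iteritems():
            merged.setdefault(name, decl.value)  # apply default value
        return merged

    declared = {'port': DeclaredParameter('integer', 8080)}
    assert merge_parameters({}, declared) == {'port': 8080}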

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/aria/orchestrator/execution_plugin/instantiation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/instantiation.py b/aria/orchestrator/execution_plugin/instantiation.py
index c09434e..26c3913 100644
--- a/aria/orchestrator/execution_plugin/instantiation.py
+++ b/aria/orchestrator/execution_plugin/instantiation.py
@@ -16,19 +16,13 @@
 # TODO: this module will eventually be moved to a new "aria.instantiation" package
 
 from ...utils.type import full_type_name
-from ...utils.collections import OrderedDict
+from ...utils.formatting import safe_repr
 from ...parser import validation
 from ...parser.consumption import ConsumptionContext
+from ...modeling.functions import Function
 
 
 def configure_operation(operation):
-    configuration = OrderedDict(operation.configuration) if operation.configuration else {}
-
-    arguments = OrderedDict()
-    arguments['script_path'] = operation.implementation
-    arguments['process'] = _get_process(configuration.pop('process')) \
-        if 'process' in configuration else dict()
-
     host = None
     interface = operation.interface
     if interface.node is not None:
@@ -36,87 +30,118 @@ def configure_operation(operation):
     elif interface.relationship is not None:
         if operation.relationship_edge is True:
             host = interface.relationship.target_node.host
-        else: # either False or None
+        else: # either False or None (None meaning that edge was not specified)
             host = interface.relationship.source_node.host
 
+    _configure_common(operation)
     if host is None:
         _configure_local(operation)
     else:
-        _configure_remote(operation, configuration, arguments)
+        _configure_remote(operation)
+
+    # Any remaining un-handled configuration parameters will become extra arguments, available as
+    # kwargs in either "run_script_locally" or "run_script_with_ssh"
+    for key, value in operation.configuration.iteritems():
+        if key not in ('process', 'ssh'):
+            operation.arguments[key] = value.instantiate()
 
-    # Any remaining unhandled configuration values will become extra arguments, available as kwargs
-    # in either "run_script_locally" or "run_script_with_ssh"
-    arguments.update(configuration)
 
-    return arguments
+def _configure_common(operation):
+    """
+    Local and remote operations.
+    """
+
+    from ...modeling.models import Parameter
+    operation.arguments['script_path'] = Parameter.wrap('script_path', operation.implementation,
+                                                        'Relative path to the executable file.')
+    operation.arguments['process'] = Parameter.wrap('process', _get_process(operation),
+                                                    'Sub-process configuration.')
+
 
 def _configure_local(operation):
     """
     Local operation.
     """
+
     from . import operations
-    operation.implementation = '{0}.{1}'.format(operations.__name__,
-                                                operations.run_script_locally.__name__)
+    operation.function = '{0}.{1}'.format(operations.__name__,
+                                          operations.run_script_locally.__name__)
 
 
-def _configure_remote(operation, configuration, arguments):
+def _configure_remote(operation):
     """
     Remote SSH operation via Fabric.
     """
+
+    from ...modeling.models import Parameter
+    from . import operations
+
+    ssh = _get_ssh(operation)
+
+    # Defaults
     # TODO: find a way to configure these generally in the service template
     default_user = ''
     default_password = ''
-
-    ssh = _get_ssh(configuration.pop('ssh')) if 'ssh' in configuration else {}
     if 'user' not in ssh:
         ssh['user'] = default_user
     if ('password' not in ssh) and ('key' not in ssh) and ('key_filename' not in ssh):
         ssh['password'] = default_password
 
-    arguments['use_sudo'] = ssh.get('use_sudo', False)
-    arguments['hide_output'] = ssh.get('hide_output', [])
-    arguments['fabric_env'] = {}
+    operation.arguments['use_sudo'] = Parameter.wrap('use_sudo', ssh.get('use_sudo', False),
+                                                     'Whether to execute with sudo.')
+
+    operation.arguments['hide_output'] = Parameter.wrap('hide_output', ssh.get('hide_output', []),
+                                                        'Hide output of these Fabric groups.')
+
+    fabric_env = {}
     if 'warn_only' in ssh:
-        arguments['fabric_env']['warn_only'] = ssh['warn_only']
-    arguments['fabric_env']['user'] = ssh.get('user')
-    arguments['fabric_env']['password'] = ssh.get('password')
-    arguments['fabric_env']['key'] = ssh.get('key')
-    arguments['fabric_env']['key_filename'] = ssh.get('key_filename')
+        fabric_env['warn_only'] = ssh['warn_only']
+    fabric_env['user'] = ssh.get('user')
+    fabric_env['password'] = ssh.get('password')
+    fabric_env['key'] = ssh.get('key')
+    fabric_env['key_filename'] = ssh.get('key_filename')
     if 'address' in ssh:
-        arguments['fabric_env']['host_string'] = ssh['address']
+        fabric_env['host_string'] = ssh['address']
 
-    if arguments['fabric_env'].get('user') is None:
+    # Make sure we have a user
+    if fabric_env.get('user') is None:
         context = ConsumptionContext.get_thread_local()
         context.validation.report('must configure "ssh.user" for "{0}"'
                                   .format(operation.implementation),
                                   level=validation.Issue.BETWEEN_TYPES)
-    if (arguments['fabric_env'].get('password') is None) and \
-        (arguments['fabric_env'].get('key') is None) and \
-        (arguments['fabric_env'].get('key_filename') is None):
+
+    # Make sure we have an authentication value
+    if (fabric_env.get('password') is None) and \
+        (fabric_env.get('key') is None) and \
+        (fabric_env.get('key_filename') is None):
         context = ConsumptionContext.get_thread_local()
         context.validation.report('must configure "ssh.password", "ssh.key", or "ssh.key_filename" '
                                   'for "{0}"'
                                   .format(operation.implementation),
                                   level=validation.Issue.BETWEEN_TYPES)
 
-    from . import operations
-    operation.implementation = '{0}.{1}'.format(operations.__name__,
-                                                operations.run_script_with_ssh.__name__)
+    operation.arguments['fabric_env'] = Parameter.wrap('fabric_env', fabric_env,
+                                                       'Fabric configuration.')
 
+    operation.function = '{0}.{1}'.format(operations.__name__,
+                                          operations.run_script_with_ssh.__name__)
 
-def _get_process(value):
+
+def _get_process(operation):
+    value = operation.configuration.get('process')._value \
+        if 'process' in operation.configuration else None
     if value is None:
-        return None
+        return {}
     _validate_type(value, dict, 'process')
     for k, v in value.iteritems():
         if k == 'eval_python':
-            value[k] = _str_to_bool(v, 'process.eval_python')
+            value[k] = _coerce_bool(v, 'process.eval_python')
         elif k == 'cwd':
             _validate_type(v, basestring, 'process.cwd')
         elif k == 'command_prefix':
             _validate_type(v, basestring, 'process.command_prefix')
         elif k == 'args':
-            value[k] = _dict_to_list(v, 'process.args')
+            value[k] = _dict_to_list_of_strings(v, 'process.args')
         elif k == 'env':
             _validate_type(v, dict, 'process.env')
         else:
@@ -126,17 +151,19 @@ def _get_process(value):
     return value
 
 
-def _get_ssh(value):
+def _get_ssh(operation):
+    value = operation.configuration.get('ssh')._value \
+        if 'ssh' in operation.configuration else None
     if value is None:
         return {}
     _validate_type(value, dict, 'ssh')
     for k, v in value.iteritems():
         if k == 'use_sudo':
-            value[k] = _str_to_bool(v, 'ssh.use_sudo')
+            value[k] = _coerce_bool(v, 'ssh.use_sudo')
         elif k == 'hide_output':
-            value[k] = _dict_to_list(v, 'ssh.hide_output')
+            value[k] = _dict_to_list_of_strings(v, 'ssh.hide_output')
         elif k == 'warn_only':
-            value[k] = _str_to_bool(v, 'ssh.warn_only')
+            value[k] = _coerce_bool(v, 'ssh.warn_only')
         elif k == 'user':
             _validate_type(v, basestring, 'ssh.user')
         elif k == 'password':
@@ -155,16 +182,20 @@ def _get_ssh(value):
 
 
 def _validate_type(value, the_type, name):
+    if isinstance(value, Function):
+        return
     if not isinstance(value, the_type):
         context = ConsumptionContext.get_thread_local()
-        context.validation.report('"{0}" configuration is not a {1}'
-                                  .format(name, full_type_name(the_type)),
+        context.validation.report('"{0}" configuration is not a {1}: {2}'
+                                  .format(name, full_type_name(the_type), safe_repr(value)),
                                   level=validation.Issue.BETWEEN_TYPES)
 
 
-def _str_to_bool(value, name):
+def _coerce_bool(value, name):
     if value is None:
         return None
+    if isinstance(value, bool):
+        return value
     _validate_type(value, basestring, name)
     if value == 'true':
         return True
@@ -173,19 +204,15 @@ def _str_to_bool(value, name):
     else:
         context = ConsumptionContext.get_thread_local()
         context.validation.report('"{0}" configuration is not "true" or "false": {1}'
-                                  .format(name, repr(value)),
+                                  .format(name, safe_repr(value)),
                                   level=validation.Issue.BETWEEN_TYPES)
 
 
-def _dict_to_list(the_dict, name):
+def _dict_to_list_of_strings(the_dict, name):
     _validate_type(the_dict, dict, name)
     value = []
     for k in sorted(the_dict):
         v = the_dict[k]
-        if not isinstance(v, basestring):
-            context = ConsumptionContext.get_thread_local()
-            context.validation.report('"{0}.{1}" configuration is not a string: {2}'
-                                      .format(name, k, repr(v)),
-                                      level=validation.Issue.BETWEEN_TYPES)
+        _validate_type(v, basestring, '{0}.{1}'.format(name, k))
         value.append(v)
     return value
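
For readers skimming the diff: the two renamed helpers above now accept native booleans and coerce dict-shaped lists. Below is a minimal standalone sketch of their semantics only -- it is not the module's code, and it omits the validation reporting through ConsumptionContext and the pass-through of intrinsic Function values that the real helpers perform:

    # Hedged sketch of the coercion semantics, simplified from the diff above.
    def coerce_bool(value):
        # Native booleans pass through; 'true'/'false' strings are converted.
        if isinstance(value, bool):
            return value
        return {'true': True, 'false': False}.get(value)

    def dict_to_list_of_strings(the_dict):
        # {'02': 'b', '01': 'a'} -> ['a', 'b']; keys only impose ordering.
        return [the_dict[k] for k in sorted(the_dict)]

    assert coerce_bool('false') is False
    assert coerce_bool(True) is True
    assert dict_to_list_of_strings({'02': '-v', '01': '--debug'}) == ['--debug', '-v']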

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/aria/orchestrator/workflow_runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflow_runner.py b/aria/orchestrator/workflow_runner.py
index 8f25cce..0c6321f 100644
--- a/aria/orchestrator/workflow_runner.py
+++ b/aria/orchestrator/workflow_runner.py
@@ -119,7 +119,7 @@ class WorkflowRunner(object):
         else:
             workflow_inputs = self.service.workflows[self._workflow_name].inputs
 
-        execution.inputs = modeling_utils.create_inputs(inputs, workflow_inputs)
+        execution.inputs = modeling_utils.create_parameters(inputs, workflow_inputs)
         # TODO: these two following calls should execute atomically
         self._validate_no_active_executions(execution)
         self._model_storage.execution.put(execution)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/aria/orchestrator/workflows/api/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task.py b/aria/orchestrator/workflows/api/task.py
index cb79eb3..aa6ac45 100644
--- a/aria/orchestrator/workflows/api/task.py
+++ b/aria/orchestrator/workflows/api/task.py
@@ -99,8 +99,8 @@ class OperationTask(BaseTask):
 
         operation = self.actor.interfaces[self.interface_name].operations[self.operation_name]
         self.plugin = operation.plugin
-        self.inputs = modeling_utils.create_inputs(inputs or {}, operation.inputs)
-        self.implementation = operation.implementation
+        self.inputs = modeling_utils.create_parameters(inputs or {}, operation.arguments)
+        self.implementation = operation.function
 
     def __repr__(self):
         return self.name

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py
index b3dfb3c..0d6eb11 100644
--- a/aria/orchestrator/workflows/core/task.py
+++ b/aria/orchestrator/workflows/core/task.py
@@ -148,7 +148,6 @@ class OperationTask(BaseTask):
             plugin=api_task.plugin,
             implementation=api_task.implementation,
             inputs=api_task.inputs
-
         )
         self._workflow_context.model.task.put(task_model)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml b/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
index 0c5e77f..abac03b 100644
--- a/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
+++ b/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
@@ -52,18 +52,10 @@ policy_types:
       should be inherited and extended with additional properties.
     derived_from: tosca.policies.Root
     properties:
-      implementation:
+      function:
         description: >-
-          The interpretation of the implementation string depends on the orchestrator. In ARIA it is
-          the full path to a Python @workflow function that generates a task graph based on the
-          service topology.
+          The interpretation of the function string depends on the orchestrator. In ARIA it is the
+          full path to a Python @workflow function that generates a task graph based on the service
+          topology.
         type: string
         required: true
-      dependencies:
-        description: >-
-          The optional ordered list of one or more dependent or secondary implementation artifact
-          name which are referenced by the primary implementation artifact (e.g., a library the
-          script installs or a secondary script).
-        type: list
-        entry_schema: string
-        required: false
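
The renamed property is consumed by create_workflow_operation_template_model in the modeling diff later in this message: 'function' becomes the workflow function path, 'dependencies' is handled separately, and every other policy property lands in the operation's configuration. A hedged sketch of that mapping, using made-up property values (the real values come from parsed TOSCA policy properties):

    # Illustrative only; mirrors the dispatch in the modeling diff below.
    properties = {'function': 'my_workflows.scale_out',  # hypothetical path
                  'threshold': 5}
    function = None
    configuration = {}
    for name, value in properties.items():
        if name == 'function':
            function = value
        else:
            configuration[name] = value
    assert function == 'my_workflows.scale_out'
    assert configuration == {'threshold': 5}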

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/extensions/aria_extension_tosca/simple_v1_0/assignments.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/assignments.py b/extensions/aria_extension_tosca/simple_v1_0/assignments.py
index d929ce0..79f6377 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/assignments.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/assignments.py
@@ -23,7 +23,7 @@ from aria.parser.presentation import (AsIsPresentation, has_fields, allow_unknow
 
 from .filters import NodeFilter
 from .misc import Description, OperationImplementation
-from .modeling.properties import get_assigned_and_defined_property_values
+from .modeling.parameters import get_assigned_and_defined_parameter_values
 from .presentation.extensible import ExtensiblePresentation
 from .presentation.field_validators import (node_template_or_type_validator,
                                             relationship_template_or_type_validator,
@@ -428,7 +428,7 @@ class ArtifactAssignment(ExtensiblePresentation):
 
     @cachedmethod
     def _get_property_values(self, context):
-        return FrozenDict(get_assigned_and_defined_property_values(context, self))
+        return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'property'))
 
     @cachedmethod
     def _validate(self, context):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
index 99389e4..0b04fdc 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
@@ -26,14 +26,17 @@ import re
 from types import FunctionType
 from datetime import datetime
 
+from ruamel import yaml
+
 from aria.parser.validation import Issue
-from aria.utils.collections import StrictDict
+from aria.utils.collections import (StrictDict, OrderedDict)
 from aria.modeling.models import (Type, ServiceTemplate, NodeTemplate,
                                   RequirementTemplate, RelationshipTemplate, CapabilityTemplate,
                                   GroupTemplate, PolicyTemplate, SubstitutionTemplate,
                                   SubstitutionTemplateMapping, InterfaceTemplate, OperationTemplate,
                                   ArtifactTemplate, Metadata, Parameter, PluginSpecification)
 
+from .parameters import coerce_parameter_value
 from .constraints import (Equal, GreaterThan, GreaterOrEqual, LessThan, LessOrEqual, InRange,
                           ValidValues, Length, MinLength, MaxLength, Pattern)
 from ..data_types import coerce_value
@@ -375,7 +378,7 @@ def create_operation_template_model(context, service_template, operation):
     implementation = operation.implementation
     if implementation is not None:
         primary = implementation.primary
-        parse_implementation_string(context, service_template, operation, model, primary)
+        set_implementation(context, service_template, operation, model, primary)
         relationship_edge = operation._get_extensions(context).get('relationship_edge')
         if relationship_edge is not None:
             if relationship_edge == 'source':
@@ -384,18 +387,37 @@ def create_operation_template_model(context, service_template, operation):
                 model.relationship_edge = True
 
         dependencies = implementation.dependencies
+        configuration = OrderedDict()
         if dependencies:
             for dependency in dependencies:
                 key, value = split_prefix(dependency)
                 if key is not None:
-                    if model.configuration is None:
-                        model.configuration = {}
-                    set_nested(model.configuration, key.split('.'), value)
+                    # Parse as YAML
+                    try:
+                        value = yaml.load(value)
+                    except yaml.parser.MarkedYAMLError as e:
+                        context.validation.report(
+                            'YAML parser error "{0}" in operation configuration: {1}'
+                            .format(e.problem, value),
+                            locator=implementation._locator,
+                            level=Issue.FIELD)
+                        continue
+
+                    # Coerce to intrinsic functions, if there are any
+                    value = coerce_parameter_value(context, implementation, None, value).value
+
+                    # Support dot-notation nesting
+                    set_nested(configuration, key.split('.'), value)
                 else:
                     if model.dependencies is None:
                         model.dependencies = []
                     model.dependencies.append(dependency)
 
+        # Convert configuration to Parameter models
+        for key, value in configuration.iteritems():
+            model.configuration[key] = Parameter.wrap(key, value,
+                                                      description='Operation configuration.')
+
     inputs = operation.inputs
     if inputs:
         for input_name, the_input in inputs.iteritems():
@@ -490,15 +512,15 @@ def create_workflow_operation_template_model(context, service_template, policy):
 
     properties = policy._get_property_values(context)
     for prop_name, prop in properties.iteritems():
-        if prop_name == 'implementation':
-            parse_implementation_string(context, service_template, policy, model, prop.value)
+        if prop_name == 'function':
+            model.function = prop.value
         elif prop_name == 'dependencies':
             model.dependencies = prop.value
         else:
-            model.inputs[prop_name] = Parameter(name=prop_name, # pylint: disable=unexpected-keyword-arg
-                                                type_name=prop.type,
-                                                value=prop.value,
-                                                description=prop.description)
+            model.configuration[prop_name] = Parameter(name=prop_name, # pylint: disable=unexpected-keyword-arg
+                                                       type_name=prop.type,
+                                                       value=prop.value,
+                                                       description=prop.description)
 
     return model
 
@@ -639,12 +661,12 @@ def create_constraint(context, node_filter, constraint_clause, property_name, ca
 
 def split_prefix(string):
     """
-    Splits the prefix on the first unescaped ">".
+    Splits the prefix on the first non-escaped ">".
     """
 
-    split = IMPLEMENTATION_PREFIX_REGEX.split(string, 2)
+    split = IMPLEMENTATION_PREFIX_REGEX.split(string, 1)
     if len(split) < 2:
-        return None, string
+        return None, None
     return split[0].strip(), split[1].lstrip()
 
 
@@ -671,13 +693,18 @@ def set_nested(the_dict, keys, value):
         set_nested(the_dict[key], keys, value)
 
 
-def parse_implementation_string(context, service_template, presentation, model, implementation):
-    plugin_name, model.implementation = split_prefix(implementation)
-    if plugin_name is not None:
-        model.plugin_specification = service_template.plugin_specifications.get(plugin_name)
+def set_implementation(context, service_template, presentation, model, primary):
+    prefix, postfix = split_prefix(primary)
+    if prefix:
+        # Special ARIA prefix
+        model.plugin_specification = service_template.plugin_specifications.get(prefix)
+        model.function = postfix
         if model.plugin_specification is None:
             context.validation.report(
                 'no policy for plugin "{0}" specified in operation implementation: {1}'
-                .format(plugin_name, implementation),
+                .format(prefix, primary),
                 locator=presentation._get_child_locator('properties', 'implementation'),
                 level=Issue.BETWEEN_TYPES)
+    else:
+        # Standard TOSCA artifact
+        model.implementation = primary
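
To make the new dependency-prefix mechanism concrete: each "key > value" entry in an implementation's dependencies list is parsed as YAML, coerced for intrinsic functions, and then nested into the configuration dict via dot notation. A rough standalone sketch of the nesting step follows, simplified from the set_nested() shown in the hunk above under the assumption that intermediate keys are absent or already dicts; the YAML parsing and function coercion steps are omitted:

    # Hedged sketch only -- simplified from set_nested() in the diff above.
    def set_nested(the_dict, keys, value):
        key = keys.pop(0)
        if not keys:
            the_dict[key] = value
        else:
            the_dict.setdefault(key, {})
            set_nested(the_dict[key], keys, value)

    configuration = {}
    set_nested(configuration, 'process.env.PATH'.split('.'), '/opt/scripts')
    set_nested(configuration, 'ssh.use_sudo'.split('.'), True)
    assert configuration == {'process': {'env': {'PATH': '/opt/scripts'}},
                             'ssh': {'use_sudo': True}}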

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py
index 4f61ef5..dd9eeb4 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py
@@ -15,11 +15,11 @@
 
 from aria.utils.collections import OrderedDict
 
+
 #
 # NodeType, NodeTemplate
 #
 
-
 def get_inherited_artifact_definitions(context, presentation, for_presentation=None):
 
     if hasattr(presentation, '_get_type'):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py
index 6df7177..a90a9fc 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py
@@ -16,8 +16,9 @@
 from aria.utils.collections import deepcopy_with_locators, OrderedDict
 from aria.parser.validation import Issue
 
-from .properties import (convert_property_definitions_to_values, merge_raw_property_definitions,
-                         get_assigned_and_defined_property_values)
+from .parameters import (convert_parameter_definitions_to_values, merge_raw_parameter_definitions,
+                         get_assigned_and_defined_parameter_values)
+
 
 #
 # CapabilityType
@@ -38,6 +39,7 @@ def get_inherited_valid_source_types(context, presentation):
 
     return valid_source_types
 
+
 #
 # NodeType
 #
@@ -92,6 +94,7 @@ def get_inherited_capability_definitions(context, presentation, for_presentation
 
     return capability_definitions
 
+
 #
 # NodeTemplate
 #
@@ -127,8 +130,9 @@ def get_template_capabilities(context, presentation):
                 capability_assignment = capability_assignments[capability_name]
 
                 # Assign properties
-                values = get_assigned_and_defined_property_values(context,
-                                                                  our_capability_assignment)
+                values = get_assigned_and_defined_parameter_values(context,
+                                                                   our_capability_assignment,
+                                                                   'property')
                 if values:
                     capability_assignment._raw['properties'] = values
             else:
@@ -139,6 +143,7 @@ def get_template_capabilities(context, presentation):
 
     return capability_assignments
 
+
 #
 # Utils
 #
@@ -150,24 +155,25 @@ def convert_capability_from_definition_to_assignment(context, presentation, cont
 
     properties = presentation.properties
     if properties is not None:
-        raw['properties'] = convert_property_definitions_to_values(context, properties)
+        raw['properties'] = convert_parameter_definitions_to_values(context, properties)
 
     # TODO attributes
 
     return CapabilityAssignment(name=presentation._name, raw=raw, container=container)
 
+
 def merge_capability_definition_from_type(context, presentation, capability_definition):
     raw_properties = OrderedDict()
 
     # Merge properties from type
     the_type = capability_definition._get_type(context)
     type_property_defintions = the_type._get_properties(context)
-    merge_raw_property_definitions(context, presentation, raw_properties, type_property_defintions,
-                                   'properties')
+    merge_raw_parameter_definitions(context, presentation, raw_properties, type_property_defintions,
+                                    'properties')
 
     # Merge our properties
-    merge_raw_property_definitions(context, presentation, raw_properties,
-                                   capability_definition.properties, 'properties')
+    merge_raw_parameter_definitions(context, presentation, raw_properties,
+                                    capability_definition.properties, 'properties')
 
     if raw_properties:
         capability_definition._raw['properties'] = raw_properties

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/extensions/aria_extension_tosca/simple_v1_0/modeling/constraints.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/constraints.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/constraints.py
index 7c99eab..9a30cc1 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/constraints.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/constraints.py
@@ -15,7 +15,7 @@
 
 import re
 
-from aria.modeling.contraints import NodeTemplateConstraint
+from aria.modeling.constraints import NodeTemplateConstraint
 from aria.modeling.utils import NodeTemplateContainerHolder
 from aria.modeling.functions import evaluate
 from aria.parser import implements_specification

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py
index 3952785..c0d79e5 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py
@@ -26,6 +26,7 @@ from aria.parser.validation import Issue
 from .functions import get_function
 from ..presentation.types import get_type_by_full_or_shorthand_name
 
+
 #
 # DataType
 #
@@ -50,6 +51,7 @@ def get_inherited_constraints(context, presentation):
 
     return constraints
 
+
 def coerce_data_type_value(context, presentation, data_type, entry_schema, constraints, value, # pylint: disable=unused-argument
                            aspect):
     """
@@ -121,6 +123,7 @@ def coerce_data_type_value(context, presentation, data_type, entry_schema, const
 
     return value
 
+
 def validate_data_type_name(context, presentation):
     """
     Makes sure the complex data type's name is not that of a built-in type.
@@ -132,6 +135,7 @@ def validate_data_type_name(context, presentation):
                                   % safe_repr(name),
                                   locator=presentation._locator, level=Issue.BETWEEN_TYPES)
 
+
 #
 # PropertyDefinition, AttributeDefinition, EntrySchema, DataType
 #
@@ -172,6 +176,7 @@ def get_data_type(context, presentation, field_name, allow_none=False):
     # Try primitive data type
     return get_primitive_data_type(type_name)
 
+
 #
 # PropertyDefinition, EntrySchema
 #
@@ -195,6 +200,7 @@ def get_property_constraints(context, presentation):
 
     return constraints
 
+
 #
 # ConstraintClause
 #
@@ -310,6 +316,7 @@ def apply_constraint_to_value(context, presentation, constraint_clause, value):
 
     return True
 
+
 #
 # Repository
 #
@@ -326,6 +333,7 @@ def get_data_type_value(context, presentation, field_name, type_name):
                                   locator=presentation._locator, level=Issue.BETWEEN_TYPES)
     return None
 
+
 #
 # Utils
 #
@@ -345,6 +353,7 @@ PRIMITIVE_DATA_TYPES = {
     'boolean': bool,
     'null': None.__class__}
 
+
 @implements_specification('3.2.1-3', 'tosca-simple-1.0')
 def get_primitive_data_type(type_name):
     """
@@ -358,6 +367,7 @@ def get_primitive_data_type(type_name):
 
     return PRIMITIVE_DATA_TYPES.get(type_name)
 
+
 def get_data_type_name(the_type):
     """
     Returns the name of the type, whether it's a DataType, a primitive type, or another class.
@@ -365,6 +375,7 @@ def get_data_type_name(the_type):
 
     return the_type._name if hasattr(the_type, '_name') else full_type_name(the_type)
 
+
 def coerce_value(context, presentation, the_type, entry_schema, constraints, value, aspect=None): # pylint: disable=too-many-return-statements
     """
     Returns the value after it's coerced to its type, reporting validation errors if it cannot be
@@ -410,6 +421,7 @@ def coerce_value(context, presentation, the_type, entry_schema, constraints, val
     # Coerce to primitive type
     return coerce_to_primitive(context, presentation, the_type, constraints, value, aspect)
 
+
 def coerce_to_primitive(context, presentation, primitive_type, constraints, value, aspect=None):
     """
     Returns the value after it's coerced to a primitive type, translating exceptions to validation
@@ -435,6 +447,7 @@ def coerce_to_primitive(context, presentation, primitive_type, constraints, valu
 
     return value
 
+
 def coerce_to_data_type_class(context, presentation, cls, entry_schema, constraints, value,
                               aspect=None):
     """
@@ -463,6 +476,7 @@ def coerce_to_data_type_class(context, presentation, cls, entry_schema, constrai
 
     return value
 
+
 def apply_constraints_to_value(context, presentation, constraints, value):
     """
     Applies all constraints to the value. If the value conforms, returns the value. If it does not
@@ -478,6 +492,7 @@ def apply_constraints_to_value(context, presentation, constraints, value):
             value = None
     return value
 
+
 def get_container_data_type(presentation):
     if presentation is None:
         return None
@@ -485,6 +500,7 @@ def get_container_data_type(presentation):
         return presentation
     return get_container_data_type(presentation._container)
 
+
 def report_issue_for_bad_format(context, presentation, the_type, value, aspect, e):
     if aspect == 'default':
         aspect = '"default" value'

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/extensions/aria_extension_tosca/simple_v1_0/modeling/interfaces.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/interfaces.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/interfaces.py
index 3e6aa6f..e04ac4a 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/interfaces.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/interfaces.py
@@ -13,11 +13,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from aria.utils.collections import merge, deepcopy_with_locators, OrderedDict
+from aria.utils.collections import (merge, deepcopy_with_locators, OrderedDict)
 from aria.parser.presentation import get_locator
 from aria.parser.validation import Issue
 
-from .properties import (coerce_property_value, convert_property_definitions_to_values)
+from .parameters import (coerce_parameter_value, convert_parameter_definitions_to_values)
+
 
 #
 # InterfaceType
@@ -45,6 +46,7 @@ def get_inherited_operations(context, presentation):
 
     return operations
 
+
 #
 # InterfaceDefinition
 #
@@ -73,6 +75,7 @@ def get_and_override_input_definitions_from_type(context, presentation):
 
     return inputs
 
+
 def get_and_override_operation_definitions_from_type(context, presentation):
     """
     Returns our operation definitions added on top of those of the interface type, if specified.
@@ -96,6 +99,7 @@ def get_and_override_operation_definitions_from_type(context, presentation):
 
     return operations
 
+
 #
 # NodeType, RelationshipType, GroupType
 #
@@ -124,6 +128,7 @@ def get_inherited_interface_definitions(context, presentation, type_name, for_pr
 
     return interfaces
 
+
 #
 # NodeTemplate, RelationshipTemplate, GroupTemplate
 #
@@ -186,6 +191,7 @@ def get_template_interfaces(context, presentation, type_name):
 
     return template_interfaces
 
+
 #
 # Utils
 #
@@ -200,13 +206,14 @@ def convert_interface_definition_from_type_to_template(context, presentation, co
     raw = convert_interface_definition_from_type_to_raw_template(context, presentation)
     return InterfaceAssignment(name=presentation._name, raw=raw, container=container)
 
+
 def convert_interface_definition_from_type_to_raw_template(context, presentation): # pylint: disable=invalid-name
     raw = OrderedDict()
 
     # Copy default values for inputs
     inputs = presentation._get_inputs(context)
     if inputs is not None:
-        raw['inputs'] = convert_property_definitions_to_values(context, inputs)
+        raw['inputs'] = convert_parameter_definitions_to_values(context, inputs)
 
     # Copy operations
     operations = presentation._get_operations(context)
@@ -221,11 +228,12 @@ def convert_interface_definition_from_type_to_raw_template(context, presentation
                 raw[operation_name]['implementation'] = deepcopy_with_locators(implementation._raw)
             inputs = operation.inputs
             if inputs is not None:
-                raw[operation_name]['inputs'] = convert_property_definitions_to_values(context,
-                                                                                       inputs)
+                raw[operation_name]['inputs'] = convert_parameter_definitions_to_values(context,
+                                                                                        inputs)
 
     return raw
 
+
 def convert_requirement_interface_definitions_from_type_to_raw_template(context, raw_requirement, # pylint: disable=invalid-name
                                                                         interface_definitions):
     if not interface_definitions:
@@ -240,6 +248,7 @@ def convert_requirement_interface_definitions_from_type_to_raw_template(context,
         else:
             raw_requirement['interfaces'][interface_name] = raw_interface
 
+
 def merge_interface(context, presentation, interface_assignment, our_interface_assignment,
                     interface_definition, interface_name):
     # Assign/merge interface inputs
@@ -282,6 +291,7 @@ def merge_interface(context, presentation, interface_assignment, our_interface_a
                               our_input_assignments, input_definitions, interface_name,
                               operation_name, presentation)
 
+
 def merge_raw_input_definition(context, the_raw_input, our_input, interface_name, operation_name,
                                presentation, type_name):
     # Check if we changed the type
@@ -305,6 +315,7 @@ def merge_raw_input_definition(context, the_raw_input, our_input, interface_name
     # Merge
     merge(the_raw_input, our_input._raw)
 
+
 def merge_input_definitions(context, inputs, our_inputs, interface_name, operation_name,
                             presentation, type_name):
     for input_name, our_input in our_inputs.iteritems():
@@ -314,6 +325,7 @@ def merge_input_definitions(context, inputs, our_inputs, interface_name, operati
         else:
             inputs[input_name] = our_input._clone(presentation)
 
+
 def merge_raw_input_definitions(context, raw_inputs, our_inputs, interface_name, operation_name,
                                 presentation, type_name):
     for input_name, our_input in our_inputs.iteritems():
@@ -323,6 +335,7 @@ def merge_raw_input_definitions(context, raw_inputs, our_inputs, interface_name,
         else:
             raw_inputs[input_name] = deepcopy_with_locators(our_input._raw)
 
+
 def merge_raw_operation_definition(context, raw_operation, our_operation, interface_name,
                                    presentation, type_name):
     if not isinstance(our_operation._raw, dict):
@@ -353,6 +366,7 @@ def merge_raw_operation_definition(context, raw_operation, our_operation, interf
             raw_operation['implementation'] = \
                 deepcopy_with_locators(our_operation._raw['implementation'])
 
+
 def merge_operation_definitions(context, operations, our_operations, interface_name, presentation,
                                 type_name):
     if not our_operations:
@@ -364,6 +378,7 @@ def merge_operation_definitions(context, operations, our_operations, interface_n
         else:
             operations[operation_name] = our_operation._clone(presentation)
 
+
 def merge_raw_operation_definitions(context, raw_operations, our_operations, interface_name,
                                     presentation, type_name):
     for operation_name, our_operation in our_operations.iteritems():
@@ -378,6 +393,7 @@ def merge_raw_operation_definitions(context, raw_operations, our_operations, int
         else:
             raw_operations[operation_name] = deepcopy_with_locators(our_operation._raw)
 
+
 # From either an InterfaceType or an InterfaceDefinition:
 def merge_interface_definition(context, interface, our_source, presentation, type_name):
     if hasattr(our_source, 'type'):
@@ -408,6 +424,7 @@ def merge_interface_definition(context, interface, our_source, presentation, typ
         merge_raw_operation_definitions(context, interface._raw, our_operations, our_source._name,
                                         presentation, type_name)
 
+
 def merge_interface_definitions(context, interfaces, our_interfaces, presentation,
                                 for_presentation=None):
     if not our_interfaces:
@@ -419,12 +436,14 @@ def merge_interface_definitions(context, interfaces, our_interfaces, presentatio
         else:
             interfaces[name] = our_interface._clone(for_presentation)
 
+
 def merge_interface_definitions_from_their_types(context, interfaces, presentation):
     for interface in interfaces.itervalues():
         the_type = interface._get_type(context) # InterfaceType
         if the_type is not None:
             merge_interface_definition(context, interface, the_type, presentation, 'type')
 
+
 def assign_raw_inputs(context, values, assignments, definitions, interface_name, operation_name,
                       presentation):
     if not assignments:
@@ -454,8 +473,9 @@ def assign_raw_inputs(context, values, assignments, definitions, interface_name,
         # Note: default value has already been assigned
 
         # Coerce value
-        values['inputs'][input_name] = coerce_property_value(context, assignment, definition,
-                                                             assignment.value)
+        values['inputs'][input_name] = coerce_parameter_value(context, assignment, definition,
+                                                              assignment.value)
+
 
 def validate_required_inputs(context, presentation, assignment, definition, original_assignment,
                              interface_name, operation_name=None):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
new file mode 100644
index 0000000..c910956
--- /dev/null
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
@@ -0,0 +1,211 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from aria.utils.collections import (merge, deepcopy_with_locators, OrderedDict)
+from aria.utils.formatting import pluralize
+from aria.parser.presentation import Value
+from aria.parser.validation import Issue
+
+from .data_types import coerce_value
+
+
+#
+# ArtifactType, DataType, CapabilityType, RelationshipType, NodeType, GroupType, PolicyType
+#
+
+def get_inherited_parameter_definitions(context, presentation, field_name, for_presentation=None):
+    """
+    Returns our parameter definitions added on top of those of our parent, if we have one
+    (recursively).
+
+    Allows overriding all aspects of parent definitions except their data type.
+    """
+
+    # Get definitions from parent
+    # If we inherit from a primitive, it does not have a parent:
+    parent = presentation._get_parent(context) if hasattr(presentation, '_get_parent') else None
+    definitions = get_inherited_parameter_definitions(context, parent, field_name,
+                                                      for_presentation=presentation) \
+                                                      if parent is not None else OrderedDict()
+
+    # Add/merge our definitions
+    # If we inherit from a primitive, it does not have our field
+    our_definitions = getattr(presentation, field_name, None)
+    if our_definitions:
+        our_definitions_clone = OrderedDict()
+        for name, our_definition in our_definitions.iteritems():
+            our_definitions_clone[name] = our_definition._clone(for_presentation)
+        our_definitions = our_definitions_clone
+        merge_parameter_definitions(context, presentation, definitions, our_definitions, field_name)
+
+    for definition in definitions.itervalues():
+        definition._reset_method_cache()
+
+    return definitions
+
+
+#
+# NodeTemplate, RelationshipTemplate, GroupTemplate, PolicyTemplate
+#
+
+def get_assigned_and_defined_parameter_values(context, presentation, field_name):
+    """
+    Returns the assigned parameter values while making sure they are defined in our type.
+
+    The parameter definition's default value, if available, will be used if we did not assign it.
+
+    Makes sure that required parameters indeed end up with a value.
+    """
+
+    values = OrderedDict()
+
+    the_type = presentation._get_type(context)
+    field_name_plural = pluralize(field_name)
+    assignments = getattr(presentation, field_name_plural)
+    get_fn_name = '_get_{0}'.format(field_name_plural)
+    definitions = getattr(the_type, get_fn_name)(context) if the_type is not None else None
+
+    # Fill in our assignments, but make sure they are defined
+    if assignments:
+        for name, value in assignments.iteritems():
+            if (definitions is not None) and (name in definitions):
+                definition = definitions[name]
+                values[name] = coerce_parameter_value(context, value, definition, value.value)
+            else:
+                context.validation.report('assignment to undefined {0} "{1}" in "{2}"'
+                                          .format(field_name, name, presentation._fullname),
+                                          locator=value._locator, level=Issue.BETWEEN_TYPES)
+
+    # Fill in defaults from the definitions
+    if definitions:
+        for name, definition in definitions.iteritems():
+            if values.get(name) is None:
+                values[name] = coerce_parameter_value(context, presentation, definition,
+                                                      definition.default)
+
+    validate_required_values(context, presentation, values, definitions)
+
+    return values
+
+
+#
+# TopologyTemplate
+#
+
+def get_parameter_values(context, presentation, field_name):
+    values = OrderedDict()
+
+    parameters = getattr(presentation, field_name)
+
+    # Fill in defaults and values
+    if parameters:
+        for name, parameter in parameters.iteritems():
+            if values.get(name) is None:
+                if hasattr(parameter, 'value') and (parameter.value is not None):
+                    # For parameters only:
+                    values[name] = coerce_parameter_value(context, presentation, parameter,
+                                                          parameter.value)
+                else:
+                    default = parameter.default if hasattr(parameter, 'default') else None
+                    values[name] = coerce_parameter_value(context, presentation, parameter, default)
+
+    return values
+
+
+#
+# Utils
+#
+
+def validate_required_values(context, presentation, values, definitions):
+    """
+    Reports an issue for every required parameter that has not been assigned a value.
+    """
+
+    if not definitions:
+        return
+    for name, definition in definitions.iteritems():
+        if getattr(definition, 'required', False) \
+            and ((values is None) or (values.get(name) is None)):
+            context.validation.report('required property "%s" is not assigned a value in "%s"'
+                                      % (name, presentation._fullname),
+                                      locator=presentation._get_child_locator('properties'),
+                                      level=Issue.BETWEEN_TYPES)
+
+
+def merge_raw_parameter_definition(context, presentation, raw_property_definition,
+                                   our_property_definition, field_name, property_name):
+    # Check if we changed the type
+    # TODO: allow a sub-type?
+    type1 = raw_property_definition.get('type')
+    type2 = our_property_definition.type
+    if type1 != type2:
+        context.validation.report(
+            'override changes type from "%s" to "%s" for property "%s" in "%s"'
+            % (type1, type2, property_name, presentation._fullname),
+            locator=presentation._get_child_locator(field_name, property_name),
+            level=Issue.BETWEEN_TYPES)
+
+    merge(raw_property_definition, our_property_definition._raw)
+
+
+def merge_raw_parameter_definitions(context, presentation, raw_property_definitions,
+                                    our_property_definitions, field_name):
+    if not our_property_definitions:
+        return
+    for property_name, our_property_definition in our_property_definitions.iteritems():
+        if property_name in raw_property_definitions:
+            raw_property_definition = raw_property_definitions[property_name]
+            merge_raw_parameter_definition(context, presentation, raw_property_definition,
+                                           our_property_definition, field_name, property_name)
+        else:
+            raw_property_definitions[property_name] = \
+                deepcopy_with_locators(our_property_definition._raw)
+
+
+def merge_parameter_definitions(context, presentation, property_definitions,
+                                our_property_definitions, field_name):
+    if not our_property_definitions:
+        return
+    for property_name, our_property_definition in our_property_definitions.iteritems():
+        if property_name in property_definitions:
+            property_definition = property_definitions[property_name]
+            merge_raw_parameter_definition(context, presentation, property_definition._raw,
+                                           our_property_definition, field_name, property_name)
+        else:
+            property_definitions[property_name] = our_property_definition
+
+
+# Works on properties, inputs, and parameters
+def coerce_parameter_value(context, presentation, definition, value, aspect=None):
+    the_type = definition._get_type(context) if definition is not None else None
+    entry_schema = definition.entry_schema if definition is not None else None
+    constraints = definition._get_constraints(context) \
+        if ((definition is not None) and hasattr(definition, '_get_constraints')) else None
+    value = coerce_value(context, presentation, the_type, entry_schema, constraints, value, aspect)
+    if (the_type is not None) and hasattr(the_type, '_name'):
+        type_name = the_type._name
+    else:
+        type_name = getattr(definition, 'type', None)
+    description = getattr(definition, 'description', None)
+    description = description.value if description is not None else None
+    return Value(type_name, value, description)
+
+
+def convert_parameter_definitions_to_values(context, definitions):
+    values = OrderedDict()
+    for name, definition in definitions.iteritems():
+        default = definition.default
+        values[name] = coerce_parameter_value(context, definition, definition, default)
+    return values
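
This new module generalizes the deleted properties.py (whose removal follows below) over a field_name argument; pluralize() derives both the presentation attribute and the type accessor name from it. A small hedged sketch of that name-driven dispatch, with stand-in objects rather than real parser presentations:

    # Illustrative only; real presentations and types come from the TOSCA parser.
    def pluralize(noun):
        # Simplified stand-in for aria.utils.formatting.pluralize.
        return noun[:-1] + 'ies' if noun.endswith('y') else noun + 's'

    class FakeNodeType(object):
        def _get_properties(self, context):
            return {'port': 8080}

    field_name = 'property'
    field_name_plural = pluralize(field_name)            # 'properties'
    get_fn_name = '_get_{0}'.format(field_name_plural)   # '_get_properties'
    definitions = getattr(FakeNodeType(), get_fn_name)(None)
    assert definitions == {'port': 8080}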

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/extensions/aria_extension_tosca/simple_v1_0/modeling/policies.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/policies.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/policies.py
index fba1972..7dd803b 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/policies.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/policies.py
@@ -15,6 +15,7 @@
 
 from ..presentation.types import convert_shorthand_to_full_type_name
 
+
 #
 # PolicyType
 #
@@ -49,6 +50,7 @@ def get_inherited_targets(context, presentation):
 
     return node_types, group_types
 
+
 #
 # PolicyTemplate
 #



http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/extensions/aria_extension_tosca/simple_v1_0/modeling/properties.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/properties.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/properties.py
deleted file mode 100644
index 9c3ea42..0000000
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/properties.py
+++ /dev/null
@@ -1,202 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.collections import merge, deepcopy_with_locators, OrderedDict
-from aria.parser.presentation import Value
-from aria.parser.validation import Issue
-
-from .data_types import coerce_value
-
-#
-# ArtifactType, DataType, CapabilityType, RelationshipType, NodeType, GroupType, PolicyType
-#
-
-# Works on properties, parameters, inputs, and attributes
-def get_inherited_property_definitions(context, presentation, field_name, for_presentation=None):
-    """
-    Returns our property definitions added on top of those of our parent, if we have one
-    (recursively).
-
-    Allows overriding all aspects of parent properties except data type.
-    """
-
-    # Get definitions from parent
-    # If we inherit from a primitive, it does not have a parent:
-    parent = presentation._get_parent(context) if hasattr(presentation, '_get_parent') else None
-    definitions = get_inherited_property_definitions(context, parent, field_name,
-                                                     for_presentation=presentation) \
-                                                     if parent is not None else OrderedDict()
-
-    # Add/merge our definitions
-    # If we inherit from a primitive, it does not have our field
-    our_definitions = getattr(presentation, field_name, None)
-    if our_definitions:
-        our_definitions_clone = OrderedDict()
-        for name, our_definition in our_definitions.iteritems():
-            our_definitions_clone[name] = our_definition._clone(for_presentation)
-        our_definitions = our_definitions_clone
-        merge_property_definitions(context, presentation, definitions, our_definitions, field_name)
-
-    for definition in definitions.itervalues():
-        definition._reset_method_cache()
-
-    return definitions
-
-#
-# NodeTemplate, RelationshipTemplate, GroupTemplate, PolicyTemplate
-#
-
-def get_assigned_and_defined_property_values(context, presentation, field_name='property',
-                                             field_name_plural='properties'):
-    """
-    Returns the assigned property values while making sure they are defined in our type.
-
-    The property definition's default value, if available, will be used if we did not assign it.
-
-    Makes sure that required properties indeed end up with a value.
-    """
-
-    values = OrderedDict()
-
-    the_type = presentation._get_type(context)
-    assignments = getattr(presentation, field_name_plural)
-    get_fn_name = '_get_{0}'.format(field_name_plural)
-    definitions = getattr(the_type, get_fn_name)(context) if the_type is not None else None
-
-    # Fill in our assignments, but make sure they are defined
-    if assignments:
-        for name, value in assignments.iteritems():
-            if (definitions is not None) and (name in definitions):
-                definition = definitions[name]
-                values[name] = coerce_property_value(context, value, definition, value.value)
-            else:
-                context.validation.report('assignment to undefined {0} "{1}" in "{2}"'
-                                          .format(field_name, name, presentation._fullname),
-                                          locator=value._locator, level=Issue.BETWEEN_TYPES)
-
-    # Fill in defaults from the definitions
-    if definitions:
-        for name, definition in definitions.iteritems():
-            if values.get(name) is None:
-                values[name] = coerce_property_value(context, presentation, definition,
-                                                     definition.default)
-
-    validate_required_values(context, presentation, values, definitions)
-
-    return values
-
-#
-# TopologyTemplate
-#
-
-def get_parameter_values(context, presentation, field_name):
-    values = OrderedDict()
-
-    parameters = getattr(presentation, field_name)
-
-    # Fill in defaults and values
-    if parameters:
-        for name, parameter in parameters.iteritems():
-            if values.get(name) is None:
-                if hasattr(parameter, 'value') and (parameter.value is not None):
-                    # For parameters only:
-                    values[name] = coerce_property_value(context, presentation, parameter,
-                                                         parameter.value)
-                else:
-                    default = parameter.default if hasattr(parameter, 'default') else None
-                    values[name] = coerce_property_value(context, presentation, parameter, default)
-
-    return values
-
-#
-# Utils
-#
-
-def validate_required_values(context, presentation, values, definitions):
-    """
-    Check if required properties have not been assigned.
-    """
-
-    if not definitions:
-        return
-    for name, definition in definitions.iteritems():
-        if getattr(definition, 'required', False) \
-            and ((values is None) or (values.get(name) is None)):
-            context.validation.report('required property "%s" is not assigned a value in "%s"'
-                                      % (name, presentation._fullname),
-                                      locator=presentation._get_child_locator('properties'),
-                                      level=Issue.BETWEEN_TYPES)
-
-def merge_raw_property_definition(context, presentation, raw_property_definition,
-                                  our_property_definition, field_name, property_name):
-    # Check if we changed the type
-    # TODO: allow a sub-type?
-    type1 = raw_property_definition.get('type')
-    type2 = our_property_definition.type
-    if type1 != type2:
-        context.validation.report(
-            'override changes type from "%s" to "%s" for property "%s" in "%s"'
-            % (type1, type2, property_name, presentation._fullname),
-            locator=presentation._get_child_locator(field_name, property_name),
-            level=Issue.BETWEEN_TYPES)
-
-    merge(raw_property_definition, our_property_definition._raw)
-
-def merge_raw_property_definitions(context, presentation, raw_property_definitions,
-                                   our_property_definitions, field_name):
-    if not our_property_definitions:
-        return
-    for property_name, our_property_definition in our_property_definitions.iteritems():
-        if property_name in raw_property_definitions:
-            raw_property_definition = raw_property_definitions[property_name]
-            merge_raw_property_definition(context, presentation, raw_property_definition,
-                                          our_property_definition, field_name, property_name)
-        else:
-            raw_property_definitions[property_name] = \
-                deepcopy_with_locators(our_property_definition._raw)
-
-def merge_property_definitions(context, presentation, property_definitions,
-                               our_property_definitions, field_name):
-    if not our_property_definitions:
-        return
-    for property_name, our_property_definition in our_property_definitions.iteritems():
-        if property_name in property_definitions:
-            property_definition = property_definitions[property_name]
-            merge_raw_property_definition(context, presentation, property_definition._raw,
-                                          our_property_definition, field_name, property_name)
-        else:
-            property_definitions[property_name] = our_property_definition
-
-# Works on properties, inputs, and parameters
-def coerce_property_value(context, presentation, definition, value, aspect=None):
-    the_type = definition._get_type(context) if definition is not None else None
-    entry_schema = definition.entry_schema if definition is not None else None
-    constraints = definition._get_constraints(context) \
-        if ((definition is not None) and hasattr(definition, '_get_constraints')) else None
-    value = coerce_value(context, presentation, the_type, entry_schema, constraints, value, aspect)
-    if (the_type is not None) and hasattr(the_type, '_name'):
-        type_name = the_type._name
-    else:
-        type_name = getattr(definition, 'type', None)
-    description = getattr(definition, 'description', None)
-    description = description.value if description is not None else None
-    return Value(type_name, value, description)
-
-def convert_property_definitions_to_values(context, definitions):
-    values = OrderedDict()
-    for name, definition in definitions.iteritems():
-        default = definition.default
-        values[name] = coerce_property_value(context, definition, definition, default)
-    return values

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/extensions/aria_extension_tosca/simple_v1_0/modeling/requirements.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/requirements.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/requirements.py
index 2a68da2..6bdb5b1 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/requirements.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/requirements.py
@@ -14,13 +14,14 @@
 # limitations under the License.
 
 from aria.parser.validation import Issue
-from aria.utils.collections import deepcopy_with_locators, OrderedDict
+from aria.utils.collections import (deepcopy_with_locators, OrderedDict)
 
-from .properties import (convert_property_definitions_to_values, validate_required_values,
-                         coerce_property_value)
+from .parameters import (convert_parameter_definitions_to_values, validate_required_values,
+                         coerce_parameter_value)
 from .interfaces import (convert_requirement_interface_definitions_from_type_to_raw_template,
                          merge_interface_definitions, merge_interface, validate_required_inputs)
 
+
 #
 # NodeType
 #
@@ -49,6 +50,7 @@ def get_inherited_requirement_definitions(context, presentation):
 
     return requirement_definitions
 
+
 #
 # NodeTemplate
 #
@@ -127,6 +129,7 @@ def get_template_requirements(context, presentation):
 
     return requirement_assignments
 
+
 #
 # Utils
 #
@@ -195,8 +198,8 @@ def convert_requirement_from_definition_to_assignment(context, requirement_defin
             if relationship_property_definitions:
                 # Convert property definitions to values
                 raw['relationship']['properties'] = \
-                    convert_property_definitions_to_values(context,
-                                                           relationship_property_definitions)
+                    convert_parameter_definitions_to_values(context,
+                                                            relationship_property_definitions)
 
         # These are our interface definitions
         # InterfaceDefinition:
@@ -229,6 +232,7 @@ def convert_requirement_from_definition_to_assignment(context, requirement_defin
         relationship_property_definitions, \
         relationship_interface_definitions
 
+
 def add_requirement_assignments(context, presentation, requirement_assignments,
                                 requirement_definitions, our_requirement_assignments):
     for requirement_name, our_requirement_assignment in our_requirement_assignments:
@@ -258,6 +262,7 @@ def add_requirement_assignments(context, presentation, requirement_assignments,
                                       locator=our_requirement_assignment._locator,
                                       level=Issue.BETWEEN_TYPES)
 
+
 def merge_requirement_assignment(context, relationship_property_definitions,
                                  relationship_interface_definitions, requirement, our_requirement):
     our_capability = our_requirement.capability
@@ -283,6 +288,7 @@ def merge_requirement_assignment(context, relationship_property_definitions,
                                                   relationship_interface_definitions,
                                                   requirement, our_relationship)
 
+
 def merge_requirement_assignment_relationship(context, presentation, property_definitions,
                                               interface_definitions, requirement, our_relationship):
     our_relationship_properties = our_relationship._raw.get('properties')
@@ -296,7 +302,7 @@ def merge_requirement_assignment_relationship(context, presentation, property_de
             if property_name in property_definitions:
                 definition = property_definitions[property_name]
                 requirement._raw['relationship']['properties'][property_name] = \
-                    coerce_property_value(context, presentation, definition, prop)
+                    coerce_parameter_value(context, presentation, definition, prop)
             else:
                 context.validation.report(
                     'relationship property "%s" not declared at definition of requirement "%s"'
@@ -330,6 +336,7 @@ def merge_requirement_assignment_relationship(context, presentation, property_de
                        presentation._container._container._fullname),
                     locator=our_relationship._locator, level=Issue.BETWEEN_TYPES)
 
+
 def validate_requirement_assignment(context, presentation, requirement_assignment,
                                     relationship_property_definitions,
                                     relationship_interface_definitions):
@@ -348,6 +355,7 @@ def validate_requirement_assignment(context, presentation, requirement_assignmen
             validate_required_inputs(context, presentation, interface_assignment,
                                      relationship_interface_definition, None, interface_name)
 
+
 def get_first_requirement(requirement_definitions, name):
     if requirement_definitions is not None:
         for requirement_name, requirement_definition in requirement_definitions:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/extensions/aria_extension_tosca/simple_v1_0/modeling/substitution_mappings.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/substitution_mappings.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/substitution_mappings.py
index c1e21de..8f7ec4c 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/substitution_mappings.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/substitution_mappings.py
@@ -16,6 +16,7 @@
 from aria.utils.formatting import safe_repr
 from aria.parser.validation import Issue
 
+
 def validate_subtitution_mappings_requirement(context, presentation):
     if not validate_format(context, presentation, 'requirement'):
         return
@@ -57,6 +58,7 @@ def validate_subtitution_mappings_requirement(context, presentation):
             locator=presentation._locator, level=Issue.BETWEEN_TYPES)
         return
 
+
 def validate_subtitution_mappings_capability(context, presentation):
     if not validate_format(context, presentation, 'capability'):
         return
@@ -99,6 +101,7 @@ def validate_subtitution_mappings_capability(context, presentation):
             % (capability_type._name, presentation._name, type_capability_type._name),
             locator=presentation._locator, level=Issue.BETWEEN_TYPES)
 
+
 #
 # Utils
 #
@@ -114,6 +117,7 @@ def validate_format(context, presentation, name):
         return False
     return True
 
+
 def get_node_template(context, presentation, name):
     node_template_name = presentation._raw[0]
     node_template = context.presentation.get_from_dict('service_template', 'topology_template',

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/extensions/aria_extension_tosca/simple_v1_0/templates.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/templates.py b/extensions/aria_extension_tosca/simple_v1_0/templates.py
index ce6b5d9..123a00e 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/templates.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/templates.py
@@ -26,7 +26,7 @@ from .assignments import (PropertyAssignment, AttributeAssignment, RequirementAs
 from .definitions import ParameterDefinition
 from .filters import NodeFilter
 from .misc import (Description, MetaData, Repository, Import, SubstitutionMappings)
-from .modeling.properties import (get_assigned_and_defined_property_values, get_parameter_values)
+from .modeling.parameters import (get_assigned_and_defined_parameter_values, get_parameter_values)
 from .modeling.interfaces import get_template_interfaces
 from .modeling.requirements import get_template_requirements
 from .modeling.capabilities import get_template_capabilities
@@ -157,12 +157,11 @@ class NodeTemplate(ExtensiblePresentation):
 
     @cachedmethod
     def _get_property_values(self, context):
-        return FrozenDict(get_assigned_and_defined_property_values(context, self))
+        return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'property'))
 
     @cachedmethod
     def _get_attribute_default_values(self, context):
-        return FrozenDict(get_assigned_and_defined_property_values(context, self,
-                                                                   'attribute', 'attributes'))
+        return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'attribute'))
 
     @cachedmethod
     def _get_requirements(self, context):
@@ -281,7 +280,7 @@ class RelationshipTemplate(ExtensiblePresentation):
 
     @cachedmethod
     def _get_property_values(self, context):
-        return FrozenDict(get_assigned_and_defined_property_values(context, self))
+        return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'property'))
 
     @cachedmethod
     def _get_interfaces(self, context):
@@ -363,7 +362,7 @@ class GroupTemplate(ExtensiblePresentation):
 
     @cachedmethod
     def _get_property_values(self, context):
-        return FrozenDict(get_assigned_and_defined_property_values(context, self))
+        return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'property'))
 
     @cachedmethod
     def _get_interfaces(self, context):
@@ -427,7 +426,7 @@ class PolicyTemplate(ExtensiblePresentation):
 
     @cachedmethod
     def _get_property_values(self, context):
-        return FrozenDict(get_assigned_and_defined_property_values(context, self))
+        return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'property'))
 
     @cachedmethod
     def _get_targets(self, context):
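The template classes above now share a single assigned-values helper: the new third argument names the parameter kind, replacing the old ('attribute', 'attributes') argument pair. Roughly (a sketch, with node_template standing in for a presentation instance):

    from aria_extension_tosca.simple_v1_0.modeling.parameters import (
        get_assigned_and_defined_parameter_values)

    # One generic helper now serves both kinds, selected by the kind string.
    properties = get_assigned_and_defined_parameter_values(context, node_template, 'property')
    attributes = get_assigned_and_defined_parameter_values(context, node_template, 'attribute')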

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/extensions/aria_extension_tosca/simple_v1_0/types.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/types.py b/extensions/aria_extension_tosca/simple_v1_0/types.py
index bc80eb9..d97b89c 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/types.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/types.py
@@ -33,9 +33,9 @@ from .modeling.capabilities import (get_inherited_valid_source_types,
                                     get_inherited_capability_definitions)
 from .modeling.data_types import (get_data_type, get_inherited_constraints, coerce_data_type_value,
                                   validate_data_type_name)
-from .modeling.interfaces import get_inherited_interface_definitions, get_inherited_operations
+from .modeling.interfaces import (get_inherited_interface_definitions, get_inherited_operations)
 from .modeling.policies import get_inherited_targets
-from .modeling.properties import get_inherited_property_definitions
+from .modeling.parameters import get_inherited_parameter_definitions
 from .modeling.requirements import get_inherited_requirement_definitions
 from .presentation.extensible import ExtensiblePresentation
 from .presentation.field_getters import data_type_class_getter
@@ -115,7 +115,7 @@ class ArtifactType(ExtensiblePresentation):
 
     @cachedmethod
     def _get_properties(self, context):
-        return FrozenDict(get_inherited_property_definitions(context, self, 'properties'))
+        return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
 
     def _validate(self, context):
         super(ArtifactType, self)._validate(context)
@@ -201,7 +201,7 @@ class DataType(ExtensiblePresentation):
 
     @cachedmethod
     def _get_properties(self, context):
-        return FrozenDict(get_inherited_property_definitions(context, self, 'properties'))
+        return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
 
     @cachedmethod
     def _get_constraints(self, context):
@@ -307,7 +307,7 @@ class CapabilityType(ExtensiblePresentation):
 
     @cachedmethod
     def _get_properties(self, context):
-        return FrozenDict(get_inherited_property_definitions(context, self, 'properties'))
+        return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
 
     @cachedmethod
     def _get_valid_source_types(self, context):
@@ -385,7 +385,7 @@ class InterfaceType(ExtensiblePresentation):
 
     @cachedmethod
     def _get_inputs(self, context):
-        return FrozenDict(get_inherited_property_definitions(context, self, 'inputs'))
+        return FrozenDict(get_inherited_parameter_definitions(context, self, 'inputs'))
 
     @cachedmethod
     def _get_operations(self, context):
@@ -493,11 +493,11 @@ class RelationshipType(ExtensiblePresentation):
 
     @cachedmethod
     def _get_properties(self, context):
-        return FrozenDict(get_inherited_property_definitions(context, self, 'properties'))
+        return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
 
     @cachedmethod
     def _get_attributes(self, context):
-        return FrozenDict(get_inherited_property_definitions(context, self, 'attributes'))
+        return FrozenDict(get_inherited_parameter_definitions(context, self, 'attributes'))
 
     @cachedmethod
     def _get_interfaces(self, context):
@@ -624,11 +624,11 @@ class NodeType(ExtensiblePresentation):
 
     @cachedmethod
     def _get_properties(self, context):
-        return FrozenDict(get_inherited_property_definitions(context, self, 'properties'))
+        return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
 
     @cachedmethod
     def _get_attributes(self, context):
-        return FrozenDict(get_inherited_property_definitions(context, self, 'attributes'))
+        return FrozenDict(get_inherited_parameter_definitions(context, self, 'attributes'))
 
     @cachedmethod
     def _get_requirements(self, context):
@@ -760,7 +760,7 @@ class GroupType(ExtensiblePresentation):
 
     @cachedmethod
     def _get_properties(self, context):
-        return FrozenDict(get_inherited_property_definitions(context, self, 'properties'))
+        return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
 
     @cachedmethod
     def _get_interfaces(self, context):
@@ -848,7 +848,7 @@ class PolicyType(ExtensiblePresentation):
 
     @cachedmethod
     def _get_properties(self, context):
-        return FrozenDict(get_inherited_property_definitions(context, self, 'properties'))
+        return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
 
     @cachedmethod
     def _get_targets(self, context):
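Type hierarchies get the same treatment: inherited definitions for properties, attributes, and interface inputs all flow through one helper, with the field name selecting which raw section to read (a sketch; node_type and interface_type stand in for presentation instances):

    from aria_extension_tosca.simple_v1_0.modeling.parameters import (
        get_inherited_parameter_definitions)

    # The same helper reads 'properties', 'attributes' or 'inputs'.
    properties = get_inherited_parameter_definitions(context, node_type, 'properties')
    attributes = get_inherited_parameter_definitions(context, node_type, 'attributes')
    inputs = get_inherited_parameter_definitions(context, interface_type, 'inputs')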

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/cli/test_services.py
----------------------------------------------------------------------
diff --git a/tests/cli/test_services.py b/tests/cli/test_services.py
index b1a6ee4..e5717cc 100644
--- a/tests/cli/test_services.py
+++ b/tests/cli/test_services.py
@@ -19,7 +19,7 @@ import mock
 from aria.cli.env import _Environment
 from aria.core import Core
 from aria.exceptions import DependentActiveExecutionsError, DependentAvailableNodesError
-from aria.modeling.exceptions import InputsException
+from aria.modeling.exceptions import ParameterException
 from aria.storage import exceptions as storage_exceptions
 
 from .base_test import (  # pylint: disable=unused-import
@@ -120,11 +120,11 @@ class TestServicesCreate(TestCliBase):
         monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
         monkeypatch.setattr(Core,
                             'create_service',
-                            raise_exception(InputsException))
+                            raise_exception(ParameterException))
 
         assert_exception_raised(
             self.invoke('services create -t with_inputs test_s'),
-            expected_exception=InputsException)
+            expected_exception=ParameterException)
 
         assert "Service created. The service's name is test_s" not in self.logger_output_string
 
@@ -152,8 +152,8 @@ class TestServicesDelete(TestCliBase):
         assert_exception_raised(
             self.invoke('services delete test_s'),
             expected_exception=DependentActiveExecutionsError,
-            expected_msg="Can't delete service {name} - there is an active execution "
-                         "for this service. Active execution id: 1".format(
+            expected_msg="Can't delete service `{name}` - there is an active execution "
+                         "for this service. Active execution ID: 1".format(
                              name=mock_models.SERVICE_NAME))
 
     def test_delete_available_nodes_error(self, monkeypatch, mock_storage):
@@ -161,8 +161,8 @@ class TestServicesDelete(TestCliBase):
         assert_exception_raised(
             self.invoke('services delete test_s'),
             expected_exception=DependentAvailableNodesError,
-            expected_msg="Can't delete service {name} - there are available nodes "
-                         "for this service. Available node ids: 1".format(
+            expected_msg="Can't delete service `{name}` - there are available nodes "
+                         "for this service. Available node IDs: 1".format(
                              name=mock_models.SERVICE_NAME))
 
     def test_delete_available_nodes_error_with_force(self, monkeypatch, mock_storage):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/mock/models.py
----------------------------------------------------------------------
diff --git a/tests/mock/models.py b/tests/mock/models.py
index 98703d5..50aa340 100644
--- a/tests/mock/models.py
+++ b/tests/mock/models.py
@@ -225,11 +225,11 @@ def create_interface(service, interface_name, operation_name, operation_kwargs=N
                      interface_kwargs=None):
     the_type = service.service_template.interface_types.get_descendant('test_interface_type')
 
-    if operation_kwargs and operation_kwargs.get('inputs'):
-        operation_kwargs['inputs'] = dict(
-            (input_name, models.Parameter.wrap(input_name, input_value))
-            for input_name, input_value in operation_kwargs['inputs'].iteritems()
-            if input_value is not None)
+    if operation_kwargs and operation_kwargs.get('arguments'):
+        operation_kwargs['arguments'] = dict(
+            (argument_name, models.Parameter.wrap(argument_name, argument_value))
+            for argument_name, argument_value in operation_kwargs['arguments'].iteritems()
+            if argument_value is not None)
 
     operation = models.Operation(
         name=operation_name,
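The mock now wraps operation arguments rather than inputs; models.Parameter.wrap(name, value) is the wrapping pattern used throughout these tests. A minimal standalone sketch of it (Python 2, matching the codebase; the raw dict is illustrative):

    from aria.modeling import models

    # Wrap plain values as Parameter models, skipping None values --
    # mirroring the comprehension in the mock above.
    raw = {'key': 'create', 'value': True, 'absent': None}
    arguments = dict((name, models.Parameter.wrap(name, value))
                     for name, value in raw.iteritems()
                     if value is not None)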

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/mock/topology.py
----------------------------------------------------------------------
diff --git a/tests/mock/topology.py b/tests/mock/topology.py
index bfb7b4e..ab08dbd 100644
--- a/tests/mock/topology.py
+++ b/tests/mock/topology.py
@@ -27,9 +27,9 @@ def create_simple_topology_single_node(model_storage, create_operation):
         service_template,
         'Standard', 'create',
         operation_kwargs=dict(
-            implementation=create_operation,
-            inputs={'key': aria_models.Parameter.wrap('key', 'create'),
-                    'value': aria_models.Parameter.wrap('value', True)})
+            function=create_operation,
+            arguments={'key': aria_models.Parameter.wrap('key', 'create'),
+                       'value': aria_models.Parameter.wrap('value', True)})
     )
     node_template.interface_templates[interface_template.name] = interface_template                 # pylint: disable=unsubscriptable-object
 
@@ -38,9 +38,9 @@ def create_simple_topology_single_node(model_storage, create_operation):
         service,
         'Standard', 'create',
         operation_kwargs=dict(
-            implementation=create_operation,
-            inputs={'key': aria_models.Parameter.wrap('key', 'create'),
-                    'value': aria_models.Parameter.wrap('value', True)})
+            function=create_operation,
+            arguments={'key': aria_models.Parameter.wrap('key', 'create'),
+                       'value': aria_models.Parameter.wrap('value', True)})
     )
     node.interfaces[interface.name] = interface                                                     # pylint: disable=unsubscriptable-object
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index 5d193bc..7dbdd04 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -78,14 +78,14 @@ def test_node_operation_task_execution(ctx, thread_executor, dataholder):
     interface_name = 'Standard'
     operation_name = 'create'
 
-    inputs = {'putput': True, 'holder_path': dataholder.path}
+    arguments = {'putput': True, 'holder_path': dataholder.path}
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
     interface = mock.models.create_interface(
         node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(basic_node_operation, module_path=__name__),
-                              inputs=inputs)
+        operation_kwargs=dict(function=op_path(basic_node_operation, module_path=__name__),
+                              arguments=arguments)
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
@@ -97,7 +97,7 @@ def test_node_operation_task_execution(ctx, thread_executor, dataholder):
                 node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=inputs
+                inputs=arguments
             )
         )
 
@@ -115,7 +115,7 @@ def test_node_operation_task_execution(ctx, thread_executor, dataholder):
     )
     operations = interface.operations
     assert len(operations) == 1
-    assert dataholder['implementation'] == operations.values()[0].implementation             # pylint: disable=no-member
+    assert dataholder['implementation'] == operations.values()[0].function             # pylint: disable=no-member
     assert dataholder['inputs']['putput'] is True
 
     # Context based attributes (sugaring)
@@ -127,15 +127,14 @@ def test_relationship_operation_task_execution(ctx, thread_executor, dataholder)
     interface_name = 'Configure'
     operation_name = 'post_configure'
 
-    inputs = {'putput': True, 'holder_path': dataholder.path}
+    arguments = {'putput': True, 'holder_path': dataholder.path}
     relationship = ctx.model.relationship.list()[0]
     interface = mock.models.create_interface(
         relationship.source_node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(basic_relationship_operation,
-                                                     module_path=__name__),
-                              inputs=inputs),
+        operation_kwargs=dict(function=op_path(basic_relationship_operation, module_path=__name__),
+                              arguments=arguments),
     )
 
     relationship.interfaces[interface.name] = interface
@@ -148,7 +147,7 @@ def test_relationship_operation_task_execution(ctx, thread_executor, dataholder)
                 relationship,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=inputs
+                inputs=arguments
             )
         )
 
@@ -160,7 +159,7 @@ def test_relationship_operation_task_execution(ctx, thread_executor, dataholder)
     assert dataholder['actor_name'] == relationship.name
     assert interface_name in dataholder['task_name']
     operations = interface.operations
-    assert dataholder['implementation'] == operations.values()[0].implementation           # pylint: disable=no-member
+    assert dataholder['implementation'] == operations.values()[0].function           # pylint: disable=no-member
     assert dataholder['inputs']['putput'] is True
 
     # Context based attributes (sugaring)
@@ -197,8 +196,8 @@ def test_invalid_task_operation_id(ctx, thread_executor, dataholder):
         node.service,
         interface_name=interface_name,
         operation_name=operation_name,
-        operation_kwargs=dict(implementation=op_path(get_node_id, module_path=__name__),
-                              inputs={'holder_path': dataholder.path})
+        operation_kwargs=dict(function=op_path(get_node_id, module_path=__name__),
+                              arguments={'holder_path': dataholder.path})
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
@@ -234,15 +233,15 @@ def test_plugin_workdir(ctx, thread_executor, tmpdir):
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
     filename = 'test_file'
     content = 'file content'
-    inputs = {'filename': filename, 'content': content}
+    arguments = {'filename': filename, 'content': content}
     interface = mock.models.create_interface(
         node.service,
         interface_name,
         operation_name,
         operation_kwargs=dict(
-            implementation='{0}.{1}'.format(__name__, _test_plugin_workdir.__name__),
+            function='{0}.{1}'.format(__name__, _test_plugin_workdir.__name__),
             plugin=plugin,
-            inputs=inputs)
+            arguments=arguments)
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
@@ -253,7 +252,7 @@ def test_plugin_workdir(ctx, thread_executor, tmpdir):
             node,
             interface_name=interface_name,
             operation_name=operation_name,
-            inputs=inputs))
+            inputs=arguments))
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
     expected_file = tmpdir.join('workdir', 'plugins', str(ctx.service.id),
@@ -280,7 +279,7 @@ def test_node_operation_logging(ctx, executor):
 
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
 
-    inputs = {
+    arguments = {
         'op_start': 'op_start',
         'op_end': 'op_end',
     }
@@ -289,8 +288,8 @@ def test_node_operation_logging(ctx, executor):
         interface_name,
         operation_name,
         operation_kwargs=dict(
-            implementation=op_path(logged_operation, module_path=__name__),
-            inputs=inputs)
+            function=op_path(logged_operation, module_path=__name__),
+            arguments=arguments)
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
@@ -302,19 +301,19 @@ def test_node_operation_logging(ctx, executor):
                 node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=inputs
+                inputs=arguments
             )
         )
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=executor)
-    _assert_loggins(ctx, inputs)
+    _assert_loggins(ctx, arguments)
 
 
 def test_relationship_operation_logging(ctx, executor):
     interface_name, operation_name = mock.operations.RELATIONSHIP_OPERATIONS_INSTALL[0]
 
     relationship = ctx.model.relationship.list()[0]
-    inputs = {
+    arguments = {
         'op_start': 'op_start',
         'op_end': 'op_end',
     }
@@ -322,8 +321,8 @@ def test_relationship_operation_logging(ctx, executor):
         relationship.source_node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation=op_path(logged_operation, module_path=__name__),
-                              inputs=inputs)
+        operation_kwargs=dict(function=op_path(logged_operation, module_path=__name__),
+                              arguments=arguments)
     )
     relationship.interfaces[interface.name] = interface
     ctx.model.relationship.update(relationship)
@@ -335,12 +334,12 @@ def test_relationship_operation_logging(ctx, executor):
                 relationship,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=inputs
+                inputs=arguments
             )
         )
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=executor)
-    _assert_loggins(ctx, inputs)
+    _assert_loggins(ctx, arguments)
 
 
 def test_attribute_consumption(ctx, executor, dataholder):
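Note the asymmetry these test diffs rely on: the operation model now stores function and arguments (formerly implementation and inputs), while the api-level OperationTask still takes an inputs keyword at this point in the branch. Condensed, the recurring pattern is (a sketch with the tests' mock and api imports in scope; the function path is hypothetical):

    interface = mock.models.create_interface(
        node.service, 'Standard', 'create',
        operation_kwargs=dict(function='some.module.func',  # hypothetical path
                              arguments={'putput': True}))
    node.interfaces[interface.name] = interface

    # The task-level keyword is still 'inputs' at this commit.
    task = api.task.OperationTask(
        node, interface_name='Standard', operation_name='create',
        inputs={'putput': True})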

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/orchestrator/context/test_serialize.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_serialize.py b/tests/orchestrator/context/test_serialize.py
index 8a5db6f..946b0bd 100644
--- a/tests/orchestrator/context/test_serialize.py
+++ b/tests/orchestrator/context/test_serialize.py
@@ -47,7 +47,7 @@ def _mock_workflow(ctx, graph):
         node.service,
         'test',
         'op',
-        operation_kwargs=dict(implementation=_operation_mapping(),
+        operation_kwargs=dict(function=_operation_mapping(),
                               plugin=plugin)
     )
     node.interfaces[interface.name] = interface

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index fc34907..26a15e5 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -86,12 +86,12 @@ def test_host_ip(workflow_context, executor, dataholder):
     interface_name = 'Standard'
     operation_name = 'create'
     _, dependency_node, _, _, _ = _get_elements(workflow_context)
-    inputs = {'putput': True, 'holder_path': dataholder.path}
+    arguments = {'putput': True, 'holder_path': dataholder.path}
     interface = mock.models.create_interface(
         dependency_node.service,
         interface_name=interface_name,
         operation_name=operation_name,
-        operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__), inputs=inputs)
+        operation_kwargs=dict(function=op_path(host_ip, module_path=__name__), arguments=arguments)
     )
     dependency_node.interfaces[interface.name] = interface
     dependency_node.attributes['ip'] = models.Parameter.wrap('ip', '1.1.1.1')
@@ -105,7 +105,7 @@ def test_host_ip(workflow_context, executor, dataholder):
                 dependency_node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=inputs
+                inputs=arguments
             )
         )
 
@@ -118,13 +118,13 @@ def test_relationship_tool_belt(workflow_context, executor, dataholder):
     interface_name = 'Configure'
     operation_name = 'post_configure'
     _, _, _, _, relationship = _get_elements(workflow_context)
-    inputs = {'putput': True, 'holder_path': dataholder.path}
+    arguments = {'putput': True, 'holder_path': dataholder.path}
     interface = mock.models.create_interface(
         relationship.source_node.service,
         interface_name=interface_name,
         operation_name=operation_name,
-        operation_kwargs=dict(implementation=op_path(relationship_operation, module_path=__name__),
-                              inputs=inputs)
+        operation_kwargs=dict(function=op_path(relationship_operation, module_path=__name__),
+                              arguments=arguments)
     )
     relationship.interfaces[interface.name] = interface
     workflow_context.model.relationship.update(relationship)
@@ -136,7 +136,7 @@ def test_relationship_tool_belt(workflow_context, executor, dataholder):
                 relationship,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=inputs
+                inputs=arguments
             )
         )
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/orchestrator/execution_plugin/test_local.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_local.py b/tests/orchestrator/execution_plugin/test_local.py
index d9115e1..0dfd512 100644
--- a/tests/orchestrator/execution_plugin/test_local.py
+++ b/tests/orchestrator/execution_plugin/test_local.py
@@ -463,7 +463,7 @@ if __name__ == '__main__':
              inputs=None):
         local_script_path = script_path
         script_path = os.path.basename(local_script_path) if local_script_path else ''
-        inputs = inputs or {}
+        arguments = inputs or {}
         process = process or {}
         if script_path:
             workflow_context.resource.service.upload(
@@ -471,7 +471,7 @@ if __name__ == '__main__':
                 source=local_script_path,
                 path=script_path)
 
-        inputs.update({
+        arguments.update({
             'script_path': script_path,
             'process': process,
             'input_as_env_var': env_var
@@ -485,17 +485,17 @@ if __name__ == '__main__':
                 'test',
                 'op',
                 operation_kwargs=dict(
-                    implementation='{0}.{1}'.format(
+                    function='{0}.{1}'.format(
                         operations.__name__,
                         operations.run_script_locally.__name__),
-                    inputs=inputs)
+                    arguments=arguments)
             )
             node.interfaces[interface.name] = interface
             graph.add_tasks(api.task.OperationTask(
                 node,
                 interface_name='test',
                 operation_name='op',
-                inputs=inputs))
+                inputs=arguments))
             return graph
         tasks_graph = mock_workflow(ctx=workflow_context)  # pylint: disable=no-value-for-parameter
         eng = engine.Engine(

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/orchestrator/execution_plugin/test_ssh.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_ssh.py b/tests/orchestrator/execution_plugin/test_ssh.py
index 92d250e..a369f8f 100644
--- a/tests/orchestrator/execution_plugin/test_ssh.py
+++ b/tests/orchestrator/execution_plugin/test_ssh.py
@@ -217,7 +217,7 @@ class TestWithActualSSHServer(object):
         @workflow
         def mock_workflow(ctx, graph):
             node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-            inputs = {
+            arguments = {
                 'script_path': script_path,
                 'fabric_env': _FABRIC_ENV,
                 'process': process,
@@ -226,24 +226,24 @@ class TestWithActualSSHServer(object):
                 'test_operation': '',
             }
             if hide_output:
-                inputs['hide_output'] = hide_output
+                arguments['hide_output'] = hide_output
             if commands:
-                inputs['commands'] = commands
+                arguments['commands'] = commands
             interface = mock.models.create_interface(
                 node.service,
                 'test',
                 'op',
                 operation_kwargs=dict(
-                    implementation='{0}.{1}'.format(
+                    function='{0}.{1}'.format(
                         operations.__name__,
                         operation.__name__),
-                    inputs=inputs)
+                    arguments=arguments)
             )
             node.interfaces[interface.name] = interface
 
             ops = []
             for test_operation in test_operations:
-                op_inputs = inputs.copy()
+                op_inputs = arguments.copy()
                 op_inputs['test_operation'] = test_operation
                 ops.append(api.task.OperationTask(
                     node,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/orchestrator/test_workflow_runner.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/test_workflow_runner.py b/tests/orchestrator/test_workflow_runner.py
index 7374e50..405cb80 100644
--- a/tests/orchestrator/test_workflow_runner.py
+++ b/tests/orchestrator/test_workflow_runner.py
@@ -190,7 +190,7 @@ def test_execution_inputs_override_workflow_inputs(request):
 def test_execution_inputs_undeclared_inputs(request):
     mock_workflow = _setup_mock_workflow_in_service(request)
 
-    with pytest.raises(modeling_exceptions.UndeclaredInputsException):
+    with pytest.raises(modeling_exceptions.UndeclaredParametersException):
         _create_workflow_runner(request, mock_workflow, inputs={'undeclared_input': 'value'})
 
 
@@ -198,7 +198,7 @@ def test_execution_inputs_missing_required_inputs(request):
     mock_workflow = _setup_mock_workflow_in_service(
         request, inputs={'required_input': models.Parameter.wrap('required_input', value=None)})
 
-    with pytest.raises(modeling_exceptions.MissingRequiredInputsException):
+    with pytest.raises(modeling_exceptions.MissingRequiredParametersException):
         _create_workflow_runner(request, mock_workflow, inputs={})
 
 
@@ -206,13 +206,13 @@ def test_execution_inputs_wrong_type_inputs(request):
     mock_workflow = _setup_mock_workflow_in_service(
         request, inputs={'input': models.Parameter.wrap('input', 'value')})
 
-    with pytest.raises(modeling_exceptions.InputsOfWrongTypeException):
+    with pytest.raises(modeling_exceptions.ParametersOfWrongTypeException):
         _create_workflow_runner(request, mock_workflow, inputs={'input': 5})
 
 
 def test_execution_inputs_builtin_workflow_with_inputs(request):
     # built-in workflows don't have inputs
-    with pytest.raises(modeling_exceptions.UndeclaredInputsException):
+    with pytest.raises(modeling_exceptions.UndeclaredParametersException):
         _create_workflow_runner(request, 'install', inputs={'undeclared_input': 'value'})
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/orchestrator/workflows/api/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/api/test_task.py b/tests/orchestrator/workflows/api/test_task.py
index 642c785..d57e424 100644
--- a/tests/orchestrator/workflows/api/test_task.py
+++ b/tests/orchestrator/workflows/api/test_task.py
@@ -44,15 +44,15 @@ class TestOperationTask(object):
         plugin = mock.models.create_plugin('test_plugin', '0.1')
         ctx.model.node.update(plugin)
 
-        inputs = {'test_input': True}
+        arguments = {'test_input': True}
 
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
             operation_kwargs=dict(plugin=plugin,
-                                  implementation='op_path',
-                                  inputs=inputs),)
+                                  function='op_path',
+                                  arguments=arguments),)
 
         node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
         node.interfaces[interface_name] = interface
@@ -66,7 +66,7 @@ class TestOperationTask(object):
                 node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=inputs,
+                inputs=arguments,
                 max_attempts=max_attempts,
                 retry_interval=retry_interval,
                 ignore_failure=ignore_failure)
@@ -92,15 +92,15 @@ class TestOperationTask(object):
         plugin = mock.models.create_plugin('test_plugin', '0.1')
         ctx.model.plugin.update(plugin)
 
-        inputs = {'test_input': True}
+        arguments = {'test_input': True}
 
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
             operation_kwargs=dict(plugin=plugin,
-                                  implementation='op_path',
-                                  inputs=inputs)
+                                  function='op_path',
+                                  arguments=arguments)
         )
 
         relationship = ctx.model.relationship.list()[0]
@@ -113,7 +113,7 @@ class TestOperationTask(object):
                 relationship,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=inputs,
+                inputs=arguments,
                 max_attempts=max_attempts,
                 retry_interval=retry_interval)
 
@@ -137,15 +137,15 @@ class TestOperationTask(object):
         plugin = mock.models.create_plugin('test_plugin', '0.1')
         ctx.model.node.update(plugin)
 
-        inputs = {'test_input': True}
+        arguments = {'test_input': True}
 
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
             operation_kwargs=dict(plugin=plugin,
-                                  implementation='op_path',
-                                  inputs=inputs)
+                                  function='op_path',
+                                  arguments=arguments)
         )
 
         relationship = ctx.model.relationship.list()[0]
@@ -158,7 +158,7 @@ class TestOperationTask(object):
                 relationship,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=inputs,
+                inputs=arguments,
                 max_attempts=max_attempts,
                 retry_interval=retry_interval)
 
@@ -189,7 +189,7 @@ class TestOperationTask(object):
             interface_name,
             operation_name,
             operation_kwargs=dict(plugin=plugin,
-                                  implementation='op_path'))
+                                  function='op_path'))
         dependency_node.interfaces[interface_name] = interface
 
         with context.workflow.current.push(ctx):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/orchestrator/workflows/builtin/test_execute_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_execute_operation.py b/tests/orchestrator/workflows/builtin/test_execute_operation.py
index 4cddbe6..88818ca 100644
--- a/tests/orchestrator/workflows/builtin/test_execute_operation.py
+++ b/tests/orchestrator/workflows/builtin/test_execute_operation.py
@@ -35,7 +35,7 @@ def test_execute_operation(ctx):
         ctx.service,
         interface_name,
         operation_name,
-        operation_kwargs={'implementation': 'test'}
+        operation_kwargs=dict(function='test')
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index 8c0705b..43ec9f1 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -63,11 +63,11 @@ class BaseTest(object):
             ignore_failure=None):
         node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
         interface_name = 'aria.interfaces.lifecycle'
-        operation_kwargs = dict(implementation='{name}.{func.__name__}'.format(
+        operation_kwargs = dict(function='{name}.{func.__name__}'.format(
             name=__name__, func=func))
         if inputs:
-            # the operation has to declare the inputs before those may be passed
-            operation_kwargs['inputs'] = inputs
+            # the operation has to declare its arguments before they can be passed
+            operation_kwargs['arguments'] = inputs
         operation_name = 'create'
         interface = mock.models.create_interface(node.service, interface_name, operation_name,
                                                  operation_kwargs=operation_kwargs)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/orchestrator/workflows/core/test_events.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_events.py b/tests/orchestrator/workflows/core/test_events.py
index 184071d..6d542e9 100644
--- a/tests/orchestrator/workflows/core/test_events.py
+++ b/tests/orchestrator/workflows/core/test_events.py
@@ -110,8 +110,7 @@ def run_operation_on_node(ctx, op_name, interface_name):
         service=node.service,
         interface_name=interface_name,
         operation_name=op_name,
-        operation_kwargs=dict(implementation='{name}.{func.__name__}'.format(name=__name__,
-                                                                             func=func)))
+        operation_kwargs=dict(function='{name}.{func.__name__}'.format(name=__name__, func=func)))
     node.interfaces[interface.name] = interface
 
     eng = engine.Engine(executor=ThreadExecutor(),

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/orchestrator/workflows/core/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task.py b/tests/orchestrator/workflows/core/test_task.py
index e488933..1ba6422 100644
--- a/tests/orchestrator/workflows/core/test_task.py
+++ b/tests/orchestrator/workflows/core/test_task.py
@@ -43,7 +43,7 @@ def ctx(tmpdir):
         relationship.source_node.service,
         RELATIONSHIP_INTERFACE_NAME,
         RELATIONSHIP_OPERATION_NAME,
-        operation_kwargs={'implementation': 'test'}
+        operation_kwargs=dict(function='test')
     )
     relationship.interfaces[interface.name] = interface
     context.model.relationship.update(relationship)
@@ -53,7 +53,7 @@ def ctx(tmpdir):
         node.service,
         NODE_INTERFACE_NAME,
         NODE_OPERATION_NAME,
-        operation_kwargs={'implementation': 'test'}
+        operation_kwargs=dict(function='test')
     )
     node.interfaces[interface.name] = interface
     context.model.node.update(node)
@@ -92,7 +92,7 @@ class TestOperationTask(object):
             node.service,
             NODE_INTERFACE_NAME,
             NODE_OPERATION_NAME,
-            operation_kwargs=dict(plugin=storage_plugin, implementation='test')
+            operation_kwargs=dict(plugin=storage_plugin, function='test')
         )
         node.interfaces[interface.name] = interface
         ctx.model.node.update(node)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
index 2a96d01..1633d4a 100644
--- a/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
+++ b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
@@ -32,7 +32,7 @@ def test_task_graph_into_execution_graph(tmpdir):
         node.service,
         interface_name,
         operation_name,
-        operation_kwargs={'implementation': 'test'}
+        operation_kwargs=dict(function='test')
     )
     node.interfaces[interface.name] = interface
     task_context.model.node.update(node)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
index 92f0fc4..8c3f72a 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
@@ -67,7 +67,7 @@ def _test(context, executor, lock_files, func, dataholder, expected_failure):
     key = 'key'
     first_value = 'value1'
     second_value = 'value2'
-    inputs = {
+    arguments = {
         'lock_files': lock_files,
         'key': key,
         'first_value': first_value,
@@ -80,8 +80,8 @@ def _test(context, executor, lock_files, func, dataholder, expected_failure):
         node.service,
         interface_name,
         operation_name,
-        operation_kwargs=dict(implementation='{0}.{1}'.format(__name__, func.__name__),
-                              inputs=inputs)
+        operation_kwargs=dict(function='{0}.{1}'.format(__name__, func.__name__),
+                              arguments=arguments)
     )
     node.interfaces[interface.name] = interface
     context.model.node.update(node)
@@ -93,12 +93,12 @@ def _test(context, executor, lock_files, func, dataholder, expected_failure):
                 node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=inputs),
+                inputs=arguments),
             api.task.OperationTask(
                 node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=inputs)
+                inputs=arguments)
         )
 
     signal = events.on_failure_task_signal

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/orchestrator/workflows/executor/test_process_executor_extension.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_extension.py b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
index 30b23ed..acca0bf 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_extension.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
@@ -27,7 +27,7 @@ from tests import storage
 
 
 def test_decorate_extension(context, executor):
-    inputs = {'input1': 1, 'input2': 2}
+    arguments = {'input1': 1, 'input2': 2}
 
     def get_node(ctx):
         return ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
@@ -41,24 +41,23 @@ def test_decorate_extension(context, executor):
             ctx.service,
             interface_name,
             operation_name,
-            operation_kwargs=dict(implementation='{0}.{1}'.format(__name__,
-                                                                  _mock_operation.__name__),
-                                  inputs=inputs)
+            operation_kwargs=dict(function='{0}.{1}'.format(__name__, _mock_operation.__name__),
+                                  arguments=arguments)
         )
         node.interfaces[interface.name] = interface
         task = api.task.OperationTask(
             node,
             interface_name=interface_name,
             operation_name=operation_name,
-            inputs=inputs)
+            inputs=arguments)
         graph.add_tasks(task)
         return graph
     graph = mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter
     eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
     eng.execute()
     out = get_node(context).attributes.get('out').value
-    assert out['wrapper_inputs'] == inputs
-    assert out['function_inputs'] == inputs
+    assert out['wrapper_inputs'] == arguments
+    assert out['function_inputs'] == arguments
 
 
 @extension.process_executor

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
index 2b628a0..c766fe4 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
@@ -88,20 +88,20 @@ def _run_workflow(context, executor, op_func, inputs=None):
         node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
         interface_name = 'test_interface'
         operation_name = 'operation'
-        wf_inputs = inputs or {}
+        wf_arguments = inputs or {}
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
             operation_name,
-            operation_kwargs=dict(implementation=_operation_mapping(op_func),
-                                  inputs=wf_inputs)
+            operation_kwargs=dict(function=_operation_mapping(op_func),
+                                  arguments=wf_arguments)
         )
         node.interfaces[interface.name] = interface
         task = api.task.OperationTask(
             node,
             interface_name=interface_name,
             operation_name=operation_name,
-            inputs=wf_inputs)
+            inputs=wf_arguments)
         graph.add_tasks(task)
         return graph
     graph = mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/f6ee65a9/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
index 8e80640..ee9e094 100644
--- a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
+++ b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
@@ -104,6 +104,16 @@ topology_template:
         Maintenance:
           enable: juju > charm.maintenance_on
           disable: juju > charm.maintenance_off
+        Standard:
+          create:
+            implementation:
+              primary: create_node_cellar.sh
+              dependencies:
+                - "process.args.1 > { get_attribute: [ SELF, tosca_id ] }"
+                - "process.args.2 > { get_property: [ HOST, flavor_name ] }"
+                - ssh.user > admin
+                - ssh.password > '1234'
+                - ssh.use_sudo > true
       requirements:
         - database: node_cellar_database
       capabilities:
@@ -161,16 +171,7 @@ topology_template:
             relationship:
               interfaces:
                 Configure:
-                  target_changed:
-                    implementation:
-                      primary: changed.sh
-                      dependencies:
-                        #- { concat: [ process.args.1 >, mongodb ] }
-                        - process.args.1 > mongodb
-                        - process.args.2 > host
-                        - ssh.user > admin
-                        - ssh.password > 1234
-                        - ssh.use_sudo > true
+                  target_changed: changed.sh
 
     nginx:
       type: nginx.Nginx
@@ -251,6 +252,7 @@ topology_template:
         Standard:
           inputs:
             openstack_credential: { get_input: openstack_credential }
+          create: create_data_volume.sh
 
   groups:
   
@@ -309,7 +311,7 @@ policy_types:
       client connections cleanly and shut down services. 
     derived_from: aria.Workflow
     properties:
-      implementation:
+      function:
         type: string
         default: workflows.maintenance
       enabled:
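The Standard.create block added above exercises the branch's 'key > value' dependency syntax, with intrinsic functions allowed on the right-hand side. As a rough illustration only (not ARIA's actual parser), each entry splits on the first '>' into a configuration key and a raw value that is evaluated later:

    def split_dependency(dependency):
        # Split on the first '>'; the value may itself be an intrinsic-
        # function expression such as { get_attribute: [ SELF, tosca_id ] }.
        key, _, value = dependency.partition('>')
        return key.strip(), value.strip()

    split_dependency('process.args.1 > { get_attribute: [ SELF, tosca_id ] }')
    # -> ('process.args.1', '{ get_attribute: [ SELF, tosca_id ] }')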


[4/7] incubator-ariatosca git commit: ARIA-261 Single-source ARIA version

Posted by em...@apache.org.
ARIA-261 Single-source ARIA version


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/adf76079
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/adf76079
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/adf76079

Branch: refs/heads/ARIA-149-functions-in-operation-configuration
Commit: adf76079568b6ccaf89871907b9b98c22ef6681e
Parents: 50b997e
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Thu May 25 12:18:58 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Thu May 25 18:49:49 2017 +0300

----------------------------------------------------------------------
 MANIFEST.in                  |  2 ++
 VERSION                      |  1 +
 aria/VERSION.py              | 21 ---------------------
 aria/__init__.py             | 13 ++++---------
 aria/parser/reading/jinja.py |  2 +-
 requirements.in              |  3 ++-
 requirements.txt             |  8 ++++++--
 setup.py                     | 14 +++++++++-----
 8 files changed, 25 insertions(+), 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/adf76079/MANIFEST.in
----------------------------------------------------------------------
diff --git a/MANIFEST.in b/MANIFEST.in
index d934e18..6c79a3a 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,2 +1,4 @@
 include requirements.txt
+include VERSION
+include LICENSE
 recursive-include examples *

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/adf76079/VERSION
----------------------------------------------------------------------
diff --git a/VERSION b/VERSION
new file mode 100644
index 0000000..6c6aa7c
--- /dev/null
+++ b/VERSION
@@ -0,0 +1 @@
+0.1.0
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/adf76079/aria/VERSION.py
----------------------------------------------------------------------
diff --git a/aria/VERSION.py b/aria/VERSION.py
deleted file mode 100644
index 9ce332c..0000000
--- a/aria/VERSION.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-ARIA Version module:
-    * version: ARIA Package version
-"""
-
-version = '0.1.0'  # pylint: disable=C0103

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/adf76079/aria/__init__.py
----------------------------------------------------------------------
diff --git a/aria/__init__.py b/aria/__init__.py
index df75b1e..34db3a8 100644
--- a/aria/__init__.py
+++ b/aria/__init__.py
@@ -19,7 +19,8 @@ ARIA top level package
 
 import sys
 
-from .VERSION import version as __version__
+import pkg_resources
+__version__ = pkg_resources.get_distribution('aria').version
 
 from .orchestrator.decorators import workflow, operation
 from . import (
@@ -39,11 +40,6 @@ if sys.version_info < (2, 7):
 else:
     from pkgutil import iter_modules
 
-try:
-    import pkg_resources
-except ImportError:
-    pkg_resources = None
-
 __all__ = (
     '__version__',
     'workflow',
@@ -60,9 +56,8 @@ def install_aria_extensions():
     for loader, module_name, _ in iter_modules():
         if module_name.startswith('aria_extension_'):
             loader.find_module(module_name).load_module(module_name)
-    if pkg_resources:
-        for entry_point in pkg_resources.iter_entry_points(group='aria_extension'):
-            entry_point.load()
+    for entry_point in pkg_resources.iter_entry_points(group='aria_extension'):
+        entry_point.load()
     extension.init()
 
 

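With this change, the installed distribution becomes the single source of the version string. Note that pkg_resources.get_distribution() raises DistributionNotFound when the package is not installed (for example, an uninstalled source checkout); a defensive variant, sketched here for illustration only and not part of this commit, could guard for that:

    import pkg_resources

    try:
        __version__ = pkg_resources.get_distribution('aria').version
    except pkg_resources.DistributionNotFound:
        # Not installed (e.g. running from a plain source tree)
        __version__ = None
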
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/adf76079/aria/parser/reading/jinja.py
----------------------------------------------------------------------
diff --git a/aria/parser/reading/jinja.py b/aria/parser/reading/jinja.py
index 17bf49e..687317a 100644
--- a/aria/parser/reading/jinja.py
+++ b/aria/parser/reading/jinja.py
@@ -14,7 +14,7 @@ import os
 
 from jinja2 import Template
 
-from ...VERSION import version
+from ... import __version__ as version
 from ..loading import LiteralLocation, LiteralLoader
 from .reader import Reader
 from .exceptions import ReaderSyntaxError

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/adf76079/requirements.in
----------------------------------------------------------------------
diff --git a/requirements.in b/requirements.in
index 1dd5b1f..ab06d93 100644
--- a/requirements.in
+++ b/requirements.in
@@ -28,8 +28,9 @@ SQLAlchemy>=1.1.0, <1.2  # version 1.2 dropped support of python 2.6
 wagon==0.6.0
 bottle>=0.12.0, <0.13
 Fabric>=1.13.0, <1.14
+setuptools>=35.0.0, <36.0.0
 click>=4.1, < 5.0
-colorama>=0.3.7, <= 0.3.9
+colorama>=0.3.7, <=0.3.9
 PrettyTable>=0.7,<0.8
 click_didyoumean==0.0.3
 backports.shutil_get_terminal_size==1.0.0

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/adf76079/requirements.txt
----------------------------------------------------------------------
diff --git a/requirements.txt b/requirements.txt
index f7efce9..8551c65 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -17,6 +17,7 @@ total-ordering ; python_version < '2.7'  # only one version on pypi
 pypiwin32==219 ; sys_platform == 'win32'
 # ----------------------------------------------------------------------------------
 
+appdirs==1.4.3            # via setuptools
 args==0.1.0               # via clint
 asn1crypto==0.22.0        # via cryptography
 backports.shutil_get_terminal_size==1.0.0
@@ -41,7 +42,7 @@ logutils==0.3.4.1
 markupsafe==1.0           # via jinja2
 msgpack-python==0.4.8     # via cachecontrol
 networkx==1.9.1
-packaging==16.8           # via cryptography
+packaging==16.8           # via cryptography, setuptools
 paramiko==2.1.2           # via fabric
 prettytable==0.7.2
 pyasn1==0.2.3             # via paramiko
@@ -53,7 +54,10 @@ retrying==1.3.3
 ruamel.ordereddict==0.4.9  # via ruamel.yaml
 ruamel.yaml==0.11.15
 shortuuid==0.5.0
-six==1.10.0               # via cryptography, packaging, retrying
+six==1.10.0               # via cryptography, packaging, retrying, setuptools
 sqlalchemy==1.1.6
 wagon==0.6.0
 wheel==0.29.0             # via wagon
+
+# The following packages are considered to be unsafe in a requirements file:
+setuptools==35.0.2

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/adf76079/setup.py
----------------------------------------------------------------------
diff --git a/setup.py b/setup.py
index c0ad75b..d43ce91 100644
--- a/setup.py
+++ b/setup.py
@@ -21,6 +21,7 @@ from setuptools import setup, find_packages
 from setuptools.command.install import install
 from setuptools.command.develop import develop
 
+
 _PACKAGE_NAME = 'aria'
 _PYTHON_SUPPORTED_VERSIONS = [(2, 6), (2, 7)]
 _EXTENSION_DIR = 'extensions'
@@ -35,9 +36,8 @@ if (sys.version_info[0], sys.version_info[1]) not in _PYTHON_SUPPORTED_VERSIONS:
 
 root_dir = os.path.dirname(__file__)
 
-version = '0.1.0'
-execfile(os.path.join(root_dir, _PACKAGE_NAME, 'VERSION.py'))
-
+with open(os.path.join(root_dir, 'VERSION')) as version_file:
+    __version__ = version_file.read().strip()
 
 install_requires = []
 extras_require = {}
@@ -104,9 +104,9 @@ class DevelopCommand(develop):
 
 setup(
     name=_PACKAGE_NAME,
-    version=version,
+    version=__version__,
     description='ARIA',
-    license='Apache License Version 2.0',
+    license='Apache License 2.0',
     author='aria',
     author_email='dev@ariatosca.incubator.apache.org',
     url='http://ariatosca.org',
@@ -119,6 +119,9 @@ setup(
         'License :: OSI Approved :: Apache Software License',
         'Operating System :: OS Independent',
         'Programming Language :: Python',
+        'Programming Language :: Python :: 2',
+        'Programming Language :: Python :: 2.6',
+        'Programming Language :: Python :: 2.7',
         'Topic :: Software Development :: Libraries :: Python Modules',
         'Topic :: System :: Networking',
         'Topic :: System :: Systems Administration'],
@@ -136,6 +139,7 @@ setup(
             'profiles/aria-1.0/**'
         ]
     },
+    platforms=['any'],
     zip_safe=False,
     install_requires=install_requires,
     extras_require=extras_require,


[7/7] incubator-ariatosca git commit: Fixes

Posted by em...@apache.org.
Fixes

* Rename implementation/inputs to function/arguments in Task API
* Rename "create_parameters" to "merge_parameter_values" and improve
* Change workflow "function" back to "implementation"


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/888c5cd6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/888c5cd6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/888c5cd6

Branch: refs/heads/ARIA-149-functions-in-operation-configuration
Commit: 888c5cd6f86a12e096a8ce040aedd0f62c5eac0e
Parents: f6ee65a
Author: Tal Liron <ta...@gmail.com>
Authored: Wed May 24 14:54:07 2017 -0500
Committer: Tal Liron <ta...@gmail.com>
Committed: Fri May 26 12:45:07 2017 -0500

----------------------------------------------------------------------
 aria/cli/execution_logging.py                   |   4 +-
 aria/modeling/exceptions.py                     |   6 +
 aria/modeling/orchestration.py                  |  14 +--
 aria/modeling/service_instance.py               |  11 +-
 aria/modeling/service_template.py               |  25 ++--
 aria/modeling/utils.py                          | 126 ++++++++++---------
 aria/orchestrator/context/operation.py          |   4 +-
 .../execution_plugin/instantiation.py           |   2 +-
 aria/orchestrator/workflow_runner.py            |  20 +--
 aria/orchestrator/workflows/api/task.py         |  89 +++++++++----
 .../workflows/builtin/execute_operation.py      |   2 +-
 aria/orchestrator/workflows/core/task.py        |   4 +-
 aria/orchestrator/workflows/events_logging.py   |   8 +-
 aria/orchestrator/workflows/executor/base.py    |   4 +-
 aria/orchestrator/workflows/executor/celery.py  |   6 +-
 aria/orchestrator/workflows/executor/dry.py     |   6 +-
 aria/orchestrator/workflows/executor/process.py |  12 +-
 aria/orchestrator/workflows/executor/thread.py  |   6 +-
 aria/utils/formatting.py                        |   4 +-
 aria/utils/validation.py                        |   8 +-
 .../profiles/aria-1.0/aria-1.0.yaml             |   8 +-
 .../simple_v1_0/modeling/__init__.py            |  22 ++--
 tests/modeling/test_models.py                   |  12 +-
 tests/orchestrator/context/test_operation.py    |  47 ++++---
 tests/orchestrator/context/test_serialize.py    |   4 +-
 tests/orchestrator/context/test_toolbelt.py     |   4 +-
 .../orchestrator/execution_plugin/test_local.py |   8 +-
 tests/orchestrator/execution_plugin/test_ssh.py |   6 +-
 tests/orchestrator/test_workflow_runner.py      |   8 +-
 tests/orchestrator/workflows/api/test_task.py   |  20 +--
 .../orchestrator/workflows/core/test_engine.py  |  40 +++---
 tests/orchestrator/workflows/core/test_task.py  |   4 +-
 .../test_task_graph_into_execution_graph.py     |   4 +-
 .../orchestrator/workflows/executor/__init__.py |   6 +-
 .../workflows/executor/test_executor.py         |  10 +-
 .../workflows/executor/test_process_executor.py |   2 +-
 ...process_executor_concurrent_modifications.py |   4 +-
 .../executor/test_process_executor_extension.py |  21 +++-
 .../test_process_executor_tracked_changes.py    |  23 +++-
 .../node-cellar/node-cellar.yaml                |   2 +-
 40 files changed, 350 insertions(+), 266 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/cli/execution_logging.py
----------------------------------------------------------------------
diff --git a/aria/cli/execution_logging.py b/aria/cli/execution_logging.py
index b23165f..b3252f0 100644
--- a/aria/cli/execution_logging.py
+++ b/aria/cli/execution_logging.py
@@ -105,8 +105,8 @@ def stylize_log(item, mark_pattern):
     # implementation
     if item.task:
         # operation task
-        implementation = item.task.implementation
-        inputs = dict(i.unwrap() for i in item.task.inputs.values())
+        implementation = item.task.function
+        inputs = dict(arg.unwrap() for arg in item.task.arguments.values())
     else:
         # execution task
         implementation = item.execution.workflow_name

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/modeling/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/modeling/exceptions.py b/aria/modeling/exceptions.py
index d0e3e22..e784d1a 100644
--- a/aria/modeling/exceptions.py
+++ b/aria/modeling/exceptions.py
@@ -57,3 +57,9 @@ class UndeclaredParametersException(ParameterException):
     """
     ARIA modeling exception: Undeclared parameters have been provided.
     """
+
+
+class ForbiddenParameterNamesException(ParameterException):
+    """
+    ARIA modeling exception: Forbidden parameter names have been used.
+    """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/modeling/orchestration.py
----------------------------------------------------------------------
diff --git a/aria/modeling/orchestration.py b/aria/modeling/orchestration.py
index ab9d34d..97de552 100644
--- a/aria/modeling/orchestration.py
+++ b/aria/modeling/orchestration.py
@@ -230,10 +230,10 @@ class TaskBase(ModelMixin):
     :vartype relationship: :class:`Relationship`
     :ivar plugin: The implementing plugin (set to None for default execution plugin)
     :vartype plugin: :class:`Plugin`
-    :ivar inputs: Parameters that can be used by this task
-    :vartype inputs: {basestring: :class:`Parameter`}
-    :ivar implementation: Python path to an ``@operation`` function
-    :vartype implementation: basestring
+    :ivar function: Python path to an ``@operation`` function
+    :vartype function: basestring
+    :ivar arguments: Arguments that can be used by this task
+    :vartype arguments: {basestring: :class:`Parameter`}
     :ivar max_attempts: Maximum number of retries allowed in case of failure
     :vartype max_attempts: int
     :ivar retry_interval: Interval between retries (in seconds)
@@ -300,10 +300,10 @@ class TaskBase(ModelMixin):
         return relationship.many_to_one(cls, 'execution')
 
     @declared_attr
-    def inputs(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+    def arguments(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='arguments', dict_key='name')
 
-    implementation = Column(String)
+    function = Column(String)
     max_attempts = Column(Integer, default=1)
     retry_interval = Column(Float, default=0)
     ignore_failure = Column(Boolean, default=False)

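To see the renamed columns in isolation, here is a minimal self-contained SQLAlchemy sketch; this is a hypothetical standalone model for illustration, not ARIA's actual mixin and relationship hierarchy:

    from sqlalchemy import Boolean, Column, Float, Integer, String
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Task(Base):
        __tablename__ = 'task'
        id = Column(Integer, primary_key=True)
        function = Column(String)                  # Python path to an @operation function
        max_attempts = Column(Integer, default=1)
        retry_interval = Column(Float, default=0)
        ignore_failure = Column(Boolean, default=False)
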
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/modeling/service_instance.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py
index 31f7212..72e2478 100644
--- a/aria/modeling/service_instance.py
+++ b/aria/modeling/service_instance.py
@@ -1753,17 +1753,18 @@ class OperationBase(InstanceModelMixin):
         if (self.implementation is None) and (self.function is None):
             return
 
-        if (self.plugin is None) and (self.interface is not None):
-            # Default to execution plugin ("interface" is None for workflow operations)
+        if (self.interface is not None) and (self.plugin is None) and (self.function is None):
+            # ("interface" is None for workflow operations, which do not currently use "plugin")
+            # The default (None) plugin is the execution plugin
             execution_plugin.instantiation.configure_operation(self)
         else:
             # In the future plugins may be able to add their own "configure_operation" hook that
             # can validate the configuration and otherwise create specially derived arguments. For
-            # now, we just send all configuration parameters as arguments
+            # now, we just send all configuration parameters as arguments without validation.
             utils.instantiate_dict(self, self.arguments, self.configuration)
 
-        # Send all inputs as extra arguments. Note that they will override existing arguments of the
-        # same names.
+        # Send all inputs as extra arguments
+        # Note that they will override existing arguments of the same names
         utils.instantiate_dict(self, self.arguments, self.inputs)
 
     @property

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
index b4a54ca..42e0d01 100644
--- a/aria/modeling/service_template.py
+++ b/aria/modeling/service_template.py
@@ -287,7 +287,7 @@ class ServiceTemplateBase(TemplateModelMixin):
                                  service_template=self)
         context.modeling.instance = service
 
-        service.inputs = utils.create_parameters(inputs or {}, self.inputs)
+        service.inputs = utils.merge_parameter_values(inputs, self.inputs)
         # TODO: now that we have inputs, we should scan properties and inputs and evaluate functions
 
         for plugin_specification in self.plugin_specifications.itervalues():
@@ -1883,21 +1883,10 @@ class OperationTemplateBase(TemplateModelMixin):
 
     def instantiate(self, container):
         from . import models
-        if self.plugin_specification:
-            if self.plugin_specification.enabled:
-                plugin = self.plugin_specification.plugin
-                function = self.function if plugin is not None else None
-                # "plugin" would be none if a match was not found. In that case, a validation error
-                # should already have been reported in ServiceTemplateBase.instantiate, so we will
-                # continue silently here
-            else:
-                # If the plugin is disabled, the operation should be disabled, too
-                plugin = None
-                function = None
-        else:
-            # Using the default execution plugin (plugin=None)
-            plugin = None
-            function = self.function
+
+        plugin = self.plugin_specification.plugin \
+            if (self.plugin_specification is not None) and self.plugin_specification.enabled \
+            else None
 
         operation = models.Operation(name=self.name,
                                      description=deepcopy_with_locators(self.description),
@@ -1906,12 +1895,14 @@ class OperationTemplateBase(TemplateModelMixin):
                                      dependencies=self.dependencies,
                                      executor=self.executor,
                                      plugin=plugin,
-                                     function=function,
+                                     function=self.function,
                                      max_attempts=self.max_attempts,
                                      retry_interval=self.retry_interval,
                                      operation_template=self)
+
         utils.instantiate_dict(container, operation.inputs, self.inputs)
         utils.instantiate_dict(container, operation.configuration, self.configuration)
+
         return operation
 
     def validate(self):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/modeling/utils.py
----------------------------------------------------------------------
diff --git a/aria/modeling/utils.py b/aria/modeling/utils.py
index 6f4022c..ef9a53a 100644
--- a/aria/modeling/utils.py
+++ b/aria/modeling/utils.py
@@ -22,6 +22,7 @@ from ..parser.consumption import ConsumptionContext
 from ..utils.console import puts
 from ..utils.type import validate_value_type
 from ..utils.collections import OrderedDict
+from ..utils.formatting import string_list_as_string
 
 
 class ModelJSONEncoder(JSONEncoder):
@@ -52,84 +53,95 @@ class NodeTemplateContainerHolder(object):
         return self.container.service_template
 
 
-def create_parameters(parameters, declared_parameters):
+def merge_parameter_values(parameter_values, declared_parameters, forbidden_names=None):
     """
-    Validates, merges, and wraps parameter values according to those declared by a type.
+    Merges parameter values according to those declared by a type.
 
-    Exceptions will be raised for validation errors:
+    Exceptions will be raised for validation errors.
 
-    * :class:`aria.modeling.exceptions.UndeclaredParametersException` if a key in ``parameters``
-      does not exist in ``declared_parameters``
-    * :class:`aria.modeling.exceptions.MissingRequiredParametersException` if a key in
-      ``declared_parameters`` does not exist in ``parameters`` and also has no default value
-    * :class:`aria.modeling.exceptions.ParametersOfWrongTypeException` if a value in ``parameters``
-      does not match its type in ``declared_parameters``
-
-    :param parameters: Provided parameter values
-    :type parameters: {basestring, object}
-    :param declared_parameters: Declared parameters
+    :param parameter_values: provided parameter values or None
+    :type parameter_values: {basestring, object}
+    :param declared_parameters: declared parameters
     :type declared_parameters: {basestring, :class:`aria.modeling.models.Parameter`}
-    :return: The merged parameters
+    :param forbidden_names: parameters will be validated against these names
+    :type forbidden_names: [basestring]
+    :return: the merged parameters
     :rtype: {basestring, :class:`aria.modeling.models.Parameter`}
+    :raises aria.modeling.exceptions.UndeclaredParametersException: if a key in ``parameter_values``
+            does not exist in ``declared_parameters``
+    :raises aria.modeling.exceptions.MissingRequiredParametersException: if a key in
+            ``declared_parameters`` does not exist in ``parameter_values`` and also has no default
+            value
+    :raises aria.modeling.exceptions.ForbiddenParameterNamesException: if a parameter name is in
+            ``forbidden_names``
+    :raises aria.modeling.exceptions.ParametersOfWrongTypeException: if a value in
+            ``parameter_values`` does not match its type in ``declared_parameters``
     """
 
-    merged_parameters = _merge_and_validate_parameters(parameters, declared_parameters)
-
     from . import models
-    parameters_models = OrderedDict()
-    for parameter_name, parameter_value in merged_parameters.iteritems():
-        parameter = models.Parameter( # pylint: disable=unexpected-keyword-arg
-            name=parameter_name,
-            type_name=declared_parameters[parameter_name].type_name,
-            description=declared_parameters[parameter_name].description,
-            value=parameter_value)
-        parameters_models[parameter.name] = parameter
-
-    return parameters_models
-
-
-def _merge_and_validate_parameters(parameters, declared_parameters):
-    merged_parameters = OrderedDict(parameters)
-
-    missing_parameters = []
-    wrong_type_parameters = OrderedDict()
-    for parameter_name, declared_parameter in declared_parameters.iteritems():
-        if parameter_name not in parameters:
-            if declared_parameter.value is not None:
-                merged_parameters[parameter_name] = declared_parameter.value  # apply default value
-            else:
-                missing_parameters.append(parameter_name)
-        else:
-            # Validate parameter type
+
+    parameter_values = parameter_values or {}
+
+    undeclared_names = list(set(parameter_values.keys()).difference(declared_parameters.keys()))
+    if undeclared_names:
+        raise exceptions.UndeclaredParametersException(
+            'Undeclared parameters have been provided: {0}; Declared: {1}'
+            .format(string_list_as_string(undeclared_names),
+                    string_list_as_string(declared_parameters.keys())))
+
+    parameters = OrderedDict()
+
+    missing_names = []
+    wrong_type_values = OrderedDict()
+    for declared_parameter_name, declared_parameter in declared_parameters.iteritems():
+        if declared_parameter_name in parameter_values:
+            # Value has been provided
+            value = parameter_values[declared_parameter_name]
+
+            # Validate type
+            type_name = declared_parameter.type_name
             try:
-                validate_value_type(parameters[parameter_name], declared_parameter.type_name)
+                validate_value_type(value, type_name)
             except ValueError:
-                wrong_type_parameters[parameter_name] = declared_parameter.type_name
+                wrong_type_values[declared_parameter_name] = type_name
             except RuntimeError:
                 # TODO: This error shouldn't be raised (or caught), but right now we lack support
                 # for custom data_types, which will raise this error. Skipping their validation.
                 pass
 
-    if missing_parameters:
+            # Wrap in Parameter model
+            parameters[declared_parameter_name] = models.Parameter( # pylint: disable=unexpected-keyword-arg
+                name=declared_parameter_name,
+                type_name=type_name,
+                description=declared_parameter.description,
+                value=value)
+        elif declared_parameter.value is not None:
+            # Copy default value from declaration
+            parameters[declared_parameter_name] = declared_parameter.instantiate(None)
+        else:
+            # Required value has not been provided
+            missing_names.append(declared_parameter_name)
+
+    if missing_names:
         raise exceptions.MissingRequiredParametersException(
-            'Required parameters {0} have not been specified; Expected parameters: {1}'
-            .format(missing_parameters, declared_parameters.keys()))
+            'Declared parameters {0} have not been provided values'
+            .format(string_list_as_string(missing_names)))
 
-    if wrong_type_parameters:
+    if forbidden_names:
+        used_forbidden_names = list(set(forbidden_names).intersection(parameters.keys()))
+        if used_forbidden_names:
+            raise exceptions.ForbiddenParameterNamesException(
+                'Forbidden parameter names have been used: {0}'
+                .format(string_list_as_string(used_forbidden_names)))
+
+    if wrong_type_values:
         error_message = StringIO()
-        for param_name, param_type in wrong_type_parameters.iteritems():
-            error_message.write('Parameter "{0}" must be of type {1}{2}'
+        for param_name, param_type in wrong_type_values.iteritems():
+            error_message.write('Parameter "{0}" is not of declared type "{1}"{2}'
                                 .format(param_name, param_type, os.linesep))
         raise exceptions.ParametersOfWrongTypeException(error_message.getvalue())
 
-    undeclared_parameters = [parameter_name for parameter_name in parameters.keys()
-                             if parameter_name not in declared_parameters]
-    if undeclared_parameters:
-        raise exceptions.UndeclaredParametersException(
-            'Undeclared parameters have been specified: {0}; Expected parameters: {1}'
-            .format(undeclared_parameters, declared_parameters.keys()))
-
-    return merged_parameters
+    return parameters
 
 
 def coerce_dict_values(the_dict, report_issues=False):

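The validation performed by merge_parameter_values boils down to set arithmetic over parameter names; a minimal sketch with hypothetical declared/provided dicts:

    declared = {'host': 'string', 'port': 'integer'}   # name -> declared type
    provided = {'port': 8080, 'debug': True}

    undeclared = set(provided) - set(declared)  # {'debug'} -> UndeclaredParametersException
    missing = set(declared) - set(provided)     # {'host'} -> missing, unless it has a default
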
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/orchestrator/context/operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/operation.py b/aria/orchestrator/context/operation.py
index 7c21351..f0ba337 100644
--- a/aria/orchestrator/context/operation.py
+++ b/aria/orchestrator/context/operation.py
@@ -42,8 +42,8 @@ class BaseOperationContext(common.BaseContext):
         self._register_logger(task_id=self.task.id, level=logger_level)
 
     def __repr__(self):
-        details = 'implementation={task.implementation}; ' \
-                  'operation_inputs={task.inputs}'\
+        details = 'function={task.function}; ' \
+                  'operation_arguments={task.arguments}'\
             .format(task=self.task)
         return '{name}({0})'.format(details, name=self.name)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/orchestrator/execution_plugin/instantiation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/instantiation.py b/aria/orchestrator/execution_plugin/instantiation.py
index 26c3913..869127e 100644
--- a/aria/orchestrator/execution_plugin/instantiation.py
+++ b/aria/orchestrator/execution_plugin/instantiation.py
@@ -43,7 +43,7 @@ def configure_operation(operation):
     # kwargs in either "run_script_locally" or "run_script_with_ssh"
     for key, value in operation.configuration.iteritems():
         if key not in ('process', 'ssh'):
-            operation.arguments[key] = value.instantiate()
+            operation.arguments[key] = value.instantiate(None)
 
 
 def _configure_common(operation):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/orchestrator/workflow_runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflow_runner.py b/aria/orchestrator/workflow_runner.py
index 0c6321f..2d373c8 100644
--- a/aria/orchestrator/workflow_runner.py
+++ b/aria/orchestrator/workflow_runner.py
@@ -42,9 +42,10 @@ class WorkflowRunner(object):
                  executor=None, task_max_attempts=DEFAULT_TASK_MAX_ATTEMPTS,
                  task_retry_interval=DEFAULT_TASK_RETRY_INTERVAL):
         """
-        Manages a single workflow execution on a given service
+        Manages a single workflow execution on a given service.
+
         :param workflow_name: Workflow name
-        :param service_id: Service id
+        :param service_id: Service ID
         :param inputs: A key-value dict of inputs for the execution
         :param model_storage: Model storage
         :param resource_storage: Resource storage
@@ -64,7 +65,7 @@ class WorkflowRunner(object):
 
         self._validate_workflow_exists_for_service()
 
-        workflow_fn = self._get_workflow_fn()
+        workflow_fn = self._workflow_fn
 
         execution = self._create_execution_model(inputs)
         self._execution_id = execution.id
@@ -119,7 +120,7 @@ class WorkflowRunner(object):
         else:
             workflow_inputs = self.service.workflows[self._workflow_name].inputs
 
-        execution.inputs = modeling_utils.create_parameters(inputs, workflow_inputs)
+        execution.inputs = modeling_utils.merge_parameter_values(inputs, workflow_inputs)
         # TODO: these two following calls should execute atomically
         self._validate_no_active_executions(execution)
         self._model_storage.execution.put(execution)
@@ -136,10 +137,11 @@ class WorkflowRunner(object):
         active_executions = [e for e in self.service.executions if e.is_active()]
         if active_executions:
             raise exceptions.ActiveExecutionsError(
-                "Can't start execution; Service {0} has an active execution with id {1}"
+                "Can't start execution; Service {0} has an active execution with ID {1}"
                 .format(self.service.name, active_executions[0].id))
 
-    def _get_workflow_fn(self):
+    @property
+    def _workflow_fn(self):
         if self._workflow_name in builtin.BUILTIN_WORKFLOWS:
             return import_fullname('{0}.{1}'.format(builtin.BUILTIN_WORKFLOWS_PATH_PREFIX,
                                                     self._workflow_name))
@@ -156,10 +158,10 @@ class WorkflowRunner(object):
         sys.path.append(service_template_resources_path)
 
         try:
-            workflow_fn = import_fullname(workflow.implementation)
+            workflow_fn = import_fullname(workflow.function)
         except ImportError:
             raise exceptions.WorkflowImplementationNotFoundError(
-                'Could not find workflow {0} implementation at {1}'.format(
-                    self._workflow_name, workflow.implementation))
+                'Could not find workflow {0} function at {1}'.format(
+                    self._workflow_name, workflow.function))
 
         return workflow_fn

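import_fullname resolves a dotted path such as 'workflows.maintenance' to a callable. A rough stdlib approximation, assuming a fully dotted 'module.attribute' path (the real helper in aria.utils may differ):

    import importlib

    def import_fullname(path):
        module_name, _, attr_name = path.rpartition('.')
        module = importlib.import_module(module_name)
        return getattr(module, attr_name)
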
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/orchestrator/workflows/api/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task.py b/aria/orchestrator/workflows/api/task.py
index aa6ac45..feacaf4 100644
--- a/aria/orchestrator/workflows/api/task.py
+++ b/aria/orchestrator/workflows/api/task.py
@@ -55,7 +55,28 @@ class BaseTask(object):
 
 class OperationTask(BaseTask):
     """
-    Represents an operation task in the task graph
+    Represents an operation task in the task graph.
+
+    :ivar name: formatted name (includes actor type, actor name, and interface/operation names)
+    :vartype name: basestring
+    :ivar actor: node or relationship
+    :vartype actor: :class:`aria.modeling.models.Node`|:class:`aria.modeling.models.Relationship`
+    :ivar interface_name: interface name on actor
+    :vartype interface_name: basestring
+    :ivar operation_name: operation name on interface
+    :vartype operation_name: basestring
+    :ivar plugin: plugin (or None for default plugin)
+    :vartype plugin: :class:`aria.modeling.models.Plugin`
+    :ivar function: path to Python function
+    :vartype function: basestring
+    :ivar arguments: arguments to send to Python function
+    :vartype arguments: {basestring, :class:`aria.modeling.models.Parameter`}
+    :ivar ignore_failure: whether to ignore failures
+    :vartype ignore_failure: bool
+    :ivar max_attempts: maximum number of attempts allowed in case of failure
+    :vartype max_attempts: int
+    :ivar retry_interval: interval between retries (in seconds)
+    :vartype retry_interval: int
     """
 
     NAME_FORMAT = '{interface}:{operation}@{type}:{name}'
@@ -64,43 +85,61 @@ class OperationTask(BaseTask):
                  actor,
                  interface_name,
                  operation_name,
-                 inputs=None,
+                 arguments=None,
+                 ignore_failure=None,
                  max_attempts=None,
-                 retry_interval=None,
-                 ignore_failure=None):
+                 retry_interval=None):
         """
-        Do not call this constructor directly. Instead, use :meth:`for_node` or
-        :meth:`for_relationship`.
+        :param actor: node or relationship
+        :type actor: :class:`aria.modeling.models.Node`|:class:`aria.modeling.models.Relationship`
+        :param interface_name: interface name on actor
+        :type interface_name: basestring
+        :param operation_name: operation name on interface
+        :type operation_name: basestring
+        :param arguments: override argument values
+        :type arguments: {basestring, object}
+        :param ignore_failure: override whether to ignore failures
+        :type ignore_failure: bool
+        :param max_attempts: override maximum number of attempts allowed in case of failure
+        :type max_attempts: int
+        :param retry_interval: override interval between retries (in seconds)
+        :type retry_interval: int
+        :raises aria.orchestrator.workflows.exceptions.OperationNotFoundException: if
+                ``interface_name`` and ``operation_name`` do not refer to an operation on the actor
         """
+
         assert isinstance(actor, (models.Node, models.Relationship))
-        super(OperationTask, self).__init__()
-        self.actor = actor
-        self.interface_name = interface_name
-        self.operation_name = operation_name
-        self.max_attempts = max_attempts or self.workflow_context._task_max_attempts
-        self.retry_interval = retry_interval or self.workflow_context._task_retry_interval
-        self.ignore_failure = \
-            self.workflow_context._task_ignore_failure if ignore_failure is None else ignore_failure
-        self.name = OperationTask.NAME_FORMAT.format(type=type(actor).__name__.lower(),
-                                                     name=actor.name,
-                                                     interface=self.interface_name,
-                                                     operation=self.operation_name)
+
         # Creating OperationTask directly should raise an error when there is no
         # interface/operation.
-
-        if not has_operation(self.actor, self.interface_name, self.operation_name):
+        if not has_operation(actor, interface_name, operation_name):
             raise exceptions.OperationNotFoundException(
-                'Could not find operation "{self.operation_name}" on interface '
-                '"{self.interface_name}" for {actor_type} "{actor.name}"'.format(
-                    self=self,
+                'Could not find operation "{operation_name}" on interface '
+                '"{interface_name}" for {actor_type} "{actor.name}"'.format(
+                    operation_name=operation_name,
+                    interface_name=interface_name,
                     actor_type=type(actor).__name__.lower(),
                     actor=actor)
             )
 
+        super(OperationTask, self).__init__()
+
+        self.name = OperationTask.NAME_FORMAT.format(type=type(actor).__name__.lower(),
+                                                     name=actor.name,
+                                                     interface=interface_name,
+                                                     operation=operation_name)
+        self.actor = actor
+        self.interface_name = interface_name
+        self.operation_name = operation_name
+        self.ignore_failure = \
+            self.workflow_context._task_ignore_failure if ignore_failure is None else ignore_failure
+        self.max_attempts = max_attempts or self.workflow_context._task_max_attempts
+        self.retry_interval = retry_interval or self.workflow_context._task_retry_interval
+
         operation = self.actor.interfaces[self.interface_name].operations[self.operation_name]
         self.plugin = operation.plugin
-        self.inputs = modeling_utils.create_parameters(inputs or {}, operation.arguments)
-        self.implementation = operation.function
+        self.function = operation.function
+        self.arguments = modeling_utils.merge_parameter_values(arguments, operation.arguments)
 
     def __repr__(self):
         return self.name

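A hedged usage sketch of the new constructor signature, called from within an @workflow function (interface, operation, and argument names here are hypothetical; the execute_operation diff below shows a real call site):

    task = OperationTask(
        node,                       # a models.Node or models.Relationship
        interface_name='Standard',
        operation_name='create',
        arguments={'port': 8080},   # overrides merged into the operation's declared arguments
        max_attempts=3)
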
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/orchestrator/workflows/builtin/execute_operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/execute_operation.py b/aria/orchestrator/workflows/builtin/execute_operation.py
index 02a654a..437e584 100644
--- a/aria/orchestrator/workflows/builtin/execute_operation.py
+++ b/aria/orchestrator/workflows/builtin/execute_operation.py
@@ -69,7 +69,7 @@ def execute_operation(
                 node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=operation_kwargs
+                arguments=operation_kwargs
             )
         )
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py
index 0d6eb11..72d83ea 100644
--- a/aria/orchestrator/workflows/core/task.py
+++ b/aria/orchestrator/workflows/core/task.py
@@ -146,8 +146,8 @@ class OperationTask(BaseTask):
 
             # Only non-stub tasks have these fields
             plugin=api_task.plugin,
-            implementation=api_task.implementation,
-            inputs=api_task.inputs
+            function=api_task.function,
+            arguments=api_task.arguments
         )
         self._workflow_context.model.task.put(task_model)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/orchestrator/workflows/events_logging.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/events_logging.py b/aria/orchestrator/workflows/events_logging.py
index 236a55f..0c93b85 100644
--- a/aria/orchestrator/workflows/events_logging.py
+++ b/aria/orchestrator/workflows/events_logging.py
@@ -35,12 +35,12 @@ def _get_task_name(task):
 
 @events.start_task_signal.connect
 def _start_task_handler(task, **kwargs):
-    # If the task has not implementation this is an empty task.
-    if task.implementation:
+    # If the task has no function this is an empty task.
+    if task.function:
         suffix = 'started...'
         logger = task.context.logger.info
     else:
-        suffix = 'has no implementation'
+        suffix = 'has no function'
         logger = task.context.logger.debug
 
     logger('{name} {task.interface_name}.{task.operation_name} {suffix}'.format(
@@ -48,7 +48,7 @@ def _start_task_handler(task, **kwargs):
 
 @events.on_success_task_signal.connect
 def _success_task_handler(task, **kwargs):
-    if not task.implementation:
+    if not task.function:
         return
     task.context.logger.info('{name} {task.interface_name}.{task.operation_name} successful'
                              .format(name=_get_task_name(task), task=task))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/orchestrator/workflows/executor/base.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/base.py b/aria/orchestrator/workflows/executor/base.py
index c543278..7fece6f 100644
--- a/aria/orchestrator/workflows/executor/base.py
+++ b/aria/orchestrator/workflows/executor/base.py
@@ -33,10 +33,10 @@ class BaseExecutor(logger.LoggerMixin):
         Execute a task
         :param task: task to execute
         """
-        if task.implementation:
+        if task.function:
             self._execute(task)
         else:
-            # In this case the task is missing an implementation. This task still gets to an
+            # In this case the task is missing a function. This task still gets to an
             # executor, but since there is nothing to run, we by default simply skip the execution
             # itself.
             self._task_started(task)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/orchestrator/workflows/executor/celery.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/celery.py b/aria/orchestrator/workflows/executor/celery.py
index bbddc25..3935b07 100644
--- a/aria/orchestrator/workflows/executor/celery.py
+++ b/aria/orchestrator/workflows/executor/celery.py
@@ -44,11 +44,11 @@ class CeleryExecutor(BaseExecutor):
 
     def _execute(self, task):
         self._tasks[task.id] = task
-        inputs = dict(inp.unwrap() for inp in task.inputs.values())
-        inputs['ctx'] = task.context
+        arguments = dict(arg.unwrap() for arg in task.arguments.values())
+        arguments['ctx'] = task.context
         self._results[task.id] = self._app.send_task(
             task.operation_mapping,
-            kwargs=inputs,
+            kwargs=arguments,
             task_id=task.id,
             queue=self._get_queue(task))
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/orchestrator/workflows/executor/dry.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/dry.py b/aria/orchestrator/workflows/executor/dry.py
index 63ec392..8848df8 100644
--- a/aria/orchestrator/workflows/executor/dry.py
+++ b/aria/orchestrator/workflows/executor/dry.py
@@ -33,7 +33,7 @@ class DryExecutor(BaseExecutor):
             task.status = task.STARTED
 
         dry_msg = '<dry> {name} {task.interface_name}.{task.operation_name} {suffix}'
-        logger = task.context.logger.info if task.implementation else task.context.logger.debug
+        logger = task.context.logger.info if task.function else task.context.logger.debug
 
         if hasattr(task.actor, 'source_node'):
             name = '{source_node.name}->{target_node.name}'.format(
@@ -41,11 +41,11 @@ class DryExecutor(BaseExecutor):
         else:
             name = task.actor.name
 
-        if task.implementation:
+        if task.function:
             logger(dry_msg.format(name=name, task=task, suffix='started...'))
             logger(dry_msg.format(name=name, task=task, suffix='successful'))
         else:
-            logger(dry_msg.format(name=name, task=task, suffix='has no implementation'))
+            logger(dry_msg.format(name=name, task=task, suffix='has no function'))
 
         # updating the task manually instead of calling self._task_succeeded(task),
         # to avoid any side effects raising that event might cause

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/orchestrator/workflows/executor/process.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py
index f02e0a6..7472a2e 100644
--- a/aria/orchestrator/workflows/executor/process.py
+++ b/aria/orchestrator/workflows/executor/process.py
@@ -140,8 +140,8 @@ class ProcessExecutor(base.BaseExecutor):
     def _create_arguments_dict(self, task):
         return {
             'task_id': task.id,
-            'implementation': task.implementation,
-            'operation_inputs': dict(inp.unwrap() for inp in task.inputs.values()),
+            'function': task.function,
+            'operation_arguments': dict(arg.unwrap() for arg in task.arguments.values()),
             'port': self._server_port,
             'context': task.context.serialization_dict,
         }
@@ -290,8 +290,8 @@ def _main():
     port = arguments['port']
     messenger = _Messenger(task_id=task_id, port=port)
 
-    implementation = arguments['implementation']
-    operation_inputs = arguments['operation_inputs']
+    function = arguments['function']
+    operation_arguments = arguments['operation_arguments']
     context_dict = arguments['context']
 
     try:
@@ -302,11 +302,11 @@ def _main():
 
     try:
         messenger.started()
-        task_func = imports.load_attribute(implementation)
+        task_func = imports.load_attribute(function)
         aria.install_aria_extensions()
         for decorate in process_executor.decorate():
             task_func = decorate(task_func)
-        task_func(ctx=ctx, **operation_inputs)
+        task_func(ctx=ctx, **operation_arguments)
         ctx.close()
         messenger.succeeded()
     except BaseException as e:

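For illustration, a hypothetical extension decorator of the kind the decorate() loop above would apply to the loaded operation function; the name and behavior are invented here:

    def with_logging(func):
        def wrapper(ctx, **operation_arguments):
            ctx.logger.info('starting {0}'.format(getattr(func, '__name__', func)))
            return func(ctx=ctx, **operation_arguments)
        return wrapper
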
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/orchestrator/workflows/executor/thread.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/thread.py b/aria/orchestrator/workflows/executor/thread.py
index f53362a..2c5ef16 100644
--- a/aria/orchestrator/workflows/executor/thread.py
+++ b/aria/orchestrator/workflows/executor/thread.py
@@ -60,9 +60,9 @@ class ThreadExecutor(BaseExecutor):
                 task = self._queue.get(timeout=1)
                 self._task_started(task)
                 try:
-                    task_func = imports.load_attribute(task.implementation)
-                    inputs = dict(inp.unwrap() for inp in task.inputs.values())
-                    task_func(ctx=task.context, **inputs)
+                    task_func = imports.load_attribute(task.function)
+                    arguments = dict(arg.unwrap() for arg in task.arguments.values())
+                    task_func(ctx=task.context, **arguments)
                     self._task_succeeded(task)
                 except BaseException as e:
                     self._task_failed(task,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/utils/formatting.py
----------------------------------------------------------------------
diff --git a/aria/utils/formatting.py b/aria/utils/formatting.py
index f96a4ce..b8d24cd 100644
--- a/aria/utils/formatting.py
+++ b/aria/utils/formatting.py
@@ -124,7 +124,9 @@ def string_list_as_string(strings):
     Nice representation of a list of strings.
     """
 
-    return ', '.join('"%s"' % safe_str(v) for v in strings)
+    if not strings:
+        return 'none'
+    return ', '.join('"{0}"'.format(safe_str(v)) for v in strings)
 
 
 def pluralize(noun):

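With the new empty-list guard, the helper behaves as follows:

    string_list_as_string(['host', 'port'])   # -> '"host", "port"'
    string_list_as_string([])                 # -> 'none'
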
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/aria/utils/validation.py
----------------------------------------------------------------------
diff --git a/aria/utils/validation.py b/aria/utils/validation.py
index a33f7a2..193cb33 100644
--- a/aria/utils/validation.py
+++ b/aria/utils/validation.py
@@ -17,6 +17,8 @@
 Contains validation related utilities
 """
 
+from .formatting import string_list_as_string
+
 
 class ValidatorMixin(object):
     """
@@ -82,8 +84,8 @@ def validate_function_arguments(func, func_kwargs):
     for arg in non_default_args:
         if arg not in func_kwargs:
             raise ValueError(
-                "The argument '{arg}' doest not have a default value, and it "
-                "isn't passed to {func.__name__}".format(arg=arg, func=func))
+                'The argument "{arg}" is not provided and does not have a default value for '
+                'function "{func.__name__}"'.format(arg=arg, func=func))
 
     # check if there are any extra kwargs
     extra_kwargs = [arg for arg in func_kwargs.keys() if arg not in args]
@@ -91,5 +93,5 @@ def validate_function_arguments(func, func_kwargs):
     # assert that the function has kwargs
     if extra_kwargs and not has_kwargs:
         raise ValueError("The following extra kwargs were supplied: {extra_kwargs}".format(
-            extra_kwargs=extra_kwargs
+            extra_kwargs=string_list_as_string(extra_kwargs)
         ))

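A quick sketch of the reworded error, using a hypothetical function:

    def deploy(host, port=80):
        pass

    validate_function_arguments(deploy, {'port': 8080})
    # ValueError: The argument "host" is not provided and does not have a
    # default value for function "deploy"
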
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml b/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
index abac03b..c1dc11d 100644
--- a/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
+++ b/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
@@ -52,10 +52,10 @@ policy_types:
       should be inherited and extended with additional properties.
     derived_from: tosca.policies.Root
     properties:
-      function:
+      implementation:
         description: >-
-          The interpretation of the function string depends on the orchestrator. In ARIA it is the
-          full path to a Python @workflow function that generates a task graph based on the service
-          topology.
+          The interpretation of the implementation string depends on the orchestrator. In ARIA it is
+          the full path to a Python @workflow function that generates a task graph based on the
+          service topology.
         type: string
         required: true

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
index 0b04fdc..c88bf41 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
@@ -378,7 +378,7 @@ def create_operation_template_model(context, service_template, operation):
     implementation = operation.implementation
     if implementation is not None:
         primary = implementation.primary
-        set_implementation(context, service_template, operation, model, primary)
+        extract_implementation_primary(context, service_template, operation, model, primary)
         relationship_edge = operation._get_extensions(context).get('relationship_edge')
         if relationship_edge is not None:
             if relationship_edge == 'source':
@@ -392,6 +392,8 @@ def create_operation_template_model(context, service_template, operation):
             for dependency in dependencies:
                 key, value = split_prefix(dependency)
                 if key is not None:
+                    # Special ARIA prefix: signifies configuration parameters
+
                     # Parse as YAML
                     try:
                         value = yaml.load(value)
@@ -512,15 +514,13 @@ def create_workflow_operation_template_model(context, service_template, policy):
 
     properties = policy._get_property_values(context)
     for prop_name, prop in properties.iteritems():
-        if prop_name == 'function':
+        if prop_name == 'implementation':
             model.function = prop.value
-        elif prop_name == 'dependencies':
-            model.dependencies = prop.value
         else:
-            model.configuration[prop_name] = Parameter(name=prop_name, # pylint: disable=unexpected-keyword-arg
-                                                       type_name=prop.type,
-                                                       value=prop.value,
-                                                       description=prop.description)
+            model.inputs[prop_name] = Parameter(name=prop_name, # pylint: disable=unexpected-keyword-arg
+                                                type_name=prop.type,
+                                                value=prop.value,
+                                                description=prop.description)
 
     return model
 
@@ -667,7 +667,7 @@ def split_prefix(string):
     split = IMPLEMENTATION_PREFIX_REGEX.split(string, 1)
     if len(split) < 2:
         return None, None
-    return split[0].strip(), split[1].lstrip()
+    return split[0].strip(), split[1].strip()
 
 
 def set_nested(the_dict, keys, value):
@@ -693,7 +693,7 @@ def set_nested(the_dict, keys, value):
         set_nested(the_dict[key], keys, value)
 
 
-def set_implementation(context, service_template, presentation, model, primary):
+def extract_implementation_primary(context, service_template, presentation, model, primary):
     prefix, postfix = split_prefix(primary)
     if prefix:
         # Special ARIA prefix
@@ -706,5 +706,5 @@ def set_implementation(context, service_template, presentation, model, primary):
                 locator=presentation._get_child_locator('properties', 'implementation'),
                 level=Issue.BETWEEN_TYPES)
     else:
-        # Standard TOSCA artifact
+        # Standard TOSCA artifact with default plugin
         model.implementation = primary

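split_prefix separates a special ARIA prefix from its value; an illustration assuming the '>' separator used in the node-cellar template above (the exact pattern is defined elsewhere as IMPLEMENTATION_PREFIX_REGEX):

    split_prefix('ssh.user > admin')   # -> ('ssh.user', 'admin'); both sides now stripped
    split_prefix('changed.sh')         # -> (None, None); no prefix present
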
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/modeling/test_models.py
----------------------------------------------------------------------
diff --git a/tests/modeling/test_models.py b/tests/modeling/test_models.py
index 57511dd..df3aebd 100644
--- a/tests/modeling/test_models.py
+++ b/tests/modeling/test_models.py
@@ -755,7 +755,7 @@ class TestTask(object):
 
     @pytest.mark.parametrize(
         'is_valid, status, due_at, started_at, ended_at, max_attempts, attempts_count, '
-        'retry_interval, ignore_failure, name, operation_mapping, inputs, plugin_id',
+        'retry_interval, ignore_failure, name, operation_mapping, arguments, plugin_id',
         [
             (False, m_cls, now, now, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
             (False, Task.STARTED, m_cls, now, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
@@ -784,7 +784,7 @@ class TestTask(object):
     )
     def test_task_model_creation(self, execution_storage, is_valid, status, due_at, started_at,
                                  ended_at, max_attempts, attempts_count, retry_interval,
-                                 ignore_failure, name, operation_mapping, inputs, plugin_id):
+                                 ignore_failure, name, operation_mapping, arguments, plugin_id):
         task = _test_model(
             is_valid=is_valid,
             storage=execution_storage,
@@ -800,8 +800,8 @@ class TestTask(object):
                 retry_interval=retry_interval,
                 ignore_failure=ignore_failure,
                 name=name,
-                implementation=operation_mapping,
-                inputs=inputs,
+                function=operation_mapping,
+                arguments=arguments,
                 plugin_fk=plugin_id,
             ))
         if is_valid:
@@ -813,8 +813,8 @@ class TestTask(object):
         def create_task(max_attempts):
             Task(execution_fk='eid',
                  name='name',
-                 implementation='',
-                 inputs={},
+                 function='',
+                 arguments={},
                  max_attempts=max_attempts)
         create_task(max_attempts=1)
         create_task(max_attempts=2)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index 7dbdd04..eec75da 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -97,7 +97,7 @@ def test_node_operation_task_execution(ctx, thread_executor, dataholder):
                 node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=arguments
+                arguments=arguments
             )
         )
 
@@ -115,8 +115,8 @@ def test_node_operation_task_execution(ctx, thread_executor, dataholder):
     )
     operations = interface.operations
     assert len(operations) == 1
-    assert dataholder['implementation'] == operations.values()[0].function             # pylint: disable=no-member
-    assert dataholder['inputs']['putput'] is True
+    assert dataholder['function'] == operations.values()[0].function             # pylint: disable=no-member
+    assert dataholder['arguments']['putput'] is True
 
     # Context based attributes (sugaring)
     assert dataholder['template_name'] == node.node_template.name
@@ -147,7 +147,7 @@ def test_relationship_operation_task_execution(ctx, thread_executor, dataholder)
                 relationship,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=arguments
+                arguments=arguments
             )
         )
 
@@ -159,8 +159,8 @@ def test_relationship_operation_task_execution(ctx, thread_executor, dataholder)
     assert dataholder['actor_name'] == relationship.name
     assert interface_name in dataholder['task_name']
     operations = interface.operations
-    assert dataholder['implementation'] == operations.values()[0].function           # pylint: disable=no-member
-    assert dataholder['inputs']['putput'] is True
+    assert dataholder['function'] == operations.values()[0].function           # pylint: disable=no-member
+    assert dataholder['arguments']['putput'] is True
 
     # Context based attributes (sugaring)
     dependency_node_template = ctx.model.node_template.get_by_name(
@@ -252,7 +252,7 @@ def test_plugin_workdir(ctx, thread_executor, tmpdir):
             node,
             interface_name=interface_name,
             operation_name=operation_name,
-            inputs=arguments))
+            arguments=arguments))
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
     expected_file = tmpdir.join('workdir', 'plugins', str(ctx.service.id),
@@ -301,7 +301,7 @@ def test_node_operation_logging(ctx, executor):
                 node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=arguments
+                arguments=arguments
             )
         )
 
@@ -334,7 +334,7 @@ def test_relationship_operation_logging(ctx, executor):
                 relationship,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=arguments
+                arguments=arguments
             )
         )
 
@@ -348,15 +348,15 @@ def test_attribute_consumption(ctx, executor, dataholder):
 
     source_node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
 
-    inputs = {'dict_': {'key': 'value'},
-              'set_test_dict': {'key2': 'value2'}}
+    arguments = {'dict_': {'key': 'value'},
+                 'set_test_dict': {'key2': 'value2'}}
     interface = mock.models.create_interface(
         source_node.service,
         node_int_name,
         node_op_name,
         operation_kwargs=dict(
-            implementation=op_path(attribute_altering_operation, module_path=__name__),
-            inputs=inputs)
+            function=op_path(attribute_altering_operation, module_path=__name__),
+            arguments=arguments)
     )
     source_node.interfaces[interface.name] = interface
     ctx.model.node.update(source_node)
@@ -371,8 +371,8 @@ def test_attribute_consumption(ctx, executor, dataholder):
         rel_int_name,
         rel_op_name,
         operation_kwargs=dict(
-            implementation=op_path(attribute_consuming_operation, module_path=__name__),
-            inputs={'holder_path': dataholder.path}
+            function=op_path(attribute_consuming_operation, module_path=__name__),
+            arguments={'holder_path': dataholder.path}
         )
     )
     relationship.interfaces[interface.name] = interface
@@ -386,7 +386,7 @@ def test_attribute_consumption(ctx, executor, dataholder):
                 source_node,
                 interface_name=node_int_name,
                 operation_name=node_op_name,
-                inputs=inputs
+                arguments=arguments
             ),
             api.task.OperationTask(
                 relationship,
@@ -410,8 +410,7 @@ def test_attribute_consumption(ctx, executor, dataholder):
            dataholder['key2'] == 'value2'
 
 
-def _assert_loggins(ctx, inputs):
-
+def _assert_loggins(ctx, arguments):
     # The logs should contain the following: Workflow Start, Operation Start, custom operation
     # log string (op_start), custom operation log string (op_end), Operation End, Workflow End.
 
@@ -431,11 +430,11 @@ def _assert_loggins(ctx, inputs):
     assert all(l.execution == execution for l in logs)
     assert all(l in logs and l.task == task for l in task.logs)
 
-    op_start_log = [l for l in logs if inputs['op_start'] in l.msg and l.level.lower() == 'info']
+    op_start_log = [l for l in logs if arguments['op_start'] in l.msg and l.level.lower() == 'info']
     assert len(op_start_log) == 1
     op_start_log = op_start_log[0]
 
-    op_end_log = [l for l in logs if inputs['op_end'] in l.msg and l.level.lower() == 'debug']
+    op_end_log = [l for l in logs if arguments['op_end'] in l.msg and l.level.lower() == 'debug']
     assert len(op_end_log) == 1
     op_end_log = op_end_log[0]
 
@@ -444,10 +443,10 @@ def _assert_loggins(ctx, inputs):
 
 @operation
 def logged_operation(ctx, **_):
-    ctx.logger.info(ctx.task.inputs['op_start'].value)
+    ctx.logger.info(ctx.task.arguments['op_start'].value)
     # sleep so the two log records get distinct created_at timestamps
     time.sleep(1)
-    ctx.logger.debug(ctx.task.inputs['op_end'].value)
+    ctx.logger.debug(ctx.task.arguments['op_end'].value)
 
 
 @operation
@@ -476,8 +475,8 @@ def operation_common(ctx, holder):
 
     holder['actor_name'] = ctx.task.actor.name
     holder['task_name'] = ctx.task.name
-    holder['implementation'] = ctx.task.implementation
-    holder['inputs'] = dict(i.unwrap() for i in ctx.task.inputs.values())
+    holder['function'] = ctx.task.function
+    holder['arguments'] = dict(i.unwrap() for i in ctx.task.arguments.values())
 
 
 @operation

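The recurring pattern in these tests after the rename, as a sketch (the interface and operation names are hypothetical; `node` is any node model):

    task = api.task.OperationTask(
        node,
        interface_name='Standard',    # hypothetical interface name
        operation_name='create',      # hypothetical operation name
        arguments={'putput': True})   # was `inputs={...}`
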
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/orchestrator/context/test_serialize.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_serialize.py b/tests/orchestrator/context/test_serialize.py
index 946b0bd..4db7bf4 100644
--- a/tests/orchestrator/context/test_serialize.py
+++ b/tests/orchestrator/context/test_serialize.py
@@ -60,8 +60,8 @@ def _mock_workflow(ctx, graph):
 def _mock_operation(ctx):
     # We test several things in this operation
     # ctx.task, ctx.node, etc... tell us that the model storage was properly re-created
-    # a correct ctx.task.implementation tells us we kept the correct task_id
-    assert ctx.task.implementation == _operation_mapping()
+    # a correct ctx.task.function tells us we kept the correct task_id
+    assert ctx.task.function == _operation_mapping()
     # a correct ctx.node.name tells us we kept the correct actor_id
     assert ctx.node.name == mock.models.DEPENDENCY_NODE_NAME
     # a correct ctx.name tells us we kept the correct name

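`_operation_mapping()` here returns the dotted, importable path that the `function` field stores; something along these lines (a sketch, not the file's actual helper):

    def _operation_mapping():
        # the `function` field holds an importable path to the operation callable
        return '{module}.{func}'.format(module=__name__, func='_mock_operation')
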
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index 26a15e5..326ce83 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -105,7 +105,7 @@ def test_host_ip(workflow_context, executor, dataholder):
                 dependency_node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=arguments
+                arguments=arguments
             )
         )
 
@@ -136,7 +136,7 @@ def test_relationship_tool_belt(workflow_context, executor, dataholder):
                 relationship,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=arguments
+                arguments=arguments
             )
         )
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/orchestrator/execution_plugin/test_local.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_local.py b/tests/orchestrator/execution_plugin/test_local.py
index 0dfd512..d792a57 100644
--- a/tests/orchestrator/execution_plugin/test_local.py
+++ b/tests/orchestrator/execution_plugin/test_local.py
@@ -199,7 +199,7 @@ if __name__ == '__main__':
         props = self._run(
             executor, workflow_context,
             script_path=script_path,
-            inputs={'key': 'value'})
+            arguments={'key': 'value'})
         assert props['key'].value == 'value'
 
     @pytest.mark.parametrize(
@@ -460,10 +460,10 @@ if __name__ == '__main__':
              script_path,
              process=None,
              env_var='value',
-             inputs=None):
+             arguments=None):
         local_script_path = script_path
         script_path = os.path.basename(local_script_path) if local_script_path else ''
-        arguments = inputs or {}
+        arguments = arguments or {}
         process = process or {}
         if script_path:
             workflow_context.resource.service.upload(
@@ -495,7 +495,7 @@ if __name__ == '__main__':
                 node,
                 interface_name='test',
                 operation_name='op',
-                inputs=arguments))
+                arguments=arguments))
             return graph
         tasks_graph = mock_workflow(ctx=workflow_context)  # pylint: disable=no-value-for-parameter
         eng = engine.Engine(

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/orchestrator/execution_plugin/test_ssh.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_ssh.py b/tests/orchestrator/execution_plugin/test_ssh.py
index a369f8f..899a007 100644
--- a/tests/orchestrator/execution_plugin/test_ssh.py
+++ b/tests/orchestrator/execution_plugin/test_ssh.py
@@ -243,13 +243,13 @@ class TestWithActualSSHServer(object):
 
             ops = []
             for test_operation in test_operations:
-                op_inputs = arguments.copy()
-                op_inputs['test_operation'] = test_operation
+                op_arguments = arguments.copy()
+                op_arguments['test_operation'] = test_operation
                 ops.append(api.task.OperationTask(
                     node,
                     interface_name='test',
                     operation_name='op',
-                    inputs=op_inputs))
+                    arguments=op_arguments))
 
             graph.sequence(*ops)
             return graph

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/orchestrator/test_workflow_runner.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/test_workflow_runner.py b/tests/orchestrator/test_workflow_runner.py
index 405cb80..3646339 100644
--- a/tests/orchestrator/test_workflow_runner.py
+++ b/tests/orchestrator/test_workflow_runner.py
@@ -48,8 +48,7 @@ def test_missing_workflow_implementation(service, request):
     workflow = models.Operation(
         name='test_workflow',
         service=service,
-        implementation='nonexistent.workflow.implementation',
-        inputs={})
+        function='nonexistent.workflow.implementation')
     service.workflows['test_workflow'] = workflow
 
     with pytest.raises(exceptions.WorkflowImplementationNotFoundError):
@@ -259,8 +258,9 @@ def _setup_mock_workflow_in_service(request, inputs=None):
     workflow = models.Operation(
         name=mock_workflow_name,
         service=service,
-        implementation='workflow.mock_workflow',
-        inputs=inputs or {})
+        function='workflow.mock_workflow',
+        inputs=inputs or {},
+        arguments=inputs or {})
     service.workflows[mock_workflow_name] = workflow
     return mock_workflow_name
 
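Note the distinction the last hunk introduces: a workflow `Operation` now carries both `inputs` (the declared, user-facing parameters) and `arguments` (the values actually handed to the function). A sketch under that assumption, reusing the constructor shown above:

    workflow = models.Operation(
        name='my_workflow',
        service=service,
        function='workflows.my_workflow',  # was `implementation`
        inputs=user_inputs or {},          # declared parameters
        arguments=user_inputs or {})       # values passed to the function
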

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/orchestrator/workflows/api/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/api/test_task.py b/tests/orchestrator/workflows/api/test_task.py
index d57e424..9d91b6b 100644
--- a/tests/orchestrator/workflows/api/test_task.py
+++ b/tests/orchestrator/workflows/api/test_task.py
@@ -66,7 +66,7 @@ class TestOperationTask(object):
                 node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=arguments,
+                arguments=arguments,
                 max_attempts=max_attempts,
                 retry_interval=retry_interval,
                 ignore_failure=ignore_failure)
@@ -77,9 +77,9 @@ class TestOperationTask(object):
             interface=interface_name,
             operation=operation_name
         )
-        assert api_task.implementation == 'op_path'
+        assert api_task.function == 'op_path'
         assert api_task.actor == node
-        assert api_task.inputs['test_input'].value is True
+        assert api_task.arguments['test_input'].value is True
         assert api_task.retry_interval == retry_interval
         assert api_task.max_attempts == max_attempts
         assert api_task.ignore_failure == ignore_failure
@@ -113,7 +113,7 @@ class TestOperationTask(object):
                 relationship,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=arguments,
+                arguments=arguments,
                 max_attempts=max_attempts,
                 retry_interval=retry_interval)
 
@@ -123,9 +123,9 @@ class TestOperationTask(object):
             interface=interface_name,
             operation=operation_name
         )
-        assert api_task.implementation == 'op_path'
+        assert api_task.function == 'op_path'
         assert api_task.actor == relationship
-        assert api_task.inputs['test_input'].value is True
+        assert api_task.arguments['test_input'].value is True
         assert api_task.retry_interval == retry_interval
         assert api_task.max_attempts == max_attempts
         assert api_task.plugin.name == 'test_plugin'
@@ -158,7 +158,7 @@ class TestOperationTask(object):
                 relationship,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=arguments,
+                arguments=arguments,
                 max_attempts=max_attempts,
                 retry_interval=retry_interval)
 
@@ -168,9 +168,9 @@ class TestOperationTask(object):
             interface=interface_name,
             operation=operation_name
         )
-        assert api_task.implementation == 'op_path'
+        assert api_task.function == 'op_path'
         assert api_task.actor == relationship
-        assert api_task.inputs['test_input'].value is True
+        assert api_task.arguments['test_input'].value is True
         assert api_task.retry_interval == retry_interval
         assert api_task.max_attempts == max_attempts
         assert api_task.plugin.name == 'test_plugin'
@@ -198,7 +198,7 @@ class TestOperationTask(object):
                 interface_name=interface_name,
                 operation_name=operation_name)
 
-        assert task.inputs == {}
+        assert task.arguments == {}
         assert task.retry_interval == ctx._task_retry_interval
         assert task.max_attempts == ctx._task_max_attempts
         assert task.ignore_failure == ctx._task_ignore_failure

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index 43ec9f1..6d2836c 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -57,7 +57,7 @@ class BaseTest(object):
     @staticmethod
     def _op(ctx,
             func,
-            inputs=None,
+            arguments=None,
             max_attempts=None,
             retry_interval=None,
             ignore_failure=None):
@@ -65,9 +65,9 @@ class BaseTest(object):
         interface_name = 'aria.interfaces.lifecycle'
         operation_kwargs = dict(function='{name}.{func.__name__}'.format(
             name=__name__, func=func))
-        if inputs:
+        if arguments:
             # the operation must declare its arguments before they can be passed
-            operation_kwargs['arguments'] = inputs
+            operation_kwargs['arguments'] = arguments
         operation_name = 'create'
         interface = mock.models.create_interface(node.service, interface_name, operation_name,
                                                  operation_kwargs=operation_kwargs)
@@ -77,7 +77,7 @@ class BaseTest(object):
             node,
             interface_name='aria.interfaces.lifecycle',
             operation_name=operation_name,
-            inputs=inputs or {},
+            arguments=arguments,
             max_attempts=max_attempts,
             retry_interval=retry_interval,
             ignore_failure=ignore_failure,
@@ -189,8 +189,8 @@ class TestEngine(BaseTest):
     def test_two_tasks_execution_order(self, workflow_context, executor):
         @workflow
         def mock_workflow(ctx, graph):
-            op1 = self._op(ctx, func=mock_ordered_task, inputs={'counter': 1})
-            op2 = self._op(ctx, func=mock_ordered_task, inputs={'counter': 2})
+            op1 = self._op(ctx, func=mock_ordered_task, arguments={'counter': 1})
+            op2 = self._op(ctx, func=mock_ordered_task, arguments={'counter': 2})
             graph.sequence(op1, op2)
         self._execute(
             workflow_func=mock_workflow,
@@ -204,9 +204,9 @@ class TestEngine(BaseTest):
     def test_stub_and_subworkflow_execution(self, workflow_context, executor):
         @workflow
         def sub_workflow(ctx, graph):
-            op1 = self._op(ctx, func=mock_ordered_task, inputs={'counter': 1})
+            op1 = self._op(ctx, func=mock_ordered_task, arguments={'counter': 1})
             op2 = api.task.StubTask()
-            op3 = self._op(ctx, func=mock_ordered_task, inputs={'counter': 2})
+            op3 = self._op(ctx, func=mock_ordered_task, arguments={'counter': 2})
             graph.sequence(op1, op2, op3)
 
         @workflow
@@ -229,7 +229,7 @@ class TestCancel(BaseTest):
         @workflow
         def mock_workflow(ctx, graph):
             operations = (
-                self._op(ctx, func=mock_sleep_task, inputs=dict(seconds=0.1))
+                self._op(ctx, func=mock_sleep_task, arguments=dict(seconds=0.1))
                 for _ in range(number_of_tasks)
             )
             return graph.sequence(*operations)
@@ -270,7 +270,7 @@ class TestRetries(BaseTest):
         @workflow
         def mock_workflow(ctx, graph):
             op = self._op(ctx, func=mock_conditional_failure_task,
-                          inputs={'failure_count': 1},
+                          arguments={'failure_count': 1},
                           max_attempts=2)
             graph.add_tasks(op)
         self._execute(
@@ -286,7 +286,7 @@ class TestRetries(BaseTest):
         @workflow
         def mock_workflow(ctx, graph):
             op = self._op(ctx, func=mock_conditional_failure_task,
-                          inputs={'failure_count': 2},
+                          arguments={'failure_count': 2},
                           max_attempts=2)
             graph.add_tasks(op)
         with pytest.raises(exceptions.ExecutorException):
@@ -303,7 +303,7 @@ class TestRetries(BaseTest):
         @workflow
         def mock_workflow(ctx, graph):
             op = self._op(ctx, func=mock_conditional_failure_task,
-                          inputs={'failure_count': 1},
+                          arguments={'failure_count': 1},
                           max_attempts=3)
             graph.add_tasks(op)
         self._execute(
@@ -319,7 +319,7 @@ class TestRetries(BaseTest):
         @workflow
         def mock_workflow(ctx, graph):
             op = self._op(ctx, func=mock_conditional_failure_task,
-                          inputs={'failure_count': 2},
+                          arguments={'failure_count': 2},
                           max_attempts=3)
             graph.add_tasks(op)
         self._execute(
@@ -335,7 +335,7 @@ class TestRetries(BaseTest):
         @workflow
         def mock_workflow(ctx, graph):
             op = self._op(ctx, func=mock_conditional_failure_task,
-                          inputs={'failure_count': 1},
+                          arguments={'failure_count': 1},
                           max_attempts=-1)
             graph.add_tasks(op)
         self._execute(
@@ -361,7 +361,7 @@ class TestRetries(BaseTest):
         @workflow
         def mock_workflow(ctx, graph):
             op = self._op(ctx, func=mock_conditional_failure_task,
-                          inputs={'failure_count': 1},
+                          arguments={'failure_count': 1},
                           max_attempts=2,
                           retry_interval=retry_interval)
             graph.add_tasks(op)
@@ -382,7 +382,7 @@ class TestRetries(BaseTest):
         def mock_workflow(ctx, graph):
             op = self._op(ctx, func=mock_conditional_failure_task,
                           ignore_failure=True,
-                          inputs={'failure_count': 100},
+                          arguments={'failure_count': 100},
                           max_attempts=100)
             graph.add_tasks(op)
         self._execute(
@@ -405,7 +405,7 @@ class TestTaskRetryAndAbort(BaseTest):
         @workflow
         def mock_workflow(ctx, graph):
             op = self._op(ctx, func=mock_task_retry,
-                          inputs={'message': self.message},
+                          arguments={'message': self.message},
                           retry_interval=default_retry_interval,
                           max_attempts=2)
             graph.add_tasks(op)
@@ -429,8 +429,8 @@ class TestTaskRetryAndAbort(BaseTest):
         @workflow
         def mock_workflow(ctx, graph):
             op = self._op(ctx, func=mock_task_retry,
-                          inputs={'message': self.message,
-                                  'retry_interval': custom_retry_interval},
+                          arguments={'message': self.message,
+                                     'retry_interval': custom_retry_interval},
                           retry_interval=default_retry_interval,
                           max_attempts=2)
             graph.add_tasks(op)
@@ -452,7 +452,7 @@ class TestTaskRetryAndAbort(BaseTest):
         @workflow
         def mock_workflow(ctx, graph):
             op = self._op(ctx, func=mock_task_abort,
-                          inputs={'message': self.message},
+                          arguments={'message': self.message},
                           retry_interval=100,
                           max_attempts=100)
             graph.add_tasks(op)

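As the comment in `_op` above notes, an operation must declare its arguments on the interface before a task may pass them. Spelled out as a sketch (helper signatures are inferred from this file; the function path is hypothetical):

    # 1. declare the arguments when creating the interface...
    interface = mock.models.create_interface(
        node.service, 'aria.interfaces.lifecycle', 'create',
        operation_kwargs=dict(
            function='tests.some_module.mock_ordered_task',  # hypothetical path
            arguments={'counter': 1}))
    node.interfaces[interface.name] = interface

    # 2. ...only then may the task pass (and override) them
    op = api.task.OperationTask(
        node,
        interface_name='aria.interfaces.lifecycle',
        operation_name='create',
        arguments={'counter': 1})
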
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/orchestrator/workflows/core/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task.py b/tests/orchestrator/workflows/core/test_task.py
index 1ba6422..a717e19 100644
--- a/tests/orchestrator/workflows/core/test_task.py
+++ b/tests/orchestrator/workflows/core/test_task.py
@@ -103,9 +103,9 @@ class TestOperationTask(object):
         assert storage_task.actor == core_task.context.node._original_model
         assert core_task.model_task == storage_task
         assert core_task.name == api_task.name
-        assert core_task.implementation == api_task.implementation
+        assert core_task.function == api_task.function
         assert core_task.actor == api_task.actor == node
-        assert core_task.inputs == api_task.inputs == storage_task.inputs
+        assert core_task.arguments == api_task.arguments == storage_task.arguments
         assert core_task.plugin == storage_plugin
 
     def test_relationship_operation_task_creation(self, ctx):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
index 1633d4a..5dd2855 100644
--- a/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
+++ b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
@@ -108,9 +108,9 @@ def test_task_graph_into_execution_graph(tmpdir):
 def _assert_execution_is_api_task(execution_task, api_task):
     assert execution_task.id == api_task.id
     assert execution_task.name == api_task.name
-    assert execution_task.implementation == api_task.implementation
+    assert execution_task.function == api_task.function
     assert execution_task.actor == api_task.actor
-    assert execution_task.inputs == api_task.inputs
+    assert execution_task.arguments == api_task.arguments
 
 
 def _get_task_by_name(task_name, graph):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/orchestrator/workflows/executor/__init__.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/__init__.py b/tests/orchestrator/workflows/executor/__init__.py
index 41c4b2e..ac6d325 100644
--- a/tests/orchestrator/workflows/executor/__init__.py
+++ b/tests/orchestrator/workflows/executor/__init__.py
@@ -25,11 +25,11 @@ class MockTask(object):
 
     INFINITE_RETRIES = models.Task.INFINITE_RETRIES
 
-    def __init__(self, implementation, inputs=None, plugin=None, storage=None):
-        self.implementation = self.name = implementation
+    def __init__(self, function, arguments=None, plugin=None, storage=None):
+        self.function = self.name = function
         self.plugin_fk = plugin.id if plugin else None
         self.plugin = plugin or None
-        self.inputs = inputs or {}
+        self.arguments = arguments or {}
         self.states = []
         self.exception = None
         self.id = str(uuid.uuid4())

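With the rename, the stub is constructed, for example, as (the function path is hypothetical):

    task = MockTask('tests.some_module.my_operation',  # was `implementation=...`
                    arguments={'input': 'value'})      # was `inputs=...`
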
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/orchestrator/workflows/executor/test_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_executor.py b/tests/orchestrator/workflows/executor/test_executor.py
index 29cb0e8..9ddaef4 100644
--- a/tests/orchestrator/workflows/executor/test_executor.py
+++ b/tests/orchestrator/workflows/executor/test_executor.py
@@ -38,16 +38,16 @@ import tests
 from . import MockTask
 
 
-def _get_implementation(func):
+def _get_function(func):
     return '{module}.{func.__name__}'.format(module=__name__, func=func)
 
 
 def execute_and_assert(executor, storage=None):
     expected_value = 'value'
-    successful_task = MockTask(_get_implementation(mock_successful_task), storage=storage)
-    failing_task = MockTask(_get_implementation(mock_failing_task), storage=storage)
-    task_with_inputs = MockTask(_get_implementation(mock_task_with_input),
-                                inputs={'input': models.Parameter.wrap('input', 'value')},
+    successful_task = MockTask(_get_function(mock_successful_task), storage=storage)
+    failing_task = MockTask(_get_function(mock_failing_task), storage=storage)
+    task_with_inputs = MockTask(_get_function(mock_task_with_input),
+                                arguments={'input': models.Parameter.wrap('input', 'value')},
                                 storage=storage)
 
     for task in [successful_task, failing_task, task_with_inputs]:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/orchestrator/workflows/executor/test_process_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor.py b/tests/orchestrator/workflows/executor/test_process_executor.py
index e6333e8..058190e 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor.py
@@ -66,7 +66,7 @@ class TestProcessExecutor(object):
     def test_closed(self, executor):
         executor.close()
         with pytest.raises(RuntimeError) as exc_info:
-            executor.execute(task=MockTask(implementation='some.implementation'))
+            executor.execute(task=MockTask(function='some.function'))
         assert 'closed' in exc_info.value.message
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
index 8c3f72a..6163c09 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
@@ -93,12 +93,12 @@ def _test(context, executor, lock_files, func, dataholder, expected_failure):
                 node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=arguments),
+                arguments=arguments),
             api.task.OperationTask(
                 node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=arguments)
+                arguments=arguments)
         )
 
     signal = events.on_failure_task_signal

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/orchestrator/workflows/executor/test_process_executor_extension.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_extension.py b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
index acca0bf..29874b2 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_extension.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
@@ -27,7 +27,7 @@ from tests import storage
 
 
 def test_decorate_extension(context, executor):
-    arguments = {'input1': 1, 'input2': 2}
+    arguments = {'arg1': 1, 'arg2': 2}
 
     def get_node(ctx):
         return ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
@@ -49,15 +49,15 @@
             node,
             interface_name=interface_name,
             operation_name=operation_name,
-            inputs=arguments)
+            arguments=arguments)
         graph.add_tasks(task)
         return graph
     graph = mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter
     eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
     eng.execute()
     out = get_node(context).attributes.get('out').value
-    assert out['wrapper_inputs'] == arguments
-    assert out['function_inputs'] == arguments
+    assert out['wrapper_arguments'] == arguments
+    assert out['function_arguments'] == arguments
 
 
 @extension.process_executor
@@ -65,16 +71,27 @@ class MockProcessExecutorExtension(object):
 
     def decorate(self):
         def decorator(function):
-            def wrapper(ctx, **operation_inputs):
-                ctx.node.attributes['out'] = {'wrapper_inputs': operation_inputs}
-                function(ctx=ctx, **operation_inputs)
+            def wrapper(ctx, **operation_arguments):
+                ctx.node.attributes['out'] = {'wrapper_arguments': operation_arguments}
+                function(ctx=ctx, **operation_arguments)
             return wrapper
         return decorator
 
 
 @operation
-def _mock_operation(ctx, **operation_inputs):
-    ctx.node.attributes['out']['function_inputs'] = operation_inputs
+def _mock_operation(ctx, **operation_arguments):
+    ctx.node.attributes['out']['function_arguments'] = operation_arguments
 
 
 @pytest.fixture

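For context, the extension hook works like this: a class registered via `@extension.process_executor` contributes a `decorate()` method, and the process executor wraps every operation function with the decorator it returns. A minimal sketch mirroring the test above (the logging body is illustrative only):

    @extension.process_executor
    class TracingExtension(object):

        def decorate(self):
            def decorator(function):
                def wrapper(ctx, **operation_arguments):
                    # runs in the operation's process, before the real function
                    ctx.logger.debug('invoking %s', ctx.task.function)
                    return function(ctx=ctx, **operation_arguments)
                return wrapper
            return decorator
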
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
index c766fe4..4e55d0f 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
@@ -62,19 +62,19 @@ def test_refresh_state_of_tracked_attributes(context, executor):
 
 
 def test_apply_tracked_changes_during_an_operation(context, executor):
-    inputs = {
+    arguments = {
         'committed': {'some': 'new', 'properties': 'right here'},
         'changed_but_refreshed': {'some': 'newer', 'properties': 'right there'}
     }
 
     expected_initial = context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME).attributes
     out = _run_workflow(
-        context=context, executor=executor, op_func=_mock_updating_operation, inputs=inputs)
+        context=context, executor=executor, op_func=_mock_updating_operation, arguments=arguments)
 
     expected_after_update = expected_initial.copy()
-    expected_after_update.update(inputs['committed']) # pylint: disable=no-member
+    expected_after_update.update(arguments['committed']) # pylint: disable=no-member
     expected_after_change = expected_after_update.copy()
-    expected_after_change.update(inputs['changed_but_refreshed']) # pylint: disable=no-member
+    expected_after_change.update(arguments['changed_but_refreshed']) # pylint: disable=no-member
 
     assert out['initial'] == expected_initial
     assert out['after_update'] == expected_after_update
@@ -82,13 +82,13 @@ def test_apply_tracked_changes_during_an_operation(context, executor):
     assert out['after_refresh'] == expected_after_change
 
 
-def _run_workflow(context, executor, op_func, inputs=None):
+def _run_workflow(context, executor, op_func, arguments=None):
     @workflow
     def mock_workflow(ctx, graph):
         node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
         interface_name = 'test_interface'
         operation_name = 'operation'
-        wf_arguments = inputs or {}
+        wf_arguments = arguments or {}
         interface = mock.models.create_interface(
             ctx.service,
             interface_name,
@@ -101,7 +114,7 @@ def _run_workflow(context, executor, op_func, inputs=None):
             node,
             interface_name=interface_name,
             operation_name=operation_name,
-            inputs=wf_arguments)
+            arguments=wf_arguments)
         graph.add_tasks(task)
         return graph
     graph = mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/888c5cd6/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
index ee9e094..4d53f9b 100644
--- a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
+++ b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
@@ -311,7 +311,7 @@ policy_types:
       client connections cleanly and shut down services. 
     derived_from: aria.Workflow
     properties:
-      function:
+      implementation:
         type: string
         default: workflows.maintenance
       enabled: