Posted to dev@ariatosca.apache.org by em...@apache.org on 2017/05/31 20:11:23 UTC

[3/3] incubator-ariatosca git commit: ARIA-149 Enhance operation configuration

ARIA-149 Enhance operation configuration

* Also fixes ARIA-121, ARIA-190
* Parse special "dependencies" configuration parameters as YAML and
  treat as Parameter models, allowing them full use of intrinsic
  functions, type coersions, and validations
* Rename various functions that process "properties" to more generically
  process "parameters" (properties, inputs, attributes, arguments, etc.)
* The "configuration" field in OperationTemplate and Operation models
  is now a dict of Parameter models
* Add "function" and "arguments" fields to Operation model to preserve
  user data (in "implementation" and "inputs") and to clearly demarcate
  orchestration data from user data; update task API accordingly
* Some cleanup of parser code touched by this commit
* Rename "create_parameters" to "merge_parameter_values" and improve


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/b602f145
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/b602f145
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/b602f145

Branch: refs/heads/ARIA-149-functions-in-operation-configuration
Commit: b602f14554ae27853ea17c29a0c3f8f9f3fb37b6
Parents: 07d7951
Author: Tal Liron <ta...@gmail.com>
Authored: Thu Apr 20 17:54:47 2017 -0500
Committer: Tal Liron <ta...@gmail.com>
Committed: Wed May 31 15:09:11 2017 -0500

----------------------------------------------------------------------
 aria/cli/commands/services.py                   |   2 +-
 aria/cli/execution_logging.py                   |   6 +-
 aria/core.py                                    |  14 +-
 aria/modeling/constraints.py                    |  28 +++
 aria/modeling/contraints.py                     |  28 ---
 aria/modeling/exceptions.py                     |  16 +-
 aria/modeling/orchestration.py                  |  14 +-
 aria/modeling/service_common.py                 |   7 +-
 aria/modeling/service_instance.py               |  89 +++++---
 aria/modeling/service_template.py               |  67 +++---
 aria/modeling/utils.py                          | 114 +++++-----
 aria/orchestrator/__init__.py                   |   7 +-
 aria/orchestrator/context/operation.py          |   4 +-
 aria/orchestrator/decorators.py                 |   3 +-
 .../execution_plugin/instantiation.py           | 139 +++++++-----
 aria/orchestrator/workflow_runner.py            |  17 +-
 aria/orchestrator/workflows/api/task.py         |  89 +++++---
 .../workflows/builtin/execute_operation.py      |   2 +-
 aria/orchestrator/workflows/core/task.py        |   5 +-
 aria/orchestrator/workflows/events_logging.py   |   6 +-
 aria/orchestrator/workflows/executor/base.py    |   4 +-
 aria/orchestrator/workflows/executor/celery.py  |   6 +-
 aria/orchestrator/workflows/executor/dry.py     |   6 +-
 aria/orchestrator/workflows/executor/process.py |  12 +-
 aria/orchestrator/workflows/executor/thread.py  |   6 +-
 aria/utils/formatting.py                        |   4 +-
 aria/utils/validation.py                        |   8 +-
 .../profiles/aria-1.0/aria-1.0.yaml             |   8 -
 .../simple_v1_0/assignments.py                  |   4 +-
 .../simple_v1_0/modeling/__init__.py            |  73 +++++--
 .../simple_v1_0/modeling/artifacts.py           |   2 +-
 .../simple_v1_0/modeling/capabilities.py        |  24 ++-
 .../simple_v1_0/modeling/constraints.py         |   2 +-
 .../simple_v1_0/modeling/data_types.py          |  16 ++
 .../simple_v1_0/modeling/functions.py           |   4 +-
 .../simple_v1_0/modeling/interfaces.py          |  34 ++-
 .../simple_v1_0/modeling/parameters.py          | 211 +++++++++++++++++++
 .../simple_v1_0/modeling/policies.py            |   2 +
 .../simple_v1_0/modeling/properties.py          | 202 ------------------
 .../simple_v1_0/modeling/requirements.py        |  20 +-
 .../modeling/substitution_mappings.py           |   4 +
 .../simple_v1_0/templates.py                    |  13 +-
 .../aria_extension_tosca/simple_v1_0/types.py   |  24 +--
 tests/cli/test_services.py                      |  14 +-
 tests/mock/models.py                            |  10 +-
 tests/mock/topology.py                          |  12 +-
 tests/modeling/test_models.py                   |  12 +-
 tests/orchestrator/context/test_operation.py    |  86 ++++----
 tests/orchestrator/context/test_serialize.py    |   6 +-
 tests/orchestrator/context/test_toolbelt.py     |  14 +-
 .../orchestrator/execution_plugin/test_local.py |  14 +-
 tests/orchestrator/execution_plugin/test_ssh.py |  16 +-
 tests/orchestrator/test_workflow_runner.py      |  16 +-
 tests/orchestrator/workflows/api/test_task.py   |  40 ++--
 .../workflows/builtin/test_execute_operation.py |   2 +-
 .../orchestrator/workflows/core/test_engine.py  |  44 ++--
 .../orchestrator/workflows/core/test_events.py  |   3 +-
 tests/orchestrator/workflows/core/test_task.py  |  10 +-
 .../test_task_graph_into_execution_graph.py     |   6 +-
 .../orchestrator/workflows/executor/__init__.py |   6 +-
 .../workflows/executor/test_executor.py         |  10 +-
 .../workflows/executor/test_process_executor.py |   2 +-
 ...process_executor_concurrent_modifications.py |  10 +-
 .../executor/test_process_executor_extension.py |  23 +-
 .../test_process_executor_tracked_changes.py    |  18 +-
 .../node-cellar/node-cellar.yaml                |  22 +-
 66 files changed, 979 insertions(+), 763 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/cli/commands/services.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/services.py b/aria/cli/commands/services.py
index 24de7c5..476387c 100644
--- a/aria/cli/commands/services.py
+++ b/aria/cli/commands/services.py
@@ -151,7 +151,7 @@ def create(service_template_name,
     except storage_exceptions.StorageError as e:
         utils.check_overriding_storage_exceptions(e, 'service', service_name)
         raise
-    except modeling_exceptions.InputsException:
+    except modeling_exceptions.ParameterException:
         service_templates.print_service_template_inputs(model_storage, service_template_name,
                                                         logger)
         raise

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/cli/execution_logging.py
----------------------------------------------------------------------
diff --git a/aria/cli/execution_logging.py b/aria/cli/execution_logging.py
index b23165f..248ff7c 100644
--- a/aria/cli/execution_logging.py
+++ b/aria/cli/execution_logging.py
@@ -105,12 +105,12 @@ def stylize_log(item, mark_pattern):
     # implementation
     if item.task:
         # operation task
-        implementation = item.task.implementation
-        inputs = dict(i.unwrap() for i in item.task.inputs.values())
+        implementation = item.task.function
+        inputs = dict(arg.unwrapped for arg in item.task.arguments.values())
     else:
         # execution task
         implementation = item.execution.workflow_name
-        inputs = dict(i.unwrap() for i in item.execution.inputs.values())
+        inputs = dict(inp.unwrapped for inp in item.execution.inputs.values())
 
     stylized_str = color.StringStylizer(_get_format())
     _populate_level(stylized_str, item)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/core.py
----------------------------------------------------------------------
diff --git a/aria/core.py b/aria/core.py
index cc943ef..f660167 100644
--- a/aria/core.py
+++ b/aria/core.py
@@ -56,7 +56,8 @@ class Core(object):
         service_template = self.model_storage.service_template.get(service_template_id)
         if service_template.services:
             raise exceptions.DependentServicesError(
-                "Can't delete service template {0} - Service template has existing services")
+                'Can\'t delete service template `{0}` - service template has existing services'
+                .format(service_template.name))
 
         self.model_storage.service_template.delete(service_template)
         self.resource_storage.service_template.delete(entry_id=str(service_template.id))
@@ -87,7 +88,8 @@ class Core(object):
                     consumption.CoerceServiceInstanceValues
                 )).consume()
             if context.validation.dump_issues():
-                raise exceptions.InstantiationError('Failed to instantiate service template')
+                raise exceptions.InstantiationError('Failed to instantiate service template `{0}`'
+                                                    .format(service_template.name))
 
         storage_session.flush()  # flushing so service.id would auto-populate
         service.name = service_name or '{0}_{1}'.format(service_template.name, service.id)
@@ -100,15 +102,15 @@ class Core(object):
         active_executions = [e for e in service.executions if e.is_active()]
         if active_executions:
             raise exceptions.DependentActiveExecutionsError(
-                "Can't delete service {0} - there is an active execution for this service. "
-                "Active execution id: {1}".format(service.name, active_executions[0].id))
+                'Can\'t delete service `{0}` - there is an active execution for this service. '
+                'Active execution ID: {1}'.format(service.name, active_executions[0].id))
 
         if not force:
             available_nodes = [str(n.id) for n in service.nodes.values() if n.is_available()]
             if available_nodes:
                 raise exceptions.DependentAvailableNodesError(
-                    "Can't delete service {0} - there are available nodes for this service. "
-                    "Available node ids: {1}".format(service.name, ', '.join(available_nodes)))
+                    'Can\'t delete service `{0}` - there are available nodes for this service. '
+                    'Available node IDs: {1}'.format(service.name, ', '.join(available_nodes)))
 
         self.model_storage.service.delete(service)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/modeling/constraints.py
----------------------------------------------------------------------
diff --git a/aria/modeling/constraints.py b/aria/modeling/constraints.py
new file mode 100644
index 0000000..107b010
--- /dev/null
+++ b/aria/modeling/constraints.py
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class NodeTemplateConstraint(object):
+    """
+    Used to constrain requirements for node templates.
+
+    Must be serializable.
+    """
+
+    def matches(self, source_node_template, target_node_template):
+        """
+        Returns true if the target matches the constraint for the source.
+        """
+        raise NotImplementedError
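
For illustration, a subclass of the new NodeTemplateConstraint base class might
look like the sketch below. It is not part of this commit; the "zone" property
and the properties access on the node templates are hypothetical.

    class InSameZone(NodeTemplateConstraint):
        """
        Hypothetical constraint: match only targets in the same "zone" as the source.
        """

        def matches(self, source_node_template, target_node_template):
            source_zone = source_node_template.properties.get('zone')
            target_zone = target_node_template.properties.get('zone')
            return (source_zone is not None) and (target_zone is not None) \
                and (source_zone.value == target_zone.value)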

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/modeling/contraints.py
----------------------------------------------------------------------
diff --git a/aria/modeling/contraints.py b/aria/modeling/contraints.py
deleted file mode 100644
index 107b010..0000000
--- a/aria/modeling/contraints.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class NodeTemplateConstraint(object):
-    """
-    Used to constrain requirements for node templates.
-
-    Must be serializable.
-    """
-
-    def matches(self, source_node_template, target_node_template):
-        """
-        Returns true is the target matches the constraint for the source.
-        """
-        raise NotImplementedError

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/modeling/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/modeling/exceptions.py b/aria/modeling/exceptions.py
index 19fd942..d0e3e22 100644
--- a/aria/modeling/exceptions.py
+++ b/aria/modeling/exceptions.py
@@ -22,9 +22,9 @@ class ModelingException(AriaException):
     """
 
 
-class InputsException(ModelingException):
+class ParameterException(ModelingException):
     """
-    ARIA inputs exception.
+    ARIA parameter exception.
     """
     pass
 
@@ -41,19 +41,19 @@ class CannotEvaluateFunctionException(ModelingException):
     """
 
 
-class MissingRequiredInputsException(InputsException):
+class MissingRequiredParametersException(ParameterException):
     """
-    ARIA modeling exception: Required inputs have been omitted.
+    ARIA modeling exception: Required parameters have been omitted.
     """
 
 
-class InputsOfWrongTypeException(InputsException):
+class ParametersOfWrongTypeException(ParameterException):
     """
-    ARIA modeling exception: Inputs of the wrong types have been provided.
+    ARIA modeling exception: Parameters of the wrong types have been provided.
     """
 
 
-class UndeclaredInputsException(InputsException):
+class UndeclaredParametersException(ParameterException):
     """
-    ARIA modeling exception: Undeclared inputs have been provided.
+    ARIA modeling exception: Undeclared parameters have been provided.
     """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/modeling/orchestration.py
----------------------------------------------------------------------
diff --git a/aria/modeling/orchestration.py b/aria/modeling/orchestration.py
index ab9d34d..97de552 100644
--- a/aria/modeling/orchestration.py
+++ b/aria/modeling/orchestration.py
@@ -230,10 +230,10 @@ class TaskBase(ModelMixin):
     :vartype relationship: :class:`Relationship`
     :ivar plugin: The implementing plugin (set to None for default execution plugin)
     :vartype plugin: :class:`Plugin`
-    :ivar inputs: Parameters that can be used by this task
-    :vartype inputs: {basestring: :class:`Parameter`}
-    :ivar implementation: Python path to an ``@operation`` function
-    :vartype implementation: basestring
+    :ivar function: Python path to an ``@operation`` function
+    :vartype function: basestring
+    :ivar arguments: Arguments that can be used by this task
+    :vartype arguments: {basestring: :class:`Parameter`}
     :ivar max_attempts: Maximum number of retries allowed in case of failure
     :vartype max_attempts: int
     :ivar retry_interval: Interval between retries (in seconds)
@@ -300,10 +300,10 @@ class TaskBase(ModelMixin):
         return relationship.many_to_one(cls, 'execution')
 
     @declared_attr
-    def inputs(cls):
-        return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+    def arguments(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='arguments', dict_key='name')
 
-    implementation = Column(String)
+    function = Column(String)
     max_attempts = Column(Integer, default=1)
     retry_interval = Column(Float, default=0)
     ignore_failure = Column(Boolean, default=False)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/modeling/service_common.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_common.py b/aria/modeling/service_common.py
index ef19c8e..8f844a2 100644
--- a/aria/modeling/service_common.py
+++ b/aria/modeling/service_common.py
@@ -203,7 +203,8 @@ class ParameterBase(TemplateModelMixin, caching.HasCachedMethods):
         if self.description:
             console.puts(context.style.meta(self.description))
 
-    def unwrap(self):
+    @property
+    def unwrapped(self):
         return self.name, self.value
 
     @classmethod
@@ -211,6 +212,10 @@ class ParameterBase(TemplateModelMixin, caching.HasCachedMethods):
         """
         Wraps an arbitrary value as a parameter. The type will be guessed via introspection.
 
+        For primitive types, we will prefer their TOSCA aliases. See the `TOSCA Simple Profile v1.0
+        cos01 specification <http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01
+        /TOSCA-Simple-Profile-YAML-v1.0-cos01.html#_Toc373867862>`__
+
         :param name: Parameter name
         :type name: basestring
         :param value: Parameter value
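
In use, the two touched APIs read roughly as follows (a sketch; the import path
and the optional description argument to wrap() mirror their use in
instantiation.py later in this commit, and the 'timeout' parameter is made up):

    from aria.modeling.models import Parameter

    p = Parameter.wrap('timeout', 30, 'Hypothetical timeout in seconds.')
    name, value = p.unwrapped    # a property now, no longer an unwrap() method

    # as in execution_logging.py above:
    # arguments = dict(arg.unwrapped for arg in task.arguments.values())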

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/modeling/service_instance.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py
index 7058969..8033f4f 100644
--- a/aria/modeling/service_instance.py
+++ b/aria/modeling/service_instance.py
@@ -31,7 +31,11 @@ from .mixins import InstanceModelMixin
 from ..orchestrator import execution_plugin
 from ..parser import validation
 from ..parser.consumption import ConsumptionContext
-from ..utils import collections, formatting, console
+from ..utils import (
+    collections,
+    formatting,
+    console
+)
 from . import (
     relationship,
     utils,
@@ -1631,20 +1635,24 @@ class OperationBase(InstanceModelMixin):
     :vartype operation_template: :class:`OperationTemplate`
     :ivar description: Human-readable description
     :vartype description: string
-    :ivar plugin: Associated plugin
-    :vartype plugin: :class:`Plugin`
     :ivar relationship_edge: When true specified that the operation is on the relationship's
                              target edge instead of its source (only used by relationship
                              operations)
     :vartype relationship_edge: bool
     :ivar implementation: Implementation (interpreted by the plugin)
     :vartype implementation: basestring
-    :ivar configuration: Configuration (interpreted by the plugin)
-    :vartype configuration: {basestring, object}
     :ivar dependencies: Dependency strings (interpreted by the plugin)
     :vartype dependencies: [basestring]
     :ivar inputs: Parameters that can be used by this operation
     :vartype inputs: {basestring: :class:`Parameter`}
+    :ivar plugin: Associated plugin
+    :vartype plugin: :class:`Plugin`
+    :ivar configuration: Configuration (interpreted by the plugin)
+    :vartype configuration: {basestring, :class:`Parameter`}
+    :ivar function: Name of the operation function
+    :vartype function: basestring
+    :ivar arguments: Arguments to send to the operation function
+    :vartype arguments: {basestring: :class:`Parameter`}
     :ivar executor: Name of executor to run the operation with
     :vartype executor: basestring
     :ivar max_attempts: Maximum number of attempts allowed in case of failure
@@ -1726,34 +1734,55 @@ class OperationBase(InstanceModelMixin):
     def inputs(cls):
         return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
 
+    @declared_attr
+    def configuration(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='configuration', dict_key='name')
+
+    @declared_attr
+    def arguments(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='arguments', dict_key='name')
+
     # endregion
 
     description = Column(Text)
     relationship_edge = Column(Boolean)
     implementation = Column(Text)
-    configuration = Column(modeling_types.StrictDict(key_cls=basestring))
     dependencies = Column(modeling_types.StrictList(item_cls=basestring))
+    function = Column(Text)
     executor = Column(Text)
     max_attempts = Column(Integer)
     retry_interval = Column(Integer)
 
     def configure(self):
-        from . import models
-        # Note: for workflows (operations attached directly to the service) "interface" will be None
-        if (self.implementation is None) or (self.interface is None):
+        if (self.implementation is None) and (self.function is None):
             return
 
-        if self.plugin is None:
-            arguments = execution_plugin.instantiation.configure_operation(self)
+        if (self.interface is not None) and (self.plugin is None) and (self.function is None):
+            # ("interface" is None for workflow operations, which do not currently use "plugin")
+            # The default (None) plugin is the execution plugin
+            execution_plugin.instantiation.configure_operation(self)
         else:
             # In the future plugins may be able to add their own "configure_operation" hook that
-            # can validate the configuration and otherwise return specially derived arguments
-            arguments = self.configuration
+            # can validate the configuration and otherwise create specially derived arguments. For
+            # now, we just send all configuration parameters as arguments without validation.
+            utils.instantiate_dict(self, self.arguments, self.configuration)
+
+        # Send all inputs as extra arguments
+        # Note that they will override existing arguments of the same names
+        utils.instantiate_dict(self, self.arguments, self.inputs)
+
+        # Check for reserved arguments
+        from ..orchestrator.decorators import OPERATION_DECORATOR_RESERVED_ARGUMENTS
+        used_reserved_names = \
+            list(OPERATION_DECORATOR_RESERVED_ARGUMENTS.intersection(self.arguments.keys()))
+        if used_reserved_names:
+            context = ConsumptionContext.get_thread_local()
+            context.validation.report('using reserved arguments in node "{0}": {1}'
+                                      .format(
+                                          self.name,
+                                          formatting.string_list_as_string(used_reserved_names)),
+                                      level=validation.Issue.EXTERNAL)
 
-        # Note: the arguments will *override* operation inputs of the same name
-        if arguments:
-            for k, v in arguments.iteritems():
-                self.inputs[k] = models.Parameter.wrap(k, v)
 
     @property
     def as_raw(self):
@@ -1762,17 +1791,18 @@ class OperationBase(InstanceModelMixin):
             ('description', self.description),
             ('implementation', self.implementation),
             ('dependencies', self.dependencies),
-            ('executor', self.executor),
-            ('max_attempts', self.max_attempts),
-            ('retry_interval', self.retry_interval),
             ('inputs', formatting.as_raw_dict(self.inputs))))
 
     def validate(self):
-        # TODO must be associated with interface or service
+        # TODO must be associated with either interface or service
         utils.validate_dict_values(self.inputs)
+        utils.validate_dict_values(self.configuration)
+        utils.validate_dict_values(self.arguments)
 
     def coerce_values(self, report_issues):
         utils.coerce_dict_values(self.inputs, report_issues)
+        utils.coerce_dict_values(self.configuration, report_issues)
+        utils.coerce_dict_values(self.arguments, report_issues)
 
     def dump(self):
         context = ConsumptionContext.get_thread_local()
@@ -1780,21 +1810,14 @@ class OperationBase(InstanceModelMixin):
         if self.description:
             console.puts(context.style.meta(self.description))
         with context.style.indent:
-            if self.plugin is not None:
-                console.puts('Plugin: {0}'.format(
-                    context.style.literal(self.plugin.name)))
             if self.implementation is not None:
                 console.puts('Implementation: {0}'.format(
                     context.style.literal(self.implementation)))
-            if self.configuration:
-                with context.style.indent:
-                    for k, v in self.configuration.iteritems():
-                        console.puts('{0}: {1}'.format(context.style.property(k),
-                                                       context.style.literal(v)))
             if self.dependencies:
                 console.puts(
                     'Dependencies: {0}'.format(
                         ', '.join((str(context.style.literal(v)) for v in self.dependencies))))
+            utils.dump_dict_values(self.inputs, 'Inputs')
             if self.executor is not None:
                 console.puts('Executor: {0}'.format(context.style.literal(self.executor)))
             if self.max_attempts is not None:
@@ -1802,7 +1825,13 @@ class OperationBase(InstanceModelMixin):
             if self.retry_interval is not None:
                 console.puts('Retry interval: {0}'.format(
                     context.style.literal(self.retry_interval)))
-            utils.dump_dict_values(self.inputs, 'Inputs')
+            if self.plugin is not None:
+                console.puts('Plugin: {0}'.format(
+                    context.style.literal(self.plugin.name)))
+            utils.dump_dict_values(self.configuration, 'Configuration')
+            if self.function is not None:
+                console.puts('Function: {0}'.format(context.style.literal(self.function)))
+            utils.dump_dict_values(self.arguments, 'Arguments')
 
 
 class ArtifactBase(InstanceModelMixin):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
index 3110248..42e0d01 100644
--- a/aria/modeling/service_template.py
+++ b/aria/modeling/service_template.py
@@ -287,7 +287,7 @@ class ServiceTemplateBase(TemplateModelMixin):
                                  service_template=self)
         context.modeling.instance = service
 
-        service.inputs = utils.create_inputs(inputs or {}, self.inputs)
+        service.inputs = utils.merge_parameter_values(inputs, self.inputs)
         # TODO: now that we have inputs, we should scan properties and inputs and evaluate functions
 
         for plugin_specification in self.plugin_specifications.itervalues():
@@ -1762,20 +1762,22 @@ class OperationTemplateBase(TemplateModelMixin):
     :vartype name: basestring
     :ivar description: Human-readable description
     :vartype description: basestring
-    :ivar plugin_specification: Associated plugin
-    :vartype plugin_specification: :class:`PluginSpecification`
     :ivar relationship_edge: When true specified that the operation is on the relationship's
                              target edge instead of its source (only used by relationship
                              operations)
     :vartype relationship_edge: bool
     :ivar implementation: Implementation (interpreted by the plugin)
     :vartype implementation: basestring
-    :ivar configuration: Configuration (interpreted by the plugin)
-    :vartype configuration: {basestring, object}
     :ivar dependencies: Dependency strings (interpreted by the plugin)
     :vartype dependencies: [basestring]
     :ivar inputs: Parameters that can be used by this operation
     :vartype inputs: {basestring: :class:`Parameter`}
+    :ivar plugin_specification: Associated plugin
+    :vartype plugin_specification: :class:`PluginSpecification`
+    :ivar configuration: Configuration (interpreted by the plugin)
+    :vartype configuration: {basestring, :class:`Parameter`}
+    :ivar function: Name of the operation function
+    :vartype function: basestring
     :ivar executor: Name of executor to run the operation with
     :vartype executor: basestring
     :ivar max_attempts: Maximum number of attempts allowed in case of failure
@@ -1855,13 +1857,17 @@ class OperationTemplateBase(TemplateModelMixin):
     def inputs(cls):
         return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
 
+    @declared_attr
+    def configuration(cls):
+        return relationship.many_to_many(cls, 'parameter', prefix='configuration', dict_key='name')
+
     # endregion
 
     description = Column(Text)
     relationship_edge = Column(Boolean)
     implementation = Column(Text)
-    configuration = Column(modeling_types.StrictDict(key_cls=basestring))
     dependencies = Column(modeling_types.StrictList(item_cls=basestring))
+    function = Column(Text)
     executor = Column(Text)
     max_attempts = Column(Integer)
     retry_interval = Column(Integer)
@@ -1873,48 +1879,39 @@ class OperationTemplateBase(TemplateModelMixin):
             ('description', self.description),
             ('implementation', self.implementation),
             ('dependencies', self.dependencies),
-            ('executor', self.executor),
-            ('max_attempts', self.max_attempts),
-            ('retry_interval', self.retry_interval),
             ('inputs', formatting.as_raw_dict(self.inputs))))
 
     def instantiate(self, container):
         from . import models
-        if self.plugin_specification:
-            if self.plugin_specification.enabled:
-                plugin = self.plugin_specification.plugin
-                implementation = self.implementation if plugin is not None else None
-                # "plugin" would be none if a match was not found. In that case, a validation error
-                # should already have been reported in ServiceTemplateBase.instantiate, so we will
-                # continue silently here
-            else:
-                # If the plugin is disabled, the operation should be disabled, too
-                plugin = None
-                implementation = None
-        else:
-            # Using the execution plugin
-            plugin = None
-            implementation = self.implementation
+
+        plugin = self.plugin_specification.plugin \
+            if (self.plugin_specification is not None) and self.plugin_specification.enabled \
+            else None
 
         operation = models.Operation(name=self.name,
                                      description=deepcopy_with_locators(self.description),
                                      relationship_edge=self.relationship_edge,
-                                     plugin=plugin,
-                                     implementation=implementation,
-                                     configuration=self.configuration,
+                                     implementation=self.implementation,
                                      dependencies=self.dependencies,
                                      executor=self.executor,
+                                     plugin=plugin,
+                                     function=self.function,
                                      max_attempts=self.max_attempts,
                                      retry_interval=self.retry_interval,
                                      operation_template=self)
+
         utils.instantiate_dict(container, operation.inputs, self.inputs)
+        utils.instantiate_dict(container, operation.configuration, self.configuration)
+
         return operation
 
     def validate(self):
         utils.validate_dict_values(self.inputs)
+        utils.validate_dict_values(self.configuration)
 
     def coerce_values(self, report_issues):
         utils.coerce_dict_values(self.inputs, report_issues)
+        utils.coerce_dict_values(self.configuration, report_issues)
 
     def dump(self):
         context = ConsumptionContext.get_thread_local()
@@ -1922,20 +1919,13 @@ class OperationTemplateBase(TemplateModelMixin):
         if self.description:
             console.puts(context.style.meta(self.description))
         with context.style.indent:
-            if self.plugin_specification is not None:
-                console.puts('Plugin specification: {0}'.format(
-                    context.style.literal(self.plugin_specification.name)))
             if self.implementation is not None:
                 console.puts('Implementation: {0}'.format(
                     context.style.literal(self.implementation)))
-            if self.configuration:
-                with context.style.indent:
-                    for k, v in self.configuration.iteritems():
-                        console.puts('{0}: {1}'.format(context.style.property(k),
-                                                       context.style.literal(v)))
             if self.dependencies:
                 console.puts('Dependencies: {0}'.format(
                     ', '.join((str(context.style.literal(v)) for v in self.dependencies))))
+            utils.dump_dict_values(self.inputs, 'Inputs')
             if self.executor is not None:
                 console.puts('Executor: {0}'.format(context.style.literal(self.executor)))
             if self.max_attempts is not None:
@@ -1943,7 +1933,12 @@ class OperationTemplateBase(TemplateModelMixin):
             if self.retry_interval is not None:
                 console.puts('Retry interval: {0}'.format(
                     context.style.literal(self.retry_interval)))
-            utils.dump_dict_values(self.inputs, 'Inputs')
+            if self.plugin_specification is not None:
+                console.puts('Plugin specification: {0}'.format(
+                    context.style.literal(self.plugin_specification.name)))
+            utils.dump_dict_values(self.configuration, 'Configuration')
+            if self.function is not None:
+                console.puts('Function: {0}'.format(context.style.literal(self.function)))
 
 
 class ArtifactTemplateBase(TemplateModelMixin):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/modeling/utils.py
----------------------------------------------------------------------
diff --git a/aria/modeling/utils.py b/aria/modeling/utils.py
index 0404fe4..66e9c99 100644
--- a/aria/modeling/utils.py
+++ b/aria/modeling/utils.py
@@ -21,6 +21,8 @@ from . import exceptions
 from ..parser.consumption import ConsumptionContext
 from ..utils.console import puts
 from ..utils.type import validate_value_type
+from ..utils.collections import OrderedDict
+from ..utils.formatting import string_list_as_string
 
 
 class ModelJSONEncoder(JSONEncoder):
@@ -39,7 +41,7 @@ class ModelJSONEncoder(JSONEncoder):
 class NodeTemplateContainerHolder(object):
     """
     Wrapper that allows using a :class:`aria.modeling.models.NodeTemplate` model directly as the
-    ``container_holder`` argument for :func:`aria.modeling.functions.evaluate`.
+    ``container_holder`` input for :func:`aria.modeling.functions.evaluate`.
     """
 
     def __init__(self, node_template):
@@ -51,74 +53,84 @@ class NodeTemplateContainerHolder(object):
         return self.container.service_template
 
 
-def create_inputs(inputs, template_inputs):
+def merge_parameter_values(parameter_values, declared_parameters):
     """
-    :param inputs: key-value dict
-    :param template_inputs: parameter name to parameter object dict
-    :return: dict of parameter name to Parameter models
+    Merges parameter values according to those declared by a type.
+
+    Exceptions will be raised for validation errors.
+
+    :param parameter_values: provided parameter values or None
+    :type parameter_values: {basestring, object}
+    :param declared_parameters: declared parameters
+    :type declared_parameters: {basestring, :class:`aria.modeling.models.Parameter`}
+    :return: the merged parameters
+    :rtype: {basestring, :class:`aria.modeling.models.Parameter`}
+    :raises aria.modeling.exceptions.UndeclaredParametersException: if a key in ``parameter_values``
+            does not exist in ``declared_parameters``
+    :raises aria.modeling.exceptions.MissingRequiredParametersException: if a key in
+            ``declared_parameters`` does not exist in ``parameter_values`` and also has no default
+            value
+    :raises aria.modeling.exceptions.ParametersOfWrongTypeException: if a value in
+            ``parameter_values`` does not match its type in ``declared_parameters``
     """
-    merged_inputs = _merge_and_validate_inputs(inputs, template_inputs)
 
     from . import models
-    input_models = []
-    for input_name, input_val in merged_inputs.iteritems():
-        parameter = models.Parameter( # pylint: disable=unexpected-keyword-arg
-            name=input_name,
-            type_name=template_inputs[input_name].type_name,
-            description=template_inputs[input_name].description,
-            value=input_val)
-        input_models.append(parameter)
 
-    return dict((inp.name, inp) for inp in input_models)
+    parameter_values = parameter_values or {}
 
+    undeclared_names = list(set(parameter_values.keys()).difference(declared_parameters.keys()))
+    if undeclared_names:
+        raise exceptions.UndeclaredParametersException(
+            'Undeclared parameters have been provided: {0}; Declared: {1}'
+            .format(string_list_as_string(undeclared_names),
+                    string_list_as_string(declared_parameters.keys())))
 
-def _merge_and_validate_inputs(inputs, template_inputs):
-    """
-    :param inputs: key-value dict
-    :param template_inputs: parameter name to parameter object dict
-    :return:
-    """
-    merged_inputs = inputs.copy()
-
-    missing_inputs = []
-    wrong_type_inputs = {}
-    for input_name, input_template in template_inputs.iteritems():
-        if input_name not in inputs:
-            if input_template.value is not None:
-                merged_inputs[input_name] = input_template.value  # apply default value
-            else:
-                missing_inputs.append(input_name)
-        else:
-            # Validate input type
+    parameters = OrderedDict()
+
+    missing_names = []
+    wrong_type_values = OrderedDict()
+    for declared_parameter_name, declared_parameter in declared_parameters.iteritems():
+        if declared_parameter_name in parameter_values:
+            # Value has been provided
+            value = parameter_values[declared_parameter_name]
+
+            # Validate type
+            type_name = declared_parameter.type_name
             try:
-                validate_value_type(inputs[input_name], input_template.type_name)
+                validate_value_type(value, type_name)
             except ValueError:
-                wrong_type_inputs[input_name] = input_template.type_name
+                wrong_type_values[declared_parameter_name] = type_name
             except RuntimeError:
                 # TODO: This error shouldn't be raised (or caught), but right now we lack support
                 # for custom data_types, which will raise this error. Skipping their validation.
                 pass
 
-    if missing_inputs:
-        raise exceptions.MissingRequiredInputsException(
-            'Required inputs {0} have not been specified - expected inputs: {1}'
-            .format(missing_inputs, template_inputs.keys()))
+            # Wrap in Parameter model
+            parameters[declared_parameter_name] = models.Parameter( # pylint: disable=unexpected-keyword-arg
+                name=declared_parameter_name,
+                type_name=type_name,
+                description=declared_parameter.description,
+                value=value)
+        elif declared_parameter.value is not None:
+            # Copy default value from declaration
+            parameters[declared_parameter_name] = declared_parameter.instantiate(None)
+        else:
+            # Required value has not been provided
+            missing_names.append(declared_parameter_name)
+
+    if missing_names:
+        raise exceptions.MissingRequiredParametersException(
+            'Declared parameters {0} have not been provided values'
+            .format(string_list_as_string(missing_names)))
 
-    if wrong_type_inputs:
+    if wrong_type_values:
         error_message = StringIO()
-        for param_name, param_type in wrong_type_inputs.iteritems():
-            error_message.write('Input "{0}" must be of type {1}{2}'
+        for param_name, param_type in wrong_type_values.iteritems():
+            error_message.write('Parameter "{0}" is not of declared type "{1}"{2}'
                                 .format(param_name, param_type, os.linesep))
-        raise exceptions.InputsOfWrongTypeException(error_message.getvalue())
-
-    undeclared_inputs = [input_name for input_name in inputs.keys()
-                         if input_name not in template_inputs]
-    if undeclared_inputs:
-        raise exceptions.UndeclaredInputsException(
-            'Undeclared inputs have been specified: {0}; Expected inputs: {1}'
-            .format(undeclared_inputs, template_inputs.keys()))
+        raise exceptions.ParametersOfWrongTypeException(error_message.getvalue())
 
-    return merged_inputs
+    return parameters
 
 
 def coerce_dict_values(the_dict, report_issues=False):
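
A usage sketch of the renamed helper, following the docstring above
(service_template and service stand in for existing model instances; the
provided input value is made up):

    from aria.modeling import exceptions
    from aria.modeling.utils import merge_parameter_values

    try:
        # Returns {name: Parameter}, copying defaults for declared parameters that
        # were not provided and validating the types of the values that were.
        service.inputs = merge_parameter_values({'port': 8080}, service_template.inputs)
    except exceptions.ParameterException:
        raise  # undeclared, missing-required, or wrong-typed values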

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/orchestrator/__init__.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/__init__.py b/aria/orchestrator/__init__.py
index 0b57e4b..b855aed 100644
--- a/aria/orchestrator/__init__.py
+++ b/aria/orchestrator/__init__.py
@@ -15,7 +15,12 @@
 """
 ARIA orchestrator
 """
-from .decorators import workflow, operation, WORKFLOW_DECORATOR_RESERVED_ARGUMENTS
+from .decorators import (
+    workflow,
+    operation,
+    WORKFLOW_DECORATOR_RESERVED_ARGUMENTS,
+    OPERATION_DECORATOR_RESERVED_ARGUMENTS
+)
 
 from . import (
     context,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/orchestrator/context/operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/operation.py b/aria/orchestrator/context/operation.py
index 7c21351..f0ba337 100644
--- a/aria/orchestrator/context/operation.py
+++ b/aria/orchestrator/context/operation.py
@@ -42,8 +42,8 @@ class BaseOperationContext(common.BaseContext):
         self._register_logger(task_id=self.task.id, level=logger_level)
 
     def __repr__(self):
-        details = 'implementation={task.implementation}; ' \
-                  'operation_inputs={task.inputs}'\
+        details = 'function={task.function}; ' \
+                  'operation_arguments={task.arguments}'\
             .format(task=self.task)
         return '{name}({0})'.format(details, name=self.name)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/orchestrator/decorators.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/decorators.py b/aria/orchestrator/decorators.py
index 62e4a14..4051a54 100644
--- a/aria/orchestrator/decorators.py
+++ b/aria/orchestrator/decorators.py
@@ -26,7 +26,8 @@ from . import context
 from .workflows.api import task_graph
 
 
-WORKFLOW_DECORATOR_RESERVED_ARGUMENTS = ('ctx', 'graph')
+WORKFLOW_DECORATOR_RESERVED_ARGUMENTS = set(('ctx', 'graph'))
+OPERATION_DECORATOR_RESERVED_ARGUMENTS = set(('ctx', 'toolbelt'))
 
 
 def workflow(func=None, suffix_template=''):
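
A sketch of why 'ctx' and 'toolbelt' are reserved for operations (the decorator
import follows aria/orchestrator/__init__.py above; the operation function and
its 'hostname' argument are hypothetical):

    from aria.orchestrator import operation

    @operation
    def configure(ctx, hostname, **kwargs):
        # 'ctx' (and 'toolbelt', when requested) are injected by the orchestrator,
        # so user-declared inputs/configuration must not reuse these names --
        # Operation.configure() reports such clashes as validation issues.
        ctx.logger.info('configuring {0}'.format(hostname))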

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/orchestrator/execution_plugin/instantiation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/instantiation.py b/aria/orchestrator/execution_plugin/instantiation.py
index c09434e..9b5152d 100644
--- a/aria/orchestrator/execution_plugin/instantiation.py
+++ b/aria/orchestrator/execution_plugin/instantiation.py
@@ -16,19 +16,14 @@
 # TODO: this module will eventually be moved to a new "aria.instantiation" package
 
 from ...utils.type import full_type_name
+from ...utils.formatting import safe_repr
 from ...utils.collections import OrderedDict
 from ...parser import validation
 from ...parser.consumption import ConsumptionContext
+from ...modeling.functions import Function
 
 
 def configure_operation(operation):
-    configuration = OrderedDict(operation.configuration) if operation.configuration else {}
-
-    arguments = OrderedDict()
-    arguments['script_path'] = operation.implementation
-    arguments['process'] = _get_process(configuration.pop('process')) \
-        if 'process' in configuration else dict()
-
     host = None
     interface = operation.interface
     if interface.node is not None:
@@ -36,107 +31,143 @@ def configure_operation(operation):
     elif interface.relationship is not None:
         if operation.relationship_edge is True:
             host = interface.relationship.target_node.host
-        else: # either False or None
+        else: # either False or None (None meaning that edge was not specified)
             host = interface.relationship.source_node.host
 
+    _configure_common(operation)
     if host is None:
         _configure_local(operation)
     else:
-        _configure_remote(operation, configuration, arguments)
+        _configure_remote(operation)
+
+    # Any remaining un-handled configuration parameters will become extra arguments, available as
+    # kwargs in either "run_script_locally" or "run_script_with_ssh"
+    for key, value in operation.configuration.iteritems():
+        if key not in ('process', 'ssh'):
+            operation.arguments[key] = value.instantiate(None)
 
-    # Any remaining unhandled configuration values will become extra arguments, available as kwargs
-    # in either "run_script_locally" or "run_script_with_ssh"
-    arguments.update(configuration)
 
-    return arguments
+def _configure_common(operation):
+    """
+    Local and remote operations.
+    """
+
+    from ...modeling.models import Parameter
+    operation.arguments['script_path'] = Parameter.wrap('script_path', operation.implementation,
+                                                        'Relative path to the executable file.')
+    operation.arguments['process'] = Parameter.wrap('process', _get_process(operation),
+                                                    'Sub-process configuration.')
+
 
 def _configure_local(operation):
     """
     Local operation.
     """
+
     from . import operations
-    operation.implementation = '{0}.{1}'.format(operations.__name__,
-                                                operations.run_script_locally.__name__)
+    operation.function = '{0}.{1}'.format(operations.__name__,
+                                          operations.run_script_locally.__name__)
 
 
-def _configure_remote(operation, configuration, arguments):
+def _configure_remote(operation):
     """
     Remote SSH operation via Fabric.
     """
+
+    from ...modeling.models import Parameter
+    from . import operations
+
+    ssh = _get_ssh(operation)
+
+    # Defaults
     # TODO: find a way to configure these generally in the service template
     default_user = ''
     default_password = ''
-
-    ssh = _get_ssh(configuration.pop('ssh')) if 'ssh' in configuration else {}
     if 'user' not in ssh:
         ssh['user'] = default_user
     if ('password' not in ssh) and ('key' not in ssh) and ('key_filename' not in ssh):
         ssh['password'] = default_password
 
-    arguments['use_sudo'] = ssh.get('use_sudo', False)
-    arguments['hide_output'] = ssh.get('hide_output', [])
-    arguments['fabric_env'] = {}
+    operation.arguments['use_sudo'] = Parameter.wrap('use_sudo', ssh.get('use_sudo', False),
+                                                     'Whether to execute with sudo.')
+
+    operation.arguments['hide_output'] = Parameter.wrap('hide_output', ssh.get('hide_output', []),
+                                                        'Hide output of these Fabric groups.')
+
+    fabric_env = {}
     if 'warn_only' in ssh:
-        arguments['fabric_env']['warn_only'] = ssh['warn_only']
-    arguments['fabric_env']['user'] = ssh.get('user')
-    arguments['fabric_env']['password'] = ssh.get('password')
-    arguments['fabric_env']['key'] = ssh.get('key')
-    arguments['fabric_env']['key_filename'] = ssh.get('key_filename')
+        fabric_env['warn_only'] = ssh['warn_only']
+    fabric_env['user'] = ssh.get('user')
+    fabric_env['password'] = ssh.get('password')
+    fabric_env['key'] = ssh.get('key')
+    fabric_env['key_filename'] = ssh.get('key_filename')
     if 'address' in ssh:
-        arguments['fabric_env']['host_string'] = ssh['address']
+        fabric_env['host_string'] = ssh['address']
 
-    if arguments['fabric_env'].get('user') is None:
+    # Make sure we have a user
+    if fabric_env.get('user') is None:
         context = ConsumptionContext.get_thread_local()
         context.validation.report('must configure "ssh.user" for "{0}"'
                                   .format(operation.implementation),
                                   level=validation.Issue.BETWEEN_TYPES)
-    if (arguments['fabric_env'].get('password') is None) and \
-        (arguments['fabric_env'].get('key') is None) and \
-        (arguments['fabric_env'].get('key_filename') is None):
+
+    # Make sure we have an authentication value
+    if (fabric_env.get('password') is None) and \
+        (fabric_env.get('key') is None) and \
+        (fabric_env.get('key_filename') is None):
         context = ConsumptionContext.get_thread_local()
         context.validation.report('must configure "ssh.password", "ssh.key", or "ssh.key_filename" '
                                   'for "{0}"'
                                   .format(operation.implementation),
                                   level=validation.Issue.BETWEEN_TYPES)
 
-    from . import operations
-    operation.implementation = '{0}.{1}'.format(operations.__name__,
-                                                operations.run_script_with_ssh.__name__)
+    operation.arguments['fabric_env'] = Parameter.wrap('fabric_env', fabric_env,
+                                                       'Fabric configuration.')
 
+    operation.function = '{0}.{1}'.format(operations.__name__,
+                                          operations.run_script_with_ssh.__name__)
 
-def _get_process(value):
+
+def _get_process(operation):
+    value = operation.configuration.get('process')._value \
+        if 'process' in operation.configuration else None
     if value is None:
-        return None
+        return {}
     _validate_type(value, dict, 'process')
+    value = OrderedDict(value)
     for k, v in value.iteritems():
         if k == 'eval_python':
-            value[k] = _str_to_bool(v, 'process.eval_python')
+            value[k] = _coerce_bool(v, 'process.eval_python')
         elif k == 'cwd':
             _validate_type(v, basestring, 'process.cwd')
         elif k == 'command_prefix':
             _validate_type(v, basestring, 'process.command_prefix')
         elif k == 'args':
-            value[k] = _dict_to_list(v, 'process.args')
+            value[k] = _dict_to_list_of_strings(v, 'process.args')
         elif k == 'env':
             _validate_type(v, dict, 'process.env')
         else:
             context = ConsumptionContext.get_thread_local()
-            context.validation.report('unsupported configuration: "process.{0}"'.format(k),
+            context.validation.report('unsupported configuration parameter: "process.{0}"'
+                                      .format(k),
                                       level=validation.Issue.BETWEEN_TYPES)
     return value
 
 
-def _get_ssh(value):
+def _get_ssh(operation):
+    value = operation.configuration.get('ssh')._value \
+        if 'ssh' in operation.configuration else None
     if value is None:
         return {}
     _validate_type(value, dict, 'ssh')
+    value = OrderedDict(value)
     for k, v in value.iteritems():
         if k == 'use_sudo':
-            value[k] = _str_to_bool(v, 'ssh.use_sudo')
+            value[k] = _coerce_bool(v, 'ssh.use_sudo')
         elif k == 'hide_output':
-            value[k] = _dict_to_list(v, 'ssh.hide_output')
+            value[k] = _dict_to_list_of_strings(v, 'ssh.hide_output')
         elif k == 'warn_only':
-            value[k] = _str_to_bool(v, 'ssh.warn_only')
+            value[k] = _coerce_bool(v, 'ssh.warn_only')
         elif k == 'user':
             _validate_type(v, basestring, 'ssh.user')
         elif k == 'password':
@@ -149,22 +180,26 @@ def _get_ssh(value):
             _validate_type(v, basestring, 'ssh.address')
         else:
             context = ConsumptionContext.get_thread_local()
-            context.validation.report('unsupported configuration: "ssh.{0}"'.format(k),
+            context.validation.report('unsupported configuration parameter: "ssh.{0}"'.format(k),
                                       level=validation.Issue.BETWEEN_TYPES)
     return value
 
 
 def _validate_type(value, the_type, name):
+    if isinstance(value, Function):
+        return
     if not isinstance(value, the_type):
         context = ConsumptionContext.get_thread_local()
-        context.validation.report('"{0}" configuration is not a {1}'
-                                  .format(name, full_type_name(the_type)),
+        context.validation.report('"{0}" configuration is not a {1}: {2}'
+                                  .format(name, full_type_name(the_type), safe_repr(value)),
                                   level=validation.Issue.BETWEEN_TYPES)
 
 
-def _str_to_bool(value, name):
+def _coerce_bool(value, name):
     if value is None:
         return None
+    if isinstance(value, bool):
+        return value
     _validate_type(value, basestring, name)
     if value == 'true':
         return True
@@ -173,19 +208,15 @@ def _str_to_bool(value, name):
     else:
         context = ConsumptionContext.get_thread_local()
         context.validation.report('"{0}" configuration is not "true" or "false": {1}'
-                                  .format(name, repr(value)),
+                                  .format(name, safe_repr(value)),
                                   level=validation.Issue.BETWEEN_TYPES)
 
 
-def _dict_to_list(the_dict, name):
+def _dict_to_list_of_strings(the_dict, name):
     _validate_type(the_dict, dict, name)
     value = []
     for k in sorted(the_dict):
         v = the_dict[k]
-        if not isinstance(v, basestring):
-            context = ConsumptionContext.get_thread_local()
-            context.validation.report('"{0}.{1}" configuration is not a string: {2}'
-                                      .format(name, k, repr(v)),
-                                      level=validation.Issue.BETWEEN_TYPES)
+        _validate_type(v, basestring, '{0}.{1}'.format(name, k))
         value.append(v)
     return value
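
For reference, the configuration keys handled above, gathered into one
illustrative dict (the key names come from _get_process and _get_ssh; the
values are made up):

    configuration = {
        'process': {
            'eval_python': 'true',            # coerced to bool by _coerce_bool
            'cwd': '/tmp',                    # must be a string
            'command_prefix': 'sudo',         # must be a string
            'args': {'1': '-v', '2': '-x'},   # dict flattened to a sorted list of strings
            'env': {'MY_VAR': 'value'},       # must be a dict
        },
        'ssh': {
            'user': 'admin',                  # reported as an issue if missing
            'key_filename': '~/.ssh/id_rsa',  # or 'password' or 'key'
            'use_sudo': 'false',              # coerced to bool
            'hide_output': {'1': 'stdout'},   # dict flattened to a list of strings
            'warn_only': 'true',              # coerced to bool
            'address': '10.0.0.1',            # becomes fabric_env['host_string']
        },
    }
    # Any other top-level configuration key is instantiated as an extra argument
    # and passed as a kwarg to run_script_locally or run_script_with_ssh.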

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/orchestrator/workflow_runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflow_runner.py b/aria/orchestrator/workflow_runner.py
index 8f25cce..15a55b0 100644
--- a/aria/orchestrator/workflow_runner.py
+++ b/aria/orchestrator/workflow_runner.py
@@ -42,9 +42,10 @@ class WorkflowRunner(object):
                  executor=None, task_max_attempts=DEFAULT_TASK_MAX_ATTEMPTS,
                  task_retry_interval=DEFAULT_TASK_RETRY_INTERVAL):
         """
-        Manages a single workflow execution on a given service
+        Manages a single workflow execution on a given service.
+
         :param workflow_name: Workflow name
-        :param service_id: Service id
+        :param service_id: Service ID
         :param inputs: A key-value dict of inputs for the execution
         :param model_storage: Model storage
         :param resource_storage: Resource storage
@@ -80,7 +81,7 @@ class WorkflowRunner(object):
             task_retry_interval=task_retry_interval)
 
         # transforming the execution inputs to dict, to pass them to the workflow function
-        execution_inputs_dict = dict(inp.unwrap() for inp in self.execution.inputs.values())
+        execution_inputs_dict = dict(inp.unwrapped for inp in self.execution.inputs.values())
         self._tasks_graph = workflow_fn(ctx=workflow_context, **execution_inputs_dict)
 
         executor = executor or ProcessExecutor(plugin_manager=plugin_manager)
@@ -119,7 +120,7 @@ class WorkflowRunner(object):
         else:
             workflow_inputs = self.service.workflows[self._workflow_name].inputs
 
-        execution.inputs = modeling_utils.create_inputs(inputs, workflow_inputs)
+        execution.inputs = modeling_utils.merge_parameter_values(inputs, workflow_inputs)
         # TODO: these two following calls should execute atomically
         self._validate_no_active_executions(execution)
         self._model_storage.execution.put(execution)
@@ -136,7 +137,7 @@ class WorkflowRunner(object):
         active_executions = [e for e in self.service.executions if e.is_active()]
         if active_executions:
             raise exceptions.ActiveExecutionsError(
-                "Can't start execution; Service {0} has an active execution with id {1}"
+                "Can't start execution; Service {0} has an active execution with ID {1}"
                 .format(self.service.name, active_executions[0].id))
 
     def _get_workflow_fn(self):
@@ -156,10 +157,10 @@ class WorkflowRunner(object):
         sys.path.append(service_template_resources_path)
 
         try:
-            workflow_fn = import_fullname(workflow.implementation)
+            workflow_fn = import_fullname(workflow.function)
         except ImportError:
             raise exceptions.WorkflowImplementationNotFoundError(
-                'Could not find workflow {0} implementation at {1}'.format(
-                    self._workflow_name, workflow.implementation))
+                'Could not find workflow {0} function at {1}'.format(
+                    self._workflow_name, workflow.function))
 
         return workflow_fn

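The switch from ``inp.unwrap()`` to ``inp.unwrapped`` above relies on each parameter yielding a (name, value) pair suitable for ``dict()``. A minimal sketch of that call pattern, using a stand-in class rather than the real Parameter model:

    # Stand-in for the Parameter model: 'unwrapped' yields (name, value),
    # which is what dict(...) expects in the snippet above.
    class FakeParameter(object):
        def __init__(self, name, value):
            self.name = name
            self.value = value

        @property
        def unwrapped(self):
            return self.name, self.value

    execution_inputs = {'limit': FakeParameter('limit', 10)}
    execution_inputs_dict = dict(inp.unwrapped for inp in execution_inputs.values())

    def my_workflow(ctx, limit):
        # A real workflow function would build and return a task graph here.
        return 'graph limited to {0} tasks'.format(limit)

    print(my_workflow(ctx=None, **execution_inputs_dict))   # graph limited to 10 tasks
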
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/orchestrator/workflows/api/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task.py b/aria/orchestrator/workflows/api/task.py
index cb79eb3..feacaf4 100644
--- a/aria/orchestrator/workflows/api/task.py
+++ b/aria/orchestrator/workflows/api/task.py
@@ -55,7 +55,28 @@ class BaseTask(object):
 
 class OperationTask(BaseTask):
     """
-    Represents an operation task in the task graph
+    Represents an operation task in the task graph.
+
+    :ivar name: formatted name (includes actor type, actor name, and interface/operation names)
+    :vartype name: basestring
+    :ivar actor: node or relationship
+    :vartype actor: :class:`aria.modeling.models.Node`|:class:`aria.modeling.models.Relationship`
+    :ivar interface_name: interface name on actor
+    :vartype interface_name: basestring
+    :ivar operation_name: operation name on interface
+    :vartype operation_name: basestring
+    :ivar plugin: plugin (or None for default plugin)
+    :vartype plugin: :class:`aria.modeling.models.Plugin`
+    :ivar function: path to Python function
+    :vartype function: basestring
+    :ivar arguments: arguments to send to Python function
+    :vartype arguments: {basestring, :class:`aria.modeling.models.Parameter`}
+    :ivar ignore_failure: whether to ignore failures
+    :vartype ignore_failure: bool
+    :ivar max_attempts: maximum number of attempts allowed in case of failure
+    :vartype max_attempts: int
+    :ivar retry_interval: interval between retries (in seconds)
+    :vartype retry_interval: int
     """
 
     NAME_FORMAT = '{interface}:{operation}@{type}:{name}'
@@ -64,43 +85,61 @@ class OperationTask(BaseTask):
                  actor,
                  interface_name,
                  operation_name,
-                 inputs=None,
+                 arguments=None,
+                 ignore_failure=None,
                  max_attempts=None,
-                 retry_interval=None,
-                 ignore_failure=None):
+                 retry_interval=None):
         """
-        Do not call this constructor directly. Instead, use :meth:`for_node` or
-        :meth:`for_relationship`.
+        :param actor: node or relationship
+        :type actor: :class:`aria.modeling.models.Node`|:class:`aria.modeling.models.Relationship`
+        :param interface_name: interface name on actor
+        :type interface_name: basestring
+        :param operation_name: operation name on interface
+        :type operation_name: basestring
+        :param arguments: override argument values
+        :type arguments: {basestring, object}
+        :param ignore_failure: override whether to ignore failures
+        :type ignore_failure: bool
+        :param max_attempts: override maximum number of attempts allowed in case of failure
+        :type max_attempts: int
+        :param retry_interval: override interval between retries (in seconds)
+        :type retry_interval: int
+        :raises aria.orchestrator.workflows.exceptions.OperationNotFoundException: if
+                ``interface_name`` and ``operation_name`` do not refer to an operation on the actor
         """
+
         assert isinstance(actor, (models.Node, models.Relationship))
-        super(OperationTask, self).__init__()
-        self.actor = actor
-        self.interface_name = interface_name
-        self.operation_name = operation_name
-        self.max_attempts = max_attempts or self.workflow_context._task_max_attempts
-        self.retry_interval = retry_interval or self.workflow_context._task_retry_interval
-        self.ignore_failure = \
-            self.workflow_context._task_ignore_failure if ignore_failure is None else ignore_failure
-        self.name = OperationTask.NAME_FORMAT.format(type=type(actor).__name__.lower(),
-                                                     name=actor.name,
-                                                     interface=self.interface_name,
-                                                     operation=self.operation_name)
+
         # Creating OperationTask directly should raise an error when there is no
         # interface/operation.
-
-        if not has_operation(self.actor, self.interface_name, self.operation_name):
+        if not has_operation(actor, interface_name, operation_name):
             raise exceptions.OperationNotFoundException(
-                'Could not find operation "{self.operation_name}" on interface '
-                '"{self.interface_name}" for {actor_type} "{actor.name}"'.format(
-                    self=self,
+                'Could not find operation "{operation_name}" on interface '
+                '"{interface_name}" for {actor_type} "{actor.name}"'.format(
+                    operation_name=operation_name,
+                    interface_name=interface_name,
                     actor_type=type(actor).__name__.lower(),
                     actor=actor)
             )
 
+        super(OperationTask, self).__init__()
+
+        self.name = OperationTask.NAME_FORMAT.format(type=type(actor).__name__.lower(),
+                                                     name=actor.name,
+                                                     interface=interface_name,
+                                                     operation=operation_name)
+        self.actor = actor
+        self.interface_name = interface_name
+        self.operation_name = operation_name
+        self.ignore_failure = \
+            self.workflow_context._task_ignore_failure if ignore_failure is None else ignore_failure
+        self.max_attempts = max_attempts or self.workflow_context._task_max_attempts
+        self.retry_interval = retry_interval or self.workflow_context._task_retry_interval
+
         operation = self.actor.interfaces[self.interface_name].operations[self.operation_name]
         self.plugin = operation.plugin
-        self.inputs = modeling_utils.create_inputs(inputs or {}, operation.inputs)
-        self.implementation = operation.implementation
+        self.function = operation.function
+        self.arguments = modeling_utils.merge_parameter_values(arguments, operation.arguments)
 
     def __repr__(self):
         return self.name

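A hypothetical snippet showing the reworked constructor inside a workflow: the keyword names follow this diff, but the 'Standard'/'configure' interface and operation names, the argument values, and the graph wiring are assumptions for illustration.

    # Hypothetical usage; assumes a node whose 'Standard' interface declares
    # a 'configure' operation, and a task graph exposing add_tasks().
    from aria.orchestrator.workflows.api.task import OperationTask

    def add_configure_task(graph, node):
        task = OperationTask(
            node,
            interface_name='Standard',
            operation_name='configure',
            arguments={'port': 8080},   # overrides the operation's declared arguments
            max_attempts=3,
            retry_interval=10)
        graph.add_tasks(task)   # assumed graph API, as used by the built-in workflows
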
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/orchestrator/workflows/builtin/execute_operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/execute_operation.py b/aria/orchestrator/workflows/builtin/execute_operation.py
index 02a654a..437e584 100644
--- a/aria/orchestrator/workflows/builtin/execute_operation.py
+++ b/aria/orchestrator/workflows/builtin/execute_operation.py
@@ -69,7 +69,7 @@ def execute_operation(
                 node,
                 interface_name=interface_name,
                 operation_name=operation_name,
-                inputs=operation_kwargs
+                arguments=operation_kwargs
             )
         )
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py
index b3dfb3c..72d83ea 100644
--- a/aria/orchestrator/workflows/core/task.py
+++ b/aria/orchestrator/workflows/core/task.py
@@ -146,9 +146,8 @@ class OperationTask(BaseTask):
 
             # Only non-stub tasks have these fields
             plugin=api_task.plugin,
-            implementation=api_task.implementation,
-            inputs=api_task.inputs
-
+            function=api_task.function,
+            arguments=api_task.arguments
         )
         self._workflow_context.model.task.put(task_model)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/orchestrator/workflows/events_logging.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/events_logging.py b/aria/orchestrator/workflows/events_logging.py
index 236a55f..036c1f7 100644
--- a/aria/orchestrator/workflows/events_logging.py
+++ b/aria/orchestrator/workflows/events_logging.py
@@ -35,8 +35,8 @@ def _get_task_name(task):
 
 @events.start_task_signal.connect
 def _start_task_handler(task, **kwargs):
-    # If the task has not implementation this is an empty task.
-    if task.implementation:
+    # If the task has no function this is an empty task.
+    if task.function:
         suffix = 'started...'
         logger = task.context.logger.info
     else:
@@ -48,7 +48,7 @@ def _start_task_handler(task, **kwargs):
 
 @events.on_success_task_signal.connect
 def _success_task_handler(task, **kwargs):
-    if not task.implementation:
+    if not task.function:
         return
     task.context.logger.info('{name} {task.interface_name}.{task.operation_name} successful'
                              .format(name=_get_task_name(task), task=task))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/orchestrator/workflows/executor/base.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/base.py b/aria/orchestrator/workflows/executor/base.py
index c543278..7fece6f 100644
--- a/aria/orchestrator/workflows/executor/base.py
+++ b/aria/orchestrator/workflows/executor/base.py
@@ -33,10 +33,10 @@ class BaseExecutor(logger.LoggerMixin):
         Execute a task
         :param task: task to execute
         """
-        if task.implementation:
+        if task.function:
             self._execute(task)
         else:
-            # In this case the task is missing an implementation. This task still gets to an
+            # In this case the task is missing a function. This task still gets to an
             # executor, but since there is nothing to run, we by default simply skip the execution
             # itself.
             self._task_started(task)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/orchestrator/workflows/executor/celery.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/celery.py b/aria/orchestrator/workflows/executor/celery.py
index bbddc25..9d66d26 100644
--- a/aria/orchestrator/workflows/executor/celery.py
+++ b/aria/orchestrator/workflows/executor/celery.py
@@ -44,11 +44,11 @@ class CeleryExecutor(BaseExecutor):
 
     def _execute(self, task):
         self._tasks[task.id] = task
-        inputs = dict(inp.unwrap() for inp in task.inputs.values())
-        inputs['ctx'] = task.context
+        arguments = dict(arg.unwrapped for arg in task.arguments.values())
+        arguments['ctx'] = task.context
         self._results[task.id] = self._app.send_task(
             task.operation_mapping,
-            kwargs=inputs,
+            kwargs=arguments,
             task_id=task.id,
             queue=self._get_queue(task))
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/orchestrator/workflows/executor/dry.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/dry.py b/aria/orchestrator/workflows/executor/dry.py
index 63ec392..8848df8 100644
--- a/aria/orchestrator/workflows/executor/dry.py
+++ b/aria/orchestrator/workflows/executor/dry.py
@@ -33,7 +33,7 @@ class DryExecutor(BaseExecutor):
             task.status = task.STARTED
 
         dry_msg = '<dry> {name} {task.interface_name}.{task.operation_name} {suffix}'
-        logger = task.context.logger.info if task.implementation else task.context.logger.debug
+        logger = task.context.logger.info if task.function else task.context.logger.debug
 
         if hasattr(task.actor, 'source_node'):
             name = '{source_node.name}->{target_node.name}'.format(
@@ -41,11 +41,11 @@ class DryExecutor(BaseExecutor):
         else:
             name = task.actor.name
 
-        if task.implementation:
+        if task.function:
             logger(dry_msg.format(name=name, task=task, suffix='started...'))
             logger(dry_msg.format(name=name, task=task, suffix='successful'))
         else:
-            logger(dry_msg.format(name=name, task=task, suffix='has no implementation'))
+            logger(dry_msg.format(name=name, task=task, suffix='has no function'))
 
         # updating the task manually instead of calling self._task_succeeded(task),
         # to avoid any side effects raising that event might cause

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/orchestrator/workflows/executor/process.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py
index f02e0a6..634f1f2 100644
--- a/aria/orchestrator/workflows/executor/process.py
+++ b/aria/orchestrator/workflows/executor/process.py
@@ -140,8 +140,8 @@ class ProcessExecutor(base.BaseExecutor):
     def _create_arguments_dict(self, task):
         return {
             'task_id': task.id,
-            'implementation': task.implementation,
-            'operation_inputs': dict(inp.unwrap() for inp in task.inputs.values()),
+            'function': task.function,
+            'operation_arguments': dict(arg.unwrapped for arg in task.arguments.values()),
             'port': self._server_port,
             'context': task.context.serialization_dict,
         }
@@ -290,8 +290,8 @@ def _main():
     port = arguments['port']
     messenger = _Messenger(task_id=task_id, port=port)
 
-    implementation = arguments['implementation']
-    operation_inputs = arguments['operation_inputs']
+    function = arguments['function']
+    operation_arguments = arguments['operation_arguments']
     context_dict = arguments['context']
 
     try:
@@ -302,11 +302,11 @@ def _main():
 
     try:
         messenger.started()
-        task_func = imports.load_attribute(implementation)
+        task_func = imports.load_attribute(function)
         aria.install_aria_extensions()
         for decorate in process_executor.decorate():
             task_func = decorate(task_func)
-        task_func(ctx=ctx, **operation_inputs)
+        task_func(ctx=ctx, **operation_arguments)
         ctx.close()
         messenger.succeeded()
     except BaseException as e:

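The ``_main`` changes keep the same load-and-invoke pattern, now keyed on ``function`` and ``operation_arguments``. A simplified stand-in for the project's ``imports.load_attribute`` helper (the real helper is not reproduced here):

    # Simplified stand-in: resolve a dotted path to a callable, then invoke
    # it with the task's arguments (ctx omitted since the demo target is a
    # plain standard-library function).
    import importlib

    def load_attribute(dotted_path):
        module_name, _, attribute_name = dotted_path.rpartition('.')
        module = importlib.import_module(module_name)
        return getattr(module, attribute_name)

    task_func = load_attribute('os.path.join')
    operation_arguments = {'a': 'tmp', 'p': 'work'}   # would come from task.arguments
    print(task_func(operation_arguments['a'], operation_arguments['p']))   # tmp/work on POSIX
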
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/orchestrator/workflows/executor/thread.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/thread.py b/aria/orchestrator/workflows/executor/thread.py
index f53362a..56a56a5 100644
--- a/aria/orchestrator/workflows/executor/thread.py
+++ b/aria/orchestrator/workflows/executor/thread.py
@@ -60,9 +60,9 @@ class ThreadExecutor(BaseExecutor):
                 task = self._queue.get(timeout=1)
                 self._task_started(task)
                 try:
-                    task_func = imports.load_attribute(task.implementation)
-                    inputs = dict(inp.unwrap() for inp in task.inputs.values())
-                    task_func(ctx=task.context, **inputs)
+                    task_func = imports.load_attribute(task.function)
+                    arguments = dict(arg.unwrapped for arg in task.arguments.values())
+                    task_func(ctx=task.context, **arguments)
                     self._task_succeeded(task)
                 except BaseException as e:
                     self._task_failed(task,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/utils/formatting.py
----------------------------------------------------------------------
diff --git a/aria/utils/formatting.py b/aria/utils/formatting.py
index f96a4ce..b8d24cd 100644
--- a/aria/utils/formatting.py
+++ b/aria/utils/formatting.py
@@ -124,7 +124,9 @@ def string_list_as_string(strings):
     Nice representation of a list of strings.
     """
 
-    return ', '.join('"%s"' % safe_str(v) for v in strings)
+    if not strings:
+        return 'none'
+    return ', '.join('"{0}"'.format(safe_str(v)) for v in strings)
 
 
 def pluralize(noun):

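Equivalent standalone behaviour of the revised helper, for quick reference (``safe_str`` omitted):

    # Same behaviour as the revised helper above, with safe_str left out.
    def string_list_as_string(strings):
        if not strings:
            return 'none'
        return ', '.join('"{0}"'.format(v) for v in strings)

    print(string_list_as_string(['ctx', 'graph']))   # "ctx", "graph"
    print(string_list_as_string([]))                 # none
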
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/aria/utils/validation.py
----------------------------------------------------------------------
diff --git a/aria/utils/validation.py b/aria/utils/validation.py
index a33f7a2..193cb33 100644
--- a/aria/utils/validation.py
+++ b/aria/utils/validation.py
@@ -17,6 +17,8 @@
 Contains validation related utilities
 """
 
+from .formatting import string_list_as_string
+
 
 class ValidatorMixin(object):
     """
@@ -82,8 +84,8 @@ def validate_function_arguments(func, func_kwargs):
     for arg in non_default_args:
         if arg not in func_kwargs:
             raise ValueError(
-                "The argument '{arg}' doest not have a default value, and it "
-                "isn't passed to {func.__name__}".format(arg=arg, func=func))
+                'The argument "{arg}" is not provided and does not have a default value for '
+                'function "{func.__name__}"'.format(arg=arg, func=func))
 
     # check if there are any extra kwargs
     extra_kwargs = [arg for arg in func_kwargs.keys() if arg not in args]
@@ -91,5 +93,5 @@ def validate_function_arguments(func, func_kwargs):
     # assert that the function has kwargs
     if extra_kwargs and not has_kwargs:
         raise ValueError("The following extra kwargs were supplied: {extra_kwargs}".format(
-            extra_kwargs=extra_kwargs
+            extra_kwargs=string_list_as_string(extra_kwargs)
         ))

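Based on the fragments above, validate_function_arguments now reports missing non-default arguments and unexpected kwargs with clearer wording. An illustrative reimplementation of that behaviour (not the module's code), using Python 2-style getargspec to match the codebase:

    # Illustrative reimplementation of the behaviour above (not the real code).
    import inspect

    def validate_function_arguments(func, func_kwargs):
        spec = inspect.getargspec(func)   # Python 2 style, matching the codebase
        defaults = spec.defaults or ()
        non_default_args = spec.args[:len(spec.args) - len(defaults)]

        for arg in non_default_args:
            if arg not in func_kwargs:
                raise ValueError(
                    'The argument "{arg}" is not provided and does not have a default value for '
                    'function "{func.__name__}"'.format(arg=arg, func=func))

        extra_kwargs = [arg for arg in func_kwargs if arg not in spec.args]
        if extra_kwargs and not spec.keywords:
            raise ValueError('The following extra kwargs were supplied: {0}'.format(
                ', '.join('"{0}"'.format(k) for k in extra_kwargs)))

    def install(ctx, package, retries=3):
        pass

    validate_function_arguments(install, {'ctx': None, 'package': 'nginx'})   # passes
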
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml b/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
index 0c5e77f..c1dc11d 100644
--- a/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
+++ b/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
@@ -59,11 +59,3 @@ policy_types:
           service topology.
         type: string
         required: true
-      dependencies:
-        description: >-
-          The optional ordered list of one or more dependent or secondary implementation artifact
-          name which are referenced by the primary implementation artifact (e.g., a library the
-          script installs or a secondary script).
-        type: list
-        entry_schema: string
-        required: false

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/extensions/aria_extension_tosca/simple_v1_0/assignments.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/assignments.py b/extensions/aria_extension_tosca/simple_v1_0/assignments.py
index d929ce0..79f6377 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/assignments.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/assignments.py
@@ -23,7 +23,7 @@ from aria.parser.presentation import (AsIsPresentation, has_fields, allow_unknow
 
 from .filters import NodeFilter
 from .misc import Description, OperationImplementation
-from .modeling.properties import get_assigned_and_defined_property_values
+from .modeling.parameters import get_assigned_and_defined_parameter_values
 from .presentation.extensible import ExtensiblePresentation
 from .presentation.field_validators import (node_template_or_type_validator,
                                             relationship_template_or_type_validator,
@@ -428,7 +428,7 @@ class ArtifactAssignment(ExtensiblePresentation):
 
     @cachedmethod
     def _get_property_values(self, context):
-        return FrozenDict(get_assigned_and_defined_property_values(context, self))
+        return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'property'))
 
     @cachedmethod
     def _validate(self, context):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
index 99389e4..b5b09e7 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
@@ -26,14 +26,19 @@ import re
 from types import FunctionType
 from datetime import datetime
 
+from ruamel import yaml
+
 from aria.parser.validation import Issue
-from aria.utils.collections import StrictDict
+from aria.utils.formatting import string_list_as_string
+from aria.utils.collections import (StrictDict, OrderedDict)
+from aria.orchestrator import WORKFLOW_DECORATOR_RESERVED_ARGUMENTS
 from aria.modeling.models import (Type, ServiceTemplate, NodeTemplate,
                                   RequirementTemplate, RelationshipTemplate, CapabilityTemplate,
                                   GroupTemplate, PolicyTemplate, SubstitutionTemplate,
                                   SubstitutionTemplateMapping, InterfaceTemplate, OperationTemplate,
                                   ArtifactTemplate, Metadata, Parameter, PluginSpecification)
 
+from .parameters import coerce_parameter_value
 from .constraints import (Equal, GreaterThan, GreaterOrEqual, LessThan, LessOrEqual, InRange,
                           ValidValues, Length, MinLength, MaxLength, Pattern)
 from ..data_types import coerce_value
@@ -375,7 +380,7 @@ def create_operation_template_model(context, service_template, operation):
     implementation = operation.implementation
     if implementation is not None:
         primary = implementation.primary
-        parse_implementation_string(context, service_template, operation, model, primary)
+        extract_implementation_primary(context, service_template, operation, model, primary)
         relationship_edge = operation._get_extensions(context).get('relationship_edge')
         if relationship_edge is not None:
             if relationship_edge == 'source':
@@ -384,18 +389,39 @@ def create_operation_template_model(context, service_template, operation):
                 model.relationship_edge = True
 
         dependencies = implementation.dependencies
+        configuration = OrderedDict()
         if dependencies:
             for dependency in dependencies:
                 key, value = split_prefix(dependency)
                 if key is not None:
-                    if model.configuration is None:
-                        model.configuration = {}
-                    set_nested(model.configuration, key.split('.'), value)
+                    # Special ARIA prefix: signifies configuration parameters
+
+                    # Parse as YAML
+                    try:
+                        value = yaml.load(value)
+                    except yaml.parser.MarkedYAMLError as e:
+                        context.validation.report(
+                            'YAML parser {0} in operation configuration: {1}'
+                            .format(e.problem, value),
+                            locator=implementation._locator,
+                            level=Issue.FIELD)
+                        continue
+
+                    # Coerce to intrinsic functions, if there are any
+                    value = coerce_parameter_value(context, implementation, None, value).value
+
+                    # Support dot-notation nesting
+                    set_nested(configuration, key.split('.'), value)
                 else:
                     if model.dependencies is None:
                         model.dependencies = []
                     model.dependencies.append(dependency)
 
+        # Convert configuration to Parameter models
+        for key, value in configuration.iteritems():
+            model.configuration[key] = Parameter.wrap(key, value,
+                                                      description='Operation configuration.')
+
     inputs = operation.inputs
     if inputs:
         for input_name, the_input in inputs.iteritems():
@@ -491,15 +517,23 @@ def create_workflow_operation_template_model(context, service_template, policy):
     properties = policy._get_property_values(context)
     for prop_name, prop in properties.iteritems():
         if prop_name == 'implementation':
-            parse_implementation_string(context, service_template, policy, model, prop.value)
-        elif prop_name == 'dependencies':
-            model.dependencies = prop.value
+            model.function = prop.value
         else:
             model.inputs[prop_name] = Parameter(name=prop_name, # pylint: disable=unexpected-keyword-arg
                                                 type_name=prop.type,
                                                 value=prop.value,
                                                 description=prop.description)
 
+    used_reserved_names = \
+        list(WORKFLOW_DECORATOR_RESERVED_ARGUMENTS.intersection(model.inputs.keys()))
+    if used_reserved_names:
+        context.validation.report('using reserved arguments in workflow policy "{0}": {1}'
+                                  .format(
+                                      policy._name,
+                                      string_list_as_string(used_reserved_names)),
+                                  locator=policy._locator,
+                                  level=Issue.EXTERNAL)
+
     return model
 
 
@@ -639,13 +673,13 @@ def create_constraint(context, node_filter, constraint_clause, property_name, ca
 
 def split_prefix(string):
     """
-    Splits the prefix on the first unescaped ">".
+    Splits the prefix on the first non-escaped ">".
     """
 
-    split = IMPLEMENTATION_PREFIX_REGEX.split(string, 2)
+    split = IMPLEMENTATION_PREFIX_REGEX.split(string, 1)
     if len(split) < 2:
-        return None, string
-    return split[0].strip(), split[1].lstrip()
+        return None, None
+    return split[0].strip(), split[1].strip()
 
 
 def set_nested(the_dict, keys, value):
@@ -671,13 +705,18 @@ def set_nested(the_dict, keys, value):
         set_nested(the_dict[key], keys, value)
 
 
-def parse_implementation_string(context, service_template, presentation, model, implementation):
-    plugin_name, model.implementation = split_prefix(implementation)
-    if plugin_name is not None:
-        model.plugin_specification = service_template.plugin_specifications.get(plugin_name)
+def extract_implementation_primary(context, service_template, presentation, model, primary):
+    prefix, postfix = split_prefix(primary)
+    if prefix:
+        # Special ARIA prefix
+        model.plugin_specification = service_template.plugin_specifications.get(prefix)
+        model.function = postfix
         if model.plugin_specification is None:
             context.validation.report(
                 'no policy for plugin "{0}" specified in operation implementation: {1}'
-                .format(plugin_name, implementation),
+                .format(prefix, primary),
                 locator=presentation._get_child_locator('properties', 'implementation'),
                 level=Issue.BETWEEN_TYPES)
+    else:
+        # Standard TOSCA artifact with default plugin
+        model.implementation = primary

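To make the new dependencies handling concrete: a configuration dependency such as ``ssh.user > admin`` (an example string, not taken from the codebase) is split on the first non-escaped ">", parsed as YAML, coerced for intrinsic functions, and nested by dot notation. A standalone approximation of the splitting and nesting only; the real IMPLEMENTATION_PREFIX_REGEX and set_nested are richer than shown here, and the YAML/coercion steps are omitted.

    # Approximation of the '>' prefix splitting and dot-notation nesting
    # described above; the lookbehind skips escaped '>' characters.
    import re

    PREFIX_REGEX = re.compile(r'(?<!\\)>')

    def split_prefix(string):
        split = PREFIX_REGEX.split(string, 1)
        if len(split) < 2:
            return None, None
        return split[0].strip(), split[1].strip()

    def set_nested(the_dict, keys, value):
        # Simplified: the real helper also copes with pre-existing non-dict values.
        key = keys.pop(0)
        if not keys:
            the_dict[key] = value
        else:
            the_dict.setdefault(key, {})
            set_nested(the_dict[key], keys, value)

    configuration = {}
    key, value = split_prefix('ssh.user > admin')   # example dependency string
    if key is not None:
        set_nested(configuration, key.split('.'), value)
    print(configuration)                            # {'ssh': {'user': 'admin'}}
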
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b602f145/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py
index 4f61ef5..dd9eeb4 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py
@@ -15,11 +15,11 @@
 
 from aria.utils.collections import OrderedDict
 
+
 #
 # NodeType, NodeTemplate
 #
 
-
 def get_inherited_artifact_definitions(context, presentation, for_presentation=None):
 
     if hasattr(presentation, '_get_type'):