You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@ariatosca.apache.org by em...@apache.org on 2017/05/30 18:03:16 UTC
[1/5] incubator-ariatosca git commit: ARIA-268 Add NOTICE file
[Forced Update!]
Repository: incubator-ariatosca
Updated Branches:
refs/heads/ARIA-149-functions-in-operation-configuration dc2d69c98 -> fdec01ab0 (forced update)
ARIA-268 Add NOTICE file
Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/07d79513
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/07d79513
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/07d79513
Branch: refs/heads/ARIA-149-functions-in-operation-configuration
Commit: 07d79513a4ac41ba66d84f2922a17fd0ab7ec39c
Parents: adf7607
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Mon May 29 17:11:04 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Mon May 29 17:11:04 2017 +0300
----------------------------------------------------------------------
NOTICE | 5 +++++
1 file changed, 5 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/07d79513/NOTICE
----------------------------------------------------------------------
diff --git a/NOTICE b/NOTICE
new file mode 100644
index 0000000..bf03ab5
--- /dev/null
+++ b/NOTICE
@@ -0,0 +1,5 @@
+Apache AriaTosca
+Copyright 2016-2017 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
\ No newline at end of file
[3/5] incubator-ariatosca git commit: ARIA-149 Enhance operation
configuration
Posted by em...@apache.org.
ARIA-149 Enhance operation configuration
* Parse special "dependencies" configuration parameters as YAML and
treat as Parameter models, allowing them full use of intrinsic
functions, type coercions, and validations
* Rename various functions that process "properties" to more generically
process "parameters" (properties, inputs, attributes, arguments, etc.)
* The "configuration" field in OperationTemplate and Operation models
is now a dict of Parameter models
* Added "function" and "arguments" fields to Operation model to preserve
user data (in "implementation" and "inputs") and to clearly demarcate
orchestration data from user data
* Some cleanup of parser code touched by this commit
Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/8fe7f4b1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/8fe7f4b1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/8fe7f4b1
Branch: refs/heads/ARIA-149-functions-in-operation-configuration
Commit: 8fe7f4b1c89c44793a0db6cdb1e78c2722934334
Parents: 07d7951
Author: Tal Liron <ta...@gmail.com>
Authored: Thu Apr 20 17:54:47 2017 -0500
Committer: Tal Liron <ta...@gmail.com>
Committed: Tue May 30 13:03:04 2017 -0500
----------------------------------------------------------------------
aria/cli/commands/services.py | 2 +-
aria/core.py | 14 +-
aria/modeling/constraints.py | 28 +++
aria/modeling/contraints.py | 28 ---
aria/modeling/exceptions.py | 16 +-
aria/modeling/service_common.py | 4 +
aria/modeling/service_instance.py | 69 +++---
aria/modeling/service_template.py | 54 ++---
aria/modeling/utils.py | 105 ++++-----
.../execution_plugin/instantiation.py | 133 +++++++-----
aria/orchestrator/workflow_runner.py | 2 +-
aria/orchestrator/workflows/api/task.py | 4 +-
aria/orchestrator/workflows/core/task.py | 1 -
.../profiles/aria-1.0/aria-1.0.yaml | 16 +-
.../simple_v1_0/assignments.py | 4 +-
.../simple_v1_0/modeling/__init__.py | 65 ++++--
.../simple_v1_0/modeling/artifacts.py | 2 +-
.../simple_v1_0/modeling/capabilities.py | 24 ++-
.../simple_v1_0/modeling/constraints.py | 2 +-
.../simple_v1_0/modeling/data_types.py | 16 ++
.../simple_v1_0/modeling/interfaces.py | 34 ++-
.../simple_v1_0/modeling/parameters.py | 211 +++++++++++++++++++
.../simple_v1_0/modeling/policies.py | 2 +
.../simple_v1_0/modeling/properties.py | 202 ------------------
.../simple_v1_0/modeling/requirements.py | 20 +-
.../modeling/substitution_mappings.py | 4 +
.../simple_v1_0/templates.py | 13 +-
.../aria_extension_tosca/simple_v1_0/types.py | 24 +--
tests/cli/test_services.py | 14 +-
tests/mock/models.py | 10 +-
tests/mock/topology.py | 12 +-
tests/orchestrator/context/test_operation.py | 53 +++--
tests/orchestrator/context/test_serialize.py | 2 +-
tests/orchestrator/context/test_toolbelt.py | 14 +-
.../orchestrator/execution_plugin/test_local.py | 10 +-
tests/orchestrator/execution_plugin/test_ssh.py | 12 +-
tests/orchestrator/test_workflow_runner.py | 8 +-
tests/orchestrator/workflows/api/test_task.py | 26 +--
.../workflows/builtin/test_execute_operation.py | 2 +-
.../orchestrator/workflows/core/test_engine.py | 6 +-
.../orchestrator/workflows/core/test_events.py | 3 +-
tests/orchestrator/workflows/core/test_task.py | 6 +-
.../test_task_graph_into_execution_graph.py | 2 +-
...process_executor_concurrent_modifications.py | 10 +-
.../executor/test_process_executor_extension.py | 13 +-
.../test_process_executor_tracked_changes.py | 8 +-
.../node-cellar/node-cellar.yaml | 24 ++-
47 files changed, 737 insertions(+), 597 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/aria/cli/commands/services.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/services.py b/aria/cli/commands/services.py
index 24de7c5..476387c 100644
--- a/aria/cli/commands/services.py
+++ b/aria/cli/commands/services.py
@@ -151,7 +151,7 @@ def create(service_template_name,
except storage_exceptions.StorageError as e:
utils.check_overriding_storage_exceptions(e, 'service', service_name)
raise
- except modeling_exceptions.InputsException:
+ except modeling_exceptions.ParameterException:
service_templates.print_service_template_inputs(model_storage, service_template_name,
logger)
raise
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/aria/core.py
----------------------------------------------------------------------
diff --git a/aria/core.py b/aria/core.py
index cc943ef..f660167 100644
--- a/aria/core.py
+++ b/aria/core.py
@@ -56,7 +56,8 @@ class Core(object):
service_template = self.model_storage.service_template.get(service_template_id)
if service_template.services:
raise exceptions.DependentServicesError(
- "Can't delete service template {0} - Service template has existing services")
+ 'Can\'t delete service template `{0}` - service template has existing services'
+ .format(service_template.name))
self.model_storage.service_template.delete(service_template)
self.resource_storage.service_template.delete(entry_id=str(service_template.id))
@@ -87,7 +88,8 @@ class Core(object):
consumption.CoerceServiceInstanceValues
)).consume()
if context.validation.dump_issues():
- raise exceptions.InstantiationError('Failed to instantiate service template')
+ raise exceptions.InstantiationError('Failed to instantiate service template `{0}`'
+ .format(service_template.name))
storage_session.flush() # flushing so service.id would auto-populate
service.name = service_name or '{0}_{1}'.format(service_template.name, service.id)
@@ -100,15 +102,15 @@ class Core(object):
active_executions = [e for e in service.executions if e.is_active()]
if active_executions:
raise exceptions.DependentActiveExecutionsError(
- "Can't delete service {0} - there is an active execution for this service. "
- "Active execution id: {1}".format(service.name, active_executions[0].id))
+ 'Can\'t delete service `{0}` - there is an active execution for this service. '
+ 'Active execution ID: {1}'.format(service.name, active_executions[0].id))
if not force:
available_nodes = [str(n.id) for n in service.nodes.values() if n.is_available()]
if available_nodes:
raise exceptions.DependentAvailableNodesError(
- "Can't delete service {0} - there are available nodes for this service. "
- "Available node ids: {1}".format(service.name, ', '.join(available_nodes)))
+ 'Can\'t delete service `{0}` - there are available nodes for this service. '
+ 'Available node IDs: {1}'.format(service.name, ', '.join(available_nodes)))
self.model_storage.service.delete(service)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/aria/modeling/constraints.py
----------------------------------------------------------------------
diff --git a/aria/modeling/constraints.py b/aria/modeling/constraints.py
new file mode 100644
index 0000000..107b010
--- /dev/null
+++ b/aria/modeling/constraints.py
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class NodeTemplateConstraint(object):
+ """
+ Used to constrain requirements for node templates.
+
+ Must be serializable.
+ """
+
+ def matches(self, source_node_template, target_node_template):
+ """
+ Returns true is the target matches the constraint for the source.
+ """
+ raise NotImplementedError
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/aria/modeling/contraints.py
----------------------------------------------------------------------
diff --git a/aria/modeling/contraints.py b/aria/modeling/contraints.py
deleted file mode 100644
index 107b010..0000000
--- a/aria/modeling/contraints.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class NodeTemplateConstraint(object):
- """
- Used to constrain requirements for node templates.
-
- Must be serializable.
- """
-
- def matches(self, source_node_template, target_node_template):
- """
- Returns true is the target matches the constraint for the source.
- """
- raise NotImplementedError
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/aria/modeling/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/modeling/exceptions.py b/aria/modeling/exceptions.py
index 19fd942..d0e3e22 100644
--- a/aria/modeling/exceptions.py
+++ b/aria/modeling/exceptions.py
@@ -22,9 +22,9 @@ class ModelingException(AriaException):
"""
-class InputsException(ModelingException):
+class ParameterException(ModelingException):
"""
- ARIA inputs exception.
+ ARIA parameter exception.
"""
pass
@@ -41,19 +41,19 @@ class CannotEvaluateFunctionException(ModelingException):
"""
-class MissingRequiredInputsException(InputsException):
+class MissingRequiredParametersException(ParameterException):
"""
- ARIA modeling exception: Required inputs have been omitted.
+ ARIA modeling exception: Required parameters have been omitted.
"""
-class InputsOfWrongTypeException(InputsException):
+class ParametersOfWrongTypeException(ParameterException):
"""
- ARIA modeling exception: Inputs of the wrong types have been provided.
+ ARIA modeling exception: Parameters of the wrong types have been provided.
"""
-class UndeclaredInputsException(InputsException):
+class UndeclaredParametersException(ParameterException):
"""
- ARIA modeling exception: Undeclared inputs have been provided.
+ ARIA modeling exception: Undeclared parameters have been provided.
"""
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/aria/modeling/service_common.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_common.py b/aria/modeling/service_common.py
index ef19c8e..3bae490 100644
--- a/aria/modeling/service_common.py
+++ b/aria/modeling/service_common.py
@@ -211,6 +211,10 @@ class ParameterBase(TemplateModelMixin, caching.HasCachedMethods):
"""
Wraps an arbitrary value as a parameter. The type will be guessed via introspection.
+ For primitive types, we will prefer their TOSCA aliases. See the `TOSCA Simple Profile v1.0
+ cos01 specification <http://docs.oasis-open.org/tosca/TOSCA-Simple-Profile-YAML/v1.0/cos01
+ /TOSCA-Simple-Profile-YAML-v1.0-cos01.html#_Toc373867862>`__
+
:param name: Parameter name
:type name: basestring
:param value: Parameter value
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/aria/modeling/service_instance.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py
index 7058969..31f7212 100644
--- a/aria/modeling/service_instance.py
+++ b/aria/modeling/service_instance.py
@@ -1631,20 +1631,24 @@ class OperationBase(InstanceModelMixin):
:vartype operation_template: :class:`OperationTemplate`
:ivar description: Human-readable description
:vartype description: string
- :ivar plugin: Associated plugin
- :vartype plugin: :class:`Plugin`
:ivar relationship_edge: When true specified that the operation is on the relationship's
target edge instead of its source (only used by relationship
operations)
:vartype relationship_edge: bool
:ivar implementation: Implementation (interpreted by the plugin)
:vartype implementation: basestring
- :ivar configuration: Configuration (interpreted by the plugin)
- :vartype configuration: {basestring, object}
:ivar dependencies: Dependency strings (interpreted by the plugin)
:vartype dependencies: [basestring]
:ivar inputs: Parameters that can be used by this operation
:vartype inputs: {basestring: :class:`Parameter`}
+ :ivar plugin: Associated plugin
+ :vartype plugin: :class:`Plugin`
+ :ivar configuration: Configuration (interpreted by the plugin)
+ :vartype configuration: {basestring, :class:`Parameter`}
+ :ivar function: Name of the operation function
+ :vartype function: basestring
+ :ivar arguments: Arguments to send to the operation function
+ :vartype arguments: {basestring: :class:`Parameter`}
:ivar executor: Name of executor to run the operation with
:vartype executor: basestring
:ivar max_attempts: Maximum number of attempts allowed in case of failure
@@ -1726,34 +1730,41 @@ class OperationBase(InstanceModelMixin):
def inputs(cls):
return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+ @declared_attr
+ def configuration(cls):
+ return relationship.many_to_many(cls, 'parameter', prefix='configuration', dict_key='name')
+
+ @declared_attr
+ def arguments(cls):
+ return relationship.many_to_many(cls, 'parameter', prefix='arguments', dict_key='name')
+
# endregion
description = Column(Text)
relationship_edge = Column(Boolean)
implementation = Column(Text)
- configuration = Column(modeling_types.StrictDict(key_cls=basestring))
dependencies = Column(modeling_types.StrictList(item_cls=basestring))
+ function = Column(Text)
executor = Column(Text)
max_attempts = Column(Integer)
retry_interval = Column(Integer)
def configure(self):
- from . import models
- # Note: for workflows (operations attached directly to the service) "interface" will be None
- if (self.implementation is None) or (self.interface is None):
+ if (self.implementation is None) and (self.function is None):
return
- if self.plugin is None:
- arguments = execution_plugin.instantiation.configure_operation(self)
+ if (self.plugin is None) and (self.interface is not None):
+ # Default to execution plugin ("interface" is None for workflow operations)
+ execution_plugin.instantiation.configure_operation(self)
else:
# In the future plugins may be able to add their own "configure_operation" hook that
- # can validate the configuration and otherwise return specially derived arguments
- arguments = self.configuration
+ # can validate the configuration and otherwise create specially derived arguments. For
+ # now, we just send all configuration parameters as arguments
+ utils.instantiate_dict(self, self.arguments, self.configuration)
- # Note: the arguments will *override* operation inputs of the same name
- if arguments:
- for k, v in arguments.iteritems():
- self.inputs[k] = models.Parameter.wrap(k, v)
+ # Send all inputs as extra arguments. Note that they will override existing arguments of the
+ # same names.
+ utils.instantiate_dict(self, self.arguments, self.inputs)
@property
def as_raw(self):
@@ -1762,17 +1773,18 @@ class OperationBase(InstanceModelMixin):
('description', self.description),
('implementation', self.implementation),
('dependencies', self.dependencies),
- ('executor', self.executor),
- ('max_attempts', self.max_attempts),
- ('retry_interval', self.retry_interval),
('inputs', formatting.as_raw_dict(self.inputs))))
def validate(self):
- # TODO must be associated with interface or service
+ # TODO must be associated with either interface or service
utils.validate_dict_values(self.inputs)
+ utils.validate_dict_values(self.configuration)
+ utils.validate_dict_values(self.arguments)
def coerce_values(self, report_issues):
utils.coerce_dict_values(self.inputs, report_issues)
+ utils.coerce_dict_values(self.configuration, report_issues)
+ utils.coerce_dict_values(self.arguments, report_issues)
def dump(self):
context = ConsumptionContext.get_thread_local()
@@ -1780,21 +1792,14 @@ class OperationBase(InstanceModelMixin):
if self.description:
console.puts(context.style.meta(self.description))
with context.style.indent:
- if self.plugin is not None:
- console.puts('Plugin: {0}'.format(
- context.style.literal(self.plugin.name)))
if self.implementation is not None:
console.puts('Implementation: {0}'.format(
context.style.literal(self.implementation)))
- if self.configuration:
- with context.style.indent:
- for k, v in self.configuration.iteritems():
- console.puts('{0}: {1}'.format(context.style.property(k),
- context.style.literal(v)))
if self.dependencies:
console.puts(
'Dependencies: {0}'.format(
', '.join((str(context.style.literal(v)) for v in self.dependencies))))
+ utils.dump_dict_values(self.inputs, 'Inputs')
if self.executor is not None:
console.puts('Executor: {0}'.format(context.style.literal(self.executor)))
if self.max_attempts is not None:
@@ -1802,7 +1807,13 @@ class OperationBase(InstanceModelMixin):
if self.retry_interval is not None:
console.puts('Retry interval: {0}'.format(
context.style.literal(self.retry_interval)))
- utils.dump_dict_values(self.inputs, 'Inputs')
+ if self.plugin is not None:
+ console.puts('Plugin: {0}'.format(
+ context.style.literal(self.plugin.name)))
+ utils.dump_dict_values(self.configuration, 'Configuration')
+ if self.function is not None:
+ console.puts('Function: {0}'.format(context.style.literal(self.function)))
+ utils.dump_dict_values(self.arguments, 'Arguments')
class ArtifactBase(InstanceModelMixin):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
index 3110248..b4a54ca 100644
--- a/aria/modeling/service_template.py
+++ b/aria/modeling/service_template.py
@@ -287,7 +287,7 @@ class ServiceTemplateBase(TemplateModelMixin):
service_template=self)
context.modeling.instance = service
- service.inputs = utils.create_inputs(inputs or {}, self.inputs)
+ service.inputs = utils.create_parameters(inputs or {}, self.inputs)
# TODO: now that we have inputs, we should scan properties and inputs and evaluate functions
for plugin_specification in self.plugin_specifications.itervalues():
@@ -1762,20 +1762,22 @@ class OperationTemplateBase(TemplateModelMixin):
:vartype name: basestring
:ivar description: Human-readable description
:vartype description: basestring
- :ivar plugin_specification: Associated plugin
- :vartype plugin_specification: :class:`PluginSpecification`
:ivar relationship_edge: When true specified that the operation is on the relationship's
target edge instead of its source (only used by relationship
operations)
:vartype relationship_edge: bool
:ivar implementation: Implementation (interpreted by the plugin)
:vartype implementation: basestring
- :ivar configuration: Configuration (interpreted by the plugin)
- :vartype configuration: {basestring, object}
:ivar dependencies: Dependency strings (interpreted by the plugin)
:vartype dependencies: [basestring]
:ivar inputs: Parameters that can be used by this operation
:vartype inputs: {basestring: :class:`Parameter`}
+ :ivar plugin_specification: Associated plugin
+ :vartype plugin_specification: :class:`PluginSpecification`
+ :ivar configuration: Configuration (interpreted by the plugin)
+ :vartype configuration: {basestring, :class:`Parameter`}
+ :ivar function: Name of the operation function
+ :vartype function: basestring
:ivar executor: Name of executor to run the operation with
:vartype executor: basestring
:ivar max_attempts: Maximum number of attempts allowed in case of failure
@@ -1855,13 +1857,17 @@ class OperationTemplateBase(TemplateModelMixin):
def inputs(cls):
return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+ @declared_attr
+ def configuration(cls):
+ return relationship.many_to_many(cls, 'parameter', prefix='configuration', dict_key='name')
+
# endregion
description = Column(Text)
relationship_edge = Column(Boolean)
implementation = Column(Text)
- configuration = Column(modeling_types.StrictDict(key_cls=basestring))
dependencies = Column(modeling_types.StrictList(item_cls=basestring))
+ function = Column(Text)
executor = Column(Text)
max_attempts = Column(Integer)
retry_interval = Column(Integer)
@@ -1873,9 +1879,6 @@ class OperationTemplateBase(TemplateModelMixin):
('description', self.description),
('implementation', self.implementation),
('dependencies', self.dependencies),
- ('executor', self.executor),
- ('max_attempts', self.max_attempts),
- ('retry_interval', self.retry_interval),
('inputs', formatting.as_raw_dict(self.inputs))))
def instantiate(self, container):
@@ -1883,38 +1886,41 @@ class OperationTemplateBase(TemplateModelMixin):
if self.plugin_specification:
if self.plugin_specification.enabled:
plugin = self.plugin_specification.plugin
- implementation = self.implementation if plugin is not None else None
+ function = self.function if plugin is not None else None
# "plugin" would be none if a match was not found. In that case, a validation error
# should already have been reported in ServiceTemplateBase.instantiate, so we will
# continue silently here
else:
# If the plugin is disabled, the operation should be disabled, too
plugin = None
- implementation = None
+ function = None
else:
- # Using the execution plugin
+ # Using the default execution plugin (plugin=None)
plugin = None
- implementation = self.implementation
+ function = self.function
operation = models.Operation(name=self.name,
description=deepcopy_with_locators(self.description),
relationship_edge=self.relationship_edge,
- plugin=plugin,
- implementation=implementation,
- configuration=self.configuration,
+ implementation=self.implementation,
dependencies=self.dependencies,
executor=self.executor,
+ plugin=plugin,
+ function=function,
max_attempts=self.max_attempts,
retry_interval=self.retry_interval,
operation_template=self)
utils.instantiate_dict(container, operation.inputs, self.inputs)
+ utils.instantiate_dict(container, operation.configuration, self.configuration)
return operation
def validate(self):
utils.validate_dict_values(self.inputs)
+ utils.validate_dict_values(self.configuration)
def coerce_values(self, report_issues):
utils.coerce_dict_values(self.inputs, report_issues)
+ utils.coerce_dict_values(self.configuration, report_issues)
def dump(self):
context = ConsumptionContext.get_thread_local()
@@ -1922,20 +1928,13 @@ class OperationTemplateBase(TemplateModelMixin):
if self.description:
console.puts(context.style.meta(self.description))
with context.style.indent:
- if self.plugin_specification is not None:
- console.puts('Plugin specification: {0}'.format(
- context.style.literal(self.plugin_specification.name)))
if self.implementation is not None:
console.puts('Implementation: {0}'.format(
context.style.literal(self.implementation)))
- if self.configuration:
- with context.style.indent:
- for k, v in self.configuration.iteritems():
- console.puts('{0}: {1}'.format(context.style.property(k),
- context.style.literal(v)))
if self.dependencies:
console.puts('Dependencies: {0}'.format(
', '.join((str(context.style.literal(v)) for v in self.dependencies))))
+ utils.dump_dict_values(self.inputs, 'Inputs')
if self.executor is not None:
console.puts('Executor: {0}'.format(context.style.literal(self.executor)))
if self.max_attempts is not None:
@@ -1943,7 +1942,12 @@ class OperationTemplateBase(TemplateModelMixin):
if self.retry_interval is not None:
console.puts('Retry interval: {0}'.format(
context.style.literal(self.retry_interval)))
- utils.dump_dict_values(self.inputs, 'Inputs')
+ if self.plugin_specification is not None:
+ console.puts('Plugin specification: {0}'.format(
+ context.style.literal(self.plugin_specification.name)))
+ utils.dump_dict_values(self.configuration, 'Configuration')
+ if self.function is not None:
+ console.puts('Function: {0}'.format(context.style.literal(self.function)))
class ArtifactTemplateBase(TemplateModelMixin):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/aria/modeling/utils.py
----------------------------------------------------------------------
diff --git a/aria/modeling/utils.py b/aria/modeling/utils.py
index 0404fe4..6f4022c 100644
--- a/aria/modeling/utils.py
+++ b/aria/modeling/utils.py
@@ -21,6 +21,7 @@ from . import exceptions
from ..parser.consumption import ConsumptionContext
from ..utils.console import puts
from ..utils.type import validate_value_type
+from ..utils.collections import OrderedDict
class ModelJSONEncoder(JSONEncoder):
@@ -39,7 +40,7 @@ class ModelJSONEncoder(JSONEncoder):
class NodeTemplateContainerHolder(object):
"""
Wrapper that allows using a :class:`aria.modeling.models.NodeTemplate` model directly as the
- ``container_holder`` argument for :func:`aria.modeling.functions.evaluate`.
+ ``container_holder`` input for :func:`aria.modeling.functions.evaluate`.
"""
def __init__(self, node_template):
@@ -51,74 +52,84 @@ class NodeTemplateContainerHolder(object):
return self.container.service_template
-def create_inputs(inputs, template_inputs):
+def create_parameters(parameters, declared_parameters):
"""
- :param inputs: key-value dict
- :param template_inputs: parameter name to parameter object dict
- :return: dict of parameter name to Parameter models
+ Validates, merges, and wraps parameter values according to those declared by a type.
+
+ Exceptions will be raised for validation errors:
+
+ * :class:`aria.modeling.exceptions.UndeclaredParametersException` if a key in ``parameters``
+ does not exist in ``declared_parameters``
+ * :class:`aria.modeling.exceptions.MissingRequiredParametersException` if a key in
+ ``declared_parameters`` does not exist in ``parameters`` and also has no default value
+ * :class:`aria.modeling.exceptions.ParametersOfWrongTypeException` if a value in ``parameters``
+ does not match its type in ``declared_parameters``
+
+ :param parameters: Provided parameter values
+ :type parameters: {basestring, object}
+ :param declared_parameters: Declared parameters
+ :type declared_parameters: {basestring, :class:`aria.modeling.models.Parameter`}
+ :return: The merged parameters
+ :rtype: {basestring, :class:`aria.modeling.models.Parameter`}
"""
- merged_inputs = _merge_and_validate_inputs(inputs, template_inputs)
+
+ merged_parameters = _merge_and_validate_parameters(parameters, declared_parameters)
from . import models
- input_models = []
- for input_name, input_val in merged_inputs.iteritems():
+ parameters_models = OrderedDict()
+ for parameter_name, parameter_value in merged_parameters.iteritems():
parameter = models.Parameter( # pylint: disable=unexpected-keyword-arg
- name=input_name,
- type_name=template_inputs[input_name].type_name,
- description=template_inputs[input_name].description,
- value=input_val)
- input_models.append(parameter)
+ name=parameter_name,
+ type_name=declared_parameters[parameter_name].type_name,
+ description=declared_parameters[parameter_name].description,
+ value=parameter_value)
+ parameters_models[parameter.name] = parameter
- return dict((inp.name, inp) for inp in input_models)
+ return parameters_models
-def _merge_and_validate_inputs(inputs, template_inputs):
- """
- :param inputs: key-value dict
- :param template_inputs: parameter name to parameter object dict
- :return:
- """
- merged_inputs = inputs.copy()
-
- missing_inputs = []
- wrong_type_inputs = {}
- for input_name, input_template in template_inputs.iteritems():
- if input_name not in inputs:
- if input_template.value is not None:
- merged_inputs[input_name] = input_template.value # apply default value
+def _merge_and_validate_parameters(parameters, declared_parameters):
+ merged_parameters = OrderedDict(parameters)
+
+ missing_parameters = []
+ wrong_type_parameters = OrderedDict()
+ for parameter_name, declared_parameter in declared_parameters.iteritems():
+ if parameter_name not in parameters:
+ if declared_parameter.value is not None:
+ merged_parameters[parameter_name] = declared_parameter.value # apply default value
else:
- missing_inputs.append(input_name)
+ missing_parameters.append(parameter_name)
else:
- # Validate input type
+ # Validate parameter type
try:
- validate_value_type(inputs[input_name], input_template.type_name)
+ validate_value_type(parameters[parameter_name], declared_parameter.type_name)
except ValueError:
- wrong_type_inputs[input_name] = input_template.type_name
+ wrong_type_parameters[parameter_name] = declared_parameter.type_name
except RuntimeError:
# TODO: This error shouldn't be raised (or caught), but right now we lack support
# for custom data_types, which will raise this error. Skipping their validation.
pass
- if missing_inputs:
- raise exceptions.MissingRequiredInputsException(
- 'Required inputs {0} have not been specified - expected inputs: {1}'
- .format(missing_inputs, template_inputs.keys()))
+ if missing_parameters:
+ raise exceptions.MissingRequiredParametersException(
+ 'Required parameters {0} have not been specified; Expected parameters: {1}'
+ .format(missing_parameters, declared_parameters.keys()))
- if wrong_type_inputs:
+ if wrong_type_parameters:
error_message = StringIO()
- for param_name, param_type in wrong_type_inputs.iteritems():
- error_message.write('Input "{0}" must be of type {1}{2}'
+ for param_name, param_type in wrong_type_parameters.iteritems():
+ error_message.write('Parameter "{0}" must be of type {1}{2}'
.format(param_name, param_type, os.linesep))
- raise exceptions.InputsOfWrongTypeException(error_message.getvalue())
+ raise exceptions.ParametersOfWrongTypeException(error_message.getvalue())
- undeclared_inputs = [input_name for input_name in inputs.keys()
- if input_name not in template_inputs]
- if undeclared_inputs:
- raise exceptions.UndeclaredInputsException(
- 'Undeclared inputs have been specified: {0}; Expected inputs: {1}'
- .format(undeclared_inputs, template_inputs.keys()))
+ undeclared_parameters = [parameter_name for parameter_name in parameters.keys()
+ if parameter_name not in declared_parameters]
+ if undeclared_parameters:
+ raise exceptions.UndeclaredParametersException(
+ 'Undeclared parameters have been specified: {0}; Expected parameters: {1}'
+ .format(undeclared_parameters, declared_parameters.keys()))
- return merged_inputs
+ return merged_parameters
def coerce_dict_values(the_dict, report_issues=False):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/aria/orchestrator/execution_plugin/instantiation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/instantiation.py b/aria/orchestrator/execution_plugin/instantiation.py
index c09434e..26c3913 100644
--- a/aria/orchestrator/execution_plugin/instantiation.py
+++ b/aria/orchestrator/execution_plugin/instantiation.py
@@ -16,19 +16,13 @@
# TODO: this module will eventually be moved to a new "aria.instantiation" package
from ...utils.type import full_type_name
-from ...utils.collections import OrderedDict
+from ...utils.formatting import safe_repr
from ...parser import validation
from ...parser.consumption import ConsumptionContext
+from ...modeling.functions import Function
def configure_operation(operation):
- configuration = OrderedDict(operation.configuration) if operation.configuration else {}
-
- arguments = OrderedDict()
- arguments['script_path'] = operation.implementation
- arguments['process'] = _get_process(configuration.pop('process')) \
- if 'process' in configuration else dict()
-
host = None
interface = operation.interface
if interface.node is not None:
@@ -36,87 +30,118 @@ def configure_operation(operation):
elif interface.relationship is not None:
if operation.relationship_edge is True:
host = interface.relationship.target_node.host
- else: # either False or None
+ else: # either False or None (None meaning that edge was not specified)
host = interface.relationship.source_node.host
+ _configure_common(operation)
if host is None:
_configure_local(operation)
else:
- _configure_remote(operation, configuration, arguments)
+ _configure_remote(operation)
+
+ # Any remaining un-handled configuration parameters will become extra arguments, available as
+ # kwargs in either "run_script_locally" or "run_script_with_ssh"
+ for key, value in operation.configuration.iteritems():
+ if key not in ('process', 'ssh'):
+ operation.arguments[key] = value.instantiate()
- # Any remaining unhandled configuration values will become extra arguments, available as kwargs
- # in either "run_script_locally" or "run_script_with_ssh"
- arguments.update(configuration)
- return arguments
+def _configure_common(operation):
+ """
+ Local and remote operations.
+ """
+
+ from ...modeling.models import Parameter
+ operation.arguments['script_path'] = Parameter.wrap('script_path', operation.implementation,
+ 'Relative path to the executable file.')
+ operation.arguments['process'] = Parameter.wrap('process', _get_process(operation),
+ 'Sub-process configuration.')
+
def _configure_local(operation):
"""
Local operation.
"""
+
from . import operations
- operation.implementation = '{0}.{1}'.format(operations.__name__,
- operations.run_script_locally.__name__)
+ operation.function = '{0}.{1}'.format(operations.__name__,
+ operations.run_script_locally.__name__)
-def _configure_remote(operation, configuration, arguments):
+def _configure_remote(operation):
"""
Remote SSH operation via Fabric.
"""
+
+ from ...modeling.models import Parameter
+ from . import operations
+
+ ssh = _get_ssh(operation)
+
+ # Defaults
# TODO: find a way to configure these generally in the service template
default_user = ''
default_password = ''
-
- ssh = _get_ssh(configuration.pop('ssh')) if 'ssh' in configuration else {}
if 'user' not in ssh:
ssh['user'] = default_user
if ('password' not in ssh) and ('key' not in ssh) and ('key_filename' not in ssh):
ssh['password'] = default_password
- arguments['use_sudo'] = ssh.get('use_sudo', False)
- arguments['hide_output'] = ssh.get('hide_output', [])
- arguments['fabric_env'] = {}
+ operation.arguments['use_sudo'] = Parameter.wrap('use_sudo', ssh.get('use_sudo', False),
+ 'Whether to execute with sudo.')
+
+ operation.arguments['hide_output'] = Parameter.wrap('hide_output', ssh.get('hide_output', []),
+ 'Hide output of these Fabric groups.')
+
+ fabric_env = {}
if 'warn_only' in ssh:
- arguments['fabric_env']['warn_only'] = ssh['warn_only']
- arguments['fabric_env']['user'] = ssh.get('user')
- arguments['fabric_env']['password'] = ssh.get('password')
- arguments['fabric_env']['key'] = ssh.get('key')
- arguments['fabric_env']['key_filename'] = ssh.get('key_filename')
+ fabric_env['warn_only'] = ssh['warn_only']
+ fabric_env['user'] = ssh.get('user')
+ fabric_env['password'] = ssh.get('password')
+ fabric_env['key'] = ssh.get('key')
+ fabric_env['key_filename'] = ssh.get('key_filename')
if 'address' in ssh:
- arguments['fabric_env']['host_string'] = ssh['address']
+ fabric_env['host_string'] = ssh['address']
- if arguments['fabric_env'].get('user') is None:
+ # Make sure we have a user
+ if fabric_env.get('user') is None:
context = ConsumptionContext.get_thread_local()
context.validation.report('must configure "ssh.user" for "{0}"'
.format(operation.implementation),
level=validation.Issue.BETWEEN_TYPES)
- if (arguments['fabric_env'].get('password') is None) and \
- (arguments['fabric_env'].get('key') is None) and \
- (arguments['fabric_env'].get('key_filename') is None):
+
+ # Make sure we have an authentication value
+ if (fabric_env.get('password') is None) and \
+ (fabric_env.get('key') is None) and \
+ (fabric_env.get('key_filename') is None):
context = ConsumptionContext.get_thread_local()
context.validation.report('must configure "ssh.password", "ssh.key", or "ssh.key_filename" '
'for "{0}"'
.format(operation.implementation),
level=validation.Issue.BETWEEN_TYPES)
- from . import operations
- operation.implementation = '{0}.{1}'.format(operations.__name__,
- operations.run_script_with_ssh.__name__)
+ operation.arguments['fabric_env'] = Parameter.wrap('fabric_env', fabric_env,
+ 'Fabric configuration.')
+ operation.function = '{0}.{1}'.format(operations.__name__,
+ operations.run_script_with_ssh.__name__)
-def _get_process(value):
+
+def _get_process(operation):
+ value = operation.configuration.get('process')._value \
+ if 'process' in operation.configuration else None
if value is None:
- return None
+ return {}
_validate_type(value, dict, 'process')
for k, v in value.iteritems():
if k == 'eval_python':
- value[k] = _str_to_bool(v, 'process.eval_python')
+ value[k] = _coerce_bool(v, 'process.eval_python')
elif k == 'cwd':
_validate_type(v, basestring, 'process.cwd')
elif k == 'command_prefix':
_validate_type(v, basestring, 'process.command_prefix')
elif k == 'args':
- value[k] = _dict_to_list(v, 'process.args')
+ value[k] = _dict_to_list_of_strings(v, 'process.args')
elif k == 'env':
_validate_type(v, dict, 'process.env')
else:
@@ -126,17 +151,19 @@ def _get_process(value):
return value
-def _get_ssh(value):
+def _get_ssh(operation):
+ value = operation.configuration.get('ssh')._value \
+ if 'ssh' in operation.configuration else None
if value is None:
return {}
_validate_type(value, dict, 'ssh')
for k, v in value.iteritems():
if k == 'use_sudo':
- value[k] = _str_to_bool(v, 'ssh.use_sudo')
+ value[k] = _coerce_bool(v, 'ssh.use_sudo')
elif k == 'hide_output':
- value[k] = _dict_to_list(v, 'ssh.hide_output')
+ value[k] = _dict_to_list_of_strings(v, 'ssh.hide_output')
elif k == 'warn_only':
- value[k] = _str_to_bool(v, 'ssh.warn_only')
+ value[k] = _coerce_bool(v, 'ssh.warn_only')
elif k == 'user':
_validate_type(v, basestring, 'ssh.user')
elif k == 'password':
@@ -155,16 +182,20 @@ def _get_ssh(value):
def _validate_type(value, the_type, name):
+ if isinstance(value, Function):
+ return
if not isinstance(value, the_type):
context = ConsumptionContext.get_thread_local()
- context.validation.report('"{0}" configuration is not a {1}'
- .format(name, full_type_name(the_type)),
+ context.validation.report('"{0}" configuration is not a {1}: {2}'
+ .format(name, full_type_name(the_type), safe_repr(value)),
level=validation.Issue.BETWEEN_TYPES)
-def _str_to_bool(value, name):
+def _coerce_bool(value, name):
if value is None:
return None
+ if isinstance(value, bool):
+ return value
_validate_type(value, basestring, name)
if value == 'true':
return True
@@ -173,19 +204,15 @@ def _str_to_bool(value, name):
else:
context = ConsumptionContext.get_thread_local()
context.validation.report('"{0}" configuration is not "true" or "false": {1}'
- .format(name, repr(value)),
+ .format(name, safe_repr(value)),
level=validation.Issue.BETWEEN_TYPES)
-def _dict_to_list(the_dict, name):
+def _dict_to_list_of_strings(the_dict, name):
_validate_type(the_dict, dict, name)
value = []
for k in sorted(the_dict):
v = the_dict[k]
- if not isinstance(v, basestring):
- context = ConsumptionContext.get_thread_local()
- context.validation.report('"{0}.{1}" configuration is not a string: {2}'
- .format(name, k, repr(v)),
- level=validation.Issue.BETWEEN_TYPES)
+ _validate_type(v, basestring, '{0}.{1}'.format(name, k))
value.append(v)
return value
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/aria/orchestrator/workflow_runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflow_runner.py b/aria/orchestrator/workflow_runner.py
index 8f25cce..0c6321f 100644
--- a/aria/orchestrator/workflow_runner.py
+++ b/aria/orchestrator/workflow_runner.py
@@ -119,7 +119,7 @@ class WorkflowRunner(object):
else:
workflow_inputs = self.service.workflows[self._workflow_name].inputs
- execution.inputs = modeling_utils.create_inputs(inputs, workflow_inputs)
+ execution.inputs = modeling_utils.create_parameters(inputs, workflow_inputs)
# TODO: these two following calls should execute atomically
self._validate_no_active_executions(execution)
self._model_storage.execution.put(execution)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/aria/orchestrator/workflows/api/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task.py b/aria/orchestrator/workflows/api/task.py
index cb79eb3..aa6ac45 100644
--- a/aria/orchestrator/workflows/api/task.py
+++ b/aria/orchestrator/workflows/api/task.py
@@ -99,8 +99,8 @@ class OperationTask(BaseTask):
operation = self.actor.interfaces[self.interface_name].operations[self.operation_name]
self.plugin = operation.plugin
- self.inputs = modeling_utils.create_inputs(inputs or {}, operation.inputs)
- self.implementation = operation.implementation
+ self.inputs = modeling_utils.create_parameters(inputs or {}, operation.arguments)
+ self.implementation = operation.function
def __repr__(self):
return self.name
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py
index b3dfb3c..0d6eb11 100644
--- a/aria/orchestrator/workflows/core/task.py
+++ b/aria/orchestrator/workflows/core/task.py
@@ -148,7 +148,6 @@ class OperationTask(BaseTask):
plugin=api_task.plugin,
implementation=api_task.implementation,
inputs=api_task.inputs
-
)
self._workflow_context.model.task.put(task_model)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml b/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
index 0c5e77f..abac03b 100644
--- a/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
+++ b/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
@@ -52,18 +52,10 @@ policy_types:
should be inherited and extended with additional properties.
derived_from: tosca.policies.Root
properties:
- implementation:
+ function:
description: >-
- The interpretation of the implementation string depends on the orchestrator. In ARIA it is
- the full path to a Python @workflow function that generates a task graph based on the
- service topology.
+ The interpretation of the function string depends on the orchestrator. In ARIA it is the
+ full path to a Python @workflow function that generates a task graph based on the service
+ topology.
type: string
required: true
- dependencies:
- description: >-
- The optional ordered list of one or more dependent or secondary implementation artifact
- name which are referenced by the primary implementation artifact (e.g., a library the
- script installs or a secondary script).
- type: list
- entry_schema: string
- required: false
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/extensions/aria_extension_tosca/simple_v1_0/assignments.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/assignments.py b/extensions/aria_extension_tosca/simple_v1_0/assignments.py
index d929ce0..79f6377 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/assignments.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/assignments.py
@@ -23,7 +23,7 @@ from aria.parser.presentation import (AsIsPresentation, has_fields, allow_unknow
from .filters import NodeFilter
from .misc import Description, OperationImplementation
-from .modeling.properties import get_assigned_and_defined_property_values
+from .modeling.parameters import get_assigned_and_defined_parameter_values
from .presentation.extensible import ExtensiblePresentation
from .presentation.field_validators import (node_template_or_type_validator,
relationship_template_or_type_validator,
@@ -428,7 +428,7 @@ class ArtifactAssignment(ExtensiblePresentation):
@cachedmethod
def _get_property_values(self, context):
- return FrozenDict(get_assigned_and_defined_property_values(context, self))
+ return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'property'))
@cachedmethod
def _validate(self, context):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
index 99389e4..0b04fdc 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
@@ -26,14 +26,17 @@ import re
from types import FunctionType
from datetime import datetime
+from ruamel import yaml
+
from aria.parser.validation import Issue
-from aria.utils.collections import StrictDict
+from aria.utils.collections import (StrictDict, OrderedDict)
from aria.modeling.models import (Type, ServiceTemplate, NodeTemplate,
RequirementTemplate, RelationshipTemplate, CapabilityTemplate,
GroupTemplate, PolicyTemplate, SubstitutionTemplate,
SubstitutionTemplateMapping, InterfaceTemplate, OperationTemplate,
ArtifactTemplate, Metadata, Parameter, PluginSpecification)
+from .parameters import coerce_parameter_value
from .constraints import (Equal, GreaterThan, GreaterOrEqual, LessThan, LessOrEqual, InRange,
ValidValues, Length, MinLength, MaxLength, Pattern)
from ..data_types import coerce_value
@@ -375,7 +378,7 @@ def create_operation_template_model(context, service_template, operation):
implementation = operation.implementation
if implementation is not None:
primary = implementation.primary
- parse_implementation_string(context, service_template, operation, model, primary)
+ set_implementation(context, service_template, operation, model, primary)
relationship_edge = operation._get_extensions(context).get('relationship_edge')
if relationship_edge is not None:
if relationship_edge == 'source':
@@ -384,18 +387,37 @@ def create_operation_template_model(context, service_template, operation):
model.relationship_edge = True
dependencies = implementation.dependencies
+ configuration = OrderedDict()
if dependencies:
for dependency in dependencies:
key, value = split_prefix(dependency)
if key is not None:
- if model.configuration is None:
- model.configuration = {}
- set_nested(model.configuration, key.split('.'), value)
+ # Parse as YAML
+ try:
+ value = yaml.load(value)
+ except yaml.parser.MarkedYAMLError as e:
+ context.validation.report(
+ 'YAML parser {0} in operation configuration: {1}'
+ .format(e.problem, value),
+ locator=implementation._locator,
+ level=Issue.FIELD)
+ continue
+
+ # Coerce to intrinsic functions, if there are any
+ value = coerce_parameter_value(context, implementation, None, value).value
+
+ # Support dot-notation nesting
+ set_nested(configuration, key.split('.'), value)
else:
if model.dependencies is None:
model.dependencies = []
model.dependencies.append(dependency)
+ # Convert configuration to Parameter models
+ for key, value in configuration.iteritems():
+ model.configuration[key] = Parameter.wrap(key, value,
+ description='Operation configuration.')
+
inputs = operation.inputs
if inputs:
for input_name, the_input in inputs.iteritems():
@@ -490,15 +512,15 @@ def create_workflow_operation_template_model(context, service_template, policy):
properties = policy._get_property_values(context)
for prop_name, prop in properties.iteritems():
- if prop_name == 'implementation':
- parse_implementation_string(context, service_template, policy, model, prop.value)
+ if prop_name == 'function':
+ model.function = prop.value
elif prop_name == 'dependencies':
model.dependencies = prop.value
else:
- model.inputs[prop_name] = Parameter(name=prop_name, # pylint: disable=unexpected-keyword-arg
- type_name=prop.type,
- value=prop.value,
- description=prop.description)
+ model.configuration[prop_name] = Parameter(name=prop_name, # pylint: disable=unexpected-keyword-arg
+ type_name=prop.type,
+ value=prop.value,
+ description=prop.description)
return model
@@ -639,12 +661,12 @@ def create_constraint(context, node_filter, constraint_clause, property_name, ca
def split_prefix(string):
"""
- Splits the prefix on the first unescaped ">".
+ Splits the prefix on the first non-escaped ">".
"""
- split = IMPLEMENTATION_PREFIX_REGEX.split(string, 2)
+ split = IMPLEMENTATION_PREFIX_REGEX.split(string, 1)
if len(split) < 2:
- return None, string
+ return None, None
return split[0].strip(), split[1].lstrip()
@@ -671,13 +693,18 @@ def set_nested(the_dict, keys, value):
set_nested(the_dict[key], keys, value)
-def parse_implementation_string(context, service_template, presentation, model, implementation):
- plugin_name, model.implementation = split_prefix(implementation)
- if plugin_name is not None:
- model.plugin_specification = service_template.plugin_specifications.get(plugin_name)
+def set_implementation(context, service_template, presentation, model, primary):
+ prefix, postfix = split_prefix(primary)
+ if prefix:
+ # Special ARIA prefix
+ model.plugin_specification = service_template.plugin_specifications.get(prefix)
+ model.function = postfix
if model.plugin_specification is None:
context.validation.report(
'no policy for plugin "{0}" specified in operation implementation: {1}'
- .format(plugin_name, implementation),
+ .format(prefix, primary),
locator=presentation._get_child_locator('properties', 'implementation'),
level=Issue.BETWEEN_TYPES)
+ else:
+ # Standard TOSCA artifact
+ model.implementation = primary
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py
index 4f61ef5..dd9eeb4 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/artifacts.py
@@ -15,11 +15,11 @@
from aria.utils.collections import OrderedDict
+
#
# NodeType, NodeTemplate
#
-
def get_inherited_artifact_definitions(context, presentation, for_presentation=None):
if hasattr(presentation, '_get_type'):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py
index 6df7177..a90a9fc 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/capabilities.py
@@ -16,8 +16,9 @@
from aria.utils.collections import deepcopy_with_locators, OrderedDict
from aria.parser.validation import Issue
-from .properties import (convert_property_definitions_to_values, merge_raw_property_definitions,
- get_assigned_and_defined_property_values)
+from .parameters import (convert_parameter_definitions_to_values, merge_raw_parameter_definitions,
+ get_assigned_and_defined_parameter_values)
+
#
# CapabilityType
@@ -38,6 +39,7 @@ def get_inherited_valid_source_types(context, presentation):
return valid_source_types
+
#
# NodeType
#
@@ -92,6 +94,7 @@ def get_inherited_capability_definitions(context, presentation, for_presentation
return capability_definitions
+
#
# NodeTemplate
#
@@ -127,8 +130,9 @@ def get_template_capabilities(context, presentation):
capability_assignment = capability_assignments[capability_name]
# Assign properties
- values = get_assigned_and_defined_property_values(context,
- our_capability_assignment)
+ values = get_assigned_and_defined_parameter_values(context,
+ our_capability_assignment,
+ 'property')
if values:
capability_assignment._raw['properties'] = values
else:
@@ -139,6 +143,7 @@ def get_template_capabilities(context, presentation):
return capability_assignments
+
#
# Utils
#
@@ -150,24 +155,25 @@ def convert_capability_from_definition_to_assignment(context, presentation, cont
properties = presentation.properties
if properties is not None:
- raw['properties'] = convert_property_definitions_to_values(context, properties)
+ raw['properties'] = convert_parameter_definitions_to_values(context, properties)
# TODO attributes
return CapabilityAssignment(name=presentation._name, raw=raw, container=container)
+
def merge_capability_definition_from_type(context, presentation, capability_definition):
raw_properties = OrderedDict()
# Merge properties from type
the_type = capability_definition._get_type(context)
type_property_defintions = the_type._get_properties(context)
- merge_raw_property_definitions(context, presentation, raw_properties, type_property_defintions,
- 'properties')
+ merge_raw_parameter_definitions(context, presentation, raw_properties, type_property_defintions,
+ 'properties')
# Merge our properties
- merge_raw_property_definitions(context, presentation, raw_properties,
- capability_definition.properties, 'properties')
+ merge_raw_parameter_definitions(context, presentation, raw_properties,
+ capability_definition.properties, 'properties')
if raw_properties:
capability_definition._raw['properties'] = raw_properties
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/extensions/aria_extension_tosca/simple_v1_0/modeling/constraints.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/constraints.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/constraints.py
index 7c99eab..9a30cc1 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/constraints.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/constraints.py
@@ -15,7 +15,7 @@
import re
-from aria.modeling.contraints import NodeTemplateConstraint
+from aria.modeling.constraints import NodeTemplateConstraint
from aria.modeling.utils import NodeTemplateContainerHolder
from aria.modeling.functions import evaluate
from aria.parser import implements_specification
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py
index 3952785..c0d79e5 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/data_types.py
@@ -26,6 +26,7 @@ from aria.parser.validation import Issue
from .functions import get_function
from ..presentation.types import get_type_by_full_or_shorthand_name
+
#
# DataType
#
@@ -50,6 +51,7 @@ def get_inherited_constraints(context, presentation):
return constraints
+
def coerce_data_type_value(context, presentation, data_type, entry_schema, constraints, value, # pylint: disable=unused-argument
aspect):
"""
@@ -121,6 +123,7 @@ def coerce_data_type_value(context, presentation, data_type, entry_schema, const
return value
+
def validate_data_type_name(context, presentation):
"""
Makes sure the complex data type's name is not that of a built-in type.
@@ -132,6 +135,7 @@ def validate_data_type_name(context, presentation):
% safe_repr(name),
locator=presentation._locator, level=Issue.BETWEEN_TYPES)
+
#
# PropertyDefinition, AttributeDefinition, EntrySchema, DataType
#
@@ -172,6 +176,7 @@ def get_data_type(context, presentation, field_name, allow_none=False):
# Try primitive data type
return get_primitive_data_type(type_name)
+
#
# PropertyDefinition, EntrySchema
#
@@ -195,6 +200,7 @@ def get_property_constraints(context, presentation):
return constraints
+
#
# ConstraintClause
#
@@ -310,6 +316,7 @@ def apply_constraint_to_value(context, presentation, constraint_clause, value):
return True
+
#
# Repository
#
@@ -326,6 +333,7 @@ def get_data_type_value(context, presentation, field_name, type_name):
locator=presentation._locator, level=Issue.BETWEEN_TYPES)
return None
+
#
# Utils
#
@@ -345,6 +353,7 @@ PRIMITIVE_DATA_TYPES = {
'boolean': bool,
'null': None.__class__}
+
@implements_specification('3.2.1-3', 'tosca-simple-1.0')
def get_primitive_data_type(type_name):
"""
@@ -358,6 +367,7 @@ def get_primitive_data_type(type_name):
return PRIMITIVE_DATA_TYPES.get(type_name)
+
def get_data_type_name(the_type):
"""
Returns the name of the type, whether it's a DataType, a primitive type, or another class.
@@ -365,6 +375,7 @@ def get_data_type_name(the_type):
return the_type._name if hasattr(the_type, '_name') else full_type_name(the_type)
+
def coerce_value(context, presentation, the_type, entry_schema, constraints, value, aspect=None): # pylint: disable=too-many-return-statements
"""
Returns the value after it's coerced to its type, reporting validation errors if it cannot be
@@ -410,6 +421,7 @@ def coerce_value(context, presentation, the_type, entry_schema, constraints, val
# Coerce to primitive type
return coerce_to_primitive(context, presentation, the_type, constraints, value, aspect)
+
def coerce_to_primitive(context, presentation, primitive_type, constraints, value, aspect=None):
"""
Returns the value after it's coerced to a primitive type, translating exceptions to validation
@@ -435,6 +447,7 @@ def coerce_to_primitive(context, presentation, primitive_type, constraints, valu
return value
+
def coerce_to_data_type_class(context, presentation, cls, entry_schema, constraints, value,
aspect=None):
"""
@@ -463,6 +476,7 @@ def coerce_to_data_type_class(context, presentation, cls, entry_schema, constrai
return value
+
def apply_constraints_to_value(context, presentation, constraints, value):
"""
Applies all constraints to the value. If the value conforms, returns the value. If it does not
@@ -478,6 +492,7 @@ def apply_constraints_to_value(context, presentation, constraints, value):
value = None
return value
+
def get_container_data_type(presentation):
if presentation is None:
return None
@@ -485,6 +500,7 @@ def get_container_data_type(presentation):
return presentation
return get_container_data_type(presentation._container)
+
def report_issue_for_bad_format(context, presentation, the_type, value, aspect, e):
if aspect == 'default':
aspect = '"default" value'
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/extensions/aria_extension_tosca/simple_v1_0/modeling/interfaces.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/interfaces.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/interfaces.py
index 3e6aa6f..e04ac4a 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/interfaces.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/interfaces.py
@@ -13,11 +13,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from aria.utils.collections import merge, deepcopy_with_locators, OrderedDict
+from aria.utils.collections import (merge, deepcopy_with_locators, OrderedDict)
from aria.parser.presentation import get_locator
from aria.parser.validation import Issue
-from .properties import (coerce_property_value, convert_property_definitions_to_values)
+from .parameters import (coerce_parameter_value, convert_parameter_definitions_to_values)
+
#
# InterfaceType
@@ -45,6 +46,7 @@ def get_inherited_operations(context, presentation):
return operations
+
#
# InterfaceDefinition
#
@@ -73,6 +75,7 @@ def get_and_override_input_definitions_from_type(context, presentation):
return inputs
+
def get_and_override_operation_definitions_from_type(context, presentation):
"""
Returns our operation definitions added on top of those of the interface type, if specified.
@@ -96,6 +99,7 @@ def get_and_override_operation_definitions_from_type(context, presentation):
return operations
+
#
# NodeType, RelationshipType, GroupType
#
@@ -124,6 +128,7 @@ def get_inherited_interface_definitions(context, presentation, type_name, for_pr
return interfaces
+
#
# NodeTemplate, RelationshipTemplate, GroupTemplate
#
@@ -186,6 +191,7 @@ def get_template_interfaces(context, presentation, type_name):
return template_interfaces
+
#
# Utils
#
@@ -200,13 +206,14 @@ def convert_interface_definition_from_type_to_template(context, presentation, co
raw = convert_interface_definition_from_type_to_raw_template(context, presentation)
return InterfaceAssignment(name=presentation._name, raw=raw, container=container)
+
def convert_interface_definition_from_type_to_raw_template(context, presentation): # pylint: disable=invalid-name
raw = OrderedDict()
# Copy default values for inputs
inputs = presentation._get_inputs(context)
if inputs is not None:
- raw['inputs'] = convert_property_definitions_to_values(context, inputs)
+ raw['inputs'] = convert_parameter_definitions_to_values(context, inputs)
# Copy operations
operations = presentation._get_operations(context)
@@ -221,11 +228,12 @@ def convert_interface_definition_from_type_to_raw_template(context, presentation
raw[operation_name]['implementation'] = deepcopy_with_locators(implementation._raw)
inputs = operation.inputs
if inputs is not None:
- raw[operation_name]['inputs'] = convert_property_definitions_to_values(context,
- inputs)
+ raw[operation_name]['inputs'] = convert_parameter_definitions_to_values(context,
+ inputs)
return raw
+
def convert_requirement_interface_definitions_from_type_to_raw_template(context, raw_requirement, # pylint: disable=invalid-name
interface_definitions):
if not interface_definitions:
@@ -240,6 +248,7 @@ def convert_requirement_interface_definitions_from_type_to_raw_template(context,
else:
raw_requirement['interfaces'][interface_name] = raw_interface
+
def merge_interface(context, presentation, interface_assignment, our_interface_assignment,
interface_definition, interface_name):
# Assign/merge interface inputs
@@ -282,6 +291,7 @@ def merge_interface(context, presentation, interface_assignment, our_interface_a
our_input_assignments, input_definitions, interface_name,
operation_name, presentation)
+
def merge_raw_input_definition(context, the_raw_input, our_input, interface_name, operation_name,
presentation, type_name):
# Check if we changed the type
@@ -305,6 +315,7 @@ def merge_raw_input_definition(context, the_raw_input, our_input, interface_name
# Merge
merge(the_raw_input, our_input._raw)
+
def merge_input_definitions(context, inputs, our_inputs, interface_name, operation_name,
presentation, type_name):
for input_name, our_input in our_inputs.iteritems():
@@ -314,6 +325,7 @@ def merge_input_definitions(context, inputs, our_inputs, interface_name, operati
else:
inputs[input_name] = our_input._clone(presentation)
+
def merge_raw_input_definitions(context, raw_inputs, our_inputs, interface_name, operation_name,
presentation, type_name):
for input_name, our_input in our_inputs.iteritems():
@@ -323,6 +335,7 @@ def merge_raw_input_definitions(context, raw_inputs, our_inputs, interface_name,
else:
raw_inputs[input_name] = deepcopy_with_locators(our_input._raw)
+
def merge_raw_operation_definition(context, raw_operation, our_operation, interface_name,
presentation, type_name):
if not isinstance(our_operation._raw, dict):
@@ -353,6 +366,7 @@ def merge_raw_operation_definition(context, raw_operation, our_operation, interf
raw_operation['implementation'] = \
deepcopy_with_locators(our_operation._raw['implementation'])
+
def merge_operation_definitions(context, operations, our_operations, interface_name, presentation,
type_name):
if not our_operations:
@@ -364,6 +378,7 @@ def merge_operation_definitions(context, operations, our_operations, interface_n
else:
operations[operation_name] = our_operation._clone(presentation)
+
def merge_raw_operation_definitions(context, raw_operations, our_operations, interface_name,
presentation, type_name):
for operation_name, our_operation in our_operations.iteritems():
@@ -378,6 +393,7 @@ def merge_raw_operation_definitions(context, raw_operations, our_operations, int
else:
raw_operations[operation_name] = deepcopy_with_locators(our_operation._raw)
+
# From either an InterfaceType or an InterfaceDefinition:
def merge_interface_definition(context, interface, our_source, presentation, type_name):
if hasattr(our_source, 'type'):
@@ -408,6 +424,7 @@ def merge_interface_definition(context, interface, our_source, presentation, typ
merge_raw_operation_definitions(context, interface._raw, our_operations, our_source._name,
presentation, type_name)
+
def merge_interface_definitions(context, interfaces, our_interfaces, presentation,
for_presentation=None):
if not our_interfaces:
@@ -419,12 +436,14 @@ def merge_interface_definitions(context, interfaces, our_interfaces, presentatio
else:
interfaces[name] = our_interface._clone(for_presentation)
+
def merge_interface_definitions_from_their_types(context, interfaces, presentation):
for interface in interfaces.itervalues():
the_type = interface._get_type(context) # InterfaceType
if the_type is not None:
merge_interface_definition(context, interface, the_type, presentation, 'type')
+
def assign_raw_inputs(context, values, assignments, definitions, interface_name, operation_name,
presentation):
if not assignments:
@@ -454,8 +473,9 @@ def assign_raw_inputs(context, values, assignments, definitions, interface_name,
# Note: default value has already been assigned
# Coerce value
- values['inputs'][input_name] = coerce_property_value(context, assignment, definition,
- assignment.value)
+ values['inputs'][input_name] = coerce_parameter_value(context, assignment, definition,
+ assignment.value)
+
def validate_required_inputs(context, presentation, assignment, definition, original_assignment,
interface_name, operation_name=None):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
new file mode 100644
index 0000000..c910956
--- /dev/null
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/parameters.py
@@ -0,0 +1,211 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from aria.utils.collections import (merge, deepcopy_with_locators, OrderedDict)
+from aria.utils.formatting import pluralize
+from aria.parser.presentation import Value
+from aria.parser.validation import Issue
+
+from .data_types import coerce_value
+
+
+#
+# ArtifactType, DataType, CapabilityType, RelationshipType, NodeType, GroupType, PolicyType
+#
+
+def get_inherited_parameter_definitions(context, presentation, field_name, for_presentation=None):
+ """
+ Returns our parameter definitions added on top of those of our parent, if we have one
+ (recursively).
+
+ Allows overriding all aspects of parent properties except data type.
+ """
+
+ # Get definitions from parent
+ # If we inherit from a primitive, it does not have a parent:
+ parent = presentation._get_parent(context) if hasattr(presentation, '_get_parent') else None
+ definitions = get_inherited_parameter_definitions(context, parent, field_name,
+ for_presentation=presentation) \
+ if parent is not None else OrderedDict()
+
+ # Add/merge our definitions
+ # If we inherit from a primitive, it does not have our field
+ our_definitions = getattr(presentation, field_name, None)
+ if our_definitions:
+ our_definitions_clone = OrderedDict()
+ for name, our_definition in our_definitions.iteritems():
+ our_definitions_clone[name] = our_definition._clone(for_presentation)
+ our_definitions = our_definitions_clone
+ merge_parameter_definitions(context, presentation, definitions, our_definitions, field_name)
+
+ for definition in definitions.itervalues():
+ definition._reset_method_cache()
+
+ return definitions
+
+
+#
+# NodeTemplate, RelationshipTemplate, GroupTemplate, PolicyTemplate
+#
+
+def get_assigned_and_defined_parameter_values(context, presentation, field_name):
+ """
+ Returns the assigned property values while making sure they are defined in our type.
+
+ The property definition's default value, if available, will be used if we did not assign it.
+
+ Makes sure that required properties indeed end up with a value.
+ """
+
+ values = OrderedDict()
+
+ the_type = presentation._get_type(context)
+ field_name_plural = pluralize(field_name)
+ assignments = getattr(presentation, field_name_plural)
+ get_fn_name = '_get_{0}'.format(field_name_plural)
+ definitions = getattr(the_type, get_fn_name)(context) if the_type is not None else None
+
+ # Fill in our assignments, but make sure they are defined
+ if assignments:
+ for name, value in assignments.iteritems():
+ if (definitions is not None) and (name in definitions):
+ definition = definitions[name]
+ values[name] = coerce_parameter_value(context, value, definition, value.value)
+ else:
+ context.validation.report('assignment to undefined {0} "{1}" in "{2}"'
+ .format(field_name, name, presentation._fullname),
+ locator=value._locator, level=Issue.BETWEEN_TYPES)
+
+ # Fill in defaults from the definitions
+ if definitions:
+ for name, definition in definitions.iteritems():
+ if values.get(name) is None:
+ values[name] = coerce_parameter_value(context, presentation, definition,
+ definition.default)
+
+ validate_required_values(context, presentation, values, definitions)
+
+ return values
+
+
+#
+# TopologyTemplate
+#
+
+def get_parameter_values(context, presentation, field_name):
+ values = OrderedDict()
+
+ parameters = getattr(presentation, field_name)
+
+ # Fill in defaults and values
+ if parameters:
+ for name, parameter in parameters.iteritems():
+ if values.get(name) is None:
+ if hasattr(parameter, 'value') and (parameter.value is not None):
+ # For parameters only:
+ values[name] = coerce_parameter_value(context, presentation, parameter,
+ parameter.value)
+ else:
+ default = parameter.default if hasattr(parameter, 'default') else None
+ values[name] = coerce_parameter_value(context, presentation, parameter, default)
+
+ return values
+
+
+#
+# Utils
+#
+
+def validate_required_values(context, presentation, values, definitions):
+ """
+ Check if required properties have not been assigned.
+ """
+
+ if not definitions:
+ return
+ for name, definition in definitions.iteritems():
+ if getattr(definition, 'required', False) \
+ and ((values is None) or (values.get(name) is None)):
+ context.validation.report('required property "%s" is not assigned a value in "%s"'
+ % (name, presentation._fullname),
+ locator=presentation._get_child_locator('properties'),
+ level=Issue.BETWEEN_TYPES)
+
+
+def merge_raw_parameter_definition(context, presentation, raw_property_definition,
+ our_property_definition, field_name, property_name):
+ # Check if we changed the type
+ # TODO: allow a sub-type?
+ type1 = raw_property_definition.get('type')
+ type2 = our_property_definition.type
+ if type1 != type2:
+ context.validation.report(
+ 'override changes type from "%s" to "%s" for property "%s" in "%s"'
+ % (type1, type2, property_name, presentation._fullname),
+ locator=presentation._get_child_locator(field_name, property_name),
+ level=Issue.BETWEEN_TYPES)
+
+ merge(raw_property_definition, our_property_definition._raw)
+
+
+def merge_raw_parameter_definitions(context, presentation, raw_property_definitions,
+ our_property_definitions, field_name):
+ if not our_property_definitions:
+ return
+ for property_name, our_property_definition in our_property_definitions.iteritems():
+ if property_name in raw_property_definitions:
+ raw_property_definition = raw_property_definitions[property_name]
+ merge_raw_parameter_definition(context, presentation, raw_property_definition,
+ our_property_definition, field_name, property_name)
+ else:
+ raw_property_definitions[property_name] = \
+ deepcopy_with_locators(our_property_definition._raw)
+
+
+def merge_parameter_definitions(context, presentation, property_definitions,
+ our_property_definitions, field_name):
+ if not our_property_definitions:
+ return
+ for property_name, our_property_definition in our_property_definitions.iteritems():
+ if property_name in property_definitions:
+ property_definition = property_definitions[property_name]
+ merge_raw_parameter_definition(context, presentation, property_definition._raw,
+ our_property_definition, field_name, property_name)
+ else:
+ property_definitions[property_name] = our_property_definition
+
+
+# Works on properties, inputs, and parameters
+def coerce_parameter_value(context, presentation, definition, value, aspect=None):
+ the_type = definition._get_type(context) if definition is not None else None
+ entry_schema = definition.entry_schema if definition is not None else None
+ constraints = definition._get_constraints(context) \
+ if ((definition is not None) and hasattr(definition, '_get_constraints')) else None
+ value = coerce_value(context, presentation, the_type, entry_schema, constraints, value, aspect)
+ if (the_type is not None) and hasattr(the_type, '_name'):
+ type_name = the_type._name
+ else:
+ type_name = getattr(definition, 'type', None)
+ description = getattr(definition, 'description', None)
+ description = description.value if description is not None else None
+ return Value(type_name, value, description)
+
+
+def convert_parameter_definitions_to_values(context, definitions):
+ values = OrderedDict()
+ for name, definition in definitions.iteritems():
+ default = definition.default
+ values[name] = coerce_parameter_value(context, definition, definition, default)
+ return values
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/extensions/aria_extension_tosca/simple_v1_0/modeling/policies.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/policies.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/policies.py
index fba1972..7dd803b 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/policies.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/policies.py
@@ -15,6 +15,7 @@
from ..presentation.types import convert_shorthand_to_full_type_name
+
#
# PolicyType
#
@@ -49,6 +50,7 @@ def get_inherited_targets(context, presentation):
return node_types, group_types
+
#
# PolicyTemplate
#
[5/5] incubator-ariatosca git commit: Fixes
Posted by em...@apache.org.
Fixes
* Rename implementation/inputs to function/arguments in Task API
* Rename "create_parameters" to "merge_parameter_values" and improve
* Change workflow "function" back to "implementation"
Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/fdec01ab
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/fdec01ab
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/fdec01ab
Branch: refs/heads/ARIA-149-functions-in-operation-configuration
Commit: fdec01ab0f28152d1e8fb9cd8f73b6a063f556f5
Parents: 8fe7f4b
Author: Tal Liron <ta...@gmail.com>
Authored: Wed May 24 14:54:07 2017 -0500
Committer: Tal Liron <ta...@gmail.com>
Committed: Tue May 30 13:03:05 2017 -0500
----------------------------------------------------------------------
aria/cli/execution_logging.py | 4 +-
aria/modeling/exceptions.py | 6 +
aria/modeling/orchestration.py | 14 +--
aria/modeling/service_instance.py | 11 +-
aria/modeling/service_template.py | 25 ++--
aria/modeling/utils.py | 126 ++++++++++---------
aria/orchestrator/context/operation.py | 4 +-
.../execution_plugin/instantiation.py | 12 +-
aria/orchestrator/workflow_runner.py | 20 +--
aria/orchestrator/workflows/api/task.py | 89 +++++++++----
.../workflows/builtin/execute_operation.py | 2 +-
aria/orchestrator/workflows/core/task.py | 4 +-
aria/orchestrator/workflows/events_logging.py | 8 +-
aria/orchestrator/workflows/executor/base.py | 4 +-
aria/orchestrator/workflows/executor/celery.py | 6 +-
aria/orchestrator/workflows/executor/dry.py | 6 +-
aria/orchestrator/workflows/executor/process.py | 12 +-
aria/orchestrator/workflows/executor/thread.py | 6 +-
aria/utils/formatting.py | 4 +-
aria/utils/validation.py | 8 +-
.../profiles/aria-1.0/aria-1.0.yaml | 8 +-
.../simple_v1_0/modeling/__init__.py | 22 ++--
.../simple_v1_0/modeling/functions.py | 6 +-
tests/modeling/test_models.py | 12 +-
tests/orchestrator/context/test_operation.py | 47 ++++---
tests/orchestrator/context/test_serialize.py | 4 +-
tests/orchestrator/context/test_toolbelt.py | 4 +-
.../orchestrator/execution_plugin/test_local.py | 8 +-
tests/orchestrator/execution_plugin/test_ssh.py | 6 +-
tests/orchestrator/test_workflow_runner.py | 8 +-
tests/orchestrator/workflows/api/test_task.py | 20 +--
.../orchestrator/workflows/core/test_engine.py | 40 +++---
tests/orchestrator/workflows/core/test_task.py | 4 +-
.../test_task_graph_into_execution_graph.py | 4 +-
.../orchestrator/workflows/executor/__init__.py | 6 +-
.../workflows/executor/test_executor.py | 10 +-
.../workflows/executor/test_process_executor.py | 2 +-
...process_executor_concurrent_modifications.py | 4 +-
.../executor/test_process_executor_extension.py | 18 +--
.../test_process_executor_tracked_changes.py | 14 +--
.../node-cellar/node-cellar.yaml | 2 +-
41 files changed, 340 insertions(+), 280 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/cli/execution_logging.py
----------------------------------------------------------------------
diff --git a/aria/cli/execution_logging.py b/aria/cli/execution_logging.py
index b23165f..b3252f0 100644
--- a/aria/cli/execution_logging.py
+++ b/aria/cli/execution_logging.py
@@ -105,8 +105,8 @@ def stylize_log(item, mark_pattern):
# implementation
if item.task:
# operation task
- implementation = item.task.implementation
- inputs = dict(i.unwrap() for i in item.task.inputs.values())
+ implementation = item.task.function
+ inputs = dict(arg.unwrap() for arg in item.task.arguments.values())
else:
# execution task
implementation = item.execution.workflow_name
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/modeling/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/modeling/exceptions.py b/aria/modeling/exceptions.py
index d0e3e22..e784d1a 100644
--- a/aria/modeling/exceptions.py
+++ b/aria/modeling/exceptions.py
@@ -57,3 +57,9 @@ class UndeclaredParametersException(ParameterException):
"""
ARIA modeling exception: Undeclared parameters have been provided.
"""
+
+
+class ForbiddenParameterNamesException(ParameterException):
+ """
+ ARIA modeling exception: Forbidden parameter names have been used.
+ """
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/modeling/orchestration.py
----------------------------------------------------------------------
diff --git a/aria/modeling/orchestration.py b/aria/modeling/orchestration.py
index ab9d34d..97de552 100644
--- a/aria/modeling/orchestration.py
+++ b/aria/modeling/orchestration.py
@@ -230,10 +230,10 @@ class TaskBase(ModelMixin):
:vartype relationship: :class:`Relationship`
:ivar plugin: The implementing plugin (set to None for default execution plugin)
:vartype plugin: :class:`Plugin`
- :ivar inputs: Parameters that can be used by this task
- :vartype inputs: {basestring: :class:`Parameter`}
- :ivar implementation: Python path to an ``@operation`` function
- :vartype implementation: basestring
+ :ivar function: Python path to an ``@operation`` function
+ :vartype function: basestring
+ :ivar arguments: Arguments that can be used by this task
+ :vartype arguments: {basestring: :class:`Parameter`}
:ivar max_attempts: Maximum number of retries allowed in case of failure
:vartype max_attempts: int
:ivar retry_interval: Interval between retries (in seconds)
@@ -300,10 +300,10 @@ class TaskBase(ModelMixin):
return relationship.many_to_one(cls, 'execution')
@declared_attr
- def inputs(cls):
- return relationship.many_to_many(cls, 'parameter', prefix='inputs', dict_key='name')
+ def arguments(cls):
+ return relationship.many_to_many(cls, 'parameter', prefix='arguments', dict_key='name')
- implementation = Column(String)
+ function = Column(String)
max_attempts = Column(Integer, default=1)
retry_interval = Column(Float, default=0)
ignore_failure = Column(Boolean, default=False)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/modeling/service_instance.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py
index 31f7212..72e2478 100644
--- a/aria/modeling/service_instance.py
+++ b/aria/modeling/service_instance.py
@@ -1753,17 +1753,18 @@ class OperationBase(InstanceModelMixin):
if (self.implementation is None) and (self.function is None):
return
- if (self.plugin is None) and (self.interface is not None):
- # Default to execution plugin ("interface" is None for workflow operations)
+ if (self.interface is not None) and (self.plugin is None) and (self.function is None):
+ # ("interface" is None for workflow operations, which do not currently use "plugin")
+ # The default (None) plugin is the execution plugin
execution_plugin.instantiation.configure_operation(self)
else:
# In the future plugins may be able to add their own "configure_operation" hook that
# can validate the configuration and otherwise create specially derived arguments. For
- # now, we just send all configuration parameters as arguments
+ # now, we just send all configuration parameters as arguments without validation.
utils.instantiate_dict(self, self.arguments, self.configuration)
- # Send all inputs as extra arguments. Note that they will override existing arguments of the
- # same names.
+ # Send all inputs as extra arguments
+ # Note that they will override existing arguments of the same names
utils.instantiate_dict(self, self.arguments, self.inputs)
@property
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
index b4a54ca..42e0d01 100644
--- a/aria/modeling/service_template.py
+++ b/aria/modeling/service_template.py
@@ -287,7 +287,7 @@ class ServiceTemplateBase(TemplateModelMixin):
service_template=self)
context.modeling.instance = service
- service.inputs = utils.create_parameters(inputs or {}, self.inputs)
+ service.inputs = utils.merge_parameter_values(inputs, self.inputs)
# TODO: now that we have inputs, we should scan properties and inputs and evaluate functions
for plugin_specification in self.plugin_specifications.itervalues():
@@ -1883,21 +1883,10 @@ class OperationTemplateBase(TemplateModelMixin):
def instantiate(self, container):
from . import models
- if self.plugin_specification:
- if self.plugin_specification.enabled:
- plugin = self.plugin_specification.plugin
- function = self.function if plugin is not None else None
- # "plugin" would be none if a match was not found. In that case, a validation error
- # should already have been reported in ServiceTemplateBase.instantiate, so we will
- # continue silently here
- else:
- # If the plugin is disabled, the operation should be disabled, too
- plugin = None
- function = None
- else:
- # Using the default execution plugin (plugin=None)
- plugin = None
- function = self.function
+
+ plugin = self.plugin_specification.plugin \
+ if (self.plugin_specification is not None) and self.plugin_specification.enabled \
+ else None
operation = models.Operation(name=self.name,
description=deepcopy_with_locators(self.description),
@@ -1906,12 +1895,14 @@ class OperationTemplateBase(TemplateModelMixin):
dependencies=self.dependencies,
executor=self.executor,
plugin=plugin,
- function=function,
+ function=self.function,
max_attempts=self.max_attempts,
retry_interval=self.retry_interval,
operation_template=self)
+
utils.instantiate_dict(container, operation.inputs, self.inputs)
utils.instantiate_dict(container, operation.configuration, self.configuration)
+
return operation
def validate(self):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/modeling/utils.py
----------------------------------------------------------------------
diff --git a/aria/modeling/utils.py b/aria/modeling/utils.py
index 6f4022c..ef9a53a 100644
--- a/aria/modeling/utils.py
+++ b/aria/modeling/utils.py
@@ -22,6 +22,7 @@ from ..parser.consumption import ConsumptionContext
from ..utils.console import puts
from ..utils.type import validate_value_type
from ..utils.collections import OrderedDict
+from ..utils.formatting import string_list_as_string
class ModelJSONEncoder(JSONEncoder):
@@ -52,84 +53,95 @@ class NodeTemplateContainerHolder(object):
return self.container.service_template
-def create_parameters(parameters, declared_parameters):
+def merge_parameter_values(parameter_values, declared_parameters, forbidden_names=None):
"""
- Validates, merges, and wraps parameter values according to those declared by a type.
+ Merges parameter values according to those declared by a type.
- Exceptions will be raised for validation errors:
+ Exceptions will be raised for validation errors.
- * :class:`aria.modeling.exceptions.UndeclaredParametersException` if a key in ``parameters``
- does not exist in ``declared_parameters``
- * :class:`aria.modeling.exceptions.MissingRequiredParametersException` if a key in
- ``declared_parameters`` does not exist in ``parameters`` and also has no default value
- * :class:`aria.modeling.exceptions.ParametersOfWrongTypeException` if a value in ``parameters``
- does not match its type in ``declared_parameters``
-
- :param parameters: Provided parameter values
- :type parameters: {basestring, object}
- :param declared_parameters: Declared parameters
+ :param parameter_values: provided parameter values or None
+ :type parameter_values: {basestring, object}
+ :param declared_parameters: declared parameters
:type declared_parameters: {basestring, :class:`aria.modeling.models.Parameter`}
- :return: The merged parameters
+ :param forbidden_names: parameters will be validated against these names
+ :type forbidden_names: [basestring]
+ :return: the merged parameters
:rtype: {basestring, :class:`aria.modeling.models.Parameter`}
+ :raises aria.modeling.exceptions.UndeclaredParametersException: if a key in ``parameter_values``
+ does not exist in ``declared_parameters``
+ :raises aria.modeling.exceptions.MissingRequiredParametersException: if a key in
+ ``declared_parameters`` does not exist in ``parameter_values`` and also has no default
+ value
+ :raises aria.modeling.exceptions.ForbiddenParameterNamesException: if a parameter name is in
+ ``forbidden_names``
+ :raises aria.modeling.exceptions.ParametersOfWrongTypeException: if a value in
+ ``parameter_values`` does not match its type in ``declared_parameters``
"""
- merged_parameters = _merge_and_validate_parameters(parameters, declared_parameters)
-
from . import models
- parameters_models = OrderedDict()
- for parameter_name, parameter_value in merged_parameters.iteritems():
- parameter = models.Parameter( # pylint: disable=unexpected-keyword-arg
- name=parameter_name,
- type_name=declared_parameters[parameter_name].type_name,
- description=declared_parameters[parameter_name].description,
- value=parameter_value)
- parameters_models[parameter.name] = parameter
-
- return parameters_models
-
-
-def _merge_and_validate_parameters(parameters, declared_parameters):
- merged_parameters = OrderedDict(parameters)
-
- missing_parameters = []
- wrong_type_parameters = OrderedDict()
- for parameter_name, declared_parameter in declared_parameters.iteritems():
- if parameter_name not in parameters:
- if declared_parameter.value is not None:
- merged_parameters[parameter_name] = declared_parameter.value # apply default value
- else:
- missing_parameters.append(parameter_name)
- else:
- # Validate parameter type
+
+ parameter_values = parameter_values or {}
+
+ undeclared_names = list(set(parameter_values.keys()).difference(declared_parameters.keys()))
+ if undeclared_names:
+ raise exceptions.UndeclaredParametersException(
+ 'Undeclared parameters have been provided: {0}; Declared: {1}'
+ .format(string_list_as_string(undeclared_names),
+ string_list_as_string(declared_parameters.keys())))
+
+ parameters = OrderedDict()
+
+ missing_names = []
+ wrong_type_values = OrderedDict()
+ for declared_parameter_name, declared_parameter in declared_parameters.iteritems():
+ if declared_parameter_name in parameter_values:
+ # Value has been provided
+ value = parameter_values[declared_parameter_name]
+
+ # Validate type
+ type_name = declared_parameter.type_name
try:
- validate_value_type(parameters[parameter_name], declared_parameter.type_name)
+ validate_value_type(value, type_name)
except ValueError:
- wrong_type_parameters[parameter_name] = declared_parameter.type_name
+ wrong_type_values[declared_parameter_name] = type_name
except RuntimeError:
# TODO: This error shouldn't be raised (or caught), but right now we lack support
# for custom data_types, which will raise this error. Skipping their validation.
pass
- if missing_parameters:
+ # Wrap in Parameter model
+ parameters[declared_parameter_name] = models.Parameter( # pylint: disable=unexpected-keyword-arg
+ name=declared_parameter_name,
+ type_name=type_name,
+ description=declared_parameter.description,
+ value=value)
+ elif declared_parameter.value is not None:
+ # Copy default value from declaration
+ parameters[declared_parameter_name] = declared_parameter.instantiate(None)
+ else:
+ # Required value has not been provided
+ missing_names.append(declared_parameter_name)
+
+ if missing_names:
raise exceptions.MissingRequiredParametersException(
- 'Required parameters {0} have not been specified; Expected parameters: {1}'
- .format(missing_parameters, declared_parameters.keys()))
+ 'Declared parameters {0} have not been provided values'
+ .format(string_list_as_string(missing_names)))
- if wrong_type_parameters:
+ if forbidden_names:
+ used_forbidden_names = list(set(forbidden_names).intersection(parameters.keys()))
+ if used_forbidden_names:
+ raise exceptions.ForbiddenParameterNamesException(
+ 'Forbidden parameter names have been used: {0}'
+ .format(string_list_as_string(used_forbidden_names)))
+
+ if wrong_type_values:
error_message = StringIO()
- for param_name, param_type in wrong_type_parameters.iteritems():
- error_message.write('Parameter "{0}" must be of type {1}{2}'
+ for param_name, param_type in wrong_type_values.iteritems():
+ error_message.write('Parameter "{0}" is not of declared type "{1}"{2}'
.format(param_name, param_type, os.linesep))
raise exceptions.ParametersOfWrongTypeException(error_message.getvalue())
- undeclared_parameters = [parameter_name for parameter_name in parameters.keys()
- if parameter_name not in declared_parameters]
- if undeclared_parameters:
- raise exceptions.UndeclaredParametersException(
- 'Undeclared parameters have been specified: {0}; Expected parameters: {1}'
- .format(undeclared_parameters, declared_parameters.keys()))
-
- return merged_parameters
+ return parameters
def coerce_dict_values(the_dict, report_issues=False):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/orchestrator/context/operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/operation.py b/aria/orchestrator/context/operation.py
index 7c21351..f0ba337 100644
--- a/aria/orchestrator/context/operation.py
+++ b/aria/orchestrator/context/operation.py
@@ -42,8 +42,8 @@ class BaseOperationContext(common.BaseContext):
self._register_logger(task_id=self.task.id, level=logger_level)
def __repr__(self):
- details = 'implementation={task.implementation}; ' \
- 'operation_inputs={task.inputs}'\
+ details = 'function={task.function}; ' \
+ 'operation_arguments={task.arguments}'\
.format(task=self.task)
return '{name}({0})'.format(details, name=self.name)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/orchestrator/execution_plugin/instantiation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/instantiation.py b/aria/orchestrator/execution_plugin/instantiation.py
index 26c3913..9b5152d 100644
--- a/aria/orchestrator/execution_plugin/instantiation.py
+++ b/aria/orchestrator/execution_plugin/instantiation.py
@@ -17,6 +17,7 @@
from ...utils.type import full_type_name
from ...utils.formatting import safe_repr
+from ...utils.collections import OrderedDict
from ...parser import validation
from ...parser.consumption import ConsumptionContext
from ...modeling.functions import Function
@@ -43,7 +44,7 @@ def configure_operation(operation):
# kwargs in either "run_script_locally" or "run_script_with_ssh"
for key, value in operation.configuration.iteritems():
if key not in ('process', 'ssh'):
- operation.arguments[key] = value.instantiate()
+ operation.arguments[key] = value.instantiate(None)
def _configure_common(operation):
@@ -133,6 +134,7 @@ def _get_process(operation):
if value is None:
return {}
_validate_type(value, dict, 'process')
+ value = OrderedDict(value)
for k, v in value.iteritems():
if k == 'eval_python':
value[k] = _coerce_bool(v, 'process.eval_python')
@@ -146,17 +148,19 @@ def _get_process(operation):
_validate_type(v, dict, 'process.env')
else:
context = ConsumptionContext.get_thread_local()
- context.validation.report('unsupported configuration: "process.{0}"'.format(k),
+ context.validation.report('unsupported configuration parameter: "process.{0}"'
+ .format(k),
level=validation.Issue.BETWEEN_TYPES)
return value
def _get_ssh(operation):
value = operation.configuration.get('ssh')._value \
- if 'process' in operation.configuration else None
+ if 'ssh' in operation.configuration else None
if value is None:
return {}
_validate_type(value, dict, 'ssh')
+ value = OrderedDict(value)
for k, v in value.iteritems():
if k == 'use_sudo':
value[k] = _coerce_bool(v, 'ssh.use_sudo')
@@ -176,7 +180,7 @@ def _get_ssh(operation):
_validate_type(v, basestring, 'ssh.address')
else:
context = ConsumptionContext.get_thread_local()
- context.validation.report('unsupported configuration: "ssh.{0}"'.format(k),
+ context.validation.report('unsupported configuration parameter: "ssh.{0}"'.format(k),
level=validation.Issue.BETWEEN_TYPES)
return value
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/orchestrator/workflow_runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflow_runner.py b/aria/orchestrator/workflow_runner.py
index 0c6321f..2d373c8 100644
--- a/aria/orchestrator/workflow_runner.py
+++ b/aria/orchestrator/workflow_runner.py
@@ -42,9 +42,10 @@ class WorkflowRunner(object):
executor=None, task_max_attempts=DEFAULT_TASK_MAX_ATTEMPTS,
task_retry_interval=DEFAULT_TASK_RETRY_INTERVAL):
"""
- Manages a single workflow execution on a given service
+ Manages a single workflow execution on a given service.
+
:param workflow_name: Workflow name
- :param service_id: Service id
+ :param service_id: Service ID
:param inputs: A key-value dict of inputs for the execution
:param model_storage: Model storage
:param resource_storage: Resource storage
@@ -64,7 +65,7 @@ class WorkflowRunner(object):
self._validate_workflow_exists_for_service()
- workflow_fn = self._get_workflow_fn()
+ workflow_fn = self._workflow_fn
execution = self._create_execution_model(inputs)
self._execution_id = execution.id
@@ -119,7 +120,7 @@ class WorkflowRunner(object):
else:
workflow_inputs = self.service.workflows[self._workflow_name].inputs
- execution.inputs = modeling_utils.create_parameters(inputs, workflow_inputs)
+ execution.inputs = modeling_utils.merge_parameter_values(inputs, workflow_inputs)
# TODO: these two following calls should execute atomically
self._validate_no_active_executions(execution)
self._model_storage.execution.put(execution)
@@ -136,10 +137,11 @@ class WorkflowRunner(object):
active_executions = [e for e in self.service.executions if e.is_active()]
if active_executions:
raise exceptions.ActiveExecutionsError(
- "Can't start execution; Service {0} has an active execution with id {1}"
+ "Can't start execution; Service {0} has an active execution with ID {1}"
.format(self.service.name, active_executions[0].id))
- def _get_workflow_fn(self):
+ @property
+ def _workflow_fn(self):
if self._workflow_name in builtin.BUILTIN_WORKFLOWS:
return import_fullname('{0}.{1}'.format(builtin.BUILTIN_WORKFLOWS_PATH_PREFIX,
self._workflow_name))
@@ -156,10 +158,10 @@ class WorkflowRunner(object):
sys.path.append(service_template_resources_path)
try:
- workflow_fn = import_fullname(workflow.implementation)
+ workflow_fn = import_fullname(workflow.function)
except ImportError:
raise exceptions.WorkflowImplementationNotFoundError(
- 'Could not find workflow {0} implementation at {1}'.format(
- self._workflow_name, workflow.implementation))
+ 'Could not find workflow {0} function at {1}'.format(
+ self._workflow_name, workflow.function))
return workflow_fn
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/orchestrator/workflows/api/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task.py b/aria/orchestrator/workflows/api/task.py
index aa6ac45..feacaf4 100644
--- a/aria/orchestrator/workflows/api/task.py
+++ b/aria/orchestrator/workflows/api/task.py
@@ -55,7 +55,28 @@ class BaseTask(object):
class OperationTask(BaseTask):
"""
- Represents an operation task in the task graph
+ Represents an operation task in the task graph.
+
+ :ivar name: formatted name (includes actor type, actor name, and interface/operation names)
+ :vartype name: basestring
+ :ivar actor: node or relationship
+ :vartype actor: :class:`aria.modeling.models.Node`|:class:`aria.modeling.models.Relationship`
+ :ivar interface_name: interface name on actor
+ :vartype interface_name: basestring
+ :ivar operation_name: operation name on interface
+ :vartype operation_name: basestring
+ :ivar plugin: plugin (or None for default plugin)
+ :vartype plugin: :class:`aria.modeling.models.Plugin`
+ :ivar function: path to Python function
+ :vartype function: basestring
+ :ivar arguments: arguments to send to Python function
+ :vartype arguments: {basestring, :class:`aria.modeling.models.Parameter`}
+ :ivar ignore_failure: whether to ignore failures
+ :vartype ignore_failure: bool
+ :ivar max_attempts: maximum number of attempts allowed in case of failure
+ :vartype max_attempts: int
+ :ivar retry_interval: interval between retries (in seconds)
+ :vartype retry_interval: int
"""
NAME_FORMAT = '{interface}:{operation}@{type}:{name}'
@@ -64,43 +85,61 @@ class OperationTask(BaseTask):
actor,
interface_name,
operation_name,
- inputs=None,
+ arguments=None,
+ ignore_failure=None,
max_attempts=None,
- retry_interval=None,
- ignore_failure=None):
+ retry_interval=None):
"""
- Do not call this constructor directly. Instead, use :meth:`for_node` or
- :meth:`for_relationship`.
+ :param actor: node or relationship
+ :type actor: :class:`aria.modeling.models.Node`|:class:`aria.modeling.models.Relationship`
+ :param interface_name: interface name on actor
+ :type interface_name: basestring
+ :param operation_name: operation name on interface
+ :type operation_name: basestring
+ :param arguments: override argument values
+ :type arguments: {basestring, object}
+ :param ignore_failure: override whether to ignore failures
+ :type ignore_failure: bool
+ :param max_attempts: override maximum number of attempts allowed in case of failure
+ :type max_attempts: int
+ :param retry_interval: override interval between retries (in seconds)
+ :type retry_interval: int
+ :raises aria.orchestrator.workflows.exceptions.OperationNotFoundException: if
+ ``interface_name`` and ``operation_name`` do not refer to an operation on the actor
"""
+
assert isinstance(actor, (models.Node, models.Relationship))
- super(OperationTask, self).__init__()
- self.actor = actor
- self.interface_name = interface_name
- self.operation_name = operation_name
- self.max_attempts = max_attempts or self.workflow_context._task_max_attempts
- self.retry_interval = retry_interval or self.workflow_context._task_retry_interval
- self.ignore_failure = \
- self.workflow_context._task_ignore_failure if ignore_failure is None else ignore_failure
- self.name = OperationTask.NAME_FORMAT.format(type=type(actor).__name__.lower(),
- name=actor.name,
- interface=self.interface_name,
- operation=self.operation_name)
+
# Creating OperationTask directly should raise an error when there is no
# interface/operation.
-
- if not has_operation(self.actor, self.interface_name, self.operation_name):
+ if not has_operation(actor, interface_name, operation_name):
raise exceptions.OperationNotFoundException(
- 'Could not find operation "{self.operation_name}" on interface '
- '"{self.interface_name}" for {actor_type} "{actor.name}"'.format(
- self=self,
+ 'Could not find operation "{operation_name}" on interface '
+ '"{interface_name}" for {actor_type} "{actor.name}"'.format(
+ operation_name=operation_name,
+ interface_name=interface_name,
actor_type=type(actor).__name__.lower(),
actor=actor)
)
+ super(OperationTask, self).__init__()
+
+ self.name = OperationTask.NAME_FORMAT.format(type=type(actor).__name__.lower(),
+ name=actor.name,
+ interface=interface_name,
+ operation=operation_name)
+ self.actor = actor
+ self.interface_name = interface_name
+ self.operation_name = operation_name
+ self.ignore_failure = \
+ self.workflow_context._task_ignore_failure if ignore_failure is None else ignore_failure
+ self.max_attempts = max_attempts or self.workflow_context._task_max_attempts
+ self.retry_interval = retry_interval or self.workflow_context._task_retry_interval
+
operation = self.actor.interfaces[self.interface_name].operations[self.operation_name]
self.plugin = operation.plugin
- self.inputs = modeling_utils.create_parameters(inputs or {}, operation.arguments)
- self.implementation = operation.function
+ self.function = operation.function
+ self.arguments = modeling_utils.merge_parameter_values(arguments, operation.arguments)
def __repr__(self):
return self.name
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/orchestrator/workflows/builtin/execute_operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/execute_operation.py b/aria/orchestrator/workflows/builtin/execute_operation.py
index 02a654a..437e584 100644
--- a/aria/orchestrator/workflows/builtin/execute_operation.py
+++ b/aria/orchestrator/workflows/builtin/execute_operation.py
@@ -69,7 +69,7 @@ def execute_operation(
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=operation_kwargs
+ arguments=operation_kwargs
)
)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py
index 0d6eb11..72d83ea 100644
--- a/aria/orchestrator/workflows/core/task.py
+++ b/aria/orchestrator/workflows/core/task.py
@@ -146,8 +146,8 @@ class OperationTask(BaseTask):
# Only non-stub tasks have these fields
plugin=api_task.plugin,
- implementation=api_task.implementation,
- inputs=api_task.inputs
+ function=api_task.function,
+ arguments=api_task.arguments
)
self._workflow_context.model.task.put(task_model)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/orchestrator/workflows/events_logging.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/events_logging.py b/aria/orchestrator/workflows/events_logging.py
index 236a55f..0c93b85 100644
--- a/aria/orchestrator/workflows/events_logging.py
+++ b/aria/orchestrator/workflows/events_logging.py
@@ -35,12 +35,12 @@ def _get_task_name(task):
@events.start_task_signal.connect
def _start_task_handler(task, **kwargs):
- # If the task has not implementation this is an empty task.
- if task.implementation:
+ # If the task has no function this is an empty task.
+ if task.function:
suffix = 'started...'
logger = task.context.logger.info
else:
- suffix = 'has no implementation'
+ suffix = 'has no function'
logger = task.context.logger.debug
logger('{name} {task.interface_name}.{task.operation_name} {suffix}'.format(
@@ -48,7 +48,7 @@ def _start_task_handler(task, **kwargs):
@events.on_success_task_signal.connect
def _success_task_handler(task, **kwargs):
- if not task.implementation:
+ if not task.function:
return
task.context.logger.info('{name} {task.interface_name}.{task.operation_name} successful'
.format(name=_get_task_name(task), task=task))
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/orchestrator/workflows/executor/base.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/base.py b/aria/orchestrator/workflows/executor/base.py
index c543278..7fece6f 100644
--- a/aria/orchestrator/workflows/executor/base.py
+++ b/aria/orchestrator/workflows/executor/base.py
@@ -33,10 +33,10 @@ class BaseExecutor(logger.LoggerMixin):
Execute a task
:param task: task to execute
"""
- if task.implementation:
+ if task.function:
self._execute(task)
else:
- # In this case the task is missing an implementation. This task still gets to an
+ # In this case the task is missing a function. This task still gets to an
# executor, but since there is nothing to run, we by default simply skip the execution
# itself.
self._task_started(task)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/orchestrator/workflows/executor/celery.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/celery.py b/aria/orchestrator/workflows/executor/celery.py
index bbddc25..3935b07 100644
--- a/aria/orchestrator/workflows/executor/celery.py
+++ b/aria/orchestrator/workflows/executor/celery.py
@@ -44,11 +44,11 @@ class CeleryExecutor(BaseExecutor):
def _execute(self, task):
self._tasks[task.id] = task
- inputs = dict(inp.unwrap() for inp in task.inputs.values())
- inputs['ctx'] = task.context
+ arguments = dict(arg.unwrap() for arg in task.arguments.values())
+ arguments['ctx'] = task.context
self._results[task.id] = self._app.send_task(
task.operation_mapping,
- kwargs=inputs,
+ kwargs=arguments,
task_id=task.id,
queue=self._get_queue(task))
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/orchestrator/workflows/executor/dry.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/dry.py b/aria/orchestrator/workflows/executor/dry.py
index 63ec392..8848df8 100644
--- a/aria/orchestrator/workflows/executor/dry.py
+++ b/aria/orchestrator/workflows/executor/dry.py
@@ -33,7 +33,7 @@ class DryExecutor(BaseExecutor):
task.status = task.STARTED
dry_msg = '<dry> {name} {task.interface_name}.{task.operation_name} {suffix}'
- logger = task.context.logger.info if task.implementation else task.context.logger.debug
+ logger = task.context.logger.info if task.function else task.context.logger.debug
if hasattr(task.actor, 'source_node'):
name = '{source_node.name}->{target_node.name}'.format(
@@ -41,11 +41,11 @@ class DryExecutor(BaseExecutor):
else:
name = task.actor.name
- if task.implementation:
+ if task.function:
logger(dry_msg.format(name=name, task=task, suffix='started...'))
logger(dry_msg.format(name=name, task=task, suffix='successful'))
else:
- logger(dry_msg.format(name=name, task=task, suffix='has no implementation'))
+ logger(dry_msg.format(name=name, task=task, suffix='has no function'))
# updating the task manually instead of calling self._task_succeeded(task),
# to avoid any side effects raising that event might cause
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/orchestrator/workflows/executor/process.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py
index f02e0a6..7472a2e 100644
--- a/aria/orchestrator/workflows/executor/process.py
+++ b/aria/orchestrator/workflows/executor/process.py
@@ -140,8 +140,8 @@ class ProcessExecutor(base.BaseExecutor):
def _create_arguments_dict(self, task):
return {
'task_id': task.id,
- 'implementation': task.implementation,
- 'operation_inputs': dict(inp.unwrap() for inp in task.inputs.values()),
+ 'function': task.function,
+ 'operation_arguments': dict(arg.unwrap() for arg in task.arguments.values()),
'port': self._server_port,
'context': task.context.serialization_dict,
}
@@ -290,8 +290,8 @@ def _main():
port = arguments['port']
messenger = _Messenger(task_id=task_id, port=port)
- implementation = arguments['implementation']
- operation_inputs = arguments['operation_inputs']
+ function = arguments['function']
+ operation_arguments = arguments['operation_arguments']
context_dict = arguments['context']
try:
@@ -302,11 +302,11 @@ def _main():
try:
messenger.started()
- task_func = imports.load_attribute(implementation)
+ task_func = imports.load_attribute(function)
aria.install_aria_extensions()
for decorate in process_executor.decorate():
task_func = decorate(task_func)
- task_func(ctx=ctx, **operation_inputs)
+ task_func(ctx=ctx, **operation_arguments)
ctx.close()
messenger.succeeded()
except BaseException as e:
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/orchestrator/workflows/executor/thread.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/thread.py b/aria/orchestrator/workflows/executor/thread.py
index f53362a..2c5ef16 100644
--- a/aria/orchestrator/workflows/executor/thread.py
+++ b/aria/orchestrator/workflows/executor/thread.py
@@ -60,9 +60,9 @@ class ThreadExecutor(BaseExecutor):
task = self._queue.get(timeout=1)
self._task_started(task)
try:
- task_func = imports.load_attribute(task.implementation)
- inputs = dict(inp.unwrap() for inp in task.inputs.values())
- task_func(ctx=task.context, **inputs)
+ task_func = imports.load_attribute(task.function)
+ arguments = dict(arg.unwrap() for arg in task.arguments.values())
+ task_func(ctx=task.context, **arguments)
self._task_succeeded(task)
except BaseException as e:
self._task_failed(task,
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/utils/formatting.py
----------------------------------------------------------------------
diff --git a/aria/utils/formatting.py b/aria/utils/formatting.py
index f96a4ce..b8d24cd 100644
--- a/aria/utils/formatting.py
+++ b/aria/utils/formatting.py
@@ -124,7 +124,9 @@ def string_list_as_string(strings):
Nice representation of a list of strings.
"""
- return ', '.join('"%s"' % safe_str(v) for v in strings)
+ if not strings:
+ return 'none'
+ return ', '.join('"{0}"'.format(safe_str(v)) for v in strings)
def pluralize(noun):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/aria/utils/validation.py
----------------------------------------------------------------------
diff --git a/aria/utils/validation.py b/aria/utils/validation.py
index a33f7a2..193cb33 100644
--- a/aria/utils/validation.py
+++ b/aria/utils/validation.py
@@ -17,6 +17,8 @@
Contains validation related utilities
"""
+from .formatting import string_list_as_string
+
class ValidatorMixin(object):
"""
@@ -82,8 +84,8 @@ def validate_function_arguments(func, func_kwargs):
for arg in non_default_args:
if arg not in func_kwargs:
raise ValueError(
- "The argument '{arg}' doest not have a default value, and it "
- "isn't passed to {func.__name__}".format(arg=arg, func=func))
+ 'The argument "{arg}" is not provided and does not have a default value for '
+ 'function "{func.__name__}"'.format(arg=arg, func=func))
# check if there are any extra kwargs
extra_kwargs = [arg for arg in func_kwargs.keys() if arg not in args]
@@ -91,5 +93,5 @@ def validate_function_arguments(func, func_kwargs):
# assert that the function has kwargs
if extra_kwargs and not has_kwargs:
raise ValueError("The following extra kwargs were supplied: {extra_kwargs}".format(
- extra_kwargs=extra_kwargs
+ extra_kwargs=string_list_as_string(extra_kwargs)
))
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml b/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
index abac03b..c1dc11d 100644
--- a/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
+++ b/extensions/aria_extension_tosca/profiles/aria-1.0/aria-1.0.yaml
@@ -52,10 +52,10 @@ policy_types:
should be inherited and extended with additional properties.
derived_from: tosca.policies.Root
properties:
- function:
+ implementation:
description: >-
- The interpretation of the function string depends on the orchestrator. In ARIA it is the
- full path to a Python @workflow function that generates a task graph based on the service
- topology.
+ The interpretation of the implementation string depends on the orchestrator. In ARIA it is
+ the full path to a Python @workflow function that generates a task graph based on the
+ service topology.
type: string
required: true
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
index 0b04fdc..c88bf41 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
@@ -378,7 +378,7 @@ def create_operation_template_model(context, service_template, operation):
implementation = operation.implementation
if implementation is not None:
primary = implementation.primary
- set_implementation(context, service_template, operation, model, primary)
+ extract_implementation_primary(context, service_template, operation, model, primary)
relationship_edge = operation._get_extensions(context).get('relationship_edge')
if relationship_edge is not None:
if relationship_edge == 'source':
@@ -392,6 +392,8 @@ def create_operation_template_model(context, service_template, operation):
for dependency in dependencies:
key, value = split_prefix(dependency)
if key is not None:
+ # Special ARIA prefix: signifies configuration parameters
+
# Parse as YAML
try:
value = yaml.load(value)
@@ -512,15 +514,13 @@ def create_workflow_operation_template_model(context, service_template, policy):
properties = policy._get_property_values(context)
for prop_name, prop in properties.iteritems():
- if prop_name == 'function':
+ if prop_name == 'implementation':
model.function = prop.value
- elif prop_name == 'dependencies':
- model.dependencies = prop.value
else:
- model.configuration[prop_name] = Parameter(name=prop_name, # pylint: disable=unexpected-keyword-arg
- type_name=prop.type,
- value=prop.value,
- description=prop.description)
+ model.inputs[prop_name] = Parameter(name=prop_name, # pylint: disable=unexpected-keyword-arg
+ type_name=prop.type,
+ value=prop.value,
+ description=prop.description)
return model
@@ -667,7 +667,7 @@ def split_prefix(string):
split = IMPLEMENTATION_PREFIX_REGEX.split(string, 1)
if len(split) < 2:
return None, None
- return split[0].strip(), split[1].lstrip()
+ return split[0].strip(), split[1].strip()
def set_nested(the_dict, keys, value):
@@ -693,7 +693,7 @@ def set_nested(the_dict, keys, value):
set_nested(the_dict[key], keys, value)
-def set_implementation(context, service_template, presentation, model, primary):
+def extract_implementation_primary(context, service_template, presentation, model, primary):
prefix, postfix = split_prefix(primary)
if prefix:
# Special ARIA prefix
@@ -706,5 +706,5 @@ def set_implementation(context, service_template, presentation, model, primary):
locator=presentation._get_child_locator('properties', 'implementation'),
level=Issue.BETWEEN_TYPES)
else:
- # Standard TOSCA artifact
+ # Standard TOSCA artifact with default plugin
model.implementation = primary
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/extensions/aria_extension_tosca/simple_v1_0/modeling/functions.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/functions.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/functions.py
index 7089ed9..7be5bf6 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/functions.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/functions.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from cStringIO import StringIO
+from StringIO import StringIO # Note: cStringIO does not support Unicode
import re
from aria.utils.collections import FrozenList
@@ -69,7 +69,9 @@ class Concat(Function):
e, final = evaluate(e, final, container_holder)
if e is not None:
value.write(unicode(e))
- value = value.getvalue()
+ value = value.getvalue() or u''
+ from aria.utils.console import puts
+ puts(safe_repr(value))
return Evaluation(value, final)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/modeling/test_models.py
----------------------------------------------------------------------
diff --git a/tests/modeling/test_models.py b/tests/modeling/test_models.py
index 57511dd..df3aebd 100644
--- a/tests/modeling/test_models.py
+++ b/tests/modeling/test_models.py
@@ -755,7 +755,7 @@ class TestTask(object):
@pytest.mark.parametrize(
'is_valid, status, due_at, started_at, ended_at, max_attempts, attempts_count, '
- 'retry_interval, ignore_failure, name, operation_mapping, inputs, plugin_id',
+ 'retry_interval, ignore_failure, name, operation_mapping, arguments, plugin_id',
[
(False, m_cls, now, now, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
(False, Task.STARTED, m_cls, now, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
@@ -784,7 +784,7 @@ class TestTask(object):
)
def test_task_model_creation(self, execution_storage, is_valid, status, due_at, started_at,
ended_at, max_attempts, attempts_count, retry_interval,
- ignore_failure, name, operation_mapping, inputs, plugin_id):
+ ignore_failure, name, operation_mapping, arguments, plugin_id):
task = _test_model(
is_valid=is_valid,
storage=execution_storage,
@@ -800,8 +800,8 @@ class TestTask(object):
retry_interval=retry_interval,
ignore_failure=ignore_failure,
name=name,
- implementation=operation_mapping,
- inputs=inputs,
+ function=operation_mapping,
+ arguments=arguments,
plugin_fk=plugin_id,
))
if is_valid:
@@ -813,8 +813,8 @@ class TestTask(object):
def create_task(max_attempts):
Task(execution_fk='eid',
name='name',
- implementation='',
- inputs={},
+ function='',
+ arguments={},
max_attempts=max_attempts)
create_task(max_attempts=1)
create_task(max_attempts=2)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index 7dbdd04..eec75da 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -97,7 +97,7 @@ def test_node_operation_task_execution(ctx, thread_executor, dataholder):
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=arguments
+ arguments=arguments
)
)
@@ -115,8 +115,8 @@ def test_node_operation_task_execution(ctx, thread_executor, dataholder):
)
operations = interface.operations
assert len(operations) == 1
- assert dataholder['implementation'] == operations.values()[0].function # pylint: disable=no-member
- assert dataholder['inputs']['putput'] is True
+ assert dataholder['function'] == operations.values()[0].function # pylint: disable=no-member
+ assert dataholder['arguments']['putput'] is True
# Context based attributes (sugaring)
assert dataholder['template_name'] == node.node_template.name
@@ -147,7 +147,7 @@ def test_relationship_operation_task_execution(ctx, thread_executor, dataholder)
relationship,
interface_name=interface_name,
operation_name=operation_name,
- inputs=arguments
+ arguments=arguments
)
)
@@ -159,8 +159,8 @@ def test_relationship_operation_task_execution(ctx, thread_executor, dataholder)
assert dataholder['actor_name'] == relationship.name
assert interface_name in dataholder['task_name']
operations = interface.operations
- assert dataholder['implementation'] == operations.values()[0].function # pylint: disable=no-member
- assert dataholder['inputs']['putput'] is True
+ assert dataholder['function'] == operations.values()[0].function # pylint: disable=no-member
+ assert dataholder['arguments']['putput'] is True
# Context based attributes (sugaring)
dependency_node_template = ctx.model.node_template.get_by_name(
@@ -252,7 +252,7 @@ def test_plugin_workdir(ctx, thread_executor, tmpdir):
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=arguments))
+ arguments=arguments))
execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
expected_file = tmpdir.join('workdir', 'plugins', str(ctx.service.id),
@@ -301,7 +301,7 @@ def test_node_operation_logging(ctx, executor):
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=arguments
+ arguments=arguments
)
)
@@ -334,7 +334,7 @@ def test_relationship_operation_logging(ctx, executor):
relationship,
interface_name=interface_name,
operation_name=operation_name,
- inputs=arguments
+ arguments=arguments
)
)
@@ -348,15 +348,15 @@ def test_attribute_consumption(ctx, executor, dataholder):
source_node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
- inputs = {'dict_': {'key': 'value'},
- 'set_test_dict': {'key2': 'value2'}}
+ arguments = {'dict_': {'key': 'value'},
+ 'set_test_dict': {'key2': 'value2'}}
interface = mock.models.create_interface(
source_node.service,
node_int_name,
node_op_name,
operation_kwargs=dict(
- implementation=op_path(attribute_altering_operation, module_path=__name__),
- inputs=inputs)
+ function=op_path(attribute_altering_operation, module_path=__name__),
+ arguments=arguments)
)
source_node.interfaces[interface.name] = interface
ctx.model.node.update(source_node)
@@ -371,8 +371,8 @@ def test_attribute_consumption(ctx, executor, dataholder):
rel_int_name,
rel_op_name,
operation_kwargs=dict(
- implementation=op_path(attribute_consuming_operation, module_path=__name__),
- inputs={'holder_path': dataholder.path}
+ function=op_path(attribute_consuming_operation, module_path=__name__),
+ arguments={'holder_path': dataholder.path}
)
)
relationship.interfaces[interface.name] = interface
@@ -386,7 +386,7 @@ def test_attribute_consumption(ctx, executor, dataholder):
source_node,
interface_name=node_int_name,
operation_name=node_op_name,
- inputs=inputs
+ arguments=arguments
),
api.task.OperationTask(
relationship,
@@ -410,8 +410,7 @@ def test_attribute_consumption(ctx, executor, dataholder):
dataholder['key2'] == 'value2'
-def _assert_loggins(ctx, inputs):
-
+def _assert_loggins(ctx, arguments):
# The logs should contain the following: Workflow Start, Operation Start, custom operation
# log string (op_start), custom operation log string (op_end), Operation End, Workflow End.
@@ -431,11 +430,11 @@ def _assert_loggins(ctx, inputs):
assert all(l.execution == execution for l in logs)
assert all(l in logs and l.task == task for l in task.logs)
- op_start_log = [l for l in logs if inputs['op_start'] in l.msg and l.level.lower() == 'info']
+ op_start_log = [l for l in logs if arguments['op_start'] in l.msg and l.level.lower() == 'info']
assert len(op_start_log) == 1
op_start_log = op_start_log[0]
- op_end_log = [l for l in logs if inputs['op_end'] in l.msg and l.level.lower() == 'debug']
+ op_end_log = [l for l in logs if arguments['op_end'] in l.msg and l.level.lower() == 'debug']
assert len(op_end_log) == 1
op_end_log = op_end_log[0]
@@ -444,10 +443,10 @@ def _assert_loggins(ctx, inputs):
@operation
def logged_operation(ctx, **_):
- ctx.logger.info(ctx.task.inputs['op_start'].value)
+ ctx.logger.info(ctx.task.arguments['op_start'].value)
# enables to check the relation between the created_at field properly
time.sleep(1)
- ctx.logger.debug(ctx.task.inputs['op_end'].value)
+ ctx.logger.debug(ctx.task.arguments['op_end'].value)
@operation
@@ -476,8 +475,8 @@ def operation_common(ctx, holder):
holder['actor_name'] = ctx.task.actor.name
holder['task_name'] = ctx.task.name
- holder['implementation'] = ctx.task.implementation
- holder['inputs'] = dict(i.unwrap() for i in ctx.task.inputs.values())
+ holder['function'] = ctx.task.function
+ holder['arguments'] = dict(i.unwrap() for i in ctx.task.arguments.values())
@operation
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/orchestrator/context/test_serialize.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_serialize.py b/tests/orchestrator/context/test_serialize.py
index 946b0bd..4db7bf4 100644
--- a/tests/orchestrator/context/test_serialize.py
+++ b/tests/orchestrator/context/test_serialize.py
@@ -60,8 +60,8 @@ def _mock_workflow(ctx, graph):
def _mock_operation(ctx):
# We test several things in this operation
# ctx.task, ctx.node, etc... tell us that the model storage was properly re-created
- # a correct ctx.task.implementation tells us we kept the correct task_id
- assert ctx.task.implementation == _operation_mapping()
+ # a correct ctx.task.function tells us we kept the correct task_id
+ assert ctx.task.function == _operation_mapping()
# a correct ctx.node.name tells us we kept the correct actor_id
assert ctx.node.name == mock.models.DEPENDENCY_NODE_NAME
# a correct ctx.name tells us we kept the correct name
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index 26a15e5..326ce83 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -105,7 +105,7 @@ def test_host_ip(workflow_context, executor, dataholder):
dependency_node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=arguments
+ arguments=arguments
)
)
@@ -136,7 +136,7 @@ def test_relationship_tool_belt(workflow_context, executor, dataholder):
relationship,
interface_name=interface_name,
operation_name=operation_name,
- inputs=arguments
+ arguments=arguments
)
)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/orchestrator/execution_plugin/test_local.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_local.py b/tests/orchestrator/execution_plugin/test_local.py
index 0dfd512..d792a57 100644
--- a/tests/orchestrator/execution_plugin/test_local.py
+++ b/tests/orchestrator/execution_plugin/test_local.py
@@ -199,7 +199,7 @@ if __name__ == '__main__':
props = self._run(
executor, workflow_context,
script_path=script_path,
- inputs={'key': 'value'})
+ arguments={'key': 'value'})
assert props['key'].value == 'value'
@pytest.mark.parametrize(
@@ -460,10 +460,10 @@ if __name__ == '__main__':
script_path,
process=None,
env_var='value',
- inputs=None):
+ arguments=None):
local_script_path = script_path
script_path = os.path.basename(local_script_path) if local_script_path else ''
- arguments = inputs or {}
+ arguments = arguments or {}
process = process or {}
if script_path:
workflow_context.resource.service.upload(
@@ -495,7 +495,7 @@ if __name__ == '__main__':
node,
interface_name='test',
operation_name='op',
- inputs=arguments))
+ arguments=arguments))
return graph
tasks_graph = mock_workflow(ctx=workflow_context) # pylint: disable=no-value-for-parameter
eng = engine.Engine(
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/orchestrator/execution_plugin/test_ssh.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_ssh.py b/tests/orchestrator/execution_plugin/test_ssh.py
index a369f8f..899a007 100644
--- a/tests/orchestrator/execution_plugin/test_ssh.py
+++ b/tests/orchestrator/execution_plugin/test_ssh.py
@@ -243,13 +243,13 @@ class TestWithActualSSHServer(object):
ops = []
for test_operation in test_operations:
- op_inputs = arguments.copy()
- op_inputs['test_operation'] = test_operation
+ op_arguments = arguments.copy()
+ op_arguments['test_operation'] = test_operation
ops.append(api.task.OperationTask(
node,
interface_name='test',
operation_name='op',
- inputs=op_inputs))
+ arguments=op_arguments))
graph.sequence(*ops)
return graph
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/orchestrator/test_workflow_runner.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/test_workflow_runner.py b/tests/orchestrator/test_workflow_runner.py
index 405cb80..3646339 100644
--- a/tests/orchestrator/test_workflow_runner.py
+++ b/tests/orchestrator/test_workflow_runner.py
@@ -48,8 +48,7 @@ def test_missing_workflow_implementation(service, request):
workflow = models.Operation(
name='test_workflow',
service=service,
- implementation='nonexistent.workflow.implementation',
- inputs={})
+ function='nonexistent.workflow.implementation')
service.workflows['test_workflow'] = workflow
with pytest.raises(exceptions.WorkflowImplementationNotFoundError):
@@ -259,8 +258,9 @@ def _setup_mock_workflow_in_service(request, inputs=None):
workflow = models.Operation(
name=mock_workflow_name,
service=service,
- implementation='workflow.mock_workflow',
- inputs=inputs or {})
+ function='workflow.mock_workflow',
+ inputs=inputs or {},
+ arguments=inputs or {})
service.workflows[mock_workflow_name] = workflow
return mock_workflow_name
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/orchestrator/workflows/api/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/api/test_task.py b/tests/orchestrator/workflows/api/test_task.py
index d57e424..9d91b6b 100644
--- a/tests/orchestrator/workflows/api/test_task.py
+++ b/tests/orchestrator/workflows/api/test_task.py
@@ -66,7 +66,7 @@ class TestOperationTask(object):
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=arguments,
+ arguments=arguments,
max_attempts=max_attempts,
retry_interval=retry_interval,
ignore_failure=ignore_failure)
@@ -77,9 +77,9 @@ class TestOperationTask(object):
interface=interface_name,
operation=operation_name
)
- assert api_task.implementation == 'op_path'
+ assert api_task.function == 'op_path'
assert api_task.actor == node
- assert api_task.inputs['test_input'].value is True
+ assert api_task.arguments['test_input'].value is True
assert api_task.retry_interval == retry_interval
assert api_task.max_attempts == max_attempts
assert api_task.ignore_failure == ignore_failure
@@ -113,7 +113,7 @@ class TestOperationTask(object):
relationship,
interface_name=interface_name,
operation_name=operation_name,
- inputs=arguments,
+ arguments=arguments,
max_attempts=max_attempts,
retry_interval=retry_interval)
@@ -123,9 +123,9 @@ class TestOperationTask(object):
interface=interface_name,
operation=operation_name
)
- assert api_task.implementation == 'op_path'
+ assert api_task.function == 'op_path'
assert api_task.actor == relationship
- assert api_task.inputs['test_input'].value is True
+ assert api_task.arguments['test_input'].value is True
assert api_task.retry_interval == retry_interval
assert api_task.max_attempts == max_attempts
assert api_task.plugin.name == 'test_plugin'
@@ -158,7 +158,7 @@ class TestOperationTask(object):
relationship,
interface_name=interface_name,
operation_name=operation_name,
- inputs=arguments,
+ arguments=arguments,
max_attempts=max_attempts,
retry_interval=retry_interval)
@@ -168,9 +168,9 @@ class TestOperationTask(object):
interface=interface_name,
operation=operation_name
)
- assert api_task.implementation == 'op_path'
+ assert api_task.function == 'op_path'
assert api_task.actor == relationship
- assert api_task.inputs['test_input'].value is True
+ assert api_task.arguments['test_input'].value is True
assert api_task.retry_interval == retry_interval
assert api_task.max_attempts == max_attempts
assert api_task.plugin.name == 'test_plugin'
@@ -198,7 +198,7 @@ class TestOperationTask(object):
interface_name=interface_name,
operation_name=operation_name)
- assert task.inputs == {}
+ assert task.arguments == {}
assert task.retry_interval == ctx._task_retry_interval
assert task.max_attempts == ctx._task_max_attempts
assert task.ignore_failure == ctx._task_ignore_failure
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index 43ec9f1..6d2836c 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -57,7 +57,7 @@ class BaseTest(object):
@staticmethod
def _op(ctx,
func,
- inputs=None,
+ arguments=None,
max_attempts=None,
retry_interval=None,
ignore_failure=None):
@@ -65,9 +65,9 @@ class BaseTest(object):
interface_name = 'aria.interfaces.lifecycle'
operation_kwargs = dict(function='{name}.{func.__name__}'.format(
name=__name__, func=func))
- if inputs:
+ if arguments:
# the operation has to declare the arguments before those may be passed
- operation_kwargs['arguments'] = inputs
+ operation_kwargs['arguments'] = arguments
operation_name = 'create'
interface = mock.models.create_interface(node.service, interface_name, operation_name,
operation_kwargs=operation_kwargs)
@@ -77,7 +77,7 @@ class BaseTest(object):
node,
interface_name='aria.interfaces.lifecycle',
operation_name=operation_name,
- inputs=inputs or {},
+ arguments=arguments,
max_attempts=max_attempts,
retry_interval=retry_interval,
ignore_failure=ignore_failure,
@@ -189,8 +189,8 @@ class TestEngine(BaseTest):
def test_two_tasks_execution_order(self, workflow_context, executor):
@workflow
def mock_workflow(ctx, graph):
- op1 = self._op(ctx, func=mock_ordered_task, inputs={'counter': 1})
- op2 = self._op(ctx, func=mock_ordered_task, inputs={'counter': 2})
+ op1 = self._op(ctx, func=mock_ordered_task, arguments={'counter': 1})
+ op2 = self._op(ctx, func=mock_ordered_task, arguments={'counter': 2})
graph.sequence(op1, op2)
self._execute(
workflow_func=mock_workflow,
@@ -204,9 +204,9 @@ class TestEngine(BaseTest):
def test_stub_and_subworkflow_execution(self, workflow_context, executor):
@workflow
def sub_workflow(ctx, graph):
- op1 = self._op(ctx, func=mock_ordered_task, inputs={'counter': 1})
+ op1 = self._op(ctx, func=mock_ordered_task, arguments={'counter': 1})
op2 = api.task.StubTask()
- op3 = self._op(ctx, func=mock_ordered_task, inputs={'counter': 2})
+ op3 = self._op(ctx, func=mock_ordered_task, arguments={'counter': 2})
graph.sequence(op1, op2, op3)
@workflow
@@ -229,7 +229,7 @@ class TestCancel(BaseTest):
@workflow
def mock_workflow(ctx, graph):
operations = (
- self._op(ctx, func=mock_sleep_task, inputs=dict(seconds=0.1))
+ self._op(ctx, func=mock_sleep_task, arguments=dict(seconds=0.1))
for _ in range(number_of_tasks)
)
return graph.sequence(*operations)
@@ -270,7 +270,7 @@ class TestRetries(BaseTest):
@workflow
def mock_workflow(ctx, graph):
op = self._op(ctx, func=mock_conditional_failure_task,
- inputs={'failure_count': 1},
+ arguments={'failure_count': 1},
max_attempts=2)
graph.add_tasks(op)
self._execute(
@@ -286,7 +286,7 @@ class TestRetries(BaseTest):
@workflow
def mock_workflow(ctx, graph):
op = self._op(ctx, func=mock_conditional_failure_task,
- inputs={'failure_count': 2},
+ arguments={'failure_count': 2},
max_attempts=2)
graph.add_tasks(op)
with pytest.raises(exceptions.ExecutorException):
@@ -303,7 +303,7 @@ class TestRetries(BaseTest):
@workflow
def mock_workflow(ctx, graph):
op = self._op(ctx, func=mock_conditional_failure_task,
- inputs={'failure_count': 1},
+ arguments={'failure_count': 1},
max_attempts=3)
graph.add_tasks(op)
self._execute(
@@ -319,7 +319,7 @@ class TestRetries(BaseTest):
@workflow
def mock_workflow(ctx, graph):
op = self._op(ctx, func=mock_conditional_failure_task,
- inputs={'failure_count': 2},
+ arguments={'failure_count': 2},
max_attempts=3)
graph.add_tasks(op)
self._execute(
@@ -335,7 +335,7 @@ class TestRetries(BaseTest):
@workflow
def mock_workflow(ctx, graph):
op = self._op(ctx, func=mock_conditional_failure_task,
- inputs={'failure_count': 1},
+ arguments={'failure_count': 1},
max_attempts=-1)
graph.add_tasks(op)
self._execute(
@@ -361,7 +361,7 @@ class TestRetries(BaseTest):
@workflow
def mock_workflow(ctx, graph):
op = self._op(ctx, func=mock_conditional_failure_task,
- inputs={'failure_count': 1},
+ arguments={'failure_count': 1},
max_attempts=2,
retry_interval=retry_interval)
graph.add_tasks(op)
@@ -382,7 +382,7 @@ class TestRetries(BaseTest):
def mock_workflow(ctx, graph):
op = self._op(ctx, func=mock_conditional_failure_task,
ignore_failure=True,
- inputs={'failure_count': 100},
+ arguments={'failure_count': 100},
max_attempts=100)
graph.add_tasks(op)
self._execute(
@@ -405,7 +405,7 @@ class TestTaskRetryAndAbort(BaseTest):
@workflow
def mock_workflow(ctx, graph):
op = self._op(ctx, func=mock_task_retry,
- inputs={'message': self.message},
+ arguments={'message': self.message},
retry_interval=default_retry_interval,
max_attempts=2)
graph.add_tasks(op)
@@ -429,8 +429,8 @@ class TestTaskRetryAndAbort(BaseTest):
@workflow
def mock_workflow(ctx, graph):
op = self._op(ctx, func=mock_task_retry,
- inputs={'message': self.message,
- 'retry_interval': custom_retry_interval},
+ arguments={'message': self.message,
+ 'retry_interval': custom_retry_interval},
retry_interval=default_retry_interval,
max_attempts=2)
graph.add_tasks(op)
@@ -452,7 +452,7 @@ class TestTaskRetryAndAbort(BaseTest):
@workflow
def mock_workflow(ctx, graph):
op = self._op(ctx, func=mock_task_abort,
- inputs={'message': self.message},
+ arguments={'message': self.message},
retry_interval=100,
max_attempts=100)
graph.add_tasks(op)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/orchestrator/workflows/core/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task.py b/tests/orchestrator/workflows/core/test_task.py
index 1ba6422..a717e19 100644
--- a/tests/orchestrator/workflows/core/test_task.py
+++ b/tests/orchestrator/workflows/core/test_task.py
@@ -103,9 +103,9 @@ class TestOperationTask(object):
assert storage_task.actor == core_task.context.node._original_model
assert core_task.model_task == storage_task
assert core_task.name == api_task.name
- assert core_task.implementation == api_task.implementation
+ assert core_task.function == api_task.function
assert core_task.actor == api_task.actor == node
- assert core_task.inputs == api_task.inputs == storage_task.inputs
+ assert core_task.arguments == api_task.arguments == storage_task.arguments
assert core_task.plugin == storage_plugin
def test_relationship_operation_task_creation(self, ctx):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
index 1633d4a..5dd2855 100644
--- a/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
+++ b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
@@ -108,9 +108,9 @@ def test_task_graph_into_execution_graph(tmpdir):
def _assert_execution_is_api_task(execution_task, api_task):
assert execution_task.id == api_task.id
assert execution_task.name == api_task.name
- assert execution_task.implementation == api_task.implementation
+ assert execution_task.function == api_task.function
assert execution_task.actor == api_task.actor
- assert execution_task.inputs == api_task.inputs
+ assert execution_task.arguments == api_task.arguments
def _get_task_by_name(task_name, graph):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/orchestrator/workflows/executor/__init__.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/__init__.py b/tests/orchestrator/workflows/executor/__init__.py
index 41c4b2e..ac6d325 100644
--- a/tests/orchestrator/workflows/executor/__init__.py
+++ b/tests/orchestrator/workflows/executor/__init__.py
@@ -25,11 +25,11 @@ class MockTask(object):
INFINITE_RETRIES = models.Task.INFINITE_RETRIES
- def __init__(self, implementation, inputs=None, plugin=None, storage=None):
- self.implementation = self.name = implementation
+ def __init__(self, function, arguments=None, plugin=None, storage=None):
+ self.function = self.name = function
self.plugin_fk = plugin.id if plugin else None
self.plugin = plugin or None
- self.inputs = inputs or {}
+ self.arguments = arguments or {}
self.states = []
self.exception = None
self.id = str(uuid.uuid4())
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/orchestrator/workflows/executor/test_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_executor.py b/tests/orchestrator/workflows/executor/test_executor.py
index 29cb0e8..9ddaef4 100644
--- a/tests/orchestrator/workflows/executor/test_executor.py
+++ b/tests/orchestrator/workflows/executor/test_executor.py
@@ -38,16 +38,16 @@ import tests
from . import MockTask
-def _get_implementation(func):
+def _get_function(func):
return '{module}.{func.__name__}'.format(module=__name__, func=func)
def execute_and_assert(executor, storage=None):
expected_value = 'value'
- successful_task = MockTask(_get_implementation(mock_successful_task), storage=storage)
- failing_task = MockTask(_get_implementation(mock_failing_task), storage=storage)
- task_with_inputs = MockTask(_get_implementation(mock_task_with_input),
- inputs={'input': models.Parameter.wrap('input', 'value')},
+ successful_task = MockTask(_get_function(mock_successful_task), storage=storage)
+ failing_task = MockTask(_get_function(mock_failing_task), storage=storage)
+ task_with_inputs = MockTask(_get_function(mock_task_with_input),
+ arguments={'input': models.Parameter.wrap('input', 'value')},
storage=storage)
for task in [successful_task, failing_task, task_with_inputs]:
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/orchestrator/workflows/executor/test_process_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor.py b/tests/orchestrator/workflows/executor/test_process_executor.py
index e6333e8..058190e 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor.py
@@ -66,7 +66,7 @@ class TestProcessExecutor(object):
def test_closed(self, executor):
executor.close()
with pytest.raises(RuntimeError) as exc_info:
- executor.execute(task=MockTask(implementation='some.implementation'))
+ executor.execute(task=MockTask(function='some.function'))
assert 'closed' in exc_info.value.message
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
index 8c3f72a..6163c09 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
@@ -93,12 +93,12 @@ def _test(context, executor, lock_files, func, dataholder, expected_failure):
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=arguments),
+ arguments=arguments),
api.task.OperationTask(
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=arguments)
+ arguments=arguments)
)
signal = events.on_failure_task_signal
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/orchestrator/workflows/executor/test_process_executor_extension.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_extension.py b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
index acca0bf..e4944df 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_extension.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
@@ -27,7 +27,7 @@ from tests import storage
def test_decorate_extension(context, executor):
- arguments = {'input1': 1, 'input2': 2}
+ arguments = {'arg1': 1, 'arg2': 2}
def get_node(ctx):
return ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
@@ -49,15 +49,15 @@ def test_decorate_extension(context, executor):
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=arguments)
+ arguments=arguments)
graph.add_tasks(task)
return graph
graph = mock_workflow(ctx=context) # pylint: disable=no-value-for-parameter
eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
eng.execute()
out = get_node(context).attributes.get('out').value
- assert out['wrapper_inputs'] == arguments
- assert out['function_inputs'] == arguments
+ assert out['wrapper_arguments'] == arguments
+ assert out['function_arguments'] == arguments
@extension.process_executor
@@ -65,16 +65,16 @@ class MockProcessExecutorExtension(object):
def decorate(self):
def decorator(function):
- def wrapper(ctx, **operation_inputs):
- ctx.node.attributes['out'] = {'wrapper_inputs': operation_inputs}
- function(ctx=ctx, **operation_inputs)
+ def wrapper(ctx, **operation_arguments):
+ ctx.node.attributes['out'] = {'wrapper_arguments': operation_arguments}
+ function(ctx=ctx, **operation_arguments)
return wrapper
return decorator
@operation
-def _mock_operation(ctx, **operation_inputs):
- ctx.node.attributes['out']['function_inputs'] = operation_inputs
+def _mock_operation(ctx, **operation_arguments):
+ ctx.node.attributes['out']['function_arguments'] = operation_arguments
@pytest.fixture
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
index c766fe4..2d80a3b 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
@@ -62,19 +62,19 @@ def test_refresh_state_of_tracked_attributes(context, executor):
def test_apply_tracked_changes_during_an_operation(context, executor):
- inputs = {
+ arguments = {
'committed': {'some': 'new', 'properties': 'right here'},
'changed_but_refreshed': {'some': 'newer', 'properties': 'right there'}
}
expected_initial = context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME).attributes
out = _run_workflow(
- context=context, executor=executor, op_func=_mock_updating_operation, inputs=inputs)
+ context=context, executor=executor, op_func=_mock_updating_operation, arguments=arguments)
expected_after_update = expected_initial.copy()
- expected_after_update.update(inputs['committed']) # pylint: disable=no-member
+ expected_after_update.update(arguments['committed']) # pylint: disable=no-member
expected_after_change = expected_after_update.copy()
- expected_after_change.update(inputs['changed_but_refreshed']) # pylint: disable=no-member
+ expected_after_change.update(arguments['changed_but_refreshed']) # pylint: disable=no-member
assert out['initial'] == expected_initial
assert out['after_update'] == expected_after_update
@@ -82,13 +82,13 @@ def test_apply_tracked_changes_during_an_operation(context, executor):
assert out['after_refresh'] == expected_after_change
-def _run_workflow(context, executor, op_func, inputs=None):
+def _run_workflow(context, executor, op_func, arguments=None):
@workflow
def mock_workflow(ctx, graph):
node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
interface_name = 'test_interface'
operation_name = 'operation'
- wf_arguments = inputs or {}
+ wf_arguments = arguments or {}
interface = mock.models.create_interface(
ctx.service,
interface_name,
@@ -101,7 +101,7 @@ def _run_workflow(context, executor, op_func, inputs=None):
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=wf_arguments)
+ arguments=wf_arguments)
graph.add_tasks(task)
return graph
graph = mock_workflow(ctx=context) # pylint: disable=no-value-for-parameter
[2/5] incubator-ariatosca git commit: ARIA-149 Enhance operation
configuration
Posted by em...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/extensions/aria_extension_tosca/simple_v1_0/modeling/properties.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/properties.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/properties.py
deleted file mode 100644
index 9c3ea42..0000000
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/properties.py
+++ /dev/null
@@ -1,202 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from aria.utils.collections import merge, deepcopy_with_locators, OrderedDict
-from aria.parser.presentation import Value
-from aria.parser.validation import Issue
-
-from .data_types import coerce_value
-
-#
-# ArtifactType, DataType, CapabilityType, RelationshipType, NodeType, GroupType, PolicyType
-#
-
-# Works on properties, parameters, inputs, and attributes
-def get_inherited_property_definitions(context, presentation, field_name, for_presentation=None):
- """
- Returns our property definitions added on top of those of our parent, if we have one
- (recursively).
-
- Allows overriding all aspects of parent properties except data type.
- """
-
- # Get definitions from parent
- # If we inherit from a primitive, it does not have a parent:
- parent = presentation._get_parent(context) if hasattr(presentation, '_get_parent') else None
- definitions = get_inherited_property_definitions(context, parent, field_name,
- for_presentation=presentation) \
- if parent is not None else OrderedDict()
-
- # Add/merge our definitions
- # If we inherit from a primitive, it does not have our field
- our_definitions = getattr(presentation, field_name, None)
- if our_definitions:
- our_definitions_clone = OrderedDict()
- for name, our_definition in our_definitions.iteritems():
- our_definitions_clone[name] = our_definition._clone(for_presentation)
- our_definitions = our_definitions_clone
- merge_property_definitions(context, presentation, definitions, our_definitions, field_name)
-
- for definition in definitions.itervalues():
- definition._reset_method_cache()
-
- return definitions
-
-#
-# NodeTemplate, RelationshipTemplate, GroupTemplate, PolicyTemplate
-#
-
-def get_assigned_and_defined_property_values(context, presentation, field_name='property',
- field_name_plural='properties'):
- """
- Returns the assigned property values while making sure they are defined in our type.
-
- The property definition's default value, if available, will be used if we did not assign it.
-
- Makes sure that required properties indeed end up with a value.
- """
-
- values = OrderedDict()
-
- the_type = presentation._get_type(context)
- assignments = getattr(presentation, field_name_plural)
- get_fn_name = '_get_{0}'.format(field_name_plural)
- definitions = getattr(the_type, get_fn_name)(context) if the_type is not None else None
-
- # Fill in our assignments, but make sure they are defined
- if assignments:
- for name, value in assignments.iteritems():
- if (definitions is not None) and (name in definitions):
- definition = definitions[name]
- values[name] = coerce_property_value(context, value, definition, value.value)
- else:
- context.validation.report('assignment to undefined {0} "{1}" in "{2}"'
- .format(field_name, name, presentation._fullname),
- locator=value._locator, level=Issue.BETWEEN_TYPES)
-
- # Fill in defaults from the definitions
- if definitions:
- for name, definition in definitions.iteritems():
- if values.get(name) is None:
- values[name] = coerce_property_value(context, presentation, definition,
- definition.default)
-
- validate_required_values(context, presentation, values, definitions)
-
- return values
-
-#
-# TopologyTemplate
-#
-
-def get_parameter_values(context, presentation, field_name):
- values = OrderedDict()
-
- parameters = getattr(presentation, field_name)
-
- # Fill in defaults and values
- if parameters:
- for name, parameter in parameters.iteritems():
- if values.get(name) is None:
- if hasattr(parameter, 'value') and (parameter.value is not None):
- # For parameters only:
- values[name] = coerce_property_value(context, presentation, parameter,
- parameter.value)
- else:
- default = parameter.default if hasattr(parameter, 'default') else None
- values[name] = coerce_property_value(context, presentation, parameter, default)
-
- return values
-
-#
-# Utils
-#
-
-def validate_required_values(context, presentation, values, definitions):
- """
- Check if required properties have not been assigned.
- """
-
- if not definitions:
- return
- for name, definition in definitions.iteritems():
- if getattr(definition, 'required', False) \
- and ((values is None) or (values.get(name) is None)):
- context.validation.report('required property "%s" is not assigned a value in "%s"'
- % (name, presentation._fullname),
- locator=presentation._get_child_locator('properties'),
- level=Issue.BETWEEN_TYPES)
-
-def merge_raw_property_definition(context, presentation, raw_property_definition,
- our_property_definition, field_name, property_name):
- # Check if we changed the type
- # TODO: allow a sub-type?
- type1 = raw_property_definition.get('type')
- type2 = our_property_definition.type
- if type1 != type2:
- context.validation.report(
- 'override changes type from "%s" to "%s" for property "%s" in "%s"'
- % (type1, type2, property_name, presentation._fullname),
- locator=presentation._get_child_locator(field_name, property_name),
- level=Issue.BETWEEN_TYPES)
-
- merge(raw_property_definition, our_property_definition._raw)
-
-def merge_raw_property_definitions(context, presentation, raw_property_definitions,
- our_property_definitions, field_name):
- if not our_property_definitions:
- return
- for property_name, our_property_definition in our_property_definitions.iteritems():
- if property_name in raw_property_definitions:
- raw_property_definition = raw_property_definitions[property_name]
- merge_raw_property_definition(context, presentation, raw_property_definition,
- our_property_definition, field_name, property_name)
- else:
- raw_property_definitions[property_name] = \
- deepcopy_with_locators(our_property_definition._raw)
-
-def merge_property_definitions(context, presentation, property_definitions,
- our_property_definitions, field_name):
- if not our_property_definitions:
- return
- for property_name, our_property_definition in our_property_definitions.iteritems():
- if property_name in property_definitions:
- property_definition = property_definitions[property_name]
- merge_raw_property_definition(context, presentation, property_definition._raw,
- our_property_definition, field_name, property_name)
- else:
- property_definitions[property_name] = our_property_definition
-
-# Works on properties, inputs, and parameters
-def coerce_property_value(context, presentation, definition, value, aspect=None):
- the_type = definition._get_type(context) if definition is not None else None
- entry_schema = definition.entry_schema if definition is not None else None
- constraints = definition._get_constraints(context) \
- if ((definition is not None) and hasattr(definition, '_get_constraints')) else None
- value = coerce_value(context, presentation, the_type, entry_schema, constraints, value, aspect)
- if (the_type is not None) and hasattr(the_type, '_name'):
- type_name = the_type._name
- else:
- type_name = getattr(definition, 'type', None)
- description = getattr(definition, 'description', None)
- description = description.value if description is not None else None
- return Value(type_name, value, description)
-
-def convert_property_definitions_to_values(context, definitions):
- values = OrderedDict()
- for name, definition in definitions.iteritems():
- default = definition.default
- values[name] = coerce_property_value(context, definition, definition, default)
- return values
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/extensions/aria_extension_tosca/simple_v1_0/modeling/requirements.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/requirements.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/requirements.py
index 2a68da2..6bdb5b1 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/requirements.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/requirements.py
@@ -14,13 +14,14 @@
# limitations under the License.
from aria.parser.validation import Issue
-from aria.utils.collections import deepcopy_with_locators, OrderedDict
+from aria.utils.collections import (deepcopy_with_locators, OrderedDict)
-from .properties import (convert_property_definitions_to_values, validate_required_values,
- coerce_property_value)
+from .parameters import (convert_parameter_definitions_to_values, validate_required_values,
+ coerce_parameter_value)
from .interfaces import (convert_requirement_interface_definitions_from_type_to_raw_template,
merge_interface_definitions, merge_interface, validate_required_inputs)
+
#
# NodeType
#
@@ -49,6 +50,7 @@ def get_inherited_requirement_definitions(context, presentation):
return requirement_definitions
+
#
# NodeTemplate
#
@@ -127,6 +129,7 @@ def get_template_requirements(context, presentation):
return requirement_assignments
+
#
# Utils
#
@@ -195,8 +198,8 @@ def convert_requirement_from_definition_to_assignment(context, requirement_defin
if relationship_property_definitions:
# Convert property definitions to values
raw['relationship']['properties'] = \
- convert_property_definitions_to_values(context,
- relationship_property_definitions)
+ convert_parameter_definitions_to_values(context,
+ relationship_property_definitions)
# These are our interface definitions
# InterfaceDefinition:
@@ -229,6 +232,7 @@ def convert_requirement_from_definition_to_assignment(context, requirement_defin
relationship_property_definitions, \
relationship_interface_definitions
+
def add_requirement_assignments(context, presentation, requirement_assignments,
requirement_definitions, our_requirement_assignments):
for requirement_name, our_requirement_assignment in our_requirement_assignments:
@@ -258,6 +262,7 @@ def add_requirement_assignments(context, presentation, requirement_assignments,
locator=our_requirement_assignment._locator,
level=Issue.BETWEEN_TYPES)
+
def merge_requirement_assignment(context, relationship_property_definitions,
relationship_interface_definitions, requirement, our_requirement):
our_capability = our_requirement.capability
@@ -283,6 +288,7 @@ def merge_requirement_assignment(context, relationship_property_definitions,
relationship_interface_definitions,
requirement, our_relationship)
+
def merge_requirement_assignment_relationship(context, presentation, property_definitions,
interface_definitions, requirement, our_relationship):
our_relationship_properties = our_relationship._raw.get('properties')
@@ -296,7 +302,7 @@ def merge_requirement_assignment_relationship(context, presentation, property_de
if property_name in property_definitions:
definition = property_definitions[property_name]
requirement._raw['relationship']['properties'][property_name] = \
- coerce_property_value(context, presentation, definition, prop)
+ coerce_parameter_value(context, presentation, definition, prop)
else:
context.validation.report(
'relationship property "%s" not declared at definition of requirement "%s"'
@@ -330,6 +336,7 @@ def merge_requirement_assignment_relationship(context, presentation, property_de
presentation._container._container._fullname),
locator=our_relationship._locator, level=Issue.BETWEEN_TYPES)
+
def validate_requirement_assignment(context, presentation, requirement_assignment,
relationship_property_definitions,
relationship_interface_definitions):
@@ -348,6 +355,7 @@ def validate_requirement_assignment(context, presentation, requirement_assignmen
validate_required_inputs(context, presentation, interface_assignment,
relationship_interface_definition, None, interface_name)
+
def get_first_requirement(requirement_definitions, name):
if requirement_definitions is not None:
for requirement_name, requirement_definition in requirement_definitions:
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/extensions/aria_extension_tosca/simple_v1_0/modeling/substitution_mappings.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/substitution_mappings.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/substitution_mappings.py
index c1e21de..8f7ec4c 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/substitution_mappings.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/substitution_mappings.py
@@ -16,6 +16,7 @@
from aria.utils.formatting import safe_repr
from aria.parser.validation import Issue
+
def validate_subtitution_mappings_requirement(context, presentation):
if not validate_format(context, presentation, 'requirement'):
return
@@ -57,6 +58,7 @@ def validate_subtitution_mappings_requirement(context, presentation):
locator=presentation._locator, level=Issue.BETWEEN_TYPES)
return
+
def validate_subtitution_mappings_capability(context, presentation):
if not validate_format(context, presentation, 'capability'):
return
@@ -99,6 +101,7 @@ def validate_subtitution_mappings_capability(context, presentation):
% (capability_type._name, presentation._name, type_capability_type._name),
locator=presentation._locator, level=Issue.BETWEEN_TYPES)
+
#
# Utils
#
@@ -114,6 +117,7 @@ def validate_format(context, presentation, name):
return False
return True
+
def get_node_template(context, presentation, name):
node_template_name = presentation._raw[0]
node_template = context.presentation.get_from_dict('service_template', 'topology_template',
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/extensions/aria_extension_tosca/simple_v1_0/templates.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/templates.py b/extensions/aria_extension_tosca/simple_v1_0/templates.py
index ce6b5d9..123a00e 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/templates.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/templates.py
@@ -26,7 +26,7 @@ from .assignments import (PropertyAssignment, AttributeAssignment, RequirementAs
from .definitions import ParameterDefinition
from .filters import NodeFilter
from .misc import (Description, MetaData, Repository, Import, SubstitutionMappings)
-from .modeling.properties import (get_assigned_and_defined_property_values, get_parameter_values)
+from .modeling.parameters import (get_assigned_and_defined_parameter_values, get_parameter_values)
from .modeling.interfaces import get_template_interfaces
from .modeling.requirements import get_template_requirements
from .modeling.capabilities import get_template_capabilities
@@ -157,12 +157,11 @@ class NodeTemplate(ExtensiblePresentation):
@cachedmethod
def _get_property_values(self, context):
- return FrozenDict(get_assigned_and_defined_property_values(context, self))
+ return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'property'))
@cachedmethod
def _get_attribute_default_values(self, context):
- return FrozenDict(get_assigned_and_defined_property_values(context, self,
- 'attribute', 'attributes'))
+ return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'attribute'))
@cachedmethod
def _get_requirements(self, context):
@@ -281,7 +280,7 @@ class RelationshipTemplate(ExtensiblePresentation):
@cachedmethod
def _get_property_values(self, context):
- return FrozenDict(get_assigned_and_defined_property_values(context, self))
+ return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'property'))
@cachedmethod
def _get_interfaces(self, context):
@@ -363,7 +362,7 @@ class GroupTemplate(ExtensiblePresentation):
@cachedmethod
def _get_property_values(self, context):
- return FrozenDict(get_assigned_and_defined_property_values(context, self))
+ return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'property'))
@cachedmethod
def _get_interfaces(self, context):
@@ -427,7 +426,7 @@ class PolicyTemplate(ExtensiblePresentation):
@cachedmethod
def _get_property_values(self, context):
- return FrozenDict(get_assigned_and_defined_property_values(context, self))
+ return FrozenDict(get_assigned_and_defined_parameter_values(context, self, 'property'))
@cachedmethod
def _get_targets(self, context):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/extensions/aria_extension_tosca/simple_v1_0/types.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/types.py b/extensions/aria_extension_tosca/simple_v1_0/types.py
index bc80eb9..d97b89c 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/types.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/types.py
@@ -33,9 +33,9 @@ from .modeling.capabilities import (get_inherited_valid_source_types,
get_inherited_capability_definitions)
from .modeling.data_types import (get_data_type, get_inherited_constraints, coerce_data_type_value,
validate_data_type_name)
-from .modeling.interfaces import get_inherited_interface_definitions, get_inherited_operations
+from .modeling.interfaces import (get_inherited_interface_definitions, get_inherited_operations)
from .modeling.policies import get_inherited_targets
-from .modeling.properties import get_inherited_property_definitions
+from .modeling.parameters import get_inherited_parameter_definitions
from .modeling.requirements import get_inherited_requirement_definitions
from .presentation.extensible import ExtensiblePresentation
from .presentation.field_getters import data_type_class_getter
@@ -115,7 +115,7 @@ class ArtifactType(ExtensiblePresentation):
@cachedmethod
def _get_properties(self, context):
- return FrozenDict(get_inherited_property_definitions(context, self, 'properties'))
+ return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
def _validate(self, context):
super(ArtifactType, self)._validate(context)
@@ -201,7 +201,7 @@ class DataType(ExtensiblePresentation):
@cachedmethod
def _get_properties(self, context):
- return FrozenDict(get_inherited_property_definitions(context, self, 'properties'))
+ return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
@cachedmethod
def _get_constraints(self, context):
@@ -307,7 +307,7 @@ class CapabilityType(ExtensiblePresentation):
@cachedmethod
def _get_properties(self, context):
- return FrozenDict(get_inherited_property_definitions(context, self, 'properties'))
+ return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
@cachedmethod
def _get_valid_source_types(self, context):
@@ -385,7 +385,7 @@ class InterfaceType(ExtensiblePresentation):
@cachedmethod
def _get_inputs(self, context):
- return FrozenDict(get_inherited_property_definitions(context, self, 'inputs'))
+ return FrozenDict(get_inherited_parameter_definitions(context, self, 'inputs'))
@cachedmethod
def _get_operations(self, context):
@@ -493,11 +493,11 @@ class RelationshipType(ExtensiblePresentation):
@cachedmethod
def _get_properties(self, context):
- return FrozenDict(get_inherited_property_definitions(context, self, 'properties'))
+ return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
@cachedmethod
def _get_attributes(self, context):
- return FrozenDict(get_inherited_property_definitions(context, self, 'attributes'))
+ return FrozenDict(get_inherited_parameter_definitions(context, self, 'attributes'))
@cachedmethod
def _get_interfaces(self, context):
@@ -624,11 +624,11 @@ class NodeType(ExtensiblePresentation):
@cachedmethod
def _get_properties(self, context):
- return FrozenDict(get_inherited_property_definitions(context, self, 'properties'))
+ return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
@cachedmethod
def _get_attributes(self, context):
- return FrozenDict(get_inherited_property_definitions(context, self, 'attributes'))
+ return FrozenDict(get_inherited_parameter_definitions(context, self, 'attributes'))
@cachedmethod
def _get_requirements(self, context):
@@ -760,7 +760,7 @@ class GroupType(ExtensiblePresentation):
@cachedmethod
def _get_properties(self, context):
- return FrozenDict(get_inherited_property_definitions(context, self, 'properties'))
+ return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
@cachedmethod
def _get_interfaces(self, context):
@@ -848,7 +848,7 @@ class PolicyType(ExtensiblePresentation):
@cachedmethod
def _get_properties(self, context):
- return FrozenDict(get_inherited_property_definitions(context, self, 'properties'))
+ return FrozenDict(get_inherited_parameter_definitions(context, self, 'properties'))
@cachedmethod
def _get_targets(self, context):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/cli/test_services.py
----------------------------------------------------------------------
diff --git a/tests/cli/test_services.py b/tests/cli/test_services.py
index b1a6ee4..e5717cc 100644
--- a/tests/cli/test_services.py
+++ b/tests/cli/test_services.py
@@ -19,7 +19,7 @@ import mock
from aria.cli.env import _Environment
from aria.core import Core
from aria.exceptions import DependentActiveExecutionsError, DependentAvailableNodesError
-from aria.modeling.exceptions import InputsException
+from aria.modeling.exceptions import ParameterException
from aria.storage import exceptions as storage_exceptions
from .base_test import ( # pylint: disable=unused-import
@@ -120,11 +120,11 @@ class TestServicesCreate(TestCliBase):
monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
monkeypatch.setattr(Core,
'create_service',
- raise_exception(InputsException))
+ raise_exception(ParameterException))
assert_exception_raised(
self.invoke('services create -t with_inputs test_s'),
- expected_exception=InputsException)
+ expected_exception=ParameterException)
assert "Service created. The service's name is test_s" not in self.logger_output_string
@@ -152,8 +152,8 @@ class TestServicesDelete(TestCliBase):
assert_exception_raised(
self.invoke('services delete test_s'),
expected_exception=DependentActiveExecutionsError,
- expected_msg="Can't delete service {name} - there is an active execution "
- "for this service. Active execution id: 1".format(
+ expected_msg="Can't delete service `{name}` - there is an active execution "
+ "for this service. Active execution ID: 1".format(
name=mock_models.SERVICE_NAME))
def test_delete_available_nodes_error(self, monkeypatch, mock_storage):
@@ -161,8 +161,8 @@ class TestServicesDelete(TestCliBase):
assert_exception_raised(
self.invoke('services delete test_s'),
expected_exception=DependentAvailableNodesError,
- expected_msg="Can't delete service {name} - there are available nodes "
- "for this service. Available node ids: 1".format(
+ expected_msg="Can't delete service `{name}` - there are available nodes "
+ "for this service. Available node IDs: 1".format(
name=mock_models.SERVICE_NAME))
def test_delete_available_nodes_error_with_force(self, monkeypatch, mock_storage):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/mock/models.py
----------------------------------------------------------------------
diff --git a/tests/mock/models.py b/tests/mock/models.py
index 98703d5..50aa340 100644
--- a/tests/mock/models.py
+++ b/tests/mock/models.py
@@ -225,11 +225,11 @@ def create_interface(service, interface_name, operation_name, operation_kwargs=N
interface_kwargs=None):
the_type = service.service_template.interface_types.get_descendant('test_interface_type')
- if operation_kwargs and operation_kwargs.get('inputs'):
- operation_kwargs['inputs'] = dict(
- (input_name, models.Parameter.wrap(input_name, input_value))
- for input_name, input_value in operation_kwargs['inputs'].iteritems()
- if input_value is not None)
+ if operation_kwargs and operation_kwargs.get('arguments'):
+ operation_kwargs['arguments'] = dict(
+ (argument_name, models.Parameter.wrap(argument_name, argument_value))
+ for argument_name, argument_value in operation_kwargs['arguments'].iteritems()
+ if argument_value is not None)
operation = models.Operation(
name=operation_name,
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/mock/topology.py
----------------------------------------------------------------------
diff --git a/tests/mock/topology.py b/tests/mock/topology.py
index bfb7b4e..ab08dbd 100644
--- a/tests/mock/topology.py
+++ b/tests/mock/topology.py
@@ -27,9 +27,9 @@ def create_simple_topology_single_node(model_storage, create_operation):
service_template,
'Standard', 'create',
operation_kwargs=dict(
- implementation=create_operation,
- inputs={'key': aria_models.Parameter.wrap('key', 'create'),
- 'value': aria_models.Parameter.wrap('value', True)})
+ function=create_operation,
+ arguments={'key': aria_models.Parameter.wrap('key', 'create'),
+ 'value': aria_models.Parameter.wrap('value', True)})
)
node_template.interface_templates[interface_template.name] = interface_template # pylint: disable=unsubscriptable-object
@@ -38,9 +38,9 @@ def create_simple_topology_single_node(model_storage, create_operation):
service,
'Standard', 'create',
operation_kwargs=dict(
- implementation=create_operation,
- inputs={'key': aria_models.Parameter.wrap('key', 'create'),
- 'value': aria_models.Parameter.wrap('value', True)})
+ function=create_operation,
+ arguments={'key': aria_models.Parameter.wrap('key', 'create'),
+ 'value': aria_models.Parameter.wrap('value', True)})
)
node.interfaces[interface.name] = interface # pylint: disable=unsubscriptable-object
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index 5d193bc..7dbdd04 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -78,14 +78,14 @@ def test_node_operation_task_execution(ctx, thread_executor, dataholder):
interface_name = 'Standard'
operation_name = 'create'
- inputs = {'putput': True, 'holder_path': dataholder.path}
+ arguments = {'putput': True, 'holder_path': dataholder.path}
node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
interface = mock.models.create_interface(
node.service,
interface_name,
operation_name,
- operation_kwargs=dict(implementation=op_path(basic_node_operation, module_path=__name__),
- inputs=inputs)
+ operation_kwargs=dict(function=op_path(basic_node_operation, module_path=__name__),
+ arguments=arguments)
)
node.interfaces[interface.name] = interface
ctx.model.node.update(node)
@@ -97,7 +97,7 @@ def test_node_operation_task_execution(ctx, thread_executor, dataholder):
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=inputs
+ inputs=arguments
)
)
@@ -115,7 +115,7 @@ def test_node_operation_task_execution(ctx, thread_executor, dataholder):
)
operations = interface.operations
assert len(operations) == 1
- assert dataholder['implementation'] == operations.values()[0].implementation # pylint: disable=no-member
+ assert dataholder['implementation'] == operations.values()[0].function # pylint: disable=no-member
assert dataholder['inputs']['putput'] is True
# Context based attributes (sugaring)
@@ -127,15 +127,14 @@ def test_relationship_operation_task_execution(ctx, thread_executor, dataholder)
interface_name = 'Configure'
operation_name = 'post_configure'
- inputs = {'putput': True, 'holder_path': dataholder.path}
+ arguments = {'putput': True, 'holder_path': dataholder.path}
relationship = ctx.model.relationship.list()[0]
interface = mock.models.create_interface(
relationship.source_node.service,
interface_name,
operation_name,
- operation_kwargs=dict(implementation=op_path(basic_relationship_operation,
- module_path=__name__),
- inputs=inputs),
+ operation_kwargs=dict(function=op_path(basic_relationship_operation, module_path=__name__),
+ arguments=arguments),
)
relationship.interfaces[interface.name] = interface
@@ -148,7 +147,7 @@ def test_relationship_operation_task_execution(ctx, thread_executor, dataholder)
relationship,
interface_name=interface_name,
operation_name=operation_name,
- inputs=inputs
+ inputs=arguments
)
)
@@ -160,7 +159,7 @@ def test_relationship_operation_task_execution(ctx, thread_executor, dataholder)
assert dataholder['actor_name'] == relationship.name
assert interface_name in dataholder['task_name']
operations = interface.operations
- assert dataholder['implementation'] == operations.values()[0].implementation # pylint: disable=no-member
+ assert dataholder['implementation'] == operations.values()[0].function # pylint: disable=no-member
assert dataholder['inputs']['putput'] is True
# Context based attributes (sugaring)
@@ -197,8 +196,8 @@ def test_invalid_task_operation_id(ctx, thread_executor, dataholder):
node.service,
interface_name=interface_name,
operation_name=operation_name,
- operation_kwargs=dict(implementation=op_path(get_node_id, module_path=__name__),
- inputs={'holder_path': dataholder.path})
+ operation_kwargs=dict(function=op_path(get_node_id, module_path=__name__),
+ arguments={'holder_path': dataholder.path})
)
node.interfaces[interface.name] = interface
ctx.model.node.update(node)
@@ -234,15 +233,15 @@ def test_plugin_workdir(ctx, thread_executor, tmpdir):
node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
filename = 'test_file'
content = 'file content'
- inputs = {'filename': filename, 'content': content}
+ arguments = {'filename': filename, 'content': content}
interface = mock.models.create_interface(
node.service,
interface_name,
operation_name,
operation_kwargs=dict(
- implementation='{0}.{1}'.format(__name__, _test_plugin_workdir.__name__),
+ function='{0}.{1}'.format(__name__, _test_plugin_workdir.__name__),
plugin=plugin,
- inputs=inputs)
+ arguments=arguments)
)
node.interfaces[interface.name] = interface
ctx.model.node.update(node)
@@ -253,7 +252,7 @@ def test_plugin_workdir(ctx, thread_executor, tmpdir):
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=inputs))
+ inputs=arguments))
execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
expected_file = tmpdir.join('workdir', 'plugins', str(ctx.service.id),
@@ -280,7 +279,7 @@ def test_node_operation_logging(ctx, executor):
node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
- inputs = {
+ arguments = {
'op_start': 'op_start',
'op_end': 'op_end',
}
@@ -289,8 +288,8 @@ def test_node_operation_logging(ctx, executor):
interface_name,
operation_name,
operation_kwargs=dict(
- implementation=op_path(logged_operation, module_path=__name__),
- inputs=inputs)
+ function=op_path(logged_operation, module_path=__name__),
+ arguments=arguments)
)
node.interfaces[interface.name] = interface
ctx.model.node.update(node)
@@ -302,19 +301,19 @@ def test_node_operation_logging(ctx, executor):
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=inputs
+ inputs=arguments
)
)
execute(workflow_func=basic_workflow, workflow_context=ctx, executor=executor)
- _assert_loggins(ctx, inputs)
+ _assert_loggins(ctx, arguments)
def test_relationship_operation_logging(ctx, executor):
interface_name, operation_name = mock.operations.RELATIONSHIP_OPERATIONS_INSTALL[0]
relationship = ctx.model.relationship.list()[0]
- inputs = {
+ arguments = {
'op_start': 'op_start',
'op_end': 'op_end',
}
@@ -322,8 +321,8 @@ def test_relationship_operation_logging(ctx, executor):
relationship.source_node.service,
interface_name,
operation_name,
- operation_kwargs=dict(implementation=op_path(logged_operation, module_path=__name__),
- inputs=inputs)
+ operation_kwargs=dict(function=op_path(logged_operation, module_path=__name__),
+ arguments=arguments)
)
relationship.interfaces[interface.name] = interface
ctx.model.relationship.update(relationship)
@@ -335,12 +334,12 @@ def test_relationship_operation_logging(ctx, executor):
relationship,
interface_name=interface_name,
operation_name=operation_name,
- inputs=inputs
+ inputs=arguments
)
)
execute(workflow_func=basic_workflow, workflow_context=ctx, executor=executor)
- _assert_loggins(ctx, inputs)
+ _assert_loggins(ctx, arguments)
def test_attribute_consumption(ctx, executor, dataholder):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/orchestrator/context/test_serialize.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_serialize.py b/tests/orchestrator/context/test_serialize.py
index 8a5db6f..946b0bd 100644
--- a/tests/orchestrator/context/test_serialize.py
+++ b/tests/orchestrator/context/test_serialize.py
@@ -47,7 +47,7 @@ def _mock_workflow(ctx, graph):
node.service,
'test',
'op',
- operation_kwargs=dict(implementation=_operation_mapping(),
+ operation_kwargs=dict(function=_operation_mapping(),
plugin=plugin)
)
node.interfaces[interface.name] = interface
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index fc34907..26a15e5 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -86,12 +86,12 @@ def test_host_ip(workflow_context, executor, dataholder):
interface_name = 'Standard'
operation_name = 'create'
_, dependency_node, _, _, _ = _get_elements(workflow_context)
- inputs = {'putput': True, 'holder_path': dataholder.path}
+ arguments = {'putput': True, 'holder_path': dataholder.path}
interface = mock.models.create_interface(
dependency_node.service,
interface_name=interface_name,
operation_name=operation_name,
- operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__), inputs=inputs)
+ operation_kwargs=dict(function=op_path(host_ip, module_path=__name__), arguments=arguments)
)
dependency_node.interfaces[interface.name] = interface
dependency_node.attributes['ip'] = models.Parameter.wrap('ip', '1.1.1.1')
@@ -105,7 +105,7 @@ def test_host_ip(workflow_context, executor, dataholder):
dependency_node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=inputs
+ inputs=arguments
)
)
@@ -118,13 +118,13 @@ def test_relationship_tool_belt(workflow_context, executor, dataholder):
interface_name = 'Configure'
operation_name = 'post_configure'
_, _, _, _, relationship = _get_elements(workflow_context)
- inputs = {'putput': True, 'holder_path': dataholder.path}
+ arguments = {'putput': True, 'holder_path': dataholder.path}
interface = mock.models.create_interface(
relationship.source_node.service,
interface_name=interface_name,
operation_name=operation_name,
- operation_kwargs=dict(implementation=op_path(relationship_operation, module_path=__name__),
- inputs=inputs)
+ operation_kwargs=dict(function=op_path(relationship_operation, module_path=__name__),
+ arguments=arguments)
)
relationship.interfaces[interface.name] = interface
workflow_context.model.relationship.update(relationship)
@@ -136,7 +136,7 @@ def test_relationship_tool_belt(workflow_context, executor, dataholder):
relationship,
interface_name=interface_name,
operation_name=operation_name,
- inputs=inputs
+ inputs=arguments
)
)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/orchestrator/execution_plugin/test_local.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_local.py b/tests/orchestrator/execution_plugin/test_local.py
index d9115e1..0dfd512 100644
--- a/tests/orchestrator/execution_plugin/test_local.py
+++ b/tests/orchestrator/execution_plugin/test_local.py
@@ -463,7 +463,7 @@ if __name__ == '__main__':
inputs=None):
local_script_path = script_path
script_path = os.path.basename(local_script_path) if local_script_path else ''
- inputs = inputs or {}
+ arguments = inputs or {}
process = process or {}
if script_path:
workflow_context.resource.service.upload(
@@ -471,7 +471,7 @@ if __name__ == '__main__':
source=local_script_path,
path=script_path)
- inputs.update({
+ arguments.update({
'script_path': script_path,
'process': process,
'input_as_env_var': env_var
@@ -485,17 +485,17 @@ if __name__ == '__main__':
'test',
'op',
operation_kwargs=dict(
- implementation='{0}.{1}'.format(
+ function='{0}.{1}'.format(
operations.__name__,
operations.run_script_locally.__name__),
- inputs=inputs)
+ arguments=arguments)
)
node.interfaces[interface.name] = interface
graph.add_tasks(api.task.OperationTask(
node,
interface_name='test',
operation_name='op',
- inputs=inputs))
+ inputs=arguments))
return graph
tasks_graph = mock_workflow(ctx=workflow_context) # pylint: disable=no-value-for-parameter
eng = engine.Engine(
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/orchestrator/execution_plugin/test_ssh.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_ssh.py b/tests/orchestrator/execution_plugin/test_ssh.py
index 92d250e..a369f8f 100644
--- a/tests/orchestrator/execution_plugin/test_ssh.py
+++ b/tests/orchestrator/execution_plugin/test_ssh.py
@@ -217,7 +217,7 @@ class TestWithActualSSHServer(object):
@workflow
def mock_workflow(ctx, graph):
node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
- inputs = {
+ arguments = {
'script_path': script_path,
'fabric_env': _FABRIC_ENV,
'process': process,
@@ -226,24 +226,24 @@ class TestWithActualSSHServer(object):
'test_operation': '',
}
if hide_output:
- inputs['hide_output'] = hide_output
+ arguments['hide_output'] = hide_output
if commands:
- inputs['commands'] = commands
+ arguments['commands'] = commands
interface = mock.models.create_interface(
node.service,
'test',
'op',
operation_kwargs=dict(
- implementation='{0}.{1}'.format(
+ function='{0}.{1}'.format(
operations.__name__,
operation.__name__),
- inputs=inputs)
+ arguments=arguments)
)
node.interfaces[interface.name] = interface
ops = []
for test_operation in test_operations:
- op_inputs = inputs.copy()
+ op_inputs = arguments.copy()
op_inputs['test_operation'] = test_operation
ops.append(api.task.OperationTask(
node,
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/orchestrator/test_workflow_runner.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/test_workflow_runner.py b/tests/orchestrator/test_workflow_runner.py
index 7374e50..405cb80 100644
--- a/tests/orchestrator/test_workflow_runner.py
+++ b/tests/orchestrator/test_workflow_runner.py
@@ -190,7 +190,7 @@ def test_execution_inputs_override_workflow_inputs(request):
def test_execution_inputs_undeclared_inputs(request):
mock_workflow = _setup_mock_workflow_in_service(request)
- with pytest.raises(modeling_exceptions.UndeclaredInputsException):
+ with pytest.raises(modeling_exceptions.UndeclaredParametersException):
_create_workflow_runner(request, mock_workflow, inputs={'undeclared_input': 'value'})
@@ -198,7 +198,7 @@ def test_execution_inputs_missing_required_inputs(request):
mock_workflow = _setup_mock_workflow_in_service(
request, inputs={'required_input': models.Parameter.wrap('required_input', value=None)})
- with pytest.raises(modeling_exceptions.MissingRequiredInputsException):
+ with pytest.raises(modeling_exceptions.MissingRequiredParametersException):
_create_workflow_runner(request, mock_workflow, inputs={})
@@ -206,13 +206,13 @@ def test_execution_inputs_wrong_type_inputs(request):
mock_workflow = _setup_mock_workflow_in_service(
request, inputs={'input': models.Parameter.wrap('input', 'value')})
- with pytest.raises(modeling_exceptions.InputsOfWrongTypeException):
+ with pytest.raises(modeling_exceptions.ParametersOfWrongTypeException):
_create_workflow_runner(request, mock_workflow, inputs={'input': 5})
def test_execution_inputs_builtin_workflow_with_inputs(request):
# built-in workflows don't have inputs
- with pytest.raises(modeling_exceptions.UndeclaredInputsException):
+ with pytest.raises(modeling_exceptions.UndeclaredParametersException):
_create_workflow_runner(request, 'install', inputs={'undeclared_input': 'value'})
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/orchestrator/workflows/api/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/api/test_task.py b/tests/orchestrator/workflows/api/test_task.py
index 642c785..d57e424 100644
--- a/tests/orchestrator/workflows/api/test_task.py
+++ b/tests/orchestrator/workflows/api/test_task.py
@@ -44,15 +44,15 @@ class TestOperationTask(object):
plugin = mock.models.create_plugin('test_plugin', '0.1')
ctx.model.node.update(plugin)
- inputs = {'test_input': True}
+ arguments = {'test_input': True}
interface = mock.models.create_interface(
ctx.service,
interface_name,
operation_name,
operation_kwargs=dict(plugin=plugin,
- implementation='op_path',
- inputs=inputs),)
+ function='op_path',
+ arguments=arguments),)
node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
node.interfaces[interface_name] = interface
@@ -66,7 +66,7 @@ class TestOperationTask(object):
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=inputs,
+ inputs=arguments,
max_attempts=max_attempts,
retry_interval=retry_interval,
ignore_failure=ignore_failure)
@@ -92,15 +92,15 @@ class TestOperationTask(object):
plugin = mock.models.create_plugin('test_plugin', '0.1')
ctx.model.plugin.update(plugin)
- inputs = {'test_input': True}
+ arguments = {'test_input': True}
interface = mock.models.create_interface(
ctx.service,
interface_name,
operation_name,
operation_kwargs=dict(plugin=plugin,
- implementation='op_path',
- inputs=inputs)
+ function='op_path',
+ arguments=arguments)
)
relationship = ctx.model.relationship.list()[0]
@@ -113,7 +113,7 @@ class TestOperationTask(object):
relationship,
interface_name=interface_name,
operation_name=operation_name,
- inputs=inputs,
+ inputs=arguments,
max_attempts=max_attempts,
retry_interval=retry_interval)
@@ -137,15 +137,15 @@ class TestOperationTask(object):
plugin = mock.models.create_plugin('test_plugin', '0.1')
ctx.model.node.update(plugin)
- inputs = {'test_input': True}
+ arguments = {'test_input': True}
interface = mock.models.create_interface(
ctx.service,
interface_name,
operation_name,
operation_kwargs=dict(plugin=plugin,
- implementation='op_path',
- inputs=inputs)
+ function='op_path',
+ arguments=arguments)
)
relationship = ctx.model.relationship.list()[0]
@@ -158,7 +158,7 @@ class TestOperationTask(object):
relationship,
interface_name=interface_name,
operation_name=operation_name,
- inputs=inputs,
+ inputs=arguments,
max_attempts=max_attempts,
retry_interval=retry_interval)
@@ -189,7 +189,7 @@ class TestOperationTask(object):
interface_name,
operation_name,
operation_kwargs=dict(plugin=plugin,
- implementation='op_path'))
+ function='op_path'))
dependency_node.interfaces[interface_name] = interface
with context.workflow.current.push(ctx):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/orchestrator/workflows/builtin/test_execute_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_execute_operation.py b/tests/orchestrator/workflows/builtin/test_execute_operation.py
index 4cddbe6..88818ca 100644
--- a/tests/orchestrator/workflows/builtin/test_execute_operation.py
+++ b/tests/orchestrator/workflows/builtin/test_execute_operation.py
@@ -35,7 +35,7 @@ def test_execute_operation(ctx):
ctx.service,
interface_name,
operation_name,
- operation_kwargs={'implementation': 'test'}
+ operation_kwargs=dict(function='test')
)
node.interfaces[interface.name] = interface
ctx.model.node.update(node)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index 8c0705b..43ec9f1 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -63,11 +63,11 @@ class BaseTest(object):
ignore_failure=None):
node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
interface_name = 'aria.interfaces.lifecycle'
- operation_kwargs = dict(implementation='{name}.{func.__name__}'.format(
+ operation_kwargs = dict(function='{name}.{func.__name__}'.format(
name=__name__, func=func))
if inputs:
- # the operation has to declare the inputs before those may be passed
- operation_kwargs['inputs'] = inputs
+ # the operation has to declare the arguments before those may be passed
+ operation_kwargs['arguments'] = inputs
operation_name = 'create'
interface = mock.models.create_interface(node.service, interface_name, operation_name,
operation_kwargs=operation_kwargs)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/orchestrator/workflows/core/test_events.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_events.py b/tests/orchestrator/workflows/core/test_events.py
index 184071d..6d542e9 100644
--- a/tests/orchestrator/workflows/core/test_events.py
+++ b/tests/orchestrator/workflows/core/test_events.py
@@ -110,8 +110,7 @@ def run_operation_on_node(ctx, op_name, interface_name):
service=node.service,
interface_name=interface_name,
operation_name=op_name,
- operation_kwargs=dict(implementation='{name}.{func.__name__}'.format(name=__name__,
- func=func)))
+ operation_kwargs=dict(function='{name}.{func.__name__}'.format(name=__name__, func=func)))
node.interfaces[interface.name] = interface
eng = engine.Engine(executor=ThreadExecutor(),
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/orchestrator/workflows/core/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task.py b/tests/orchestrator/workflows/core/test_task.py
index e488933..1ba6422 100644
--- a/tests/orchestrator/workflows/core/test_task.py
+++ b/tests/orchestrator/workflows/core/test_task.py
@@ -43,7 +43,7 @@ def ctx(tmpdir):
relationship.source_node.service,
RELATIONSHIP_INTERFACE_NAME,
RELATIONSHIP_OPERATION_NAME,
- operation_kwargs={'implementation': 'test'}
+ operation_kwargs=dict(function='test')
)
relationship.interfaces[interface.name] = interface
context.model.relationship.update(relationship)
@@ -53,7 +53,7 @@ def ctx(tmpdir):
node.service,
NODE_INTERFACE_NAME,
NODE_OPERATION_NAME,
- operation_kwargs={'implementation': 'test'}
+ operation_kwargs=dict(function='test')
)
node.interfaces[interface.name] = interface
context.model.node.update(node)
@@ -92,7 +92,7 @@ class TestOperationTask(object):
node.service,
NODE_INTERFACE_NAME,
NODE_OPERATION_NAME,
- operation_kwargs=dict(plugin=storage_plugin, implementation='test')
+ operation_kwargs=dict(plugin=storage_plugin, function='test')
)
node.interfaces[interface.name] = interface
ctx.model.node.update(node)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
index 2a96d01..1633d4a 100644
--- a/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
+++ b/tests/orchestrator/workflows/core/test_task_graph_into_execution_graph.py
@@ -32,7 +32,7 @@ def test_task_graph_into_execution_graph(tmpdir):
node.service,
interface_name,
operation_name,
- operation_kwargs={'implementation': 'test'}
+ operation_kwargs=dict(function='test')
)
node.interfaces[interface.name] = interface
task_context.model.node.update(node)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
index 92f0fc4..8c3f72a 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_concurrent_modifications.py
@@ -67,7 +67,7 @@ def _test(context, executor, lock_files, func, dataholder, expected_failure):
key = 'key'
first_value = 'value1'
second_value = 'value2'
- inputs = {
+ arguments = {
'lock_files': lock_files,
'key': key,
'first_value': first_value,
@@ -80,8 +80,8 @@ def _test(context, executor, lock_files, func, dataholder, expected_failure):
node.service,
interface_name,
operation_name,
- operation_kwargs=dict(implementation='{0}.{1}'.format(__name__, func.__name__),
- inputs=inputs)
+ operation_kwargs=dict(function='{0}.{1}'.format(__name__, func.__name__),
+ arguments=arguments)
)
node.interfaces[interface.name] = interface
context.model.node.update(node)
@@ -93,12 +93,12 @@ def _test(context, executor, lock_files, func, dataholder, expected_failure):
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=inputs),
+ inputs=arguments),
api.task.OperationTask(
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=inputs)
+ inputs=arguments)
)
signal = events.on_failure_task_signal
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/orchestrator/workflows/executor/test_process_executor_extension.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_extension.py b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
index 30b23ed..acca0bf 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_extension.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
@@ -27,7 +27,7 @@ from tests import storage
def test_decorate_extension(context, executor):
- inputs = {'input1': 1, 'input2': 2}
+ arguments = {'input1': 1, 'input2': 2}
def get_node(ctx):
return ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
@@ -41,24 +41,23 @@ def test_decorate_extension(context, executor):
ctx.service,
interface_name,
operation_name,
- operation_kwargs=dict(implementation='{0}.{1}'.format(__name__,
- _mock_operation.__name__),
- inputs=inputs)
+ operation_kwargs=dict(function='{0}.{1}'.format(__name__, _mock_operation.__name__),
+ arguments=arguments)
)
node.interfaces[interface.name] = interface
task = api.task.OperationTask(
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=inputs)
+ inputs=arguments)
graph.add_tasks(task)
return graph
graph = mock_workflow(ctx=context) # pylint: disable=no-value-for-parameter
eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
eng.execute()
out = get_node(context).attributes.get('out').value
- assert out['wrapper_inputs'] == inputs
- assert out['function_inputs'] == inputs
+ assert out['wrapper_inputs'] == arguments
+ assert out['function_inputs'] == arguments
@extension.process_executor
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
index 2b628a0..c766fe4 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
@@ -88,20 +88,20 @@ def _run_workflow(context, executor, op_func, inputs=None):
node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
interface_name = 'test_interface'
operation_name = 'operation'
- wf_inputs = inputs or {}
+ wf_arguments = inputs or {}
interface = mock.models.create_interface(
ctx.service,
interface_name,
operation_name,
- operation_kwargs=dict(implementation=_operation_mapping(op_func),
- inputs=wf_inputs)
+ operation_kwargs=dict(function=_operation_mapping(op_func),
+ arguments=wf_arguments)
)
node.interfaces[interface.name] = interface
task = api.task.OperationTask(
node,
interface_name=interface_name,
operation_name=operation_name,
- inputs=wf_inputs)
+ inputs=wf_arguments)
graph.add_tasks(task)
return graph
graph = mock_workflow(ctx=context) # pylint: disable=no-value-for-parameter
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/8fe7f4b1/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
index 8e80640..ee9e094 100644
--- a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
+++ b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
@@ -104,6 +104,16 @@ topology_template:
Maintenance:
enable: juju > charm.maintenance_on
disable: juju > charm.maintenance_off
+ Standard:
+ create:
+ implementation:
+ primary: create_node_cellar.sh
+ dependencies:
+ - "process.args.1 > { get_attribute: [ SELF, tosca_id ] }"
+ - "process.args.2 > { get_property: [ HOST, flavor_name ] }"
+ - ssh.user > admin
+ - ssh.password > '1234'
+ - ssh.use_sudo > true
requirements:
- database: node_cellar_database
capabilities:
@@ -161,16 +171,7 @@ topology_template:
relationship:
interfaces:
Configure:
- target_changed:
- implementation:
- primary: changed.sh
- dependencies:
- #- { concat: [ process.args.1 >, mongodb ] }
- - process.args.1 > mongodb
- - process.args.2 > host
- - ssh.user > admin
- - ssh.password > 1234
- - ssh.use_sudo > true
+ target_changed: changed.sh
nginx:
type: nginx.Nginx
@@ -251,6 +252,7 @@ topology_template:
Standard:
inputs:
openstack_credential: { get_input: openstack_credential }
+ create: create_data_volume.sh
groups:
@@ -309,7 +311,7 @@ policy_types:
client connections cleanly and shut down services.
derived_from: aria.Workflow
properties:
- implementation:
+ function:
type: string
default: workflows.maintenance
enabled:
[4/5] incubator-ariatosca git commit: Fixes
Posted by em...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fdec01ab/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
index ee9e094..4d53f9b 100644
--- a/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
+++ b/tests/resources/service-templates/tosca-simple-1.0/node-cellar/node-cellar.yaml
@@ -311,7 +311,7 @@ policy_types:
client connections cleanly and shut down services.
derived_from: aria.Workflow
properties:
- function:
+ implementation:
type: string
default: workflows.maintenance
enabled: