You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@ariatosca.apache.org by mx...@apache.org on 2016/12/04 16:56:12 UTC
incubator-ariatosca git commit: code review 2 - wip
Repository: incubator-ariatosca
Updated Branches:
refs/heads/ARIA-30-SQL-based-storage-implementation 5ea0b6313 -> 9a608318b
code review 2 - wip
Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/9a608318
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/9a608318
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/9a608318
Branch: refs/heads/ARIA-30-SQL-based-storage-implementation
Commit: 9a608318bcd249d030e465ca8687b8d059995cc0
Parents: 5ea0b63
Author: mxmrlv <mx...@gmail.com>
Authored: Sun Dec 4 18:56:02 2016 +0200
Committer: mxmrlv <mx...@gmail.com>
Committed: Sun Dec 4 18:56:02 2016 +0200
----------------------------------------------------------------------
aria/__init__.py | 4 +-
aria/orchestrator/context/toolbelt.py | 2 +-
aria/orchestrator/context/workflow.py | 8 +-
aria/orchestrator/workflows/core/task.py | 14 +-
aria/storage/core.py | 8 +-
aria/storage/models.py | 185 +++++++++----------
aria/storage/sql_mapi.py | 52 +++---
aria/storage/structures.py | 34 +---
tests/mock/context.py | 6 +-
tests/mock/models.py | 28 +--
tests/orchestrator/context/test_workflow.py | 4 +-
.../orchestrator/workflows/builtin/__init__.py | 7 +
.../workflows/builtin/test_execute_operation.py | 4 +-
.../orchestrator/workflows/builtin/test_heal.py | 10 +-
.../workflows/builtin/test_install.py | 9 +-
.../workflows/builtin/test_uninstall.py | 10 +-
.../orchestrator/workflows/core/test_engine.py | 2 +-
tests/storage/__init__.py | 4 +-
tests/storage/test_model_storage.py | 27 +--
tests/storage/test_models.py | 14 +-
tests/storage/test_resource_storage.py | 6 +-
21 files changed, 201 insertions(+), 237 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/aria/__init__.py
----------------------------------------------------------------------
diff --git a/aria/__init__.py b/aria/__init__.py
index 5317afa..f5151e3 100644
--- a/aria/__init__.py
+++ b/aria/__init__.py
@@ -56,7 +56,7 @@ def install_aria_extensions():
del sys.modules[module_name]
-def application_model_storage(api, api_params=None):
+def application_model_storage(api, api_kwargs=None):
"""
Initiate model storage for the supplied storage driver
"""
@@ -76,7 +76,7 @@ def application_model_storage(api, api_params=None):
storage.models.Task,
]
# if api not in _model_storage:
- return storage.ModelStorage(api, items=models, api_params=api_params or {})
+ return storage.ModelStorage(api, items=models, api_kwargs=api_kwargs or {})
def application_resource_storage(driver):
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/aria/orchestrator/context/toolbelt.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/toolbelt.py b/aria/orchestrator/context/toolbelt.py
index ceaeb72..ae0e1ff 100644
--- a/aria/orchestrator/context/toolbelt.py
+++ b/aria/orchestrator/context/toolbelt.py
@@ -33,7 +33,7 @@ class NodeToolBelt(object):
:return:
"""
assert isinstance(self._op_context, operation.NodeOperationContext)
- filters = {'target_node_instance_fk': self._op_context.node_instance.storage_id}
+ filters = {'target_node_instance_storage_id': self._op_context.node_instance.storage_id}
for relationship_instance in \
self._op_context.model.relationship_instance.iter(filters=filters):
yield relationship_instance.source_node_instance
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/aria/orchestrator/context/workflow.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/workflow.py b/aria/orchestrator/context/workflow.py
index 0540590..5ec4044 100644
--- a/aria/orchestrator/context/workflow.py
+++ b/aria/orchestrator/context/workflow.py
@@ -53,8 +53,8 @@ class WorkflowContext(BaseContext):
now = datetime.utcnow()
execution = self.model.execution.model_cls(
id=self._execution_id,
- blueprint_fk=self.blueprint.storage_id,
- deployment_fk=self.deployment.storage_id,
+ blueprint_storage_id=self.blueprint.storage_id,
+ deployment_storage_id=self.deployment.storage_id,
workflow_id=self._workflow_id,
created_at=now,
status=execution_cls.PENDING,
@@ -69,7 +69,7 @@ class WorkflowContext(BaseContext):
"""
return self.model.node.iter(
filters={
- 'deployment_fk': self.deployment.storage_id
+ 'deployment_storage_id': self.deployment.storage_id
}
)
@@ -80,7 +80,7 @@ class WorkflowContext(BaseContext):
"""
return self.model.node_instance.iter(
filters={
- 'deployment_fk': self.deployment.storage_id
+ 'deployment_storage_id': self.deployment.storage_id
}
)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py
index 3b18965..505b0b2 100644
--- a/aria/orchestrator/workflows/core/task.py
+++ b/aria/orchestrator/workflows/core/task.py
@@ -106,23 +106,25 @@ class OperationTask(BaseTask):
def __init__(self, api_task, *args, **kwargs):
super(OperationTask, self).__init__(id=api_task.id, **kwargs)
self._workflow_context = api_task._workflow_context
- task_model = api_task._workflow_context.model.task.model_cls
+ base_task_model = api_task._workflow_context.model.task.model_cls
if isinstance(api_task.actor, models.NodeInstance):
context_class = operation_context.NodeOperationContext
+ task_model_cls = base_task_model.node_instance
elif isinstance(api_task.actor, models.RelationshipInstance):
context_class = operation_context.RelationshipOperationContext
+ task_model_cls = base_task_model.relationship_instance
else:
- raise RuntimeError('No operation context could be created for {0}'
- .format(api_task.actor.model_cls))
+ raise RuntimeError('No operation context could be created for {actor.model_cls}'
+ .format(actor=api_task.actor))
- operation_task = task_model(
+ operation_task = task_model_cls(
id=api_task.id,
name=api_task.name,
operation_mapping=api_task.operation_mapping,
- actor=api_task.actor,
+ instance_id=api_task.actor.storage_id,
inputs=api_task.inputs,
- status=task_model.PENDING,
+ status=base_task_model.PENDING,
execution_id=self._workflow_context._execution_id,
max_attempts=api_task.max_attempts,
retry_interval=api_task.retry_interval,
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/aria/storage/core.py
----------------------------------------------------------------------
diff --git a/aria/storage/core.py b/aria/storage/core.py
index e452698..a5d3210 100644
--- a/aria/storage/core.py
+++ b/aria/storage/core.py
@@ -52,8 +52,8 @@ class Storage(LoggerMixin):
"""
Represents the storage
"""
- def __init__(self, api_cls, api_params=None, items=(), **kwargs):
- self._api_params = api_params or {}
+ def __init__(self, api_cls, api_kwargs=None, items=(), **kwargs):
+ self._api_kwargs = api_kwargs or {}
super(Storage, self).__init__(**kwargs)
self.api = api_cls
self.registered = {}
@@ -90,7 +90,7 @@ class ResourceStorage(Storage):
:param name:
:return:
"""
- self.registered[name] = self.api(name=name, **self._api_params)
+ self.registered[name] = self.api(name=name, **self._api_kwargs)
self.registered[name].create()
self.logger.debug('setup {name} in storage {self!r}'.format(name=name, self=self))
@@ -112,7 +112,7 @@ class ModelStorage(Storage):
return
self.registered[model_name] = self.api(name=model_name,
model_cls=model_cls,
- **self._api_params)
+ **self._api_kwargs)
self.registered[model_name].create()
self.logger.debug('setup {name} in storage {self!r}'.format(name=model_name, self=self))
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/aria/storage/models.py
----------------------------------------------------------------------
diff --git a/aria/storage/models.py b/aria/storage/models.py
index 29dfdc9..0bc5d95 100644
--- a/aria/storage/models.py
+++ b/aria/storage/models.py
@@ -38,7 +38,6 @@ classes:
"""
from collections import namedtuple
from datetime import datetime
-from uuid import uuid4
from sqlalchemy.ext.declarative.base import declared_attr
from sqlalchemy.orm import validates
@@ -54,7 +53,6 @@ from .structures import (
String,
PickleType,
Float,
- MutableDict,
Dict,
foreign_key,
one_to_many_relationship,
@@ -77,10 +75,6 @@ __all__ = (
)
-def uuid_generator():
- return str(uuid4())
-
-
#pylint: disable=no-self-argument
@@ -92,7 +86,7 @@ class Blueprint(SQLModelBase):
created_at = Column(DateTime, nullable=False, index=True)
main_file_name = Column(Text, nullable=False)
- plan = Column(MutableDict.as_mutable(Dict), nullable=False)
+ plan = Column(Dict, nullable=False)
updated_at = Column(DateTime)
description = Column(Text)
@@ -103,25 +97,25 @@ class Deployment(SQLModelBase):
"""
__tablename__ = 'deployments'
- _private_fields = ['blueprint_fk']
+ _private_fields = ['blueprint_storage_id']
- blueprint_fk = foreign_key(Blueprint.storage_id)
+ blueprint_storage_id = foreign_key(Blueprint.storage_id)
created_at = Column(DateTime, nullable=False, index=True)
description = Column(Text)
- inputs = Column(MutableDict.as_mutable(Dict))
- groups = Column(MutableDict.as_mutable(Dict))
+ inputs = Column(Dict)
+ groups = Column(Dict)
permalink = Column(Text)
- policy_triggers = Column(MutableDict.as_mutable(Dict))
- policy_types = Column(MutableDict.as_mutable(Dict))
- outputs = Column(MutableDict.as_mutable(Dict))
- scaling_groups = Column(MutableDict.as_mutable(Dict))
+ policy_triggers = Column(Dict)
+ policy_types = Column(Dict)
+ outputs = Column(Dict)
+ scaling_groups = Column(Dict)
updated_at = Column(DateTime)
- workflows = Column(MutableDict.as_mutable(Dict))
+ workflows = Column(Dict)
@declared_attr
def blueprint(cls):
- return one_to_many_relationship(cls, Blueprint, cls.blueprint_fk)
+ return one_to_many_relationship(cls, Blueprint, cls.blueprint_storage_id)
class Execution(SQLModelBase):
@@ -167,26 +161,26 @@ class Execution(SQLModelBase):
deployment_id = association_proxy('deployment', 'id')
blueprint_id = association_proxy('blueprint', 'id')
- deployment_fk = foreign_key(Deployment.storage_id)
- blueprint_fk = foreign_key(Blueprint.storage_id)
- _private_fields = ['deployment_fk', 'blueprint_fk']
+ deployment_storage_id = foreign_key(Deployment.storage_id)
+ blueprint_storage_id = foreign_key(Blueprint.storage_id)
+ _private_fields = ['deployment_storage_id', 'blueprint_storage_id']
created_at = Column(DateTime, index=True)
started_at = Column(DateTime, nullable=True, index=True)
ended_at = Column(DateTime, nullable=True, index=True)
error = Column(Text, nullable=True)
is_system_workflow = Column(Boolean, nullable=False, default=False)
- parameters = Column(MutableDict.as_mutable(Dict))
+ parameters = Column(Dict)
status = Column(Enum(*STATES, name='execution_status'), default=PENDING)
workflow_id = Column(Text, nullable=False)
@declared_attr
def deployment(cls):
- return one_to_many_relationship(cls, Deployment, cls.deployment_fk)
+ return one_to_many_relationship(cls, Deployment, cls.deployment_storage_id)
@declared_attr
def blueprint(cls):
- return one_to_many_relationship(cls, Blueprint, cls.blueprint_fk)
+ return one_to_many_relationship(cls, Blueprint, cls.blueprint_storage_id)
def __str__(self):
id_name, id_value = self.unique_id()
@@ -207,25 +201,25 @@ class DeploymentUpdate(SQLModelBase):
deployment_id = association_proxy('deployment', 'id')
execution_id = association_proxy('execution', 'id')
- deployment_fk = foreign_key(Deployment.storage_id)
- execution_fk = foreign_key(Execution.storage_id, nullable=True)
- _private_fields = ['execution_fk', 'deployment_fk']
+ deployment_storage_id = foreign_key(Deployment.storage_id)
+ execution_storage_id = foreign_key(Execution.storage_id, nullable=True)
+ _private_fields = ['execution_storage_id', 'deployment_storage_id']
created_at = Column(DateTime, nullable=False, index=True)
- deployment_plan = Column(MutableDict.as_mutable(Dict))
- deployment_update_node_instances = Column(MutableDict.as_mutable(Dict))
- deployment_update_deployment = Column(MutableDict.as_mutable(Dict))
- deployment_update_nodes = Column(MutableDict.as_mutable(Dict))
- modified_entity_ids = Column(MutableDict.as_mutable(Dict))
+ deployment_plan = Column(Dict)
+ deployment_update_node_instances = Column(Dict)
+ deployment_update_deployment = Column(Dict)
+ deployment_update_nodes = Column(Dict)
+ modified_entity_ids = Column(Dict)
state = Column(Text)
@declared_attr
def execution(cls):
- return one_to_many_relationship(cls, Execution, cls.execution_fk)
+ return one_to_many_relationship(cls, Execution, cls.execution_storage_id)
@declared_attr
def deployment(cls):
- return one_to_many_relationship(cls, Deployment, cls.deployment_fk)
+ return one_to_many_relationship(cls, Deployment, cls.deployment_storage_id)
def to_dict(self, suppress_error=False, **kwargs):
dep_update_dict = super(DeploymentUpdate, self).to_dict(suppress_error)
@@ -260,8 +254,8 @@ class DeploymentUpdateStep(SQLModelBase):
)
deployment_update_id = association_proxy('deployment_update', 'id')
- deployment_update_fk = foreign_key(DeploymentUpdate.storage_id)
- _private_fields = ['deployment_update_fk']
+ deployment_update_storage_id = foreign_key(DeploymentUpdate.storage_id)
+ _private_fields = ['deployment_update_storage_id']
action = Column(Enum(*ACTION_TYPES, name='action_type'))
entity_id = Column(Text, nullable=False)
@@ -271,7 +265,7 @@ class DeploymentUpdateStep(SQLModelBase):
def deployment_update(cls):
return one_to_many_relationship(cls,
DeploymentUpdate,
- cls.deployment_update_fk,
+ cls.deployment_update_storage_id,
backreference='steps')
def __hash__(self):
@@ -315,22 +309,22 @@ class DeploymentModification(SQLModelBase):
STATES = [STARTED, FINISHED, ROLLEDBACK]
END_STATES = [FINISHED, ROLLEDBACK]
- deployment_fk = foreign_key(Deployment.storage_id)
- _private_fields = ['deployment_fk']
+ deployment_storage_id = foreign_key(Deployment.storage_id)
+ _private_fields = ['deployment_storage_id']
deployment_id = association_proxy('deployment', 'id')
- context = Column(MutableDict.as_mutable(Dict))
+ context = Column(Dict)
created_at = Column(DateTime, nullable=False, index=True)
ended_at = Column(DateTime, index=True)
- modified_nodes = Column(MutableDict.as_mutable(Dict))
- node_instances = Column(MutableDict.as_mutable(Dict))
+ modified_nodes = Column(Dict)
+ node_instances = Column(Dict)
status = Column(Enum(*STATES, name='deployment_modification_status'))
@declared_attr
def deployment(cls):
return one_to_many_relationship(cls,
Deployment,
- cls.deployment_fk,
+ cls.deployment_storage_id,
backreference='modifications')
@@ -343,15 +337,15 @@ class Node(SQLModelBase):
# See base class for an explanation on these properties
is_id_unique = False
- _private_fields = ['deployment_fk']
- deployment_fk = foreign_key(Deployment.storage_id)
+ _private_fields = ['deployment_storage_id']
+ deployment_storage_id = foreign_key(Deployment.storage_id)
deployment_id = association_proxy('deployment', 'id')
blueprint_id = association_proxy('blueprint', 'id')
@declared_attr
def deployment(cls):
- return one_to_many_relationship(cls, Deployment, cls.deployment_fk)
+ return one_to_many_relationship(cls, Deployment, cls.deployment_storage_id)
deploy_number_of_instances = Column(Integer, nullable=False)
# TODO: This probably should be a foreign key, but there's no guarantee
@@ -361,10 +355,10 @@ class Node(SQLModelBase):
min_number_of_instances = Column(Integer, nullable=False)
number_of_instances = Column(Integer, nullable=False)
planned_number_of_instances = Column(Integer, nullable=False)
- plugins = Column(MutableDict.as_mutable(Dict))
- plugins_to_install = Column(MutableDict.as_mutable(Dict))
- properties = Column(MutableDict.as_mutable(Dict))
- operations = Column(MutableDict.as_mutable(Dict))
+ plugins = Column(Dict)
+ plugins_to_install = Column(Dict)
+ properties = Column(Dict)
+ operations = Column(Dict)
type = Column(Text, nullable=False, index=True)
type_hierarchy = Column(PickleType)
@@ -378,26 +372,26 @@ class Relationship(SQLModelBase):
blueprint_id = association_proxy('blueprint', 'id')
deployment_id = association_proxy('deployment', 'id')
- _private_fields = ['source_node_fk', 'target_node_fk']
+ _private_fields = ['source_node_storage_id', 'target_node_storage_id']
- source_node_fk = foreign_key(Node.storage_id)
- target_node_fk = foreign_key(Node.storage_id)
+ source_node_storage_id = foreign_key(Node.storage_id)
+ target_node_storage_id = foreign_key(Node.storage_id)
@declared_attr
def source_node(cls):
- return one_to_many_relationship(cls, Node, cls.source_node_fk, 'outbound_relationships')
+ return one_to_many_relationship(cls, Node, cls.source_node_storage_id, 'outbound_relationships')
@declared_attr
def target_node(cls):
- return one_to_many_relationship(cls, Node, cls.target_node_fk, 'inbound_relationships')
+ return one_to_many_relationship(cls, Node, cls.target_node_storage_id, 'inbound_relationships')
- source_interfaces = Column(MutableDict.as_mutable(Dict))
- source_operations = Column(MutableDict.as_mutable(Dict))
- target_interfaces = Column(MutableDict.as_mutable(Dict))
- target_operations = Column(MutableDict.as_mutable(Dict))
+ source_interfaces = Column(Dict)
+ source_operations = Column(Dict)
+ target_interfaces = Column(Dict)
+ target_operations = Column(Dict)
type = Column(String)
type_hierarchy = Column(PickleType)
- properties = Column(MutableDict.as_mutable(Dict))
+ properties = Column(Dict)
class NodeInstance(SQLModelBase):
@@ -406,9 +400,9 @@ class NodeInstance(SQLModelBase):
"""
__tablename__ = 'node_instances'
- node_fk = foreign_key(Node.storage_id)
- deployment_fk = foreign_key(Deployment.storage_id)
- _private_fields = ['node_fk', 'deployment_fk']
+ node_storage_id = foreign_key(Node.storage_id)
+ deployment_storage_id = foreign_key(Deployment.storage_id)
+ _private_fields = ['node_storage_id', 'deployment_storage_id']
node_id = association_proxy('node', 'id')
deployment_id = association_proxy('node', 'deployment_id')
@@ -419,14 +413,14 @@ class NodeInstance(SQLModelBase):
# TODO: This probably should be a foreign key, but there's no guarantee
# in the code, currently, that the host will be created beforehand
host_id = Column(Text)
- runtime_properties = Column(MutableDict.as_mutable(Dict))
- scaling_groups = Column(MutableDict.as_mutable(Dict))
+ runtime_properties = Column(Dict)
+ scaling_groups = Column(Dict)
state = Column(Text, nullable=False)
version = Column(Integer, default=1)
@declared_attr
def node(cls):
- return one_to_many_relationship(cls, Node, cls.node_fk)
+ return one_to_many_relationship(cls, Node, cls.node_storage_id)
class RelationshipInstance(SQLModelBase):
@@ -438,31 +432,31 @@ class RelationshipInstance(SQLModelBase):
blueprint_id = association_proxy('blueprint', 'id')
deployment_id = association_proxy('deployment', 'id')
- relationship_fk = foreign_key(Relationship.storage_id)
- source_node_instance_fk = foreign_key(NodeInstance.storage_id)
- target_node_instance_fk = foreign_key(NodeInstance.storage_id)
+ relationship_storage_id = foreign_key(Relationship.storage_id)
+ source_node_instance_storage_id = foreign_key(NodeInstance.storage_id)
+ target_node_instance_storage_id = foreign_key(NodeInstance.storage_id)
- _private_fields = ['relationship_storage_fk',
- 'source_node_instance_fk',
- 'target_node_instance_fk']
+ _private_fields = ['relationship_storage_id',
+ 'source_node_instance_storage_id',
+ 'target_node_instance_storage_id']
@declared_attr
def source_node_instance(cls):
return one_to_many_relationship(cls,
NodeInstance,
- cls.source_node_instance_fk,
+ cls.source_node_instance_storage_id,
'outbound_relationship_instances')
@declared_attr
def target_node_instance(cls):
return one_to_many_relationship(cls,
NodeInstance,
- cls.target_node_instance_fk,
+ cls.target_node_instance_storage_id,
'inbound_relationship_instances')
@declared_attr
def relationship(cls):
- return one_to_many_relationship(cls, Relationship, cls.relationship_fk)
+ return one_to_many_relationship(cls, Relationship, cls.relationship_storage_id)
class ProviderContext(SQLModelBase):
@@ -472,7 +466,7 @@ class ProviderContext(SQLModelBase):
__tablename__ = 'provider_context'
name = Column(Text, nullable=False)
- context = Column(MutableDict.as_mutable(Dict), nullable=False)
+ context = Column(Dict, nullable=False)
class Plugin(SQLModelBase):
@@ -488,14 +482,14 @@ class Plugin(SQLModelBase):
distribution = Column(Text)
distribution_release = Column(Text)
distribution_version = Column(Text)
- excluded_wheels = Column(MutableDict.as_mutable(Dict))
+ excluded_wheels = Column(Dict)
package_name = Column(Text, nullable=False, index=True)
package_source = Column(Text)
package_version = Column(Text)
- supported_platform = Column(MutableDict.as_mutable(Dict))
- supported_py_versions = Column(MutableDict.as_mutable(Dict))
+ supported_platform = Column(Dict)
+ supported_py_versions = Column(Dict)
uploaded_at = Column(DateTime, nullable=False, index=True)
- wheels = Column(MutableDict.as_mutable(Dict), nullable=False)
+ wheels = Column(Dict, nullable=False)
class Task(SQLModelBase):
@@ -504,18 +498,18 @@ class Task(SQLModelBase):
"""
__tablename__ = 'task'
- node_instance_fk = foreign_key(NodeInstance.storage_id, nullable=True)
- relationship_instance_fk = foreign_key(RelationshipInstance.storage_id, nullable=True)
+ node_instance_storage_id = foreign_key(NodeInstance.storage_id, nullable=True)
+ relationship_instance_storage_id = foreign_key(RelationshipInstance.storage_id, nullable=True)
- _private_fields = ['node_instance_fk', 'relationship_instance_fk']
+ _private_fields = ['node_instance_storage_id', 'relationship_instance_storage_id']
@declared_attr
def node_instance(cls):
- return one_to_many_relationship(cls, NodeInstance, cls.node_instance_fk)
+ return one_to_many_relationship(cls, NodeInstance, cls.node_instance_storage_id)
@declared_attr
def relationship_instance(cls):
- return one_to_many_relationship(cls, RelationshipInstance, cls.relationship_instance_fk)
+ return one_to_many_relationship(cls, RelationshipInstance, cls.relationship_instance_storage_id)
PENDING = 'pending'
RETRYING = 'retrying'
@@ -559,15 +553,7 @@ class Task(SQLModelBase):
# Operation specific fields
name = Column(String)
operation_mapping = Column(String)
- inputs = Column(MutableDict.as_mutable(Dict))
-
- @property
- def actor_storage_id(self):
- """
- Return the actor storage id of the task
- :return:
- """
- return self.node_instance_fk or self.relationship_instance_fk
+ inputs = Column(Dict)
@property
def actor(self):
@@ -577,9 +563,10 @@ class Task(SQLModelBase):
"""
return self.node_instance or self.relationship_instance
- def __init__(self, actor=None, **kwargs):
- if isinstance(actor, RelationshipInstance):
- kwargs['relationship_instance_fk'] = actor.storage_id
- elif isinstance(actor, NodeInstance):
- kwargs['node_instance_fk'] = actor.storage_id
- super(Task, self).__init__(**kwargs)
+ @classmethod
+ def node_instance(cls, instance_id, **kwargs):
+ return cls(node_instance_storage_id=instance_id, **kwargs)
+
+ @classmethod
+ def relationship_instance(cls, instance_id, **kwargs):
+ return cls(relationship_instance_storage_id=instance_id, **kwargs)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/aria/storage/sql_mapi.py
----------------------------------------------------------------------
diff --git a/aria/storage/sql_mapi.py b/aria/storage/sql_mapi.py
index 25cc82f..7298aa9 100644
--- a/aria/storage/sql_mapi.py
+++ b/aria/storage/sql_mapi.py
@@ -38,11 +38,10 @@ class SQLAlchemyModelAPI(api.ModelAPI):
self._engine = engine
self._session = session
- def get(self, entry_id, include=None, filters=None, locking=False, **kwargs):
+ def get(self, entry_id, include=None, locking=False, **kwargs):
"""Return a single result based on the model class and element ID
"""
- filters = filters or {'id': entry_id}
- query = self._get_query(include, filters)
+ query = self._get_query(include, {'id': entry_id})
if locking:
query = query.with_for_update()
result = query.first()
@@ -54,6 +53,23 @@ class SQLAlchemyModelAPI(api.ModelAPI):
)
return result
+ def list(self,
+ include=None,
+ filters=None,
+ pagination=None,
+ sort=None,
+ **kwargs):
+ query = self._get_query(include, filters, sort)
+
+ results, total, size, offset = self._paginate(query, pagination)
+
+ return ListResult(
+ items=results,
+ metadata=dict(total=total,
+ size=size,
+ offset=offset)
+ )
+
def iter(self,
include=None,
filters=None,
@@ -63,10 +79,7 @@ class SQLAlchemyModelAPI(api.ModelAPI):
"""Return a (possibly empty) list of `model_class` results
"""
query = self._get_query(include, filters, sort)
-
- results, _, _, _ = self._paginate(query, pagination)
-
- for result in results:
+ for result in query:
yield result
def put(self, entry, **kwargs):
@@ -81,26 +94,13 @@ class SQLAlchemyModelAPI(api.ModelAPI):
self._safe_commit()
return entry
- def delete(self, entry_id, filters=None, **kwargs):
+ def delete(self, entry, **kwargs):
"""Delete a single result based on the model class and element ID
"""
- try:
- instance = self.get(
- entry_id,
- filters=filters
- )
- except exceptions.StorageError:
- raise exceptions.StorageError(
- 'Could not delete {0} with ID `{1}` - element not found'
- .format(
- self.model_cls.__name__,
- entry_id
- )
- )
- self._load_relationships(instance)
- self._session.delete(instance)
+ self._load_relationships(entry)
+ self._session.delete(entry)
self._safe_commit()
- return instance
+ return entry
def update(self, entry, **kwargs):
"""Add `instance` to the DB session, and attempt to commit
@@ -125,8 +125,8 @@ class SQLAlchemyModelAPI(api.ModelAPI):
def _establish_connection(self):
pass
- def create(self):
- self.model_cls.__table__.create(self._engine)
+ def create(self, checkfirst=True):
+ self.model_cls.__table__.create(self._engine, checkfirst=checkfirst)
def drop(self):
"""
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/aria/storage/structures.py
----------------------------------------------------------------------
diff --git a/aria/storage/structures.py b/aria/storage/structures.py
index d9a5322..58a4b98 100644
--- a/aria/storage/structures.py
+++ b/aria/storage/structures.py
@@ -87,7 +87,7 @@ def one_to_many_relationship(child_class,
)
-class Dict(TypeDecorator):
+class _DictType(TypeDecorator):
"""
Dict representation of type.
"""
@@ -112,7 +112,7 @@ class Dict(TypeDecorator):
return value
-class MutableDict(Mutable, dict):
+class _MutableDict(Mutable, dict):
"""
Enables tracking for dict values.
"""
@@ -120,9 +120,9 @@ class MutableDict(Mutable, dict):
def coerce(cls, key, value):
"Convert plain dictionaries to MutableDict."
- if not isinstance(value, MutableDict):
+ if not isinstance(value, _MutableDict):
if isinstance(value, dict):
- return MutableDict(value)
+ return _MutableDict(value)
# this call will raise ValueError
return Mutable.coerce(key, value)
@@ -141,6 +141,7 @@ class MutableDict(Mutable, dict):
dict.__delitem__(self, key)
self.changed()
+Dict = _MutableDict.as_mutable(_DictType)
class SQLModelBase(Model):
"""
@@ -149,18 +150,11 @@ class SQLModelBase(Model):
# SQLAlchemy syntax
__abstract__ = True
- # Does the class represent a resource (Blueprint, Deployment, etc.) or a
- # management table (User, Tenant, etc.), as they are handled differently
- is_resource = False
-
- # This would be overridden once the models are created.
+ # This would be overridden once the models are created. Created for pylint.
__table__ = None
_private_fields = []
- # Indicates whether the `id` column in this class should be unique
- is_id_unique = True
-
storage_id = Column(Integer, primary_key=True, autoincrement=True)
id = Column(Text, index=True)
@@ -197,19 +191,5 @@ class SQLModelBase(Model):
"""
return set(cls.__table__.columns.keys()) - set(cls._private_fields)
- def __str__(self):
- id_name, id_value = self.unique_id()
- return '<{0} {1}=`{2}`>'.format(
- self.__class__.__name__,
- id_name,
- id_value
- )
-
def __repr__(self):
- return str(self)
-
- def __unicode__(self):
- return str(self)
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.to_dict() == other.to_dict()
+ return '<{0} id=`{1}`>'.format(self.__class__.__name__, self.id)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/tests/mock/context.py
----------------------------------------------------------------------
diff --git a/tests/mock/context.py b/tests/mock/context.py
index 0ab18bf..9b33f55 100644
--- a/tests/mock/context.py
+++ b/tests/mock/context.py
@@ -20,15 +20,15 @@ from aria import application_model_storage
from aria.orchestrator import context
from aria.storage.sql_mapi import SQLAlchemyModelAPI
-from tests.storage import get_sqlite_api_params
+from tests.storage import get_sqlite_api_kwargs
from . import models
@pytest.fixture
def simple(**kwargs):
- api_params = get_sqlite_api_params()
- model_storage = application_model_storage(SQLAlchemyModelAPI, api_params=api_params)
+ api_kwargs = get_sqlite_api_kwargs()
+ model_storage = application_model_storage(SQLAlchemyModelAPI, api_kwargs=api_kwargs)
model_storage.blueprint.put(models.get_blueprint())
blueprint = model_storage.blueprint.get(models.BLUEPRINT_ID)
deployment = models.get_deployment(blueprint)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/tests/mock/models.py
----------------------------------------------------------------------
diff --git a/tests/mock/models.py b/tests/mock/models.py
index 72ec7e4..df08174 100644
--- a/tests/mock/models.py
+++ b/tests/mock/models.py
@@ -46,7 +46,7 @@ def get_dependency_node(deployment):
operations=dict((key, {}) for key in operations.NODE_OPERATIONS),
min_number_of_instances=1,
max_number_of_instances=1,
- deployment_fk=deployment.storage_id
+ deployment_storage_id=deployment.storage_id
)
@@ -56,8 +56,8 @@ def get_dependency_node_instance(dependency_node):
host_id=DEPENDENCY_NODE_INSTANCE_ID,
runtime_properties={'ip': '1.1.1.1'},
version=None,
- node_fk=dependency_node.storage_id,
- deployment_fk=dependency_node.deployment.storage_id,
+ node_storage_id=dependency_node.storage_id,
+ deployment_storage_id=dependency_node.deployment.storage_id,
state='',
scaling_groups={}
)
@@ -66,8 +66,8 @@ def get_dependency_node_instance(dependency_node):
def get_relationship(source=None, target=None):
return models.Relationship(
id=RELATIONSHIP_ID,
- source_node_fk=source.storage_id,
- target_node_fk=target.storage_id,
+ source_node_storage_id=source.storage_id,
+ target_node_storage_id=target.storage_id,
source_interfaces={},
source_operations=dict((key, {}) for key in operations.RELATIONSHIP_OPERATIONS),
target_interfaces={},
@@ -81,16 +81,16 @@ def get_relationship(source=None, target=None):
def get_relationship_instance(source_instance, target_instance, relationship):
return models.RelationshipInstance(
id=RELATIONSHIP_INSTANCE_ID,
- relationship_fk=relationship.storage_id,
- target_node_instance_fk=target_instance.storage_id,
- source_node_instance_fk=source_instance.storage_id,
+ relationship_storage_id=relationship.storage_id,
+ target_node_instance_storage_id=target_instance.storage_id,
+ source_node_instance_storage_id=source_instance.storage_id,
)
def get_dependent_node(deployment):
return models.Node(
id=DEPENDENT_NODE_ID,
- deployment_fk=deployment.storage_id,
+ deployment_storage_id=deployment.storage_id,
host_id=DEPENDENT_NODE_ID,
type='test_node_type',
type_hierarchy=[],
@@ -110,8 +110,8 @@ def get_dependent_node_instance(dependent_node):
host_id=DEPENDENT_NODE_INSTANCE_ID,
runtime_properties={},
version=None,
- node_fk=dependent_node.storage_id,
- deployment_fk=dependent_node.deployment.storage_id,
+ node_storage_id=dependent_node.storage_id,
+ deployment_storage_id=dependent_node.deployment.storage_id,
state='',
scaling_groups={}
)
@@ -132,8 +132,8 @@ def get_blueprint():
def get_execution(deployment):
return models.Execution(
id=EXECUTION_ID,
- deployment_fk=deployment.storage_id,
- blueprint_fk=deployment.blueprint.storage_id,
+ deployment_storage_id=deployment.storage_id,
+ blueprint_storage_id=deployment.blueprint.storage_id,
status=models.Execution.STARTED,
workflow_id=WORKFLOW_ID,
started_at=datetime.utcnow(),
@@ -145,7 +145,7 @@ def get_deployment(blueprint):
now = datetime.utcnow()
return models.Deployment(
id=DEPLOYMENT_ID,
- blueprint_fk=blueprint.storage_id,
+ blueprint_storage_id=blueprint.storage_id,
description='',
created_at=now,
updated_at=now,
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/tests/orchestrator/context/test_workflow.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_workflow.py b/tests/orchestrator/context/test_workflow.py
index fbe5d75..e2c8287 100644
--- a/tests/orchestrator/context/test_workflow.py
+++ b/tests/orchestrator/context/test_workflow.py
@@ -57,8 +57,8 @@ class TestWorkflowContext(object):
@pytest.fixture(scope='function')
def storage():
- api_params = test_storage.get_sqlite_api_params()
- result = application_model_storage(SQLAlchemyModelAPI, api_params=api_params)
+ api_kwargs = test_storage.get_sqlite_api_kwargs()
+ result = application_model_storage(SQLAlchemyModelAPI, api_kwargs=api_kwargs)
result.blueprint.put(models.get_blueprint())
blueprint = result.blueprint.get(models.BLUEPRINT_ID)
result.deployment.put(models.get_deployment(blueprint))
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/tests/orchestrator/workflows/builtin/__init__.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/__init__.py b/tests/orchestrator/workflows/builtin/__init__.py
index 7649a2a..3cda673 100644
--- a/tests/orchestrator/workflows/builtin/__init__.py
+++ b/tests/orchestrator/workflows/builtin/__init__.py
@@ -53,3 +53,10 @@ def assert_node_uninstall_operations(operations, with_relationships=False):
else:
for i, operation in enumerate(operations):
assert operation.name.startswith(mock.operations.NODE_OPERATIONS_UNINSTALL[i])
+
+
+@pytest.fixture
+def ctx():
+ context = mock.context.simple()
+ yield context
+ context.model.drop()
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/tests/orchestrator/workflows/builtin/test_execute_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_execute_operation.py b/tests/orchestrator/workflows/builtin/test_execute_operation.py
index f034046..09ce214 100644
--- a/tests/orchestrator/workflows/builtin/test_execute_operation.py
+++ b/tests/orchestrator/workflows/builtin/test_execute_operation.py
@@ -18,9 +18,9 @@ from aria.orchestrator.workflows.builtin.execute_operation import execute_operat
from tests import mock
+from . import ctx
-def test_execute_operation():
- ctx = mock.context.simple()
+def test_execute_operation(ctx):
operation_name = mock.operations.NODE_OPERATIONS_INSTALL[0]
node_instance_id = 'dependency_node_instance'
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/tests/orchestrator/workflows/builtin/test_heal.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_heal.py b/tests/orchestrator/workflows/builtin/test_heal.py
index 7982f42..d39181c 100644
--- a/tests/orchestrator/workflows/builtin/test_heal.py
+++ b/tests/orchestrator/workflows/builtin/test_heal.py
@@ -19,11 +19,11 @@ from aria.orchestrator.workflows.builtin.heal import heal
from tests import mock
from . import (assert_node_install_operations,
- assert_node_uninstall_operations)
+ assert_node_uninstall_operations,
+ ctx)
-def test_heal_dependent_node():
- ctx = mock.context.simple()
+def test_heal_dependent_node(ctx):
heal_graph = task.WorkflowTask(heal, ctx=ctx, node_instance_id='dependent_node_instance')
assert len(list(heal_graph.tasks)) == 2
@@ -47,9 +47,7 @@ def test_heal_dependent_node():
assert_node_uninstall_operations(dependent_node_uninstall_tasks, with_relationships=True)
assert_node_install_operations(dependent_node_install_tasks, with_relationships=True)
-def test_heal_dependency_node():
- ctx = mock.context.simple()
-
+def test_heal_dependency_node(ctx):
heal_graph = task.WorkflowTask(heal, ctx=ctx, node_instance_id='dependency_node_instance')
# both subgraphs should contain un\install for both the dependent and the dependency
assert len(list(heal_graph.tasks)) == 2
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/tests/orchestrator/workflows/builtin/test_install.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_install.py b/tests/orchestrator/workflows/builtin/test_install.py
index e2e0e4c..2c803eb 100644
--- a/tests/orchestrator/workflows/builtin/test_install.py
+++ b/tests/orchestrator/workflows/builtin/test_install.py
@@ -16,13 +16,14 @@
from aria.orchestrator.workflows.api import task
from aria.orchestrator.workflows.builtin.install import install
-from tests import mock
-from . import assert_node_install_operations
+from . import (
+ assert_node_install_operations,
+ ctx
+)
-def test_install():
- ctx = mock.context.simple()
+def test_install(ctx):
install_tasks = list(task.WorkflowTask(install, ctx=ctx).topological_order(True))
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/tests/orchestrator/workflows/builtin/test_uninstall.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_uninstall.py b/tests/orchestrator/workflows/builtin/test_uninstall.py
index 7d788f4..b745a15 100644
--- a/tests/orchestrator/workflows/builtin/test_uninstall.py
+++ b/tests/orchestrator/workflows/builtin/test_uninstall.py
@@ -16,13 +16,13 @@
from aria.orchestrator.workflows.api import task
from aria.orchestrator.workflows.builtin.uninstall import uninstall
-from tests import mock
+from . import (
+ assert_node_uninstall_operations,
+ ctx
+)
-from . import assert_node_uninstall_operations
-
-def test_uninstall():
- ctx = mock.context.simple()
+def test_uninstall(ctx):
uninstall_tasks = list(task.WorkflowTask(uninstall, ctx=ctx).topological_order(True))
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index 43c3641..a7d009a 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -218,7 +218,7 @@ class TestCancel(BaseTest):
@workflow
def mock_workflow(ctx, graph):
- return graph.sequence(*(self._op(mock_sleep_task, ctx, inputs={'seconds': 1})
+ return graph.sequence(*(self._op(mock_sleep_task, ctx, inputs={'seconds': 0.1})
for _ in range(number_of_tasks)))
eng = self._engine(workflow_func=mock_workflow,
workflow_context=workflow_context,
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/tests/storage/__init__.py
----------------------------------------------------------------------
diff --git a/tests/storage/__init__.py b/tests/storage/__init__.py
index 235c05c..2c848b4 100644
--- a/tests/storage/__init__.py
+++ b/tests/storage/__init__.py
@@ -18,6 +18,7 @@ from shutil import rmtree
from sqlalchemy import (
create_engine,
+ MetaData,
orm)
from sqlalchemy.pool import StaticPool
@@ -31,9 +32,10 @@ class TestFileSystem(object):
rmtree(self.path, ignore_errors=True)
-def get_sqlite_api_params():
+def get_sqlite_api_kwargs():
engine = create_engine('sqlite:///:memory:',
connect_args={'check_same_thread': False},
poolclass=StaticPool)
session = orm.sessionmaker(bind=engine)()
+ MetaData().create_all(engine)
return dict(engine=engine, session=session)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/tests/storage/test_model_storage.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_model_storage.py b/tests/storage/test_model_storage.py
index 4d610f3..b3dab34 100644
--- a/tests/storage/test_model_storage.py
+++ b/tests/storage/test_model_storage.py
@@ -13,12 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import tempfile
import shutil
import pytest
-
from aria.storage import (
ModelStorage,
models,
@@ -26,23 +24,12 @@ from aria.storage import (
sql_mapi,
)
from aria import application_model_storage
-from tests.storage import get_sqlite_api_params
-
-temp_dir = tempfile.mkdtemp()
+from tests.storage import get_sqlite_api_kwargs
@pytest.fixture
def storage():
- return ModelStorage(sql_mapi.SQLAlchemyModelAPI, api_params=get_sqlite_api_params())
-
-
-@pytest.fixture(autouse=True)
-def cleanup():
- yield
- try:
- shutil.rmtree(temp_dir, ignore_errors=True)
- except BaseException:
- pass
+ return ModelStorage(sql_mapi.SQLAlchemyModelAPI, api_kwargs=get_sqlite_api_kwargs())
def test_storage_base(storage):
@@ -66,9 +53,9 @@ def test_model_storage(storage):
storage.provider_context.update(pc)
assert storage.provider_context.get(pc.id).context == new_context
- storage.provider_context.delete('id1')
+ storage.provider_context.delete(pc)
with pytest.raises(exceptions.StorageError):
- storage.provider_context.get('id1')
+ storage.provider_context.get(pc.id)
def test_storage_driver(storage):
@@ -82,15 +69,15 @@ def test_storage_driver(storage):
assert next(i for i in storage.registered['provider_context'].iter()) == pc
assert [i for i in storage.provider_context] == [pc]
- storage.registered['provider_context'].delete('id2')
+ storage.registered['provider_context'].delete(pc)
with pytest.raises(exceptions.StorageError):
- storage.registered['provider_context'].get('id2')
+ storage.registered['provider_context'].get(pc.id)
def test_application_storage_factory():
storage = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
- api_params=get_sqlite_api_params())
+ api_kwargs=get_sqlite_api_kwargs())
assert storage.node
assert storage.node_instance
assert storage.plugin
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/tests/storage/test_models.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_models.py b/tests/storage/test_models.py
index 82a1377..2491cdd 100644
--- a/tests/storage/test_models.py
+++ b/tests/storage/test_models.py
@@ -26,13 +26,13 @@ from aria.storage.models import (
Task)
-from tests.storage import get_sqlite_api_params
+from tests.storage import get_sqlite_api_kwargs
@pytest.fixture
def empty_storage():
return application_model_storage(sql_mapi.SQLAlchemyModelAPI,
- api_params=get_sqlite_api_params())
+ api_kwargs=get_sqlite_api_kwargs())
@pytest.mark.parametrize(
@@ -168,11 +168,11 @@ def test_execution_status_transition():
def test_task_max_attempts_validation():
def create_task(max_attempts):
- return Task(execution_id='eid',
- name='name',
- operation_mapping='',
- inputs={},
- max_attempts=max_attempts)
+ Task(execution_id='eid',
+ name='name',
+ operation_mapping='',
+ inputs={},
+ max_attempts=max_attempts)
create_task(max_attempts=1)
create_task(max_attempts=2)
create_task(max_attempts=Task.INFINITE_RETRIES)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/9a608318/tests/storage/test_resource_storage.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_resource_storage.py b/tests/storage/test_resource_storage.py
index 4347512..9b5f782 100644
--- a/tests/storage/test_resource_storage.py
+++ b/tests/storage/test_resource_storage.py
@@ -45,13 +45,13 @@ class TestResourceStorage(TestFileSystem):
def _create_storage(self):
return ResourceStorage(FileSystemResourceAPI,
- api_params=dict(directory=self.path))
+ api_kwargs=dict(directory=self.path))
def test_name(self):
api = FileSystemResourceAPI
storage = ResourceStorage(FileSystemResourceAPI,
items=['blueprint'],
- api_params=dict(directory=self.path))
+ api_kwargs=dict(directory=self.path))
assert repr(storage) == 'ResourceStorage(api={api})'.format(api=api)
assert 'directory={resource_dir}'.format(resource_dir=self.path) in \
repr(storage.registered['blueprint'])
@@ -62,7 +62,7 @@ class TestResourceStorage(TestFileSystem):
assert os.path.exists(os.path.join(self.path, 'blueprint'))
def test_upload_file(self):
- storage = ResourceStorage(FileSystemResourceAPI, api_params=dict(directory=self.path))
+ storage = ResourceStorage(FileSystemResourceAPI, api_kwargs=dict(directory=self.path))
self._create(storage)
tmpfile_path = tempfile.mkstemp(suffix=self.__class__.__name__, dir=self.path)[1]
self._upload(storage, tmpfile_path, id='blueprint_id')