Posted to dev@ariatosca.apache.org by mx...@apache.org on 2017/02/09 09:10:17 UTC

[1/2] incubator-ariatosca git commit: ARIA-99 Straightforward end-to-end tests for parser and built-in workflow [Forced Update!]

Repository: incubator-ariatosca
Updated Branches:
  refs/heads/ARIA-42-Generic-ctx-serialization-mechanism 6311e453f -> 1498ad397 (forced update)


ARIA-99 Straightforward end-to-end tests for parser and built-in workflow


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/d35d09a3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/d35d09a3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/d35d09a3

Branch: refs/heads/ARIA-42-Generic-ctx-serialization-mechanism
Commit: d35d09a35422add7c3ee34053add05f41b8de1ba
Parents: e282f23
Author: Tal Liron <ta...@gmail.com>
Authored: Fri Feb 3 13:32:46 2017 -0600
Committer: Tal Liron <ta...@gmail.com>
Committed: Wed Feb 8 11:32:52 2017 -0600

----------------------------------------------------------------------
 aria/cli/cli.py                                 |   4 +-
 aria/parser/loading/request.py                  |   9 +-
 aria/parser/loading/uri.py                      |   5 +-
 aria/storage/filesystem_rapi.py                 |   2 +-
 aria/utils/uris.py                              |  22 +-
 tests/conftest.py                               |  23 ++
 tests/end2end/test_orchestrator.py              |  60 ++++
 tests/end2end/test_parser.py                    |  40 +++
 tests/orchestrator/conftest.py                  |  23 --
 tests/parser/__init__.py                        |  14 +
 tests/parser/service_templates.py               |  30 ++
 tests/parser/utils.py                           |  78 +++++
 .../service_templates/node-cellar/inputs.yaml   |   3 +
 .../node-cellar/node-cellar.yaml                | 299 +++++++++++++++++++
 .../node-cellar/types/aria.yaml                 |  93 ++++++
 .../node-cellar/types/mongodb.yaml              |  73 +++++
 .../node-cellar/types/nginx.yaml                |  27 ++
 .../node-cellar/types/nodejs.yaml               |  69 +++++
 .../node-cellar/types/openstack.yaml            | 201 +++++++++++++
 .../service_templates/node-cellar/types/os.yaml |  75 +++++
 .../service_templates/node-cellar/workflows.py  |  19 ++
 21 files changed, 1137 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/aria/cli/cli.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli.py b/aria/cli/cli.py
index 20ace2c..8d014b3 100644
--- a/aria/cli/cli.py
+++ b/aria/cli/cli.py
@@ -17,7 +17,9 @@
 CLI Entry point
 """
 
+import os
 import logging
+import tempfile
 
 from .. import install_aria_extensions
 from ..logger import (
@@ -100,7 +102,7 @@ def main():
     create_logger(
         handlers=[
             create_console_log_handler(),
-            create_file_log_handler(file_path='/tmp/aria_cli.log'),
+            create_file_log_handler(file_path=os.path.join(tempfile.gettempdir(), 'aria_cli.log')),
         ],
         level=logging.INFO)
     with AriaCli() as aria:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/aria/parser/loading/request.py
----------------------------------------------------------------------
diff --git a/aria/parser/loading/request.py b/aria/parser/loading/request.py
index 6ebabfc..a809347 100644
--- a/aria/parser/loading/request.py
+++ b/aria/parser/loading/request.py
@@ -13,8 +13,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import os
+import tempfile
+
 from requests import Session
-from requests.exceptions import ConnectionError
+from requests.exceptions import (ConnectionError, InvalidSchema)
 from cachecontrol import CacheControl
 from cachecontrol.caches import FileCache
 
@@ -22,7 +25,7 @@ from .exceptions import LoaderException, DocumentNotFoundException
 from .loader import Loader
 
 SESSION = None
-SESSION_CACHE_PATH = '/tmp'
+SESSION_CACHE_PATH = os.path.join(tempfile.gettempdir(), 'aria_requests')
 
 
 class RequestLoader(Loader):
@@ -53,6 +56,8 @@ class RequestLoader(Loader):
 
         try:
             self._response = SESSION.get(self.uri, headers=self.headers)
+        except InvalidSchema as e:
+            raise DocumentNotFoundException('document not found: "%s"' % self.uri, cause=e)
         except ConnectionError as e:
             raise LoaderException('request connection error: "%s"' % self.uri, cause=e)
         except Exception as e:
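
For context: requests raises InvalidSchema when it has no connection adapter for a
URI's scheme (for example a file: URI, or a Windows drive path whose drive letter is
mistaken for a scheme), and UriTextLoader (uri.py, below) only falls back to its
search prefixes on DocumentNotFoundException. A minimal sketch of the behavior being
translated here, illustrative only and not part of the diff:

    import requests
    from requests.exceptions import InvalidSchema

    try:
        # No HTTP(S) adapter matches the 'file' scheme, so requests refuses it
        requests.get('file:///templates/node-cellar.yaml')
    except InvalidSchema:
        # RequestLoader now reports this as DocumentNotFoundException, letting
        # the URI loader try its prefixes instead of failing with LoaderException
        pass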

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/aria/parser/loading/uri.py
----------------------------------------------------------------------
diff --git a/aria/parser/loading/uri.py b/aria/parser/loading/uri.py
index f0cde3a..1b23bf6 100644
--- a/aria/parser/loading/uri.py
+++ b/aria/parser/loading/uri.py
@@ -66,8 +66,9 @@ class UriTextLoader(Loader):
         except DocumentNotFoundException:
             # Try prefixes in order
             for prefix in self._prefixes:
-                if as_file(prefix) is not None:
-                    uri = os.path.join(prefix, self.location.uri)
+                prefix_as_file = as_file(prefix)
+                if prefix_as_file is not None:
+                    uri = os.path.join(prefix_as_file, self.location.uri)
                 else:
                     uri = urljoin(prefix, self.location.uri)
                 try:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/aria/storage/filesystem_rapi.py
----------------------------------------------------------------------
diff --git a/aria/storage/filesystem_rapi.py b/aria/storage/filesystem_rapi.py
index eb30e0b..6693dbd 100644
--- a/aria/storage/filesystem_rapi.py
+++ b/aria/storage/filesystem_rapi.py
@@ -17,10 +17,10 @@ SQLalchemy based RAPI
 """
 import os
 import shutil
+from multiprocessing import RLock
 from contextlib import contextmanager
 from functools import partial
 from distutils import dir_util                                # https://github.com/PyCQA/pylint/issues/73; pylint: disable=no-name-in-module
-from multiprocessing import RLock
 
 from aria.storage import (
     api,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/aria/utils/uris.py
----------------------------------------------------------------------
diff --git a/aria/utils/uris.py b/aria/utils/uris.py
index 1686517..5f7bcf5 100644
--- a/aria/utils/uris.py
+++ b/aria/utils/uris.py
@@ -16,13 +16,29 @@
 import os
 import urlparse
 
+
+_IS_WINDOWS = (os.name == 'nt')
+
+
 def as_file(uri):
     """
-    If the URI is a file (either the :code:`file` scheme or no scheme), then returns the absolute
+    If the URI is a file (either the :code:`file` scheme or no scheme), then returns the normalized
     path. Otherwise, returns None.
     """
 
+    if _IS_WINDOWS:
+        # We need this extra check in Windows before urlparse because paths might have a drive
+        # prefix, e.g. "C:" which will be considered a scheme for urlparse below
+        path = uri.replace('/', '\\')
+        if os.path.exists(path):
+            return os.path.normpath(path)
+
     url = urlparse.urlparse(uri)
-    if (not url.scheme) or (url.scheme == 'file'):
-        return os.path.abspath(url.path)
+    scheme = url.scheme
+    if (not scheme) or (scheme == 'file'):
+        path = url.path
+        if _IS_WINDOWS:
+            path = path.replace('/', '\\')
+        return os.path.normpath(path)
+
     return None
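
For illustration (not part of the diff), the extra Windows branch above is needed
because urlparse treats a drive letter as a URL scheme, so a plain Windows path would
otherwise fail the "file scheme or no scheme" test and as_file() would return None:

    import urlparse  # Python 2, as used by this module

    url = urlparse.urlparse('C:\\templates\\node-cellar.yaml')
    print(url.scheme)  # prints: c  -- the drive letter is parsed as a scheme
    print(url.path)    # prints: \templates\node-cellar.yaml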

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/tests/conftest.py
----------------------------------------------------------------------
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..4b24f18
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,23 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+import aria
+
+
+@pytest.fixture(scope='session', autouse=True)
+def install_aria_extensions():
+    aria.install_aria_extensions()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/tests/end2end/test_orchestrator.py
----------------------------------------------------------------------
diff --git a/tests/end2end/test_orchestrator.py b/tests/end2end/test_orchestrator.py
new file mode 100644
index 0000000..f930577
--- /dev/null
+++ b/tests/end2end/test_orchestrator.py
@@ -0,0 +1,60 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+from aria.orchestrator.runner import Runner
+from aria.orchestrator.workflows.builtin import BUILTIN_WORKFLOWS
+from aria.parser.modeling.storage import initialize_storage
+from aria.utils.imports import import_fullname
+from aria.utils.collections import OrderedDict
+
+from tests.parser.service_templates import consume_node_cellar
+
+
+WORKFLOW_POLICY_INTERNAL_PROPERTIES = ('function', 'implementation', 'dependencies')
+
+
+def test_install():
+    _workflow('install')
+
+
+def test_custom():
+    _workflow('maintenance_on')
+
+
+def _workflow(workflow_name):
+    context, _ = consume_node_cellar()
+
+    # TODO: this logic will eventually stabilize and be part of the ARIA API,
+    # likely somewhere in aria.orchestrator.workflows
+    if workflow_name in BUILTIN_WORKFLOWS:
+        workflow_fn = import_fullname('aria.orchestrator.workflows.builtin.' + workflow_name)
+        inputs = {}
+    else:
+        policy = context.modeling.instance.policies[workflow_name]
+        sys.path.append(policy.properties['implementation'].value)
+
+        workflow_fn = import_fullname(policy.properties['function'].value)
+        inputs = OrderedDict([
+            (k, v.value) for k, v in policy.properties.iteritems()
+            if k not in WORKFLOW_POLICY_INTERNAL_PROPERTIES
+        ])
+
+    def _initialize_storage(model_storage):
+        initialize_storage(context, model_storage, 1)
+
+    runner = Runner(workflow_name, workflow_fn, inputs, _initialize_storage, 1)
+    runner.run()
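
To make the non-builtin branch above concrete: with the node-cellar template added
later in this commit, _workflow('maintenance_on') resolves the MaintenanceWorkflow
policy roughly as follows (a sketch of the resulting values, not code from the test
itself):

    import sys

    # policy.properties['implementation'].value
    sys.path.append('tests/resources/service_templates/node-cellar')

    # import_fullname(policy.properties['function'].value), i.e. 'workflows.maintenance'
    from workflows import maintenance as workflow_fn

    # every property except 'function', 'implementation' and 'dependencies'
    inputs = {'enabled': True}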

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/tests/end2end/test_parser.py
----------------------------------------------------------------------
diff --git a/tests/end2end/test_parser.py b/tests/end2end/test_parser.py
new file mode 100644
index 0000000..7c243ab
--- /dev/null
+++ b/tests/end2end/test_parser.py
@@ -0,0 +1,40 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from tests.parser.service_templates import consume_node_cellar
+
+
+def test_validation():
+    consume_node_cellar('validate')
+
+
+def test_validation_no_cache():
+    consume_node_cellar('validate', False)
+
+
+def test_presentation():
+    consume_node_cellar('presentation')
+
+
+def test_model():
+    consume_node_cellar('model')
+
+
+def test_types():
+    consume_node_cellar('types')
+
+
+def test_instance():
+    consume_node_cellar('instance')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/tests/orchestrator/conftest.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/conftest.py b/tests/orchestrator/conftest.py
deleted file mode 100644
index 4b24f18..0000000
--- a/tests/orchestrator/conftest.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-
-import aria
-
-
-@pytest.fixture(scope='session', autouse=True)
-def install_aria_extensions():
-    aria.install_aria_extensions()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/tests/parser/__init__.py
----------------------------------------------------------------------
diff --git a/tests/parser/__init__.py b/tests/parser/__init__.py
new file mode 100644
index 0000000..ae1e83e
--- /dev/null
+++ b/tests/parser/__init__.py
@@ -0,0 +1,14 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/tests/parser/service_templates.py
----------------------------------------------------------------------
diff --git a/tests/parser/service_templates.py b/tests/parser/service_templates.py
new file mode 100644
index 0000000..8c361e6
--- /dev/null
+++ b/tests/parser/service_templates.py
@@ -0,0 +1,30 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from aria.utils.caching import cachedmethod
+
+from .utils import (get_uri, create_context, create_consumer)
+
+
+def consume_node_cellar(consumer_class_name='instance', cache=True):
+    cachedmethod.ENABLED = cache
+    uri = get_uri('node-cellar', 'node-cellar.yaml')
+    context = create_context(uri)
+    context.args.append('--inputs=' + get_uri('node-cellar', 'inputs.yaml'))
+    consumer, dumper = create_consumer(context, consumer_class_name)
+    consumer.consume()
+    context.validation.dump_issues()
+    assert not context.validation.has_issues
+    return context, dumper

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/tests/parser/utils.py
----------------------------------------------------------------------
diff --git a/tests/parser/utils.py b/tests/parser/utils.py
new file mode 100644
index 0000000..e55e6e5
--- /dev/null
+++ b/tests/parser/utils.py
@@ -0,0 +1,78 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from aria.parser.loading import UriLocation
+from aria.parser.consumption import (
+    ConsumptionContext,
+    ConsumerChain,
+    Read,
+    Validate,
+    Model,
+    Types,
+    Inputs,
+    Instance
+)
+from aria.utils.imports import import_fullname
+
+from tests.resources import DIR
+
+
+SERVICE_TEMPLATES_DIR = os.path.join(DIR, 'service_templates')
+
+
+def get_uri(*args):
+    return os.path.join(SERVICE_TEMPLATES_DIR, *args)
+
+
+def create_context(uri,
+                   loader_source='aria.parser.loading.DefaultLoaderSource',
+                   reader_source='aria.parser.reading.DefaultReaderSource',
+                   presenter_source='aria.parser.presentation.DefaultPresenterSource',
+                   presenter=None,
+                   debug=False):
+    context = ConsumptionContext()
+    context.loading.loader_source = import_fullname(loader_source)()
+    context.reading.reader_source = import_fullname(reader_source)()
+    context.presentation.location = UriLocation(uri) if isinstance(uri, basestring) else uri
+    context.presentation.presenter_source = import_fullname(presenter_source)()
+    context.presentation.presenter_class = import_fullname(presenter)
+    context.presentation.print_exceptions = debug
+    return context
+
+
+def create_consumer(context, consumer_class_name):
+    consumer = ConsumerChain(context, (Read, Validate))
+    dumper = None
+    if consumer_class_name == 'validate':
+        dumper = None
+    elif consumer_class_name == 'presentation':
+        dumper = consumer.consumers[0]
+    elif consumer_class_name == 'model':
+        consumer.append(Model)
+    elif consumer_class_name == 'types':
+        consumer.append(Model, Types)
+    elif consumer_class_name == 'instance':
+        consumer.append(Model, Inputs, Instance)
+    else:
+        consumer.append(Model, Inputs, Instance)
+        consumer.append(import_fullname(consumer_class_name))
+
+    if dumper is None:
+        # Default to last consumer
+        dumper = consumer.consumers[-1]
+
+    return consumer, dumper
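
A usage sketch of these helpers outside the node-cellar fixtures (the template path
is hypothetical; everything else is taken from the code above):

    from tests.parser.utils import create_context, create_consumer

    context = create_context('/path/to/some-service-template.yaml')
    consumer, dumper = create_consumer(context, 'instance')
    consumer.consume()
    if context.validation.has_issues:
        context.validation.dump_issues()

consume_node_cellar() in service_templates.py wraps exactly this sequence for the
node-cellar template.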

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/tests/resources/service_templates/node-cellar/inputs.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service_templates/node-cellar/inputs.yaml b/tests/resources/service_templates/node-cellar/inputs.yaml
new file mode 100644
index 0000000..37ab9ea
--- /dev/null
+++ b/tests/resources/service_templates/node-cellar/inputs.yaml
@@ -0,0 +1,3 @@
+openstack_credential:
+    user: username
+    token: password

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/tests/resources/service_templates/node-cellar/node-cellar.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service_templates/node-cellar/node-cellar.yaml b/tests/resources/service_templates/node-cellar/node-cellar.yaml
new file mode 100644
index 0000000..3e579bd
--- /dev/null
+++ b/tests/resources/service_templates/node-cellar/node-cellar.yaml
@@ -0,0 +1,299 @@
+#
+# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved.
+# 
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+# 
+#      http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+
+# NFV is not used here, but we are using it just to validate the imports
+tosca_definitions_version: tosca_simple_profile_for_nfv_1_0
+#tosca_definitions_version: tosca_simple_yaml_1_0
+
+description: >-
+  Node Cellar TOSCA blueprint.
+  Here is some Unicode: 中國.
+
+metadata:
+  template_name: node-cellar
+  template_author: ARIA
+  template_version: '1.0.0'
+  aria_version: '0.0'
+
+imports:
+  - types/openstack.yaml
+  - types/nodejs.yaml
+  - types/mongodb.yaml
+  - types/nginx.yaml
+  - types/aria.yaml
+
+dsl_definitions:
+
+  default_openstack_credential: &DEFAULT_OPENSTACK_CREDENTIAL
+    user: openstackadmin
+    token: { concat: [ openstack, 123 ] }
+
+repositories:
+
+  node_cellar:
+    description: >-
+      The repository for the Node Cellar application and its dependencies.
+    url: https://github.com/ccoenraets/nodecellar/archive/
+    credential:
+      user: guest
+      token: ''
+
+interface_types:
+
+  Maintenance:
+    derived_from: tosca.interfaces.Root
+    enable: {}
+    disable: {}
+
+node_types:
+
+  NodeMongoApplication:
+    derived_from: nodejs.Application
+    interfaces:
+      Maintenance:
+        type: Maintenance
+    requirements:
+      - database:
+          capability: tosca.capabilities.Endpoint.Database
+          node: mongodb.Database
+
+topology_template:
+
+  inputs:
+    openstack_credential:
+      type: openstack.Credential
+      value: *DEFAULT_OPENSTACK_CREDENTIAL
+
+  node_templates:
+  
+    # Application
+
+    node_cellar:
+      description: >-
+        Node Cellar Node.js web application.
+      type: NodeMongoApplication
+      artifacts:
+        node_cellar:
+          description: >-
+            The Node Cellar application package.
+          type: os.Archive
+          file: master.zip
+          repository: node_cellar
+          deploy_path: /opt/nodejs/applications/node-cellar
+          properties:
+            unpack_credential:
+              user: gigaspaces
+              token: { get_property: [ SELF, app_endpoint, protocol ] }
+              #token: { get_property: [ HOST, flavor_name ] }
+      interfaces:
+        Maintenance:
+          enable: juju > charm.maintenance_on
+          disable: juju > charm.maintenance_off
+      requirements:
+        - database: node_cellar_database
+      capabilities:
+        app_endpoint:
+          properties:
+            protocol: udp
+            url_path: /nodecellar
+    
+    node_cellar_database:
+      description: >-
+        Node Cellar MongoDB database.
+      type: mongodb.Database
+      properties:
+        name: node_cellar
+      artifacts:
+        initial:
+          description: >-
+            The Node Cellar initial database.
+          type: mongodb.DatabaseDump
+          file: node-cellar.json
+          repository: node_cellar
+
+    # Server software
+    
+    nodejs:
+      description: >-
+        Node.js instance.
+      type: nodejs.Server
+      requirements:
+        - host: application_host
+      node_filter: # cannot be validated
+        properties:
+          #- flavor_name: { valid_values: [ {concat:[m1,.,small]} ] } # won't work because not validated :/
+          - flavor_name: { valid_values: [ m1.small ] }
+        capabilities:
+          - scalable:
+              properties:
+                - max_instances: { greater_or_equal: 8 }
+
+    mongodb:
+      description: >-
+        MongoDB instance.
+      type: mongodb.Server
+      requirements:
+        - host:
+            node: openstack.Instance
+            node_filter:
+              properties:
+                - flavor_name: { valid_values: [ m1.medium, { concat: [ { concat: [ m1, . ] }, large ] } ] }
+                #- flavor_name: { valid_values: [ m1.medium, m1.large ] }
+              capabilities:
+                - scalable:
+                    properties:
+                      - max_instances: { greater_or_equal: 8 }
+    
+    loadbalancer:
+      type: nginx.LoadBalancer
+      properties:
+        algorithm: round-robin      
+      requirements:
+        - host: loadbalancer_host
+    
+    # Hosts
+
+    loadbalancer_host:
+      description: >-
+        Host for the loadbalancer.
+      type: openstack.Instance
+      properties:
+        flavor_name: m1.small
+        os_users: # map of os.UserInfo
+          root:
+            password: admin123
+      interfaces:
+        Standard:
+          inputs:
+            openstack_credential: { get_input: openstack_credential }
+          configure: juju > charm.loadbalancer
+
+    application_host:
+      copy: loadbalancer_host
+      description: >-
+        Host for applications.
+      properties:
+        flavor_name: m1.small
+        os_users: # map of os.UserInfo
+          nodejs:
+            password: nodejs123
+            groups:
+              - www-data
+      capabilities:
+        scalable:
+          properties:
+            max_instances: 10
+
+    data_host:
+      copy: loadbalancer_host
+      description: >-
+        Host for data.
+      properties:
+        flavor_name: m1.large
+        flavor_id: 5d62e82c-924e-4fa9-b1e4-c133867596f7
+        os_users: # map of os.UserInfo
+          mongodb:
+            password: mongo123
+      requirements:
+        - local_storage:
+            node: data_volume
+            relationship:
+              properties:
+                location: /mnt/volume
+      capabilities:
+        scalable:
+          properties:
+            max_instances: 10
+
+    data_volume:
+      type: openstack.Volume
+      properties:
+        size: 10 GB
+      interfaces:
+        Standard:
+          inputs:
+            openstack_credential: { get_input: openstack_credential }
+
+  groups:
+  
+    node_cellar_group:
+      type: openstack.Secured
+      members:
+        - loadbalancer
+        - application_host
+        - data_host
+      interfaces:
+        Standard:
+          inputs:
+            openstack_credential: { get_input: openstack_credential }
+
+  policies:
+  
+    scaling:
+      type: openstack.Scaling
+      properties:
+        bandwidth_threshold: 2 GB
+      targets: # node templates or groups
+        - node_cellar_group
+    
+    juju:
+      description: >-
+        Juju plugin executes charms.
+      type: aria.Plugin
+      properties:
+        executor: host_agent
+        install: false
+    
+    maintenance_on:
+      type: MaintenanceWorkflow
+      properties:
+        enabled: true
+
+    maintenance_off:
+      type: MaintenanceWorkflow
+      properties:
+        enabled: false
+
+  substitution_mappings:
+
+    node_type: tosca.nodes.WebApplication
+    requirements:
+      host: [ node_cellar, host ] # doesn't really make sense; just for testing
+    capabilities:
+      app_endpoint: [ loadbalancer, client ]
+
+policy_types:
+
+  MaintenanceWorkflow:
+    description: >-
+      Workflow to put all nodes in/out of maintenance mode. For web servers, this will show a "this
+      site is under maintenance and we'll be back soon" web page. Database nodes will then close all
+      client connections cleanly and shut down services. 
+    derived_from: aria.Workflow
+    properties:
+      function: # @override
+        type: string
+        default: workflows.maintenance
+      implementation:
+        type: string
+        default: tests/resources/service_templates/node-cellar
+      enabled:
+        description: >-
+          Whether to turn maintenance mode on or off.
+        type: boolean
+      #ctx:
+      #  type: string
+      #  default: abc

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/tests/resources/service_templates/node-cellar/types/aria.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service_templates/node-cellar/types/aria.yaml b/tests/resources/service_templates/node-cellar/types/aria.yaml
new file mode 100644
index 0000000..2ddb238
--- /dev/null
+++ b/tests/resources/service_templates/node-cellar/types/aria.yaml
@@ -0,0 +1,93 @@
+
+policy_types:
+
+  aria.Plugin:
+    _extensions:
+      role: plugin
+    description: >-
+      ARIA Plugin definition.
+    derived_from: tosca.policies.Root
+    properties:
+      executor:
+        description: >-
+          Where to execute the plugin's operations.
+        type: string
+        constraints:
+          - valid_values: [ central_deployment_agent, host_agent ]
+      source:
+        description: >-
+          Where to retrieve the plugin from. Could be
+          either a path relative to the plugins dir inside the blueprint's root dir or a url. If
+          install is false, source is redundant. If install is true, source (or package_name) is
+          mandatory.
+        type: string
+        required: false
+      install_arguments:
+        description: >-
+          Optional arguments passed to the 'pip install' command created for the plugin
+          installation.
+        type: string
+        required: false
+      install:
+        description: >-
+          Whether to install the plugin or not as it might already be installed as part of the
+          agent.
+        type: boolean
+        default: true
+      package_name:
+        description: >-
+          Managed plugin package name. If install is false, package_name is redundant. If install is
+          true, package_name (or source) is mandatory.
+        type: string
+        required: false
+      package_version:
+        description: >-
+          Managed plugin package version.
+        type: string
+        required: false
+      supported_platform:
+        description: >-
+          Managed plugin supported platform (e.g. linux_x86_64).
+        type: string
+        required: false
+      supported_distribution:
+        description: >-
+          Managed plugin distribution.
+        type: string
+        required: false
+      distribution_version:
+        description: >-
+          Managed plugin distribution version.
+        type: string
+        required: false
+      distribution_release:
+        description: >-
+          Managed plugin distribution release.
+        type: string
+        required: false
+
+  aria.Workflow:
+    _extensions:
+      role: workflow
+    description: >-
+      ARIA Workflow definition.
+    derived_from: tosca.policies.Root
+    properties:
+      function:
+        description: >-
+          Python workflow function.
+        type: string
+      implementation:
+        description: >-
+          The implementation artifact name (i.e., the primary script file name within a TOSCA CSAR
+          file).
+        type: string
+        required: false
+      dependencies:
+        description: >-
+          The optional ordered list of one or more dependent or secondary implementation artifact
+          names which are referenced by the primary implementation artifact (e.g., a library the
+          script installs or a secondary script).
+        type: list
+        entry_schema: string
+        required: false

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/tests/resources/service_templates/node-cellar/types/mongodb.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service_templates/node-cellar/types/mongodb.yaml b/tests/resources/service_templates/node-cellar/types/mongodb.yaml
new file mode 100644
index 0000000..612dbcb
--- /dev/null
+++ b/tests/resources/service_templates/node-cellar/types/mongodb.yaml
@@ -0,0 +1,73 @@
+#
+# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved.
+# 
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+# 
+#      http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+
+imports:
+  - os.yaml
+
+node_types:
+
+  mongodb.Server:
+    description: >-
+      MongoDB server application.
+    derived_from: tosca.nodes.DBMS
+    properties:
+      root_password: # @override
+        type: string
+        default: admin
+      port: # @override
+        type: integer
+        default: 27017
+    artifacts:
+      mongodb:
+        description: >-
+          MongoDB application package.
+        type: os.Archive
+        file: https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-ubuntu1604-3.2.8.tgz
+        deploy_path: /opt/mongodb
+    capabilities:
+      host: # @override
+        type: tosca.capabilities.Container
+        valid_source_types: [ mongodb.Database ]
+
+  mongodb.Database:
+    description: >-
+      MongoDB database.
+      
+      Supports importing database data if a mongodb.DatabaseDump is provided.
+    derived_from: tosca.nodes.Database
+    interfaces:
+      Standard:
+        type: tosca.interfaces.node.lifecycle.Standard
+        create:
+          implementation:
+            primary: mongodb/create_and_import_database.sh
+            dependencies:
+              - mongodb/utils/api.sh
+              - utils/os.sh
+    requirements:
+      - host: # @override
+          capability: tosca.capabilities.Container
+          node: mongodb.Server
+          relationship: tosca.relationships.HostedOn
+
+artifact_types:
+
+  mongodb.DatabaseDump:
+    description: >-
+      Dump of a MongoDB database.
+    derived_from: tosca.artifacts.Root
+    file_ext:
+      - json

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/tests/resources/service_templates/node-cellar/types/nginx.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service_templates/node-cellar/types/nginx.yaml b/tests/resources/service_templates/node-cellar/types/nginx.yaml
new file mode 100644
index 0000000..8986a21
--- /dev/null
+++ b/tests/resources/service_templates/node-cellar/types/nginx.yaml
@@ -0,0 +1,27 @@
+#
+# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved.
+# 
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+# 
+#      http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+
+node_types:
+
+  nginx.LoadBalancer:
+    description: >-
+      Nginx as a loadbalancer.
+    derived_from: tosca.nodes.LoadBalancer
+    requirements:
+      - host:
+          capability: tosca.capabilities.Container
+          node: tosca.nodes.Compute
+          relationship: tosca.relationships.HostedOn

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/tests/resources/service_templates/node-cellar/types/nodejs.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service_templates/node-cellar/types/nodejs.yaml b/tests/resources/service_templates/node-cellar/types/nodejs.yaml
new file mode 100644
index 0000000..ec8dd83
--- /dev/null
+++ b/tests/resources/service_templates/node-cellar/types/nodejs.yaml
@@ -0,0 +1,69 @@
+#
+# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved.
+# 
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+# 
+#      http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+
+imports:
+  - os.yaml
+
+node_types:
+
+  nodejs.Server:
+    description: >-
+      Node.js server application.
+    derived_from: tosca.nodes.WebServer
+    artifacts:
+      nodejs:
+        description: >-
+          Node.js application package.
+        type: os.Archive
+        file: https://nodejs.org/dist/v4.4.7/node-v4.4.7-linux-x64.tar.xz
+        deploy_path: /opt/nodejs
+    capabilities:
+      data_endpoint: # @override 
+        type: tosca.capabilities.Endpoint
+        properties:
+          port:
+            type: tosca.datatypes.network.PortDef
+            default: 8080
+          url_path:
+            type: string
+            default: /
+      admin_endpoint: # @override
+        type: tosca.capabilities.Endpoint.Admin
+        properties:
+          port:
+            type: tosca.datatypes.network.PortDef
+            default: 8080
+          url_path:
+            type: string
+            default: /admin
+      host: # @override
+        type: tosca.capabilities.Container
+        valid_source_types: [ nodejs.Application ]
+
+  nodejs.Application:
+    derived_from: tosca.nodes.WebApplication
+    capabilities:
+      app_endpoint: # @override
+        type: tosca.capabilities.Endpoint
+        properties:
+          port:
+            type: tosca.datatypes.network.PortDef
+            default: 8080
+    requirements:
+      - host: # @override
+          capability: tosca.capabilities.Container
+          node: nodejs.Server
+          relationship: tosca.relationships.HostedOn

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/tests/resources/service_templates/node-cellar/types/openstack.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service_templates/node-cellar/types/openstack.yaml b/tests/resources/service_templates/node-cellar/types/openstack.yaml
new file mode 100644
index 0000000..a18da53
--- /dev/null
+++ b/tests/resources/service_templates/node-cellar/types/openstack.yaml
@@ -0,0 +1,201 @@
+#
+# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved.
+# 
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+# 
+#      http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+
+imports:
+  - os.yaml
+
+dsl_definitions:
+
+  openstack:
+    uuid_constraints: &OPENSTACK_UUID_CONSTRAINTS
+      - pattern: '^[0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{12}$'
+
+node_types:
+
+  openstack.Instance:
+    description: >-
+      OpenStack instance.
+
+      You may assign an image_id or attach an openstack.Image artifact (the artifact
+      will take precedence).
+    
+      You may assign either flavor_id or flavor_name (flavor_id will take precedence).
+      If neither are assigned, flavor_name has a default value.
+    derived_from: tosca.nodes.Compute
+    properties:
+      image_id:
+        description: >-
+          See: https://s3itwiki.uzh.ch/display/clouddoc/Supported+Images
+        type: openstack.UUID
+        default: 5d62e82c-924e-4fa9-b1e4-c133867596f7
+      flavor_id:
+        type: openstack.UUID
+        required: false
+      flavor_name:
+        type: string
+        default: m1.medium
+        required: false
+      availability_zone:
+        description: >-
+          OpenStack availability zone.
+        type: string
+        required: false
+      os_users:
+        type: map
+        entry_schema: os.UserInfo
+    interfaces:
+      Standard:
+        type: tosca.interfaces.node.lifecycle.Standard
+        inputs:
+          openstack_credential:
+            description: The OpenStack API credential for all operations.
+            type: openstack.Credential
+        create:
+          implementation:
+            primary: openstack/create_instance.sh
+            dependencies:
+              - openstack/utils/api.sh
+              - utils/os.sh
+    requirements:
+      - local_storage: # @override
+          capability: tosca.capabilities.Attachment
+          node: openstack.Volume
+          relationship: tosca.relationships.AttachesTo
+#          relationship:
+#            type: tosca.relationships.AttachesTo
+#            interfaces:
+#              Standard:
+#                inputs:
+#                  xxx:
+#                    type: string
+#                    default: { concat: [ a, b ] }
+          occurrences: [ 0, UNBOUNDED ]
+
+  openstack.Volume:
+    description: >-
+      OpenStack volume.
+      
+      See: http://developer.openstack.org/api-ref-blockstorage-v2.html
+    derived_from: tosca.nodes.BlockStorage
+    properties:
+      tenant_id:
+        type: openstack.UUID
+        required: false
+      availability_zone:
+        type: string
+        required: false
+      source_volid:
+        type: openstack.UUID
+        required: false
+      description:
+        type: string
+        required: false
+      multiattach:
+        type: boolean
+        default: false
+      #snapshot_id: # @override
+      #  type: openstack.UUID
+      #  required: false
+      name:
+        type: string
+        required: false
+      volume_type:
+        type: string
+        required: false
+      metadata:
+        type: map
+        entry_schema: string
+        required: false
+      source_replica:
+        type: openstack.UUID
+        required: false
+      consistencygroup_id:
+        type: openstack.UUID
+        required: false
+      scheduler_hints:
+        type: map
+        entry_schema: string
+        required: false
+    interfaces:
+      Standard:
+        type: tosca.interfaces.node.lifecycle.Standard
+        inputs:
+          openstack_credential:
+            description: The OpenStack API credential for all operations.
+            type: openstack.Credential
+        create:
+          implementation:
+            primary: openstack/create_volume.sh
+            dependencies:
+              - openstack/utils/api.sh
+              - utils/os.sh
+
+group_types:
+
+  openstack.Secured:
+    description: >-
+      OpenStack secured group.
+    derived_from: tosca.groups.Root
+    members:
+      - openstack.Instance
+    interfaces:
+      Standard:
+        type: tosca.interfaces.node.lifecycle.Standard
+        inputs:
+          openstack_credential:
+            description: The OpenStack API credential for all operations.
+            type: openstack.Credential
+        create:
+          implementation:
+            primary: openstack/create_secured_group.sh
+            dependencies:
+              - openstack/utils/api.sh
+              - utils/os.sh
+
+policy_types:
+
+  openstack.Scaling:
+    description: >-
+      OpenStack scaling policy.
+    derived_from: tosca.policies.Scaling
+    properties:
+      bandwidth_threshold:
+        type: scalar-unit.size
+        default: 1 GB
+    targets:
+      - openstack.Instance
+      - openstack.Secured
+
+data_types:
+
+  openstack.Credential:
+    derived_from: tosca.datatypes.Credential
+
+  openstack.UUID:
+    description: >-
+      OpenStack UUID (in GUID format).
+    derived_from: string
+    constraints: *OPENSTACK_UUID_CONSTRAINTS
+
+artifact_types:
+
+  openstack.Image:
+    description: >-
+      OpenStack image artifact.
+    derived_from: tosca.artifacts.Deployment.Image.VM
+    file_ext:
+      - img
+      - iso

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/tests/resources/service_templates/node-cellar/types/os.yaml
----------------------------------------------------------------------
diff --git a/tests/resources/service_templates/node-cellar/types/os.yaml b/tests/resources/service_templates/node-cellar/types/os.yaml
new file mode 100644
index 0000000..43ea78c
--- /dev/null
+++ b/tests/resources/service_templates/node-cellar/types/os.yaml
@@ -0,0 +1,75 @@
+#
+# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved.
+# 
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+# 
+#      http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+
+dsl_definitions:
+
+  os:
+    user_and_group_name_constraints: &OS_USER_AND_GROUP_NAME_CONSTRAINTS
+      - pattern: '^[a-z0-9_-]{3,16}$'
+    password_constraints: &OS_PASSWORD_CONSTRAINTS
+      - pattern: '^[a-z0-9_-]{6,18}$'
+
+artifact_types:
+
+  os.Package:
+    description: >-
+      Generic application package.
+    derived_from: tosca.artifacts.Root
+
+  os.Archive:
+    description: >-
+      Application package in an archive.
+    derived_from: os.Package
+    file_ext:
+      - zip
+      - tar
+      - tar.gz
+      - tar.xz
+    properties:
+      unpack_credential:
+        type: tosca.datatypes.Credential
+        required: false
+
+  os.Deb:
+    description: >-
+      Debian application package.
+    derived_from: os.Package
+    file_ext:
+      - deb
+
+  os.RPM:
+    description: >-
+      RPM application package.
+    derived_from: os.Package
+    file_ext:
+      - rpm
+
+data_types:
+
+  os.UserInfo:
+    description: >-
+      Information about an operating system user.
+    derived_from: tosca.datatypes.Root
+    properties:
+      password:
+        type: string
+        constraints: *OS_PASSWORD_CONSTRAINTS
+      groups:
+        type: list
+        entry_schema:
+          type: string
+          constraints: *OS_USER_AND_GROUP_NAME_CONSTRAINTS
+        required: false

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d35d09a3/tests/resources/service_templates/node-cellar/workflows.py
----------------------------------------------------------------------
diff --git a/tests/resources/service_templates/node-cellar/workflows.py b/tests/resources/service_templates/node-cellar/workflows.py
new file mode 100644
index 0000000..b3546bb
--- /dev/null
+++ b/tests/resources/service_templates/node-cellar/workflows.py
@@ -0,0 +1,19 @@
+
+from aria import workflow
+from aria.orchestrator.workflows.api.task import OperationTask
+
+
+@workflow
+def maintenance(ctx, graph, enabled):
+    """
+    Custom workflow to call the operations on the Maintenance interface.
+    """
+
+    operation = 'Maintenance.enable' if enabled else 'Maintenance.disable'
+
+    for node_instance in ctx.model.node_instance.iter():
+        if operation in node_instance.node.operations:
+            task = OperationTask.node_instance(
+                instance=node_instance,
+                name=operation)
+            graph.add_tasks(task)


[2/2] incubator-ariatosca git commit: ARIA-42-Generic-ctx-serialization-mechanism

Posted by mx...@apache.org.
ARIA-42-Generic-ctx-serialization-mechanism


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/1498ad39
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/1498ad39
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/1498ad39

Branch: refs/heads/ARIA-42-Generic-ctx-serialization-mechanism
Commit: 1498ad397bcbed5a69c01f6d512a251e375792c7
Parents: d35d09a
Author: mxmrlv <mx...@gmail.com>
Authored: Wed Feb 1 16:16:01 2017 +0200
Committer: mxmrlv <mx...@gmail.com>
Committed: Thu Feb 9 11:00:23 2017 +0200

----------------------------------------------------------------------
 aria/__init__.py                                | 25 +++---
 aria/orchestrator/context/operation.py          | 27 ++++++
 aria/orchestrator/context/serialization.py      | 95 --------------------
 aria/orchestrator/runner.py                     | 70 ++++-----------
 aria/orchestrator/workflows/executor/process.py | 11 +--
 aria/storage/api.py                             |  3 +-
 aria/storage/core.py                            | 55 ++++++++++--
 aria/storage/sql_mapi.py                        | 35 +++++++-
 setup.py                                        |  2 +-
 tests/mock/context.py                           | 32 ++++---
 tests/orchestrator/context/test_operation.py    |  7 +-
 .../context/test_resource_render.py             |  3 +-
 tests/orchestrator/context/test_serialize.py    | 19 ++--
 tests/orchestrator/context/test_toolbelt.py     |  2 +-
 tests/orchestrator/context/test_workflow.py     |  6 +-
 .../orchestrator/execution_plugin/test_local.py |  4 +-
 tests/orchestrator/execution_plugin/test_ssh.py |  4 +-
 tests/orchestrator/workflows/api/test_task.py   |  4 +-
 .../workflows/builtin/test_execute_operation.py |  2 +-
 .../orchestrator/workflows/builtin/test_heal.py |  2 +-
 .../workflows/builtin/test_install.py           |  2 +-
 .../workflows/builtin/test_uninstall.py         |  2 +-
 .../orchestrator/workflows/core/test_engine.py  |  2 +-
 tests/orchestrator/workflows/core/test_task.py  |  2 +-
 .../test_task_graph_into_exececution_graph.py   |  4 +-
 .../workflows/executor/test_executor.py         |  9 +-
 .../workflows/executor/test_process_executor.py | 26 ++++--
 .../executor/test_process_executor_extension.py |  2 +-
 .../test_process_executor_tracked_changes.py    |  2 +-
 tests/storage/__init__.py                       | 41 +++------
 tests/storage/test_instrumentation.py           |  9 +-
 tests/storage/test_model_storage.py             |  7 +-
 tests/storage/test_models.py                    |  4 +-
 tests/storage/test_structures.py                | 11 +--
 tests/utils/test_plugin.py                      |  6 +-
 35 files changed, 254 insertions(+), 283 deletions(-)
----------------------------------------------------------------------
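
In short: the ad-hoc module aria/orchestrator/context/serialization.py is deleted,
and each operation context (and its storages) now knows how to serialize itself via
a serialization_dict property and to rebuild itself via a deserialize_from_dict class
method. The intended round trip, sketched with a hypothetical operation_context
instance (not code from the commit):

    serialized = operation_context.serialization_dict
    context_cls = serialized['context_cls']
    restored = context_cls.deserialize_from_dict(**serialized['context'])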


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/aria/__init__.py
----------------------------------------------------------------------
diff --git a/aria/__init__.py b/aria/__init__.py
index 248aa1a..8b87473 100644
--- a/aria/__init__.py
+++ b/aria/__init__.py
@@ -57,7 +57,7 @@ def install_aria_extensions():
     extension.init()
 
 
-def application_model_storage(api, api_kwargs=None):
+def application_model_storage(api, api_kwargs=None, initiator=None, initiator_kwargs=None):
     """
     Initiate model storage
     """
@@ -78,19 +78,20 @@ def application_model_storage(api, api_kwargs=None):
         storage.model.Execution,
         storage.model.Task,
     ]
-    # if api not in _model_storage:
-    return storage.ModelStorage(api, items=models, api_kwargs=api_kwargs or {})
+    return storage.ModelStorage(api_cls=api,
+                                api_kwargs=api_kwargs,
+                                items=models,
+                                initiator=initiator,
+                                initiator_kwargs=initiator_kwargs or {})
 
 
-def application_resource_storage(api, api_kwargs=None):
+def application_resource_storage(api, api_kwargs=None, initiator=None, initiator_kwargs=None):
     """
     Initiate resource storage
     """
-    return storage.ResourceStorage(
-        api,
-        api_kwargs=api_kwargs or {},
-        items=[
-            'blueprint',
-            'deployment',
-            'plugin',
-        ])
+
+    return storage.ResourceStorage(api_cls=api,
+                                   api_kwargs=api_kwargs,
+                                   items=['blueprint', 'deployment', 'plugin'],
+                                   initiator=initiator,
+                                   initiator_kwargs=initiator_kwargs)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/aria/orchestrator/context/operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/operation.py b/aria/orchestrator/context/operation.py
index 23a6fd4..d1f61b2 100644
--- a/aria/orchestrator/context/operation.py
+++ b/aria/orchestrator/context/operation.py
@@ -17,6 +17,7 @@
 Workflow and operation contexts
 """
 
+import aria
 from aria.utils import file
 from .common import BaseContext
 
@@ -73,6 +74,32 @@ class BaseOperationContext(BaseContext):
         file.makedirs(plugin_workdir)
         return plugin_workdir
 
+    @property
+    def serialization_dict(self):
+        context_cls = self.__class__
+        context_dict = {
+            'name': self.name,
+            'deployment_id': self._deployment_id,
+            'task_id': self._task_id,
+            'actor_id': self._actor_id,
+            'workdir': self._workdir,
+            'model_storage': self.model.serialization_dict if self.model else None,
+            'resource_storage': self.resource.serialization_dict if self.resource else None
+        }
+        return {
+            'context_cls': context_cls,
+            'context': context_dict
+        }
+
+    @classmethod
+    def deserialize_from_dict(cls, model_storage=None, resource_storage=None, **kwargs):
+        if model_storage:
+            model_storage = aria.application_model_storage(**model_storage)
+        if resource_storage:
+            resource_storage = aria.application_resource_storage(**resource_storage)
+
+        return cls(model_storage=model_storage, resource_storage=resource_storage, **kwargs)
+
 
 class NodeOperationContext(BaseOperationContext):
     """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/aria/orchestrator/context/serialization.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/serialization.py b/aria/orchestrator/context/serialization.py
deleted file mode 100644
index 760818f..0000000
--- a/aria/orchestrator/context/serialization.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sqlalchemy.orm
-import sqlalchemy.pool
-
-import aria
-
-
-def operation_context_to_dict(context):
-    context_cls = context.__class__
-    context_dict = {
-        'name': context.name,
-        'deployment_id': context._deployment_id,
-        'task_id': context._task_id,
-        'actor_id': context._actor_id,
-        'workdir': context._workdir
-    }
-    if context.model:
-        model = context.model
-        context_dict['model_storage'] = {
-            'api_cls': model.api,
-            'api_kwargs': _serialize_sql_mapi_kwargs(model)
-        }
-    else:
-        context_dict['model_storage'] = None
-    if context.resource:
-        resource = context.resource
-        context_dict['resource_storage'] = {
-            'api_cls': resource.api,
-            'api_kwargs': _serialize_file_rapi_kwargs(resource)
-        }
-    else:
-        context_dict['resource_storage'] = None
-    return {
-        'context_cls': context_cls,
-        'context': context_dict
-    }
-
-
-def operation_context_from_dict(context_dict):
-    context_cls = context_dict['context_cls']
-    context = context_dict['context']
-
-    model_storage = context['model_storage']
-    if model_storage:
-        api_cls = model_storage['api_cls']
-        api_kwargs = _deserialize_sql_mapi_kwargs(model_storage.get('api_kwargs', {}))
-        context['model_storage'] = aria.application_model_storage(api=api_cls,
-                                                                  api_kwargs=api_kwargs)
-
-    resource_storage = context['resource_storage']
-    if resource_storage:
-        api_cls = resource_storage['api_cls']
-        api_kwargs = _deserialize_file_rapi_kwargs(resource_storage.get('api_kwargs', {}))
-        context['resource_storage'] = aria.application_resource_storage(api=api_cls,
-                                                                        api_kwargs=api_kwargs)
-
-    return context_cls(**context)
-
-
-def _serialize_sql_mapi_kwargs(model):
-    engine_url = str(model._api_kwargs['engine'].url)
-    assert ':memory:' not in engine_url
-    return {'engine_url': engine_url}
-
-
-def _deserialize_sql_mapi_kwargs(api_kwargs):
-    engine_url = api_kwargs.get('engine_url')
-    if not engine_url:
-        return {}
-    engine = sqlalchemy.create_engine(engine_url)
-    session_factory = sqlalchemy.orm.sessionmaker(bind=engine)
-    session = sqlalchemy.orm.scoped_session(session_factory=session_factory)
-    return {'session': session, 'engine': engine}
-
-
-def _serialize_file_rapi_kwargs(resource):
-    return {'directory': resource._api_kwargs['directory']}
-
-
-def _deserialize_file_rapi_kwargs(api_kwargs):
-    return api_kwargs

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/aria/orchestrator/runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/runner.py b/aria/orchestrator/runner.py
index 16acc19..5950dc5 100644
--- a/aria/orchestrator/runner.py
+++ b/aria/orchestrator/runner.py
@@ -17,20 +17,20 @@
 Workflow runner
 """
 
-import platform
 import tempfile
 import os
 
-from sqlalchemy import (create_engine, orm) # @UnresolvedImport
-from sqlalchemy.pool import StaticPool # @UnresolvedImport
-
 from .context.workflow import WorkflowContext
 from .workflows.core.engine import Engine
 from .workflows.executor.thread import ThreadExecutor
-from ..storage import model
-from ..storage.sql_mapi import SQLAlchemyModelAPI
-from ..storage.filesystem_rapi import FileSystemResourceAPI
-from .. import (application_model_storage, application_resource_storage)
+from ..storage import (
+    sql_mapi,
+    filesystem_rapi,
+)
+from .. import (
+    application_model_storage,
+    application_resource_storage
+)
 
 
 SQLITE_IN_MEMORY = 'sqlite:///:memory:'
@@ -57,6 +57,8 @@ class Runner(object):
             os.close(the_file)
 
         self._storage_path = storage_path
+        self._storage_dir = os.path.dirname(storage_path)
+        self._storage_name = os.path.basename(storage_path)
         self._is_storage_temporary = is_storage_temporary
 
         workflow_context = self.create_workflow_context(workflow_name, deployment_id,
@@ -76,9 +78,13 @@ class Runner(object):
             self.cleanup()
 
     def create_workflow_context(self, workflow_name, deployment_id, initialize_model_storage_fn):
-        model_storage = self.create_sqlite_model_storage()
+        self.cleanup()
+        model_storage = application_model_storage(
+            sql_mapi.SQLAlchemyModelAPI,
+            initiator_kwargs=dict(base_dir=self._storage_dir, filename=self._storage_name))
         initialize_model_storage_fn(model_storage)
-        resource_storage = self.create_fs_resource_storage()
+        resource_storage = application_resource_storage(
+            filesystem_rapi.FileSystemResourceAPI, api_kwargs=dict(directory='.'))
         return WorkflowContext(
             name=workflow_name,
             model_storage=model_storage,
@@ -88,48 +94,8 @@ class Runner(object):
             task_max_attempts=1,
             task_retry_interval=1)
 
-    def create_sqlite_model_storage(self): # pylint: disable=no-self-use
-        self.cleanup()
-
-        # Engine
-        if self._storage_path is None:
-            # In memory
-            # Causes serious threading problems:
-            # https://gehrcke.de/2015/05/in-memory-sqlite-database-and-flask-a-threading-trap/
-            sqlite_engine = create_engine(
-                SQLITE_IN_MEMORY,
-                connect_args={'check_same_thread': False},
-                poolclass=StaticPool)
-        else:
-            path_prefix = '' if 'Windows' in platform.system() else '/'
-            sqlite_engine = create_engine(
-                'sqlite:///%s%s' % (path_prefix, self._storage_path))
-
-        # Models
-        model.DeclarativeBase.metadata.create_all(bind=sqlite_engine) # @UndefinedVariable
-
-        # Session
-        sqlite_session_factory = orm.sessionmaker(bind=sqlite_engine)
-        if self._storage_path is None:
-            sqlite_session = sqlite_session_factory()
-        else:
-            # File-based storage only
-            sqlite_session = orm.scoped_session(session_factory=sqlite_session_factory)
-
-        # Storage
-        sqlite_kwargs = dict(engine=sqlite_engine, session=sqlite_session)
-        return application_model_storage(
-            SQLAlchemyModelAPI,
-            api_kwargs=sqlite_kwargs)
-
-    def create_fs_resource_storage(self, directory='.'): # pylint: disable=no-self-use
-        fs_kwargs = dict(directory=directory)
-        return application_resource_storage(
-            FileSystemResourceAPI,
-            api_kwargs=fs_kwargs)
 
     def cleanup(self):
-        if self._is_storage_temporary \
-            and (self._storage_path is not None) \
-            and os.path.isfile(self._storage_path):
+        if (self._is_storage_temporary and (self._storage_path is not None) and
+                os.path.isfile(self._storage_path)):
             os.remove(self._storage_path)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/aria/orchestrator/workflows/executor/process.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py
index 7d990fa..c4b8ba1 100644
--- a/aria/orchestrator/workflows/executor/process.py
+++ b/aria/orchestrator/workflows/executor/process.py
@@ -37,6 +37,7 @@ import struct
 import subprocess
 import tempfile
 import Queue
+import pickle
 
 import jsonpickle
 
@@ -45,7 +46,6 @@ from aria.extension import process_executor
 from aria.utils import imports
 from aria.utils import exceptions
 from aria.orchestrator.workflows.executor import base
-from aria.orchestrator.context import serialization
 from aria.storage import instrumentation
 from aria.storage import type as storage_type
 
@@ -113,7 +113,7 @@ class ProcessExecutor(base.BaseExecutor):
         file_descriptor, arguments_json_path = tempfile.mkstemp(prefix='executor-', suffix='.json')
         os.close(file_descriptor)
         with open(arguments_json_path, 'wb') as f:
-            f.write(jsonpickle.dumps(self._create_arguments_dict(task)))
+            f.write(pickle.dumps(self._create_arguments_dict(task)))
 
         env = os.environ.copy()
         # See _update_env for plugin_prefix usage
@@ -193,7 +193,7 @@ class ProcessExecutor(base.BaseExecutor):
             'operation_mapping': task.operation_mapping,
             'operation_inputs': task.inputs,
             'port': self._server_port,
-            'context': serialization.operation_context_to_dict(task.context),
+            'context': task.context.serialization_dict,
         }
 
     def _update_env(self, env, plugin_prefix):
@@ -306,7 +306,7 @@ def _patch_session(ctx, messenger, instrument):
 def _main():
     arguments_json_path = sys.argv[1]
     with open(arguments_json_path) as f:
-        arguments = jsonpickle.loads(f.read())
+        arguments = pickle.loads(f.read())
 
     # arguments_json_path is a temporary file created by the parent process.
     # so we remove it here
@@ -327,7 +327,7 @@ def _main():
 
     with instrumentation.track_changes() as instrument:
         try:
-            ctx = serialization.operation_context_from_dict(context_dict)
+            ctx = context_dict['context_cls'].deserialize_from_dict(**context_dict['context'])
             _patch_session(ctx=ctx, messenger=messenger, instrument=instrument)
             task_func = imports.load_attribute(operation_mapping)
             aria.install_aria_extensions()
@@ -338,5 +338,6 @@ def _main():
         except BaseException as e:
             messenger.failed(exception=e, tracked_changes=instrument.tracked_changes)
 
+
 if __name__ == '__main__':
     _main()
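
A side note on the jsonpickle-to-pickle switch: the arguments dict now carries live Python
objects (the context class, the storage API class, the initiator function), which pickle
serializes by reference and the child process resolves by import. A minimal sketch of the
round trip through the temporary arguments file, with Example standing in for a real
context class:

    import pickle

    class Example(object):  # stand-in for a real operation context class
        pass

    arguments = {'context': {'context_cls': Example, 'context': {}}}
    restored = pickle.loads(pickle.dumps(arguments))
    assert restored['context']['context_cls'] is Example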

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/aria/storage/api.py
----------------------------------------------------------------------
diff --git a/aria/storage/api.py b/aria/storage/api.py
index d6fc3b8..f6da6de 100644
--- a/aria/storage/api.py
+++ b/aria/storage/api.py
@@ -119,11 +119,12 @@ class ResourceAPI(StorageAPI):
     """
     A Base object for the resource.
     """
-    def __init__(self, name):
+    def __init__(self, name, **kwargs):
         """
         Base resource API
         :param str name: the resource type
         """
+        super(ResourceAPI, self).__init__(**kwargs)
         self._name = name
 
     @property
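
The added **kwargs and super() call let ResourceAPI take part in a cooperative __init__
chain, so a concrete API can mix in extra bases whose constructors also consume keyword
arguments. A toy sketch of the pattern (all classes here are hypothetical; only the
super() forwarding matches the change):

    class StorageAPI(object):
        def __init__(self, **kwargs):
            super(StorageAPI, self).__init__(**kwargs)

    class ResourceAPI(StorageAPI):
        def __init__(self, name, **kwargs):
            super(ResourceAPI, self).__init__(**kwargs)  # forwards e.g. 'directory' down the MRO
            self._name = name

    class DirectoryMixin(object):
        def __init__(self, directory=None, **kwargs):
            super(DirectoryMixin, self).__init__(**kwargs)
            self.directory = directory

    class FileResourceAPI(ResourceAPI, DirectoryMixin):
        pass

    api = FileResourceAPI(name='blueprint', directory='/tmp/resources')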

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/aria/storage/core.py
----------------------------------------------------------------------
diff --git a/aria/storage/core.py b/aria/storage/core.py
index 94b4fe0..3878dca 100644
--- a/aria/storage/core.py
+++ b/aria/storage/core.py
@@ -39,7 +39,10 @@ API:
 """
 
 from aria.logger import LoggerMixin
-from . import api as storage_api
+from . import (
+    api as storage_api,
+    sql_mapi
+)
 
 __all__ = (
     'Storage',
@@ -52,11 +55,33 @@ class Storage(LoggerMixin):
     """
     Represents the storage
     """
-    def __init__(self, api_cls, api_kwargs=None, items=(), **kwargs):
+    def __init__(self,
+                 api_cls,
+                 api_kwargs=None,
+                 items=(),
+                 initiator=None,
+                 initiator_kwargs=None,
+                 **kwargs):
+        """
+
+        :param api_cls: API cls for each model.
+        :param api_kwargs:
+        :param items: the items to register
+        :param initiator: a func which initializes the storage before the first use.
+        This function should return a dict, this dict would be passed in addition to the api kwargs.
+        This enables the creation of any unpickable objects across process.
+        :param initiator_kwargs:
+        :param kwargs:
+        """
         super(Storage, self).__init__(**kwargs)
         self.api = api_cls
-        self._api_kwargs = api_kwargs or {}
         self.registered = {}
+        self._initiator = initiator
+        self._initiator_kwargs = initiator_kwargs or {}
+        self._api_kwargs = api_kwargs or {}
+        self._additional_api_kwargs = {}
+        if self._initiator:
+            self._additional_api_kwargs = self._initiator(**self._initiator_kwargs)
         for item in items:
             self.register(item)
         self.logger.debug('{name} object is ready: {0!r}'.format(
@@ -71,6 +96,15 @@ class Storage(LoggerMixin):
         except KeyError:
             return super(Storage, self).__getattribute__(item)
 
+    @property
+    def serialization_dict(self):
+        return {
+            'api': self.api,
+            'api_kwargs': self._api_kwargs,
+            'initiator': self._initiator,
+            'initiator_kwargs': self._initiator_kwargs
+        }
+
     def register(self, entry):
         """
         Register the entry to the storage
@@ -90,7 +124,9 @@ class ResourceStorage(Storage):
         :param name:
         :return:
         """
-        self.registered[name] = self.api(name=name, **self._api_kwargs)
+        kwargs = self._api_kwargs.copy()
+        kwargs.update(self._additional_api_kwargs)
+        self.registered[name] = self.api(name=name, **kwargs)
         self.registered[name].create()
         self.logger.debug('setup {name} in storage {self!r}'.format(name=name, self=self))
 
@@ -99,6 +135,11 @@ class ModelStorage(Storage):
     """
     Represents model storage.
     """
+    def __init__(self, *args, **kwargs):
+        if kwargs.get('initiator', None) is None:
+            kwargs['initiator'] = sql_mapi.init_storage
+        super(ModelStorage, self).__init__(*args, **kwargs)
+
     def register(self, model_cls):
         """
         Register the model into the model storage.
@@ -110,9 +151,9 @@ class ModelStorage(Storage):
             self.logger.debug('{name} already in storage {self!r}'.format(name=model_name,
                                                                           self=self))
             return
-        self.registered[model_name] = self.api(name=model_name,
-                                               model_cls=model_cls,
-                                               **self._api_kwargs)
+        kwargs = self._api_kwargs.copy()
+        kwargs.update(self._additional_api_kwargs)
+        self.registered[model_name] = self.api(name=model_name, model_cls=model_cls, **kwargs)
         self.registered[model_name].create()
         self.logger.debug('setup {name} in storage {self!r}'.format(name=model_name, self=self))
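
Taken together with serialization_dict/deserialize_from_dict on the operation context, the
storage now describes itself in terms of picklable inputs only, and the receiving process
re-runs the initiator to recreate whatever cannot cross the boundary (engine, session).
A hedged sketch of the round trip:

    import tempfile

    import aria
    from aria.storage import sql_mapi

    storage = aria.application_model_storage(
        sql_mapi.SQLAlchemyModelAPI,
        initiator_kwargs=dict(base_dir=tempfile.mkdtemp()))  # placeholder directory

    description = storage.serialization_dict   # api, api_kwargs, initiator, initiator_kwargs
    rebuilt = aria.application_model_storage(**description)  # what deserialize_from_dict does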
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/aria/storage/sql_mapi.py
----------------------------------------------------------------------
diff --git a/aria/storage/sql_mapi.py b/aria/storage/sql_mapi.py
index 809f677..9dae08a 100644
--- a/aria/storage/sql_mapi.py
+++ b/aria/storage/sql_mapi.py
@@ -15,13 +15,19 @@
 """
 SQLAlchemy based MAPI
 """
+import os
+import platform
 
+from sqlalchemy import (
+    create_engine,
+    orm,
+)
 from sqlalchemy.exc import SQLAlchemyError
 
 from aria.utils.collections import OrderedDict
-from aria.storage import (
+from . import (
     api,
-    exceptions
+    exceptions,
 )
 
 
@@ -364,6 +370,31 @@ class SQLAlchemyModelAPI(api.ModelAPI):
             getattr(instance, rel.key)
 
 
+def init_storage(base_dir, filename='db.sqlite'):
+    """
+    A builtin ModelStorage initiator.
+    Creates a sqlalchemy engine and a session to be passed to the mapi.
+
+    Initiator_kwargs must be passed to the ModelStorage which must hold the base_dir for the
+    location of the db file, and an option filename. This would create an sqlite db.
+    :param base_dir: the dir of the db
+    :param filename: the db file name.
+    :return:
+    """
+    uri = 'sqlite:///{platform_char}{path}'.format(
+        # Handles the Windows behavior where there is no root, only drives,
+        # thus behaving as a relative path.
+        platform_char='' if 'Windows' in platform.system() else '/',
+
+        path=os.path.join(base_dir, filename))
+
+    engine = create_engine(uri)
+    session_factory = orm.sessionmaker(bind=engine)
+    session = orm.scoped_session(session_factory=session_factory)
+
+    return dict(engine=engine, session=session)
+
+
 class ListResult(object):
     """
     a ListResult contains results about the requested items.

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/setup.py
----------------------------------------------------------------------
diff --git a/setup.py b/setup.py
index 2d1106d..7a1a3f4 100644
--- a/setup.py
+++ b/setup.py
@@ -64,7 +64,7 @@ console_scripts = ['aria = aria.cli.cli:main']
 
 class InstallCommand(install):
     user_options = install.user_options + [
-        ('skip-ctx', None, 'Install with or without the ctx (Defaults to False')
+        ('skip-ctx', None, 'Install with or without the ctx (Defaults to False)')
     ]
     boolean_options = install.boolean_options + ['skip-ctx']
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/mock/context.py
----------------------------------------------------------------------
diff --git a/tests/mock/context.py b/tests/mock/context.py
index ec4bfb8..cb040ae 100644
--- a/tests/mock/context.py
+++ b/tests/mock/context.py
@@ -13,28 +13,32 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import os
+
 import aria
 from aria.orchestrator import context
-from aria.storage.filesystem_rapi import FileSystemResourceAPI
-from aria.storage.sql_mapi import SQLAlchemyModelAPI
+from aria.storage import (
+    sql_mapi,
+    filesystem_rapi,
+)
 
 from . import models
+from ..storage import init_inmemory_model_storage
 from .topology import create_simple_topology_two_nodes
 
 
-def simple(mapi_kwargs, resources_dir=None, **kwargs):
-    model_storage = aria.application_model_storage(SQLAlchemyModelAPI, api_kwargs=mapi_kwargs)
+def simple(tmpdir, inmemory=False, context_kwargs=None):
+    initiator = init_inmemory_model_storage if inmemory else None
+    initiator_kwargs = {} if inmemory else dict(base_dir=tmpdir)
 
-    deployment_id = create_simple_topology_two_nodes(model_storage)
+    model_storage = aria.application_model_storage(
+        sql_mapi.SQLAlchemyModelAPI, initiator=initiator, initiator_kwargs=initiator_kwargs)
+    resource_storage = aria.application_resource_storage(
+        filesystem_rapi.FileSystemResourceAPI,
+        api_kwargs=dict(directory=os.path.join(tmpdir, 'resources'))
+    )
 
-    # pytest tmpdir
-    if resources_dir:
-        resource_storage = aria.application_resource_storage(
-            FileSystemResourceAPI,
-            api_kwargs={'directory': resources_dir}
-        )
-    else:
-        resource_storage = None
+    deployment_id = create_simple_topology_two_nodes(model_storage)
 
     final_kwargs = dict(
         name='simple_context',
@@ -45,5 +49,5 @@ def simple(mapi_kwargs, resources_dir=None, **kwargs):
         task_max_attempts=models.TASK_MAX_ATTEMPTS,
         task_retry_interval=models.TASK_RETRY_INTERVAL
     )
-    final_kwargs.update(kwargs)
+    final_kwargs.update(context_kwargs or {})
     return context.workflow.WorkflowContext(**final_kwargs)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index b0918d1..e8c7cca 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -37,8 +37,11 @@ global_test_holder = {}
 
 @pytest.fixture
 def ctx(tmpdir):
-    context = mock.context.simple(storage.get_sqlite_api_kwargs(str(tmpdir)),
-                                  workdir=str(tmpdir.join('workdir')))
+    context = mock.context.simple(
+        str(tmpdir.join('workdir')),
+        inmemory=True,
+        context_kwargs=dict(workdir=str(tmpdir.join('workdir')))
+    )
     yield context
     storage.release_sqlite_storage(context.model)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/context/test_resource_render.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_resource_render.py b/tests/orchestrator/context/test_resource_render.py
index ca2ef42..ded18c8 100644
--- a/tests/orchestrator/context/test_resource_render.py
+++ b/tests/orchestrator/context/test_resource_render.py
@@ -53,8 +53,7 @@ def test_download_resource_and_render_provided_variables(tmpdir, ctx):
 
 @pytest.fixture
 def ctx(tmpdir):
-    context = mock.context.simple(storage.get_sqlite_api_kwargs(),
-                                  resources_dir=str(tmpdir.join('resources')))
+    context = mock.context.simple(str(tmpdir))
     yield context
     storage.release_sqlite_storage(context.model)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/context/test_serialize.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_serialize.py b/tests/orchestrator/context/test_serialize.py
index 76930b1..ee123a7 100644
--- a/tests/orchestrator/context/test_serialize.py
+++ b/tests/orchestrator/context/test_serialize.py
@@ -16,13 +16,11 @@
 import pytest
 
 import aria
-from aria.storage.sql_mapi import SQLAlchemyModelAPI
+from aria.storage import sql_mapi
 from aria.orchestrator.workflows import api
 from aria.orchestrator.workflows.core import engine
 from aria.orchestrator.workflows.executor import process
 from aria.orchestrator import workflow, operation
-from aria.orchestrator.context import serialization
-
 import tests
 from tests import mock
 from tests import storage
@@ -42,10 +40,6 @@ def test_serialize_operation_context(context, executor, tmpdir):
     eng.execute()
 
 
-def test_illegal_serialize_of_memory_model_storage(memory_model_storage):
-    with pytest.raises(AssertionError):
-        serialization._serialize_sql_mapi_kwargs(memory_model_storage)
-
 
 @workflow
 def _mock_workflow(ctx, graph):
@@ -93,16 +87,17 @@ def executor():
 
 @pytest.fixture
 def context(tmpdir):
-    result = mock.context.simple(storage.get_sqlite_api_kwargs(str(tmpdir)),
-                                 resources_dir=str(tmpdir.join('resources')),
-                                 workdir=str(tmpdir.join('workdir')))
+    result = mock.context.simple(
+        str(tmpdir),
+        context_kwargs=dict(workdir=str(tmpdir.join('workdir')))
+    )
+
     yield result
     storage.release_sqlite_storage(result.model)
 
 
 @pytest.fixture
 def memory_model_storage():
-    result = aria.application_model_storage(
-        SQLAlchemyModelAPI, api_kwargs=storage.get_sqlite_api_kwargs())
+    result = aria.application_model_storage(sql_mapi.SQLAlchemyModelAPI)
     yield result
     storage.release_sqlite_storage(result)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index b63811b..beb5730 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -33,7 +33,7 @@ global_test_holder = {}
 
 @pytest.fixture
 def workflow_context(tmpdir):
-    context = mock.context.simple(storage.get_sqlite_api_kwargs(str(tmpdir)))
+    context = mock.context.simple(str(tmpdir), inmemory=True)
     yield context
     storage.release_sqlite_storage(context.model)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/context/test_workflow.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_workflow.py b/tests/orchestrator/context/test_workflow.py
index 496c1ff..bb54037 100644
--- a/tests/orchestrator/context/test_workflow.py
+++ b/tests/orchestrator/context/test_workflow.py
@@ -19,7 +19,7 @@ import pytest
 
 from aria import application_model_storage
 from aria.orchestrator import context
-from aria.storage.sql_mapi import SQLAlchemyModelAPI
+from aria.storage import sql_mapi
 from tests import storage as test_storage
 from tests.mock import models
 
@@ -60,8 +60,8 @@ class TestWorkflowContext(object):
 
 @pytest.fixture(scope='function')
 def storage():
-    api_kwargs = test_storage.get_sqlite_api_kwargs()
-    workflow_storage = application_model_storage(SQLAlchemyModelAPI, api_kwargs=api_kwargs)
+    workflow_storage = application_model_storage(
+        sql_mapi.SQLAlchemyModelAPI, initiator=test_storage.init_inmemory_model_storage)
     workflow_storage.blueprint.put(models.get_blueprint())
     blueprint = workflow_storage.blueprint.get_by_name(models.BLUEPRINT_NAME)
     workflow_storage.deployment.put(models.get_deployment(blueprint))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/execution_plugin/test_local.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_local.py b/tests/orchestrator/execution_plugin/test_local.py
index 497da48..86f2aa7 100644
--- a/tests/orchestrator/execution_plugin/test_local.py
+++ b/tests/orchestrator/execution_plugin/test_local.py
@@ -503,9 +503,7 @@ if __name__ == '__main__':
 
     @pytest.fixture
     def workflow_context(self, tmpdir):
-        workflow_context = mock.context.simple(
-            storage.get_sqlite_api_kwargs(str(tmpdir)),
-            resources_dir=str(tmpdir.join('resources')))
+        workflow_context = mock.context.simple(str(tmpdir), inmemory=False)
         workflow_context.states = []
         workflow_context.exception = None
         yield workflow_context

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/execution_plugin/test_ssh.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_ssh.py b/tests/orchestrator/execution_plugin/test_ssh.py
index 6b5c783..65195c8 100644
--- a/tests/orchestrator/execution_plugin/test_ssh.py
+++ b/tests/orchestrator/execution_plugin/test_ssh.py
@@ -265,9 +265,7 @@ class TestWithActualSSHServer(object):
 
     @pytest.fixture
     def workflow_context(self, tmpdir):
-        workflow_context = mock.context.simple(
-            storage.get_sqlite_api_kwargs(str(tmpdir)),
-            resources_dir=str(tmpdir.join('resources')))
+        workflow_context = mock.context.simple(str(tmpdir))
         workflow_context.states = []
         workflow_context.exception = None
         yield workflow_context

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/workflows/api/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/api/test_task.py b/tests/orchestrator/workflows/api/test_task.py
index 601c437..bb629ef 100644
--- a/tests/orchestrator/workflows/api/test_task.py
+++ b/tests/orchestrator/workflows/api/test_task.py
@@ -24,13 +24,13 @@ from tests import mock, storage
 
 
 @pytest.fixture
-def ctx():
+def ctx(tmpdir):
     """
     Create the following graph in storage:
     dependency_node <------ dependent_node
     :return:
     """
-    simple_context = mock.context.simple(storage.get_sqlite_api_kwargs())
+    simple_context = mock.context.simple(str(tmpdir), inmemory=False)
     simple_context.model.execution.put(mock.models.get_execution(simple_context.deployment))
     yield simple_context
     storage.release_sqlite_storage(simple_context.model)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/workflows/builtin/test_execute_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_execute_operation.py b/tests/orchestrator/workflows/builtin/test_execute_operation.py
index b7e5678..87e3425 100644
--- a/tests/orchestrator/workflows/builtin/test_execute_operation.py
+++ b/tests/orchestrator/workflows/builtin/test_execute_operation.py
@@ -24,7 +24,7 @@ from tests import storage
 
 @pytest.fixture
 def ctx(tmpdir):
-    context = mock.context.simple(storage.get_sqlite_api_kwargs(str(tmpdir)))
+    context = mock.context.simple(str(tmpdir))
     yield context
     storage.release_sqlite_storage(context.model)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/workflows/builtin/test_heal.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_heal.py b/tests/orchestrator/workflows/builtin/test_heal.py
index b470790..3e4498f 100644
--- a/tests/orchestrator/workflows/builtin/test_heal.py
+++ b/tests/orchestrator/workflows/builtin/test_heal.py
@@ -26,7 +26,7 @@ from . import (assert_node_install_operations,
 
 @pytest.fixture
 def ctx(tmpdir):
-    context = mock.context.simple(storage.get_sqlite_api_kwargs(str(tmpdir)))
+    context = mock.context.simple(str(tmpdir))
     yield context
     storage.release_sqlite_storage(context.model)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/workflows/builtin/test_install.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_install.py b/tests/orchestrator/workflows/builtin/test_install.py
index 789a161..1791719 100644
--- a/tests/orchestrator/workflows/builtin/test_install.py
+++ b/tests/orchestrator/workflows/builtin/test_install.py
@@ -25,7 +25,7 @@ from . import assert_node_install_operations
 
 @pytest.fixture
 def ctx(tmpdir):
-    context = mock.context.simple(storage.get_sqlite_api_kwargs(str(tmpdir)))
+    context = mock.context.simple(str(tmpdir))
     yield context
     storage.release_sqlite_storage(context.model)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/workflows/builtin/test_uninstall.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_uninstall.py b/tests/orchestrator/workflows/builtin/test_uninstall.py
index 126c4cf..791291f 100644
--- a/tests/orchestrator/workflows/builtin/test_uninstall.py
+++ b/tests/orchestrator/workflows/builtin/test_uninstall.py
@@ -26,7 +26,7 @@ from . import assert_node_uninstall_operations
 
 @pytest.fixture
 def ctx(tmpdir):
-    context = mock.context.simple(storage.get_sqlite_api_kwargs(str(tmpdir)))
+    context = mock.context.simple(str(tmpdir))
     yield context
     storage.release_sqlite_storage(context.model)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index d9b50a9..05a3d90 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -124,7 +124,7 @@ class BaseTest(object):
 
     @pytest.fixture
     def workflow_context(self, tmpdir):
-        workflow_context = mock.context.simple(storage.get_sqlite_api_kwargs(str(tmpdir)))
+        workflow_context = mock.context.simple(str(tmpdir))
         workflow_context.states = []
         workflow_context.exception = None
         yield workflow_context

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/workflows/core/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task.py b/tests/orchestrator/workflows/core/test_task.py
index 061a3f2..b39a81f 100644
--- a/tests/orchestrator/workflows/core/test_task.py
+++ b/tests/orchestrator/workflows/core/test_task.py
@@ -31,7 +31,7 @@ from tests import mock, storage
 
 @pytest.fixture
 def ctx(tmpdir):
-    context = mock.context.simple(storage.get_sqlite_api_kwargs(str(tmpdir)))
+    context = mock.context.simple(str(tmpdir))
     yield context
     storage.release_sqlite_storage(context.model)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
index cd37bde..57be075 100644
--- a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
+++ b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
@@ -22,9 +22,9 @@ from tests import mock
 from tests import storage
 
 
-def test_task_graph_into_execution_graph():
+def test_task_graph_into_execution_graph(tmpdir):
     operation_name = 'tosca.interfaces.node.lifecycle.Standard.create'
-    task_context = mock.context.simple(storage.get_sqlite_api_kwargs())
+    task_context = mock.context.simple(str(tmpdir))
     node_instance = \
         task_context.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
     def sub_workflow(name, **_):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/workflows/executor/test_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_executor.py b/tests/orchestrator/workflows/executor/test_executor.py
index 2486a1e..d983fe9 100644
--- a/tests/orchestrator/workflows/executor/test_executor.py
+++ b/tests/orchestrator/workflows/executor/test_executor.py
@@ -86,7 +86,14 @@ class MockContext(object):
         pass
 
     def __getattr__(self, item):
-        return None
+        if item == 'serialization_dict':
+            return {'context_cls': self.__class__, 'context': {}}
+        else:
+            return None
+
+    @classmethod
+    def deserialize_from_dict(cls, **kwargs):
+        return cls()
 
 
 class MockTask(object):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/workflows/executor/test_process_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor.py b/tests/orchestrator/workflows/executor/test_process_executor.py
index 687e245..ff5dce6 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor.py
@@ -22,11 +22,15 @@ from contextlib import contextmanager
 import pytest
 
 from aria import application_model_storage
-from aria.storage import model as aria_model
+from aria.storage import (
+    model as aria_model,
+    sql_mapi
+)
+from aria.orchestrator import (
+    events,
+    plugin
+)
 from aria.utils.plugin import create as create_plugin
-from aria.storage.sql_mapi import SQLAlchemyModelAPI
-from aria.orchestrator import events
-from aria.orchestrator import plugin
 from aria.orchestrator.workflows.executor import process
 
 
@@ -74,8 +78,9 @@ class TestProcessExecutor(object):
 
 @pytest.fixture
 def model(tmpdir):
-    api_kwargs = tests.storage.get_sqlite_api_kwargs(str(tmpdir))
-    result = application_model_storage(SQLAlchemyModelAPI, api_kwargs=api_kwargs)
+    result = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
+                                       initiator_kwargs=dict(base_dir=str(tmpdir)),
+                                       initiator=sql_mapi.init_storage)
     yield result
     tests.storage.release_sqlite_storage(result)
 
@@ -112,7 +117,14 @@ class MockContext(object):
         pass
 
     def __getattr__(self, item):
-        return None
+        if item == 'serialization_dict':
+            return {'context_cls': self.__class__, 'context': {}}
+        else:
+            return None
+
+    @classmethod
+    def deserialize_from_dict(cls, **kwargs):
+        return cls()
 
 
 class MockTask(object):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/workflows/executor/test_process_executor_extension.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_extension.py b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
index 4a8ef57..18957f1 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_extension.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
@@ -75,6 +75,6 @@ def executor():
 
 @pytest.fixture
 def context(tmpdir):
-    result = mock.context.simple(storage.get_sqlite_api_kwargs(str(tmpdir)))
+    result = mock.context.simple(str(tmpdir))
     yield result
     storage.release_sqlite_storage(result.model)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
index bd1fa96..e383859 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
@@ -148,6 +148,6 @@ def executor():
 
 @pytest.fixture
 def context(tmpdir):
-    result = mock.context.simple(storage.get_sqlite_api_kwargs(str(tmpdir)))
+    result = mock.context.simple(str(tmpdir))
     yield result
     storage.release_sqlite_storage(result.model)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/storage/__init__.py
----------------------------------------------------------------------
diff --git a/tests/storage/__init__.py b/tests/storage/__init__.py
index 3b3715e..b798e01 100644
--- a/tests/storage/__init__.py
+++ b/tests/storage/__init__.py
@@ -42,7 +42,6 @@ class MockModel(model.DeclarativeBase, structure.ModelMixin): #pylint: disable=a
     name = Column(Text)
 
 
-
 class TestFileSystem(object):
 
     def setup_method(self):
@@ -52,35 +51,6 @@ class TestFileSystem(object):
         rmtree(self.path, ignore_errors=True)
 
 
-def get_sqlite_api_kwargs(base_dir=None, filename='db.sqlite'):
-    """
-    Create sql params. works in in-memory and in filesystem mode.
-    If base_dir is passed, the mode will be filesystem mode. while the default mode is in-memory.
-    :param str base_dir: The base dir for the filesystem memory file.
-    :param str filename: the file name - defaults to 'db.sqlite'.
-    :return:
-    """
-    if base_dir is not None:
-        uri = 'sqlite:///{platform_char}{path}'.format(
-            # Handles the windows behavior where there is not root, but drivers.
-            # Thus behaving as relative path.
-            platform_char='' if 'Windows' in platform.system() else '/',
-
-            path=os.path.join(base_dir, filename))
-        engine_kwargs = {}
-    else:
-        uri = 'sqlite:///:memory:'
-        engine_kwargs = dict(connect_args={'check_same_thread': False},
-                             poolclass=pool.StaticPool)
-
-    engine = create_engine(uri, **engine_kwargs)
-    session_factory = orm.sessionmaker(bind=engine)
-    session = orm.scoped_session(session_factory=session_factory) if base_dir else session_factory()
-
-    model.DeclarativeBase.metadata.create_all(bind=engine)
-    return dict(engine=engine, session=session)
-
-
 def release_sqlite_storage(storage):
     """
     Drops the tables and clears the session
@@ -95,3 +65,14 @@ def release_sqlite_storage(storage):
             session.close()
         for engine in set(mapi._engine for mapi in mapis):
             model.DeclarativeBase.metadata.drop_all(engine)
+
+
+def init_inmemory_model_storage():
+    uri = 'sqlite:///:memory:'
+    engine_kwargs = dict(connect_args={'check_same_thread': False}, poolclass=pool.StaticPool)
+
+    engine = create_engine(uri, **engine_kwargs)
+    session_factory = orm.sessionmaker(bind=engine)
+    session = session_factory()
+
+    return dict(engine=engine, session=session)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/storage/test_instrumentation.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_instrumentation.py b/tests/storage/test_instrumentation.py
index 9b4da4f..771342c 100644
--- a/tests/storage/test_instrumentation.py
+++ b/tests/storage/test_instrumentation.py
@@ -25,7 +25,7 @@ from aria.storage import (
     instrumentation,
     exceptions
 )
-from ..storage import get_sqlite_api_kwargs, release_sqlite_storage
+from ..storage import release_sqlite_storage, init_inmemory_model_storage
 
 
 STUB = instrumentation._STUB
@@ -328,10 +328,9 @@ def restore_instrumentation():
 
 @pytest.fixture
 def storage():
-    result = ModelStorage(
-        api_cls=sql_mapi.SQLAlchemyModelAPI,
-        api_kwargs=get_sqlite_api_kwargs(),
-        items=(MockModel1, MockModel2, StrictMockModel))
+    result = ModelStorage(api_cls=sql_mapi.SQLAlchemyModelAPI,
+                          items=(MockModel1, MockModel2, StrictMockModel),
+                          initiator=init_inmemory_model_storage)
     yield result
     release_sqlite_storage(result)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/storage/test_model_storage.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_model_storage.py b/tests/storage/test_model_storage.py
index d1596e3..34cc5df 100644
--- a/tests/storage/test_model_storage.py
+++ b/tests/storage/test_model_storage.py
@@ -22,14 +22,15 @@ from aria.storage import (
     sql_mapi,
 )
 from aria import application_model_storage
-from ..storage import get_sqlite_api_kwargs, release_sqlite_storage
+from ..storage import release_sqlite_storage, init_inmemory_model_storage
 
 from . import MockModel
 
 
 @pytest.fixture
 def storage():
-    base_storage = ModelStorage(sql_mapi.SQLAlchemyModelAPI, api_kwargs=get_sqlite_api_kwargs())
+    base_storage = ModelStorage(sql_mapi.SQLAlchemyModelAPI,
+                                initiator=init_inmemory_model_storage)
     base_storage.register(MockModel)
     yield base_storage
     release_sqlite_storage(base_storage)
@@ -61,7 +62,7 @@ def test_model_storage(storage):
 
 def test_application_storage_factory():
     storage = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
-                                        api_kwargs=get_sqlite_api_kwargs())
+                                        initiator=init_inmemory_model_storage)
     assert storage.node
     assert storage.node_instance
     assert storage.plugin

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/storage/test_models.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_models.py b/tests/storage/test_models.py
index 2088676..6450152 100644
--- a/tests/storage/test_models.py
+++ b/tests/storage/test_models.py
@@ -39,7 +39,7 @@ from aria.storage.model import (
 
 
 from tests import mock
-from tests.storage import get_sqlite_api_kwargs, release_sqlite_storage
+from ..storage import release_sqlite_storage, init_inmemory_model_storage
 
 
 @contextmanager
@@ -55,7 +55,7 @@ def sql_storage(storage_func):
 
 def _empty_storage():
     return application_model_storage(sql_mapi.SQLAlchemyModelAPI,
-                                     api_kwargs=get_sqlite_api_kwargs())
+                                     initiator=init_inmemory_model_storage)
 
 
 def _blueprint_storage():

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/storage/test_structures.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_structures.py b/tests/storage/test_structures.py
index 0223a98..4127905 100644
--- a/tests/storage/test_structures.py
+++ b/tests/storage/test_structures.py
@@ -25,7 +25,7 @@ from aria.storage import (
     exceptions
 )
 
-from ..storage import get_sqlite_api_kwargs, release_sqlite_storage, structure
+from ..storage import release_sqlite_storage, structure, init_inmemory_model_storage
 from . import MockModel
 from ..mock import (
     models,
@@ -36,7 +36,7 @@ from ..mock import (
 
 @pytest.fixture
 def storage():
-    base_storage = ModelStorage(sql_mapi.SQLAlchemyModelAPI, api_kwargs=get_sqlite_api_kwargs())
+    base_storage = ModelStorage(sql_mapi.SQLAlchemyModelAPI, initiator=init_inmemory_model_storage)
     base_storage.register(MockModel)
     yield base_storage
     release_sqlite_storage(base_storage)
@@ -48,8 +48,10 @@ def module_cleanup():
 
 
 @pytest.fixture
-def context():
-    return mock_context.simple(get_sqlite_api_kwargs())
+def context(tmpdir):
+    ctx = mock_context.simple(str(tmpdir))
+    yield ctx
+    release_sqlite_storage(ctx.model)
 
 
 def test_inner_dict_update(storage):
@@ -174,7 +176,6 @@ def test_relationship_model_ordering(context):
         target_node_instance=target_node_instance,
     )
 
-
     context.model.node.put(new_node)
     context.model.node_instance.put(new_node_instance)
     context.model.relationship.put(source_to_new_relationship)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1498ad39/tests/utils/test_plugin.py
----------------------------------------------------------------------
diff --git a/tests/utils/test_plugin.py b/tests/utils/test_plugin.py
index 6f2dd92..09885ef 100644
--- a/tests/utils/test_plugin.py
+++ b/tests/utils/test_plugin.py
@@ -21,7 +21,7 @@ from aria import application_model_storage
 from aria.orchestrator import exceptions
 from aria.orchestrator import plugin
 from aria.utils.plugin import create as create_plugin
-from aria.storage.sql_mapi import SQLAlchemyModelAPI
+from aria.storage import sql_mapi
 
 from .. import storage
 
@@ -49,8 +49,8 @@ class TestPluginManager(object):
 
 @pytest.fixture
 def model():
-    api_kwargs = storage.get_sqlite_api_kwargs()
-    model = application_model_storage(SQLAlchemyModelAPI, api_kwargs=api_kwargs)
+    model = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
+                                      initiator=storage.init_inmemory_model_storage)
     yield model
     storage.release_sqlite_storage(model)