Posted to dev@ariatosca.apache.org by av...@apache.org on 2017/05/09 13:47:58 UTC

[1/4] incubator-ariatosca git commit: ARIA-230 Dry execution doesn't log empty operations [Forced Update!]

Repository: incubator-ariatosca
Updated Branches:
  refs/heads/ARIA-210-handle-relative-paths-in-cli-service-templates 0a3ea7991 -> 66727c395 (forced update)


ARIA-230 Dry execution doesn't log empty operations


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/b11fbc94
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/b11fbc94
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/b11fbc94

Branch: refs/heads/ARIA-210-handle-relative-paths-in-cli-service-templates
Commit: b11fbc946ae6d26ad6c70a283cc6d6501c320273
Parents: 0ec2370
Author: max-orlov <ma...@gigaspaces.com>
Authored: Mon May 8 17:29:29 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Tue May 9 01:02:46 2017 +0300

----------------------------------------------------------------------
 aria/orchestrator/workflows/executor/dry.py | 26 ++++++++++++------------
 1 file changed, 13 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b11fbc94/aria/orchestrator/workflows/executor/dry.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/dry.py b/aria/orchestrator/workflows/executor/dry.py
index f6fb7a6..63ec392 100644
--- a/aria/orchestrator/workflows/executor/dry.py
+++ b/aria/orchestrator/workflows/executor/dry.py
@@ -32,20 +32,20 @@ class DryExecutor(BaseExecutor):
             task.started_at = datetime.utcnow()
             task.status = task.STARTED
 
+        dry_msg = '<dry> {name} {task.interface_name}.{task.operation_name} {suffix}'
+        logger = task.context.logger.info if task.implementation else task.context.logger.debug
+
+        if hasattr(task.actor, 'source_node'):
+            name = '{source_node.name}->{target_node.name}'.format(
+                source_node=task.actor.source_node, target_node=task.actor.target_node)
+        else:
+            name = task.actor.name
+
         if task.implementation:
-            if hasattr(task.actor, 'source_node'):
-                name = '{source_node.name}->{target_node.name}'.format(
-                    source_node=task.actor.source_node, target_node=task.actor.target_node)
-            else:
-                name = task.actor.name
-
-            task.context.logger.info(
-                '<dry> {name} {task.interface_name}.{task.operation_name} started...'
-                .format(name=name, task=task))
-
-            task.context.logger.info(
-                '<dry> {name} {task.interface_name}.{task.operation_name} successful'
-                .format(name=name, task=task))
+            logger(dry_msg.format(name=name, task=task, suffix='started...'))
+            logger(dry_msg.format(name=name, task=task, suffix='successful'))
+        else:
+            logger(dry_msg.format(name=name, task=task, suffix='has no implementation'))
 
         # updating the task manually instead of calling self._task_succeeded(task),
         # to avoid any side effects raising that event might cause
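
For readers skimming the diff, a minimal self-contained sketch of the consolidated behaviour (the Actor/Task tuples below are hypothetical stand-ins, not the real ARIA task model): operations with an implementation still log "started..."/"successful" at info level, while empty operations are now logged at debug level instead of being skipped silently.

from collections import namedtuple
import logging

logging.basicConfig(level=logging.DEBUG, format='%(levelname)s %(message)s')
logger = logging.getLogger('dry')

# Hypothetical stand-ins for the ARIA task/actor models, for illustration only.
Actor = namedtuple('Actor', 'name')
Task = namedtuple('Task', 'actor interface_name operation_name implementation')


def log_dry_task(task):
    dry_msg = '<dry> {name} {task.interface_name}.{task.operation_name} {suffix}'

    # Relationship tasks expose source/target nodes; node tasks only have a name.
    if hasattr(task.actor, 'source_node'):
        name = '{0.name}->{1.name}'.format(task.actor.source_node, task.actor.target_node)
    else:
        name = task.actor.name

    log = logger.info if task.implementation else logger.debug
    if task.implementation:
        log(dry_msg.format(name=name, task=task, suffix='started...'))
        log(dry_msg.format(name=name, task=task, suffix='successful'))
    else:
        # Empty operations used to produce no output at all; now they are
        # logged as well, just at debug level.
        log(dry_msg.format(name=name, task=task, suffix='has no implementation'))


log_dry_task(Task(Actor('web_server_1'), 'Standard', 'create', 'scripts/create.sh'))
log_dry_task(Task(Actor('web_server_1'), 'Standard', 'configure', None))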


[3/4] incubator-ariatosca git commit: ARIA-160 Tests fail sporadically over parameter binding

Posted by av...@apache.org.
ARIA-160 Tests fail sporadically over parameter binding


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/16fcca45
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/16fcca45
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/16fcca45

Branch: refs/heads/ARIA-210-handle-relative-paths-in-cli-service-templates
Commit: 16fcca45f60f81261757c9fee8f61782c479e30e
Parents: 1febf80
Author: max-orlov <ma...@gigaspaces.com>
Authored: Mon May 8 14:46:58 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Tue May 9 14:07:24 2017 +0300

----------------------------------------------------------------------
 aria/orchestrator/workflows/core/engine.py   |   3 +-
 tests/helpers.py                             |  23 +++--
 tests/orchestrator/context/test_operation.py | 107 ++++++++++++----------
 tests/orchestrator/context/test_toolbelt.py  |  40 ++++----
 tests/storage/__init__.py                    |   3 +-
 5 files changed, 92 insertions(+), 84 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16fcca45/aria/orchestrator/workflows/core/engine.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/engine.py b/aria/orchestrator/workflows/core/engine.py
index fd0dd6d..561265c 100644
--- a/aria/orchestrator/workflows/core/engine.py
+++ b/aria/orchestrator/workflows/core/engine.py
@@ -42,10 +42,9 @@ class Engine(logger.LoggerMixin):
         super(Engine, self).__init__(**kwargs)
         self._workflow_context = workflow_context
         self._execution_graph = networkx.DiGraph()
-        self._executor = executor
         translation.build_execution_graph(task_graph=tasks_graph,
                                           execution_graph=self._execution_graph,
-                                          default_executor=self._executor)
+                                          default_executor=executor)
 
     def execute(self):
         """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16fcca45/tests/helpers.py
----------------------------------------------------------------------
diff --git a/tests/helpers.py b/tests/helpers.py
index 423e63f..3c3efc9 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -14,7 +14,6 @@
 # limitations under the License.
 
 import os
-import tempfile
 import json
 
 from . import ROOT_DIR
@@ -34,16 +33,19 @@ def get_service_template_uri(*args):
 
 
 class FilesystemDataHolder(object):
-    _tmpfile = tempfile.NamedTemporaryFile('w')
+
+    def __init__(self, path, reset=False):
+        self._path = path
+        if reset or not os.path.exists(self._path) or open(self._path).read() == '':
+            self._dump({})
 
     def _load(self):
-        return json.load(open(self._tmpfile.name))
+        with open(self._path) as f:
+            return json.load(f)
 
     def _dump(self, value):
-        return json.dump(value, open(self._tmpfile.name, 'w'))
-
-    def __init__(self):
-        self.clear()
+        with open(self._path, 'w') as f:
+            return json.dump(value, f)
 
     def __setitem__(self, key, value):
         dict_ = self._load()
@@ -56,9 +58,6 @@ class FilesystemDataHolder(object):
     def __iter__(self):
         return iter(self._load())
 
-    def clear(self):
-        self._dump({})
-
     def get(self, item, default=None):
         return self._load().get(item, default)
 
@@ -67,3 +66,7 @@ class FilesystemDataHolder(object):
         return_value = dict_.setdefault(key, value)
         self._dump(dict_)
         return return_value
+
+    @property
+    def path(self):
+        return self._path

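The gist of the fix is replacing the module-level global_test_holder with a per-test, file-backed holder whose path is passed to operations as a regular input. A minimal illustrative sketch (MiniDataHolder is a hypothetical, trimmed-down analogue of tests.helpers.FilesystemDataHolder):

import json
import os
import tempfile


class MiniDataHolder(object):
    # Trimmed-down, illustrative analogue of tests.helpers.FilesystemDataHolder:
    # every instance is just a view over a JSON file at a given path.
    def __init__(self, path):
        self._path = path
        if not os.path.exists(path) or open(path).read() == '':
            self._dump({})

    def _load(self):
        with open(self._path) as f:
            return json.load(f)

    def _dump(self, value):
        with open(self._path, 'w') as f:
            json.dump(value, f)

    def __setitem__(self, key, value):
        data = self._load()
        data[key] = value
        self._dump(data)

    def __getitem__(self, key):
        return self._load()[key]

    @property
    def path(self):
        return self._path


# Each test creates its own holder under a temp dir and passes holder.path to
# the operation as an input; the operation side reopens the same file by path,
# so nothing is shared through module-level globals between tests anymore.
holder = MiniDataHolder(os.path.join(tempfile.mkdtemp(), 'dataholder'))
holder['ctx_name'] = 'NodeOperationContext'
assert MiniDataHolder(holder.path)['ctx_name'] == 'NodeOperationContext'
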
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16fcca45/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index 3180d89..cdeb5fa 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -38,7 +38,6 @@ from . import (
     execute,
 )
 
-global_test_holder = helpers.FilesystemDataHolder()
 
 @pytest.fixture
 def ctx(tmpdir):
@@ -68,11 +67,18 @@ def thread_executor():
         ex.close()
 
 
-def test_node_operation_task_execution(ctx, thread_executor):
+@pytest.fixture
+def dataholder(tmpdir):
+    dataholder_path = str(tmpdir.join('dataholder'))
+    holder = helpers.FilesystemDataHolder(dataholder_path)
+    return holder
+
+
+def test_node_operation_task_execution(ctx, thread_executor, dataholder):
     interface_name = 'Standard'
     operation_name = 'create'
 
-    inputs = {'putput': True}
+    inputs = {'putput': True, 'holder_path': dataholder.path}
     node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
     interface = mock.models.create_interface(
         node.service,
@@ -97,11 +103,11 @@ def test_node_operation_task_execution(ctx, thread_executor):
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
 
-    assert global_test_holder['ctx_name'] == context.operation.NodeOperationContext.__name__
+    assert dataholder['ctx_name'] == context.operation.NodeOperationContext.__name__
 
     # Task bases assertions
-    assert global_test_holder['actor_name'] == node.name
-    assert global_test_holder['task_name'] == api.task.OperationTask.NAME_FORMAT.format(
+    assert dataholder['actor_name'] == node.name
+    assert dataholder['task_name'] == api.task.OperationTask.NAME_FORMAT.format(
         type='node',
         name=node.name,
         interface=interface_name,
@@ -109,19 +115,19 @@ def test_node_operation_task_execution(ctx, thread_executor):
     )
     operations = interface.operations
     assert len(operations) == 1
-    assert global_test_holder['implementation'] == operations.values()[0].implementation             # pylint: disable=no-member
-    assert global_test_holder['inputs']['putput'] is True
+    assert dataholder['implementation'] == operations.values()[0].implementation             # pylint: disable=no-member
+    assert dataholder['inputs']['putput'] is True
 
     # Context based attributes (sugaring)
-    assert global_test_holder['template_name'] == node.node_template.name
-    assert global_test_holder['node_name'] == node.name
+    assert dataholder['template_name'] == node.node_template.name
+    assert dataholder['node_name'] == node.name
 
 
-def test_relationship_operation_task_execution(ctx, thread_executor):
+def test_relationship_operation_task_execution(ctx, thread_executor, dataholder):
     interface_name = 'Configure'
     operation_name = 'post_configure'
 
-    inputs = {'putput': True}
+    inputs = {'putput': True, 'holder_path': dataholder.path}
     relationship = ctx.model.relationship.list()[0]
     interface = mock.models.create_interface(
         relationship.source_node.service,
@@ -148,14 +154,14 @@ def test_relationship_operation_task_execution(ctx, thread_executor):
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
 
-    assert global_test_holder['ctx_name'] == context.operation.RelationshipOperationContext.__name__
+    assert dataholder['ctx_name'] == context.operation.RelationshipOperationContext.__name__
 
     # Task bases assertions
-    assert global_test_holder['actor_name'] == relationship.name
-    assert interface_name in global_test_holder['task_name']
+    assert dataholder['actor_name'] == relationship.name
+    assert interface_name in dataholder['task_name']
     operations = interface.operations
-    assert global_test_holder['implementation'] == operations.values()[0].implementation           # pylint: disable=no-member
-    assert global_test_holder['inputs']['putput'] is True
+    assert dataholder['implementation'] == operations.values()[0].implementation           # pylint: disable=no-member
+    assert dataholder['inputs']['putput'] is True
 
     # Context based attributes (sugaring)
     dependency_node_template = ctx.model.node_template.get_by_name(
@@ -165,14 +171,14 @@ def test_relationship_operation_task_execution(ctx, thread_executor):
         mock.models.DEPENDENT_NODE_TEMPLATE_NAME)
     dependent_node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
 
-    assert global_test_holder['target_node_template_name'] == dependency_node_template.name
-    assert global_test_holder['target_node_name'] == dependency_node.name
-    assert global_test_holder['relationship_name'] == relationship.name
-    assert global_test_holder['source_node_template_name'] == dependent_node_template.name
-    assert global_test_holder['source_node_name'] == dependent_node.name
+    assert dataholder['target_node_template_name'] == dependency_node_template.name
+    assert dataholder['target_node_name'] == dependency_node.name
+    assert dataholder['relationship_name'] == relationship.name
+    assert dataholder['source_node_template_name'] == dependent_node_template.name
+    assert dataholder['source_node_name'] == dependent_node.name
 
 
-def test_invalid_task_operation_id(ctx, thread_executor):
+def test_invalid_task_operation_id(ctx, thread_executor, dataholder):
     """
     Checks that the right id is used. The task created with id == 1, thus running the task on
     node with id == 2. will check that indeed the node uses the correct id.
@@ -191,7 +197,8 @@ def test_invalid_task_operation_id(ctx, thread_executor):
         node.service,
         interface_name=interface_name,
         operation_name=operation_name,
-        operation_kwargs=dict(implementation=op_path(get_node_id, module_path=__name__))
+        operation_kwargs=dict(implementation=op_path(get_node_id, module_path=__name__),
+                              inputs={'holder_path': dataholder.path})
     )
     node.interfaces[interface.name] = interface
     ctx.model.node.update(node)
@@ -202,12 +209,13 @@ def test_invalid_task_operation_id(ctx, thread_executor):
             api.task.OperationTask(
                 node,
                 interface_name=interface_name,
-                operation_name=operation_name)
+                operation_name=operation_name,
+            )
         )
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=thread_executor)
 
-    op_node_id = global_test_holder[api.task.OperationTask.NAME_FORMAT.format(
+    op_node_id = dataholder[api.task.OperationTask.NAME_FORMAT.format(
         type='node',
         name=node.name,
         interface=interface_name,
@@ -376,42 +384,41 @@ def logged_operation(ctx, **_):
 
 
 @operation
-def basic_node_operation(ctx, **_):
-    operation_common(ctx)
-    global_test_holder['template_name'] = ctx.node_template.name
-    global_test_holder['node_name'] = ctx.node.name
+def basic_node_operation(ctx, holder_path, **_):
+    holder = helpers.FilesystemDataHolder(holder_path)
+
+    operation_common(ctx, holder)
+    holder['template_name'] = ctx.node_template.name
+    holder['node_name'] = ctx.node.name
 
 
 @operation
-def basic_relationship_operation(ctx, **_):
-    operation_common(ctx)
-    global_test_holder['target_node_template_name'] = ctx.target_node_template.name
-    global_test_holder['target_node_name'] = ctx.target_node.name
-    global_test_holder['relationship_name'] = ctx.relationship.name
-    global_test_holder['source_node_template_name'] = ctx.source_node_template.name
-    global_test_holder['source_node_name'] = ctx.source_node.name
+def basic_relationship_operation(ctx, holder_path, **_):
+    holder = helpers.FilesystemDataHolder(holder_path)
+
+    operation_common(ctx, holder)
+    holder['target_node_template_name'] = ctx.target_node_template.name
+    holder['target_node_name'] = ctx.target_node.name
+    holder['relationship_name'] = ctx.relationship.name
+    holder['source_node_template_name'] = ctx.source_node_template.name
+    holder['source_node_name'] = ctx.source_node.name
 
 
-def operation_common(ctx):
-    global_test_holder['ctx_name'] = ctx.__class__.__name__
+def operation_common(ctx, holder):
+    holder['ctx_name'] = ctx.__class__.__name__
 
-    global_test_holder['actor_name'] = ctx.task.actor.name
-    global_test_holder['task_name'] = ctx.task.name
-    global_test_holder['implementation'] = ctx.task.implementation
-    global_test_holder['inputs'] = dict(i.unwrap() for i in ctx.task.inputs.values())
+    holder['actor_name'] = ctx.task.actor.name
+    holder['task_name'] = ctx.task.name
+    holder['implementation'] = ctx.task.implementation
+    holder['inputs'] = dict(i.unwrap() for i in ctx.task.inputs.values())
 
 
 @operation
-def get_node_id(ctx, **_):
-    global_test_holder[ctx.name] = ctx.node.id
+def get_node_id(ctx, holder_path, **_):
+    helpers.FilesystemDataHolder(holder_path)[ctx.name] = ctx.node.id
 
 
 @operation
 def _test_plugin_workdir(ctx, filename, content):
     with open(os.path.join(ctx.plugin_workdir, filename), 'w') as f:
         f.write(content)
-
-
-@pytest.fixture(autouse=True)
-def cleanup():
-    global_test_holder.clear()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16fcca45/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index 822ac5a..d199954 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -30,12 +30,10 @@ from . import (
     execute,
 )
 
-global_test_holder = helpers.FilesystemDataHolder()
-
 
 @pytest.fixture
 def workflow_context(tmpdir):
-    context = mock.context.simple(str(tmpdir), inmemory=True)
+    context = mock.context.simple(str(tmpdir))
     yield context
     storage.release_sqlite_storage(context.model)
 
@@ -49,6 +47,13 @@ def executor():
         result.close()
 
 
+@pytest.fixture
+def dataholder(tmpdir):
+    dataholder_path = str(tmpdir.join('dataholder'))
+    holder = helpers.FilesystemDataHolder(dataholder_path)
+    return holder
+
+
 def _get_elements(workflow_context):
     dependency_node_template = workflow_context.model.node_template.get_by_name(
         mock.models.DEPENDENCY_NODE_TEMPLATE_NAME)
@@ -75,17 +80,17 @@ def _get_elements(workflow_context):
         relationship
 
 
-def test_host_ip(workflow_context, executor):
+def test_host_ip(workflow_context, executor, dataholder):
+
     interface_name = 'Standard'
     operation_name = 'create'
     _, dependency_node, _, _, _ = _get_elements(workflow_context)
-    inputs = {'putput': True}
+    inputs = {'putput': True, 'holder_path': dataholder.path}
     interface = mock.models.create_interface(
         dependency_node.service,
         interface_name=interface_name,
         operation_name=operation_name,
-        operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__),
-                              inputs=inputs)
+        operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__), inputs=inputs)
     )
     dependency_node.interfaces[interface.name] = interface
     dependency_node.runtime_properties['ip'] = '1.1.1.1'
@@ -105,14 +110,14 @@ def test_host_ip(workflow_context, executor):
 
     execute(workflow_func=basic_workflow, workflow_context=workflow_context, executor=executor)
 
-    assert global_test_holder.get('host_ip') == dependency_node.runtime_properties.get('ip')
+    assert dataholder.get('host_ip') == dependency_node.runtime_properties.get('ip')
 
 
-def test_relationship_tool_belt(workflow_context, executor):
+def test_relationship_tool_belt(workflow_context, executor, dataholder):
     interface_name = 'Configure'
     operation_name = 'post_configure'
     _, _, _, _, relationship = _get_elements(workflow_context)
-    inputs = {'putput': True}
+    inputs = {'putput': True, 'holder_path': dataholder.path}
     interface = mock.models.create_interface(
         relationship.source_node.service,
         interface_name=interface_name,
@@ -136,7 +141,7 @@ def test_relationship_tool_belt(workflow_context, executor):
 
     execute(workflow_func=basic_workflow, workflow_context=workflow_context, executor=executor)
 
-    assert global_test_holder.get(api.task.OperationTask.NAME_FORMAT.format(
+    assert dataholder.get(api.task.OperationTask.NAME_FORMAT.format(
         type='relationship',
         name=relationship.name,
         interface=interface_name,
@@ -149,15 +154,10 @@ def test_wrong_model_toolbelt():
 
 
 @operation(toolbelt=True)
-def host_ip(toolbelt, **_):
-    global_test_holder['host_ip'] = toolbelt.host_ip
+def host_ip(toolbelt, holder_path, **_):
+    helpers.FilesystemDataHolder(holder_path)['host_ip'] = toolbelt.host_ip
 
 
 @operation(toolbelt=True)
-def relationship_operation(ctx, toolbelt, **_):
-    global_test_holder[ctx.name] = toolbelt._op_context.source_node.name
-
-
-@pytest.fixture(autouse=True)
-def cleanup():
-    global_test_holder.clear()
+def relationship_operation(ctx, toolbelt, holder_path, **_):
+    helpers.FilesystemDataHolder(holder_path)[ctx.name] = toolbelt._op_context.source_node.name

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/16fcca45/tests/storage/__init__.py
----------------------------------------------------------------------
diff --git a/tests/storage/__init__.py b/tests/storage/__init__.py
index 66424db..8ca1480 100644
--- a/tests/storage/__init__.py
+++ b/tests/storage/__init__.py
@@ -51,6 +51,5 @@ def init_inmemory_model_storage():
 
     engine = create_engine(uri, **engine_kwargs)
     session_factory = orm.sessionmaker(bind=engine)
-    session = orm.scoped_session(session_factory=session_factory)
 
-    return dict(engine=engine, session=session)
+    return dict(engine=engine, session=session_factory())

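For context on the last hunk, a small SQLAlchemy sketch (the in-memory URI is illustrative): scoped_session hands out sessions from a thread-local registry, whereas calling the factory directly yields one concrete session bound to the engine, so each storage instance gets its own session rather than one shared through the registry.

from sqlalchemy import create_engine, orm

# Illustrative in-memory setup, mirroring the test storage initialization.
engine = create_engine('sqlite://')
session_factory = orm.sessionmaker(bind=engine)

# Before: a scoped_session registry, which returns thread-local sessions that
# can end up shared across tests running in the same thread.
scoped = orm.scoped_session(session_factory)

# After: one concrete session per storage instance.
session = session_factory()
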

[4/4] incubator-ariatosca git commit: ARIA-210 Handle relative paths in CLI service-templates

Posted by av...@apache.org.
ARIA-210 Handle relative paths in CLI service-templates

This was a rather simple change, mainly involving resolving user-supplied
paths to absolute paths.

The problems were only in `service-templates store` and in
`service-templates create-archive`.
`service-templates validate` was not affected.
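
In essence, user-supplied paths are now expanded and made absolute before being split into the directory to archive and the entry-point file, and create-archive appends the .csar extension when it is missing. A hedged sketch of that normalization (the helper names below are illustrative; the real logic lives in aria/cli/csar.py and aria/cli/commands/service_templates.py):

import os

CSAR_FILE_EXTENSION = '.csar'


def resolve_service_template_path(path):
    # Illustrative helper: expand '~' and turn a CWD-relative path into an
    # absolute one, then split it into the directory to pack and the
    # entry-point definitions file inside it.
    absolute = os.path.abspath(os.path.expanduser(path))
    return absolute, os.path.dirname(absolute), os.path.basename(absolute)


def normalize_destination(destination):
    # create-archive also appends the .csar extension when it is missing.
    if not destination.endswith(CSAR_FILE_EXTENSION):
        destination += CSAR_FILE_EXTENSION
    return destination


print(resolve_service_template_path('service_template.yaml'))
print(normalize_destination('my-archive'))  # my-archive.csar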


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/66727c39
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/66727c39
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/66727c39

Branch: refs/heads/ARIA-210-handle-relative-paths-in-cli-service-templates
Commit: 66727c395ad6407304e8f67679d5dbd95828f08e
Parents: 16fcca4
Author: Avia Efrat <av...@gigaspaces.com>
Authored: Mon May 8 17:45:23 2017 +0300
Committer: Avia Efrat <av...@gigaspaces.com>
Committed: Tue May 9 16:47:50 2017 +0300

----------------------------------------------------------------------
 aria/cli/commands/service_templates.py |  4 +++-
 aria/cli/csar.py                       | 18 ++++++++++--------
 aria/cli/service_template_utils.py     |  2 +-
 tests/cli/test_service_templates.py    | 24 ++++++++++++++++++++++++
 4 files changed, 38 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/66727c39/aria/cli/commands/service_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/service_templates.py b/aria/cli/commands/service_templates.py
index 2537012..e459871 100644
--- a/aria/cli/commands/service_templates.py
+++ b/aria/cli/commands/service_templates.py
@@ -195,7 +195,9 @@ def create_archive(service_template_path, destination, logger):
     `destination` is the path of the output CSAR archive file
     """
     logger.info('Creating a CSAR archive')
-    csar.write(os.path.dirname(service_template_path), service_template_path, destination, logger)
+    if not destination.endswith(csar.CSAR_FILE_EXTENSION):
+        destination += csar.CSAR_FILE_EXTENSION
+    csar.write(service_template_path, destination, logger)
     logger.info('CSAR archive created at {0}'.format(destination))
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/66727c39/aria/cli/csar.py
----------------------------------------------------------------------
diff --git a/aria/cli/csar.py b/aria/cli/csar.py
index 5bc35ac..8f44557 100644
--- a/aria/cli/csar.py
+++ b/aria/cli/csar.py
@@ -22,7 +22,7 @@ import zipfile
 import requests
 from ruamel import yaml
 
-
+CSAR_FILE_EXTENSION = '.csar'
 META_FILE = 'TOSCA-Metadata/TOSCA.meta'
 META_FILE_VERSION_KEY = 'TOSCA-Meta-File-Version'
 META_FILE_VERSION_VALUE = '1.0'
@@ -38,17 +38,19 @@ BASE_METADATA = {
 }
 
 
-def write(source, entry, destination, logger):
-    source = os.path.expanduser(source)
-    destination = os.path.expanduser(destination)
-    entry_definitions = os.path.join(source, entry)
+def write(service_template_path, destination, logger):
+
+    service_template_path = os.path.abspath(os.path.expanduser(service_template_path))
+    source = os.path.dirname(service_template_path)
+    entry = os.path.basename(service_template_path)
+
     meta_file = os.path.join(source, META_FILE)
     if not os.path.isdir(source):
         raise ValueError('{0} is not a directory. Please specify the service template '
                          'directory.'.format(source))
-    if not os.path.isfile(entry_definitions):
+    if not os.path.isfile(service_template_path):
         raise ValueError('{0} does not exists. Please specify a valid entry point.'
-                         .format(entry_definitions))
+                         .format(service_template_path))
     if os.path.exists(destination):
         raise ValueError('{0} already exists. Please provide a path to where the CSAR should be '
                          'created.'.format(destination))
@@ -175,4 +177,4 @@ def read(source, destination=None, logger=None):
 
 
 def is_csar_archive(source):
-    return source.endswith('.csar')
+    return source.endswith(CSAR_FILE_EXTENSION)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/66727c39/aria/cli/service_template_utils.py
----------------------------------------------------------------------
diff --git a/aria/cli/service_template_utils.py b/aria/cli/service_template_utils.py
index 382cce1..c953c02 100644
--- a/aria/cli/service_template_utils.py
+++ b/aria/cli/service_template_utils.py
@@ -53,7 +53,7 @@ def get(source, service_template_filename):
             return _get_service_template_file_from_archive(source, service_template_filename)
         else:
             # Maybe check if yaml.
-            return source
+            return os.path.abspath(source)
     elif len(source.split('/')) == 2:
         url = _map_to_github_url(source)
         downloaded_file = utils.download_file(url)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/66727c39/tests/cli/test_service_templates.py
----------------------------------------------------------------------
diff --git a/tests/cli/test_service_templates.py b/tests/cli/test_service_templates.py
index dd9eedd..633c973 100644
--- a/tests/cli/test_service_templates.py
+++ b/tests/cli/test_service_templates.py
@@ -12,6 +12,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import os
+import zipfile
 
 import pytest
 import mock
@@ -131,6 +133,18 @@ class TestServiceTemplatesStore(TestCliBase):
         assert 'Service template {name} stored'.format(
             name=mock_models.SERVICE_TEMPLATE_NAME) in self.logger_output_string
 
+    def test_store_relative_path_single_yaml_file(self, monkeypatch, mock_object):
+        monkeypatch.setattr(Core, 'create_service_template', mock_object)
+        monkeypatch.setattr(os.path, 'isfile', lambda x: True)
+        monkeypatch.setattr(service_template_utils, '_is_archive', lambda x: False)
+
+        self.invoke('service_templates store service_template.yaml {name}'.format(
+            name=mock_models.SERVICE_TEMPLATE_NAME))
+
+        mock_object.assert_called_with(os.path.join(os.getcwd(), 'service_template.yaml'),
+                                       mock.ANY,
+                                       mock.ANY)
+
     def test_store_raises_exception_resulting_from_name_uniqueness(self, monkeypatch, mock_object):
 
         monkeypatch.setattr(service_template_utils, 'get', mock_object)
@@ -244,3 +258,13 @@ class TestServiceTemplatesCreateArchive(TestCliBase):
         monkeypatch.setattr(csar, 'write', mock_object)
         self.invoke('service_templates create_archive stubpath stubdest')
         assert 'CSAR archive created at stubdest' in self.logger_output_string
+
+    def test_create_archive_from_relative_path(self, monkeypatch, mock_object):
+
+        monkeypatch.setattr(os.path, 'isfile', lambda x: True)
+        monkeypatch.setattr(os.path, 'isfile', mock_object)
+        monkeypatch.setattr(zipfile, 'ZipFile', mock.MagicMock)
+
+        self.invoke('service_templates create_archive archive stubdest')
+
+        mock_object.assert_called_with(os.path.join(os.getcwd(), 'archive'))


[2/4] incubator-ariatosca git commit: ARIA-165 Make node name suffix UUIDs more readable

Posted by av...@apache.org.
ARIA-165 Make node name suffix UUIDs more readable


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/1febf80d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/1febf80d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/1febf80d

Branch: refs/heads/ARIA-210-handle-relative-paths-in-cli-service-templates
Commit: 1febf80dee57b837d2ed937bcdaa080bdc3bd822
Parents: b11fbc9
Author: max-orlov <ma...@gigaspaces.com>
Authored: Mon May 8 15:25:37 2017 +0300
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Tue May 9 11:24:07 2017 +0300

----------------------------------------------------------------------
 aria/modeling/service_template.py | 8 ++++++--
 aria/parser/modeling/context.py   | 5 -----
 2 files changed, 6 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1febf80d/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
index 7fab4fc..f721b64 100644
--- a/aria/modeling/service_template.py
+++ b/aria/modeling/service_template.py
@@ -549,9 +549,13 @@ class NodeTemplateBase(TemplateModelMixin):
             ('requirement_templates', formatting.as_raw_list(self.requirement_templates))))
 
     def instantiate(self, container):
-        context = ConsumptionContext.get_thread_local()
         from . import models
-        name = context.modeling.generate_node_id(self.name)
+        if self.nodes:
+            highest_name_suffix = max(int(n.name.rsplit('_', 1)[-1]) for n in self.nodes)
+            suffix = highest_name_suffix + 1
+        else:
+            suffix = 1
+        name = '{name}_{index}'.format(name=self.name, index=suffix)
         node = models.Node(name=name,
                            type=self.type,
                            description=deepcopy_with_locators(self.description),
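
The naming scheme itself is easy to show in isolation; a small self-contained sketch, assuming objects that only expose a name attribute (the real code iterates the template's instantiated nodes):

class FakeNode(object):
    # Hypothetical stand-in for an instantiated node; only 'name' matters here.
    def __init__(self, name):
        self.name = name


def next_node_name(template_name, existing_nodes):
    # Instead of appending a UUID, pick the next numeric suffix after the
    # highest one already in use for this node template.
    if existing_nodes:
        highest = max(int(n.name.rsplit('_', 1)[-1]) for n in existing_nodes)
        suffix = highest + 1
    else:
        suffix = 1
    return '{name}_{index}'.format(name=template_name, index=suffix)


nodes = [FakeNode('web_server_1'), FakeNode('web_server_2')]
print(next_node_name('web_server', nodes))   # web_server_3
print(next_node_name('database', []))        # database_1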

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/1febf80d/aria/parser/modeling/context.py
----------------------------------------------------------------------
diff --git a/aria/parser/modeling/context.py b/aria/parser/modeling/context.py
index dff5991..4a53641 100644
--- a/aria/parser/modeling/context.py
+++ b/aria/parser/modeling/context.py
@@ -67,11 +67,6 @@ class ModelingContext(object):
         if self.instance is not None:
             model_storage.service.put(self.instance)
 
-    def generate_node_id(self, template_name):
-        return self.node_id_format.format(
-            template=template_name,
-            id=self.generate_id())
-
     def generate_id(self):
         if self.id_type == IdType.LOCAL_SERIAL:
             return self._serial_id_counter.next()