You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@ariatosca.apache.org by ra...@apache.org on 2017/04/02 12:53:45 UTC
[3/3] incubator-ariatosca git commit: ARIA-48 cli
ARIA-48 cli
Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/de2d57dd
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/de2d57dd
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/de2d57dd
Branch: refs/heads/ARIA-48-aria-cli
Commit: de2d57dd09f7acce5a1845530b8629ada7ff77d2
Parents: 07cbfcd
Author: Ran Ziv <ra...@gigaspaces.com>
Authored: Tue Mar 28 12:17:46 2017 +0300
Committer: Ran Ziv <ra...@gigaspaces.com>
Committed: Sun Apr 2 15:53:12 2017 +0300
----------------------------------------------------------------------
aria/__init__.py | 2 +-
aria/cli/VERSION | 3 +
aria/cli/args_parser.py | 269 ---------
aria/cli/cli.py | 113 ----
aria/cli/cli/__init__.py | 14 +
aria/cli/cli/aria.py | 453 +++++++++++++++
aria/cli/cli/helptexts.py | 56 ++
aria/cli/colorful_event.py | 152 ++++++
aria/cli/commands.py | 546 -------------------
aria/cli/commands/__init__.py | 14 +
aria/cli/commands/executions.py | 172 ++++++
aria/cli/commands/logs.py | 68 +++
aria/cli/commands/node_templates.py | 104 ++++
aria/cli/commands/nodes.py | 96 ++++
aria/cli/commands/plugins.py | 145 +++++
aria/cli/commands/service_templates.py | 207 +++++++
aria/cli/commands/services.py | 175 ++++++
aria/cli/commands/workflows.py | 107 ++++
aria/cli/config.py | 46 --
aria/cli/config/__init__.py | 14 +
aria/cli/config/config.py | 70 +++
aria/cli/config/config_template.yaml | 12 +
aria/cli/constants.py | 18 +
aria/cli/csar.py | 11 +-
aria/cli/dry.py | 88 ---
aria/cli/env.py | 118 ++++
aria/cli/exceptions.py | 54 +-
aria/cli/inputs.py | 118 ++++
aria/cli/logger.py | 113 ++++
aria/cli/main.py | 73 +++
aria/cli/service_template_utils.py | 140 +++++
aria/cli/storage.py | 95 ----
aria/cli/table.py | 90 +++
aria/cli/utils.py | 152 ++++++
aria/core.py | 116 ++++
aria/exceptions.py | 16 +
aria/modeling/__init__.py | 2 +
aria/modeling/exceptions.py | 18 +
aria/modeling/orchestration.py | 14 +-
aria/modeling/service_changes.py | 10 +-
aria/modeling/service_instance.py | 16 +-
aria/modeling/service_template.py | 16 +-
aria/modeling/utils.py | 87 ++-
aria/orchestrator/context/common.py | 27 +-
aria/orchestrator/context/workflow.py | 19 +-
.../execution_plugin/ctx_proxy/server.py | 3 +-
aria/orchestrator/runner.py | 101 ----
aria/orchestrator/workflow_runner.py | 147 +++++
aria/orchestrator/workflows/api/task.py | 41 +-
aria/orchestrator/workflows/builtin/__init__.py | 1 +
.../workflows/builtin/execute_operation.py | 15 +-
aria/orchestrator/workflows/builtin/utils.py | 86 ++-
aria/storage/core.py | 5 +
aria/utils/archive.py | 63 +++
aria/utils/exceptions.py | 11 +
aria/utils/file.py | 13 +
aria/utils/formatting.py | 43 ++
aria/utils/threading.py | 24 +
aria/utils/type.py | 57 ++
.../simple_v1_0/modeling/__init__.py | 3 +-
requirements.in | 5 +
setup.py | 2 +-
tests/mock/context.py | 7 +-
tests/mock/models.py | 13 +-
tests/modeling/test_models.py | 10 +-
tests/orchestrator/context/test_operation.py | 45 +-
.../context/test_resource_render.py | 4 +-
tests/orchestrator/context/test_serialize.py | 4 +-
tests/orchestrator/context/test_toolbelt.py | 11 +-
tests/orchestrator/context/test_workflow.py | 10 +-
.../orchestrator/execution_plugin/test_local.py | 10 +-
tests/orchestrator/execution_plugin/test_ssh.py | 2 +-
.../workflows/builtin/test_execute_operation.py | 3 +-
.../orchestrator/workflows/core/test_engine.py | 10 +-
.../test_task_graph_into_exececution_graph.py | 111 ----
.../test_task_graph_into_execution_graph.py | 111 ++++
tests/utils/test_threading.py | 35 ++
77 files changed, 3634 insertions(+), 1621 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/de2d57dd/aria/__init__.py
----------------------------------------------------------------------
diff --git a/aria/__init__.py b/aria/__init__.py
index b9251d5..df75b1e 100644
--- a/aria/__init__.py
+++ b/aria/__init__.py
@@ -84,6 +84,6 @@ def application_resource_storage(api, api_kwargs=None, initiator=None, initiator
return storage.ResourceStorage(api_cls=api,
api_kwargs=api_kwargs,
- items=['blueprint', 'deployment', 'plugin'],
+ items=['service_template', 'service', 'plugin'],
initiator=initiator,
initiator_kwargs=initiator_kwargs)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/de2d57dd/aria/cli/VERSION
----------------------------------------------------------------------
diff --git a/aria/cli/VERSION b/aria/cli/VERSION
new file mode 100644
index 0000000..6a3252e
--- /dev/null
+++ b/aria/cli/VERSION
@@ -0,0 +1,3 @@
+{
+ "version": "0.1.0"
+}
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/de2d57dd/aria/cli/args_parser.py
----------------------------------------------------------------------
diff --git a/aria/cli/args_parser.py b/aria/cli/args_parser.py
deleted file mode 100644
index 81ee513..0000000
--- a/aria/cli/args_parser.py
+++ /dev/null
@@ -1,269 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Argument parsing configuration and functions
-"""
-
-import argparse
-from functools import partial
-
-from ..utils.argparse import ArgumentParser
-
-NO_VERBOSE = 0
-
-
-class SmartFormatter(argparse.HelpFormatter):
- """
- TODO: what is this?
- """
- def _split_lines(self, text, width):
- if text.startswith('R|'):
- return text[2:].splitlines()
- return super(SmartFormatter, self)._split_lines(text, width)
-
-
-def sub_parser_decorator(func=None, **parser_settings):
- """
- Decorated for sub_parser argument definitions
- """
- if not func:
- return partial(sub_parser_decorator, **parser_settings)
-
- def _wrapper(parser):
- sub_parser = parser.add_parser(**parser_settings)
- sub_parser.add_argument(
- '-v', '--verbose',
- dest='verbosity',
- action='count',
- default=NO_VERBOSE,
- help='Set verbosity level (can be passed multiple times)')
- func(sub_parser)
- return sub_parser
- return _wrapper
-
-
-def config_parser(parser=None):
- """
- Top level argparse configuration
- """
- parser = parser or ArgumentParser(
- prog='ARIA',
- description="ARIA's Command Line Interface",
- formatter_class=SmartFormatter)
- parser.add_argument('-v', '--version', action='version')
- sub_parser = parser.add_subparsers(title='Commands', dest='command')
- add_init_parser(sub_parser)
- add_execute_parser(sub_parser)
- add_parse_parser(sub_parser)
- add_workflow_parser(sub_parser)
- add_spec_parser(sub_parser)
- add_csar_create_parser(sub_parser)
- add_csar_open_parser(sub_parser)
- add_csar_validate_parser(sub_parser)
- return parser
-
-
-@sub_parser_decorator(
- name='parse',
- help='Parse a blueprint',
- formatter_class=SmartFormatter)
-def add_parse_parser(parse):
- """
- ``parse`` command parser configuration
- """
- parse.add_argument(
- 'uri',
- help='URI or file path to service template')
- parse.add_argument(
- 'consumer',
- nargs='?',
- default='validate',
- help='"validate" (default), "presentation", "template", "types", "instance", or consumer '
- 'class name (full class path or short name)')
- parse.add_argument(
- '--loader-source',
- default='aria.parser.loading.DefaultLoaderSource',
- help='loader source class for the parser')
- parse.add_argument(
- '--reader-source',
- default='aria.parser.reading.DefaultReaderSource',
- help='reader source class for the parser')
- parse.add_argument(
- '--presenter-source',
- default='aria.parser.presentation.DefaultPresenterSource',
- help='presenter source class for the parser')
- parse.add_argument(
- '--presenter',
- help='force use of this presenter class in parser')
- parse.add_argument(
- '--prefix', nargs='*',
- help='prefixes for imports')
- parse.add_flag_argument(
- 'debug',
- help_true='print debug info',
- help_false='don\'t print debug info')
- parse.add_flag_argument(
- 'cached-methods',
- help_true='enable cached methods',
- help_false='disable cached methods',
- default=True)
-
-
-@sub_parser_decorator(
- name='workflow',
- help='Run a workflow on a blueprint',
- formatter_class=SmartFormatter)
-def add_workflow_parser(workflow):
- """
- ``workflow`` command parser configuration
- """
- workflow.add_argument(
- 'uri',
- help='URI or file path to service template')
- workflow.add_argument(
- '-w', '--workflow',
- default='install',
- help='The workflow name')
- workflow.add_flag_argument(
- 'dry',
- default=True,
- help_true='dry run',
- help_false='wet run')
-
-
-@sub_parser_decorator(
- name='init',
- help='Initialize environment',
- formatter_class=SmartFormatter)
-def add_init_parser(init):
- """
- ``init`` command parser configuration
- """
- init.add_argument(
- '-d', '--deployment-id',
- required=True,
- help='A unique ID for the deployment')
- init.add_argument(
- '-p', '--blueprint-path',
- dest='blueprint_path',
- required=True,
- help='The path to the desired blueprint')
- init.add_argument(
- '-i', '--inputs',
- dest='input',
- action='append',
- help='R|Inputs for the local workflow creation \n'
- '(Can be provided as wildcard based paths (*.yaml, etc..) to YAML files, \n'
- 'a JSON string or as "key1=value1;key2=value2"). \n'
- 'This argument can be used multiple times')
- init.add_argument(
- '-b', '--blueprint-id',
- dest='blueprint_id',
- required=True,
- help='The blueprint ID'
- )
-
-
-@sub_parser_decorator(
- name='execute',
- help='Execute a workflow',
- formatter_class=SmartFormatter)
-def add_execute_parser(execute):
- """
- ``execute`` command parser configuration
- """
- execute.add_argument(
- '-d', '--deployment-id',
- required=True,
- help='A unique ID for the deployment')
- execute.add_argument(
- '-w', '--workflow',
- dest='workflow_id',
- help='The workflow to execute')
- execute.add_argument(
- '-p', '--parameters',
- dest='parameters',
- action='append',
- help='R|Parameters for the workflow execution\n'
- '(Can be provided as wildcard based paths (*.yaml, etc..) to YAML files,\n'
- 'a JSON string or as "key1=value1;key2=value2").\n'
- 'This argument can be used multiple times.')
- execute.add_argument(
- '--task-retries',
- dest='task_retries',
- type=int,
- help='How many times should a task be retried in case of failure')
- execute.add_argument(
- '--task-retry-interval',
- dest='task_retry_interval',
- default=1,
- type=int,
- help='How many seconds to wait before each task is retried')
-
-
-@sub_parser_decorator(
- name='csar-create',
- help='Create a CSAR file from a TOSCA service template directory',
- formatter_class=SmartFormatter)
-def add_csar_create_parser(parse):
- parse.add_argument(
- 'source',
- help='Service template directory')
- parse.add_argument(
- 'entry',
- help='Entry definition file relative to service template directory')
- parse.add_argument(
- '-d', '--destination',
- help='Output CSAR zip destination',
- required=True)
-
-
-@sub_parser_decorator(
- name='csar-open',
- help='Extracts a CSAR file to a TOSCA service template directory',
- formatter_class=SmartFormatter)
-def add_csar_open_parser(parse):
- parse.add_argument(
- 'source',
- help='CSAR file location')
- parse.add_argument(
- '-d', '--destination',
- help='Output directory to extract the CSAR into',
- required=True)
-
-
-@sub_parser_decorator(
- name='csar-validate',
- help='Validates a CSAR file',
- formatter_class=SmartFormatter)
-def add_csar_validate_parser(parse):
- parse.add_argument(
- 'source',
- help='CSAR file location')
-
-
-@sub_parser_decorator(
- name='spec',
- help='Specification tool',
- formatter_class=SmartFormatter)
-def add_spec_parser(spec):
- """
- ``spec`` command parser configuration
- """
- spec.add_argument(
- '--csv',
- action='store_true',
- help='output as CSV')
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/de2d57dd/aria/cli/cli.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli.py b/aria/cli/cli.py
deleted file mode 100644
index 8d014b3..0000000
--- a/aria/cli/cli.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI Entry point
-"""
-
-import os
-import logging
-import tempfile
-
-from .. import install_aria_extensions
-from ..logger import (
- create_logger,
- create_console_log_handler,
- create_file_log_handler,
- LoggerMixin,
-)
-from ..utils.exceptions import print_exception
-from .args_parser import config_parser
-from .commands import (
- ParseCommand,
- WorkflowCommand,
- InitCommand,
- ExecuteCommand,
- CSARCreateCommand,
- CSAROpenCommand,
- CSARValidateCommand,
- SpecCommand,
-)
-
-__version__ = '0.1.0'
-
-
-class AriaCli(LoggerMixin):
- """
- Context manager based class that enables proper top level error handling
- """
-
- def __init__(self, *args, **kwargs):
- super(AriaCli, self).__init__(*args, **kwargs)
- self.commands = {
- 'parse': ParseCommand.with_logger(base_logger=self.logger),
- 'workflow': WorkflowCommand.with_logger(base_logger=self.logger),
- 'init': InitCommand.with_logger(base_logger=self.logger),
- 'execute': ExecuteCommand.with_logger(base_logger=self.logger),
- 'csar-create': CSARCreateCommand.with_logger(base_logger=self.logger),
- 'csar-open': CSAROpenCommand.with_logger(base_logger=self.logger),
- 'csar-validate': CSARValidateCommand.with_logger(base_logger=self.logger),
- 'spec': SpecCommand.with_logger(base_logger=self.logger),
- }
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- """
- Here we will handle errors
- :param exc_type:
- :param exc_val:
- :param exc_tb:
- :return:
- """
- # todo: error handling
- # todo: cleanup if needed
- # TODO: user message if needed
- pass
-
- def run(self):
- """
- Parses user arguments and run the appropriate command
- """
- parser = config_parser()
- args, unknown_args = parser.parse_known_args()
-
- command_handler = self.commands[args.command]
- self.logger.debug('Running command: {args.command} handler: {0}'.format(
- command_handler, args=args))
- try:
- command_handler(args, unknown_args)
- except Exception as e:
- print_exception(e)
-
-
-def main():
- """
- CLI entry point
- """
- install_aria_extensions()
- create_logger(
- handlers=[
- create_console_log_handler(),
- create_file_log_handler(file_path=os.path.join(tempfile.gettempdir(), 'aria_cli.log')),
- ],
- level=logging.INFO)
- with AriaCli() as aria:
- aria.run()
-
-
-if __name__ == '__main__':
- main()
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/de2d57dd/aria/cli/cli/__init__.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli/__init__.py b/aria/cli/cli/__init__.py
new file mode 100644
index 0000000..ae1e83e
--- /dev/null
+++ b/aria/cli/cli/__init__.py
@@ -0,0 +1,14 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/de2d57dd/aria/cli/cli/aria.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli/aria.py b/aria/cli/cli/aria.py
new file mode 100644
index 0000000..baa72eb
--- /dev/null
+++ b/aria/cli/cli/aria.py
@@ -0,0 +1,453 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import sys
+import difflib
+import StringIO
+import traceback
+from functools import wraps
+
+import click
+
+from ..env import env, logger
+from ..cli import helptexts
+from ..inputs import inputs_to_dict
+from ..constants import SAMPLE_SERVICE_TEMPLATE_FILENAME
+from ...utils.exceptions import get_exception_as_string
+
+
+CLICK_CONTEXT_SETTINGS = dict(
+ help_option_names=['-h', '--help'],
+ token_normalize_func=lambda param: param.lower())
+
+
+class MutuallyExclusiveOption(click.Option):
+ """Makes options mutually exclusive. The option must pass a `cls` argument
+ with this class name and a `mutually_exclusive` argument with a list of
+ argument names it is mutually exclusive with.
+
+ NOTE: All mutually exclusive options must use this. It's not enough to
+ use it in just one of the options.
+ """
+
+ def __init__(self, *args, **kwargs):
+ self.mutually_exclusive = set(kwargs.pop('mutually_exclusive', []))
+ self.mutuality_error_message = \
+ kwargs.pop('mutuality_error_message',
+ helptexts.DEFAULT_MUTUALITY_MESSAGE)
+ self.mutuality_string = ', '.join(self.mutually_exclusive)
+ if self.mutually_exclusive:
+ help = kwargs.get('help', '')
+ kwargs['help'] = (
+ '{0}. This argument is mutually exclusive with '
+ 'arguments: [{1}] ({2})'.format(
+ help,
+ self.mutuality_string,
+ self.mutuality_error_message))
+ super(MutuallyExclusiveOption, self).__init__(*args, **kwargs)
+
+ def handle_parse_result(self, ctx, opts, args):
+ if self.mutually_exclusive.intersection(opts) and self.name in opts:
+ raise click.UsageError(
+ 'Illegal usage: `{0}` is mutually exclusive with '
+ 'arguments: [{1}] ({2}).'.format(
+ self.name,
+ self.mutuality_string,
+ self.mutuality_error_message))
+ return super(MutuallyExclusiveOption, self).handle_parse_result(
+ ctx, opts, args)
+
+
+def _format_version_data(version_data,
+ prefix=None,
+ suffix=None,
+ infix=None):
+ all_data = version_data.copy()
+ all_data['prefix'] = prefix or ''
+ all_data['suffix'] = suffix or ''
+ all_data['infix'] = infix or ''
+ output = StringIO.StringIO()
+ output.write('{prefix}{version}'.format(**all_data))
+ output.write('{suffix}'.format(**all_data))
+ return output.getvalue()
+
+
+def show_version(ctx, param, value):
+ if not value:
+ return
+
+ cli_version_data = env.get_version_data()
+ cli_version = _format_version_data(
+ cli_version_data,
+ prefix='ARIA CLI ',
+ infix=' ' * 5,
+ suffix='')
+
+ logger.info(cli_version)
+ ctx.exit()
+
+
+def inputs_callback(ctx, param, value):
+ """Allow to pass any inputs we provide to a command as
+ processed inputs instead of having to call `inputs_to_dict`
+ inside the command.
+
+ `@aria.options.inputs` already calls this callback so that
+ every time you use the option it returns the inputs as a
+ dictionary.
+ """
+ if not value:
+ return {}
+
+ return inputs_to_dict(value)
+
+
+def set_verbosity_level(ctx, param, value):
+ if not value:
+ return
+
+ env.logging.verbosity_level = value
+
+
+def set_cli_except_hook():
+
+ def recommend(possible_solutions):
+ logger.info('Possible solutions:')
+ for solution in possible_solutions:
+ logger.info(' - {0}'.format(solution))
+
+ def new_excepthook(tpe, value, tb):
+ if env.logging.is_high_verbose_level():
+ # log error including traceback
+ logger.error(get_exception_as_string(tpe, value, tb))
+ else:
+ # write the full error to the log file
+ with open(env.logging.log_file, 'a') as log_file:
+ traceback.print_exception(
+ etype=tpe,
+ value=value,
+ tb=tb,
+ file=log_file)
+ # print only the error message
+ print value
+
+ if hasattr(value, 'possible_solutions'):
+ recommend(getattr(value, 'possible_solutions'))
+
+ sys.excepthook = new_excepthook
+
+
+def pass_logger(func):
+ """Simply passes the logger to a command.
+ """
+ # Wraps here makes sure the original docstring propagates to click
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ return func(logger=logger, *args, **kwargs)
+
+ return wrapper
+
+
+def pass_plugin_manager(func):
+ """Simply passes the plugin manager to a command.
+ """
+ # Wraps here makes sure the original docstring propagates to click
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ return func(plugin_manager=env.plugin_manager, *args, **kwargs)
+
+ return wrapper
+
+
+def pass_model_storage(func):
+ """Simply passes the model storage to a command.
+ """
+ # Wraps here makes sure the original docstring propagates to click
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ return func(model_storage=env.model_storage, *args, **kwargs)
+
+ return wrapper
+
+
+def pass_resource_storage(func):
+ """Simply passes the resource storage to a command.
+ """
+ # Wraps here makes sure the original docstring propagates to click
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ return func(resource_storage=env.resource_storage, *args, **kwargs)
+
+ return wrapper
+
+
+def pass_context(func):
+ """Make click context ARIA specific
+
+ This exists purely for aesthetic reasons, otherwise
+    some decorators are called `@click.something` instead of
+ `@aria.something`
+ """
+ return click.pass_context(func)
+
+
+class AliasedGroup(click.Group):
+ def __init__(self, *args, **kwargs):
+ self.max_suggestions = kwargs.pop("max_suggestions", 3)
+ self.cutoff = kwargs.pop("cutoff", 0.5)
+ super(AliasedGroup, self).__init__(*args, **kwargs)
+
+ def get_command(self, ctx, cmd_name):
+ rv = click.Group.get_command(self, ctx, cmd_name)
+ if rv is not None:
+ return rv
+ matches = \
+ [x for x in self.list_commands(ctx) if x.startswith(cmd_name)]
+ if not matches:
+ return None
+ elif len(matches) == 1:
+ return click.Group.get_command(self, ctx, matches[0])
+ ctx.fail('Too many matches: {0}'.format(', '.join(sorted(matches))))
+
+ def resolve_command(self, ctx, args):
+ """Override clicks ``resolve_command`` method
+ and appends *Did you mean ...* suggestions
+ to the raised exception message.
+ """
+ try:
+ return super(AliasedGroup, self).resolve_command(ctx, args)
+ except click.exceptions.UsageError as error:
+ error_msg = str(error)
+ original_cmd_name = click.utils.make_str(args[0])
+ matches = difflib.get_close_matches(
+ original_cmd_name,
+ self.list_commands(ctx),
+ self.max_suggestions,
+ self.cutoff)
+ if matches:
+ error_msg += '\n\nDid you mean one of these?\n {0}'.format(
+ '\n '.join(matches))
+ raise click.exceptions.UsageError(error_msg, error.ctx)
+
+
+def group(name):
+ """Allow to create a group with a default click context
+    and a cls for click's `didyoumean` suggestions without having to repeat
+ it for every group.
+ """
+ return click.group(
+ name=name,
+ context_settings=CLICK_CONTEXT_SETTINGS,
+ cls=AliasedGroup)
+
+
+def command(*args, **kwargs):
+ """Make Click commands ARIA specific
+
+    This exists purely for aesthetic reasons, otherwise
+    some decorators are called `@click.something` instead of
+ `@aria.something`
+ """
+ return click.command(*args, **kwargs)
+
+
+def argument(*args, **kwargs):
+ """Make Click arguments ARIA specific
+
+ This exists purely for aesthetic reasons, otherwise
+    some decorators are called `@click.something` instead of
+ `@aria.something`
+ """
+ return click.argument(*args, **kwargs)
+
+
+class Options(object):
+ def __init__(self):
+ """The options api is nicer when you use each option by calling
+ `@aria.options.some_option` instead of `@aria.some_option`.
+
+ Note that some options are attributes and some are static methods.
+ The reason for that is that we want to be explicit regarding how
+    a developer sees an option. If it can receive arguments, it's a
+ method - if not, it's an attribute.
+ """
+ self.version = click.option(
+ '--version',
+ is_flag=True,
+ callback=show_version,
+ expose_value=False,
+ is_eager=True,
+ help=helptexts.VERSION)
+
+ self.inputs = click.option(
+ '-i',
+ '--inputs',
+ multiple=True,
+ callback=inputs_callback,
+ help=helptexts.INPUTS)
+
+ self.json_output = click.option(
+ '--json-output',
+ is_flag=True,
+ help=helptexts.JSON_OUTPUT)
+
+ self.init_hard_reset = click.option(
+ '--hard',
+ is_flag=True,
+ help=helptexts.HARD_RESET)
+
+ self.reset_context = click.option(
+ '-r',
+ '--reset-context',
+ is_flag=True,
+ help=helptexts.RESET_CONTEXT)
+
+ self.enable_colors = click.option(
+ '--enable-colors',
+ is_flag=True,
+ default=False,
+ help=helptexts.ENABLE_COLORS)
+
+ self.node_name = click.option(
+ '-n',
+ '--node-name',
+ required=False,
+ help=helptexts.NODE_NAME)
+
+ self.descending = click.option(
+ '--descending',
+ required=False,
+ is_flag=True,
+ default=False,
+ help=helptexts.DESCENDING)
+
+ @staticmethod
+ def verbose(expose_value=False):
+ return click.option(
+ '-v',
+ '--verbose',
+ count=True,
+ callback=set_verbosity_level,
+ expose_value=expose_value,
+ is_eager=True,
+ help=helptexts.VERBOSE)
+
+ @staticmethod
+ def force(help):
+ return click.option(
+ '-f',
+ '--force',
+ is_flag=True,
+ help=help)
+
+ @staticmethod
+ def service_template_filename():
+ return click.option(
+ '-n',
+ '--service-template-filename',
+ default=SAMPLE_SERVICE_TEMPLATE_FILENAME,
+ help=helptexts.SERVICE_TEMPLATE_FILENAME)
+
+ @staticmethod
+ def workflow_id(default):
+ return click.option(
+ '-w',
+ '--workflow-id',
+ default=default,
+ help=helptexts.WORKFLOW_TO_EXECUTE.format(default))
+
+ @staticmethod
+ def task_thread_pool_size(default=1):
+ return click.option(
+ '--task-thread-pool-size',
+ type=int,
+ default=default,
+ help=helptexts.TASK_THREAD_POOL_SIZE.format(default))
+
+ @staticmethod
+ def task_max_attempts(default=1):
+ return click.option(
+ '--task-max-attempts',
+ type=int,
+ default=default,
+ help=helptexts.TASK_MAX_ATTEMPTS.format(default))
+
+ @staticmethod
+ def sort_by(default='created_at'):
+ return click.option(
+ '--sort-by',
+ required=False,
+ default=default,
+ help=helptexts.SORT_BY)
+
+ @staticmethod
+ def task_retry_interval(default=1):
+ return click.option(
+ '--task-retry-interval',
+ type=int,
+ default=default,
+ help=helptexts.TASK_RETRY_INTERVAL.format(default))
+
+ @staticmethod
+ def service_id(required=False):
+ return click.option(
+ '-s',
+ '--service-id',
+ required=required,
+ help=helptexts.SERVICE_ID)
+
+ @staticmethod
+ def execution_id(required=False):
+ return click.option(
+ '-e',
+ '--execution-id',
+ required=required,
+ help=helptexts.EXECUTION_ID)
+
+ @staticmethod
+ def service_template_id(required=False):
+ return click.option(
+ '-t',
+ '--service-template-id',
+ required=required,
+ help=helptexts.SERVICE_TEMPLATE_ID)
+
+ @staticmethod
+ def service_template_path(required=False):
+ return click.option(
+ '-p',
+ '--service-template-path',
+ required=required,
+ type=click.Path(exists=True))
+
+ @staticmethod
+ def service_name(required=False):
+ return click.option(
+ '-s',
+ '--service-name',
+ required=required,
+ help=helptexts.SERVICE_ID)
+
+ @staticmethod
+ def service_template_name(required=False):
+ return click.option(
+ '-t',
+ '--service-template-name',
+ required=required,
+ help=helptexts.SERVICE_ID)
+
+
+options = Options()
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/de2d57dd/aria/cli/cli/helptexts.py
----------------------------------------------------------------------
diff --git a/aria/cli/cli/helptexts.py b/aria/cli/cli/helptexts.py
new file mode 100644
index 0000000..02519cb
--- /dev/null
+++ b/aria/cli/cli/helptexts.py
@@ -0,0 +1,56 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+VERBOSE = \
+ "Show verbose output. You can supply this up to three times (i.e. -vvv)"
+VERSION = "Display the version and exit"
+
+INPUTS_PARAMS_USAGE = (
+ '(Can be provided as wildcard based paths '
+ '(*.yaml, /my_inputs/, etc..) to YAML files, a JSON string or as '
+ 'key1=value1;key2=value2). This argument can be used multiple times'
+)
+WORKFLOW_TO_EXECUTE = "The workflow to execute [default: {0}]"
+
+SERVICE_TEMPLATE_PATH = "The path to the application's service template file"
+SERVICE_TEMPLATE_ID = "The unique identifier for the service template"
+
+RESET_CONTEXT = "Reset the working environment"
+HARD_RESET = "Hard reset the configuration, including coloring and loggers"
+ENABLE_COLORS = "Enable colors in logger (use --hard when working with" \
+ " an initialized environment) [default: False]"
+
+SERVICE_TEMPLATE_FILENAME = (
+ "The name of the archive's main service template file. "
+ "This is only relevant if uploading an archive")
+INPUTS = "Inputs for the service {0}".format(INPUTS_PARAMS_USAGE)
+PARAMETERS = "Parameters for the workflow {0}".format(INPUTS_PARAMS_USAGE)
+TASK_RETRY_INTERVAL = \
+ "How long of a minimal interval should occur between task retry attempts [default: {0}]"
+TASK_MAX_ATTEMPTS = \
+ "How many times should a task be attempted in case of failures [default: {0}]"
+
+JSON_OUTPUT = "Output events in a consumable JSON format"
+
+SERVICE_ID = "The unique identifier for the service"
+EXECUTION_ID = "The unique identifier for the execution"
+IGNORE_RUNNING_NODES = "Delete the service even if it has running nodes"
+
+NODE_NAME = "The node's name"
+
+DEFAULT_MUTUALITY_MESSAGE = 'Cannot be used simultaneously'
+
+SORT_BY = "Key for sorting the list"
+DESCENDING = "Sort list in descending order [default: False]"
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/de2d57dd/aria/cli/colorful_event.py
----------------------------------------------------------------------
diff --git a/aria/cli/colorful_event.py b/aria/cli/colorful_event.py
new file mode 100644
index 0000000..53e3d02
--- /dev/null
+++ b/aria/cli/colorful_event.py
@@ -0,0 +1,152 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from contextlib import contextmanager
+
+from colorama import Fore, Style
+
+from cloudify.event import Event
+
+
def colorful_property(prop):
    """
    A decorator for coloring values of the parent event type properties
    :param prop: the property to color (should be a method returning a color)
    :return: a property which colors the value of the parent event type's
    property with the same name
    """
    def _colored_getter(self):
        # fetch the raw value from the parent event type's property of the
        # same name, then wrap it in the color the decorated method returns
        raw_value = getattr(super(ColorfulEvent, self), prop.__name__)
        return self._color_message(raw_value, prop(self))
    return property(_colored_getter)
+
+
class ColorfulEvent(Event):
    """
    An ``Event`` subclass that wraps the displayed fields in ANSI color
    codes (via colorama) so workflow/task events and log messages are
    easier to scan in a terminal.

    Fix vs. original: ``_nest_colors`` now restores the previous color
    context in a ``finally`` block, so an exception raised while rendering
    a nested field can no longer leak its color into subsequent messages.
    """

    RESET_COLOR = Fore.RESET + Style.RESET_ALL

    TIMESTAMP_COLOR = Fore.RESET
    LOG_TYPE_COLOR = Fore.YELLOW
    EVENT_TYPE_COLOR = Fore.MAGENTA
    DEPLOYMENT_ID_COLOR = Fore.CYAN
    OPERATION_INFO_COLOR = Fore.RESET
    NODE_ID_COLOR = Fore.BLUE
    SOURCE_ID_COLOR = Fore.BLUE
    TARGET_ID_COLOR = Fore.BLUE
    OPERATION_COLOR = Fore.YELLOW

    # colors entire message part according to event type
    _message_color_by_event_type = {
        'workflow_started': Style.BRIGHT + Fore.GREEN,
        'workflow_succeeded': Style.BRIGHT + Fore.GREEN,
        'workflow_failed': Style.BRIGHT + Fore.RED,
        'workflow_cancelled': Style.BRIGHT + Fore.YELLOW,

        'sending_task': Fore.RESET,
        'task_started': Fore.RESET,
        'task_succeeded': Fore.GREEN,
        'task_rescheduled': Fore.YELLOW,
        'task_failed': Fore.RED
    }

    # colors only the log level part
    _log_level_to_color = {
        'INFO': Fore.CYAN,
        'WARN': Fore.YELLOW,
        'WARNING': Fore.YELLOW,
        'ERROR': Fore.RED,
        'FATAL': Fore.RED
    }

    # the color restored after a colored field ends; temporarily overridden
    # by _nest_colors while rendering a field inside another colored field
    _color_context = RESET_COLOR

    @property
    def operation_info(self):
        """The parent's operation_info, with nested ids individually colored."""
        color = self.OPERATION_INFO_COLOR

        with self._nest_colors(color):
            op_info = super(ColorfulEvent, self).operation_info

        return self._color_message(op_info, color)

    @property
    def text(self):
        """The parent's message text, colored according to the event type."""
        event_type = super(ColorfulEvent, self).event_type  # might be None
        color = self._message_color_by_event_type.get(event_type)

        with self._nest_colors(color):
            msg = super(ColorfulEvent, self).text

        return self._color_message(msg, color)

    @property
    def log_level(self):
        """The parent's log level, colored by severity (see _log_level_to_color)."""
        lvl = super(ColorfulEvent, self).log_level
        color = self._log_level_to_color.get(lvl)
        return self._color_message(lvl, color)

    @colorful_property
    def timestamp(self):
        return self.TIMESTAMP_COLOR

    @colorful_property
    def printable_timestamp(self):
        return self.TIMESTAMP_COLOR

    @colorful_property
    def event_type_indicator(self):
        return self.LOG_TYPE_COLOR if self.is_log_message else \
            self.EVENT_TYPE_COLOR

    @colorful_property
    def operation(self):
        return self.OPERATION_COLOR

    @colorful_property
    def node_id(self):
        return self.NODE_ID_COLOR

    @colorful_property
    def source_id(self):
        return self.SOURCE_ID_COLOR

    @colorful_property
    def target_id(self):
        return self.TARGET_ID_COLOR

    @colorful_property
    def deployment_id(self):
        return self.DEPLOYMENT_ID_COLOR

    @contextmanager
    def _nest_colors(self, nesting_color):
        """
        Temporarily switch the color context to `nesting_color` (if truthy)
        for the duration of the `with` block.
        """
        prev_color_context = self._color_context
        if nesting_color:
            self._color_context = nesting_color
        try:
            yield
        finally:
            # restore even when the nested rendering raises, so one failed
            # field cannot leave a stale color context behind
            self._color_context = prev_color_context

    def _color_message(self, val, color):
        """Wrap `val` in `color`, falling back to the current color context
        afterwards; no-op when `val` or `color` is falsy."""
        if not val or not color:
            return val

        return "{0}{1}{2}".format(
            color,
            val,
            self._color_context)
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/de2d57dd/aria/cli/commands.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands.py b/aria/cli/commands.py
deleted file mode 100644
index ee329e7..0000000
--- a/aria/cli/commands.py
+++ /dev/null
@@ -1,546 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-CLI various commands implementation
-"""
-
-import json
-import os
-import sys
-import csv
-import shutil
-import tempfile
-from glob import glob
-from importlib import import_module
-
-from ruamel import yaml # @UnresolvedImport
-
-from .. import extension
-from ..logger import LoggerMixin
-from ..parser import iter_specifications
-from ..parser.consumption import (
- ConsumptionContext,
- ConsumerChain,
- Read,
- Validate,
- ServiceTemplate,
- Types,
- Inputs,
- ServiceInstance
-)
-from ..parser.loading import LiteralLocation, UriLocation
-from ..utils.application import StorageManager
-from ..utils.caching import cachedmethod
-from ..utils.console import (puts, Colored, indent)
-from ..utils.imports import (import_fullname, import_modules)
-from ..utils.collections import OrderedDict
-from ..orchestrator import WORKFLOW_DECORATOR_RESERVED_ARGUMENTS
-from ..orchestrator.runner import Runner
-from ..orchestrator.workflows.builtin import BUILTIN_WORKFLOWS
-from .dry import convert_to_dry
-
-from .exceptions import (
- AriaCliFormatInputsError,
- AriaCliYAMLInputsError,
- AriaCliInvalidInputsError
-)
-from . import csar
-
-
-class BaseCommand(LoggerMixin):
- """
- Base class for CLI commands.
- """
-
- def __repr__(self):
- return 'AriaCli({cls.__name__})'.format(cls=self.__class__)
-
- def __call__(self, args_namespace, unknown_args):
- """
- __call__ method is called when running command
- :param args_namespace:
- """
- pass
-
- def parse_inputs(self, inputs):
- """
- Returns a dictionary of inputs `resources` can be:
- - A list of files.
- - A single file
- - A directory containing multiple input files
- - A key1=value1;key2=value2 pairs string.
- - Wildcard based string (e.g. *-inputs.yaml)
- """
-
- parsed_dict = {}
-
- def _format_to_dict(input_string):
- self.logger.info('Processing inputs source: {0}'.format(input_string))
- try:
- input_string = input_string.strip()
- try:
- parsed_dict.update(json.loads(input_string))
- except BaseException:
- parsed_dict.update((i.split('=')
- for i in input_string.split(';')
- if i))
- except Exception as exc:
- raise AriaCliFormatInputsError(str(exc), inputs=input_string)
-
- def _handle_inputs_source(input_path):
- self.logger.info('Processing inputs source: {0}'.format(input_path))
- try:
- with open(input_path) as input_file:
- content = yaml.safe_load(input_file)
- except yaml.YAMLError as exc:
- raise AriaCliYAMLInputsError(
- '"{0}" is not a valid YAML. {1}'.format(input_path, str(exc)))
- if isinstance(content, dict):
- parsed_dict.update(content)
- return
- if content is None:
- return
- raise AriaCliInvalidInputsError('Invalid inputs', inputs=input_path)
-
- for input_string in inputs if isinstance(inputs, list) else [inputs]:
- if os.path.isdir(input_string):
- for input_file in os.listdir(input_string):
- _handle_inputs_source(os.path.join(input_string, input_file))
- continue
- input_files = glob(input_string)
- if input_files:
- for input_file in input_files:
- _handle_inputs_source(input_file)
- continue
- _format_to_dict(input_string)
- return parsed_dict
-
-
-class ParseCommand(BaseCommand):
- """
- :code:`parse` command.
-
- Given a blueprint, emits information in human-readable, JSON, or YAML format from various phases
- of the ARIA parser.
- """
-
- def __call__(self, args_namespace, unknown_args):
- super(ParseCommand, self).__call__(args_namespace, unknown_args)
-
- if args_namespace.prefix:
- for prefix in args_namespace.prefix:
- extension.parser.uri_loader_prefix().append(prefix)
-
- cachedmethod.ENABLED = args_namespace.cached_methods
-
- context = ParseCommand.create_context_from_namespace(args_namespace)
- context.args = unknown_args
-
- consumer = ConsumerChain(context, (Read, Validate))
-
- consumer_class_name = args_namespace.consumer
- dumper = None
- if consumer_class_name == 'validate':
- dumper = None
- elif consumer_class_name == 'presentation':
- dumper = consumer.consumers[0]
- elif consumer_class_name == 'template':
- consumer.append(ServiceTemplate)
- elif consumer_class_name == 'types':
- consumer.append(ServiceTemplate, Types)
- elif consumer_class_name == 'instance':
- consumer.append(ServiceTemplate, Inputs, ServiceInstance)
- else:
- consumer.append(ServiceTemplate, Inputs, ServiceInstance)
- consumer.append(import_fullname(consumer_class_name))
-
- if dumper is None:
- # Default to last consumer
- dumper = consumer.consumers[-1]
-
- consumer.consume()
-
- if not context.validation.dump_issues():
- dumper.dump()
- exit(1)
-
- @staticmethod
- def create_context_from_namespace(namespace, **kwargs):
- args = vars(namespace).copy()
- args.update(kwargs)
- return ParseCommand.create_context(**args)
-
- @staticmethod
- def create_context(uri,
- loader_source,
- reader_source,
- presenter_source,
- presenter,
- debug,
- **kwargs):
- context = ConsumptionContext()
- context.loading.loader_source = import_fullname(loader_source)()
- context.reading.reader_source = import_fullname(reader_source)()
- context.presentation.location = UriLocation(uri) if isinstance(uri, basestring) else uri
- context.presentation.presenter_source = import_fullname(presenter_source)()
- context.presentation.presenter_class = import_fullname(presenter)
- context.presentation.print_exceptions = debug
- return context
-
-
-class WorkflowCommand(BaseCommand):
- """
- :code:`workflow` command.
- """
-
- WORKFLOW_POLICY_INTERNAL_PROPERTIES = ('implementation', 'dependencies')
-
- def __call__(self, args_namespace, unknown_args):
- super(WorkflowCommand, self).__call__(args_namespace, unknown_args)
-
- context = self._parse(args_namespace.uri)
- workflow_fn, inputs = self._get_workflow(context, args_namespace.workflow)
- self._dry = args_namespace.dry
- self._run(context, args_namespace.workflow, workflow_fn, inputs)
-
- def _parse(self, uri):
- # Parse
- context = ConsumptionContext()
- context.presentation.location = UriLocation(uri)
- consumer = ConsumerChain(context, (Read, Validate, ServiceTemplate, Inputs,
- ServiceInstance))
- consumer.consume()
-
- if context.validation.dump_issues():
- exit(1)
-
- return context
-
- def _get_workflow(self, context, workflow_name):
- if workflow_name in BUILTIN_WORKFLOWS:
- workflow_fn = import_fullname('aria.orchestrator.workflows.builtin.{0}'.format(
- workflow_name))
- inputs = {}
- else:
- workflow = context.modeling.instance.policies.get(workflow_name)
- if workflow is None:
- raise AttributeError('workflow policy does not exist: "{0}"'.format(workflow_name))
- if workflow.type.role != 'workflow':
- raise AttributeError('policy is not a workflow: "{0}"'.format(workflow_name))
-
- sys.path.append(os.path.dirname(str(context.presentation.location)))
-
- workflow_fn = import_fullname(workflow.properties['implementation'].value)
-
- for k in workflow.properties:
- if k in WORKFLOW_DECORATOR_RESERVED_ARGUMENTS:
- raise AttributeError('workflow policy "{0}" defines a reserved property: "{1}"'
- .format(workflow_name, k))
-
- inputs = OrderedDict([
- (k, v.value) for k, v in workflow.properties.iteritems()
- if k not in WorkflowCommand.WORKFLOW_POLICY_INTERNAL_PROPERTIES
- ])
-
- return workflow_fn, inputs
-
- def _run(self, context, workflow_name, workflow_fn, inputs):
- # Storage
- def _initialize_storage(model_storage):
- if self._dry:
- convert_to_dry(context.modeling.instance)
- context.modeling.store(model_storage)
-
- # Create runner
- runner = Runner(workflow_name, workflow_fn, inputs, _initialize_storage,
- lambda: context.modeling.instance.id)
-
- # Run
- runner.run()
-
-
-class InitCommand(BaseCommand):
- """
- :code:`init` command.
-
- Broken. Currently maintained for reference.
- """
-
- _IN_VIRTUAL_ENV = hasattr(sys, 'real_prefix')
-
- def __call__(self, args_namespace, unknown_args):
- super(InitCommand, self).__call__(args_namespace, unknown_args)
- self._workspace_setup()
- inputs = self.parse_inputs(args_namespace.input) if args_namespace.input else None
- plan, deployment_plan = self._parse_blueprint(args_namespace.blueprint_path, inputs)
- self._create_storage(
- blueprint_plan=plan,
- blueprint_path=args_namespace.blueprint_path,
- deployment_plan=deployment_plan,
- blueprint_id=args_namespace.blueprint_id,
- deployment_id=args_namespace.deployment_id,
- main_file_name=os.path.basename(args_namespace.blueprint_path))
- self.logger.info('Initiated {0}'.format(args_namespace.blueprint_path))
- self.logger.info(
- 'If you make changes to the blueprint, '
- 'run `aria local init -p {0}` command again to apply them'.format(
- args_namespace.blueprint_path))
-
- def _workspace_setup(self):
- try:
- create_user_space()
- self.logger.debug(
- 'created user space path in: {0}'.format(user_space()))
- except IOError:
- self.logger.debug(
- 'user space path already exist - {0}'.format(user_space()))
- try:
- create_local_storage()
- self.logger.debug(
- 'created local storage path in: {0}'.format(local_storage()))
- except IOError:
- self.logger.debug(
- 'local storage path already exist - {0}'.format(local_storage()))
- return local_storage()
-
- def _parse_blueprint(self, blueprint_path, inputs=None):
- # TODO
- pass
-
- @staticmethod
- def _create_storage(
- blueprint_path,
- blueprint_plan,
- deployment_plan,
- blueprint_id,
- deployment_id,
- main_file_name=None):
- resource_storage = application_resource_storage(
- FileSystemResourceDriver(local_resource_storage()))
- model_storage = application_model_storage(
- FileSystemModelDriver(local_model_storage()))
- resource_storage.setup()
- model_storage.setup()
- storage_manager = StorageManager(
- model_storage=model_storage,
- resource_storage=resource_storage,
- blueprint_path=blueprint_path,
- blueprint_id=blueprint_id,
- blueprint_plan=blueprint_plan,
- deployment_id=deployment_id,
- deployment_plan=deployment_plan
- )
- storage_manager.create_blueprint_storage(
- blueprint_path,
- main_file_name=main_file_name
- )
- storage_manager.create_nodes_storage()
- storage_manager.create_deployment_storage()
- storage_manager.create_node_instances_storage()
-
-
-class ExecuteCommand(BaseCommand):
- """
- :code:`execute` command.
-
- Broken. Currently maintained for reference.
- """
-
- def __call__(self, args_namespace, unknown_args):
- super(ExecuteCommand, self).__call__(args_namespace, unknown_args)
- parameters = (self.parse_inputs(args_namespace.parameters)
- if args_namespace.parameters else {})
- resource_storage = application_resource_storage(
- FileSystemResourceDriver(local_resource_storage()))
- model_storage = application_model_storage(
- FileSystemModelDriver(local_model_storage()))
- deployment = model_storage.service_instance.get(args_namespace.deployment_id)
-
- try:
- workflow = deployment.workflows[args_namespace.workflow_id]
- except KeyError:
- raise ValueError(
- '{0} workflow does not exist. existing workflows are: {1}'.format(
- args_namespace.workflow_id,
- deployment.workflows.keys()))
-
- workflow_parameters = self._merge_and_validate_execution_parameters(
- workflow,
- args_namespace.workflow_id,
- parameters
- )
- workflow_context = WorkflowContext(
- name=args_namespace.workflow_id,
- model_storage=model_storage,
- resource_storage=resource_storage,
- deployment_id=args_namespace.deployment_id,
- workflow_id=args_namespace.workflow_id,
- parameters=workflow_parameters,
- )
- workflow_function = self._load_workflow_handler(workflow['operation'])
- tasks_graph = workflow_function(workflow_context, **workflow_context.parameters)
- executor = ProcessExecutor()
- workflow_engine = Engine(executor=executor,
- workflow_context=workflow_context,
- tasks_graph=tasks_graph)
- workflow_engine.execute()
- executor.close()
-
- @staticmethod
- def _merge_and_validate_execution_parameters(
- workflow,
- workflow_name,
- execution_parameters):
- merged_parameters = {}
- workflow_parameters = workflow.get('parameters', {})
- missing_mandatory_parameters = set()
-
- for name, param in workflow_parameters.iteritems():
- if 'default' not in param:
- if name not in execution_parameters:
- missing_mandatory_parameters.add(name)
- continue
- merged_parameters[name] = execution_parameters[name]
- continue
- merged_parameters[name] = (execution_parameters[name] if name in execution_parameters
- else param['default'])
-
- if missing_mandatory_parameters:
- raise ValueError(
- 'Workflow "{0}" must be provided with the following '
- 'parameters to execute: {1}'.format(
- workflow_name, ','.join(missing_mandatory_parameters)))
-
- custom_parameters = dict(
- (k, v) for (k, v) in execution_parameters.iteritems()
- if k not in workflow_parameters)
-
- if custom_parameters:
- raise ValueError(
- 'Workflow "{0}" does not have the following parameters declared: {1}. '
- 'Remove these parameters'.format(
- workflow_name, ','.join(custom_parameters.keys())))
-
- return merged_parameters
-
- @staticmethod
- def _load_workflow_handler(handler_path):
- module_name, spec_handler_name = handler_path.rsplit('.', 1)
- try:
- module = import_module(module_name)
- return getattr(module, spec_handler_name)
- except ImportError:
- # TODO: exception handler
- raise
- except AttributeError:
- # TODO: exception handler
- raise
-
-
-class BaseCSARCommand(BaseCommand):
- @staticmethod
- def _parse_and_dump(reader):
- context = ConsumptionContext()
- context.loading.prefixes += [os.path.join(reader.destination, 'definitions')]
- context.presentation.location = LiteralLocation(reader.entry_definitions_yaml)
- chain = ConsumerChain(context, (Read, Validate, Model, Instance))
- chain.consume()
- if context.validation.dump_issues():
- raise RuntimeError('Validation failed')
- dumper = chain.consumers[-1]
- dumper.dump()
-
- def _read(self, source, destination):
- reader = csar.read(
- source=source,
- destination=destination,
- logger=self.logger)
- self.logger.info(
- 'Path: {r.destination}\n'
- 'TOSCA meta file version: {r.meta_file_version}\n'
- 'CSAR Version: {r.csar_version}\n'
- 'Created By: {r.created_by}\n'
- 'Entry definitions: {r.entry_definitions}'
- .format(r=reader))
- self._parse_and_dump(reader)
-
- def _validate(self, source):
- workdir = tempfile.mkdtemp()
- try:
- self._read(
- source=source,
- destination=workdir)
- finally:
- shutil.rmtree(workdir, ignore_errors=True)
-
-
-class CSARCreateCommand(BaseCSARCommand):
- def __call__(self, args_namespace, unknown_args):
- super(CSARCreateCommand, self).__call__(args_namespace, unknown_args)
- csar.write(
- source=args_namespace.source,
- entry=args_namespace.entry,
- destination=args_namespace.destination,
- logger=self.logger)
- self._validate(args_namespace.destination)
-
-
-class CSAROpenCommand(BaseCSARCommand):
- def __call__(self, args_namespace, unknown_args):
- super(CSAROpenCommand, self).__call__(args_namespace, unknown_args)
- self._read(
- source=args_namespace.source,
- destination=args_namespace.destination)
-
-
-class CSARValidateCommand(BaseCSARCommand):
- def __call__(self, args_namespace, unknown_args):
- super(CSARValidateCommand, self).__call__(args_namespace, unknown_args)
- self._validate(args_namespace.source)
-
-
-class SpecCommand(BaseCommand):
- """
- :code:`spec` command.
-
- Emits all uses of :code:`@dsl_specification` in the codebase, in human-readable or CSV format.
- """
-
- def __call__(self, args_namespace, unknown_args):
- super(SpecCommand, self).__call__(args_namespace, unknown_args)
-
- # Make sure that all @dsl_specification decorators are processed
- for pkg in extension.parser.specification_package():
- import_modules(pkg)
-
- # TODO: scan YAML documents as well
-
- if args_namespace.csv:
- writer = csv.writer(sys.stdout, quoting=csv.QUOTE_ALL)
- writer.writerow(('Specification', 'Section', 'Code', 'URL'))
- for spec, sections in iter_specifications():
- for section, details in sections:
- writer.writerow((spec, section, details['code'], details['url']))
-
- else:
- for spec, sections in iter_specifications():
- puts(Colored.cyan(spec))
- with indent(2):
- for section, details in sections:
- puts(Colored.blue(section))
- with indent(2):
- for k, v in details.iteritems():
- puts('%s: %s' % (Colored.magenta(k), v))
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/de2d57dd/aria/cli/commands/__init__.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/__init__.py b/aria/cli/commands/__init__.py
new file mode 100644
index 0000000..ae1e83e
--- /dev/null
+++ b/aria/cli/commands/__init__.py
@@ -0,0 +1,14 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/de2d57dd/aria/cli/commands/executions.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/executions.py b/aria/cli/commands/executions.py
new file mode 100644
index 0000000..d767fa1
--- /dev/null
+++ b/aria/cli/commands/executions.py
@@ -0,0 +1,172 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import time
+
+from .. import utils
+from ..table import print_data
+from ..cli import aria, helptexts
+from ..exceptions import AriaCliError
+from ...modeling.models import Execution
+from ...storage.exceptions import StorageError
+from ...orchestrator.workflow_runner import WorkflowRunner
+from ...utils import formatting
+from ...utils import threading
+
# Columns shown by the `executions list` / `executions show` tables.
EXECUTION_COLUMNS = ['id', 'workflow_name', 'status', 'service_name',
                     'created_at', 'error']
+
+
@aria.group(name='executions')
@aria.options.verbose()
def executions():
    # Click group: body intentionally empty; subcommands attach via
    # @executions.command. Docstring doubles as the CLI help text.
    """Handle workflow executions
    """
+
+
@executions.command(name='show',
                    short_help='Show execution information')
@aria.argument('execution-id')
@aria.options.verbose()
@aria.pass_model_storage
@aria.pass_logger
def show(execution_id, model_storage, logger):
    """Show information for a specific execution

    `EXECUTION_ID` is the execution to get information on.

    Prints the execution's row (see EXECUTION_COLUMNS) followed by its
    inputs, one per line.
    """
    try:
        logger.info('Showing execution {0}'.format(execution_id))
        # a storage miss is translated into a user-facing CLI error
        # instead of a traceback
        execution = model_storage.execution.get(execution_id)
    except StorageError:
        raise AriaCliError('Execution {0} not found'.format(execution_id))

    print_data(EXECUTION_COLUMNS, execution.to_dict(), 'Execution:', max_width=50)

    # print execution parameters
    logger.info('Execution Inputs:')
    if execution.inputs:
        # TODO check this section, haven't tested it
        # NOTE(review): execution_inputs is built as a *list* of dicts, yet
        # formatting.decode_dict(...).iteritems() is then used as if it
        # returned a single dict -- this branch looks broken; confirm the
        # actual shape of execution.inputs before relying on it
        execution_inputs = [ei.to_dict() for ei in execution.inputs]
        for input_name, input_value in formatting.decode_dict(
                execution_inputs).iteritems():
            logger.info('\t{0}: \t{1}'.format(input_name, input_value))
    else:
        logger.info('\tNo inputs')
    logger.info('')
+
+
@executions.command(name='list',
                    short_help='List service executions')
@aria.options.service_name(required=False)
@aria.options.sort_by()
@aria.options.descending
@aria.options.verbose()
@aria.pass_model_storage
@aria.pass_logger
def list(service_name,
         sort_by,
         descending,
         model_storage,
         logger):
    """List executions

    If `SERVICE_NAME` is provided, list executions for that service.
    Otherwise, list executions for all services.
    """
    # build the storage filters first: scoped to one service when a name
    # was given, unfiltered otherwise
    filters = {}
    if service_name:
        logger.info('Listing executions for service {0}...'.format(
            service_name))
        try:
            filters['service'] = model_storage.service.get_by_name(service_name)
        except StorageError:
            raise AriaCliError('Service {0} does not exist'.format(
                service_name))
    else:
        logger.info('Listing all executions...')

    sort = utils.storage_sort_param(sort_by, descending)
    execution_dicts = [execution.to_dict()
                       for execution in model_storage.execution.list(
                           filters=filters, sort=sort)]

    print_data(EXECUTION_COLUMNS, execution_dicts, 'Executions:')
+
+
@executions.command(name='start',
                    short_help='Execute a workflow')
@aria.argument('workflow-name')
@aria.options.service_name(required=True)
@aria.options.inputs
@aria.options.task_max_attempts()
@aria.options.task_retry_interval()
@aria.options.verbose()
@aria.pass_model_storage
@aria.pass_resource_storage
@aria.pass_plugin_manager
@aria.pass_logger
def start(workflow_name,
          service_name,
          inputs,
          task_max_attempts,
          task_retry_interval,
          model_storage,
          resource_storage,
          plugin_manager,
          logger):
    """Execute a workflow

    `WORKFLOW_NAME` is the name of the workflow to execute (e.g. `uninstall`)
    """
    workflow_runner = \
        WorkflowRunner(workflow_name, service_name, inputs,
                       model_storage, resource_storage, plugin_manager,
                       task_max_attempts, task_retry_interval)

    # run the workflow on a background thread so the main thread can keep
    # observing Ctrl+C for graceful / forced cancellation
    execution_thread_name = '{0}_{1}'.format(service_name, workflow_name)
    execution_thread = threading.ExceptionThread(target=workflow_runner.execute,
                                                 name=execution_thread_name)
    execution_thread.daemon = True  # allows force-cancel to exit immediately

    # (fixed message: was "Press Ctrl+C cancel")
    logger.info('Starting execution. Press Ctrl+C to cancel')
    execution_thread.start()
    try:
        while execution_thread.is_alive():
            # using join without a timeout blocks and ignores KeyboardInterrupt
            execution_thread.join(1)
    except KeyboardInterrupt:
        _cancel_execution(workflow_runner, execution_thread, logger)

    # raise any errors from the execution thread (note these are not workflow execution errors)
    execution_thread.raise_error_if_exists()

    execution = workflow_runner.execution
    logger.info('Execution has ended with "{0}" status'.format(execution.status))
    if execution.status == Execution.FAILED:
        logger.info('Execution error:\n{0}'.format(execution.error))
+
+
def _cancel_execution(workflow_runner, execution_thread, logger):
    # First Ctrl+C: request a graceful cancel and wait for the worker
    # thread to wind down; a second Ctrl+C stops waiting (force-cancel).
    logger.info('Cancelling execution. Press Ctrl+C again to force-cancel')
    try:
        workflow_runner.cancel()
        # poll with a timeout so KeyboardInterrupt stays observable
        while True:
            if not execution_thread.is_alive():
                break
            execution_thread.join(1)
    except KeyboardInterrupt:
        logger.info('Force-cancelling execution')
        # TODO handle execution (update status etc.) and exit process
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/de2d57dd/aria/cli/commands/logs.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/logs.py b/aria/cli/commands/logs.py
new file mode 100644
index 0000000..3662063
--- /dev/null
+++ b/aria/cli/commands/logs.py
@@ -0,0 +1,68 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import utils
+from ..cli import aria
+
+
@aria.group(name='logs')
@aria.options.verbose()
def logs():
    # Click group: body intentionally empty; subcommands attach via
    # @logs.command. Docstring doubles as the CLI help text.
    """Show logs from workflow executions
    """
+
+
@logs.command(name='list',
              short_help='List execution logs')
@aria.argument('execution-id')
@aria.options.json_output
@aria.options.verbose()
@aria.pass_model_storage
@aria.pass_logger
def list(execution_id,
         json_output,
         model_storage,
         logger):
    """Display logs for an execution

    `EXECUTION_ID` is the execution whose logs are displayed.
    """
    logger.info('Listing logs for execution id {0}'.format(execution_id))
    logs = model_storage.log.list(filters=dict(execution_fk=execution_id),
                                  sort=utils.storage_sort_param('created_at', False))
    # TODO: print logs nicely; `json_output` is currently accepted but unused
    if logs:
        for log in logs:
            # route output through the logger instead of a bare `print`
            # statement (which bypassed log formatting and is Python-3-fatal)
            logger.info(log)
    else:
        logger.info('\tNo logs')
+
+
@logs.command(name='delete',
              short_help='Delete execution logs')
@aria.argument('execution-id')
@aria.options.verbose()
@aria.pass_model_storage
@aria.pass_logger
def delete(execution_id, model_storage, logger):
    """Delete logs of an execution

    `EXECUTION_ID` is the execution logs to delete.
    """
    logger.info('Deleting logs for execution id {0}'.format(execution_id))
    # fetch every log row attached to this execution and remove them one by one
    execution_logs = model_storage.log.list(filters=dict(execution_fk=execution_id))
    for log_record in execution_logs:
        model_storage.log.delete(log_record)
    logger.info('Deleted logs for execution id {0}'.format(execution_id))
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/de2d57dd/aria/cli/commands/node_templates.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/node_templates.py b/aria/cli/commands/node_templates.py
new file mode 100644
index 0000000..5614aee
--- /dev/null
+++ b/aria/cli/commands/node_templates.py
@@ -0,0 +1,104 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ..table import print_data
+from .. import utils
+from ..cli import aria
+from ..exceptions import AriaCliError
+from ...storage.exceptions import StorageError
+
+
# Columns rendered by the node-templates ``show``/``list`` table output.
NODE_TEMPLATE_COLUMNS = ['id', 'name', 'description', 'service_template_name', 'type_name']
+
+
@aria.group(name='node-templates')
@aria.options.verbose()
def node_templates():
    """Handle a service template's node templates"""
+
+
@node_templates.command(name='show',
                        short_help='Show node information')
@aria.argument('node-template-id')
# @aria.options.service_template_name(required=True)
@aria.options.verbose()
@aria.pass_model_storage
@aria.pass_logger
def show(node_template_id, model_storage, logger):
    """Show information for a specific node of a specific service template

    `NODE_TEMPLATE_ID` is the node id to get information on.
    """
    logger.info('Showing node template {0}'.format(node_template_id))
    try:
        #TODO get node template of a specific service template instead?
        node_template = model_storage.node_template.get(node_template_id)
    except StorageError:
        raise AriaCliError('Node template {0} was not found'.format(node_template_id))

    print_data(NODE_TEMPLATE_COLUMNS, node_template.to_dict(), 'Node template:', max_width=50)

    # Properties declared on the template, if any
    logger.info('Node template properties:')
    if not node_template.properties:
        logger.info('\tNo properties')
    else:
        logger.info(utils.get_parameter_templates_as_string(node_template.properties))

    # Names of nodes instantiated from this template
    node_list = node_template.nodes.all()
    logger.info('Nodes:')
    if not node_list:
        logger.info('\tNo nodes')
    else:
        for instance in node_list:
            logger.info('\t{0}'.format(instance.name))
+
+
@node_templates.command(name='list',
                        short_help='List node templates for a service template')
@aria.options.service_template_name()
@aria.options.sort_by('service_template_name')
@aria.options.descending
@aria.options.verbose()
@aria.pass_model_storage
@aria.pass_logger
def list(service_template_name, sort_by, descending, model_storage, logger):
    """List node templates

    If `SERVICE_TEMPLATE_NAME` is provided, list nodes for that service template.
    Otherwise, list node templates for all service templates.
    """
    if not service_template_name:
        logger.info('Listing all node templates...')
        filters = {}
    else:
        logger.info('Listing node templates for service template {0}...'.format(
            service_template_name))
        try:
            service_template = model_storage.service_template.get_by_name(service_template_name)
        except StorageError:
            raise AriaCliError('Service template {0} does not exist'.format(service_template_name))
        filters = dict(service_template=service_template)

    sort_spec = utils.storage_sort_param(sort_by, descending)
    templates = [record.to_dict()
                 for record in model_storage.node_template.list(filters=filters, sort=sort_spec)]

    print_data(NODE_TEMPLATE_COLUMNS, templates, 'Node templates:')
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/de2d57dd/aria/cli/commands/nodes.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/nodes.py b/aria/cli/commands/nodes.py
new file mode 100644
index 0000000..f38c917
--- /dev/null
+++ b/aria/cli/commands/nodes.py
@@ -0,0 +1,96 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import utils
+from ..cli import aria
+from ..table import print_data
+from ..exceptions import AriaCliError
+from ...storage.exceptions import StorageError
+
+
# Columns rendered by the nodes ``show``/``list`` table output.
NODE_COLUMNS = ['id', 'name', 'service_name', 'node_template_name', 'state']
+
+
@aria.group(name='nodes')
@aria.options.verbose()
def nodes():
    """Handle a service's nodes"""
+
+
@nodes.command(name='show',
               short_help='Show node information')
# Dash-style argument name, consistent with every other command in this
# package ('node-template-id', 'plugin-id', 'execution-id'); click maps it
# to the ``node_id`` parameter either way.
@aria.argument('node-id')
@aria.options.verbose()
@aria.pass_model_storage
@aria.pass_logger
def show(node_id, model_storage, logger):
    """Show information for a specific node

    `NODE_ID` is the id of the node to get information on.
    """
    logger.info('Showing node {0}'.format(node_id))
    try:
        node = model_storage.node.get(node_id)
    except StorageError:
        raise AriaCliError('Node {0} not found'.format(node_id))

    print_data(NODE_COLUMNS, node.to_dict(), 'Node:', 50)

    # print node attributes (the node's runtime properties)
    logger.info('Node attributes:')
    if node.runtime_properties:
        for prop_name, prop_value in node.runtime_properties.iteritems():
            logger.info('\t{0}: {1}'.format(prop_name, prop_value))
    else:
        logger.info('\tNo attributes')
    logger.info('')
+
+
@nodes.command(name='list',
               short_help='List node for a service')
@aria.options.service_name(required=False)
@aria.options.sort_by('service_name')
@aria.options.descending
@aria.options.verbose()
@aria.pass_model_storage
@aria.pass_logger
def list(service_name,
         sort_by,
         descending,
         model_storage,
         logger):
    """List nodes

    If `SERVICE_NAME` is provided, list nodes for that service.
    Otherwise, list nodes for all services.
    """
    if not service_name:
        logger.info('Listing all nodes...')
        filters = {}
    else:
        logger.info('Listing nodes for service {0}...'.format(service_name))
        try:
            service = model_storage.service.get_by_name(service_name)
        except StorageError:
            raise AriaCliError('Service {0} does not exist'.format(service_name))
        filters = dict(service=service)

    sort_spec = utils.storage_sort_param(sort_by, descending)
    node_dicts = [record.to_dict()
                  for record in model_storage.node.list(filters=filters, sort=sort_spec)]

    print_data(NODE_COLUMNS, node_dicts, 'Nodes:')
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/de2d57dd/aria/cli/commands/plugins.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/plugins.py b/aria/cli/commands/plugins.py
new file mode 100644
index 0000000..d31aa99
--- /dev/null
+++ b/aria/cli/commands/plugins.py
@@ -0,0 +1,145 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import tarfile
+
+from ..table import print_data
+from ..cli import helptexts, aria
+from ..exceptions import AriaCliError
+from ..utils import storage_sort_param
+
+
# Plugin fields rendered in the CLI tables.
PLUGIN_COLUMNS = ['id', 'package_name', 'package_version', 'distribution',
                  'supported_platform', 'distribution_release', 'uploaded_at']
# Plugin fields stripped from responses before display (too verbose for the CLI).
EXCLUDED_COLUMNS = ['archive_name', 'distribution_version', 'excluded_wheels',
                    'package_source', 'supported_py_versions', 'wheels']
+
+
@aria.group(name='plugins')
@aria.options.verbose()
def plugins():
    """Handle plugins"""
+
+
@plugins.command(name='validate',
                 short_help='Validate a plugin')
@aria.argument('plugin-path')
@aria.options.verbose()
@aria.pass_logger
def validate(plugin_path, logger):
    """Validate a plugin

    Checks that the plugin's archive is not corrupted: a valid plugin is
    a wagon (http://github.com/cloudify-cosmo/wagon) in the tar.gz format
    (the suffix may also be .wgn).

    `PLUGIN_PATH` is the path to wagon archive to validate.
    """
    logger.info('Validating plugin {0}...'.format(plugin_path))

    if not tarfile.is_tarfile(plugin_path):
        raise AriaCliError(
            'Archive {0} is of an unsupported type. Only '
            'tar.gz/wgn is allowed'.format(plugin_path))

    archive = tarfile.open(plugin_path)
    try:
        members = archive.getmembers()
        # A wagon archive keeps package.json under its top-level directory.
        expected_metadata = "{0}/{1}".format(members[0].name, 'package.json')
        # TODO: Find a better way to validate a plugin.
        try:
            archive.getmember(expected_metadata)
        except KeyError:
            raise AriaCliError(
                'Failed to validate plugin {0} '
                '(package.json was not found in archive)'.format(plugin_path))
    finally:
        archive.close()

    logger.info('Plugin validated successfully')
+
+
@plugins.command(name='delete',
                 short_help='Delete a plugin')
@aria.argument('plugin-id')
@aria.options.verbose()
@aria.pass_model_storage
@aria.pass_logger
def delete(plugin_id, model_storage, logger):
    """Delete a plugin

    `PLUGIN_ID` is the id of the plugin to delete.
    """
    logger.info('Deleting plugin {0}...'.format(plugin_id))
    model_storage.plugin.delete(plugin_id=plugin_id)
    logger.info('Plugin deleted')
+
+
@plugins.command(name='install',
                 short_help='Install a plugin')
@aria.argument('plugin-path')
@aria.options.verbose()
@aria.pass_context
# NOTE(review): the original body referenced an undefined ``plugin_manager``
# name (guaranteed NameError). Inject it the same way model_storage/logger
# are injected elsewhere in this package -- confirm ``pass_plugin_manager``
# exists in cli/aria.py and matches this parameter position.
@aria.pass_plugin_manager
@aria.pass_logger
def install(ctx, plugin_manager, plugin_path, logger):
    """Install a plugin

    `PLUGIN_PATH` is the path to wagon archive to install.
    """
    # Validate the archive before attempting installation.
    ctx.invoke(validate, plugin_path=plugin_path)
    logger.info('Installing plugin {0}...'.format(plugin_path))
    plugin = plugin_manager.install(plugin_path)
    logger.info("Plugin installed. The plugin's id is {0}".format(plugin.id))
+
+
@plugins.command(name='show',
                 short_help='show plugin information')
@aria.argument('plugin-id')
@aria.options.verbose()
@aria.pass_model_storage
@aria.pass_logger
def show(plugin_id, model_storage, logger):
    """Show information for a specific plugin

    `PLUGIN_ID` is the id of the plugin to show information on.
    """
    logger.info('Showing plugin {0}...'.format(plugin_id))
    plugin = model_storage.plugin.get(plugin_id)
    # Convert the model object to a dict before filtering/printing:
    # _transform_plugin_response removes keys via dict.pop(), which model
    # objects don't support -- consistent with how the nodes/node-templates
    # commands call .to_dict() before print_data().
    plugin_dict = plugin.to_dict()
    _transform_plugin_response(plugin_dict)
    print_data(PLUGIN_COLUMNS, plugin_dict, 'Plugin:')
+
+
@plugins.command(name='list',
                 short_help='List plugins')
@aria.options.sort_by('uploaded_at')
@aria.options.descending
@aria.options.verbose()
@aria.pass_model_storage
@aria.pass_logger
def list(sort_by, descending, model_storage, logger):
    """List all plugins on the manager
    """
    logger.info('Listing all plugins...')
    # Convert model objects to dicts before filtering/printing:
    # _transform_plugin_response removes keys via dict.pop(), which model
    # objects don't support -- consistent with the other list commands,
    # which call .to_dict() before print_data().
    plugins_list = [plugin.to_dict() for plugin in model_storage.plugin.list(
        sort=storage_sort_param(sort_by, descending))]
    for plugin in plugins_list:
        _transform_plugin_response(plugin)
    print_data(PLUGIN_COLUMNS, plugins_list, 'Plugins:')
+
+
def _transform_plugin_response(plugin):
    """Strip, in place, any columns that shouldn't be displayed in the CLI."""
    for excluded in EXCLUDED_COLUMNS:
        plugin.pop(excluded, None)