Posted to commits@ariatosca.apache.org by em...@apache.org on 2017/10/31 18:52:05 UTC

[8/9] incubator-ariatosca git commit: ARIA-1 Parser test suite

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/orchestrator/topology/template_handler.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/topology/template_handler.py b/aria/orchestrator/topology/template_handler.py
index a84a988..3b1948a 100644
--- a/aria/orchestrator/topology/template_handler.py
+++ b/aria/orchestrator/topology/template_handler.py
@@ -69,7 +69,7 @@ class ServiceTemplate(common.TemplateHandlerBase):
                     plugin = plugin_specification.plugin
                     service.plugins[plugin.name] = plugin
                 else:
-                    self._topology.report('specified plugin not found: {0}'.format(
+                    self._topology.report(u'specified plugin not found: {0}'.format(
                         plugin_specification.name), level=self._topology.Issue.EXTERNAL)
         service.meta_data = self._topology.instantiate(self._model.meta_data)
 
@@ -108,17 +108,18 @@ class ServiceTemplate(common.TemplateHandlerBase):
     def _scaling(self, node_template):
         scaling = node_template.scaling
 
-        if any([scaling['min_instances'] < 0,
+        if any((scaling['min_instances'] < 0,
                 scaling['max_instances'] < scaling['min_instances'],
                 scaling['max_instances'] < 0,
 
                 scaling['default_instances'] < 0,
                 scaling['default_instances'] < scaling['min_instances'],
                 scaling['default_instances'] > scaling['max_instances']
-               ]):
+               )):
             self._topology.report(
-                'invalid scaling parameters for node template "{0}": min={min_instances}, max='
-                '{max_instances}, default={default_instances}'.format(self._model.name, **scaling),
+                u'invalid scaling parameters for node template "{0}": min={min_instances}, max='
+                u'{max_instances}, default={default_instances}'.format(node_template.name,
+                                                                       **scaling),
                 level=self._topology.Issue.BETWEEN_TYPES)
 
         return scaling
@@ -150,18 +151,18 @@ class ArtifactTemplate(common.TemplateHandlerBase):
         if self._model.description:
             out_stream.write(out_stream.meta_style(self._model.description))
         with out_stream.indent():
-            out_stream.write('Artifact type: {0}'.format(out_stream.type_style(
+            out_stream.write(u'Artifact type: {0}'.format(out_stream.type_style(
                 self._model.type.name)))
-            out_stream.write('Source path: {0}'.format(out_stream.literal_style(
+            out_stream.write(u'Source path: {0}'.format(out_stream.literal_style(
                 self._model.source_path)))
             if self._model.target_path is not None:
-                out_stream.write('Target path: {0}'.format(out_stream.literal_style(
+                out_stream.write(u'Target path: {0}'.format(out_stream.literal_style(
                     self._model.target_path)))
             if self._model.repository_url is not None:
-                out_stream.write('Repository URL: {0}'.format(
+                out_stream.write(u'Repository URL: {0}'.format(
                     out_stream.literal_style(self._model.repository_url)))
             if self._model.repository_credential:
-                out_stream.write('Repository credential: {0}'.format(
+                out_stream.write(u'Repository credential: {0}'.format(
                     out_stream.literal_style(self._model.repository_credential)))
             self._topology.dump(self._model.properties, out_stream, title='Properties')
 
@@ -189,17 +190,17 @@ class CapabilityTemplate(common.TemplateHandlerBase):
         if self._model.description:
             out_stream.write(out_stream.meta_style(self._model.description))
         with out_stream.indent():
-            out_stream.write('Type: {0}'.format(out_stream.type_style(self._model.type.name)))
+            out_stream.write(u'Type: {0}'.format(out_stream.type_style(self._model.type.name)))
             out_stream.write(
-                'Occurrences: {0:d}{1}'.format(
+                u'Occurrences: {0:d}{1}'.format(
                     self._model.min_occurrences or 0,
-                    ' to {0:d}'.format(self._model.max_occurrences)
+                    u' to {0:d}'.format(self._model.max_occurrences)
                     if self._model.max_occurrences is not None
                     else ' or more'))
             if self._model.valid_source_node_types:
-                out_stream.write('Valid source node types: {0}'.format(
-                    ', '.join((str(out_stream.type_style(v.name))
-                               for v in self._model.valid_source_node_types))))
+                out_stream.write(u'Valid source node types: {0}'.format(
+                    u', '.join((str(out_stream.type_style(v.name))
+                                for v in self._model.valid_source_node_types))))
             self._topology.dump(self._model.properties, out_stream, title='Properties')
 
     def coerce(self, **kwargs):
@@ -228,19 +229,19 @@ class RequirementTemplate(common.TemplateHandlerBase):
             out_stream.write('Requirement:')
         with out_stream.indent():
             if self._model.target_node_type is not None:
-                out_stream.write('Target node type: {0}'.format(
+                out_stream.write(u'Target node type: {0}'.format(
                     out_stream.type_style(self._model.target_node_type.name)))
             elif self._model.target_node_template is not None:
-                out_stream.write('Target node template: {0}'.format(
+                out_stream.write(u'Target node template: {0}'.format(
                     out_stream.node_style(self._model.target_node_template.name)))
             if self._model.target_capability_type is not None:
-                out_stream.write('Target capability type: {0}'.format(
+                out_stream.write(u'Target capability type: {0}'.format(
                     out_stream.type_style(self._model.target_capability_type.name)))
             elif self._model.target_capability_name is not None:
-                out_stream.write('Target capability name: {0}'.format(
+                out_stream.write(u'Target capability name: {0}'.format(
                     out_stream.node_style(self._model.target_capability_name)))
             if self._model.target_node_template_constraints:
-                out_stream.write('Target node template constraints:')
+                out_stream.write(u'Target node template constraints:')
                 with out_stream.indent():
                     for constraint in self._model.target_node_template_constraints:
                         out_stream.write(out_stream.literal_style(constraint))
@@ -261,16 +262,16 @@ class RequirementTemplate(common.TemplateHandlerBase):
 
 class GroupTemplate(common.TemplateHandlerBase):
     def dump(self, out_stream):
-        out_stream.write('Group template: {0}'.format(out_stream.node_style(self._model.name)))
+        out_stream.write(u'Group template: {0}'.format(out_stream.node_style(self._model.name)))
         if self._model.description:
             out_stream.write(out_stream.meta_style(self._model.description))
         with out_stream.indent():
-            out_stream.write('Type: {0}'.format(out_stream.type_style(self._model.type.name)))
+            out_stream.write(u'Type: {0}'.format(out_stream.type_style(self._model.type.name)))
             self._topology.dump(self._model.properties, out_stream, title='Properties')
             self._topology.dump(self._model.interface_templates, out_stream,
                                 title='Interface Templates')
             if self._model.node_templates:
-                out_stream.write('Member node templates: {0}'.format(', '.join(
+                out_stream.write(u'Member node templates: {0}'.format(u', '.join(
                     (str(out_stream.node_style(v.name)) for v in self._model.node_templates))))
 
     def coerce(self, **kwargs):
@@ -303,7 +304,7 @@ class InterfaceTemplate(common.TemplateHandlerBase):
         if self._model.description:
             out_stream.write(out_stream.meta_style(self._model.description))
         with out_stream.indent():
-            out_stream.write('Interface type: {0}'.format(out_stream.type_style(
+            out_stream.write(u'Interface type: {0}'.format(out_stream.type_style(
                 self._model.type.name)))
             self._topology.dump(self._model.inputs, out_stream, title='Inputs')
             self._topology.dump(self._model.operation_templates, out_stream,
@@ -332,11 +333,11 @@ class InterfaceTemplate(common.TemplateHandlerBase):
 
 class NodeTemplate(common.TemplateHandlerBase):
     def dump(self, out_stream):
-        out_stream.write('Node template: {0}'.format(out_stream.node_style(self._model.name)))
+        out_stream.write(u'Node template: {0}'.format(out_stream.node_style(self._model.name)))
         with out_stream.indent():
             if self._model.description:
                 out_stream.write(out_stream.meta_style(self._model.description))
-            out_stream.write('Type: {0}'.format(out_stream.type_style(self._model.type.name)))
+            out_stream.write(u'Type: {0}'.format(out_stream.type_style(self._model.type.name)))
             self._topology.dump(self._model.properties, out_stream, title='Properties')
             self._topology.dump(self._model.attributes, out_stream, title='Attributes')
             self._topology.dump(
@@ -391,17 +392,17 @@ class NodeTemplate(common.TemplateHandlerBase):
 
 class PolicyTemplate(common.TemplateHandlerBase):
     def dump(self, out_stream):
-        out_stream.write('Policy template: {0}'.format(out_stream.node_style(self._model.name)))
+        out_stream.write(u'Policy template: {0}'.format(out_stream.node_style(self._model.name)))
         if self._model.description:
             out_stream.write(out_stream.meta_style(self._model.description))
         with out_stream.indent():
-            out_stream.write('Type: {0}'.format(out_stream.type_style(self._model.type.name)))
+            out_stream.write(u'Type: {0}'.format(out_stream.type_style(self._model.type.name)))
             self._topology.dump(self._model.properties, out_stream, title='Properties')
             if self._model.node_templates:
-                out_stream.write('Target node templates: {0}'.format(', '.join(
+                out_stream.write(u'Target node templates: {0}'.format(u', '.join(
                     (str(out_stream.node_style(v.name)) for v in self._model.node_templates))))
             if self._model.group_templates:
-                out_stream.write('Target group templates: {0}'.format(', '.join(
+                out_stream.write(u'Target group templates: {0}'.format(u', '.join(
                     (str(out_stream.node_style(v.name)) for v in self._model.group_templates))))
 
     def coerce(self, **kwargs):
@@ -432,7 +433,7 @@ class SubstitutionTemplate(common.TemplateHandlerBase):
     def dump(self, out_stream):
         out_stream.write('Substitution template:')
         with out_stream.indent():
-            out_stream.write('Node type: {0}'.format(out_stream.type_style(
+            out_stream.write(u'Node type: {0}'.format(out_stream.type_style(
                 self._model.node_type.name)))
             self._topology.dump(self._model.mappings, out_stream, title='Mappings')
 
@@ -453,7 +454,7 @@ class SubstitutionTemplateMapping(common.TemplateHandlerBase):
             node_template = self._model.capability_template.node_template
         else:
             node_template = self._model.requirement_template.node_template
-        out_stream.write('{0} -> {1}.{2}'.format(
+        out_stream.write(u'{0} -> {1}.{2}'.format(
             out_stream.node_style(self._model.name),
             out_stream.node_style(node_template.name),
             out_stream.node_style(self._model.capability_template.name
@@ -475,7 +476,7 @@ class SubstitutionTemplateMapping(common.TemplateHandlerBase):
         nodes = node_template.nodes
         if len(nodes) == 0:
             self._topology.report(
-                'mapping "{0}" refers to node template "{1}" but there are no node instances'.
+                u'mapping "{0}" refers to node template "{1}" but there are no node instances'.
                 format(self._model.mapped_name, self._model.node_template.name),
                 level=self._topology.Issue.BETWEEN_INSTANCES)
             return None
@@ -493,8 +494,8 @@ class SubstitutionTemplateMapping(common.TemplateHandlerBase):
     def validate(self, **_):
         if self._model.capability_template is None and self._model.requirement_template is None:
             self._topology.report(
-                'mapping "{0}" refers to neither capability nor a requirement '
-                'in node template: {1}'.format(
+                u'mapping "{0}" refers to neither capability nor a requirement '
+                u'in node template: {1}'.format(
                     self._model.name, formatting.safe_repr(self._model.node_template.name)),
                 level=self._topology.Issue.BETWEEN_TYPES)
 
@@ -502,10 +503,10 @@ class SubstitutionTemplateMapping(common.TemplateHandlerBase):
 class RelationshipTemplate(common.TemplateHandlerBase):
     def dump(self, out_stream):
         if self._model.type is not None:
-            out_stream.write('Relationship type: {0}'.format(out_stream.type_style(
+            out_stream.write(u'Relationship type: {0}'.format(out_stream.type_style(
                 self._model.type.name)))
         else:
-            out_stream.write('Relationship template: {0}'.format(
+            out_stream.write(u'Relationship template: {0}'.format(
                 out_stream.node_style(self._model.name)))
         if self._model.description:
             out_stream.write(out_stream.meta_style(self._model.description))
@@ -539,27 +540,27 @@ class OperationTemplate(common.TemplateHandlerBase):
             out_stream.write(out_stream.meta_style(self._model.description))
         with out_stream.indent():
             if self._model.implementation is not None:
-                out_stream.write('Implementation: {0}'.format(
+                out_stream.write(u'Implementation: {0}'.format(
                     out_stream.literal_style(self._model.implementation)))
             if self._model.dependencies:
-                out_stream.write('Dependencies: {0}'.format(', '.join(
+                out_stream.write(u'Dependencies: {0}'.format(u', '.join(
                     (str(out_stream.literal_style(v)) for v in self._model.dependencies))))
             self._topology.dump(self._model.inputs, out_stream, title='Inputs')
             if self._model.executor is not None:
-                out_stream.write('Executor: {0}'.format(
+                out_stream.write(u'Executor: {0}'.format(
                     out_stream.literal_style(self._model.executor)))
             if self._model.max_attempts is not None:
-                out_stream.write('Max attempts: {0}'.format(out_stream.literal_style(
+                out_stream.write(u'Max attempts: {0}'.format(out_stream.literal_style(
                     self._model.max_attempts)))
             if self._model.retry_interval is not None:
-                out_stream.write('Retry interval: {0}'.format(
+                out_stream.write(u'Retry interval: {0}'.format(
                     out_stream.literal_style(self._model.retry_interval)))
             if self._model.plugin_specification is not None:
-                out_stream.write('Plugin specification: {0}'.format(
+                out_stream.write(u'Plugin specification: {0}'.format(
                     out_stream.literal_style(self._model.plugin_specification.name)))
             self._topology.dump(self._model.configurations, out_stream, title='Configuration')
             if self._model.function is not None:
-                out_stream.write('Function: {0}'.format(out_stream.literal_style(
+                out_stream.write(u'Function: {0}'.format(out_stream.literal_style(
                     self._model.function)))
 
     def coerce(self, **kwargs):
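
A side note on the `_scaling` hunk above: the conditions are now passed to `any()` as a tuple rather than a list, the reported name is taken from `node_template` instead of `self._model`, and the message is built as a Unicode literal so non-ASCII template names survive formatting under Python 2. The following is a minimal, self-contained sketch of that bounds check; `validate_scaling` and its return value are illustrative only and not part of the ARIA API.

    def validate_scaling(scaling):
        """Return a list of problems with a scaling dict of min/max/default instances."""
        problems = []
        # any() over a tuple of conditions, mirroring the check in _scaling() above
        if any((scaling['min_instances'] < 0,
                scaling['max_instances'] < scaling['min_instances'],
                scaling['max_instances'] < 0,
                scaling['default_instances'] < 0,
                scaling['default_instances'] < scaling['min_instances'],
                scaling['default_instances'] > scaling['max_instances'])):
            problems.append(
                u'invalid scaling parameters: min={min_instances}, '
                u'max={max_instances}, default={default_instances}'.format(**scaling))
        return problems

    if __name__ == '__main__':
        print(validate_scaling({'min_instances': 1, 'max_instances': 0, 'default_instances': 1}))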

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/orchestrator/topology/topology.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/topology/topology.py b/aria/orchestrator/topology/topology.py
index f86c9dd..ef5322e 100644
--- a/aria/orchestrator/topology/topology.py
+++ b/aria/orchestrator/topology/topology.py
@@ -104,7 +104,7 @@ class Topology(issue.ReporterMixin):
 
         # if model is empty, no need to print out the section name
         if model and title:
-            out_stream.write('{0}:'.format(title))
+            out_stream.write(u'{0}:'.format(title))
 
         if isinstance(model, dict):
             if str(out_stream):
@@ -133,18 +133,18 @@ class Topology(issue.ReporterMixin):
     def _dump_graph_node(self, out_stream, node, capability=None):
         out_stream.write(out_stream.node_style(node.name))
         if capability is not None:
-            out_stream.write('{0} ({1})'.format(out_stream.property_style(capability.name),
-                                                out_stream.type_style(capability.type.name)))
+            out_stream.write(u'{0} ({1})'.format(out_stream.property_style(capability.name),
+                                                 out_stream.type_style(capability.type.name)))
         if node.outbound_relationships:
             with out_stream.indent():
                 for relationship_model in node.outbound_relationships:
                     styled_relationship_name = out_stream.property_style(relationship_model.name)
                     if relationship_model.type is not None:
-                        out_stream.write('-> {0} ({1})'.format(
+                        out_stream.write(u'-> {0} ({1})'.format(
                             styled_relationship_name,
                             out_stream.type_style(relationship_model.type.name)))
                     else:
-                        out_stream.write('-> {0}'.format(styled_relationship_name))
+                        out_stream.write(u'-> {0}'.format(styled_relationship_name))
                     with out_stream.indent(3):
                         self._dump_graph_node(out_stream,
                                               relationship_model.target_node,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/orchestrator/workflow_runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflow_runner.py b/aria/orchestrator/workflow_runner.py
index 4dbf29b..276fdba 100644
--- a/aria/orchestrator/workflow_runner.py
+++ b/aria/orchestrator/workflow_runner.py
@@ -152,14 +152,14 @@ class WorkflowRunner(object):
         if self._workflow_name not in self.service.workflows and \
                         self._workflow_name not in builtin.BUILTIN_WORKFLOWS:
             raise exceptions.UndeclaredWorkflowError(
-                'No workflow policy {0} declared in service {1}'
+                u'No workflow policy {0} declared in service {1}'
                 .format(self._workflow_name, self.service.name))
 
     def _validate_no_active_executions(self, execution):
         active_executions = [e for e in self.service.executions if e.is_active()]
         if active_executions:
             raise exceptions.ActiveExecutionsError(
-                "Can't start execution; Service {0} has an active execution with ID {1}"
+                u"Can't start execution; Service {0} has an active execution with ID {1}"
                 .format(self.service.name, active_executions[0].id))
 
     def _get_workflow_fn(self):
@@ -182,7 +182,7 @@ class WorkflowRunner(object):
             workflow_fn = import_fullname(workflow.function)
         except ImportError:
             raise exceptions.WorkflowImplementationNotFoundError(
-                'Could not find workflow {0} function at {1}'.format(
+                u'Could not find workflow {0} function at {1}'.format(
                     self._workflow_name, workflow.function))
 
         return workflow_fn

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/orchestrator/workflows/api/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task.py b/aria/orchestrator/workflows/api/task.py
index 6ce4a00..67adc0b 100644
--- a/aria/orchestrator/workflows/api/task.py
+++ b/aria/orchestrator/workflows/api/task.py
@@ -78,7 +78,7 @@ class OperationTask(BaseTask):
     :vartype retry_interval: float
     """
 
-    NAME_FORMAT = '{interface}:{operation}@{type}:{name}'
+    NAME_FORMAT = u'{interface}:{operation}@{type}:{name}'
 
     def __init__(self,
                  actor,
@@ -112,8 +112,8 @@ class OperationTask(BaseTask):
         # interface/operation.
         if not has_operation(actor, interface_name, operation_name):
             raise exceptions.OperationNotFoundException(
-                'Could not find operation "{operation_name}" on interface '
-                '"{interface_name}" for {actor_type} "{actor.name}"'.format(
+                u'Could not find operation "{operation_name}" on interface '
+                u'"{interface_name}" for {actor_type} "{actor.name}"'.format(
                     operation_name=operation_name,
                     interface_name=interface_name,
                     actor_type=type(actor).__name__.lower(),
@@ -149,8 +149,8 @@ class OperationTask(BaseTask):
         elif isinstance(actor, models.Relationship):
             self._context_cls = context.operation.RelationshipOperationContext
         else:
-            raise exceptions.TaskCreationException('Could not create valid context for '
-                                                   '{actor.__class__}'.format(actor=actor))
+            raise exceptions.TaskCreationException(u'Could not create valid context for '
+                                                   u'{actor.__class__}'.format(actor=actor))
 
     def __repr__(self):
         return self.name
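
The `NAME_FORMAT` constant and the exception message above both rely on keyword-style `str.format()` fields, including attribute access inside a replacement field (`{actor.name}`). A small sketch of that usage, with a hypothetical `Actor` class standing in for ARIA's node and relationship models:

    class Actor(object):
        def __init__(self, name):
            self.name = name

    NAME_FORMAT = u'{interface}:{operation}@{type}:{name}'

    def task_name(actor, interface_name, operation_name):
        # keyword fields; 'type' is derived from the actor's class, as in OperationTask
        return NAME_FORMAT.format(interface=interface_name,
                                  operation=operation_name,
                                  type=type(actor).__name__.lower(),
                                  name=actor.name)

    if __name__ == '__main__':
        print(task_name(Actor(u'web_server'), u'Standard', u'create'))
        # -> Standard:create@actor:web_server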

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/orchestrator/workflows/api/task_graph.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task_graph.py b/aria/orchestrator/workflows/api/task_graph.py
index 900a0d1..20bf1bd 100644
--- a/aria/orchestrator/workflows/api/task_graph.py
+++ b/aria/orchestrator/workflows/api/task_graph.py
@@ -52,7 +52,7 @@ class TaskGraph(object):
         self._graph = DiGraph()
 
     def __repr__(self):
-        return '{name}(id={self._id}, name={self.name}, graph={self._graph!r})'.format(
+        return u'{name}(id={self._id}, name={self.name}, graph={self._graph!r})'.format(            # pylint: disable=redundant-keyword-arg
             name=self.__class__.__name__, self=self)
 
     @property
@@ -91,7 +91,7 @@ class TaskGraph(object):
          ``dependent_task`` is not in the graph
         """
         if not self.has_tasks(dependent_task):
-            raise TaskNotInGraphError('Task id: {0}'.format(dependent_task.id))
+            raise TaskNotInGraphError(u'Task id: {0}'.format(dependent_task.id))
         for _, dependency_id in self._graph.out_edges_iter(dependent_task.id):
             yield self.get_task(dependency_id)
 
@@ -104,7 +104,7 @@ class TaskGraph(object):
          ``dependency_task`` is not in the graph
         """
         if not self.has_tasks(dependency_task):
-            raise TaskNotInGraphError('Task id: {0}'.format(dependency_task.id))
+            raise TaskNotInGraphError(u'Task id: {0}'.format(dependency_task.id))
         for dependent_id, _ in self._graph.in_edges_iter(dependency_task.id):
             yield self.get_task(dependent_id)
 
@@ -119,7 +119,7 @@ class TaskGraph(object):
          the graph with the given ID
         """
         if not self._graph.has_node(task_id):
-            raise TaskNotInGraphError('Task id: {0}'.format(task_id))
+            raise TaskNotInGraphError(u'Task id: {0}'.format(task_id))
         data = self._graph.node[task_id]
         return data['task']
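
The pattern in these hunks is "check membership first, raise `TaskNotInGraphError` with a Unicode message, otherwise yield from the graph". A much-reduced sketch of the same idea over a plain adjacency dict (the real class wraps a networkx `DiGraph`; `TinyTaskGraph` is illustrative only):

    class TaskNotInGraphError(Exception):
        pass

    class TinyTaskGraph(object):
        def __init__(self):
            self._edges = {}   # task_id -> set of dependency task_ids

        def add_dependency(self, dependent_id, dependency_id):
            self._edges.setdefault(dependent_id, set()).add(dependency_id)
            self._edges.setdefault(dependency_id, set())

        def get_dependencies(self, dependent_id):
            if dependent_id not in self._edges:
                raise TaskNotInGraphError(u'Task id: {0}'.format(dependent_id))
            for dependency_id in self._edges[dependent_id]:
                yield dependency_id

    if __name__ == '__main__':
        graph = TinyTaskGraph()
        graph.add_dependency('configure', 'create')
        print(list(graph.get_dependencies('configure')))   # -> ['create']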
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/orchestrator/workflows/builtin/execute_operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/execute_operation.py b/aria/orchestrator/workflows/builtin/execute_operation.py
index 949f864..256927c 100644
--- a/aria/orchestrator/workflows/builtin/execute_operation.py
+++ b/aria/orchestrator/workflows/builtin/execute_operation.py
@@ -60,7 +60,7 @@ def execute_operation(
         for node in ctx.nodes:
             if node.id not in filtered_node_ids:
                 subgraphs[node.id] = ctx.task_graph(
-                    name='execute_operation_stub_{0}'.format(node.id))
+                    name=u'execute_operation_stub_{0}'.format(node.id))
 
     # registering actual tasks to sequences
     for node in filtered_nodes:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/orchestrator/workflows/core/engine.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/engine.py b/aria/orchestrator/workflows/core/engine.py
index 0ec3cd8..71ef13a 100644
--- a/aria/orchestrator/workflows/core/engine.py
+++ b/aria/orchestrator/workflows/core/engine.py
@@ -28,7 +28,7 @@ from aria.orchestrator.context import operation
 from .. import exceptions
 from ..executor.base import StubTaskExecutor
 # Import required so all signals are registered
-from . import events_handler  # pylint: disable=unused-import
+from . import events_handler                                                                        # pylint: disable=unused-import
 
 
 class Engine(logger.LoggerMixin):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/orchestrator/workflows/core/events_handler.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/events_handler.py b/aria/orchestrator/workflows/core/events_handler.py
index 473475e..067d0c3 100644
--- a/aria/orchestrator/workflows/core/events_handler.py
+++ b/aria/orchestrator/workflows/core/events_handler.py
@@ -166,5 +166,5 @@ def _update_node_state_if_necessary(ctx, is_transitional=False):
 
 def _log_tried_to_cancel_execution_but_it_already_ended(workflow_context, status):
     workflow_context.logger.info(
-        "'{workflow_name}' workflow execution {status} before the cancel request"
-        "was fully processed".format(workflow_name=workflow_context.workflow_name, status=status))
+        u"'{workflow_name}' workflow execution {status} before the cancel request"
+        u"was fully processed".format(workflow_name=workflow_context.workflow_name, status=status))
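
The log message above is built from two adjacent string literals, which Python joins with no separator; any space between the pieces has to live inside one of the literals. A short demonstration of the difference (the workflow name and status values are made up):

    no_space = (u"'{workflow_name}' workflow execution {status} before the cancel request"
                u"was fully processed")    # joins as "...requestwas..."
    with_space = (u"'{workflow_name}' workflow execution {status} before the cancel request "
                  u"was fully processed")  # trailing space keeps the words separated

    if __name__ == '__main__':
        print(no_space.format(workflow_name=u'install', status=u'ended'))
        print(with_space.format(workflow_name=u'install', status=u'ended'))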

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/orchestrator/workflows/core/graph_compiler.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/graph_compiler.py b/aria/orchestrator/workflows/core/graph_compiler.py
index 81543d5..83fbfea 100644
--- a/aria/orchestrator/workflows/core/graph_compiler.py
+++ b/aria/orchestrator/workflows/core/graph_compiler.py
@@ -90,11 +90,11 @@ class GraphCompiler(object):
 
     @staticmethod
     def _start_graph_suffix(api_id):
-        return '{0}-Start'.format(api_id)
+        return u'{0}-Start'.format(api_id)
 
     @staticmethod
     def _end_graph_suffix(api_id):
-        return '{0}-End'.format(api_id)
+        return u'{0}-End'.format(api_id)
 
     @staticmethod
     def _get_non_dependent_tasks(execution):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/orchestrator/workflows/events_logging.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/events_logging.py b/aria/orchestrator/workflows/events_logging.py
index 9eee1e1..1099091 100644
--- a/aria/orchestrator/workflows/events_logging.py
+++ b/aria/orchestrator/workflows/events_logging.py
@@ -24,7 +24,7 @@ from ... import modeling
 
 def _get_task_name(task):
     if isinstance(task.actor, modeling.model_bases.service_instance.RelationshipBase):
-        return '{source_node.name}->{target_node.name}'.format(
+        return u'{source_node.name}->{target_node.name}'.format(
             source_node=task.actor.source_node, target_node=task.actor.target_node)
     else:
         return task.actor.name
@@ -40,7 +40,7 @@ def _start_task_handler(ctx, **kwargs):
         suffix = 'has no implementation'
         logger = ctx.logger.debug
 
-    logger('{name} {task.interface_name}.{task.operation_name} {suffix}'.format(
+    logger(u'{name} {task.interface_name}.{task.operation_name} {suffix}'.format(
         name=_get_task_name(ctx.task), task=ctx.task, suffix=suffix))
 
 
@@ -48,38 +48,38 @@ def _start_task_handler(ctx, **kwargs):
 def _success_task_handler(ctx, **kwargs):
     if not ctx.task.function:
         return
-    ctx.logger.info('{name} {task.interface_name}.{task.operation_name} successful'
+    ctx.logger.info(u'{name} {task.interface_name}.{task.operation_name} successful'
                     .format(name=_get_task_name(ctx.task), task=ctx.task))
 
 
 @events.on_failure_task_signal.connect
 def _failure_operation_handler(ctx, traceback, **kwargs):
     ctx.logger.error(
-        '{name} {task.interface_name}.{task.operation_name} failed'
+        u'{name} {task.interface_name}.{task.operation_name} failed'
         .format(name=_get_task_name(ctx.task), task=ctx.task), extra=dict(traceback=traceback)
     )
 
 
 @events.start_workflow_signal.connect
 def _start_workflow_handler(context, **kwargs):
-    context.logger.info("Starting '{ctx.workflow_name}' workflow execution".format(ctx=context))
+    context.logger.info(u"Starting '{ctx.workflow_name}' workflow execution".format(ctx=context))
 
 
 @events.on_failure_workflow_signal.connect
 def _failure_workflow_handler(context, **kwargs):
-    context.logger.info("'{ctx.workflow_name}' workflow execution failed".format(ctx=context))
+    context.logger.info(u"'{ctx.workflow_name}' workflow execution failed".format(ctx=context))
 
 
 @events.on_success_workflow_signal.connect
 def _success_workflow_handler(context, **kwargs):
-    context.logger.info("'{ctx.workflow_name}' workflow execution succeeded".format(ctx=context))
+    context.logger.info(u"'{ctx.workflow_name}' workflow execution succeeded".format(ctx=context))
 
 
 @events.on_cancelled_workflow_signal.connect
 def _cancel_workflow_handler(context, **kwargs):
-    context.logger.info("'{ctx.workflow_name}' workflow execution canceled".format(ctx=context))
+    context.logger.info(u"'{ctx.workflow_name}' workflow execution canceled".format(ctx=context))
 
 
 @events.on_cancelling_workflow_signal.connect
 def _cancelling_workflow_handler(context, **kwargs):
-    context.logger.info("Cancelling '{ctx.workflow_name}' workflow execution".format(ctx=context))
+    context.logger.info(u"Cancelling '{ctx.workflow_name}' workflow execution".format(ctx=context))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/orchestrator/workflows/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/exceptions.py b/aria/orchestrator/workflows/exceptions.py
index 2a1d6b1..6fce81c 100644
--- a/aria/orchestrator/workflows/exceptions.py
+++ b/aria/orchestrator/workflows/exceptions.py
@@ -55,10 +55,10 @@ class ProcessException(ExecutorException):
         Describes the error in detail
         """
         return (
-            'Command "{error.command}" executed with an error.{0}'
-            'code: {error.return_code}{0}'
-            'error: {error.stderr}{0}'
-            'output: {error.stdout}'.format(os.linesep, error=self))
+            u'Command "{error.command}" executed with an error.{0}'
+            u'code: {error.return_code}{0}'
+            u'error: {error.stderr}{0}'
+            u'output: {error.stdout}'.format(os.linesep, error=self))
 
 
 class AriaEngineError(exceptions.AriaError):
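
The error template above passes `os.linesep` as the single positional argument and reuses it as `{0}` in every fragment of the implicitly concatenated string, while the named `error` argument supplies the attribute-access fields. A runnable sketch of the same layout, using a hypothetical `ProcessError` namedtuple in place of the real exception:

    import os
    from collections import namedtuple

    ProcessError = namedtuple('ProcessError', 'command return_code stderr stdout')

    TEMPLATE = (u'Command "{error.command}" executed with an error.{0}'
                u'code: {error.return_code}{0}'
                u'error: {error.stderr}{0}'
                u'output: {error.stdout}')

    if __name__ == '__main__':
        err = ProcessError(command='echo hi', return_code=1, stderr='boom', stdout='')
        print(TEMPLATE.format(os.linesep, error=err))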

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/orchestrator/workflows/executor/celery.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/celery.py b/aria/orchestrator/workflows/executor/celery.py
index a2b3513..aab84ec 100644
--- a/aria/orchestrator/workflows/executor/celery.py
+++ b/aria/orchestrator/workflows/executor/celery.py
@@ -89,7 +89,7 @@ class CeleryExecutor(BaseExecutor):
             exception = async_result.result
         except BaseException as e:
             exception = RuntimeError(
-                'Could not de-serialize exception of task {0} --> {1}: {2}'
+                u'Could not de-serialize exception of task {0} --> {1}: {2}'
                 .format(task.name, type(e).__name__, str(e)))
         self._task_failed(task, exception=exception)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/orchestrator/workflows/executor/dry.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/dry.py b/aria/orchestrator/workflows/executor/dry.py
index 9314e5d..bdb0eaf 100644
--- a/aria/orchestrator/workflows/executor/dry.py
+++ b/aria/orchestrator/workflows/executor/dry.py
@@ -22,7 +22,7 @@ from datetime import datetime
 from . import base
 
 
-class DryExecutor(base.BaseExecutor):                                                                    # pylint: disable=abstract-method
+class DryExecutor(base.BaseExecutor):                                                               # pylint: disable=abstract-method
     """
     Dry task executor: prints task information without causing any side effects.
     """
@@ -33,11 +33,11 @@ class DryExecutor(base.BaseExecutor):
             ctx.task.started_at = datetime.utcnow()
             ctx.task.status = ctx.task.STARTED
 
-            dry_msg = '<dry> {name} {task.interface_name}.{task.operation_name} {suffix}'
+            dry_msg = u'<dry> {name} {task.interface_name}.{task.operation_name} {suffix}'
             logger = ctx.logger.info if ctx.task.function else ctx.logger.debug
 
             if hasattr(ctx.task.actor, 'source_node'):
-                name = '{source_node.name}->{target_node.name}'.format(
+                name = u'{source_node.name}->{target_node.name}'.format(
                     source_node=ctx.task.actor.source_node, target_node=ctx.task.actor.target_node)
             else:
                 name = ctx.task.actor.name

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/orchestrator/workflows/executor/process.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py
index 185f15f..4143127 100644
--- a/aria/orchestrator/workflows/executor/process.py
+++ b/aria/orchestrator/workflows/executor/process.py
@@ -23,8 +23,8 @@ import os
 import sys
 
 # As part of the process executor implementation, subprocess are started with this module as their
-# entry point. We thus remove this module's directory from the python path if it happens to be
-# there
+# entry point. We thus remove this module's directory from the Python path if it happens to be
+# there.
 
 from collections import namedtuple
 
@@ -201,11 +201,11 @@ class ProcessExecutor(base.BaseExecutor):
                         break
                     request_handler = self._request_handlers.get(request_type)
                     if not request_handler:
-                        raise RuntimeError('Invalid request type: {0}'.format(request_type))
+                        raise RuntimeError(u'Invalid request type: {0}'.format(request_type))
                     task_id = request['task_id']
                     request_handler(task_id=task_id, request=request, response=response)
             except BaseException as e:
-                self.logger.debug('Error in process executor listener: {0}'.format(e))
+                self.logger.debug(u'Error in process executor listener: {0}'.format(e))
 
     @contextlib.contextmanager
     def _accept_request(self):
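
The listener loop above dispatches on the request type through a handler-registry dict and fails loudly on anything unknown. A minimal sketch of that dispatch, with hypothetical handler names that are not part of the process executor:

    class Listener(object):
        def __init__(self):
            self._request_handlers = {
                'started': self._handle_started,
                'succeeded': self._handle_succeeded,
            }

        def dispatch(self, request):
            request_type = request['type']
            request_handler = self._request_handlers.get(request_type)
            if not request_handler:
                raise RuntimeError(u'Invalid request type: {0}'.format(request_type))
            return request_handler(request)

        def _handle_started(self, request):
            return u'task {0} started'.format(request['task_id'])

        def _handle_succeeded(self, request):
            return u'task {0} succeeded'.format(request['task_id'])

    if __name__ == '__main__':
        listener = Listener()
        print(listener.dispatch({'type': 'started', 'task_id': 42}))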

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/orchestrator/workflows/executor/thread.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/thread.py b/aria/orchestrator/workflows/executor/thread.py
index 170620e..5786a04 100644
--- a/aria/orchestrator/workflows/executor/thread.py
+++ b/aria/orchestrator/workflows/executor/thread.py
@@ -43,7 +43,7 @@ class ThreadExecutor(BaseExecutor):
         self._queue = Queue.Queue()
         self._pool = []
         for i in range(pool_size):
-            name = 'ThreadExecutor-{index}'.format(index=i+1)
+            name = 'ThreadExecutor-{0:d}'.format(i+1)
             thread = threading.Thread(target=self._processor, name=name)
             thread.daemon = True
             thread.start()
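
The thread-name change above switches to the `:d` format spec, which requires an integer argument and therefore rejects accidental non-numeric values instead of silently embedding them. A tiny illustration (the function name is made up):

    def worker_name(index):
        return 'ThreadExecutor-{0:d}'.format(index)

    if __name__ == '__main__':
        print(worker_name(1))        # -> ThreadExecutor-1
        try:
            worker_name('1')         # a str raises because of ':d'
        except ValueError as e:
            print('rejected: {0}'.format(e))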

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/parser/consumption/context.py
----------------------------------------------------------------------
diff --git a/aria/parser/consumption/context.py b/aria/parser/consumption/context.py
index 9164984..91807a4 100644
--- a/aria/parser/consumption/context.py
+++ b/aria/parser/consumption/context.py
@@ -83,14 +83,14 @@ class ConsumptionContext(object):
         try:
             self.out.write(string)
         except UnicodeEncodeError:
-            self.out.write(string.encode('utf8'))
+            self.out.write(string.encode('utf-8'))
 
     def has_arg_switch(self, name):
-        name = '--%s' % name
+        name = '--{0}'.format(name)
         return name in self.args
 
     def get_arg_value(self, name, default=None):
-        name = '--%s=' % name
+        name = '--{0}='.format(name)
         for arg in self.args:
             if arg.startswith(name):
                 return arg[len(name):]
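
Two small patterns from this file: the `%`-formatting is migrated to `str.format()`, and `write()` falls back to a UTF-8 encode when the output stream rejects a Unicode string (a Python 2 oriented safeguard). A sketch under those assumptions; `arg_switch` and `safe_write` are illustrative names only:

    import sys

    def arg_switch(name):
        # old style was '--%s' % name
        return '--{0}'.format(name)

    def safe_write(stream, text):
        try:
            stream.write(text)
        except UnicodeEncodeError:
            stream.write(text.encode('utf-8'))

    if __name__ == '__main__':
        print(arg_switch('yaml'))          # -> --yaml
        safe_write(sys.stdout, u'h\xe9llo\n')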

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/parser/consumption/inputs.py
----------------------------------------------------------------------
diff --git a/aria/parser/consumption/inputs.py b/aria/parser/consumption/inputs.py
index fe7e192..55f6e92 100644
--- a/aria/parser/consumption/inputs.py
+++ b/aria/parser/consumption/inputs.py
@@ -46,7 +46,7 @@ class Inputs(Consumer):
 
         if not isinstance(inputs, dict):
             self.context.validation.report(
-                'Inputs consumer: inputs are not a dict: %s' % safe_repr(inputs))
+                u'Inputs consumer: inputs are not a dict: {0}'.format(safe_repr(inputs)))
             return
 
         for name, value in inputs.iteritems():

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/parser/consumption/presentation.py
----------------------------------------------------------------------
diff --git a/aria/parser/consumption/presentation.py b/aria/parser/consumption/presentation.py
index 542b3f0..5f09c4d 100644
--- a/aria/parser/consumption/presentation.py
+++ b/aria/parser/consumption/presentation.py
@@ -13,25 +13,26 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-from ...utils.threading import FixedThreadPoolExecutor
-from ...utils.formatting import json_dumps, yaml_dumps
+from ...utils.formatting import (json_dumps, yaml_dumps)
 from ..loading import UriLocation
-from ..reading import AlreadyReadException
 from ..presentation import PresenterNotFoundError
 from .consumer import Consumer
 
 
+PRESENTATION_CACHE = {}
+CANONICAL_LOCATION_CACHE = {}
+
+
 class Read(Consumer):
     """
     Reads the presentation, handling imports recursively.
 
-    It works by consuming a data source via appropriate :class:`~aria.parser.loading.Loader`,
+    It works by consuming a data source via appropriate :class:`~aria.parser.loading.Loader`,
     :class:`~aria.parser.reading.Reader`, and :class:`~aria.parser.presentation.Presenter`
     instances.
 
     It supports agnostic raw data composition for presenters that have
-    ``_get_import_locations`` and ``_merge_import``.
+    ``_get_import_cache`` and ``_merge_import``.
 
     To improve performance, loaders are called asynchronously on separate threads.
 
@@ -39,39 +40,25 @@ class Read(Consumer):
     cycle, for example if the agnostic raw data has dependencies that must also be parsed.
     """
 
-    def consume(self):
-        if self.context.presentation.location is None:
-            self.context.validation.report('Presentation consumer: missing location')
-            return
-
-        presenter = None
-        imported_presentations = None
+    def __init__(self, context):
+        super(Read, self).__init__(context)
+        self._cache = {}
 
-        executor = FixedThreadPoolExecutor(size=self.context.presentation.threads,
-                                           timeout=self.context.presentation.timeout)
-        executor.print_exceptions = self.context.presentation.print_exceptions
-        try:
-            presenter = self._present(self.context.presentation.location, None, None, executor)
-            executor.drain()
-
-            # Handle exceptions
-            for e in executor.exceptions:
-                self._handle_exception(e)
+    def consume(self):
+        # Present the main location and all imports recursively
+        main, results = self._present_all()
 
-            imported_presentations = executor.returns
-        finally:
-            executor.close()
+        # Merge presentations
+        main.merge(results, self.context)
 
-        # Merge imports
-        if (imported_presentations is not None) and hasattr(presenter, '_merge_import'):
-            for imported_presentation in imported_presentations:
-                okay = True
-                if hasattr(presenter, '_validate_import'):
-                    okay = presenter._validate_import(self.context, imported_presentation)
-                if okay:
-                    presenter._merge_import(imported_presentation)
+        # Cache merged presentations
+        if self.context.presentation.cache:
+            for result in results:
+                result.cache()
 
-        self.context.presentation.presenter = presenter
+        self.context.presentation.presenter = main.presentation
+        if main.canonical_location is not None:
+            self.context.presentation.location = main.canonical_location
 
     def dump(self):
         if self.context.has_arg_switch('yaml'):
@@ -86,52 +73,193 @@ class Read(Consumer):
             self.context.presentation.presenter._dump(self.context)
 
     def _handle_exception(self, e):
-        if isinstance(e, AlreadyReadException):
+        if isinstance(e, _Skip):
             return
         super(Read, self)._handle_exception(e)
 
-    def _present(self, location, origin_location, presenter_class, executor):
+    def _present_all(self):
+        location = self.context.presentation.location
+
+        if location is None:
+            self.context.validation.report('Read consumer: missing location')
+            return
+
+        executor = self.context.presentation.create_executor()
+        try:
+            # This call may recursively submit tasks to the executor if there are imports
+            main = self._present(location, None, None, executor)
+
+            # Wait for all tasks to complete
+            executor.drain()
+
+            # Handle exceptions
+            for e in executor.exceptions:
+                self._handle_exception(e)
+
+            results = executor.returns or []
+        finally:
+            executor.close()
+
+        results.insert(0, main)
+
+        return main, results
+
+    def _present(self, location, origin_canonical_location, origin_presenter_class, executor):
         # Link the context to this thread
         self.context.set_thread_local()
 
-        raw = self._read(location, origin_location)
+        # Canonicalize the location
+        if self.context.reading.reader is None:
+            loader, canonical_location = self._create_loader(location, origin_canonical_location)
+        else:
+            # If a reader is specified in the context then we skip loading
+            loader = None
+            canonical_location = location
+
+        # Skip self imports
+        if canonical_location == origin_canonical_location:
+            raise _Skip()
+
+        if self.context.presentation.cache:
+            # Is the presentation in the global cache?
+            try:
+                presentation = PRESENTATION_CACHE[canonical_location]
+                return _Result(presentation, canonical_location, origin_canonical_location)
+            except KeyError:
+                pass
+
+        try:
+            # Is the presentation in the local cache?
+            presentation = self._cache[canonical_location]
+            return _Result(presentation, canonical_location, origin_canonical_location)
+        except KeyError:
+            pass
+
+        # Create and cache new presentation
+        presentation = self._create_presentation(canonical_location, loader, origin_presenter_class)
+        self._cache[canonical_location] = presentation
 
+        # Submit imports to executor
+        if hasattr(presentation, '_get_import_locations'):
+            import_locations = presentation._get_import_locations(self.context)
+            if import_locations:
+                for import_location in import_locations:
+                    import_location = UriLocation(import_location)
+                    executor.submit(self._present, import_location, canonical_location,
+                                    presentation.__class__, executor)
+
+        return _Result(presentation, canonical_location, origin_canonical_location)
+
+    def _create_loader(self, location, origin_canonical_location):
+        loader = self.context.loading.loader_source.get_loader(self.context.loading, location,
+                                                               origin_canonical_location)
+
+        canonical_location = None
+
+        if origin_canonical_location is not None:
+            cache_key = (origin_canonical_location, location)
+            try:
+                canonical_location = CANONICAL_LOCATION_CACHE[cache_key]
+                return loader, canonical_location
+            except KeyError:
+                pass
+        else:
+            cache_key = None
+
+        canonical_location = loader.get_canonical_location()
+
+        # Because retrieving the canonical location can be costly, we will try to cache it
+        if cache_key is not None:
+            CANONICAL_LOCATION_CACHE[cache_key] = canonical_location
+
+        return loader, canonical_location
+
+    def _create_presentation(self, canonical_location, loader, default_presenter_class):
+        # The reader we specified in the context will override
+        reader = self.context.reading.reader
+
+        if reader is None:
+            # Read raw data from loader
+            reader = self.context.reading.reader_source.get_reader(self.context.reading,
+                                                                   canonical_location, loader)
+
+        raw = reader.read()
+
+        # Wrap raw data in presenter class
         if self.context.presentation.presenter_class is not None:
-            # The presenter class we specified in the context overrides everything
+            # The presenter class we specified in the context will override
             presenter_class = self.context.presentation.presenter_class
         else:
             try:
                 presenter_class = self.context.presentation.presenter_source.get_presenter(raw)
             except PresenterNotFoundError:
-                if presenter_class is None:
+                if default_presenter_class is None:
                     raise
-            # We'll use the presenter class we were given (from the presenter that imported us)
-            if presenter_class is None:
-                raise PresenterNotFoundError('presenter not found')
+                else:
+                    presenter_class = default_presenter_class
+
+        if presenter_class is None:
+            raise PresenterNotFoundError(u'presenter not found: {0}'.format(canonical_location))
 
         presentation = presenter_class(raw=raw)
 
-        if presentation is not None and hasattr(presentation, '_link_locators'):
+        if hasattr(presentation, '_link_locators'):
             presentation._link_locators()
 
-        # Submit imports to executor
-        if hasattr(presentation, '_get_import_locations'):
-            import_locations = presentation._get_import_locations(self.context)
-            if import_locations:
-                for import_location in import_locations:
-                    # The imports inherit the parent presenter class and use the current location as
-                    # their origin location
-                    import_location = UriLocation(import_location)
-                    executor.submit(self._present, import_location, location, presenter_class,
-                                    executor)
-
         return presentation
 
-    def _read(self, location, origin_location):
-        if self.context.reading.reader is not None:
-            return self.context.reading.reader.read()
-        loader = self.context.loading.loader_source.get_loader(self.context.loading, location,
-                                                               origin_location)
-        reader = self.context.reading.reader_source.get_reader(self.context.reading, location,
-                                                               loader)
-        return reader.read()
+
+class _Result(object):
+    def __init__(self, presentation, canonical_location, origin_canonical_location):
+        self.presentation = presentation
+        self.canonical_location = canonical_location
+        self.origin_canonical_location = origin_canonical_location
+        self.merged = False
+
+    def get_imports(self, results):
+        imports = []
+
+        def has_import(result):
+            for i in imports:
+                if i.canonical_location == result.canonical_location:
+                    return True
+            return False
+
+        for result in results:
+            if result.origin_canonical_location == self.canonical_location:
+                if not has_import(result):
+                    imports.append(result)
+        return imports
+
+    def merge(self, results, context):
+        # Make sure to only merge each presentation once
+        if self.merged:
+            return
+        self.merged = True
+        for result in results:
+            if result.presentation == self.presentation:
+                result.merged = True
+
+        for result in self.get_imports(results):
+            # Make sure import is merged
+            result.merge(results, context)
+
+            # Validate import
+            if hasattr(self.presentation, '_validate_import'):
+                if not self.presentation._validate_import(context, result.presentation):
+                    # _validate_import will report an issue if invalid
+                    continue
+
+            # Merge import
+            if hasattr(self.presentation, '_merge_import'):
+                self.presentation._merge_import(result.presentation)
+
+    def cache(self):
+        if not self.merged:
+            raise RuntimeError(u'Only merged presentations can be cached: {0}'
+                               .format(self.canonical_location))
+        PRESENTATION_CACHE[self.canonical_location] = self.presentation
+
+
+class _Skip(Exception):
+    pass
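
The rewritten Read consumer above introduces two layers of caching keyed by a canonical location (a process-wide `PRESENTATION_CACHE` and a per-consumer local cache) and skips self-imports via the private `_Skip` exception. A heavily reduced sketch of that lookup order, under the assumption that parsing is expensive and locations are hashable; `Reader`, `present` and `parse_document` here are placeholders, not ARIA classes:

    PRESENTATION_CACHE = {}            # canonical location -> presentation (process-wide)

    class _Skip(Exception):
        pass

    class Reader(object):
        def __init__(self):
            self._cache = {}           # per-run cache, used before results are published

        def present(self, canonical_location, origin_canonical_location, parse_document):
            if canonical_location == origin_canonical_location:
                raise _Skip()                      # a document importing itself
            if canonical_location in PRESENTATION_CACHE:
                return PRESENTATION_CACHE[canonical_location]
            if canonical_location in self._cache:
                return self._cache[canonical_location]
            presentation = parse_document(canonical_location)
            self._cache[canonical_location] = presentation
            return presentation

    if __name__ == '__main__':
        calls = []
        def parse_document(location):
            calls.append(location)
            return {'location': location}
        reader = Reader()
        reader.present('a.yaml', None, parse_document)
        reader.present('a.yaml', 'main.yaml', parse_document)   # served from the local cache
        print(calls)                                             # -> ['a.yaml']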

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/parser/consumption/validation.py
----------------------------------------------------------------------
diff --git a/aria/parser/consumption/validation.py b/aria/parser/consumption/validation.py
index a7bc3b8..dd145ce 100644
--- a/aria/parser/consumption/validation.py
+++ b/aria/parser/consumption/validation.py
@@ -24,7 +24,7 @@ class Validate(Consumer):
 
     def consume(self):
         if self.context.presentation.presenter is None:
-            self.context.validation.report('Validation consumer: missing presenter')
+            self.context.validation.report('Validate consumer: missing presenter')
             return
 
         self.context.presentation.presenter._validate(self.context)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/parser/loading/file.py
----------------------------------------------------------------------
diff --git a/aria/parser/loading/file.py b/aria/parser/loading/file.py
index a02bd69..f6fd284 100644
--- a/aria/parser/loading/file.py
+++ b/aria/parser/loading/file.py
@@ -38,27 +38,28 @@ class FileTextLoader(Loader):
             self._file = codecs.open(self.path, mode='r', encoding=self.encoding, buffering=1)
         except IOError as e:
             if e.errno == 2:
-                raise DocumentNotFoundException('file not found: "%s"' % self.path, cause=e)
+                raise DocumentNotFoundException(u'file not found: "{0}"'.format(self.path), cause=e)
             else:
-                raise LoaderException('file I/O error: "%s"' % self.path, cause=e)
+                raise LoaderException(u'file I/O error: "{0}"'.format(self.path), cause=e)
         except Exception as e:
-            raise LoaderException('file error: "%s"' % self.path, cause=e)
+            raise LoaderException(u'file error: "{0}"'.format(self.path), cause=e)
 
     def close(self):
         if self._file is not None:
             try:
                 self._file.close()
             except IOError as e:
-                raise LoaderException('file I/O error: "%s"' % self.path, cause=e)
+                raise LoaderException(u'file I/O error: "{0}"'.format(self.path), cause=e)
             except Exception as e:
-                raise LoaderException('file error: "%s"' % self.path, cause=e)
+                raise LoaderException(u'file error: "{0}"'.format(self.path), cause=e)
+            self._file = None
 
     def load(self):
         if self._file is not None:
             try:
                 return self._file.read()
             except IOError as e:
-                raise LoaderException('file I/O error: "%s"' % self.path, cause=e)
+                raise LoaderException(u'file I/O error: "{0}"'.format(self.path), cause=e)
             except Exception as e:
-                raise LoaderException('file error %s' % self.path, cause=e)
+                raise LoaderException(u'file error {0}'.format(self.path), cause=e)
         return None
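
The `close()` hunk above resets `self._file` to `None` after closing, which is what makes a second `close()` call, or a `load()` after closing, a harmless no-op. A small sketch of that idempotent-close pattern with a hypothetical `TextSource` class:

    import os
    import tempfile

    class TextSource(object):
        def __init__(self, path):
            self._file = open(path)

        def close(self):
            if self._file is not None:
                self._file.close()
                self._file = None      # a second close() becomes a no-op

        def load(self):
            if self._file is not None:
                return self._file.read()
            return None

    if __name__ == '__main__':
        handle, path = tempfile.mkstemp()
        os.write(handle, b'hello')
        os.close(handle)
        source = TextSource(path)
        print(source.load())
        source.close()
        source.close()                 # safe: already closed
        os.remove(path)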

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/parser/loading/literal.py
----------------------------------------------------------------------
diff --git a/aria/parser/loading/literal.py b/aria/parser/loading/literal.py
index 7865008..208ab53 100644
--- a/aria/parser/loading/literal.py
+++ b/aria/parser/loading/literal.py
@@ -29,3 +29,6 @@ class LiteralLoader(Loader):
 
     def load(self):
         return self.location.content
+
+    def get_canonical_location(self):
+        return self.location

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/parser/loading/loader.py
----------------------------------------------------------------------
diff --git a/aria/parser/loading/loader.py b/aria/parser/loading/loader.py
index e1abfbf..a1dc3c6 100644
--- a/aria/parser/loading/loader.py
+++ b/aria/parser/loading/loader.py
@@ -32,3 +32,6 @@ class Loader(object):
 
     def load(self):
         raise NotImplementedError
+
+    def get_canonical_location(self):                                                               # pylint: disable=no-self-use
+        return None
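
get_canonical_location() is now part of the Loader contract, with None as the default for
sources that have no addressable location. A hypothetical subclass sketch; the
EnvironmentTextLoader name and behaviour are invented purely for illustration:

    import os

    from aria.parser.loading.loader import Loader

    class EnvironmentTextLoader(Loader):
        """Loads document text from an environment variable (illustrative only)."""

        def __init__(self, variable_name):
            self.variable_name = variable_name

        def open(self):
            pass

        def close(self):
            pass

        def load(self):
            return os.environ.get(self.variable_name)

        # get_canonical_location() is inherited and returns None: an environment variable
        # has no canonical document location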

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/parser/loading/location.py
----------------------------------------------------------------------
diff --git a/aria/parser/loading/location.py b/aria/parser/loading/location.py
index 902e856..255f650 100644
--- a/aria/parser/loading/location.py
+++ b/aria/parser/loading/location.py
@@ -27,9 +27,6 @@ class Location(object):
     an appropriate :class:`~aria.parser.loading.Loader`.
     """
 
-    def is_equivalent(self, location):
-        raise NotImplementedError
-
     @property
     def prefix(self):
         return None
@@ -47,15 +44,12 @@ class UriLocation(Location):
     def __init__(self, uri):
         self.uri = uri
 
-    def is_equivalent(self, location):
-        return isinstance(location, UriLocation) and (location.uri == self.uri)
-
     @property
     def prefix(self):
         prefix = os.path.dirname(self.uri)
         if prefix and (as_file(prefix) is None):
-            # Yes, it's weird, but dirname handles URIs,
-            # too: http://stackoverflow.com/a/35616478/849021
+            # Yes, it's weird, but dirname handles URIs, too:
+            # http://stackoverflow.com/a/35616478/849021
             # We just need to massage it with a trailing slash
             prefix += '/'
         return prefix
@@ -63,6 +57,12 @@ class UriLocation(Location):
     def __str__(self):
         return self.uri
 
+    def __eq__(self, other):
+        return isinstance(other, UriLocation) and (other.uri == self.uri)
+
+    def __hash__(self):
+        return hash(self.uri)
+
 
 class LiteralLocation(Location):
     """
@@ -75,8 +75,11 @@ class LiteralLocation(Location):
         self.content = content
         self.name = name
 
-    def is_equivalent(self, location):
-        return isinstance(location, LiteralLocation) and (location.content == self.content)
-
     def __str__(self):
-        return '<%s>' % self.name
+        return u'<{0}>'.format(self.name)
+
+    def __eq__(self, other):
+        return isinstance(other, LiteralLocation) and (other.content == self.content)
+
+    def __hash__(self):
+        return hash(self.content)
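
Replacing is_equivalent() with real __eq__/__hash__ implementations means locations can be
used directly as set members or dictionary keys, for example to key a cache of already-loaded
presentations (the new 'cache' flag in PresentationContext below suggests such a use). A small
sketch; the file names are illustrative:

    from aria.parser.loading.location import UriLocation

    a = UriLocation('service-template.yaml')
    b = UriLocation('service-template.yaml')
    assert a == b and hash(a) == hash(b)   # same URI: equal, and hashes agree

    seen = set()
    seen.add(a)
    assert b in seen                       # deduplication works without an explicit helper

    assert a != UriLocation('other.yaml')  # different URIs are not equal
    assert a != 'service-template.yaml'    # nor is a plain string, per the isinstance() check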

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/parser/loading/request.py
----------------------------------------------------------------------
diff --git a/aria/parser/loading/request.py b/aria/parser/loading/request.py
index a809347..5f12055 100644
--- a/aria/parser/loading/request.py
+++ b/aria/parser/loading/request.py
@@ -57,19 +57,19 @@ class RequestLoader(Loader):
         try:
             self._response = SESSION.get(self.uri, headers=self.headers)
         except InvalidSchema as e:
-            raise DocumentNotFoundException('document not found: "%s"' % self.uri, cause=e)
+            raise DocumentNotFoundException(u'document not found: "{0}"'.format(self.uri), cause=e)
         except ConnectionError as e:
-            raise LoaderException('request connection error: "%s"' % self.uri, cause=e)
+            raise LoaderException(u'request connection error: "{0}"'.format(self.uri), cause=e)
         except Exception as e:
-            raise LoaderException('request error: "%s"' % self.uri, cause=e)
+            raise LoaderException(u'request error: "{0}"'.format(self.uri), cause=e)
 
         status = self._response.status_code
         if status == 404:
             self._response = None
-            raise DocumentNotFoundException('document not found: "%s"' % self.uri)
+            raise DocumentNotFoundException(u'document not found: "{0}"'.format(self.uri))
         elif status != 200:
             self._response = None
-            raise LoaderException('request error %d: "%s"' % (status, self.uri))
+            raise LoaderException(u'request error {0:d}: "{1}"'.format(status, self.uri))
 
 
 class RequestTextLoader(RequestLoader):
@@ -81,8 +81,8 @@ class RequestTextLoader(RequestLoader):
         if self._response is not None:
             try:
                 if self._response.encoding is None:
-                    self._response.encoding = 'utf8'
+                    self._response.encoding = 'utf-8'
                 return self._response.text
             except Exception as e:
-                raise LoaderException('request error: %s' % self.uri, cause=e)
+                raise LoaderException(u'request error: {0}'.format(self.uri), cause=e)
         return None
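
A usage sketch for the HTTP path, assuming the RequestTextLoader(context, uri) signature that
UriTextLoader passes below; the URL is illustrative and 'context' stands for the parser's
loading context, which this sketch does not construct:

    from aria.parser.loading.request import RequestTextLoader
    from aria.parser.loading.exceptions import DocumentNotFoundException

    loader = RequestTextLoader(context, 'http://example.org/service-template.yaml')
    try:
        loader.open()            # 404 raises DocumentNotFoundException; other non-200 codes
                                 # raise LoaderException
        text = loader.load()     # decoded text; the encoding now defaults to 'utf-8'
    except DocumentNotFoundException:
        text = None
    finally:
        loader.close()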

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/parser/loading/uri.py
----------------------------------------------------------------------
diff --git a/aria/parser/loading/uri.py b/aria/parser/loading/uri.py
index a5a18e6..83f5ced 100644
--- a/aria/parser/loading/uri.py
+++ b/aria/parser/loading/uri.py
@@ -20,6 +20,7 @@ from ...extension import parser
 from ...utils.collections import StrictList
 from ...utils.uris import as_file
 from .loader import Loader
+from .location import UriLocation
 from .file import FileTextLoader
 from .request import RequestTextLoader
 from .exceptions import DocumentNotFoundException
@@ -44,6 +45,7 @@ class UriTextLoader(Loader):
         self.location = location
         self._prefixes = StrictList(value_class=basestring)
         self._loader = None
+        self._canonical_location = None
 
         def add_prefix(prefix):
             if prefix and (prefix not in self._prefixes):
@@ -60,23 +62,27 @@ class UriTextLoader(Loader):
         add_prefixes(parser.uri_loader_prefix())
 
     def open(self):
-        try:
-            self._open(self.location.uri)
-            return
-        except DocumentNotFoundException:
-            # Try prefixes in order
-            for prefix in self._prefixes:
-                prefix_as_file = as_file(prefix)
-                if prefix_as_file is not None:
-                    uri = os.path.join(prefix_as_file, self.location.uri)
-                else:
-                    uri = urljoin(prefix, self.location.uri)
-                try:
-                    self._open(uri)
-                    return
-                except DocumentNotFoundException:
-                    pass
-        raise DocumentNotFoundException('document not found at URI: "%s"' % self.location)
+        if self._loader is not None:
+            self._loader.open()
+        else:
+            try:
+                self._open(self.location.uri)
+                return
+            except DocumentNotFoundException:
+                # Try prefixes in order
+                for prefix in self._prefixes:
+                    prefix_as_file = as_file(prefix)
+                    if prefix_as_file is not None:
+                        uri = os.path.join(prefix_as_file, self.location.uri)
+                    else:
+                        uri = urljoin(prefix, self.location.uri)
+                    try:
+                        self._open(uri)
+                        return
+                    except DocumentNotFoundException:
+                        pass
+            raise DocumentNotFoundException(u'document not found at URI: "{0}"'
+                                            .format(self.location))
 
     def close(self):
         if self._loader is not None:
@@ -85,6 +91,11 @@ class UriTextLoader(Loader):
     def load(self):
         return self._loader.load() if self._loader is not None else None
 
+    def get_canonical_location(self):
+        self.open()
+        self.close()
+        return self._canonical_location
+
     def _open(self, uri):
         the_file = as_file(uri)
         if the_file is not None:
@@ -94,4 +105,4 @@ class UriTextLoader(Loader):
             loader = RequestTextLoader(self.context, uri)
         loader.open() # might raise an exception
         self._loader = loader
-        self.location.uri = uri
+        self._canonical_location = UriLocation(uri)
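
Two behavioural changes are worth spelling out: open() is now re-entrant (a second call simply
re-opens the file or request loader that already resolved the document), and the resolved URI
is reported through get_canonical_location() as a fresh UriLocation instead of being written
back into the caller's location. A sketch, assuming a UriTextLoader(context, location)
constructor; 'context' stands for the parser's loading context with its search prefixes
already configured, and the paths are illustrative:

    from aria.parser.loading.uri import UriTextLoader
    from aria.parser.loading.location import UriLocation

    loader = UriTextLoader(context, UriLocation('definitions/types.yaml'))

    # Resolves the document (the URI itself first, then each prefix in order), records the
    # canonical UriLocation and closes again:
    canonical = loader.get_canonical_location()

    loader.open()      # reuses the loader found above instead of searching again
    try:
        text = loader.load()
    finally:
        loader.close()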

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/parser/presentation/__init__.py
----------------------------------------------------------------------
diff --git a/aria/parser/presentation/__init__.py b/aria/parser/presentation/__init__.py
index 5633e7b..aa5439c 100644
--- a/aria/parser/presentation/__init__.py
+++ b/aria/parser/presentation/__init__.py
@@ -69,6 +69,7 @@ Field validators
 .. autosummary::
    :nosignatures:
 
+   aria.parser.presentation.not_negative_validator
    aria.parser.presentation.type_validator
    aria.parser.presentation.list_type_validator
    aria.parser.presentation.list_length_validator
@@ -93,19 +94,19 @@ Utilities
    aria.parser.presentation.report_issue_for_circular_type_hierarchy
 """
 
-from .exceptions import PresenterException, PresenterNotFoundError
+from .exceptions import (PresenterException, PresenterNotFoundError)
 from .context import PresentationContext
 from .presenter import Presenter
-from .presentation import Value, PresentationBase, Presentation, AsIsPresentation
-from .source import PresenterSource, DefaultPresenterSource
-from .null import NULL, none_to_null, null_to_none
+from .presentation import (Value, PresentationBase, Presentation, AsIsPresentation)
+from .source import (PresenterSource, DefaultPresenterSource)
+from .null import (NULL, none_to_null, null_to_none)
 from .fields import (Field, has_fields, short_form_field, allow_unknown_fields, primitive_field,
                      primitive_list_field, primitive_dict_field, primitive_dict_unknown_fields,
                      object_field, object_list_field, object_dict_field,
                      object_sequenced_list_field, object_dict_unknown_fields, field_getter,
                      field_setter, field_validator)
-from .field_validators import (type_validator, list_type_validator, list_length_validator,
-                               derived_from_validator)
+from .field_validators import (not_negative_validator, type_validator, list_type_validator,
+                               list_length_validator, derived_from_validator)
 from .utils import (get_locator, parse_types_dict_names, validate_primitive, validate_no_short_form,
                     validate_no_unknown_fields, validate_known_fields, get_parent_presentation,
                     report_issue_for_unknown_type, report_issue_for_unknown_parent_type,
@@ -141,6 +142,7 @@ __all__ = (
     'field_getter',
     'field_setter',
     'field_validator',
+    'not_negative_validator',
     'type_validator',
     'list_type_validator',
     'list_length_validator',

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/parser/presentation/context.py
----------------------------------------------------------------------
diff --git a/aria/parser/presentation/context.py b/aria/parser/presentation/context.py
index 44a6f82..c12d6fd 100644
--- a/aria/parser/presentation/context.py
+++ b/aria/parser/presentation/context.py
@@ -15,6 +15,7 @@
 
 
 from .source import DefaultPresenterSource
+from ...utils.threading import (BlockingExecutor, FixedThreadPoolExecutor)
 
 
 class PresentationContext(object):
@@ -31,9 +32,13 @@ class PresentationContext(object):
     :vartype presenter_class: type
     :ivar import_profile: whether to import the profile by default (defaults to ``True``)
     :vartype import_profile: bool
-    :ivar threads: number of threads to use when reading data
+    :ivar validate_normative: whether to validate normative types (defaults to ``True``)
+    :vartype validate_normative: bool
+    :ivar cache: whether to cache presentations (defaults to ``True``)
+    :vartype cache: bool
+    :ivar threads: number of threads to use when reading data (defaults to 8)
     :vartype threads: int
-    :ivar timeout: timeout in seconds for loading data
+    :ivar timeout: timeout in seconds for loading data (defaults to 10)
     :vartype timeout: float
     :ivar print_exceptions: whether to print exceptions while reading data
     :vartype print_exceptions: bool
@@ -45,6 +50,8 @@ class PresentationContext(object):
         self.presenter_source = DefaultPresenterSource()
         self.presenter_class = None  # overrides
         self.import_profile = True
+        self.validate_normative = True
+        self.cache = True
         self.threads = 8  # reasonable default for networking multithreading
         self.timeout = 10  # in seconds
         self.print_exceptions = False
@@ -63,3 +70,12 @@ class PresentationContext(object):
         """
 
         return self.presenter._get_from_dict(*names) if self.presenter is not None else None
+
+    def create_executor(self):
+        if self.threads == 1:
+            # BlockingExecutor is much faster for the single-threaded case
+            return BlockingExecutor(print_exceptions=self.print_exceptions)
+
+        return FixedThreadPoolExecutor(size=self.threads,
+                                       timeout=self.timeout,
+                                       print_exceptions=self.print_exceptions)
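
create_executor() centralizes the executor choice: a BlockingExecutor when only one thread is
requested (no thread-pool overhead for the single-threaded case) and a FixedThreadPoolExecutor
sized and timed by the context otherwise. A small sketch, assuming PresentationContext takes
no constructor arguments, as the defaults above imply:

    from aria.parser.presentation import PresentationContext

    context = PresentationContext()

    context.threads = 1
    executor = context.create_executor()   # BlockingExecutor

    context.threads = 8
    context.timeout = 10
    executor = context.create_executor()   # FixedThreadPoolExecutor(size=8, timeout=10, ...)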

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/parser/presentation/field_validators.py
----------------------------------------------------------------------
diff --git a/aria/parser/presentation/field_validators.py b/aria/parser/presentation/field_validators.py
index aa04913..bd7bfdd 100644
--- a/aria/parser/presentation/field_validators.py
+++ b/aria/parser/presentation/field_validators.py
@@ -14,12 +14,29 @@
 # limitations under the License.
 
 
+from ...utils.formatting import safe_repr
 from ..validation import Issue
 from .utils import (parse_types_dict_names, report_issue_for_unknown_type,
                     report_issue_for_parent_is_self, report_issue_for_unknown_parent_type,
                     report_issue_for_circular_type_hierarchy)
 
 
+def not_negative_validator(field, presentation, context):
+    """
+    Makes sure that the field is not negative.
+
+    Can be used with the :func:`field_validator` decorator.
+    """
+
+    field.default_validate(presentation, context)
+    value = getattr(presentation, field.name)
+    if (value is not None) and (value < 0):
+        context.validation.report(u'field "{0}" is negative: {1}'
+                                  .format(field.name, safe_repr(value)),
+                                  locator=presentation._get_child_locator(field.name),
+                                  level=Issue.FIELD)
+
+
 def type_validator(type_name, *types_dict_names):
     """
     Makes sure that the field refers to an existing type defined in the root presenter.
@@ -101,8 +118,10 @@ def list_length_validator(length):
         values = getattr(presentation, field.name)
         if isinstance(values, list):
             if len(values) != length:
-                context.validation.report('field "%s" does not have exactly %d elements in "%s"'
-                                          % (field.name, length, presentation._fullname),
+                context.validation.report(u'field "{0}" does not have exactly {1:d} elements in '
+                                          u'"{2}": {3}'
+                                          .format(field.name, length, presentation._fullname,
+                                                  safe_repr(values)),
                                           locator=presentation._get_child_locator(field.name),
                                           level=Issue.FIELD)
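
The new validator plugs into the same field_validator mechanism as the existing ones. A sketch
of how a presentation class might use it; the OccurrencesSpec class and its field are invented
for illustration, and the decorator arguments are assumed from the other validators in this
module:

    from aria.parser.presentation import (has_fields, primitive_field, field_validator,
                                          not_negative_validator, Presentation)

    @has_fields
    class OccurrencesSpec(Presentation):                  # hypothetical presentation class
        @field_validator(not_negative_validator)
        @primitive_field(int)
        def min_instances(self):
            """Reported as an Issue.FIELD problem when the parsed value is negative."""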
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/d233b238/aria/parser/presentation/fields.py
----------------------------------------------------------------------
diff --git a/aria/parser/presentation/fields.py b/aria/parser/presentation/fields.py
index 5c3e074..27fb236 100644
--- a/aria/parser/presentation/fields.py
+++ b/aria/parser/presentation/fields.py
@@ -338,7 +338,7 @@ def has_fields_getitem(self, key):
     if not isinstance(key, basestring):
         raise TypeError('key must be a string')
     if key not in self.__class__.FIELDS:
-        raise KeyError('no \'%s\' property' % key)
+        raise KeyError('no \'{0}\' property'.format(key))
     return getattr(self, key)
 
 
@@ -346,7 +346,7 @@ def has_fields_setitem(self, key, value):
     if not isinstance(key, basestring):
         raise TypeError('key must be a string')
     if key not in self.__class__.FIELDS:
-        raise KeyError('no \'%s\' property' % key)
+        raise KeyError('no \'{0}\' property'.format(key))
     return setattr(self, key, value)
 
 
@@ -354,7 +354,7 @@ def has_fields_delitem(self, key):
     if not isinstance(key, basestring):
         raise TypeError('key must be a string')
     if key not in self.__class__.FIELDS:
-        raise KeyError('no \'%s\' property' % key)
+        raise KeyError('no \'{0}\' property'.format(key))
     return setattr(self, key, None)
 
 
@@ -389,7 +389,7 @@ class Field(object):
 
     @property
     def full_name(self):
-        return 'field "%s" in "%s"' % (self.name, full_type_name(self.container_cls))
+        return u'field "{0}" in "{1}"'.format(self.name, full_type_name(self.container_cls))
 
     @property
     def full_cls_name(self):
@@ -420,7 +420,7 @@ class Field(object):
         if value is None:
             return
 
-        dumper = getattr(self, '_dump_%s' % self.field_variant)
+        dumper = getattr(self, '_dump_{0}'.format(self.field_variant))
         dumper(context, value)
 
     def default_get(self, presentation, context):
@@ -457,8 +457,8 @@ class Field(object):
 
         if value is None:
             if self.required:
-                raise InvalidValueError('required %s does not have a value' % self.full_name,
-                                        locator=self.get_locator(raw))
+                raise InvalidValueError(u'required {0} does not have a value'
+                                        .format(self.full_name),
+                                        locator=self.get_locator(raw))
             else:
                 return None
 
@@ -466,21 +466,20 @@ class Field(object):
 
         if self.allowed is not None:
             if value not in self.allowed:
-                raise InvalidValueError('%s is not %s'
-                                        % (self.full_name, ' or '.join([safe_repr(v)
-                                                                        for v in self.allowed])),
+                raise InvalidValueError(u'{0} is not {1}'
+                                        .format(self.full_name,
+                                                u' or '.join([safe_repr(v) for v in self.allowed])),
                                         locator=self.get_locator(raw))
 
         # Handle get according to variant
 
-        getter = getattr(self, '_get_{field_variant}'.format(field_variant=self.field_variant),
-                         None)
+        getter = getattr(self, '_get_{0}'.format(self.field_variant), None)
 
         if getter is None:
             locator = self.get_locator(raw)
-            location = (' @%s' % locator) if locator is not None else ''
-            raise AttributeError('%s has unsupported field variant: "%s"%s'
-                                 % (self.full_name, self.field_variant, location))
+            location = (u' @{0}'.format(locator)) if locator is not None else ''
+            raise AttributeError(u'{0} has unsupported field variant: "{1}"{2}'
+                                 .format(self.full_name, self.field_variant, location))
 
         return getter(presentation, raw, value, context)
 
@@ -489,6 +488,9 @@ class Field(object):
         if is_short_form_field and not is_dict:
             # Handle short form
             value = raw
+            if value is None:
+                # An explicit null
+                value = NULL
         elif is_dict:
             if self.name in raw:
                 value = raw[self.name]
@@ -559,26 +561,28 @@ class Field(object):
     # primitive
 
     def _get_primitive(self, presentation, raw, value, context):
-        if (self.cls is not None and not isinstance(value, self.cls)
-                and value is not None and value is not NULL):
+        if (self.cls is not None) and (not isinstance(value, self.cls)) \
+                and (value is not None):
             try:
                 return self._coerce_primitive(value, context)
             except ValueError as e:
-                raise InvalidValueError('%s is not a valid "%s": %s' %
-                                        (self.full_name, self.full_cls_name, safe_repr(value)),
+                raise InvalidValueError(u'{0} is not a valid "{1}": {2}'
+                                        .format(self.full_name, self.full_cls_name,
+                                                safe_repr(value)),
                                         locator=self.get_locator(raw), cause=e)
         return value
 
     def _dump_primitive(self, context, value):
         if hasattr(value, 'as_raw'):
             value = as_raw(value)
-        puts('%s: %s' % (self.name, context.style.literal_style(value)))
+        puts(u'{0}: {1}'.format(self.name, context.style.literal_style(value)))
 
     # primitive list
 
     def _get_primitive_list(self, presentation, raw, value, context):
         if not isinstance(value, list):
-            raise InvalidValueError('%s is not a list: %s' % (self.full_name, safe_repr(value)),
+            raise InvalidValueError(u'{0} is not a list: {1}'
+                                    .format(self.full_name, safe_repr(value)),
                                     locator=self.get_locator(raw))
         primitive_list = value
         if self.cls is not None:
@@ -587,26 +591,28 @@ class Field(object):
             primitive_list = []
             for i, _ in enumerate(value):
                 primitive = value[i]
+                if primitive is None:
+                    primitive = NULL
                 try:
                     primitive = self._coerce_primitive(primitive, context)
                 except ValueError as e:
-                    raise InvalidValueError('%s is not a list of "%s": element %d is %s'
-                                            % (self.full_name,
-                                               self.full_cls_name,
-                                               i,
-                                               safe_repr(primitive)),
+                    raise InvalidValueError(u'{0} is not a list of "{1}": element {2:d} is {3}'
+                                            .format(self.full_name,
+                                                    self.full_cls_name,
+                                                    i,
+                                                    safe_repr(primitive)),
                                             locator=self.get_locator(raw), cause=e)
                 if primitive in primitive_list:
-                    raise InvalidValueError('%s has a duplicate "%s": %s'
-                                            % (self.full_name,
-                                               self.full_cls_name,
-                                               safe_repr(primitive)),
+                    raise InvalidValueError(u'{0} has a duplicate "{1}": {2}'
+                                            .format(self.full_name,
+                                                    self.full_cls_name,
+                                                    safe_repr(primitive)),
                                             locator=self.get_locator(raw))
                 primitive_list.append(primitive)
         return FrozenList(primitive_list)
 
     def _dump_primitive_list(self, context, value):
-        puts('%s:' % self.name)
+        puts(u'{0}:'.format(self.name))
         with context.style.indent():
             for primitive in value:
                 if hasattr(primitive, 'as_raw'):
@@ -617,7 +623,8 @@ class Field(object):
 
     def _get_primitive_dict(self, presentation, raw, value, context):
         if not isinstance(value, dict):
-            raise InvalidValueError('%s is not a dict: %s' % (self.full_name, safe_repr(value)),
+            raise InvalidValueError(u'{0} is not a dict: {1}'
+                                    .format(self.full_name, safe_repr(value)),
                                     locator=self.get_locator(raw))
         primitive_dict = value
         if self.cls is not None:
@@ -625,17 +632,21 @@ class Field(object):
                 context = Field._get_context()
             primitive_dict = OrderedDict()
             for k, v in value.iteritems():
+                if v is None:
+                    v = NULL
                 try:
                     primitive_dict[k] = self._coerce_primitive(v, context)
                 except ValueError as e:
-                    raise InvalidValueError('%s is not a dict of "%s" values: entry "%d" is %s'
-                                            % (self.full_name, self.full_cls_name, k, safe_repr(v)),
+                    raise InvalidValueError(u'{0} is not a dict of "{1}" values: entry "{2}" '
+                                            u'is {3}'
+                                            .format(self.full_name, self.full_cls_name, k,
+                                                    safe_repr(v)),
                                             locator=self.get_locator(raw),
                                             cause=e)
         return FrozenDict(primitive_dict)
 
     def _dump_primitive_dict(self, context, value):
-        puts('%s:' % self.name)
+        puts(u'{0}:'.format(self.name))
         with context.style.indent():
             for v in value.itervalues():
                 if hasattr(v, 'as_raw'):
@@ -648,13 +659,13 @@ class Field(object):
         try:
             return self.cls(name=self.name, raw=value, container=presentation)
         except TypeError as e:
-            raise InvalidValueError('%s cannot not be initialized to an instance of "%s": %s'
-                                    % (self.full_name, self.full_cls_name, safe_repr(value)),
+            raise InvalidValueError(u'{0} cannot be initialized to an instance of "{1}": {2}'
+                                    .format(self.full_name, self.full_cls_name, safe_repr(value)),
                                     cause=e,
                                     locator=self.get_locator(raw))
 
     def _dump_object(self, context, value):
-        puts('%s:' % self.name)
+        puts(u'{0}:'.format(self.name))
         with context.style.indent():
             if hasattr(value, '_dump'):
                 value._dump(context)
@@ -663,13 +674,13 @@ class Field(object):
 
     def _get_object_list(self, presentation, raw, value, context):
         if not isinstance(value, list):
-            raise InvalidValueError('%s is not a list: %s'
-                                    % (self.full_name, safe_repr(value)),
+            raise InvalidValueError(u'{0} is not a list: {1}'
+                                    .format(self.full_name, safe_repr(value)),
                                     locator=self.get_locator(raw))
         return FrozenList((self.cls(name=self.name, raw=v, container=presentation) for v in value))
 
     def _dump_object_list(self, context, value):
-        puts('%s:' % self.name)
+        puts(u'{0}:'.format(self.name))
         with context.style.indent():
             for v in value:
                 if hasattr(v, '_dump'):
@@ -679,13 +690,14 @@ class Field(object):
 
     def _get_object_dict(self, presentation, raw, value, context):
         if not isinstance(value, dict):
-            raise InvalidValueError('%s is not a dict: %s' % (self.full_name, safe_repr(value)),
+            raise InvalidValueError(u'{0} is not a dict: {1}'
+                                    .format(self.full_name, safe_repr(value)),
                                     locator=self.get_locator(raw))
         return FrozenDict(((k, self.cls(name=k, raw=v, container=presentation))
                            for k, v in value.iteritems()))
 
     def _dump_object_dict(self, context, value):
-        puts('%s:' % self.name)
+        puts(u'{0}:'.format(self.name))
         with context.style.indent():
             for v in value.itervalues():
                 if hasattr(v, '_dump'):
@@ -695,26 +707,27 @@ class Field(object):
 
     def _get_sequenced_object_list(self, presentation, raw, value, context):
         if not isinstance(value, list):
-            raise InvalidValueError('%s is not a sequenced list (a list of dicts, '
-                                    'each with exactly one key): %s'
-                                    % (self.full_name, safe_repr(value)),
+            raise InvalidValueError(u'{0} is not a sequenced list (a list of dicts, '
+                                    u'each with exactly one key): {1}'
+                                    .format(self.full_name, safe_repr(value)),
                                     locator=self.get_locator(raw))
         sequence = []
         for v in value:
             if not isinstance(v, dict):
-                raise InvalidValueError('%s list elements are not all dicts with '
-                                        'exactly one key: %s' % (self.full_name, safe_repr(value)),
+                raise InvalidValueError(u'{0} list elements are not all dicts with '
+                                        u'exactly one key: {1}'
+                                        .format(self.full_name, safe_repr(value)),
                                         locator=self.get_locator(raw))
             if len(v) != 1:
-                raise InvalidValueError('%s list elements do not all have exactly one key: %s'
-                                        % (self.full_name, safe_repr(value)),
+                raise InvalidValueError(u'{0} list elements do not all have exactly one key: {1}'
+                                        .format(self.full_name, safe_repr(value)),
                                         locator=self.get_locator(raw))
             key, value = v.items()[0]
             sequence.append((key, self.cls(name=key, raw=value, container=presentation)))
         return FrozenList(sequence)
 
     def _dump_sequenced_object_list(self, context, value):
-        puts('%s:' % self.name)
+        puts(u'{0}:'.format(self.name))
         for _, v in value:
             if hasattr(v, '_dump'):
                 v._dump(context)
@@ -730,13 +743,15 @@ class Field(object):
                 primitive_dict = OrderedDict()
                 for k, v in raw.iteritems():
                     if k not in presentation.FIELDS:
+                        if v is None:
+                            v = NULL
                         try:
                             primitive_dict[k] = self._coerce_primitive(v, context)
                         except ValueError as e:
-                            raise InvalidValueError('%s is not a dict of "%s" values:'
-                                                    ' entry "%d" is %s'
-                                                    % (self.full_name, self.full_cls_name,
-                                                       k, safe_repr(v)),
+                            raise InvalidValueError(u'{0} is not a dict of "{1}" values:'
+                                                    u' entry "{2}" is {3}'
+                                                    .format(self.full_name, self.full_cls_name,
+                                                            k, safe_repr(v)),
                                                     locator=self.get_locator(raw),
                                                     cause=e)
             return FrozenDict(primitive_dict)