ariatosca-commits mailing list archives

From emblempar...@apache.org
Subject [2/2] incubator-ariatosca git commit: Fixes to presentation caching.
Date Mon, 25 Sep 2017 21:57:52 GMT
Fixes to presentation caching.

Remove /tests/parser and /tests/instantiation and move their contents into
/tests/topology and /tests/extensions.

Various cleanups.
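
For context on the caching side of this change: the models memoize expensive
properties through the caching.cachedmethod decorator and the
caching.HasCachedMethods mixin, both visible in the aria/modeling/mixins.py
hunk below (aria/utils/caching.py itself is also touched). The following is a
minimal, generic sketch of that kind of per-instance method cache; it is an
illustration only, not the project's actual aria.utils.caching implementation:

    import functools


    def cachedmethod(func):
        """Cache a method's return value per instance and per argument tuple."""
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            cache = self.__dict__.setdefault('_method_cache', {})
            key = (func.__name__, args, tuple(sorted(kwargs.items())))
            if key not in cache:
                cache[key] = func(self, *args, **kwargs)
            return cache[key]
        return wrapper


    class HasCachedMethods(object):
        """Mixin that lets an instance drop everything cached above."""

        def _reset_method_cache(self):
            self.__dict__.pop('_method_cache', None)

With this pattern a property such as ParameterMixin.container (decorated with
@property and @caching.cachedmethod in the mixins.py hunk) is computed once per
instance and then served from the cache until the cache is reset.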


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/fa116d1d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/fa116d1d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/fa116d1d

Branch: refs/heads/ARIA-1-parser-test-suite
Commit: fa116d1d6ba34a02581c646ba3d69779468bf256
Parents: 8fbde87
Author: Tal Liron <tal.liron@gmail.com>
Authored: Mon Sep 25 16:57:06 2017 -0500
Committer: Tal Liron <tal.liron@gmail.com>
Committed: Mon Sep 25 16:57:06 2017 -0500

----------------------------------------------------------------------
 aria/__init__.py                                |   4 +-
 aria/cli/commands/services.py                   |   2 +-
 aria/cli/utils.py                               |   4 +-
 aria/modeling/functions.py                      |   2 +-
 aria/modeling/mixins.py                         |   4 +-
 aria/modeling/orchestration.py                  |   2 +-
 aria/modeling/service_common.py                 |   2 +-
 aria/modeling/service_instance.py               |   2 +-
 aria/modeling/service_template.py               |   4 +-
 aria/modeling/utils.py                          |   2 +-
 aria/orchestrator/context/common.py             |   6 +-
 aria/orchestrator/context/operation.py          |  12 +-
 aria/orchestrator/context/workflow.py           |   4 +-
 aria/orchestrator/decorators.py                 |   2 +-
 aria/orchestrator/execution_plugin/common.py    |  12 +-
 .../execution_plugin/ctx_proxy/client.py        |   4 +-
 .../execution_plugin/ctx_proxy/server.py        |  10 +-
 .../execution_plugin/instantiation.py           |  20 +--
 aria/orchestrator/execution_plugin/local.py     |   4 +-
 .../execution_plugin/ssh/operations.py          |  46 ++---
 .../orchestrator/execution_plugin/ssh/tunnel.py |   4 +-
 aria/orchestrator/plugin.py                     |  12 +-
 aria/orchestrator/topology/instance_handler.py  |  70 ++++----
 aria/orchestrator/topology/template_handler.py  |  74 ++++----
 aria/orchestrator/topology/topology.py          |  10 +-
 aria/orchestrator/workflow_runner.py            |   6 +-
 aria/orchestrator/workflows/api/task.py         |  10 +-
 aria/orchestrator/workflows/api/task_graph.py   |   8 +-
 .../workflows/builtin/execute_operation.py      |   2 +-
 aria/orchestrator/workflows/core/engine.py      |   2 +-
 .../workflows/core/events_handler.py            |   4 +-
 .../workflows/core/graph_compiler.py            |   4 +-
 aria/orchestrator/workflows/events_logging.py   |  18 +-
 aria/orchestrator/workflows/exceptions.py       |   8 +-
 aria/orchestrator/workflows/executor/celery.py  |   2 +-
 aria/orchestrator/workflows/executor/dry.py     |   6 +-
 aria/orchestrator/workflows/executor/process.py |   4 +-
 aria/orchestrator/workflows/executor/thread.py  |   2 +-
 aria/parser/consumption/presentation.py         |   3 +
 aria/parser/loading/loader.py                   |   2 +-
 aria/parser/presentation/fields.py              |   2 +-
 aria/parser/presentation/presentation.py        |   6 +-
 aria/parser/presentation/source.py              |   2 +-
 aria/parser/reading/source.py                   |   2 +-
 aria/parser/specification.py                    |   2 +-
 aria/storage/filesystem_rapi.py                 |   4 +-
 aria/utils/caching.py                           |   2 +-
 aria/utils/formatting.py                        |   2 +-
 aria/utils/versions.py                          |   2 +-
 .../simple_nfv_v1_0/presenter.py                |   2 +-
 .../simple_v1_0/assignments.py                  |   2 +-
 .../simple_v1_0/data_types.py                   |  44 ++---
 .../simple_v1_0/definitions.py                  |   2 +-
 .../aria_extension_tosca/simple_v1_0/misc.py    |   2 +-
 .../simple_v1_0/modeling/__init__.py            |   4 +-
 .../simple_v1_0/modeling/data_types.py          |   6 +-
 .../simple_v1_0/modeling/functions.py           |  10 +-
 .../simple_v1_0/modeling/interfaces.py          |   4 +-
 .../simple_v1_0/modeling/requirements.py        |   2 +-
 .../simple_v1_0/presentation/extensible.py      |   2 +-
 .../presentation/field_validators.py            |   2 +-
 .../simple_v1_0/presentation/types.py           |   2 +-
 .../simple_v1_0/presenter.py                    |   4 +-
 .../aria_extension_tosca/simple_v1_0/types.py   |   4 +-
 .../simple_v1_0/test_names.py                   |  57 ++++++
 tests/instantiation/__init__.py                 |  14 --
 tests/instantiation/test_configuration.py       | 172 ------------------
 tests/parser/__init__.py                        |  14 --
 tests/parser/service_templates.py               |  85 ---------
 tests/parser/test_reqs_caps.py                  |  29 ----
 tests/parser/test_tosca_simple_v1_0/__init__.py |  14 --
 .../presentation/__init__.py                    |   0
 .../presentation/test_types.py                  |  23 ---
 .../test_tosca_simple_v1_0/test_end2end.py      | 112 ------------
 tests/parser/utils.py                           |  68 --------
 .../types/shorthand-1/shorthand-1.yaml          |  23 ---
 .../types/typequalified-1/typequalified-1.yaml  |  23 ---
 tests/topology/__init__.py                      |  14 ++
 tests/topology/service_templates.py             |  70 ++++++++
 tests/topology/test_configuration.py            | 173 +++++++++++++++++++
 tests/topology/test_end2end.py                  | 112 ++++++++++++
 tests/topology/test_reqs_caps.py                |  29 ++++
 tests/topology/utils.py                         |  68 ++++++++
 83 files changed, 782 insertions(+), 833 deletions(-)
----------------------------------------------------------------------
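
A note on the bulk of the per-file changes below: most hunks either right-align
trailing "# pylint: disable=..." comments or convert str.format() templates
from '...' to u'...' literals. The snippet below is a hypothetical Python 2
illustration (not code from the ARIA tree) of why the unicode templates are the
safer choice when formatted values may contain non-ASCII text:

    # -*- coding: utf-8 -*-
    # With a byte-string template, str.format() has to fit a unicode field into
    # a byte-string result, which fails for non-ASCII values; a u'...' template
    # keeps the whole result unicode. (Python 2 only; illustration, not ARIA code.)
    name = u'caf\xe9'                      # u'café', unicode with a non-ASCII char

    message = u'Node: {0}'.format(name)    # unicode template: no implicit encoding
    assert isinstance(message, unicode)    # the unicode type exists on Python 2 only

    try:
        'Node: {0}'.format(name)           # byte-string template
    except UnicodeError as error:          # typically UnicodeEncodeError here
        print('byte-string template failed: %r' % error)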


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/__init__.py
----------------------------------------------------------------------
diff --git a/aria/__init__.py b/aria/__init__.py
index 76a62ce..4befcf1 100644
--- a/aria/__init__.py
+++ b/aria/__init__.py
@@ -23,8 +23,8 @@ import pkg_resources
 aria_package_name = 'apache-ariatosca'
 __version__ = pkg_resources.get_distribution(aria_package_name).version
 
-from .orchestrator.decorators import workflow, operation  # pylint: disable=wrong-import-position
-from . import (  # pylint: disable=wrong-import-position
+from .orchestrator.decorators import workflow, operation                                            # pylint: disable=wrong-import-position
+from . import (                                                                                     # pylint: disable=wrong-import-position
     extension,
     utils,
     parser,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/cli/commands/services.py
----------------------------------------------------------------------
diff --git a/aria/cli/commands/services.py b/aria/cli/commands/services.py
index 6752899..32622a9 100644
--- a/aria/cli/commands/services.py
+++ b/aria/cli/commands/services.py
@@ -137,7 +137,7 @@ def list(service_template_name,
 @aria.pass_logger
 def create(service_template_name,
            service_name,
-           inputs,  # pylint: disable=redefined-outer-name
+           inputs,                                                                                  # pylint: disable=redefined-outer-name
            model_storage,
            resource_storage,
            plugin_manager,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/cli/utils.py
----------------------------------------------------------------------
diff --git a/aria/cli/utils.py b/aria/cli/utils.py
index 697ff37..1b5d666 100644
--- a/aria/cli/utils.py
+++ b/aria/cli/utils.py
@@ -58,7 +58,7 @@ def check_overriding_storage_exceptions(e, model_class, name):
             'There already a exists a {model_class} with the same name' \
                 .format(model_class=model_class, name=name, linesep=os.linesep)
         trace = sys.exc_info()[2]
-        raise type(e), type(e)(new_message), trace  # pylint: disable=raising-non-exception
+        raise type(e), type(e)(new_message), trace                                                  # pylint: disable=raising-non-exception
 
 
 def download_file(url):
@@ -107,7 +107,7 @@ def generate_progress_handler(file_path, action='', max_bar_length=80):
 
         filled_length = min(bar_length, int(round(bar_length * read_bytes / float(total_bytes))))
         percents = min(100.00, round(100.00 * (read_bytes / float(total_bytes)), 2))
-        bar = '#' * filled_length + '-' * (bar_length - filled_length)  # pylint: disable=blacklisted-name
+        bar = '#' * filled_length + '-' * (bar_length - filled_length)                              # pylint: disable=blacklisted-name
 
         # The \r caret makes sure the cursor moves back to the beginning of the line
         sys.stdout.write('\r{0} {1} |{2}| {3}%'.format(action, file_name, bar, percents))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/modeling/functions.py
----------------------------------------------------------------------
diff --git a/aria/modeling/functions.py b/aria/modeling/functions.py
index 554bbfb..f3f0f22 100644
--- a/aria/modeling/functions.py
+++ b/aria/modeling/functions.py
@@ -66,7 +66,7 @@ class Evaluation(object):
         self.final = final
 
 
-def evaluate(value, container_holder, report_issues=False): # pylint: disable=too-many-branches
+def evaluate(value, container_holder, report_issues=False):                                         # pylint: disable=too-many-branches
     """
     Recursively attempts to call ``__evaluate__``. If an evaluation occurred will return an
     :class:`Evaluation`, otherwise it will be ``None``. If any evaluation is non-final, then the

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/modeling/mixins.py
----------------------------------------------------------------------
diff --git a/aria/modeling/mixins.py b/aria/modeling/mixins.py
index d58c25a..eb1ac83 100644
--- a/aria/modeling/mixins.py
+++ b/aria/modeling/mixins.py
@@ -201,7 +201,7 @@ class ParameterMixin(TemplateModelMixin, caching.HasCachedMethods):
 
     @property
     @caching.cachedmethod
-    def container(self): # pylint: disable=too-many-return-statements,too-many-branches
+    def container(self):                                                                            # pylint: disable=too-many-return-statements,too-many-branches
         """
         The logical container for this parameter, which would be another model: service, node,
         group, or policy (or their templates).
@@ -319,7 +319,7 @@ class ParameterMixin(TemplateModelMixin, caching.HasCachedMethods):
         type_name = canonical_type_name(value)
         if type_name is None:
             type_name = full_type_name(value)
-        return cls(name=name,  # pylint: disable=unexpected-keyword-arg
+        return cls(name=name,                                                                       # pylint: disable=unexpected-keyword-arg
                    type_name=type_name,
                    value=value,
                    description=description)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/modeling/orchestration.py
----------------------------------------------------------------------
diff --git a/aria/modeling/orchestration.py b/aria/modeling/orchestration.py
index 4d4f0fe..da91295 100644
--- a/aria/modeling/orchestration.py
+++ b/aria/modeling/orchestration.py
@@ -436,7 +436,7 @@ class TaskBase(mixins.ModelMixin):
         return self.node or self.relationship
 
     @orm.validates('max_attempts')
-    def validate_max_attempts(self, _, value):                                  # pylint: disable=no-self-use
+    def validate_max_attempts(self, _, value):                                                      # pylint: disable=no-self-use
         """
         Validates that max attempts is either -1 or a positive number.
         """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/modeling/service_common.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_common.py b/aria/modeling/service_common.py
index c813416..6ca80ee 100644
--- a/aria/modeling/service_common.py
+++ b/aria/modeling/service_common.py
@@ -91,7 +91,7 @@ class InputBase(ParameterMixin):
     """)
 
     @classmethod
-    def wrap(cls, name, value, description=None, required=True):  # pylint: disable=arguments-differ
+    def wrap(cls, name, value, description=None, required=True):                                    # pylint: disable=arguments-differ
         input = super(InputBase, cls).wrap(name, value, description)
         input.required = required
         return input

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/modeling/service_instance.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_instance.py b/aria/modeling/service_instance.py
index 01c4da9..b0e426c 100644
--- a/aria/modeling/service_instance.py
+++ b/aria/modeling/service_instance.py
@@ -319,7 +319,7 @@ class NodeBase(InstanceModelMixin):
     # region one_to_one relationships
 
     @declared_attr
-    def host(cls): # pylint: disable=method-hidden
+    def host(cls):                                                                                  # pylint: disable=method-hidden
         """
         Node in which we are hosted (can be ``None``).
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/modeling/service_template.py
----------------------------------------------------------------------
diff --git a/aria/modeling/service_template.py b/aria/modeling/service_template.py
index cd0adb4..0933407 100644
--- a/aria/modeling/service_template.py
+++ b/aria/modeling/service_template.py
@@ -1415,7 +1415,7 @@ class InterfaceTemplateBase(TemplateModelMixin):
             ('name', self.name),
             ('description', self.description),
             ('type_name', self.type.name),
-            ('inputs', formatting.as_raw_dict(self.inputs)),  # pylint: disable=no-member
+            ('inputs', formatting.as_raw_dict(self.inputs)),                                        # pylint: disable=no-member
             # TODO fix self.properties reference
             ('operation_templates', formatting.as_raw_list(self.operation_templates))))
 
@@ -1714,7 +1714,7 @@ class PluginSpecificationBase(TemplateModelMixin):
         return relationship.many_to_one(cls, 'service_template')
 
     @declared_attr
-    def plugin(cls): # pylint: disable=method-hidden
+    def plugin(cls):                                                                                # pylint: disable=method-hidden
         """
         Matched plugin.
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/modeling/utils.py
----------------------------------------------------------------------
diff --git a/aria/modeling/utils.py b/aria/modeling/utils.py
index 6e851f2..1b6b375 100644
--- a/aria/modeling/utils.py
+++ b/aria/modeling/utils.py
@@ -35,7 +35,7 @@ class ModelJSONEncoder(JSONEncoder):
         # Just here to make sure Sphinx doesn't grab the base constructor's docstring
         super(ModelJSONEncoder, self).__init__(*args, **kwargs)
 
-    def default(self, o):  # pylint: disable=method-hidden
+    def default(self, o):                                                                           # pylint: disable=method-hidden
         from .mixins import ModelMixin
         if isinstance(o, ModelMixin):
             if hasattr(o, 'value'):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/context/common.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/common.py b/aria/orchestrator/context/common.py
index 3c5f618..90205fd 100644
--- a/aria/orchestrator/context/common.py
+++ b/aria/orchestrator/context/common.py
@@ -108,9 +108,9 @@ class BaseContext(object):
                                                    execution_id=self._execution_id)
 
     def __repr__(self):
-        return (
-            '{name}(name={self.name}, '
-            'deployment_id={self._service_id}, '
+        return (                                                                                    # pylint: disable=redundant-keyword-arg
+            u'{name}(name={self.name}, '
+            u'deployment_id={self._service_id}, '
             .format(name=self.__class__.__name__, self=self))
 
     @contextmanager

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/context/operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/operation.py b/aria/orchestrator/context/operation.py
index 8613ec3..7f6612e 100644
--- a/aria/orchestrator/context/operation.py
+++ b/aria/orchestrator/context/operation.py
@@ -40,10 +40,10 @@ class BaseOperationContext(common.BaseContext):
         self._register_logger(task_id=self.task.id, level=logger_level)
 
     def __repr__(self):
-        details = 'function={task.function}; ' \
-                  'operation_arguments={task.arguments}'\
+        details = u'function={task.function}; ' \
+                  u'operation_arguments={task.arguments}'\
             .format(task=self.task)
-        return '{name}({0})'.format(details, name=self.name)
+        return u'{name}({0})'.format(details, name=self.name)
 
     @property
     def task(self):
@@ -65,9 +65,9 @@ class BaseOperationContext(common.BaseContext):
         """
         if self.task.plugin is None:
             return None
-        plugin_workdir = '{0}/plugins/{1}/{2}'.format(self._workdir,
-                                                      self.service.id,
-                                                      self.task.plugin.name)
+        plugin_workdir = u'{0}/plugins/{1}/{2}'.format(self._workdir,
+                                                       self.service.id,
+                                                       self.task.plugin.name)
         file.makedirs(plugin_workdir)
         return plugin_workdir
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/context/workflow.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/workflow.py b/aria/orchestrator/context/workflow.py
index 738d2fd..5a323a6 100644
--- a/aria/orchestrator/context/workflow.py
+++ b/aria/orchestrator/context/workflow.py
@@ -73,7 +73,7 @@ class WorkflowContext(BaseContext):
         """
         Iterates over nodes templates.
         """
-        key = 'service_{0}'.format(self.model.node_template.model_cls.name_column_name())
+        key = u'service_{0}'.format(self.model.node_template.model_cls.name_column_name())
 
         return self.model.node_template.iter(
             filters={
@@ -86,7 +86,7 @@ class WorkflowContext(BaseContext):
         """
         Iterates over nodes.
         """
-        key = 'service_{0}'.format(self.model.node.model_cls.name_column_name())
+        key = u'service_{0}'.format(self.model.node.model_cls.name_column_name())
         return self.model.node.iter(
             filters={
                 key: getattr(self.service, self.service.name_column_name())

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/decorators.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/decorators.py b/aria/orchestrator/decorators.py
index 4b163d6..4de0397 100644
--- a/aria/orchestrator/decorators.py
+++ b/aria/orchestrator/decorators.py
@@ -80,6 +80,6 @@ def operation(func=None, toolbelt=False, suffix_template='', logging_handlers=No
 
 
 def _generate_name(func_name, ctx, suffix_template, **custom_kwargs):
-    return '{func_name}.{suffix}'.format(
+    return u'{func_name}.{suffix}'.format(
         func_name=func_name,
         suffix=suffix_template.format(ctx=ctx, **custom_kwargs) or generate_uuid(variant='uuid'))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/execution_plugin/common.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/common.py b/aria/orchestrator/execution_plugin/common.py
index ce6746c..1c279d3 100644
--- a/aria/orchestrator/execution_plugin/common.py
+++ b/aria/orchestrator/execution_plugin/common.py
@@ -35,13 +35,13 @@ def download_script(ctx, script_path):
     split = script_path.split('://')
     schema = split[0]
     suffix = script_path.split('/')[-1]
-    file_descriptor, dest_script_path = tempfile.mkstemp(suffix='-{0}'.format(suffix))
+    file_descriptor, dest_script_path = tempfile.mkstemp(suffix=u'-{0}'.format(suffix))
     os.close(file_descriptor)
     try:
         if schema in ('http', 'https'):
             response = requests.get(script_path)
             if response.status_code == 404:
-                ctx.task.abort('Failed to download script: {0} (status code: {1})'
+                ctx.task.abort(u'Failed to download script: {0} (status code: {1})'
                                .format(script_path, response.status_code))
             content = response.text
             with open(dest_script_path, 'wb') as f:
@@ -84,7 +84,7 @@ def create_process_config(script_path, process, operation_kwargs, quote_json_env
         if isinstance(v, (dict, list, tuple, bool, int, float)):
             v = json.dumps(v)
             if quote_json_env_vars:
-                v = "'{0}'".format(v)
+                v = u"'{0}'".format(v)
         if is_windows():
             # These <k,v> environment variables will subsequently
             # be used in a subprocess.Popen() call, as the `env` parameter.
@@ -102,9 +102,9 @@ def create_process_config(script_path, process, operation_kwargs, quote_json_env
     command = script_path
     command_prefix = process.get('command_prefix')
     if command_prefix:
-        command = '{0} {1}'.format(command_prefix, command)
+        command = u'{0} {1}'.format(command_prefix, command)
     if args:
-        command = ' '.join([command] + [str(a) for a in args])
+        command = u' '.join([command] + [str(a) for a in args])
     process['command'] = command
     return process
 
@@ -150,5 +150,5 @@ def check_error(ctx, error_check_func=None, reraise=False):
         error_check_func()
     # if this function is called from within an ``except`` clause, a re-raise maybe required
     if reraise:
-        raise  # pylint: disable=misplaced-bare-raise
+        raise                                                                                       # pylint: disable=misplaced-bare-raise
     return _error

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/execution_plugin/ctx_proxy/client.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/ctx_proxy/client.py b/aria/orchestrator/execution_plugin/ctx_proxy/client.py
index 84d66f1..a569c78 100644
--- a/aria/orchestrator/execution_plugin/ctx_proxy/client.py
+++ b/aria/orchestrator/execution_plugin/ctx_proxy/client.py
@@ -32,7 +32,7 @@ CTX_SOCKET_URL = 'CTX_SOCKET_URL'
 class _RequestError(RuntimeError):
 
     def __init__(self, ex_message, ex_type, ex_traceback):
-        super(_RequestError, self).__init__(self, '{0}: {1}'.format(ex_type, ex_message))
+        super(_RequestError, self).__init__(self, u'{0}: {1}'.format(ex_type, ex_message))
         self.ex_type = ex_type
         self.ex_message = ex_message
         self.ex_traceback = ex_traceback
@@ -45,7 +45,7 @@ def _http_request(socket_url, request, method, timeout):
     response = opener.open(request, timeout=timeout)
 
     if response.code != 200:
-        raise RuntimeError('Request failed: {0}'.format(response))
+        raise RuntimeError(u'Request failed: {0}'.format(response))
     return json.loads(response.read())
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/execution_plugin/ctx_proxy/server.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/ctx_proxy/server.py b/aria/orchestrator/execution_plugin/ctx_proxy/server.py
index 91b95d9..d8aa8fb 100644
--- a/aria/orchestrator/execution_plugin/ctx_proxy/server.py
+++ b/aria/orchestrator/execution_plugin/ctx_proxy/server.py
@@ -37,7 +37,7 @@ class CtxProxy(object):
         self.ctx = ctx
         self._ctx_patcher = ctx_patcher
         self.port = _get_unused_port()
-        self.socket_url = 'http://localhost:{0}'.format(self.port)
+        self.socket_url = 'http://localhost:{0:d}'.format(self.port)
         self.server = None
         self._started = Queue.Queue(1)
         self.thread = self._start_server()
@@ -73,7 +73,7 @@ class CtxProxy(object):
                     def address_string(self):
                         return self.client_address[0]
 
-                    def log_request(*args, **kwargs):  # pylint: disable=no-method-argument
+                    def log_request(*args, **kwargs):                                               # pylint: disable=no-method-argument
                         if not self.quiet:
                             return wsgiref.simple_server.WSGIRequestHandler.log_request(*args,
                                                                                         **kwargs)
@@ -110,7 +110,7 @@ class CtxProxy(object):
             self.server.server_close()
 
     def _request_handler(self):
-        request = bottle.request.body.read()  # pylint: disable=no-member
+        request = bottle.request.body.read()                                                        # pylint: disable=no-member
         response = self._process(request)
         return bottle.LocalResponse(
             body=json.dumps(response, cls=modeling.utils.ModelJSONEncoder),
@@ -195,7 +195,7 @@ def _process_arguments(obj, args):
             # Modify object attribute
             setattr(obj, modifying_key, modifying_value)
         else:
-            raise CtxError('Cannot modify `{0}` of `{1!r}`'.format(modifying_key, obj))
+            raise CtxError(u'Cannot modify `{0}` of `{1!r}`'.format(modifying_key, obj))
 
     return obj
 
@@ -233,7 +233,7 @@ def _process_next_operation(obj, args, modifying):
             obj[arg] = {}
         return obj[arg], args
 
-    raise CtxParsingError('Cannot parse argument: `{0!r}`'.format(arg))
+    raise CtxParsingError(u'Cannot parse argument: `{0!r}`'.format(arg))
 
 
 def _get_unused_port():

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/execution_plugin/instantiation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/instantiation.py b/aria/orchestrator/execution_plugin/instantiation.py
index 8b52015..d859043 100644
--- a/aria/orchestrator/execution_plugin/instantiation.py
+++ b/aria/orchestrator/execution_plugin/instantiation.py
@@ -64,8 +64,8 @@ def _configure_local(operation):
     """
 
     from . import operations
-    operation.function = '{0}.{1}'.format(operations.__name__,
-                                          operations.run_script_locally.__name__)
+    operation.function = u'{0}.{1}'.format(operations.__name__,
+                                           operations.run_script_locally.__name__)
 
 
 def _configure_remote(operation, reporter):
@@ -105,7 +105,7 @@ def _configure_remote(operation, reporter):
 
     # Make sure we have a user
     if fabric_env.get('user') is None:
-        reporter.report('must configure "ssh.user" for "{0}"'.format(operation.implementation),
+        reporter.report(u'must configure "ssh.user" for "{0}"'.format(operation.implementation),
                         level=reporter.Issue.BETWEEN_TYPES)
 
     # Make sure we have an authentication value
@@ -120,8 +120,8 @@ def _configure_remote(operation, reporter):
     operation.arguments['fabric_env'] = Argument.wrap('fabric_env', fabric_env,
                                                       'Fabric configuration.')
 
-    operation.function = '{0}.{1}'.format(operations.__name__,
-                                          operations.run_script_with_ssh.__name__)
+    operation.function = u'{0}.{1}'.format(operations.__name__,
+                                           operations.run_script_with_ssh.__name__)
 
 
 def _get_process(operation, reporter):
@@ -144,7 +144,7 @@ def _get_process(operation, reporter):
         elif k == 'env':
             _validate_type(v, dict, 'process.env', reporter)
         else:
-            reporter.report('unsupported configuration parameter: "process.{0}"'.format(k),
+            reporter.report(u'unsupported configuration parameter: "process.{0}"'.format(k),
                             level=reporter.Issue.BETWEEN_TYPES)
     return value
 
@@ -175,7 +175,7 @@ def _get_ssh(operation, reporter):
         elif k == 'address':
             _validate_type(v, basestring, 'ssh.address', reporter)
         else:
-            reporter.report('unsupported configuration parameter: "ssh.{0}"'.format(k),
+            reporter.report(u'unsupported configuration parameter: "ssh.{0}"'.format(k),
                             level=reporter.Issue.BETWEEN_TYPES)
     return value
 
@@ -185,7 +185,7 @@ def _validate_type(value, the_type, name, reporter):
         return
     if not isinstance(value, the_type):
         reporter.report(
-            '"{0}" configuration is not a {1}: {2}'.format(
+            u'"{0}" configuration is not a {1}: {2}'.format(
                 name, utils.type.full_type_name(the_type), utils.formatting.safe_repr(value)),
             level=reporter.Issue.BETWEEN_TYPES)
 
@@ -202,7 +202,7 @@ def _coerce_bool(value, name, reporter):
         return False
     else:
         reporter.report(
-            '"{0}" configuration is not "true" or "false": {1}'.format(
+            u'"{0}" configuration is not "true" or "false": {1}'.format(
                 name, utils.formatting.safe_repr(value)),
             level=reporter.Issue.BETWEEN_TYPES)
 
@@ -212,6 +212,6 @@ def _dict_to_list_of_strings(the_dict, name, reporter):
     value = []
     for k in sorted(the_dict):
         v = the_dict[k]
-        _validate_type(v, basestring, '{0}.{1}'.format(name, k), reporter)
+        _validate_type(v, basestring, u'{0}.{1}'.format(name, k), reporter)
         value.append(v)
     return value

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/execution_plugin/local.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/local.py b/aria/orchestrator/execution_plugin/local.py
index 04b9ecd..abb5b52 100644
--- a/aria/orchestrator/execution_plugin/local.py
+++ b/aria/orchestrator/execution_plugin/local.py
@@ -78,7 +78,7 @@ def _execute_func(script_path, ctx, process, operation_kwargs):
     command = process['command']
     env = os.environ.copy()
     env.update(process['env'])
-    ctx.logger.info('Executing: {0}'.format(command))
+    ctx.logger.info(u'Executing: {0}'.format(command))
     with ctx_proxy.server.CtxProxy(ctx, common.patch_ctx) as proxy:
         env[ctx_proxy.client.CTX_SOCKET_URL] = proxy.socket_url
         running_process = subprocess.Popen(
@@ -95,7 +95,7 @@ def _execute_func(script_path, ctx, process, operation_kwargs):
         exit_code = running_process.wait()
     stdout_consumer.join()
     stderr_consumer.join()
-    ctx.logger.info('Execution done (exit_code={0}): {1}'.format(exit_code, command))
+    ctx.logger.info(u'Execution done (exit_code={0}): {1}'.format(exit_code, command))
 
     def error_check_func():
         if exit_code:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/execution_plugin/ssh/operations.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/ssh/operations.py b/aria/orchestrator/execution_plugin/ssh/operations.py
index c40e783..759f1d2 100644
--- a/aria/orchestrator/execution_plugin/ssh/operations.py
+++ b/aria/orchestrator/execution_plugin/ssh/operations.py
@@ -48,7 +48,7 @@ def run_commands(ctx, commands, fabric_env, use_sudo, hide_output, **_):
     with fabric.api.settings(_hide_output(ctx, groups=hide_output),
                              **_fabric_env(ctx, fabric_env, warn_only=True)):
         for command in commands:
-            ctx.logger.info('Running command: {0}'.format(command))
+            ctx.logger.info(u'Running command: {0}'.format(command))
             run = fabric.api.sudo if use_sudo else fabric.api.run
             result = run(command)
             if result.failed:
@@ -70,8 +70,8 @@ def run_script(ctx, script_path, fabric_env, process, use_sudo, hide_output, **k
             # there may be race conditions with other operations that
             # may be running in parallel, so we pass -p to make sure
             # we get 0 exit code if the directory already exists
-            fabric.api.run('mkdir -p {0} && mkdir -p {1}'.format(paths.remote_scripts_dir,
-                                                                 paths.remote_work_dir))
+            fabric.api.run(u'mkdir -p {0} && mkdir -p {1}'.format(paths.remote_scripts_dir,
+                                                                  paths.remote_work_dir))
             # this file has to be present before using ctx
             fabric.api.put(_PROXY_CLIENT_PATH, paths.remote_ctx_path)
         process = common.create_process_config(
@@ -82,7 +82,7 @@ def run_script(ctx, script_path, fabric_env, process, use_sudo, hide_output, **k
         fabric.api.put(paths.local_script_path, paths.remote_script_path)
         with ctx_proxy.server.CtxProxy(ctx, _patch_ctx) as proxy:
             local_port = proxy.port
-            with fabric.context_managers.cd(process.get('cwd', paths.remote_work_dir)):  # pylint: disable=not-context-manager
+            with fabric.context_managers.cd(process.get('cwd', paths.remote_work_dir)):             # pylint: disable=not-context-manager
                 with tunnel.remote(ctx, local_port=local_port) as remote_port:
                     local_socket_url = proxy.socket_url
                     remote_socket_url = local_socket_url.replace(str(local_port), str(remote_port))
@@ -93,8 +93,8 @@ def run_script(ctx, script_path, fabric_env, process, use_sudo, hide_output, **k
                         remote_socket_url=remote_socket_url)
                     fabric.api.put(env_script, paths.remote_env_script_path)
                     try:
-                        command = 'source {0} && {1}'.format(paths.remote_env_script_path,
-                                                             process['command'])
+                        command = u'source {0} && {1}'.format(paths.remote_env_script_path,
+                                                              process['command'])
                         run = fabric.api.sudo if use_sudo else fabric.api.run
                         run(command)
                     except exceptions.TaskException:
@@ -136,8 +136,8 @@ def _hide_output(ctx, groups):
     """ Hides Fabric's output for every 'entity' in `groups` """
     groups = set(groups or [])
     if not groups.issubset(constants.VALID_FABRIC_GROUPS):
-        ctx.task.abort('`hide_output` must be a subset of {0} (Provided: {1})'
-                       .format(', '.join(constants.VALID_FABRIC_GROUPS), ', '.join(groups)))
+        ctx.task.abort(u'`hide_output` must be a subset of {0} (Provided: {1})'
+                       .format(u', '.join(constants.VALID_FABRIC_GROUPS), u', '.join(groups)))
     return fabric.api.hide(*groups)
 
 
@@ -165,16 +165,16 @@ def _fabric_env(ctx, fabric_env, warn_only):
 def _write_environment_script_file(process, paths, local_socket_url, remote_socket_url):
     env_script = StringIO.StringIO()
     env = process['env']
-    env['PATH'] = '{0}:$PATH'.format(paths.remote_ctx_dir)
-    env['PYTHONPATH'] = '{0}:$PYTHONPATH'.format(paths.remote_ctx_dir)
-    env_script.write('chmod +x {0}\n'.format(paths.remote_script_path))
-    env_script.write('chmod +x {0}\n'.format(paths.remote_ctx_path))
+    env['PATH'] = u'{0}:$PATH'.format(paths.remote_ctx_dir)
+    env['PYTHONPATH'] = u'{0}:$PYTHONPATH'.format(paths.remote_ctx_dir)
+    env_script.write(u'chmod +x {0}\n'.format(paths.remote_script_path))
+    env_script.write(u'chmod +x {0}\n'.format(paths.remote_ctx_path))
     env.update({
         ctx_proxy.client.CTX_SOCKET_URL: remote_socket_url,
-        'LOCAL_{0}'.format(ctx_proxy.client.CTX_SOCKET_URL): local_socket_url
+        u'LOCAL_{0}'.format(ctx_proxy.client.CTX_SOCKET_URL): local_socket_url
     })
     for key, value in env.iteritems():
-        env_script.write('export {0}={1}\n'.format(key, value))
+        env_script.write(u'export {0}={1}\n'.format(key, value))
     return env_script
 
 
@@ -184,12 +184,12 @@ class _Paths(object):
         self.local_script_path = local_script_path
         self.remote_ctx_dir = base_dir
         self.base_script_path = os.path.basename(self.local_script_path)
-        self.remote_ctx_path = '{0}/ctx'.format(self.remote_ctx_dir)
-        self.remote_scripts_dir = '{0}/scripts'.format(self.remote_ctx_dir)
-        self.remote_work_dir = '{0}/work'.format(self.remote_ctx_dir)
-        random_suffix = ''.join(random.choice(string.ascii_lowercase + string.digits)
-                                for _ in range(8))
-        remote_path_suffix = '{0}-{1}'.format(self.base_script_path, random_suffix)
-        self.remote_env_script_path = '{0}/env-{1}'.format(self.remote_scripts_dir,
-                                                           remote_path_suffix)
-        self.remote_script_path = '{0}/{1}'.format(self.remote_scripts_dir, remote_path_suffix)
+        self.remote_ctx_path = u'{0}/ctx'.format(self.remote_ctx_dir)
+        self.remote_scripts_dir = u'{0}/scripts'.format(self.remote_ctx_dir)
+        self.remote_work_dir = u'{0}/work'.format(self.remote_ctx_dir)
+        random_suffix = u''.join(random.choice(string.ascii_lowercase + string.digits)
+                                 for _ in range(8))
+        remote_path_suffix = u'{0}-{1}'.format(self.base_script_path, random_suffix)
+        self.remote_env_script_path = u'{0}/env-{1}'.format(self.remote_scripts_dir,
+                                                            remote_path_suffix)
+        self.remote_script_path = u'{0}/{1}'.format(self.remote_scripts_dir, remote_path_suffix)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/execution_plugin/ssh/tunnel.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/execution_plugin/ssh/tunnel.py b/aria/orchestrator/execution_plugin/ssh/tunnel.py
index e76d525..05ea4ed 100644
--- a/aria/orchestrator/execution_plugin/ssh/tunnel.py
+++ b/aria/orchestrator/execution_plugin/ssh/tunnel.py
@@ -64,10 +64,10 @@ def remote(ctx, local_port, remote_port=0, local_host='localhost', remote_bind_a
             try:
                 channel.close()
             except Exception as ex2:
-                close_error = ' (While trying to close channel: {0})'.format(ex2)
+                close_error = u' (While trying to close channel: {0})'.format(ex2)
             else:
                 close_error = ''
-            ctx.task.abort('[{0}] rtunnel: cannot connect to {1}:{2} ({3}){4}'
+            ctx.task.abort(u'[{0}] rtunnel: cannot connect to {1}:{2} ({3}){4}'
                            .format(fabric.api.env.host_string, local_host, local_port, e,
                                    close_error))
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/plugin.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/plugin.py b/aria/orchestrator/plugin.py
index 756a28e..4f29e4f 100644
--- a/aria/orchestrator/plugin.py
+++ b/aria/orchestrator/plugin.py
@@ -67,8 +67,8 @@ class PluginManager(object):
         if len(self._model.plugin.list(filters={'package_name': plugin.package_name,
                                                 'package_version': plugin.package_version})):
             raise exceptions.PluginAlreadyExistsError(
-                'Plugin {0}, version {1} already exists'.format(plugin.package_name,
-                                                                plugin.package_version))
+                u'Plugin {0}, version {1} already exists'.format(plugin.package_name,
+                                                                 plugin.package_version))
         self._install_wagon(source=source, prefix=self.get_plugin_dir(plugin))
         self._model.plugin.put(plugin)
         return plugin
@@ -120,8 +120,8 @@ class PluginManager(object):
         """
         if not zipfile.is_zipfile(source):
             raise exceptions.InvalidPluginError(
-                'Archive {0} is of an unsupported type. Only '
-                'zip/wgn is allowed'.format(source))
+                u'Archive {0} is of an unsupported type. Only '
+                u'zip/wgn is allowed'.format(source))
         with zipfile.ZipFile(source, 'r') as zip_file:
             infos = zip_file.infolist()
             try:
@@ -130,8 +130,8 @@ class PluginManager(object):
                 zip_file.getinfo(package_json_path)
             except (KeyError, ValueError, IndexError):
                 raise exceptions.InvalidPluginError(
-                    'Failed to validate plugin {0} '
-                    '(package.json was not found in archive)'.format(source))
+                    u'Failed to validate plugin {0} '
+                    u'(package.json was not found in archive)'.format(source))
 
     def _install_wagon(self, source, prefix):
         pip_freeze_output = self._pip_freeze()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/topology/instance_handler.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/topology/instance_handler.py b/aria/orchestrator/topology/instance_handler.py
index 3449414..fad00b9 100644
--- a/aria/orchestrator/topology/instance_handler.py
+++ b/aria/orchestrator/topology/instance_handler.py
@@ -34,18 +34,18 @@ class Artifact(common.InstanceHandlerBase):
             out_stream.write(out_stream.node_style(self._model.name))
             out_stream.write(out_stream.meta_style(self._model.description))
             with out_stream.indent():
-                out_stream.write('Artifact type: {0}'.format(out_stream.type_style(
+                out_stream.write(u'Artifact type: {0}'.format(out_stream.type_style(
                     self._model.type.name)))
-                out_stream.write('Source path: {0}'.format(
+                out_stream.write(u'Source path: {0}'.format(
                     out_stream.literal_style(self._model.source_path)))
                 if self._model.target_path is not None:
-                    out_stream.write('Target path: {0}'.format(
+                    out_stream.write(u'Target path: {0}'.format(
                         out_stream.literal_style(self._model.target_path)))
                 if self._model.repository_url is not None:
-                    out_stream.write('Repository URL: {0}'.format(
+                    out_stream.write(u'Repository URL: {0}'.format(
                         out_stream.literal_style(self._model.repository_url)))
                 if self._model.repository_credential:
-                    out_stream.write('Repository credential: {0}'.format(
+                    out_stream.write(u'Repository credential: {0}'.format(
                         out_stream.literal_style(self._model.repository_credential)))
                 self._topology.dump(self._model.properties, out_stream, title='Properties')
 
@@ -60,11 +60,11 @@ class Capability(common.InstanceHandlerBase):
     def dump(self, out_stream):
         out_stream.write(out_stream.node_style(self._model.name))
         with out_stream.indent():
-            out_stream.write('Type: {0}'.format(out_stream.type_style(self._model.type.name)))
-            out_stream.write('Occurrences: {0:d} ({1:d}{2})'.format(
+            out_stream.write(u'Type: {0}'.format(out_stream.type_style(self._model.type.name)))
+            out_stream.write(u'Occurrences: {0:d} ({1:d}{2})'.format(
                 self._model.occurrences,
                 self._model.min_occurrences or 0,
-                ' to {0:d}'.format(self._model.max_occurrences)
+                u' to {0:d}'.format(self._model.max_occurrences)
                 if self._model.max_occurrences is not None
                 else ' or more'))
             self._topology.dump(self._model.properties, out_stream, title='Properties')
@@ -81,9 +81,9 @@ class Group(common.ActorHandlerBase):
                        **kwargs)
 
     def dump(self, out_stream):
-        out_stream.write('Group: {0}'.format(out_stream.node_style(self._model.name)))
+        out_stream.write(u'Group: {0}'.format(out_stream.node_style(self._model.name)))
         with out_stream.indent():
-            out_stream.write('Type: {0}'.format(out_stream.type_style(self._model.type.name)))
+            out_stream.write(u'Type: {0}'.format(out_stream.type_style(self._model.type.name)))
             self._topology.dump(self._model.properties, out_stream, title='Properties')
             self._topology.dump(self._model.interfaces, out_stream, title='Interfaces')
             if self._model.nodes:
@@ -111,7 +111,7 @@ class Interface(common.ActorHandlerBase):
         if self._model.description:
             out_stream.write(out_stream.meta_style(self._model.description))
         with out_stream.indent():
-            out_stream.write('Interface type: {0}'.format(
+            out_stream.write(u'Interface type: {0}'.format(
                 out_stream.type_style(self._model.type.name)))
             self._topology.dump(self._model.inputs, out_stream, title='Inputs')
             self._topology.dump(self._model.operations, out_stream, title='Operations')
@@ -146,10 +146,10 @@ class Node(common.ActorHandlerBase):
                        self._model.outbound_relationships)
 
     def dump(self, out_stream):
-        out_stream.write('Node: {0}'.format(out_stream.node_style(self._model.name)))
+        out_stream.write(u'Node: {0}'.format(out_stream.node_style(self._model.name)))
         with out_stream.indent():
-            out_stream.write('Type: {0}'.format(out_stream.type_style(self._model.type.name)))
-            out_stream.write('Template: {0}'.format(
+            out_stream.write(u'Type: {0}'.format(out_stream.type_style(self._model.type.name)))
+            out_stream.write(u'Template: {0}'.format(
                 out_stream.node_style(self._model.node_template.name)))
             self._topology.dump(self._model.properties, out_stream, title='Properties')
             self._topology.dump(self._model.attributes, out_stream, title='Attributes')
@@ -359,28 +359,28 @@ class Operation(common.ActorHandlerBase):
             out_stream.write(out_stream.meta_style(self._model.description))
         with out_stream.indent():
             if self._model.implementation is not None:
-                out_stream.write('Implementation: {0}'.format(
+                out_stream.write(u'Implementation: {0}'.format(
                     out_stream.literal_style(self._model.implementation)))
             if self._model.dependencies:
                 out_stream.write(
-                    'Dependencies: {0}'.format(', '.join((str(out_stream.literal_style(v))
-                                                          for v in self._model.dependencies))))
+                    u'Dependencies: {0}'.format(u', '.join((str(out_stream.literal_style(v))
+                                                            for v in self._model.dependencies))))
             self._topology.dump(self._model.inputs, out_stream, title='Inputs')
             if self._model.executor is not None:
-                out_stream.write('Executor: {0}'.format(out_stream.literal_style(
+                out_stream.write(u'Executor: {0}'.format(out_stream.literal_style(
                     self._model.executor)))
             if self._model.max_attempts is not None:
-                out_stream.write('Max attempts: {0}'.format(out_stream.literal_style(
+                out_stream.write(u'Max attempts: {0}'.format(out_stream.literal_style(
                     self._model.max_attempts)))
             if self._model.retry_interval is not None:
-                out_stream.write('Retry interval: {0}'.format(
+                out_stream.write(u'Retry interval: {0}'.format(
                     out_stream.literal_style(self._model.retry_interval)))
             if self._model.plugin is not None:
-                out_stream.write('Plugin: {0}'.format(
+                out_stream.write(u'Plugin: {0}'.format(
                     out_stream.literal_style(self._model.plugin.name)))
             self._topology.dump(self._model.configurations, out_stream, title='Configuration')
             if self._model.function is not None:
-                out_stream.write('Function: {0}'.format(out_stream.literal_style(
+                out_stream.write(u'Function: {0}'.format(out_stream.literal_style(
                     self._model.function)))
             self._topology.dump(self._model.arguments, out_stream, title='Arguments')
 
@@ -431,9 +431,9 @@ class Policy(common.InstanceHandlerBase):
         self._topology.validate(self._model.properties, **kwargs)
 
     def dump(self, out_stream):
-        out_stream.write('Policy: {0}'.format(out_stream.node_style(self._model.name)))
+        out_stream.write(u'Policy: {0}'.format(out_stream.node_style(self._model.name)))
         with out_stream.indent():
-            out_stream.write('Type: {0}'.format(out_stream.type_style(self._model.type.name)))
+            out_stream.write(u'Type: {0}'.format(out_stream.type_style(self._model.type.name)))
             self._topology.dump(self._model.properties, out_stream, title='Properties')
             if self._model.nodes:
                 out_stream.write('Target nodes:')
@@ -460,21 +460,21 @@ class Relationship(common.ActorHandlerBase):
 
     def dump(self, out_stream):
         if self._model.name:
-            out_stream.write('{0} ->'.format(out_stream.node_style(self._model.name)))
+            out_stream.write(u'{0} ->'.format(out_stream.node_style(self._model.name)))
         else:
             out_stream.write('->')
         with out_stream.indent():
-            out_stream.write('Node: {0}'.format(out_stream.node_style(
+            out_stream.write(u'Node: {0}'.format(out_stream.node_style(
                 self._model.target_node.name)))
             if self._model.target_capability:
-                out_stream.write('Capability: {0}'.format(out_stream.node_style(
+                out_stream.write(u'Capability: {0}'.format(out_stream.node_style(
                     self._model.target_capability.name)))
             if self._model.type is not None:
-                out_stream.write('Relationship type: {0}'.format(
+                out_stream.write(u'Relationship type: {0}'.format(
                     out_stream.type_style(self._model.type.name)))
             if (self._model.relationship_template is not None and
                     self._model.relationship_template.name):
-                out_stream.write('Relationship template: {0}'.format(
+                out_stream.write(u'Relationship template: {0}'.format(
                     out_stream.node_style(self._model.relationship_template.name)))
             self._topology.dump(self._model.properties, out_stream, title='Properties')
             self._topology.dump(self._model.interfaces, out_stream, title='Interfaces')
@@ -549,7 +549,7 @@ class Substitution(common.InstanceHandlerBase):
     def dump(self, out_stream):
         out_stream.write('Substitution:')
         with out_stream.indent():
-            out_stream.write('Node type: {0}'.format(out_stream.type_style(
+            out_stream.write(u'Node type: {0}'.format(out_stream.type_style(
                 self._model.node_type.name)))
             self._topology.dump(self._model.mappings, out_stream, title='Mappings')
 
@@ -569,12 +569,12 @@ class SubstitutionMapping(common.InstanceHandlerBase):
 
     def dump(self, out_stream):
         if self._model.capability is not None:
-            out_stream.write('{0} -> {1}.{2}'.format(
+            out_stream.write(u'{0} -> {1}.{2}'.format(
                 out_stream.node_style(self._model.name),
                 out_stream.node_style(self._model.capability.node.name),
                 out_stream.node_style(self._model.capability.name)))
         else:
-            out_stream.write('{0} -> {1}.{2}'.format(
+            out_stream.write(u'{0} -> {1}.{2}'.format(
                 out_stream.node_style(self._model.name),
                 out_stream.node_style(self._model.node.name),
                 out_stream.node_style(self._model.requirement_template.name)))
@@ -583,7 +583,7 @@ class SubstitutionMapping(common.InstanceHandlerBase):
 class Metadata(common.InstanceHandlerBase):
 
     def dump(self, out_stream):
-        out_stream.write('{0}: {1}'.format(
+        out_stream.write(u'{0}: {1}'.format(
             out_stream.property_style(self._model.name),
             out_stream.literal_style(self._model.value)))
 
@@ -601,12 +601,12 @@ class _Parameter(common.InstanceHandlerBase):
 
     def dump(self, out_stream):
         if self._model.type_name is not None:
-            out_stream.write('{0}: {1} ({2})'.format(
+            out_stream.write(u'{0}: {1} ({2})'.format(
                 out_stream.property_style(self._model.name),
                 out_stream.literal_style(formatting.as_raw(self._model.value)),
                 out_stream.type_style(self._model.type_name)))
         else:
-            out_stream.write('{0}: {1}'.format(
+            out_stream.write(u'{0}: {1}'.format(
                 out_stream.property_style(self._model.name),
                 out_stream.literal_style(formatting.as_raw(self._model.value))))
         if self._model.description:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/topology/template_handler.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/topology/template_handler.py b/aria/orchestrator/topology/template_handler.py
index 067869d..3b1948a 100644
--- a/aria/orchestrator/topology/template_handler.py
+++ b/aria/orchestrator/topology/template_handler.py
@@ -151,18 +151,18 @@ class ArtifactTemplate(common.TemplateHandlerBase):
         if self._model.description:
             out_stream.write(out_stream.meta_style(self._model.description))
         with out_stream.indent():
-            out_stream.write('Artifact type: {0}'.format(out_stream.type_style(
+            out_stream.write(u'Artifact type: {0}'.format(out_stream.type_style(
                 self._model.type.name)))
-            out_stream.write('Source path: {0}'.format(out_stream.literal_style(
+            out_stream.write(u'Source path: {0}'.format(out_stream.literal_style(
                 self._model.source_path)))
             if self._model.target_path is not None:
-                out_stream.write('Target path: {0}'.format(out_stream.literal_style(
+                out_stream.write(u'Target path: {0}'.format(out_stream.literal_style(
                     self._model.target_path)))
             if self._model.repository_url is not None:
-                out_stream.write('Repository URL: {0}'.format(
+                out_stream.write(u'Repository URL: {0}'.format(
                     out_stream.literal_style(self._model.repository_url)))
             if self._model.repository_credential:
-                out_stream.write('Repository credential: {0}'.format(
+                out_stream.write(u'Repository credential: {0}'.format(
                     out_stream.literal_style(self._model.repository_credential)))
             self._topology.dump(self._model.properties, out_stream, title='Properties')
 
@@ -190,17 +190,17 @@ class CapabilityTemplate(common.TemplateHandlerBase):
         if self._model.description:
             out_stream.write(out_stream.meta_style(self._model.description))
         with out_stream.indent():
-            out_stream.write('Type: {0}'.format(out_stream.type_style(self._model.type.name)))
+            out_stream.write(u'Type: {0}'.format(out_stream.type_style(self._model.type.name)))
             out_stream.write(
-                'Occurrences: {0:d}{1}'.format(
+                u'Occurrences: {0:d}{1}'.format(
                     self._model.min_occurrences or 0,
-                    ' to {0:d}'.format(self._model.max_occurrences)
+                    u' to {0:d}'.format(self._model.max_occurrences)
                     if self._model.max_occurrences is not None
                     else ' or more'))
             if self._model.valid_source_node_types:
-                out_stream.write('Valid source node types: {0}'.format(
-                    ', '.join((str(out_stream.type_style(v.name))
-                               for v in self._model.valid_source_node_types))))
+                out_stream.write(u'Valid source node types: {0}'.format(
+                    u', '.join((str(out_stream.type_style(v.name))
+                                for v in self._model.valid_source_node_types))))
             self._topology.dump(self._model.properties, out_stream, title='Properties')
 
     def coerce(self, **kwargs):
@@ -229,19 +229,19 @@ class RequirementTemplate(common.TemplateHandlerBase):
             out_stream.write('Requirement:')
         with out_stream.indent():
             if self._model.target_node_type is not None:
-                out_stream.write('Target node type: {0}'.format(
+                out_stream.write(u'Target node type: {0}'.format(
                     out_stream.type_style(self._model.target_node_type.name)))
             elif self._model.target_node_template is not None:
-                out_stream.write('Target node template: {0}'.format(
+                out_stream.write(u'Target node template: {0}'.format(
                     out_stream.node_style(self._model.target_node_template.name)))
             if self._model.target_capability_type is not None:
-                out_stream.write('Target capability type: {0}'.format(
+                out_stream.write(u'Target capability type: {0}'.format(
                     out_stream.type_style(self._model.target_capability_type.name)))
             elif self._model.target_capability_name is not None:
-                out_stream.write('Target capability name: {0}'.format(
+                out_stream.write(u'Target capability name: {0}'.format(
                     out_stream.node_style(self._model.target_capability_name)))
             if self._model.target_node_template_constraints:
-                out_stream.write('Target node template constraints:')
+                out_stream.write(u'Target node template constraints:')
                 with out_stream.indent():
                     for constraint in self._model.target_node_template_constraints:
                         out_stream.write(out_stream.literal_style(constraint))
@@ -262,16 +262,16 @@ class RequirementTemplate(common.TemplateHandlerBase):
 
 class GroupTemplate(common.TemplateHandlerBase):
     def dump(self, out_stream):
-        out_stream.write('Group template: {0}'.format(out_stream.node_style(self._model.name)))
+        out_stream.write(u'Group template: {0}'.format(out_stream.node_style(self._model.name)))
         if self._model.description:
             out_stream.write(out_stream.meta_style(self._model.description))
         with out_stream.indent():
-            out_stream.write('Type: {0}'.format(out_stream.type_style(self._model.type.name)))
+            out_stream.write(u'Type: {0}'.format(out_stream.type_style(self._model.type.name)))
             self._topology.dump(self._model.properties, out_stream, title='Properties')
             self._topology.dump(self._model.interface_templates, out_stream,
                                 title='Interface Templates')
             if self._model.node_templates:
-                out_stream.write('Member node templates: {0}'.format(', '.join(
+                out_stream.write(u'Member node templates: {0}'.format(u', '.join(
                     (str(out_stream.node_style(v.name)) for v in self._model.node_templates))))
 
     def coerce(self, **kwargs):
@@ -304,7 +304,7 @@ class InterfaceTemplate(common.TemplateHandlerBase):
         if self._model.description:
             out_stream.write(out_stream.meta_style(self._model.description))
         with out_stream.indent():
-            out_stream.write('Interface type: {0}'.format(out_stream.type_style(
+            out_stream.write(u'Interface type: {0}'.format(out_stream.type_style(
                 self._model.type.name)))
             self._topology.dump(self._model.inputs, out_stream, title='Inputs')
             self._topology.dump(self._model.operation_templates, out_stream,
@@ -333,11 +333,11 @@ class InterfaceTemplate(common.TemplateHandlerBase):
 
 class NodeTemplate(common.TemplateHandlerBase):
     def dump(self, out_stream):
-        out_stream.write('Node template: {0}'.format(out_stream.node_style(self._model.name)))
+        out_stream.write(u'Node template: {0}'.format(out_stream.node_style(self._model.name)))
         with out_stream.indent():
             if self._model.description:
                 out_stream.write(out_stream.meta_style(self._model.description))
-            out_stream.write('Type: {0}'.format(out_stream.type_style(self._model.type.name)))
+            out_stream.write(u'Type: {0}'.format(out_stream.type_style(self._model.type.name)))
             self._topology.dump(self._model.properties, out_stream, title='Properties')
             self._topology.dump(self._model.attributes, out_stream, title='Attributes')
             self._topology.dump(
@@ -392,17 +392,17 @@ class NodeTemplate(common.TemplateHandlerBase):
 
 class PolicyTemplate(common.TemplateHandlerBase):
     def dump(self, out_stream):
-        out_stream.write('Policy template: {0}'.format(out_stream.node_style(self._model.name)))
+        out_stream.write(u'Policy template: {0}'.format(out_stream.node_style(self._model.name)))
         if self._model.description:
             out_stream.write(out_stream.meta_style(self._model.description))
         with out_stream.indent():
-            out_stream.write('Type: {0}'.format(out_stream.type_style(self._model.type.name)))
+            out_stream.write(u'Type: {0}'.format(out_stream.type_style(self._model.type.name)))
             self._topology.dump(self._model.properties, out_stream, title='Properties')
             if self._model.node_templates:
-                out_stream.write('Target node templates: {0}'.format(', '.join(
+                out_stream.write(u'Target node templates: {0}'.format(u', '.join(
                     (str(out_stream.node_style(v.name)) for v in self._model.node_templates))))
             if self._model.group_templates:
-                out_stream.write('Target group templates: {0}'.format(', '.join(
+                out_stream.write(u'Target group templates: {0}'.format(u', '.join(
                     (str(out_stream.node_style(v.name)) for v in self._model.group_templates))))
 
     def coerce(self, **kwargs):
@@ -433,7 +433,7 @@ class SubstitutionTemplate(common.TemplateHandlerBase):
     def dump(self, out_stream):
         out_stream.write('Substitution template:')
         with out_stream.indent():
-            out_stream.write('Node type: {0}'.format(out_stream.type_style(
+            out_stream.write(u'Node type: {0}'.format(out_stream.type_style(
                 self._model.node_type.name)))
             self._topology.dump(self._model.mappings, out_stream, title='Mappings')
 
@@ -454,7 +454,7 @@ class SubstitutionTemplateMapping(common.TemplateHandlerBase):
             node_template = self._model.capability_template.node_template
         else:
             node_template = self._model.requirement_template.node_template
-        out_stream.write('{0} -> {1}.{2}'.format(
+        out_stream.write(u'{0} -> {1}.{2}'.format(
             out_stream.node_style(self._model.name),
             out_stream.node_style(node_template.name),
             out_stream.node_style(self._model.capability_template.name
@@ -503,10 +503,10 @@ class SubstitutionTemplateMapping(common.TemplateHandlerBase):
 class RelationshipTemplate(common.TemplateHandlerBase):
     def dump(self, out_stream):
         if self._model.type is not None:
-            out_stream.write('Relationship type: {0}'.format(out_stream.type_style(
+            out_stream.write(u'Relationship type: {0}'.format(out_stream.type_style(
                 self._model.type.name)))
         else:
-            out_stream.write('Relationship template: {0}'.format(
+            out_stream.write(u'Relationship template: {0}'.format(
                 out_stream.node_style(self._model.name)))
         if self._model.description:
             out_stream.write(out_stream.meta_style(self._model.description))
@@ -540,27 +540,27 @@ class OperationTemplate(common.TemplateHandlerBase):
             out_stream.write(out_stream.meta_style(self._model.description))
         with out_stream.indent():
             if self._model.implementation is not None:
-                out_stream.write('Implementation: {0}'.format(
+                out_stream.write(u'Implementation: {0}'.format(
                     out_stream.literal_style(self._model.implementation)))
             if self._model.dependencies:
-                out_stream.write('Dependencies: {0}'.format(', '.join(
+                out_stream.write(u'Dependencies: {0}'.format(u', '.join(
                     (str(out_stream.literal_style(v)) for v in self._model.dependencies))))
             self._topology.dump(self._model.inputs, out_stream, title='Inputs')
             if self._model.executor is not None:
-                out_stream.write('Executor: {0}'.format(
+                out_stream.write(u'Executor: {0}'.format(
                     out_stream.literal_style(self._model.executor)))
             if self._model.max_attempts is not None:
-                out_stream.write('Max attempts: {0}'.format(out_stream.literal_style(
+                out_stream.write(u'Max attempts: {0}'.format(out_stream.literal_style(
                     self._model.max_attempts)))
             if self._model.retry_interval is not None:
-                out_stream.write('Retry interval: {0}'.format(
+                out_stream.write(u'Retry interval: {0}'.format(
                     out_stream.literal_style(self._model.retry_interval)))
             if self._model.plugin_specification is not None:
-                out_stream.write('Plugin specification: {0}'.format(
+                out_stream.write(u'Plugin specification: {0}'.format(
                     out_stream.literal_style(self._model.plugin_specification.name)))
             self._topology.dump(self._model.configurations, out_stream, title='Configuration')
             if self._model.function is not None:
-                out_stream.write('Function: {0}'.format(out_stream.literal_style(
+                out_stream.write(u'Function: {0}'.format(out_stream.literal_style(
                     self._model.function)))
 
     def coerce(self, **kwargs):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/topology/topology.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/topology/topology.py b/aria/orchestrator/topology/topology.py
index f86c9dd..ef5322e 100644
--- a/aria/orchestrator/topology/topology.py
+++ b/aria/orchestrator/topology/topology.py
@@ -104,7 +104,7 @@ class Topology(issue.ReporterMixin):
 
         # if model is empty, no need to print out the section name
         if model and title:
-            out_stream.write('{0}:'.format(title))
+            out_stream.write(u'{0}:'.format(title))
 
         if isinstance(model, dict):
             if str(out_stream):
@@ -133,18 +133,18 @@ class Topology(issue.ReporterMixin):
     def _dump_graph_node(self, out_stream, node, capability=None):
         out_stream.write(out_stream.node_style(node.name))
         if capability is not None:
-            out_stream.write('{0} ({1})'.format(out_stream.property_style(capability.name),
-                                                out_stream.type_style(capability.type.name)))
+            out_stream.write(u'{0} ({1})'.format(out_stream.property_style(capability.name),
+                                                 out_stream.type_style(capability.type.name)))
         if node.outbound_relationships:
             with out_stream.indent():
                 for relationship_model in node.outbound_relationships:
                     styled_relationship_name = out_stream.property_style(relationship_model.name)
                     if relationship_model.type is not None:
-                        out_stream.write('-> {0} ({1})'.format(
+                        out_stream.write(u'-> {0} ({1})'.format(
                             styled_relationship_name,
                             out_stream.type_style(relationship_model.type.name)))
                     else:
-                        out_stream.write('-> {0}'.format(styled_relationship_name))
+                        out_stream.write(u'-> {0}'.format(styled_relationship_name))
                     with out_stream.indent(3):
                         self._dump_graph_node(out_stream,
                                               relationship_model.target_node,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/workflow_runner.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflow_runner.py b/aria/orchestrator/workflow_runner.py
index 4dbf29b..276fdba 100644
--- a/aria/orchestrator/workflow_runner.py
+++ b/aria/orchestrator/workflow_runner.py
@@ -152,14 +152,14 @@ class WorkflowRunner(object):
         if self._workflow_name not in self.service.workflows and \
                         self._workflow_name not in builtin.BUILTIN_WORKFLOWS:
             raise exceptions.UndeclaredWorkflowError(
-                'No workflow policy {0} declared in service {1}'
+                u'No workflow policy {0} declared in service {1}'
                 .format(self._workflow_name, self.service.name))
 
     def _validate_no_active_executions(self, execution):
         active_executions = [e for e in self.service.executions if e.is_active()]
         if active_executions:
             raise exceptions.ActiveExecutionsError(
-                "Can't start execution; Service {0} has an active execution with ID {1}"
+                u"Can't start execution; Service {0} has an active execution with ID {1}"
                 .format(self.service.name, active_executions[0].id))
 
     def _get_workflow_fn(self):
@@ -182,7 +182,7 @@ class WorkflowRunner(object):
             workflow_fn = import_fullname(workflow.function)
         except ImportError:
             raise exceptions.WorkflowImplementationNotFoundError(
-                'Could not find workflow {0} function at {1}'.format(
+                u'Could not find workflow {0} function at {1}'.format(
                     self._workflow_name, workflow.function))
 
         return workflow_fn

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/workflows/api/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task.py b/aria/orchestrator/workflows/api/task.py
index 6ce4a00..67adc0b 100644
--- a/aria/orchestrator/workflows/api/task.py
+++ b/aria/orchestrator/workflows/api/task.py
@@ -78,7 +78,7 @@ class OperationTask(BaseTask):
     :vartype retry_interval: float
     """
 
-    NAME_FORMAT = '{interface}:{operation}@{type}:{name}'
+    NAME_FORMAT = u'{interface}:{operation}@{type}:{name}'
 
     def __init__(self,
                  actor,
@@ -112,8 +112,8 @@ class OperationTask(BaseTask):
         # interface/operation.
         if not has_operation(actor, interface_name, operation_name):
             raise exceptions.OperationNotFoundException(
-                'Could not find operation "{operation_name}" on interface '
-                '"{interface_name}" for {actor_type} "{actor.name}"'.format(
+                u'Could not find operation "{operation_name}" on interface '
+                u'"{interface_name}" for {actor_type} "{actor.name}"'.format(
                     operation_name=operation_name,
                     interface_name=interface_name,
                     actor_type=type(actor).__name__.lower(),
@@ -149,8 +149,8 @@ class OperationTask(BaseTask):
         elif isinstance(actor, models.Relationship):
             self._context_cls = context.operation.RelationshipOperationContext
         else:
-            raise exceptions.TaskCreationException('Could not create valid context for '
-                                                   '{actor.__class__}'.format(actor=actor))
+            raise exceptions.TaskCreationException(u'Could not create valid context for '
+                                                   u'{actor.__class__}'.format(actor=actor))
 
     def __repr__(self):
         return self.name

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/workflows/api/task_graph.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task_graph.py b/aria/orchestrator/workflows/api/task_graph.py
index 900a0d1..20bf1bd 100644
--- a/aria/orchestrator/workflows/api/task_graph.py
+++ b/aria/orchestrator/workflows/api/task_graph.py
@@ -52,7 +52,7 @@ class TaskGraph(object):
         self._graph = DiGraph()
 
     def __repr__(self):
-        return '{name}(id={self._id}, name={self.name}, graph={self._graph!r})'.format(
+        return u'{name}(id={self._id}, name={self.name}, graph={self._graph!r})'.format(            # pylint: disable=redundant-keyword-arg
             name=self.__class__.__name__, self=self)
 
     @property
@@ -91,7 +91,7 @@ class TaskGraph(object):
          ``dependent_task`` is not in the graph
         """
         if not self.has_tasks(dependent_task):
-            raise TaskNotInGraphError('Task id: {0}'.format(dependent_task.id))
+            raise TaskNotInGraphError(u'Task id: {0}'.format(dependent_task.id))
         for _, dependency_id in self._graph.out_edges_iter(dependent_task.id):
             yield self.get_task(dependency_id)
 
@@ -104,7 +104,7 @@ class TaskGraph(object):
          ``dependency_task`` is not in the graph
         """
         if not self.has_tasks(dependency_task):
-            raise TaskNotInGraphError('Task id: {0}'.format(dependency_task.id))
+            raise TaskNotInGraphError(u'Task id: {0}'.format(dependency_task.id))
         for dependent_id, _ in self._graph.in_edges_iter(dependency_task.id):
             yield self.get_task(dependent_id)
 
@@ -119,7 +119,7 @@ class TaskGraph(object):
          the graph with the given ID
         """
         if not self._graph.has_node(task_id):
-            raise TaskNotInGraphError('Task id: {0}'.format(task_id))
+            raise TaskNotInGraphError(u'Task id: {0}'.format(task_id))
         data = self._graph.node[task_id]
         return data['task']
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/workflows/builtin/execute_operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/execute_operation.py b/aria/orchestrator/workflows/builtin/execute_operation.py
index 949f864..256927c 100644
--- a/aria/orchestrator/workflows/builtin/execute_operation.py
+++ b/aria/orchestrator/workflows/builtin/execute_operation.py
@@ -60,7 +60,7 @@ def execute_operation(
         for node in ctx.nodes:
             if node.id not in filtered_node_ids:
                 subgraphs[node.id] = ctx.task_graph(
-                    name='execute_operation_stub_{0}'.format(node.id))
+                    name=u'execute_operation_stub_{0}'.format(node.id))
 
     # registering actual tasks to sequences
     for node in filtered_nodes:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/workflows/core/engine.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/engine.py b/aria/orchestrator/workflows/core/engine.py
index 0ec3cd8..71ef13a 100644
--- a/aria/orchestrator/workflows/core/engine.py
+++ b/aria/orchestrator/workflows/core/engine.py
@@ -28,7 +28,7 @@ from aria.orchestrator.context import operation
 from .. import exceptions
 from ..executor.base import StubTaskExecutor
 # Import required so all signals are registered
-from . import events_handler  # pylint: disable=unused-import
+from . import events_handler                                                                        # pylint: disable=unused-import
 
 
 class Engine(logger.LoggerMixin):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/workflows/core/events_handler.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/events_handler.py b/aria/orchestrator/workflows/core/events_handler.py
index 473475e..067d0c3 100644
--- a/aria/orchestrator/workflows/core/events_handler.py
+++ b/aria/orchestrator/workflows/core/events_handler.py
@@ -166,5 +166,5 @@ def _update_node_state_if_necessary(ctx, is_transitional=False):
 
 def _log_tried_to_cancel_execution_but_it_already_ended(workflow_context, status):
     workflow_context.logger.info(
-        "'{workflow_name}' workflow execution {status} before the cancel request"
-        "was fully processed".format(workflow_name=workflow_context.workflow_name, status=status))
+        u"'{workflow_name}' workflow execution {status} before the cancel request "
+        u"was fully processed".format(workflow_name=workflow_context.workflow_name, status=status))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/workflows/core/graph_compiler.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/graph_compiler.py b/aria/orchestrator/workflows/core/graph_compiler.py
index 81543d5..83fbfea 100644
--- a/aria/orchestrator/workflows/core/graph_compiler.py
+++ b/aria/orchestrator/workflows/core/graph_compiler.py
@@ -90,11 +90,11 @@ class GraphCompiler(object):
 
     @staticmethod
     def _start_graph_suffix(api_id):
-        return '{0}-Start'.format(api_id)
+        return u'{0}-Start'.format(api_id)
 
     @staticmethod
     def _end_graph_suffix(api_id):
-        return '{0}-End'.format(api_id)
+        return u'{0}-End'.format(api_id)
 
     @staticmethod
     def _get_non_dependent_tasks(execution):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/workflows/events_logging.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/events_logging.py b/aria/orchestrator/workflows/events_logging.py
index 9eee1e1..1099091 100644
--- a/aria/orchestrator/workflows/events_logging.py
+++ b/aria/orchestrator/workflows/events_logging.py
@@ -24,7 +24,7 @@ from ... import modeling
 
 def _get_task_name(task):
     if isinstance(task.actor, modeling.model_bases.service_instance.RelationshipBase):
-        return '{source_node.name}->{target_node.name}'.format(
+        return u'{source_node.name}->{target_node.name}'.format(
             source_node=task.actor.source_node, target_node=task.actor.target_node)
     else:
         return task.actor.name
@@ -40,7 +40,7 @@ def _start_task_handler(ctx, **kwargs):
         suffix = 'has no implementation'
         logger = ctx.logger.debug
 
-    logger('{name} {task.interface_name}.{task.operation_name} {suffix}'.format(
+    logger(u'{name} {task.interface_name}.{task.operation_name} {suffix}'.format(
         name=_get_task_name(ctx.task), task=ctx.task, suffix=suffix))
 
 
@@ -48,38 +48,38 @@ def _start_task_handler(ctx, **kwargs):
 def _success_task_handler(ctx, **kwargs):
     if not ctx.task.function:
         return
-    ctx.logger.info('{name} {task.interface_name}.{task.operation_name} successful'
+    ctx.logger.info(u'{name} {task.interface_name}.{task.operation_name} successful'
                     .format(name=_get_task_name(ctx.task), task=ctx.task))
 
 
 @events.on_failure_task_signal.connect
 def _failure_operation_handler(ctx, traceback, **kwargs):
     ctx.logger.error(
-        '{name} {task.interface_name}.{task.operation_name} failed'
+        u'{name} {task.interface_name}.{task.operation_name} failed'
         .format(name=_get_task_name(ctx.task), task=ctx.task), extra=dict(traceback=traceback)
     )
 
 
 @events.start_workflow_signal.connect
 def _start_workflow_handler(context, **kwargs):
-    context.logger.info("Starting '{ctx.workflow_name}' workflow execution".format(ctx=context))
+    context.logger.info(u"Starting '{ctx.workflow_name}' workflow execution".format(ctx=context))
 
 
 @events.on_failure_workflow_signal.connect
 def _failure_workflow_handler(context, **kwargs):
-    context.logger.info("'{ctx.workflow_name}' workflow execution failed".format(ctx=context))
+    context.logger.info(u"'{ctx.workflow_name}' workflow execution failed".format(ctx=context))
 
 
 @events.on_success_workflow_signal.connect
 def _success_workflow_handler(context, **kwargs):
-    context.logger.info("'{ctx.workflow_name}' workflow execution succeeded".format(ctx=context))
+    context.logger.info(u"'{ctx.workflow_name}' workflow execution succeeded".format(ctx=context))
 
 
 @events.on_cancelled_workflow_signal.connect
 def _cancel_workflow_handler(context, **kwargs):
-    context.logger.info("'{ctx.workflow_name}' workflow execution canceled".format(ctx=context))
+    context.logger.info(u"'{ctx.workflow_name}' workflow execution canceled".format(ctx=context))
 
 
 @events.on_cancelling_workflow_signal.connect
 def _cancelling_workflow_handler(context, **kwargs):
-    context.logger.info("Cancelling '{ctx.workflow_name}' workflow execution".format(ctx=context))
+    context.logger.info(u"Cancelling '{ctx.workflow_name}' workflow execution".format(ctx=context))
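
The handlers above are plain functions wired to orchestration signals via .connect, so the same hook points are available to user code. A rough usage sketch, assuming the signals are importable as aria.orchestrator.workflows.events (the import path is not shown in this diff) and that handlers receive the workflow context exactly as the built-in handlers above do:

    from aria.orchestrator.workflows import events   # assumed module path

    @events.on_success_workflow_signal.connect
    def audit_success(context, **kwargs):
        # Same calling convention as the built-in handlers: the emitter passes the
        # workflow context, which exposes workflow_name and a logger.
        context.logger.info(u"audited: '{0}' succeeded".format(context.workflow_name))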

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/workflows/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/exceptions.py b/aria/orchestrator/workflows/exceptions.py
index 2a1d6b1..6fce81c 100644
--- a/aria/orchestrator/workflows/exceptions.py
+++ b/aria/orchestrator/workflows/exceptions.py
@@ -55,10 +55,10 @@ class ProcessException(ExecutorException):
         Describes the error in detail
         """
         return (
-            'Command "{error.command}" executed with an error.{0}'
-            'code: {error.return_code}{0}'
-            'error: {error.stderr}{0}'
-            'output: {error.stdout}'.format(os.linesep, error=self))
+            u'Command "{error.command}" executed with an error.{0}'
+            u'code: {error.return_code}{0}'
+            u'error: {error.stderr}{0}'
+            u'output: {error.stdout}'.format(os.linesep, error=self))
 
 
 class AriaEngineError(exceptions.AriaError):
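
One detail worth noting in the hunk above: a single positional argument ({0}, bound to os.linesep) is reused for every line break, while the named field pulls attributes off the exception itself. A small standalone sketch of that pattern (the stub error class is invented for illustration and is not ARIA's):

    import os

    class _StubError(object):
        # Stand-in with the attributes the template expects.
        command = 'run.sh'
        return_code = 1
        stderr = 'boom'
        stdout = ''

    message = (
        u'Command "{error.command}" executed with an error.{0}'
        u'code: {error.return_code}{0}'
        u'error: {error.stderr}{0}'
        u'output: {error.stdout}'.format(os.linesep, error=_StubError()))
    print(message)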

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/workflows/executor/celery.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/celery.py b/aria/orchestrator/workflows/executor/celery.py
index a2b3513..aab84ec 100644
--- a/aria/orchestrator/workflows/executor/celery.py
+++ b/aria/orchestrator/workflows/executor/celery.py
@@ -89,7 +89,7 @@ class CeleryExecutor(BaseExecutor):
             exception = async_result.result
         except BaseException as e:
             exception = RuntimeError(
-                'Could not de-serialize exception of task {0} --> {1}: {2}'
+                u'Could not de-serialize exception of task {0} --> {1}: {2}'
                 .format(task.name, type(e).__name__, str(e)))
         self._task_failed(task, exception=exception)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/workflows/executor/dry.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/dry.py b/aria/orchestrator/workflows/executor/dry.py
index 9314e5d..bdb0eaf 100644
--- a/aria/orchestrator/workflows/executor/dry.py
+++ b/aria/orchestrator/workflows/executor/dry.py
@@ -22,7 +22,7 @@ from datetime import datetime
 from . import base
 
 
-class DryExecutor(base.BaseExecutor):                                                                    # pylint: disable=abstract-method
+class DryExecutor(base.BaseExecutor):                                                               # pylint: disable=abstract-method
     """
     Dry task executor: prints task information without causing any side effects.
     """
@@ -33,11 +33,11 @@ class DryExecutor(base.BaseExecutor):
             ctx.task.started_at = datetime.utcnow()
             ctx.task.status = ctx.task.STARTED
 
-            dry_msg = '<dry> {name} {task.interface_name}.{task.operation_name} {suffix}'
+            dry_msg = u'<dry> {name} {task.interface_name}.{task.operation_name} {suffix}'
             logger = ctx.logger.info if ctx.task.function else ctx.logger.debug
 
             if hasattr(ctx.task.actor, 'source_node'):
-                name = '{source_node.name}->{target_node.name}'.format(
+                name = u'{source_node.name}->{target_node.name}'.format(
                     source_node=ctx.task.actor.source_node, target_node=ctx.task.actor.target_node)
             else:
                 name = ctx.task.actor.name

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/workflows/executor/process.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/process.py b/aria/orchestrator/workflows/executor/process.py
index 185f15f..f28db8d 100644
--- a/aria/orchestrator/workflows/executor/process.py
+++ b/aria/orchestrator/workflows/executor/process.py
@@ -201,11 +201,11 @@ class ProcessExecutor(base.BaseExecutor):
                         break
                     request_handler = self._request_handlers.get(request_type)
                     if not request_handler:
-                        raise RuntimeError('Invalid request type: {0}'.format(request_type))
+                        raise RuntimeError(u'Invalid request type: {0}'.format(request_type))
                     task_id = request['task_id']
                     request_handler(task_id=task_id, request=request, response=response)
             except BaseException as e:
-                self.logger.debug('Error in process executor listener: {0}'.format(e))
+                self.logger.debug(u'Error in process executor listener: {0}'.format(e))
 
     @contextlib.contextmanager
     def _accept_request(self):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/fa116d1d/aria/orchestrator/workflows/executor/thread.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/thread.py b/aria/orchestrator/workflows/executor/thread.py
index 170620e..5786a04 100644
--- a/aria/orchestrator/workflows/executor/thread.py
+++ b/aria/orchestrator/workflows/executor/thread.py
@@ -43,7 +43,7 @@ class ThreadExecutor(BaseExecutor):
         self._queue = Queue.Queue()
         self._pool = []
         for i in range(pool_size):
-            name = 'ThreadExecutor-{index}'.format(index=i+1)
+            name = 'ThreadExecutor-{0:d}'.format(i+1)
             thread = threading.Thread(target=self._processor, name=name)
             thread.daemon = True
             thread.start()


