ariatosca-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From arthurbere...@apache.org
Subject [64/70] incubator-ariatosca-website git commit: Adding Sphinx based Docs minisite
Date Wed, 06 Sep 2017 17:49:13 GMT
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/modeling/types.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/modeling/types.py b/apache-ariatosca-0.1.1/aria/modeling/types.py
deleted file mode 100644
index c34326e..0000000
--- a/apache-ariatosca-0.1.1/aria/modeling/types.py
+++ /dev/null
@@ -1,318 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Allows JSON-serializable collections to be used as SQLAlchemy column types.
-"""
-
-import json
-from collections import namedtuple
-
-from sqlalchemy import (
-    TypeDecorator,
-    VARCHAR,
-    event
-)
-from sqlalchemy.ext import mutable
-from ruamel import yaml
-
-from . import exceptions
-
-
-class _MutableType(TypeDecorator):
-    """
-    Dict representation of type.
-    """
-    @property
-    def python_type(self):
-        raise NotImplementedError
-
-    def process_literal_param(self, value, dialect):
-        pass
-
-    impl = VARCHAR
-
-    def process_bind_param(self, value, dialect):
-        if value is not None:
-            value = json.dumps(value)
-        return value
-
-    def process_result_value(self, value, dialect):
-        if value is not None:
-            value = json.loads(value)
-        return value
-
-
-class Dict(_MutableType):
-    """
-    JSON-serializable dict type for SQLAlchemy columns.
-    """
-    @property
-    def python_type(self):
-        return dict
-
-
-class List(_MutableType):
-    """
-    JSON-serializable list type for SQLAlchemy columns.
-    """
-    @property
-    def python_type(self):
-        return list
-
-
-class _StrictDictMixin(object):
-
-    @classmethod
-    def coerce(cls, key, value):
-        """
-        Convert plain dictionaries to MutableDict.
-        """
-        try:
-            if not isinstance(value, cls):
-                if isinstance(value, dict):
-                    for k, v in value.items():
-                        cls._assert_strict_key(k)
-                        cls._assert_strict_value(v)
-                    return cls(value)
-                return mutable.MutableDict.coerce(key, value)
-            else:
-                return value
-        except ValueError as e:
-            raise exceptions.ValueFormatException('could not coerce to MutableDict', cause=e)
-
-    def __setitem__(self, key, value):
-        self._assert_strict_key(key)
-        self._assert_strict_value(value)
-        super(_StrictDictMixin, self).__setitem__(key, value)
-
-    def setdefault(self, key, value):
-        self._assert_strict_key(key)
-        self._assert_strict_value(value)
-        super(_StrictDictMixin, self).setdefault(key, value)
-
-    def update(self, *args, **kwargs):
-        for k, v in kwargs.items():
-            self._assert_strict_key(k)
-            self._assert_strict_value(v)
-        super(_StrictDictMixin, self).update(*args, **kwargs)
-
-    @classmethod
-    def _assert_strict_key(cls, key):
-        if cls._key_cls is not None and not isinstance(key, cls._key_cls):
-            raise exceptions.ValueFormatException('key type was set strictly to {0}, but was {1}'
-                                                  .format(cls._key_cls, type(key)))
-
-    @classmethod
-    def _assert_strict_value(cls, value):
-        if cls._value_cls is not None and not isinstance(value, cls._value_cls):
-            raise exceptions.ValueFormatException('value type was set strictly to {0}, but was {1}'
-                                                  .format(cls._value_cls, type(value)))
-
-
-class _MutableDict(mutable.MutableDict):
-    """
-    Enables tracking for dict values.
-    """
-
-    @classmethod
-    def coerce(cls, key, value):
-        """
-        Convert plain dictionaries to MutableDict.
-        """
-        try:
-            return mutable.MutableDict.coerce(key, value)
-        except ValueError as e:
-            raise exceptions.ValueFormatException('could not coerce value', cause=e)
-
-
-class _StrictListMixin(object):
-
-    @classmethod
-    def coerce(cls, key, value):
-        "Convert plain dictionaries to MutableDict."
-        try:
-            if not isinstance(value, cls):
-                if isinstance(value, list):
-                    for item in value:
-                        cls._assert_item(item)
-                    return cls(value)
-                return mutable.MutableList.coerce(key, value)
-            else:
-                return value
-        except ValueError as e:
-            raise exceptions.ValueFormatException('could not coerce to MutableDict', cause=e)
-
-    def __setitem__(self, index, value):
-        """
-        Detect list set events and emit change events.
-        """
-        self._assert_item(value)
-        super(_StrictListMixin, self).__setitem__(index, value)
-
-    def append(self, item):
-        self._assert_item(item)
-        super(_StrictListMixin, self).append(item)
-
-    def extend(self, item):
-        self._assert_item(item)
-        super(_StrictListMixin, self).extend(item)
-
-    def insert(self, index, item):
-        self._assert_item(item)
-        super(_StrictListMixin, self).insert(index, item)
-
-    @classmethod
-    def _assert_item(cls, item):
-        if cls._item_cls is not None and not isinstance(item, cls._item_cls):
-            raise exceptions.ValueFormatException('key type was set strictly to {0}, but was {1}'
-                                                  .format(cls._item_cls, type(item)))
-
-
-class _MutableList(mutable.MutableList):
-
-    @classmethod
-    def coerce(cls, key, value):
-        """
-        Convert plain dictionaries to MutableDict.
-        """
-        try:
-            return mutable.MutableList.coerce(key, value)
-        except ValueError as e:
-            raise exceptions.ValueFormatException('could not coerce to MutableDict', cause=e)
-
-
-_StrictDictID = namedtuple('_StrictDictID', 'key_cls, value_cls')
-_StrictValue = namedtuple('_StrictValue', 'type_cls, listener_cls')
-
-class _StrictDict(object):
-    """
-    This entire class functions as a factory for strict dicts and their listeners. No type class,
-    and no listener type class is created more than once. If a relevant type class exists it is
-    returned.
-    """
-    _strict_map = {}
-
-    def __call__(self, key_cls=None, value_cls=None):
-        strict_dict_map_key = _StrictDictID(key_cls=key_cls, value_cls=value_cls)
-        if strict_dict_map_key not in self._strict_map:
-            key_cls_name = getattr(key_cls, '__name__', str(key_cls))
-            value_cls_name = getattr(value_cls, '__name__', str(value_cls))
-            # Creating the type class itself. this class would be returned (used by the SQLAlchemy
-            # Column).
-            strict_dict_cls = type(
-                'StrictDict_{0}_{1}'.format(key_cls_name, value_cls_name),
-                (Dict, ),
-                {}
-            )
-            # Creating the type listening class.
-            # The new class inherits from both the _MutableDict class and the _StrictDictMixin,
-            # while setting the necessary _key_cls and _value_cls as class attributes.
-            listener_cls = type(
-                'StrictMutableDict_{0}_{1}'.format(key_cls_name, value_cls_name),
-                (_StrictDictMixin, _MutableDict),
-                {'_key_cls': key_cls, '_value_cls': value_cls}
-            )
-            yaml.representer.RoundTripRepresenter.add_representer(
-                listener_cls, yaml.representer.RoundTripRepresenter.represent_list)
-            self._strict_map[strict_dict_map_key] = _StrictValue(type_cls=strict_dict_cls,
-                                                                 listener_cls=listener_cls)
-
-        return self._strict_map[strict_dict_map_key].type_cls
-
-
-StrictDict = _StrictDict()
-"""
-JSON-serializable strict dict type for SQLAlchemy columns.
-
-:param key_cls:
-:param value_cls:
-"""
-
-
-class _StrictList(object):
-    """
-    This entire class functions as a factory for strict lists and their listeners. No type class,
-    and no listener type class is created more than once. If a relevant type class exists it is
-    returned.
-    """
-    _strict_map = {}
-
-    def __call__(self, item_cls=None):
-
-        if item_cls not in self._strict_map:
-            item_cls_name = getattr(item_cls, '__name__', str(item_cls))
-            # Creating the type class itself. this class would be returned (used by the SQLAlchemy
-            # Column).
-            strict_list_cls = type(
-                'StrictList_{0}'.format(item_cls_name),
-                (List, ),
-                {}
-            )
-            # Creating the type listening class.
-            # The new class inherits from both the _MutableList class and the _StrictListMixin,
-            # while setting the necessary _item_cls as class attribute.
-            listener_cls = type(
-                'StrictMutableList_{0}'.format(item_cls_name),
-                (_StrictListMixin, _MutableList),
-                {'_item_cls': item_cls}
-            )
-            yaml.representer.RoundTripRepresenter.add_representer(
-                listener_cls, yaml.representer.RoundTripRepresenter.represent_list)
-            self._strict_map[item_cls] = _StrictValue(type_cls=strict_list_cls,
-                                                      listener_cls=listener_cls)
-
-        return self._strict_map[item_cls].type_cls
-
-
-StrictList = _StrictList()
-"""
-JSON-serializable strict list type for SQLAlchemy columns.
-
-:param item_cls:
-"""
-
-
-def _mutable_association_listener(mapper, cls):
-    strict_dict_type_to_listener = \
-        dict((v.type_cls, v.listener_cls) for v in _StrictDict._strict_map.values())
-
-    strict_list_type_to_listener = \
-        dict((v.type_cls, v.listener_cls) for v in _StrictList._strict_map.values())
-
-    for prop in mapper.column_attrs:
-        column_type = prop.columns[0].type
-        # Dict Listeners
-        if type(column_type) in strict_dict_type_to_listener:                                       # pylint: disable=unidiomatic-typecheck
-            strict_dict_type_to_listener[type(column_type)].associate_with_attribute(
-                getattr(cls, prop.key))
-        elif isinstance(column_type, Dict):
-            _MutableDict.associate_with_attribute(getattr(cls, prop.key))
-
-        # List Listeners
-        if type(column_type) in strict_list_type_to_listener:                                       # pylint: disable=unidiomatic-typecheck
-            strict_list_type_to_listener[type(column_type)].associate_with_attribute(
-                getattr(cls, prop.key))
-        elif isinstance(column_type, List):
-            _MutableList.associate_with_attribute(getattr(cls, prop.key))
-
-
-_LISTENER_ARGS = (mutable.mapper, 'mapper_configured', _mutable_association_listener)
-
-
-def _register_mutable_association_listener():
-    event.listen(*_LISTENER_ARGS)
-
-_register_mutable_association_listener()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/modeling/utils.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/modeling/utils.py b/apache-ariatosca-0.1.1/aria/modeling/utils.py
deleted file mode 100644
index 5193cd9..0000000
--- a/apache-ariatosca-0.1.1/aria/modeling/utils.py
+++ /dev/null
@@ -1,235 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Miscellaneous modeling utilities.
-"""
-
-import os
-from json import JSONEncoder
-from StringIO import StringIO
-
-from . import exceptions
-from ..parser.consumption import ConsumptionContext
-from ..utils.console import puts
-from ..utils.type import validate_value_type
-from ..utils.collections import OrderedDict
-from ..utils.formatting import string_list_as_string
-
-
-class ModelJSONEncoder(JSONEncoder):
-    """
-    JSON encoder that automatically unwraps ``value`` attributes.
-    """
-    def __init__(self, *args, **kwargs):
-        # Just here to make sure Sphinx doesn't grab the base constructor's docstring
-        super(ModelJSONEncoder, self).__init__(*args, **kwargs)
-
-    def default(self, o):  # pylint: disable=method-hidden
-        from .mixins import ModelMixin
-        if isinstance(o, ModelMixin):
-            if hasattr(o, 'value'):
-                dict_to_return = o.to_dict(fields=('value',))
-                return dict_to_return['value']
-            else:
-                return o.to_dict()
-        else:
-            return JSONEncoder.default(self, o)
-
-
-class NodeTemplateContainerHolder(object):
-    """
-    Wrapper that allows using a :class:`~aria.modeling.models.NodeTemplate` model directly as the
-    ``container_holder`` input for :func:`~aria.modeling.functions.evaluate`.
-    """
-
-    def __init__(self, node_template):
-        self.container = node_template
-        self.service = None
-
-    @property
-    def service_template(self):
-        return self.container.service_template
-
-
-def merge_parameter_values(parameter_values, declared_parameters, model_cls):
-    """
-    Merges parameter values according to those declared by a type.
-
-    Exceptions will be raised for validation errors.
-
-    :param parameter_values: provided parameter values or None
-    :type parameter_values: {:obj:`basestring`: object}
-    :param declared_parameters: declared parameters
-    :type declared_parameters: {:obj:`basestring`: :class:`~aria.modeling.models.Parameter`}
-    :return: the merged parameters
-    :rtype: {:obj:`basestring`: :class:`~aria.modeling.models.Parameter`}
-    :raises ~aria.modeling.exceptions.UndeclaredParametersException: if a key in
-     ``parameter_values`` does not exist in ``declared_parameters``
-    :raises ~aria.modeling.exceptions.MissingRequiredParametersException: if a key in
-     ``declared_parameters`` does not exist in ``parameter_values`` and also has no default value
-    :raises ~aria.modeling.exceptions.ParametersOfWrongTypeException: if a value in
-      ``parameter_values`` does not match its type in ``declared_parameters``
-    """
-
-    parameter_values = parameter_values or {}
-
-    undeclared_names = list(set(parameter_values.keys()).difference(declared_parameters.keys()))
-    if undeclared_names:
-        raise exceptions.UndeclaredParametersException(
-            'Undeclared parameters have been provided: {0}; Declared: {1}'
-            .format(string_list_as_string(undeclared_names),
-                    string_list_as_string(declared_parameters.keys())))
-
-    parameters = OrderedDict()
-
-    missing_names = []
-    wrong_type_values = OrderedDict()
-    for declared_parameter_name, declared_parameter in declared_parameters.iteritems():
-        if declared_parameter_name in parameter_values:
-            # Value has been provided
-            value = parameter_values[declared_parameter_name]
-
-            # Validate type
-            type_name = declared_parameter.type_name
-            try:
-                validate_value_type(value, type_name)
-            except ValueError:
-                wrong_type_values[declared_parameter_name] = type_name
-            except RuntimeError:
-                # TODO: This error shouldn't be raised (or caught), but right now we lack support
-                # for custom data_types, which will raise this error. Skipping their validation.
-                pass
-
-            # Wrap in Parameter model
-            parameters[declared_parameter_name] = model_cls( # pylint: disable=unexpected-keyword-arg
-                name=declared_parameter_name,
-                type_name=type_name,
-                description=declared_parameter.description,
-                value=value)
-        elif declared_parameter.value is not None:
-            # Copy default value from declaration
-            parameters[declared_parameter_name] = declared_parameter.instantiate(None)
-        else:
-            # Required value has not been provided
-            missing_names.append(declared_parameter_name)
-
-    if missing_names:
-        raise exceptions.MissingRequiredParametersException(
-            'Declared parameters {0} have not been provided values'
-            .format(string_list_as_string(missing_names)))
-
-    if wrong_type_values:
-        error_message = StringIO()
-        for param_name, param_type in wrong_type_values.iteritems():
-            error_message.write('Parameter "{0}" is not of declared type "{1}"{2}'
-                                .format(param_name, param_type, os.linesep))
-        raise exceptions.ParametersOfWrongTypeException(error_message.getvalue())
-
-    return parameters
-
-
-def coerce_dict_values(the_dict, report_issues=False):
-    if not the_dict:
-        return
-    coerce_list_values(the_dict.itervalues(), report_issues)
-
-
-def coerce_list_values(the_list, report_issues=False):
-    if not the_list:
-        return
-    for value in the_list:
-        value.coerce_values(report_issues)
-
-
-def validate_dict_values(the_dict):
-    if not the_dict:
-        return
-    validate_list_values(the_dict.itervalues())
-
-
-def validate_list_values(the_list):
-    if not the_list:
-        return
-    for value in the_list:
-        value.validate()
-
-
-def instantiate_dict(container, the_dict, from_dict):
-    if not from_dict:
-        return
-    for name, value in from_dict.iteritems():
-        value = value.instantiate(container)
-        if value is not None:
-            the_dict[name] = value
-
-
-def instantiate_list(container, the_list, from_list):
-    if not from_list:
-        return
-    for value in from_list:
-        value = value.instantiate(container)
-        if value is not None:
-            the_list.append(value)
-
-
-def dump_list_values(the_list, name):
-    if not the_list:
-        return
-    puts('%s:' % name)
-    context = ConsumptionContext.get_thread_local()
-    with context.style.indent:
-        for value in the_list:
-            value.dump()
-
-
-def dump_dict_values(the_dict, name):
-    if not the_dict:
-        return
-    dump_list_values(the_dict.itervalues(), name)
-
-
-def dump_interfaces(interfaces, name='Interfaces'):
-    if not interfaces:
-        return
-    puts('%s:' % name)
-    context = ConsumptionContext.get_thread_local()
-    with context.style.indent:
-        for interface in interfaces.itervalues():
-            interface.dump()
-
-
-class classproperty(object):                                                                        # pylint: disable=invalid-name
-    def __init__(self, f):
-        self._func = f
-        self.__doct__ = f.__doc__
-
-    def __get__(self, instance, owner):
-        return self._func(owner)
-
-
-def fix_doc(cls):
-    """
-    Class decorator to use the last base class's docstring and make sure Sphinx doesn't grab the
-    base constructor's docstring.
-    """
-    original_init = cls.__init__
-    def init(*args, **kwargs):
-        original_init(*args, **kwargs)
-
-    cls.__init__ = init
-    cls.__doc__ = cls.__bases__[-1].__doc__
-
-    return cls

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/orchestrator/context/common.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/context/common.py b/apache-ariatosca-0.1.1/aria/orchestrator/context/common.py
deleted file mode 100644
index f400142..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/context/common.py
+++ /dev/null
@@ -1,200 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Common code for contexts.
-"""
-
-import logging
-from contextlib import contextmanager
-from functools import partial
-
-import jinja2
-
-from aria import (
-    logger as aria_logger,
-    modeling
-)
-from aria.storage import exceptions
-
-from ...utils.uuid import generate_uuid
-
-
-class BaseContext(object):
-    """
-    Base class for contexts.
-    """
-
-    INSTRUMENTATION_FIELDS = (
-        modeling.models.Node.attributes,
-        modeling.models.Node.properties,
-        modeling.models.NodeTemplate.attributes,
-        modeling.models.NodeTemplate.properties
-    )
-
-    class PrefixedLogger(object):
-        def __init__(self, base_logger, task_id=None):
-            self._logger = base_logger
-            self._task_id = task_id
-
-        def __getattr__(self, attribute):
-            if attribute.upper() in logging._levelNames:
-                return partial(self._logger_with_task_id, _level=attribute)
-            else:
-                return getattr(self._logger, attribute)
-
-        def _logger_with_task_id(self, *args, **kwargs):
-            level = kwargs.pop('_level')
-            kwargs.setdefault('extra', {})['task_id'] = self._task_id
-            return getattr(self._logger, level)(*args, **kwargs)
-
-    def __init__(self,
-                 name,
-                 service_id,
-                 model_storage,
-                 resource_storage,
-                 execution_id,
-                 workdir=None,
-                 **kwargs):
-        super(BaseContext, self).__init__(**kwargs)
-        self._name = name
-        self._id = generate_uuid(variant='uuid')
-        self._model = model_storage
-        self._resource = resource_storage
-        self._service_id = service_id
-        self._workdir = workdir
-        self._execution_id = execution_id
-        self.logger = None
-
-    def _register_logger(self, level=None, task_id=None):
-        self.logger = self.PrefixedLogger(
-            logging.getLogger(aria_logger.TASK_LOGGER_NAME), task_id=task_id)
-        self.logger.setLevel(level or logging.DEBUG)
-        if not self.logger.handlers:
-            self.logger.addHandler(self._get_sqla_handler())
-
-    def _get_sqla_handler(self):
-        return aria_logger.create_sqla_log_handler(model=self._model,
-                                                   log_cls=modeling.models.Log,
-                                                   execution_id=self._execution_id)
-
-    def __repr__(self):
-        return (
-            '{name}(name={self.name}, '
-            'deployment_id={self._service_id}, '
-            .format(name=self.__class__.__name__, self=self))
-
-    @contextmanager
-    def logging_handlers(self, handlers=None):
-        handlers = handlers or []
-        try:
-            for handler in handlers:
-                self.logger.addHandler(handler)
-            yield self.logger
-        finally:
-            for handler in handlers:
-                self.logger.removeHandler(handler)
-
-    @property
-    def model(self):
-        """
-        Storage model API ("MAPI").
-        """
-        return self._model
-
-    @property
-    def resource(self):
-        """
-        Storage resource API ("RAPI").
-        """
-        return self._resource
-
-    @property
-    def service_template(self):
-        """
-        Service template model.
-        """
-        return self.service.service_template
-
-    @property
-    def service(self):
-        """
-        Service instance model.
-        """
-        return self.model.service.get(self._service_id)
-
-    @property
-    def name(self):
-        """
-        Operation name.
-        """
-        return self._name
-
-    @property
-    def id(self):
-        """
-        Operation ID.
-        """
-        return self._id
-
-    def download_resource(self, destination, path=None):
-        """
-        Download a service template resource from the storage resource API ("RAPI").
-        """
-        try:
-            self.resource.service.download(entry_id=str(self.service.id),
-                                           destination=destination,
-                                           path=path)
-        except exceptions.StorageError:
-            self.resource.service_template.download(entry_id=str(self.service_template.id),
-                                                    destination=destination,
-                                                    path=path)
-
-    def download_resource_and_render(self, destination, path=None, variables=None):
-        """
-        Downloads a service template resource from the resource storage and renders its content as a
-        Jinja template using the provided variables. ``ctx`` is available to the template without
-        providing it explicitly.
-        """
-        resource_content = self.get_resource(path=path)
-        resource_content = self._render_resource(resource_content=resource_content,
-                                                 variables=variables)
-        with open(destination, 'wb') as f:
-            f.write(resource_content)
-
-    def get_resource(self, path=None):
-        """
-        Reads a service instance resource as string from the resource storage.
-        """
-        try:
-            return self.resource.service.read(entry_id=str(self.service.id), path=path)
-        except exceptions.StorageError:
-            return self.resource.service_template.read(entry_id=str(self.service_template.id),
-                                                       path=path)
-
-    def get_resource_and_render(self, path=None, variables=None):
-        """
-        Reads a service instance resource as string from the resource storage and renders it as a
-        Jinja template using the provided variables. ``ctx`` is available to the template without
-        providing it explicitly.
-        """
-        resource_content = self.get_resource(path=path)
-        return self._render_resource(resource_content=resource_content, variables=variables)
-
-    def _render_resource(self, resource_content, variables):
-        variables = variables or {}
-        variables.setdefault('ctx', self)
-        resource_template = jinja2.Template(resource_content)
-        return resource_template.render(variables)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/orchestrator/context/exceptions.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/context/exceptions.py b/apache-ariatosca-0.1.1/aria/orchestrator/context/exceptions.py
deleted file mode 100644
index e46e2b1..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/context/exceptions.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Context exceptions.
-"""
-
-from ..exceptions import OrchestratorError
-
-
-class ContextException(OrchestratorError):
-    """
-    Context based exception
-    """
-    pass

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/orchestrator/context/operation.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/context/operation.py b/apache-ariatosca-0.1.1/aria/orchestrator/context/operation.py
deleted file mode 100644
index 7d5f40c..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/context/operation.py
+++ /dev/null
@@ -1,182 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Operation contexts.
-"""
-
-import threading
-from contextlib import contextmanager
-
-import aria
-from aria.utils import file
-from . import common
-
-
class BaseOperationContext(common.BaseContext):
    """
    Base class for contexts used during operation creation and execution.
    """

    def __init__(self, task_id, actor_id, **kwargs):
        """
        :param task_id: ID of the task model backing this operation
        :param actor_id: ID of the actor (node or relationship) the operation targets
        :param kwargs: forwarded to ``common.BaseContext``; the ``destroy_session``
         and ``logger_level`` entries are consumed here
        """
        self._task_id = task_id
        self._actor_id = actor_id
        # per-thread cache for the task model (see the ``task`` property)
        self._thread_local = threading.local()
        # when True, ``close()`` tears down the model storage session/engine
        self._destroy_session = kwargs.pop('destroy_session', False)
        logger_level = kwargs.pop('logger_level', None)
        super(BaseOperationContext, self).__init__(**kwargs)
        self._register_logger(task_id=self.task.id, level=logger_level)

    def __repr__(self):
        details = 'function={task.function}; ' \
                  'operation_arguments={task.arguments}'\
            .format(task=self.task)
        return '{name}({0})'.format(details, name=self.name)

    @property
    def task(self):
        """
        The task in the model storage.

        :return: task model
        """
        # SQLAlchemy prevents from accessing an object which was created on a different thread.
        # So we retrieve the object from the storage if the current thread isn't the same as the
        # original thread.

        if not hasattr(self._thread_local, 'task'):
            self._thread_local.task = self.model.task.get(self._task_id)
        return self._thread_local.task

    @property
    def plugin_workdir(self):
        """
        A work directory that is unique to the plugin and the deployment id,
        or ``None`` when the task has no plugin. Created on first access.
        """
        if self.task.plugin is None:
            return None
        plugin_workdir = '{0}/plugins/{1}/{2}'.format(self._workdir,
                                                      self.service.id,
                                                      self.task.plugin.name)
        file.makedirs(plugin_workdir)
        return plugin_workdir

    @property
    def serialization_dict(self):
        """
        A serializable description of this context (class + constructor kwargs),
        suitable for recreating it elsewhere via ``instantiate_from_dict``.
        """
        context_dict = {
            'name': self.name,
            'service_id': self._service_id,
            'task_id': self._task_id,
            'actor_id': self._actor_id,
            'workdir': self._workdir,
            'model_storage': self.model.serialization_dict if self.model else None,
            'resource_storage': self.resource.serialization_dict if self.resource else None,
            'execution_id': self._execution_id,
            'logger_level': self.logger.level
        }
        return {
            'context_cls': self.__class__,
            'context': context_dict
        }

    @classmethod
    def instantiate_from_dict(cls, model_storage=None, resource_storage=None, **kwargs):
        """
        Recreate a context from the output of ``serialization_dict``; the new
        context owns (and will later destroy) its storage session.
        """
        if model_storage:
            model_storage = aria.application_model_storage(**model_storage)
        if resource_storage:
            resource_storage = aria.application_resource_storage(**resource_storage)

        return cls(model_storage=model_storage,
                   resource_storage=resource_storage,
                   destroy_session=True,
                   **kwargs)

    def close(self):
        """
        Release the log model storage session/engine if this context owns them.
        """
        if self._destroy_session:
            self.model.log._session.remove()
            self.model.log._engine.dispose()

    @property
    @contextmanager
    def persist_changes(self):
        # usage: ``with ctx.persist_changes: ...`` — on exit, the task model is
        # written back to storage
        yield
        self.model.task.update(self.task)
-
-
class NodeOperationContext(BaseOperationContext):
    """
    Operation context whose actor is a node.
    """

    @property
    def node(self):
        """
        The node instance the current operation is running against.
        """
        return self.model.node.get(self._actor_id)

    @property
    def node_template(self):
        """
        The template from which this context's node was instantiated.
        """
        return self.node.node_template
-
-
class RelationshipOperationContext(BaseOperationContext):
    """
    Operation context whose actor is a relationship.
    """

    @property
    def relationship(self):
        """
        The relationship instance the current operation is running against.
        """
        return self.model.relationship.get(self._actor_id)

    @property
    def source_node(self):
        """
        Source node instance of the relationship.
        """
        return self.relationship.source_node

    @property
    def source_node_template(self):
        """
        Template of the relationship's source node.
        """
        return self.source_node.node_template

    @property
    def target_node(self):
        """
        Target node instance of the relationship.
        """
        return self.relationship.target_node

    @property
    def target_node_template(self):
        """
        Template of the relationship's target node.
        """
        return self.target_node.node_template

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/orchestrator/context/toolbelt.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/context/toolbelt.py b/apache-ariatosca-0.1.1/aria/orchestrator/context/toolbelt.py
deleted file mode 100644
index a2e1122..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/context/toolbelt.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Tools for operations.
-"""
-
-from . import operation
-
-
class NodeToolBelt(object):
    """
    Utility helpers available to node operations.
    """

    def __init__(self, operation_context):
        self._op_context = operation_context

    @property
    def host_ip(self):
        """
        IP address of the current node's host (its ``ip`` attribute, or ``None``).
        """
        assert isinstance(self._op_context, operation.NodeOperationContext)
        host = self._op_context.node.host
        return host.attributes.get('ip')
-
-
class RelationshipToolBelt(object):
    """
    Tool belt for relationship operations (currently exposes no helpers).
    """

    def __init__(self, operation_context):
        self._op_context = operation_context
-
-
def toolbelt(operation_context):
    """
    Return the tool belt matching the type of the given operation context.

    :param operation_context: context of the currently executing operation
    :raises RuntimeError: for unsupported context types
    """
    if isinstance(operation_context, operation.RelationshipOperationContext):
        return RelationshipToolBelt(operation_context)
    if isinstance(operation_context, operation.NodeOperationContext):
        return NodeToolBelt(operation_context)
    raise RuntimeError("Operation context not supported")

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/orchestrator/context/workflow.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/context/workflow.py b/apache-ariatosca-0.1.1/aria/orchestrator/context/workflow.py
deleted file mode 100644
index 738d2fd..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/context/workflow.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Workflow context.
-"""
-
-import threading
-from contextlib import contextmanager
-
-from .exceptions import ContextException
-from .common import BaseContext
-
-
class WorkflowContext(BaseContext):
    """
    Context used during workflow creation and execution.
    """
    def __init__(self,
                 workflow_name,
                 parameters=None,
                 task_max_attempts=1,
                 task_retry_interval=0,
                 task_ignore_failure=False,
                 *args, **kwargs):
        """
        :param workflow_name: name of the workflow being created/executed
        :param parameters: workflow parameters (defaults to an empty dict)
        :param task_max_attempts: default maximum attempts for tasks in this workflow
        :param task_retry_interval: default retry interval (in seconds) for tasks
        :param task_ignore_failure: default ignore-failure flag for tasks
        """
        super(WorkflowContext, self).__init__(*args, **kwargs)
        self._workflow_name = workflow_name
        self._parameters = parameters or {}
        self._task_max_attempts = task_max_attempts
        self._task_retry_interval = task_retry_interval
        self._task_ignore_failure = task_ignore_failure
        self._execution_graph = None
        self._register_logger()

    def __repr__(self):
        return (
            '{name}(deployment_id={self._service_id}, '
            'workflow_name={self._workflow_name}, execution_id={self._execution_id})'.format(
                name=self.__class__.__name__, self=self))

    @property
    def workflow_name(self):
        """
        Name of the workflow this context belongs to.
        """
        return self._workflow_name

    @property
    def execution(self):
        """
        Execution model for this context's execution id.
        """
        return self.model.execution.get(self._execution_id)

    @execution.setter
    def execution(self, value):
        """
        Stores the execution in the storage model API ("MAPI").
        """
        self.model.execution.put(value)

    @property
    def node_templates(self):
        """
        Iterates over the node templates of this context's service.
        """
        # filter column is the service's name column prefixed with 'service_'
        key = 'service_{0}'.format(self.model.node_template.model_cls.name_column_name())

        return self.model.node_template.iter(
            filters={
                key: getattr(self.service, self.service.name_column_name())
            }
        )

    @property
    def nodes(self):
        """
        Iterates over the nodes of this context's service.
        """
        key = 'service_{0}'.format(self.model.node.model_cls.name_column_name())
        return self.model.node.iter(
            filters={
                key: getattr(self.service, self.service.name_column_name())
            }
        )

    @property
    @contextmanager
    def persist_changes(self):
        # usage: ``with ctx.persist_changes: ...`` — on exit, the execution
        # model is written back to storage
        yield
        self._model.execution.update(self.execution)
-
-
class _CurrentContext(threading.local):
    """
    Thread-local holder for the active workflow context.
    """

    def __init__(self):
        super(_CurrentContext, self).__init__()
        self._workflow_context = None

    def _set(self, value):
        self._workflow_context = value

    def get(self):
        """
        Return the workflow context current on this thread.

        :raises ContextException: if no context has been set
        """
        ctx = self._workflow_context
        if ctx is None:
            raise ContextException("No context was set")
        return ctx

    @contextmanager
    def push(self, workflow_context):
        """
        Make *workflow_context* current for the duration of the ``with`` block,
        restoring the previously-current context on exit.
        """
        previous = self._workflow_context
        self._set(workflow_context)
        try:
            yield self
        finally:
            self._set(previous)

current = _CurrentContext()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/orchestrator/decorators.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/decorators.py b/apache-ariatosca-0.1.1/aria/orchestrator/decorators.py
deleted file mode 100644
index 4b163d6..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/decorators.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Workflow and operation decorators.
-"""
-
-from functools import partial, wraps
-
-from ..utils.validation import validate_function_arguments
-from ..utils.uuid import generate_uuid
-
-from . import context
-from .workflows.api import task_graph
-
-
# Argument names injected by the decorators below; decorated workflow/operation
# functions must not receive user-supplied values under these names.
WORKFLOW_DECORATOR_RESERVED_ARGUMENTS = {'ctx', 'graph'}
OPERATION_DECORATOR_RESERVED_ARGUMENTS = {'ctx', 'toolbelt'}
-
-
def workflow(func=None, suffix_template=''):
    """
    Workflow decorator.

    Wraps a workflow function so that, when invoked with a workflow context
    (``ctx``) and parameters, it: derives a unique workflow name (rendered
    suffix template, or a random UUID when that renders empty), injects
    defaults for the reserved ``ctx`` and ``graph`` arguments, validates the
    parameters against the function signature, runs the function under model
    instrumentation with the context pushed as the thread's current workflow
    context, and returns the populated task graph.

    Usable bare (``@workflow``) or with arguments
    (``@workflow(suffix_template=...)``).
    """
    if func is None:
        # called with arguments: return a decorator awaiting the function
        return partial(workflow, suffix_template=suffix_template)

    @wraps(func)
    def _wrapper(ctx, **workflow_parameters):

        workflow_name = _generate_name(
            func_name=func.__name__,
            suffix_template=suffix_template,
            ctx=ctx,
            **workflow_parameters)

        workflow_parameters.setdefault('ctx', ctx)
        workflow_parameters.setdefault('graph', task_graph.TaskGraph(workflow_name))
        validate_function_arguments(func, workflow_parameters)
        with ctx.model.instrument(*ctx.INSTRUMENTATION_FIELDS):
            with context.workflow.current.push(ctx):
                func(**workflow_parameters)
        return workflow_parameters['graph']
    return _wrapper
-
-
def operation(func=None, toolbelt=False, suffix_template='', logging_handlers=None):
    """
    Operation decorator.

    Wraps an operation function so that, when invoked, its keyword arguments
    are validated against its signature (optionally injecting a ``toolbelt``)
    and the call runs under model instrumentation.

    Usable bare (``@operation``) or with arguments
    (``@operation(toolbelt=True, ...)``).
    """
    if func is None:
        # called with arguments: return a decorator awaiting the function
        return partial(operation,
                       toolbelt=toolbelt,
                       suffix_template=suffix_template,
                       logging_handlers=logging_handlers)

    @wraps(func)
    def _wrapper(**func_kwargs):
        ctx = func_kwargs['ctx']
        if toolbelt:
            func_kwargs.setdefault('toolbelt', context.toolbelt(ctx))
        validate_function_arguments(func, func_kwargs)
        with ctx.model.instrument(*ctx.INSTRUMENTATION_FIELDS):
            return func(**func_kwargs)
    return _wrapper
-
-
def _generate_name(func_name, ctx, suffix_template, **custom_kwargs):
    """
    Build a qualified workflow name of the form ``<func_name>.<suffix>``.

    The suffix is the rendered *suffix_template*; when it renders empty,
    a random UUID is used instead so names remain unique.
    """
    suffix = suffix_template.format(ctx=ctx, **custom_kwargs) or generate_uuid(variant='uuid')
    return '{func_name}.{suffix}'.format(func_name=func_name, suffix=suffix)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/orchestrator/events.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/events.py b/apache-ariatosca-0.1.1/aria/orchestrator/events.py
deleted file mode 100644
index ef84e5d..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/events.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Orchestrator events.
-"""
-
-from blinker import signal
-
# workflow engine task signals:
sent_task_signal = signal('sent_task_signal')
start_task_signal = signal('start_task_signal')
# NOTE(review): the two variables below carry an ``on_`` prefix that their
# signal names lack — kept as-is since subscribers connect by object, not name
on_success_task_signal = signal('success_task_signal')
on_failure_task_signal = signal('failure_task_signal')

# workflow engine workflow signals:
start_workflow_signal = signal('start_workflow_signal')
on_cancelling_workflow_signal = signal('on_cancelling_workflow_signal')
on_cancelled_workflow_signal = signal('on_cancelled_workflow_signal')
on_success_workflow_signal = signal('on_success_workflow_signal')
on_failure_workflow_signal = signal('on_failure_workflow_signal')
on_resume_workflow_signal = signal('on_resume_workflow_signal')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/orchestrator/exceptions.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/exceptions.py b/apache-ariatosca-0.1.1/aria/orchestrator/exceptions.py
deleted file mode 100644
index 384458f..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/exceptions.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Orchestrator exceptions.
-"""
-
-from aria.exceptions import AriaError
-
-
class OrchestratorError(AriaError):
    """
    Base class for all orchestrator errors.
    """
-
-
class InvalidPluginError(AriaError):
    """
    Signals that a plugin failed validation.
    """
-
-
class PluginAlreadyExistsError(AriaError):
    """
    Signals that a plugin with the same package name and package version is
    already installed.
    """
-
-
class TaskRetryException(RuntimeError):
    """
    Internal signal raised when ``ctx.task.retry`` is invoked.
    """

    def __init__(self, message, retry_interval=None):
        # retry_interval is surfaced so the engine can honor a custom delay
        self.retry_interval = retry_interval
        super(TaskRetryException, self).__init__(message)
-
-
class TaskAbortException(RuntimeError):
    """
    Internal signal raised when ``ctx.task.abort`` is invoked.
    """
-
-
class UndeclaredWorkflowError(AriaError):
    """
    Signals an attempt to execute a workflow that was never declared.
    """
-
-
class ActiveExecutionsError(AriaError):
    """
    Signals an attempt to execute a workflow on a service that already has an
    active execution.
    """
-
-
class WorkflowImplementationNotFoundError(AriaError):
    """
    Signals that a workflow's implementation could not be imported.
    """
-
-
class InvalidWorkflowRunnerParams(AriaError):
    """
    Signals that the workflow runner received an invalid combination of
    arguments.
    """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/common.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/common.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/common.py
deleted file mode 100644
index ce6746c..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/common.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Execution plugin utilities.
-"""
-
-import json
-import os
-import tempfile
-
-import requests
-
-from . import constants
-from . import exceptions
-
-
def is_windows():
    """
    Whether the current platform is Windows (``os.name == 'nt'``).
    """
    return os.name == 'nt'
-
-
def download_script(ctx, script_path):
    """
    Fetch *script_path* into a unique temporary file and return the local path.

    ``http``/``https`` URLs are downloaded directly; any other path is resolved
    through the context's resource storage (``ctx.download_resource``). The
    temporary file is removed if the download fails.

    :param ctx: operation context
    :param script_path: URL or resource path of the script
    :return: path of the downloaded temporary file
    """
    split = script_path.split('://')
    schema = split[0]
    suffix = script_path.split('/')[-1]
    # reserve a uniquely-named destination file; close the fd immediately since
    # the file is (re)opened by the writers below
    file_descriptor, dest_script_path = tempfile.mkstemp(suffix='-{0}'.format(suffix))
    os.close(file_descriptor)
    try:
        if schema in ('http', 'https'):
            response = requests.get(script_path)
            # NOTE(review): only 404 triggers an abort; other HTTP error codes
            # (e.g. 500) would write the error body to disk — confirm intended
            if response.status_code == 404:
                ctx.task.abort('Failed to download script: {0} (status code: {1})'
                               .format(script_path, response.status_code))
            content = response.text
            with open(dest_script_path, 'wb') as f:
                f.write(content)
        else:
            ctx.download_resource(destination=dest_script_path, path=script_path)
    except:
        # bare except is deliberate: clean up the temp file, then re-raise
        os.remove(dest_script_path)
        raise
    return dest_script_path
-
-
def create_process_config(script_path, process, operation_kwargs, quote_json_env_vars=False):
    """
    Updates a process with its environment variables and command line, and
    returns it.

    Gets a dict representing a process and a dict representing the environment
    variables. Converts each environment variable to a format of::

        <string representing the name of the variable>:
        <json formatted string representing the value of the variable>.

    Finally, updates the process with the newly formatted environment variables
    and the assembled ``command``, and returns the process.

    :param script_path: path of the script the process will execute
    :param process: dict representing a process (may be ``None``)
    :type process: dict
    :param operation_kwargs: dict representing environment variables that should exist in the
     process's running environment; a ``ctx`` entry, if present, is dropped
    :type operation_kwargs: dict
    :param quote_json_env_vars: when True, wrap JSON-serialized values in single
     quotes (needed when values pass through a POSIX shell)
    :return: process updated with its environment variables and command
    :rtype: dict
    """
    process = process or {}
    env_vars = operation_kwargs.copy()
    # 'ctx' holds the operation context object, not a real environment variable
    if 'ctx' in env_vars:
        del env_vars['ctx']
    env_vars.update(process.get('env', {}))
    # iterate over a snapshot: the original iterated ``env_vars.items()`` while
    # deleting/re-adding keys, which raises RuntimeError on Python 3
    for k, v in list(env_vars.items()):
        if isinstance(v, (dict, list, tuple, bool, int, float)):
            v = json.dumps(v)
            if quote_json_env_vars:
                v = "'{0}'".format(v)
        if is_windows():
            # These <k,v> environment variables will subsequently
            # be used in a subprocess.Popen() call, as the `env` parameter.
            # In some windows python versions, if an environment variable
            # name is not of type str (e.g. unicode), the Popen call will
            # fail.
            k = str(k)
            # The windows shell removes all double quotes - escape them
            # to still be able to pass JSON in env vars to the shell.
            v = v.replace('"', '\\"')
        del env_vars[k]
        env_vars[k] = str(v)
    process['env'] = env_vars
    args = process.get('args')
    command = script_path
    command_prefix = process.get('command_prefix')
    if command_prefix:
        command = '{0} {1}'.format(command_prefix, command)
    if args:
        command = ' '.join([command] + [str(a) for a in args])
    process['command'] = command
    return process
-
-
def patch_ctx(ctx):
    """
    Monkey-patch ``ctx.task.abort`` and ``ctx.task.retry`` so that, instead of
    raising, they record a ``ScriptException`` on ``ctx._error`` for later
    inspection by ``check_error``.
    """
    # no error recorded yet
    ctx._error = None
    task = ctx.task

    def _validate_legal_action():
        # a script may abort or retry at most once; a second call overwrites the
        # recorded error with a RuntimeError, which check_error turns into an abort
        if ctx._error is not None:
            ctx._error = RuntimeError(constants.ILLEGAL_CTX_OPERATION_MESSAGE)
            raise ctx._error

    def abort_operation(message=None):
        # record an abort request instead of raising
        _validate_legal_action()
        ctx._error = exceptions.ScriptException(message=message, retry=False)
        return ctx._error
    task.abort = abort_operation

    def retry_operation(message=None, retry_interval=None):
        # record a retry request (with optional custom delay) instead of raising
        _validate_legal_action()
        ctx._error = exceptions.ScriptException(message=message,
                                                retry=True,
                                                retry_interval=retry_interval)
        return ctx._error
    task.retry = retry_operation
-
-
def check_error(ctx, error_check_func=None, reraise=False):
    """
    Surface an error recorded on *ctx* by the patched ``task.abort``/``task.retry``
    stand-ins (see ``patch_ctx``), converting it into a real abort or retry.

    :param ctx: operation context previously processed by ``patch_ctx``
    :param error_check_func: optional callable performing additional error checks
    :param reraise: when True, re-raise the exception currently being handled
     (must be called from within an ``except`` clause)
    :return: the error recorded on the context (possibly ``None``)
    """
    _error = ctx._error
    # this happens when a script calls task.abort/task.retry more than once
    if isinstance(_error, RuntimeError):
        ctx.task.abort(str(_error))
    # ScriptException is populated by the ctx proxy server when task.abort or task.retry
    # are called
    elif isinstance(_error, exceptions.ScriptException):
        if _error.retry:
            ctx.task.retry(_error.message, _error.retry_interval)
        else:
            ctx.task.abort(_error.message)
    # local and ssh operations may pass an additional logic check for errors here
    if error_check_func:
        error_check_func()
    # if this function is called from within an ``except`` clause, a re-raise maybe required
    if reraise:
        raise  # pylint: disable=misplaced-bare-raise
    return _error

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/constants.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/constants.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/constants.py
deleted file mode 100644
index 1953912..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/constants.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Execution plugin constants.
-"""
-import os
-import tempfile
-
-from . import exceptions
-
# related to local execution
PYTHON_SCRIPT_FILE_EXTENSION = '.py'
POWERSHELL_SCRIPT_FILE_EXTENSION = '.ps1'
DEFAULT_POWERSHELL_EXECUTABLE = 'powershell'

# related to both local and ssh
ILLEGAL_CTX_OPERATION_MESSAGE = 'ctx may only abort or retry once'

# related to ssh
DEFAULT_BASE_DIR = os.path.join(tempfile.gettempdir(), 'aria-ctx')
# defaults applied to the Fabric environment before running remote commands
FABRIC_ENV_DEFAULTS = {
    'connection_attempts': 5,
    'timeout': 10,
    'forward_agent': False,
    'abort_on_prompts': True,
    'keepalive': 0,
    'linewise': False,
    'pool_size': 0,
    'skip_bad_hosts': False,
    'status': False,
    'disable_known_hosts': True,
    'combine_stderr': True,
    # raise this instead of letting Fabric terminate the process on abort
    'abort_exception': exceptions.TaskException,
}
# Fabric output group names recognized by the ssh execution code
VALID_FABRIC_GROUPS = set([
    'status',
    'aborts',
    'warnings',
    'running',
    'stdout',
    'stderr',
    'user',
    'everything'
])

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ctx_proxy/client.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ctx_proxy/client.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ctx_proxy/client.py
deleted file mode 100644
index 1310c21..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ctx_proxy/client.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#! /usr/bin/env python
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-``ctx`` proxy client implementation.
-"""
-
-import argparse
-import json
-import os
-import sys
-import urllib2
-
-
-# Environment variable for the socket url (used by clients to locate the socket)
-CTX_SOCKET_URL = 'CTX_SOCKET_URL'
-
-
-class _RequestError(RuntimeError):
-
-    def __init__(self, ex_message, ex_type, ex_traceback):
-        super(_RequestError, self).__init__(self, '{0}: {1}'.format(ex_type, ex_message))
-        self.ex_type = ex_type
-        self.ex_message = ex_message
-        self.ex_traceback = ex_traceback
-
-
def _http_request(socket_url, request, method, timeout):
    """
    Send ``request`` (a JSON-serializable dict) to the proxy server at
    ``socket_url`` using the given HTTP ``method`` and return the decoded
    JSON response body.

    :raises RuntimeError: if the server answers with a non-200 status code
    """
    opener = urllib2.build_opener(urllib2.HTTPHandler)
    http_request = urllib2.Request(socket_url, data=json.dumps(request))
    # urllib2 derives the HTTP verb from get_method(); override it so
    # arbitrary methods (not just GET/POST) can be issued.
    http_request.get_method = lambda: method
    response = opener.open(http_request, timeout=timeout)
    if response.code != 200:
        raise RuntimeError('Request failed: {0}'.format(response))
    return json.loads(response.read())
-
-
def _client_request(socket_url, args, timeout, method='POST'):
    """
    Send a ``ctx`` command (its argument list) to the proxy server and
    interpret the typed reply.

    :raises _RequestError: when the server reports a server-side exception
    :raises SystemExit: when the server requests the operation to stop
    :return: the reply payload for ordinary results
    """
    response = _http_request(
        socket_url=socket_url,
        request={'args': args},
        method=method,
        timeout=timeout
    )
    payload = response.get('payload')
    response_type = response.get('type')
    # Error and stop replies are turned into exceptions; anything else is a
    # normal result.
    if response_type == 'error':
        raise _RequestError(payload['message'], payload['type'], payload['traceback'])
    if response_type == 'stop_operation':
        raise SystemExit(payload['message'])
    return payload
-
-
def _parse_args(args):
    """
    Parse the ``ctx`` client command line.

    The socket URL comes from ``--socket-url`` or, failing that, from the
    ``CTX_SOCKET_URL`` environment variable.

    :raises RuntimeError: when no socket URL is available from either source
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--timeout', type=int, default=30)
    parser.add_argument('--socket-url', default=os.environ.get(CTX_SOCKET_URL))
    parser.add_argument('--json-arg-prefix', default='@')
    parser.add_argument('-j', '--json-output', action='store_true')
    parser.add_argument('args', nargs='*')
    parsed = parser.parse_args(args=args)
    if not parsed.socket_url:
        raise RuntimeError('Missing CTX_SOCKET_URL environment variable '
                           'or socket_url command line argument. (ctx is supposed to be executed '
                           'within an operation context)')
    return parsed
-
-
-def _process_args(json_prefix, args):
-    processed_args = []
-    for arg in args:
-        if arg.startswith(json_prefix):
-            arg = json.loads(arg[1:])
-        processed_args.append(arg)
-    return processed_args
-
-
def main(args=None):
    """
    ``ctx`` client entry point: parse the command line, forward the command
    to the proxy server, and write the payload to stdout (as JSON when
    ``--json-output`` is given, otherwise as ``str``; falsy payloads print
    as an empty string).
    """
    parsed = _parse_args(args)
    payload = _client_request(
        parsed.socket_url,
        args=_process_args(parsed.json_arg_prefix, parsed.args),
        timeout=parsed.timeout)
    if parsed.json_output:
        output = json.dumps(payload)
    else:
        output = str(payload) if payload else ''
    sys.stdout.write(output)

if __name__ == '__main__':
    main()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ctx_proxy/server.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ctx_proxy/server.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ctx_proxy/server.py
deleted file mode 100644
index ca910e0..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/ctx_proxy/server.py
+++ /dev/null
@@ -1,260 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-``ctx`` proxy server implementation.
-"""
-
-import collections
-import json
-import re
-import socket
-import threading
-import traceback
-import Queue
-import StringIO
-import wsgiref.simple_server
-
-import bottle
-from aria import modeling
-
-from .. import exceptions
-
-
class CtxProxy(object):
    """
    HTTP proxy server exposing an operation context (``ctx``) to executed
    scripts.

    Runs a bottle app on a wsgiref server, bound to an unused local port, in
    a daemon thread; scripts reach it at ``socket_url``. May be used as a
    context manager, which closes the server on exit.
    """

    def __init__(self, ctx, ctx_patcher=(lambda *args, **kwargs: None)):
        """
        :param ctx: operation context to expose over HTTP
        :param ctx_patcher: callable applied to ``ctx`` inside the server
               thread (some context state is thread-local and must be patched
               in the thread that serves requests)
        """
        self.ctx = ctx
        self._ctx_patcher = ctx_patcher
        self.port = _get_unused_port()
        self.socket_url = 'http://localhost:{0}'.format(self.port)
        self.server = None
        # One-slot queue used purely as a "server is up" signal.
        self._started = Queue.Queue(1)
        self.thread = self._start_server()
        # Block until the server thread reports readiness (or time out).
        self._started.get(timeout=5)

    def _start_server(self):
        """
        Start the bottle/wsgiref server in a daemon thread and return the
        thread.
        """

        class BottleServerAdapter(bottle.ServerAdapter):
            # Captured from the enclosing method's ``self`` at class-creation
            # time, giving the adapter a handle back to this CtxProxy.
            proxy = self

            def close_session(self):
                self.proxy.ctx.model.log._session.remove()

            def run(self, app):

                class Server(wsgiref.simple_server.WSGIServer):
                    allow_reuse_address = True
                    # Captured from run()'s ``self`` (the adapter).
                    bottle_server = self

                    def handle_error(self, request, client_address):
                        # Swallow per-request errors instead of printing a
                        # traceback to stderr.
                        pass

                    def serve_forever(self, poll_interval=0.5):
                        try:
                            wsgiref.simple_server.WSGIServer.serve_forever(self, poll_interval)
                        finally:
                            # Once shutdown is called, we need to close the session.
                            # If the session is not closed properly, it might raise warnings,
                            # or even lock the database.
                            self.bottle_server.close_session()

                class Handler(wsgiref.simple_server.WSGIRequestHandler):
                    def address_string(self):
                        # Skip reverse DNS; log the raw client address.
                        return self.client_address[0]

                    # ``self`` here resolves via closure to run()'s ``self``
                    # (the adapter), which carries the ``quiet`` flag.
                    def log_request(*args, **kwargs):  # pylint: disable=no-method-argument
                        if not self.quiet:
                            return wsgiref.simple_server.WSGIRequestHandler.log_request(*args,
                                                                                        **kwargs)
                server = wsgiref.simple_server.make_server(
                    host=self.host,
                    port=self.port,
                    app=app,
                    server_class=Server,
                    handler_class=Handler)
                self.proxy.server = server
                # Signal __init__ that the server exists and is about to serve.
                self.proxy._started.put(True)
                server.serve_forever(poll_interval=0.1)

        def serve():
            # Since task is a thread_local object, we need to patch it inside the server thread.
            self._ctx_patcher(self.ctx)

            bottle_app = bottle.Bottle()
            bottle_app.post('/', callback=self._request_handler)
            bottle.run(
                app=bottle_app,
                host='localhost',
                port=self.port,
                quiet=True,
                server=BottleServerAdapter)
        thread = threading.Thread(target=serve)
        thread.daemon = True
        thread.start()
        return thread

    def close(self):
        """Shut down the server if it was started."""
        if self.server:
            self.server.shutdown()
            self.server.server_close()

    def _request_handler(self):
        """Bottle callback: process the raw request body, reply as JSON."""
        request = bottle.request.body.read()  # pylint: disable=no-member
        response = self._process(request)
        return bottle.LocalResponse(
            body=json.dumps(response, cls=modeling.utils.ModelJSONEncoder),
            status=200,
            headers={'content-type': 'application/json'}
        )

    def _process(self, request):
        """
        Execute one ``ctx`` command against the context and wrap the outcome
        in a typed result dict ('result', 'stop_operation', or 'error').
        All exceptions are converted to 'error' replies; nothing propagates.
        """
        try:
            with self.ctx.model.instrument(*self.ctx.INSTRUMENTATION_FIELDS):
                typed_request = json.loads(request)
                args = typed_request['args']
                payload = _process_ctx_request(self.ctx, args)
                result_type = 'result'
                # A ScriptException result means the script asked to
                # abort/retry; signal the client to stop the operation.
                if isinstance(payload, exceptions.ScriptException):
                    payload = dict(message=str(payload))
                    result_type = 'stop_operation'
                result = {'type': result_type, 'payload': payload}
        except Exception as e:
            traceback_out = StringIO.StringIO()
            traceback.print_exc(file=traceback_out)
            payload = {
                'type': type(e).__name__,
                'message': str(e),
                'traceback': traceback_out.getvalue()
            }
            result = {'type': 'error', 'payload': payload}

        return result

    def __enter__(self):
        return self

    def __exit__(self, *args, **kwargs):
        self.close()
-
-
def _process_ctx_request(ctx, args):
    """
    Walk the ``ctx`` object according to the request's argument list.

    Each argument either names an attribute of the current object (with
    ``-`` desugared to ``_``), or — when the current object is a mapping —
    treats the remaining one or two arguments as a dotted-path get or set,
    or — when the current object is callable — passes the remaining
    arguments to it (a trailing mapping becomes keyword arguments). If the
    final result is itself callable, it is invoked with no arguments.

    :raises RuntimeError: for argument sequences that cannot be applied to
            the current object
    """
    current = ctx
    num_args = len(args)
    index = 0
    while index < num_args:
        arg = args[index]
        attr = _desugar_attr(current, arg)
        if attr:
            current = getattr(current, attr)
        elif isinstance(current, collections.MutableMapping):
            key = arg
            path_dict = _PathDictAccess(current)
            if index + 1 == num_args:
                # read dict prop by path
                value = path_dict.get(key)
                current = value
            elif index + 2 == num_args:
                # set dict prop by path
                value = args[index + 1]
                path_dict.set(key, value)
                current = None
            else:
                raise RuntimeError('Illegal argument while accessing dict')
            break
        elif callable(current):
            # All remaining arguments feed the call; a trailing mapping is
            # splatted as keyword arguments.
            kwargs = {}
            remaining_args = args[index:]
            if isinstance(remaining_args[-1], collections.MutableMapping):
                kwargs = remaining_args[-1]
                remaining_args = remaining_args[:-1]
            current = current(*remaining_args, **kwargs)
            break
        else:
            raise RuntimeError('{0} cannot be processed in {1}'.format(arg, args))
        index += 1
    # A no-argument callable result is invoked implicitly.
    if callable(current):
        current = current()
    return current
-
-
def _desugar_attr(obj, attr):
    """
    Map a string argument to an attribute name present on ``obj``.

    Tries the name as-is, then with dashes turned into underscores; returns
    None when neither exists or when ``attr`` is not a string.
    """
    if not isinstance(attr, basestring):
        return None
    for candidate in (attr, attr.replace('-', '_')):
        if hasattr(obj, candidate):
            return candidate
    return None
-
-
-class _PathDictAccess(object):
-    pattern = re.compile(r"(.+)\[(\d+)\]")
-
-    def __init__(self, obj):
-        self.obj = obj
-
-    def set(self, prop_path, value):
-        obj, prop_name = self._get_parent_obj_prop_name_by_path(prop_path)
-        obj[prop_name] = value
-
-    def get(self, prop_path):
-        value = self._get_object_by_path(prop_path)
-        return value
-
-    def _get_object_by_path(self, prop_path, fail_on_missing=True):
-        # when setting a nested object, make sure to also set all the
-        # intermediate path objects
-        current = self.obj
-        for prop_segment in prop_path.split('.'):
-            match = self.pattern.match(prop_segment)
-            if match:
-                index = int(match.group(2))
-                property_name = match.group(1)
-                if property_name not in current:
-                    self._raise_illegal(prop_path)
-                if not isinstance(current[property_name], list):
-                    self._raise_illegal(prop_path)
-                current = current[property_name][index]
-            else:
-                if prop_segment not in current:
-                    if fail_on_missing:
-                        self._raise_illegal(prop_path)
-                    else:
-                        current[prop_segment] = {}
-                current = current[prop_segment]
-        return current
-
-    def _get_parent_obj_prop_name_by_path(self, prop_path):
-        split = prop_path.split('.')
-        if len(split) == 1:
-            return self.obj, prop_path
-        parent_path = '.'.join(split[:-1])
-        parent_obj = self._get_object_by_path(parent_path, fail_on_missing=False)
-        prop_name = split[-1]
-        return parent_obj, prop_name
-
-    @staticmethod
-    def _raise_illegal(prop_path):
-        raise RuntimeError('illegal path: {0}'.format(prop_path))
-
-
-def _get_unused_port():
-    sock = socket.socket()
-    sock.bind(('127.0.0.1', 0))
-    _, port = sock.getsockname()
-    sock.close()
-    return port

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/environment_globals.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/environment_globals.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/environment_globals.py
deleted file mode 100644
index 6dec293..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/environment_globals.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Utilities for managing globals for the environment.
-"""
-
def create_initial_globals(path):
    """
    Emulate the ``globals()`` of a freshly loaded script module.

    Starts from a copy of this module's own globals, overrides the standard
    module attributes (``__doc__``, ``__file__``, ``__name__``,
    ``__package__``) so the result looks like a script executed as
    ``__main__`` from ``path``, and removes this function's own entry so the
    executed script cannot see it.

    This function deliberately lives in its own module: copying globals drags
    in every top-level name of the defining module, so keeping the module
    empty (apart from this function, which is then deleted from the copy) is
    what keeps the result clean. Hard-coding a globals dict instead would
    couple us to a particular interpreter's ``globals`` layout.

    :param path: value to use for the script's ``__file__``
    :return: dict suitable for use as an execution namespace
    """
    script_globals = globals().copy()
    script_globals['__doc__'] = 'Dynamically executed script'
    script_globals['__file__'] = path
    script_globals['__name__'] = '__main__'
    script_globals['__package__'] = None
    # Hide this factory from the executed script's namespace.
    script_globals.pop(create_initial_globals.__name__)
    return script_globals

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/exceptions.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/exceptions.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/exceptions.py
deleted file mode 100644
index f201fae..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/exceptions.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Execution plugin exceptions.
-"""
-
class ProcessException(Exception):
    """
    Raised when local scripts and remote SSH commands fail.

    Carries the failed command, its exit code, and its captured stdout and
    stderr so callers can inspect or report the failure.
    """

    def __init__(self, stderr=None, stdout=None, command=None, exit_code=None):
        # The stderr text doubles as the exception message.
        super(ProcessException, self).__init__(stderr)
        self.stderr = stderr
        self.stdout = stdout
        self.command = command
        self.exit_code = exit_code
-
-
class TaskException(Exception):
    """
    Raised when remote ssh scripts fail.

    NOTE(review): also installed as Fabric's ``abort_exception`` in the
    execution plugin constants — presumably so Fabric aborts surface as
    catchable exceptions; confirm against the Fabric documentation.
    """
-
-
class ScriptException(Exception):
    """
    Used by the ``ctx`` proxy server when task.retry or task.abort are called
    by scripts.

    ``retry`` indicates whether the task should be re-attempted;
    ``retry_interval`` is the delay before retrying (when given).
    """

    def __init__(self, message=None, retry=None, retry_interval=None):
        super(ScriptException, self).__init__(message)
        self.retry_interval = retry_interval
        self.retry = retry

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca-website/blob/23d6ba76/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/instantiation.py
----------------------------------------------------------------------
diff --git a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/instantiation.py b/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/instantiation.py
deleted file mode 100644
index f55aa50..0000000
--- a/apache-ariatosca-0.1.1/aria/orchestrator/execution_plugin/instantiation.py
+++ /dev/null
@@ -1,228 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Instantiation of :class:`~aria.modeling.models.Operation` models.
-"""
-
-# TODO: this module will eventually be moved to a new "aria.instantiation" package
-
-from ...utils.type import full_type_name
-from ...utils.formatting import safe_repr
-from ...utils.collections import OrderedDict
-from ...parser import validation
-from ...parser.consumption import ConsumptionContext
-from ...modeling.functions import Function
-
-
def configure_operation(operation):
    """
    Configure an operation model for execution, choosing local or remote
    (SSH) execution based on whether the operation resolves to a host.
    """
    interface = operation.interface
    if interface.node is not None:
        host = interface.node.host
    elif interface.relationship is not None:
        # Edge defaults to the source node when unspecified (False or None).
        if operation.relationship_edge is True:
            host = interface.relationship.target_node.host
        else:
            host = interface.relationship.source_node.host
    else:
        host = None

    _configure_common(operation)
    (_configure_local if host is None else _configure_remote)(operation)

    # Any remaining un-handled configuration parameters become extra
    # arguments, available as kwargs in either "run_script_locally" or
    # "run_script_with_ssh".
    for name, configuration in operation.configurations.iteritems():
        if name not in ('process', 'ssh'):
            operation.arguments[name] = configuration.instantiate(None)
-
-
def _configure_common(operation):
    """
    Set the arguments shared by local and remote operations: the script path
    and the sub-process configuration.
    """

    from ...modeling.models import Argument

    arguments = operation.arguments
    arguments['script_path'] = Argument.wrap(
        'script_path', operation.implementation, 'Relative path to the executable file.')
    arguments['process'] = Argument.wrap(
        'process', _get_process(operation), 'Sub-process configuration.')
-
-
def _configure_local(operation):
    """
    Point the operation at the local script runner
    (``operations.run_script_locally``).
    """

    from . import operations
    operation.function = '{0}.{1}'.format(
        operations.__name__, operations.run_script_locally.__name__)
-
-
def _configure_remote(operation):
    """
    Configure the operation for remote SSH execution via Fabric: fills in
    default credentials, builds the Fabric environment dict, reports
    validation issues for missing user/authentication, and points the
    operation at ``operations.run_script_with_ssh``.
    """

    from ...modeling.models import Argument
    from . import operations

    ssh = _get_ssh(operation)

    # Defaults
    # TODO: find a way to configure these generally in the service template
    default_user = ''
    default_password = ''
    if 'user' not in ssh:
        ssh['user'] = default_user
    if ('password' not in ssh) and ('key' not in ssh) and ('key_filename' not in ssh):
        ssh['password'] = default_password

    operation.arguments['use_sudo'] = Argument.wrap('use_sudo', ssh.get('use_sudo', False),
                                                    'Whether to execute with sudo.')

    operation.arguments['hide_output'] = Argument.wrap('hide_output', ssh.get('hide_output', []),
                                                       'Hide output of these Fabric groups.')

    fabric_env = {}
    if 'warn_only' in ssh:
        fabric_env['warn_only'] = ssh['warn_only']
    fabric_env['user'] = ssh.get('user')
    fabric_env['password'] = ssh.get('password')
    fabric_env['key'] = ssh.get('key')
    fabric_env['key_filename'] = ssh.get('key_filename')
    if 'address' in ssh:
        fabric_env['host_string'] = ssh['address']

    # Make sure we have a user
    # NOTE(review): the '' default assigned above is not None, so this check
    # appears unreachable unless the defaults change — confirm intent.
    if fabric_env.get('user') is None:
        context = ConsumptionContext.get_thread_local()
        context.validation.report('must configure "ssh.user" for "{0}"'
                                  .format(operation.implementation),
                                  level=validation.Issue.BETWEEN_TYPES)

    # Make sure we have an authentication value
    # NOTE(review): likewise, the '' default password above makes this check
    # hard to trigger — confirm intent.
    if (fabric_env.get('password') is None) and \
        (fabric_env.get('key') is None) and \
        (fabric_env.get('key_filename') is None):
        context = ConsumptionContext.get_thread_local()
        context.validation.report('must configure "ssh.password", "ssh.key", or "ssh.key_filename" '
                                  'for "{0}"'
                                  .format(operation.implementation),
                                  level=validation.Issue.BETWEEN_TYPES)

    operation.arguments['fabric_env'] = Argument.wrap('fabric_env', fabric_env,
                                                      'Fabric configuration.')

    operation.function = '{0}.{1}'.format(operations.__name__,
                                          operations.run_script_with_ssh.__name__)
-
-
def _get_process(operation):
    """
    Extract and validate the operation's 'process' configuration.

    Returns an OrderedDict copy of the configuration (or ``{}`` when unset).
    Unknown keys are reported as validation issues rather than raised;
    'eval_python' is coerced to bool and 'args' to a list of strings.
    """
    # Reach into the parameter's raw value; None when the key is absent.
    value = (operation.configurations.get('process')._value
             if 'process' in operation.configurations
             else None)
    if value is None:
        return {}
    _validate_type(value, dict, 'process')
    value = OrderedDict(value)
    # Values are replaced in place while iterating; safe because the key set
    # is never modified.
    for k, v in value.iteritems():
        if k == 'eval_python':
            value[k] = _coerce_bool(v, 'process.eval_python')
        elif k == 'cwd':
            _validate_type(v, basestring, 'process.cwd')
        elif k == 'command_prefix':
            _validate_type(v, basestring, 'process.command_prefix')
        elif k == 'args':
            value[k] = _dict_to_list_of_strings(v, 'process.args')
        elif k == 'env':
            _validate_type(v, dict, 'process.env')
        else:
            context = ConsumptionContext.get_thread_local()
            context.validation.report('unsupported configuration parameter: "process.{0}"'
                                      .format(k),
                                      level=validation.Issue.BETWEEN_TYPES)
    return value
-
-
def _get_ssh(operation):
    """
    Extract and validate the operation's 'ssh' configuration.

    Returns an OrderedDict copy of the configuration (or ``{}`` when unset).
    Unknown keys are reported as validation issues rather than raised;
    'use_sudo' and 'warn_only' are coerced to bool and 'hide_output' to a
    list of strings.
    """
    # Reach into the parameter's raw value; None when the key is absent.
    value = (operation.configurations.get('ssh')._value
             if 'ssh' in operation.configurations
             else None)
    if value is None:
        return {}
    _validate_type(value, dict, 'ssh')
    value = OrderedDict(value)
    # Values are replaced in place while iterating; safe because the key set
    # is never modified.
    for k, v in value.iteritems():
        if k == 'use_sudo':
            value[k] = _coerce_bool(v, 'ssh.use_sudo')
        elif k == 'hide_output':
            value[k] = _dict_to_list_of_strings(v, 'ssh.hide_output')
        elif k == 'warn_only':
            value[k] = _coerce_bool(v, 'ssh.warn_only')
        elif k == 'user':
            _validate_type(v, basestring, 'ssh.user')
        elif k == 'password':
            _validate_type(v, basestring, 'ssh.password')
        elif k == 'key':
            _validate_type(v, basestring, 'ssh.key')
        elif k == 'key_filename':
            _validate_type(v, basestring, 'ssh.key_filename')
        elif k == 'address':
            _validate_type(v, basestring, 'ssh.address')
        else:
            context = ConsumptionContext.get_thread_local()
            context.validation.report('unsupported configuration parameter: "ssh.{0}"'.format(k),
                                      level=validation.Issue.BETWEEN_TYPES)
    return value
-
-
def _validate_type(value, the_type, name):
    """
    Report a validation issue unless ``value`` is an instance of ``the_type``.

    ``Function`` placeholders are exempt (they are resolved at a later
    stage). Note this only reports — it never raises — so callers may still
    receive values of the wrong type.
    """
    if isinstance(value, Function):
        return
    if isinstance(value, the_type):
        return
    context = ConsumptionContext.get_thread_local()
    context.validation.report('"{0}" configuration is not a {1}: {2}'
                              .format(name, full_type_name(the_type), safe_repr(value)),
                              level=validation.Issue.BETWEEN_TYPES)
-
-
def _coerce_bool(value, name):
    """
    Coerce a configuration value to bool.

    Accepts None (returned as-is), real booleans, and the exact strings
    'true'/'false'. Anything else is reported as a validation issue and
    yields None. String comparisons (rather than a dict lookup) are kept
    because _validate_type only reports — non-string, even unhashable,
    values still reach this point.
    """
    if value is None:
        return None
    if isinstance(value, bool):
        return value
    _validate_type(value, basestring, name)
    if value == 'true':
        return True
    if value == 'false':
        return False
    context = ConsumptionContext.get_thread_local()
    context.validation.report('"{0}" configuration is not "true" or "false": {1}'
                              .format(name, safe_repr(value)),
                              level=validation.Issue.BETWEEN_TYPES)
-
-
def _dict_to_list_of_strings(the_dict, name):
    """
    Flatten a dict of strings into a list of its values, ordered by sorted
    key; each value is validated as a string (issues are reported, not
    raised).
    """
    _validate_type(the_dict, dict, name)
    values = []
    for key in sorted(the_dict):
        item = the_dict[key]
        _validate_type(item, basestring, '{0}.{1}'.format(name, key))
        values.append(item)
    return values



Mime
View raw message