ariatosca-dev mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From mxm...@apache.org
Subject incubator-ariatosca git commit: code review 2 - wip [Forced Update!]
Date Mon, 05 Dec 2016 13:11:35 GMT
Repository: incubator-ariatosca
Updated Branches:
  refs/heads/ARIA-30-SQL-based-storage-implementation 9d7431bd2 -> 72a4e48c8 (forced update)


code review 2 - wip


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/72a4e48c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/72a4e48c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/72a4e48c

Branch: refs/heads/ARIA-30-SQL-based-storage-implementation
Commit: 72a4e48c8f0ae74f2518f17192f001732a206bb2
Parents: 5ea0b63
Author: mxmrlv <mxmrlv@gmail.com>
Authored: Sun Dec 4 18:56:02 2016 +0200
Committer: mxmrlv <mxmrlv@gmail.com>
Committed: Mon Dec 5 15:11:24 2016 +0200

----------------------------------------------------------------------
 aria/__init__.py                                |   4 +-
 aria/orchestrator/context/common.py             |  14 +-
 aria/orchestrator/context/operation.py          |   4 +-
 aria/orchestrator/context/toolbelt.py           |   7 +-
 aria/orchestrator/context/workflow.py           |  18 +-
 aria/orchestrator/workflows/core/task.py        |  17 +-
 aria/storage/api.py                             |  31 ---
 aria/storage/core.py                            |   8 +-
 aria/storage/filesystem_rapi.py                 |  24 ++
 aria/storage/models.py                          | 246 +++++++++----------
 aria/storage/sql_mapi.py                        |  68 +++--
 aria/storage/structures.py                      |  41 +---
 tests/mock/context.py                           |  15 +-
 tests/mock/models.py                            |  73 +++---
 tests/orchestrator/context/test_operation.py    |  22 +-
 tests/orchestrator/context/test_toolbelt.py     |  30 ++-
 tests/orchestrator/context/test_workflow.py     |  22 +-
 tests/orchestrator/workflows/api/test_task.py   |  15 +-
 .../orchestrator/workflows/builtin/__init__.py  |   2 -
 .../workflows/builtin/test_execute_operation.py |  22 +-
 .../orchestrator/workflows/builtin/test_heal.py |  27 +-
 .../workflows/builtin/test_install.py           |  11 +-
 .../workflows/builtin/test_uninstall.py         |  12 +-
 .../orchestrator/workflows/core/test_engine.py  |   5 +-
 tests/orchestrator/workflows/core/test_task.py  |  11 +-
 .../test_task_graph_into_exececution_graph.py   |   3 +-
 tests/storage/__init__.py                       |   4 +-
 tests/storage/test_model_storage.py             |  37 +--
 tests/storage/test_models.py                    |  37 ++-
 tests/storage/test_resource_storage.py          |   6 +-
 30 files changed, 423 insertions(+), 413 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/aria/__init__.py
----------------------------------------------------------------------
diff --git a/aria/__init__.py b/aria/__init__.py
index 5317afa..f5151e3 100644
--- a/aria/__init__.py
+++ b/aria/__init__.py
@@ -56,7 +56,7 @@ def install_aria_extensions():
             del sys.modules[module_name]
 
 
-def application_model_storage(api, api_params=None):
+def application_model_storage(api, api_kwargs=None):
     """
     Initiate model storage for the supplied storage driver
     """
@@ -76,7 +76,7 @@ def application_model_storage(api, api_params=None):
         storage.models.Task,
     ]
     # if api not in _model_storage:
-    return storage.ModelStorage(api, items=models, api_params=api_params or {})
+    return storage.ModelStorage(api, items=models, api_kwargs=api_kwargs or {})
 
 
 def application_resource_storage(driver):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/aria/orchestrator/context/common.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/common.py b/aria/orchestrator/context/common.py
index a4cc4a4..75bb0fb 100644
--- a/aria/orchestrator/context/common.py
+++ b/aria/orchestrator/context/common.py
@@ -32,8 +32,8 @@ class BaseContext(logger.LoggerMixin):
             model_storage,
             resource_storage,
             deployment_id,
-            workflow_id,
-            execution_id=None,
+            workflow_name,
+            execution_name=None,
             task_max_attempts=1,
             task_retry_interval=0,
             task_ignore_failure=False,
@@ -44,8 +44,8 @@ class BaseContext(logger.LoggerMixin):
         self._model = model_storage
         self._resource = resource_storage
         self._deployment_id = deployment_id
-        self._workflow_id = workflow_id
-        self._execution_id = execution_id or str(uuid4())
+        self._workflow_name = workflow_name
+        self._execution_name = execution_name or str(uuid4())
         self._task_max_attempts = task_max_attempts
         self._task_retry_interval = task_retry_interval
         self._task_ignore_failure = task_ignore_failure
@@ -54,8 +54,8 @@ class BaseContext(logger.LoggerMixin):
         return (
             '{name}(name={self.name}, '
             'deployment_id={self._deployment_id}, '
-            'workflow_id={self._workflow_id}, '
-            'execution_id={self._execution_id})'
+            'workflow_name={self._workflow_name}, '
+            'execution_name={self._execution_name})'
             .format(name=self.__class__.__name__, self=self))
 
     @property
@@ -93,7 +93,7 @@ class BaseContext(logger.LoggerMixin):
         """
         The execution model
         """
-        return self.model.execution.get(self._execution_id)
+        return self.model.execution.get_by_name(self._execution_name)
 
     @execution.setter
     def execution(self, value):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/aria/orchestrator/context/operation.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/operation.py b/aria/orchestrator/context/operation.py
index f522111..8410c46 100644
--- a/aria/orchestrator/context/operation.py
+++ b/aria/orchestrator/context/operation.py
@@ -32,8 +32,8 @@ class BaseOperationContext(BaseContext):
             model_storage=workflow_context.model,
             resource_storage=workflow_context.resource,
             deployment_id=workflow_context._deployment_id,
-            workflow_id=workflow_context._workflow_id,
-            execution_id=workflow_context._execution_id,
+            workflow_name=workflow_context._workflow_name,
+            execution_name=workflow_context._execution_name,
             **kwargs)
         self._task_model = task
         self._actor = self.task.actor

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/aria/orchestrator/context/toolbelt.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/toolbelt.py b/aria/orchestrator/context/toolbelt.py
index ceaeb72..298d334 100644
--- a/aria/orchestrator/context/toolbelt.py
+++ b/aria/orchestrator/context/toolbelt.py
@@ -33,7 +33,7 @@ class NodeToolBelt(object):
         :return:
         """
         assert isinstance(self._op_context, operation.NodeOperationContext)
-        filters = {'target_node_instance_fk': self._op_context.node_instance.storage_id}
+        filters = {'target_node_instance_id': self._op_context.node_instance.id}
         for relationship_instance in \
                 self._op_context.model.relationship_instance.iter(filters=filters):
             yield relationship_instance.source_node_instance
@@ -45,9 +45,8 @@ class NodeToolBelt(object):
         :return:
         """
         assert isinstance(self._op_context, operation.NodeOperationContext)
-        host_id = self._op_context.node_instance.host_id
-        host_instance = self._op_context.model.node_instance.get(host_id)
-        return host_instance.runtime_properties.get('ip')
+        host = self._op_context.node_instance.host
+        return host.runtime_properties.get('ip')
 
 
 class RelationshipToolBelt(object):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/aria/orchestrator/context/workflow.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/workflow.py b/aria/orchestrator/context/workflow.py
index 0540590..52ea630 100644
--- a/aria/orchestrator/context/workflow.py
+++ b/aria/orchestrator/context/workflow.py
@@ -37,25 +37,25 @@ class WorkflowContext(BaseContext):
         # TODO: execution creation should happen somewhere else
         # should be moved there, when such logical place exists
         try:
-            self.model.execution.get(self._execution_id)
+            self.model.execution.get_by_name(self._execution_name)
         except storage.exceptions.StorageError:
             self._create_execution()
 
     def __repr__(self):
         return (
             '{name}(deployment_id={self._deployment_id}, '
-            'workflow_id={self._workflow_id}, '
-            'execution_id={self._execution_id})'.format(
+            'workflow_name={self._workflow_name}, '
+            'execution_name={self._execution_name})'.format(
                 name=self.__class__.__name__, self=self))
 
     def _create_execution(self):
         execution_cls = self.model.execution.model_cls
         now = datetime.utcnow()
         execution = self.model.execution.model_cls(
-            id=self._execution_id,
-            blueprint_fk=self.blueprint.storage_id,
-            deployment_fk=self.deployment.storage_id,
-            workflow_id=self._workflow_id,
+            name=self._execution_name,
+            blueprint_id=self.blueprint.id,
+            deployment_id=self.deployment.id,
+            workflow_name=self._workflow_name,
             created_at=now,
             status=execution_cls.PENDING,
             parameters=self.parameters,
@@ -69,7 +69,7 @@ class WorkflowContext(BaseContext):
         """
         return self.model.node.iter(
             filters={
-                'deployment_fk': self.deployment.storage_id
+                'deployment_id': self.deployment.id
             }
         )
 
@@ -80,7 +80,7 @@ class WorkflowContext(BaseContext):
         """
         return self.model.node_instance.iter(
             filters={
-                'deployment_fk': self.deployment.storage_id
+                'deployment_id': self.deployment.id
             }
         )
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py
index 3b18965..1163722 100644
--- a/aria/orchestrator/workflows/core/task.py
+++ b/aria/orchestrator/workflows/core/task.py
@@ -106,24 +106,25 @@ class OperationTask(BaseTask):
     def __init__(self, api_task, *args, **kwargs):
         super(OperationTask, self).__init__(id=api_task.id, **kwargs)
         self._workflow_context = api_task._workflow_context
-        task_model = api_task._workflow_context.model.task.model_cls
+        base_task_model = api_task._workflow_context.model.task.model_cls
 
         if isinstance(api_task.actor, models.NodeInstance):
             context_class = operation_context.NodeOperationContext
+            task_model_cls = base_task_model.as_node_instance
         elif isinstance(api_task.actor, models.RelationshipInstance):
             context_class = operation_context.RelationshipOperationContext
+            task_model_cls = base_task_model.as_relationship_instance
         else:
-            raise RuntimeError('No operation context could be created for {0}'
-                               .format(api_task.actor.model_cls))
+            raise RuntimeError('No operation context could be created for {actor.model_cls}'
+                               .format(actor=api_task.actor))
 
-        operation_task = task_model(
-            id=api_task.id,
+        operation_task = task_model_cls(
             name=api_task.name,
             operation_mapping=api_task.operation_mapping,
-            actor=api_task.actor,
+            instance_id=api_task.actor.id,
             inputs=api_task.inputs,
-            status=task_model.PENDING,
-            execution_id=self._workflow_context._execution_id,
+            status=base_task_model.PENDING,
+            execution_id=self._workflow_context._execution_name,
             max_attempts=api_task.max_attempts,
             retry_interval=api_task.retry_interval,
             ignore_failure=api_task.ignore_failure,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/aria/storage/api.py
----------------------------------------------------------------------
diff --git a/aria/storage/api.py b/aria/storage/api.py
index 1a7ca45..d6fc3b8 100644
--- a/aria/storage/api.py
+++ b/aria/storage/api.py
@@ -15,9 +15,6 @@
 """
 General storage API
 """
-from contextlib import contextmanager
-
-from . import exceptions
 
 
 class StorageAPI(object):
@@ -32,34 +29,6 @@ class StorageAPI(object):
         """
         raise NotImplementedError('Subclass must implement abstract create method')
 
-    @contextmanager
-    def connect(self):
-        """
-        Established a connection and destroys it after use.
-        :return:
-        """
-        try:
-            self._establish_connection()
-            yield self
-        except BaseException as e:
-            raise exceptions.StorageError(str(e))
-        finally:
-            self._destroy_connection()
-
-    def _establish_connection(self):
-        """
-        Establish a conenction. used in the 'connect' contextmanager.
-        :return:
-        """
-        pass
-
-    def _destroy_connection(self):
-        """
-        Destroy a connection. used in the 'connect' contextmanager.
-        :return:
-        """
-        pass
-
 
 class ModelAPI(StorageAPI):
     """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/aria/storage/core.py
----------------------------------------------------------------------
diff --git a/aria/storage/core.py b/aria/storage/core.py
index e452698..a5d3210 100644
--- a/aria/storage/core.py
+++ b/aria/storage/core.py
@@ -52,8 +52,8 @@ class Storage(LoggerMixin):
     """
     Represents the storage
     """
-    def __init__(self, api_cls, api_params=None, items=(), **kwargs):
-        self._api_params = api_params or {}
+    def __init__(self, api_cls, api_kwargs=None, items=(), **kwargs):
+        self._api_kwargs = api_kwargs or {}
         super(Storage, self).__init__(**kwargs)
         self.api = api_cls
         self.registered = {}
@@ -90,7 +90,7 @@ class ResourceStorage(Storage):
         :param name:
         :return:
         """
-        self.registered[name] = self.api(name=name, **self._api_params)
+        self.registered[name] = self.api(name=name, **self._api_kwargs)
         self.registered[name].create()
         self.logger.debug('setup {name} in storage {self!r}'.format(name=name, self=self))
 
@@ -112,7 +112,7 @@ class ModelStorage(Storage):
             return
         self.registered[model_name] = self.api(name=model_name,
                                                model_cls=model_cls,
-                                               **self._api_params)
+                                               **self._api_kwargs)
         self.registered[model_name].create()
         self.logger.debug('setup {name} in storage {self!r}'.format(name=model_name, self=self))
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/aria/storage/filesystem_rapi.py
----------------------------------------------------------------------
diff --git a/aria/storage/filesystem_rapi.py b/aria/storage/filesystem_rapi.py
index af30e1d..f810f58 100644
--- a/aria/storage/filesystem_rapi.py
+++ b/aria/storage/filesystem_rapi.py
@@ -17,6 +17,7 @@ SQLalchemy based RAPI
 """
 import os
 import shutil
+from contextlib import contextmanager
 from functools import partial
 from distutils import dir_util                                # https://github.com/PyCQA/pylint/issues/73; pylint: disable=no-name-in-module
 from multiprocessing import RLock
@@ -43,10 +44,33 @@ class FileSystemResourceAPI(api.ResourceAPI):
         self._join_path = partial(os.path.join, self.base_path)
         self._lock = RLock()
 
+    @contextmanager
+    def connect(self):
+        """
+        Establishes a connection and destroys it after use.
+        :return:
+        """
+        try:
+            self._establish_connection()
+            yield self
+        except BaseException as e:
+            raise exceptions.StorageError(str(e))
+        finally:
+            self._destroy_connection()
+
     def _establish_connection(self):
+        """
+        Establish a connection. Used in the 'connect' contextmanager.
+        :return:
+        """
         self._lock.acquire()
 
+
     def _destroy_connection(self):
+        """
+        Destroy a connection. Used in the 'connect' contextmanager.
+        :return:
+        """
         self._lock.release()
 
     def __repr__(self):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/aria/storage/models.py
----------------------------------------------------------------------
diff --git a/aria/storage/models.py b/aria/storage/models.py
index 29dfdc9..d58420f 100644
--- a/aria/storage/models.py
+++ b/aria/storage/models.py
@@ -38,7 +38,6 @@ classes:
 """
 from collections import namedtuple
 from datetime import datetime
-from uuid import uuid4
 
 from sqlalchemy.ext.declarative.base import declared_attr
 from sqlalchemy.orm import validates
@@ -54,11 +53,11 @@ from .structures import (
     String,
     PickleType,
     Float,
-    MutableDict,
     Dict,
     foreign_key,
     one_to_many_relationship,
-    association_proxy
+    relationship,
+    orm,
 )
 
 __all__ = (
@@ -77,10 +76,6 @@ __all__ = (
 )
 
 
-def uuid_generator():
-    return str(uuid4())
-
-
 #pylint: disable=no-self-argument
 
 
@@ -92,7 +87,7 @@ class Blueprint(SQLModelBase):
 
     created_at = Column(DateTime, nullable=False, index=True)
     main_file_name = Column(Text, nullable=False)
-    plan = Column(MutableDict.as_mutable(Dict), nullable=False)
+    plan = Column(Dict, nullable=False)
     updated_at = Column(DateTime)
     description = Column(Text)
 
@@ -103,25 +98,25 @@ class Deployment(SQLModelBase):
     """
     __tablename__ = 'deployments'
 
-    _private_fields = ['blueprint_fk']
+    _private_fields = ['blueprint_id']
 
-    blueprint_fk = foreign_key(Blueprint.storage_id)
+    blueprint_id = foreign_key(Blueprint.id)
 
     created_at = Column(DateTime, nullable=False, index=True)
     description = Column(Text)
-    inputs = Column(MutableDict.as_mutable(Dict))
-    groups = Column(MutableDict.as_mutable(Dict))
+    inputs = Column(Dict)
+    groups = Column(Dict)
     permalink = Column(Text)
-    policy_triggers = Column(MutableDict.as_mutable(Dict))
-    policy_types = Column(MutableDict.as_mutable(Dict))
-    outputs = Column(MutableDict.as_mutable(Dict))
-    scaling_groups = Column(MutableDict.as_mutable(Dict))
+    policy_triggers = Column(Dict)
+    policy_types = Column(Dict)
+    outputs = Column(Dict)
+    scaling_groups = Column(Dict)
     updated_at = Column(DateTime)
-    workflows = Column(MutableDict.as_mutable(Dict))
+    workflows = Column(Dict)
 
     @declared_attr
     def blueprint(cls):
-        return one_to_many_relationship(cls, Blueprint, cls.blueprint_fk)
+        return one_to_many_relationship(cls, Blueprint, cls.blueprint_id)
 
 
 class Execution(SQLModelBase):
@@ -164,36 +159,31 @@ class Execution(SQLModelBase):
                 new=value))
         return value
 
-    deployment_id = association_proxy('deployment', 'id')
-    blueprint_id = association_proxy('blueprint', 'id')
-
-    deployment_fk = foreign_key(Deployment.storage_id)
-    blueprint_fk = foreign_key(Blueprint.storage_id)
-    _private_fields = ['deployment_fk', 'blueprint_fk']
+    deployment_id = foreign_key(Deployment.id)
+    blueprint_id = foreign_key(Blueprint.id)
+    _private_fields = ['deployment_id', 'blueprint_id']
 
     created_at = Column(DateTime, index=True)
     started_at = Column(DateTime, nullable=True, index=True)
     ended_at = Column(DateTime, nullable=True, index=True)
     error = Column(Text, nullable=True)
     is_system_workflow = Column(Boolean, nullable=False, default=False)
-    parameters = Column(MutableDict.as_mutable(Dict))
+    parameters = Column(Dict)
     status = Column(Enum(*STATES, name='execution_status'), default=PENDING)
-    workflow_id = Column(Text, nullable=False)
+    workflow_name = Column(Text, nullable=False)
 
     @declared_attr
     def deployment(cls):
-        return one_to_many_relationship(cls, Deployment, cls.deployment_fk)
+        return one_to_many_relationship(cls, Deployment, cls.deployment_id)
 
     @declared_attr
     def blueprint(cls):
-        return one_to_many_relationship(cls, Blueprint, cls.blueprint_fk)
+        return one_to_many_relationship(cls, Blueprint, cls.blueprint_id)
 
     def __str__(self):
-        id_name, id_value = self.unique_id()
-        return '<{0} {1}=`{2}` (status={3})>'.format(
+        return '<{0} id=`{1}` (status={2})>'.format(
             self.__class__.__name__,
-            id_name,
-            id_value,
+            self.id,
             self.status
         )
 
@@ -204,28 +194,25 @@ class DeploymentUpdate(SQLModelBase):
     """
     __tablename__ = 'deployment_updates'
 
-    deployment_id = association_proxy('deployment', 'id')
-    execution_id = association_proxy('execution', 'id')
-
-    deployment_fk = foreign_key(Deployment.storage_id)
-    execution_fk = foreign_key(Execution.storage_id, nullable=True)
-    _private_fields = ['execution_fk', 'deployment_fk']
+    deployment_id = foreign_key(Deployment.id)
+    execution_id = foreign_key(Execution.id, nullable=True)
+    _private_fields = ['execution_id', 'deployment_id']
 
     created_at = Column(DateTime, nullable=False, index=True)
-    deployment_plan = Column(MutableDict.as_mutable(Dict))
-    deployment_update_node_instances = Column(MutableDict.as_mutable(Dict))
-    deployment_update_deployment = Column(MutableDict.as_mutable(Dict))
-    deployment_update_nodes = Column(MutableDict.as_mutable(Dict))
-    modified_entity_ids = Column(MutableDict.as_mutable(Dict))
+    deployment_plan = Column(Dict)
+    deployment_update_node_instances = Column(Dict)
+    deployment_update_deployment = Column(Dict)
+    deployment_update_nodes = Column(Dict)
+    modified_entity_ids = Column(Dict)
     state = Column(Text)
 
     @declared_attr
     def execution(cls):
-        return one_to_many_relationship(cls, Execution, cls.execution_fk)
+        return one_to_many_relationship(cls, Execution, cls.execution_id)
 
     @declared_attr
     def deployment(cls):
-        return one_to_many_relationship(cls, Deployment, cls.deployment_fk)
+        return one_to_many_relationship(cls, Deployment, cls.deployment_id)
 
     def to_dict(self, suppress_error=False, **kwargs):
         dep_update_dict = super(DeploymentUpdate, self).to_dict(suppress_error)
@@ -259,9 +246,8 @@ class DeploymentUpdateStep(SQLModelBase):
         PLUGIN='plugin'
     )
 
-    deployment_update_id = association_proxy('deployment_update', 'id')
-    deployment_update_fk = foreign_key(DeploymentUpdate.storage_id)
-    _private_fields = ['deployment_update_fk']
+    deployment_update_id = foreign_key(DeploymentUpdate.id)
+    _private_fields = ['deployment_update_id']
 
     action = Column(Enum(*ACTION_TYPES, name='action_type'))
     entity_id = Column(Text, nullable=False)
@@ -271,7 +257,7 @@ class DeploymentUpdateStep(SQLModelBase):
     def deployment_update(cls):
         return one_to_many_relationship(cls,
                                         DeploymentUpdate,
-                                        cls.deployment_update_fk,
+                                        cls.deployment_update_id,
                                         backreference='steps')
 
     def __hash__(self):
@@ -315,22 +301,21 @@ class DeploymentModification(SQLModelBase):
     STATES = [STARTED, FINISHED, ROLLEDBACK]
     END_STATES = [FINISHED, ROLLEDBACK]
 
-    deployment_fk = foreign_key(Deployment.storage_id)
-    _private_fields = ['deployment_fk']
-    deployment_id = association_proxy('deployment', 'id')
+    deployment_id = foreign_key(Deployment.id)
+    _private_fields = ['deployment_id']
 
-    context = Column(MutableDict.as_mutable(Dict))
+    context = Column(Dict)
     created_at = Column(DateTime, nullable=False, index=True)
     ended_at = Column(DateTime, index=True)
-    modified_nodes = Column(MutableDict.as_mutable(Dict))
-    node_instances = Column(MutableDict.as_mutable(Dict))
+    modified_nodes = Column(Dict)
+    node_instances = Column(Dict)
     status = Column(Enum(*STATES, name='deployment_modification_status'))
 
     @declared_attr
     def deployment(cls):
         return one_to_many_relationship(cls,
                                         Deployment,
-                                        cls.deployment_fk,
+                                        cls.deployment_id,
                                         backreference='modifications')
 
 
@@ -339,35 +324,38 @@ class Node(SQLModelBase):
     Node model representation.
     """
     __tablename__ = 'nodes'
+    id = Column(Integer, primary_key=True)
 
     # See base class for an explanation on these properties
     is_id_unique = False
 
-    _private_fields = ['deployment_fk']
-    deployment_fk = foreign_key(Deployment.storage_id)
-
-    deployment_id = association_proxy('deployment', 'id')
-    blueprint_id = association_proxy('blueprint', 'id')
+    _private_fields = ['deployment_id', 'host_id']
+    deployment_id = foreign_key(Deployment.id)
+    host_id = foreign_key('nodes.id', nullable=True)
 
     @declared_attr
     def deployment(cls):
-        return one_to_many_relationship(cls, Deployment, cls.deployment_fk)
+        return one_to_many_relationship(cls, Deployment, cls.deployment_id)
 
     deploy_number_of_instances = Column(Integer, nullable=False)
     # TODO: This probably should be a foreign key, but there's no guarantee
     # in the code, currently, that the host will be created beforehand
-    host_id = Column(Text)
+    _host_id = foreign_key('nodes.id', nullable=True)
     max_number_of_instances = Column(Integer, nullable=False)
     min_number_of_instances = Column(Integer, nullable=False)
     number_of_instances = Column(Integer, nullable=False)
     planned_number_of_instances = Column(Integer, nullable=False)
-    plugins = Column(MutableDict.as_mutable(Dict))
-    plugins_to_install = Column(MutableDict.as_mutable(Dict))
-    properties = Column(MutableDict.as_mutable(Dict))
-    operations = Column(MutableDict.as_mutable(Dict))
+    plugins = Column(Dict)
+    plugins_to_install = Column(Dict)
+    properties = Column(Dict)
+    operations = Column(Dict)
     type = Column(Text, nullable=False, index=True)
     type_hierarchy = Column(PickleType)
 
+    host = relationship('Node',
+                        foreign_keys=[host_id],
+                        remote_side=[id],
+                        backref=orm.backref('guests'))
 
 class Relationship(SQLModelBase):
     """
@@ -375,29 +363,32 @@ class Relationship(SQLModelBase):
     """
     __tablename__ = 'relationships'
 
-    blueprint_id = association_proxy('blueprint', 'id')
-    deployment_id = association_proxy('deployment', 'id')
+    _private_fields = ['source_node_id', 'target_node_id']
 
-    _private_fields = ['source_node_fk', 'target_node_fk']
-
-    source_node_fk = foreign_key(Node.storage_id)
-    target_node_fk = foreign_key(Node.storage_id)
+    source_node_id = foreign_key(Node.id)
+    target_node_id = foreign_key(Node.id)
 
     @declared_attr
     def source_node(cls):
-        return one_to_many_relationship(cls, Node, cls.source_node_fk, 'outbound_relationships')
+        return one_to_many_relationship(cls,
+                                        Node,
+                                        cls.source_node_id,
+                                        'outbound_relationships')
 
     @declared_attr
     def target_node(cls):
-        return one_to_many_relationship(cls, Node, cls.target_node_fk, 'inbound_relationships')
-
-    source_interfaces = Column(MutableDict.as_mutable(Dict))
-    source_operations = Column(MutableDict.as_mutable(Dict))
-    target_interfaces = Column(MutableDict.as_mutable(Dict))
-    target_operations = Column(MutableDict.as_mutable(Dict))
+        return one_to_many_relationship(cls,
+                                        Node,
+                                        cls.target_node_id,
+                                        'inbound_relationships')
+
+    source_interfaces = Column(Dict)
+    source_operations = Column(Dict)
+    target_interfaces = Column(Dict)
+    target_operations = Column(Dict)
     type = Column(String)
     type_hierarchy = Column(PickleType)
-    properties = Column(MutableDict.as_mutable(Dict))
+    properties = Column(Dict)
 
 
 class NodeInstance(SQLModelBase):
@@ -405,28 +396,28 @@ class NodeInstance(SQLModelBase):
     Node instance model representation.
     """
     __tablename__ = 'node_instances'
+    id = Column(Integer, primary_key=True)
 
-    node_fk = foreign_key(Node.storage_id)
-    deployment_fk = foreign_key(Deployment.storage_id)
-    _private_fields = ['node_fk', 'deployment_fk']
-
-    node_id = association_proxy('node', 'id')
-    deployment_id = association_proxy('node', 'deployment_id')
+    node_id = foreign_key(Node.id)
+    deployment_id = foreign_key(Deployment.id)
+    host_id = foreign_key('node_instances.id', nullable=True)
 
-    storage_id = Column(Integer, primary_key=True, autoincrement=True)
-    id = Column(Text, index=True)
+    _private_fields = ['node_id', 'host_id']
 
     # TODO: This probably should be a foreign key, but there's no guarantee
     # in the code, currently, that the host will be created beforehand
-    host_id = Column(Text)
-    runtime_properties = Column(MutableDict.as_mutable(Dict))
-    scaling_groups = Column(MutableDict.as_mutable(Dict))
+    runtime_properties = Column(Dict)
+    scaling_groups = Column(Dict)
     state = Column(Text, nullable=False)
     version = Column(Integer, default=1)
+    host = relationship('NodeInstance',
+                        foreign_keys=[host_id],
+                        remote_side=[id],
+                        backref=orm.backref('guests'))
 
     @declared_attr
     def node(cls):
-        return one_to_many_relationship(cls, Node, cls.node_fk)
+        return one_to_many_relationship(cls, Node, cls.node_id)
 
 
 class RelationshipInstance(SQLModelBase):
@@ -435,34 +426,31 @@ class RelationshipInstance(SQLModelBase):
     """
     __tablename__ = 'relationship_instances'
 
-    blueprint_id = association_proxy('blueprint', 'id')
-    deployment_id = association_proxy('deployment', 'id')
+    relationship_id = foreign_key(Relationship.id)
+    source_node_instance_id = foreign_key(NodeInstance.id)
+    target_node_instance_id = foreign_key(NodeInstance.id)
 
-    relationship_fk = foreign_key(Relationship.storage_id)
-    source_node_instance_fk = foreign_key(NodeInstance.storage_id)
-    target_node_instance_fk = foreign_key(NodeInstance.storage_id)
-
-    _private_fields = ['relationship_storage_fk',
-                       'source_node_instance_fk',
-                       'target_node_instance_fk']
+    _private_fields = ['relationship_id',
+                       'source_node_instance_id',
+                       'target_node_instance_id']
 
     @declared_attr
     def source_node_instance(cls):
         return one_to_many_relationship(cls,
                                         NodeInstance,
-                                        cls.source_node_instance_fk,
+                                        cls.source_node_instance_id,
                                         'outbound_relationship_instances')
 
     @declared_attr
     def target_node_instance(cls):
         return one_to_many_relationship(cls,
                                         NodeInstance,
-                                        cls.target_node_instance_fk,
+                                        cls.target_node_instance_id,
                                         'inbound_relationship_instances')
 
     @declared_attr
     def relationship(cls):
-        return one_to_many_relationship(cls, Relationship, cls.relationship_fk)
+        return one_to_many_relationship(cls, Relationship, cls.relationship_id)
 
 
 class ProviderContext(SQLModelBase):
@@ -472,7 +460,7 @@ class ProviderContext(SQLModelBase):
     __tablename__ = 'provider_context'
 
     name = Column(Text, nullable=False)
-    context = Column(MutableDict.as_mutable(Dict), nullable=False)
+    context = Column(Dict, nullable=False)
 
 
 class Plugin(SQLModelBase):
@@ -481,21 +469,18 @@ class Plugin(SQLModelBase):
     """
     __tablename__ = 'plugins'
 
-    storage_id = Column(Integer, primary_key=True, autoincrement=True)
-    id = Column(Text, index=True)
-
     archive_name = Column(Text, nullable=False, index=True)
     distribution = Column(Text)
     distribution_release = Column(Text)
     distribution_version = Column(Text)
-    excluded_wheels = Column(MutableDict.as_mutable(Dict))
+    excluded_wheels = Column(Dict)
     package_name = Column(Text, nullable=False, index=True)
     package_source = Column(Text)
     package_version = Column(Text)
-    supported_platform = Column(MutableDict.as_mutable(Dict))
-    supported_py_versions = Column(MutableDict.as_mutable(Dict))
+    supported_platform = Column(Dict)
+    supported_py_versions = Column(Dict)
     uploaded_at = Column(DateTime, nullable=False, index=True)
-    wheels = Column(MutableDict.as_mutable(Dict), nullable=False)
+    wheels = Column(Dict, nullable=False)
 
 
 class Task(SQLModelBase):
@@ -504,18 +489,20 @@ class Task(SQLModelBase):
     """
 
     __tablename__ = 'task'
-    node_instance_fk = foreign_key(NodeInstance.storage_id, nullable=True)
-    relationship_instance_fk = foreign_key(RelationshipInstance.storage_id, nullable=True)
+    node_instance_id = foreign_key(NodeInstance.id, nullable=True)
+    relationship_instance_id = foreign_key(RelationshipInstance.id, nullable=True)
 
-    _private_fields = ['node_instance_fk', 'relationship_instance_fk']
+    _private_fields = ['node_instance_id', 'relationship_instance_id']
 
     @declared_attr
     def node_instance(cls):
-        return one_to_many_relationship(cls, NodeInstance, cls.node_instance_fk)
+        return one_to_many_relationship(cls, NodeInstance, cls.node_instance_id)
 
     @declared_attr
     def relationship_instance(cls):
-        return one_to_many_relationship(cls, RelationshipInstance, cls.relationship_instance_fk)
+        return one_to_many_relationship(cls,
+                                        RelationshipInstance,
+                                        cls.relationship_instance_id)
 
     PENDING = 'pending'
     RETRYING = 'retrying'
@@ -559,15 +546,7 @@ class Task(SQLModelBase):
     # Operation specific fields
     name = Column(String)
     operation_mapping = Column(String)
-    inputs = Column(MutableDict.as_mutable(Dict))
-
-    @property
-    def actor_storage_id(self):
-        """
-        Return the actor storage id of the task
-        :return:
-        """
-        return self.node_instance_fk or self.relationship_instance_fk
+    inputs = Column(Dict)
 
     @property
     def actor(self):
@@ -577,9 +556,10 @@ class Task(SQLModelBase):
         """
         return self.node_instance or self.relationship_instance
 
-    def __init__(self, actor=None, **kwargs):
-        if isinstance(actor, RelationshipInstance):
-            kwargs['relationship_instance_fk'] = actor.storage_id
-        elif isinstance(actor, NodeInstance):
-            kwargs['node_instance_fk'] = actor.storage_id
-        super(Task, self).__init__(**kwargs)
+    @classmethod
+    def as_node_instance(cls, instance_id, **kwargs):
+        return cls(node_instance_id=instance_id, **kwargs)
+
+    @classmethod
+    def as_relationship_instance(cls, instance_id, **kwargs):
+        return cls(relationship_instance_id=instance_id, **kwargs)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/aria/storage/sql_mapi.py
----------------------------------------------------------------------
diff --git a/aria/storage/sql_mapi.py b/aria/storage/sql_mapi.py
index 25cc82f..51e4ae1 100644
--- a/aria/storage/sql_mapi.py
+++ b/aria/storage/sql_mapi.py
@@ -38,11 +38,10 @@ class SQLAlchemyModelAPI(api.ModelAPI):
         self._engine = engine
         self._session = session
 
-    def get(self, entry_id, include=None, filters=None, locking=False, **kwargs):
+    def get(self, entry_id, include=None, locking=False, **kwargs):
         """Return a single result based on the model class and element ID
         """
-        filters = filters or {'id': entry_id}
-        query = self._get_query(include, filters)
+        query = self._get_query(include, {'id': entry_id})
         if locking:
             query = query.with_for_update()
         result = query.first()
@@ -54,19 +53,47 @@ class SQLAlchemyModelAPI(api.ModelAPI):
             )
         return result
 
-    def iter(self,
+    def get_by_name(self, entry_name, include=None, **kwargs):
+        result = self.list(include=include, filters={'name': entry_name})
+        if not result:
+            raise exceptions.StorageError(
+                'Requested {0} with NAME `{1}` was not found'
+                .format(self.model_cls.__name__, entry_name)
+            )
+        elif len(result) > 1:
+            raise exceptions.StorageError(
+                'Requested {0} with NAME `{1}` returned more than 1 value'
+                .format(self.model_cls.__name__, entry_name)
+            )
+        else:
+            return result[0]
+
+    def list(self,
              include=None,
              filters=None,
              pagination=None,
              sort=None,
              **kwargs):
-        """Return a (possibly empty) list of `model_class` results
-        """
         query = self._get_query(include, filters, sort)
 
-        results, _, _, _ = self._paginate(query, pagination)
+        results, total, size, offset = self._paginate(query, pagination)
 
-        for result in results:
+        return ListResult(
+            items=results,
+            metadata=dict(total=total,
+                          size=size,
+                          offset=offset)
+        )
+
+    def iter(self,
+             include=None,
+             filters=None,
+             sort=None,
+             **kwargs):
+        """Return a (possibly empty) list of `model_class` results
+        """
+        query = self._get_query(include, filters, sort)
+        for result in query:
             yield result
 
     def put(self, entry, **kwargs):
@@ -81,26 +108,13 @@ class SQLAlchemyModelAPI(api.ModelAPI):
         self._safe_commit()
         return entry
 
-    def delete(self, entry_id, filters=None, **kwargs):
+    def delete(self, entry, **kwargs):
         """Delete a single result based on the model class and element ID
         """
-        try:
-            instance = self.get(
-                entry_id,
-                filters=filters
-            )
-        except exceptions.StorageError:
-            raise exceptions.StorageError(
-                'Could not delete {0} with ID `{1}` - element not found'
-                .format(
-                    self.model_cls.__name__,
-                    entry_id
-                )
-            )
-        self._load_relationships(instance)
-        self._session.delete(instance)
+        self._load_relationships(entry)
+        self._session.delete(entry)
         self._safe_commit()
-        return instance
+        return entry
 
     def update(self, entry, **kwargs):
         """Add `instance` to the DB session, and attempt to commit
@@ -125,8 +139,8 @@ class SQLAlchemyModelAPI(api.ModelAPI):
     def _establish_connection(self):
         pass
 
-    def create(self):
-        self.model_cls.__table__.create(self._engine)
+    def create(self, checkfirst=True, **kwargs):
+        self.model_cls.__table__.create(self._engine, checkfirst=checkfirst)
 
     def drop(self):
         """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/aria/storage/structures.py
----------------------------------------------------------------------
diff --git a/aria/storage/structures.py b/aria/storage/structures.py
index d9a5322..317daec 100644
--- a/aria/storage/structures.py
+++ b/aria/storage/structures.py
@@ -80,14 +80,14 @@ def one_to_many_relationship(child_class,
     backreference = backreference or child_class.__tablename__
     return relationship(
         parent_class,
-        primaryjoin=lambda: parent_class.storage_id == foreign_key_column,
+        primaryjoin=lambda: parent_class.id == foreign_key_column,
         # The following line make sure that when the *parent* is
         # deleted, all its connected children are deleted as well
         backref=backref(backreference, cascade='all')
     )
 
 
-class Dict(TypeDecorator):
+class _DictType(TypeDecorator):
     """
     Dict representation of type.
     """
@@ -112,7 +112,7 @@ class Dict(TypeDecorator):
         return value
 
 
-class MutableDict(Mutable, dict):
+class _MutableDict(Mutable, dict):
     """
     Enables tracking for dict values.
     """
@@ -120,9 +120,9 @@ class MutableDict(Mutable, dict):
     def coerce(cls, key, value):
         "Convert plain dictionaries to MutableDict."
 
-        if not isinstance(value, MutableDict):
+        if not isinstance(value, _MutableDict):
             if isinstance(value, dict):
-                return MutableDict(value)
+                return _MutableDict(value)
 
             # this call will raise ValueError
             return Mutable.coerce(key, value)
@@ -141,6 +141,8 @@ class MutableDict(Mutable, dict):
         dict.__delitem__(self, key)
         self.changed()
 
+Dict = _MutableDict.as_mutable(_DictType)
+
 
 class SQLModelBase(Model):
     """
@@ -149,20 +151,13 @@ class SQLModelBase(Model):
     # SQLAlchemy syntax
     __abstract__ = True
 
-    # Does the class represent a resource (Blueprint, Deployment, etc.) or a
-    # management table (User, Tenant, etc.), as they are handled differently
-    is_resource = False
-
-    # This would be overridden once the models are created.
+    # This would be overridden once the models are created. Created for pylint.
     __table__ = None
 
     _private_fields = []
 
-    # Indicates whether the `id` column in this class should be unique
-    is_id_unique = True
-
-    storage_id = Column(Integer, primary_key=True, autoincrement=True)
-    id = Column(Text, index=True)
+    id = Column(Integer, primary_key=True, autoincrement=True)
+    name = Column(Text, index=True)
 
     @classmethod
     def unique_id(cls):
@@ -197,19 +192,5 @@ class SQLModelBase(Model):
         """
         return set(cls.__table__.columns.keys()) - set(cls._private_fields)
 
-    def __str__(self):
-        id_name, id_value = self.unique_id()
-        return '<{0} {1}=`{2}`>'.format(
-            self.__class__.__name__,
-            id_name,
-            id_value
-        )
-
     def __repr__(self):
-        return str(self)
-
-    def __unicode__(self):
-        return str(self)
-
-    def __eq__(self, other):
-        return isinstance(other, self.__class__) and self.to_dict() == other.to_dict()
+        return '<{0} id=`{1}`>'.format(self.__class__.__name__, self.id)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/mock/context.py
----------------------------------------------------------------------
diff --git a/tests/mock/context.py b/tests/mock/context.py
index 0ab18bf..8231d08 100644
--- a/tests/mock/context.py
+++ b/tests/mock/context.py
@@ -20,17 +20,17 @@ from aria import application_model_storage
 from aria.orchestrator import context
 from aria.storage.sql_mapi import SQLAlchemyModelAPI
 
-from tests.storage import get_sqlite_api_params
+from tests.storage import get_sqlite_api_kwargs
 
 from . import models
 
 
 @pytest.fixture
 def simple(**kwargs):
-    api_params = get_sqlite_api_params()
-    model_storage = application_model_storage(SQLAlchemyModelAPI, api_params=api_params)
-    model_storage.blueprint.put(models.get_blueprint())
-    blueprint = model_storage.blueprint.get(models.BLUEPRINT_ID)
+    api_kwargs = get_sqlite_api_kwargs()
+    model_storage = application_model_storage(SQLAlchemyModelAPI, api_kwargs=api_kwargs)
+    blueprint = models.get_blueprint()
+    model_storage.blueprint.put(blueprint)
     deployment = models.get_deployment(blueprint)
     model_storage.deployment.put(deployment)
 
@@ -67,9 +67,8 @@ def simple(**kwargs):
         name='simple_context',
         model_storage=model_storage,
         resource_storage=None,
-        deployment_id=models.DEPLOYMENT_ID,
-        workflow_id=models.WORKFLOW_ID,
-        execution_id=models.EXECUTION_ID,
+        deployment_id=deployment.id,
+        workflow_name=models.WORKFLOW_NAME,
         task_max_attempts=models.TASK_MAX_ATTEMPTS,
         task_retry_interval=models.TASK_RETRY_INTERVAL
     )

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/mock/models.py
----------------------------------------------------------------------
diff --git a/tests/mock/models.py b/tests/mock/models.py
index 72ec7e4..cdda614 100644
--- a/tests/mock/models.py
+++ b/tests/mock/models.py
@@ -19,24 +19,24 @@ from aria.storage import models
 
 from . import operations
 
-DEPLOYMENT_ID = 'test_deployment_id'
-BLUEPRINT_ID = 'test_blueprint_id'
-WORKFLOW_ID = 'test_workflow_id'
-EXECUTION_ID = 'test_execution_id'
+DEPLOYMENT_NAME = 'test_deployment_id'
+BLUEPRINT_NAME = 'test_blueprint_id'
+WORKFLOW_NAME = 'test_workflow_id'
+EXECUTION_NAME = 'test_execution_id'
 TASK_RETRY_INTERVAL = 1
 TASK_MAX_ATTEMPTS = 1
 
-DEPENDENCY_NODE_ID = 'dependency_node'
-DEPENDENCY_NODE_INSTANCE_ID = 'dependency_node_instance'
-DEPENDENT_NODE_ID = 'dependent_node'
-DEPENDENT_NODE_INSTANCE_ID = 'dependent_node_instance'
-RELATIONSHIP_ID = 'relationship'
-RELATIONSHIP_INSTANCE_ID = 'relationship_instance'
+DEPENDENCY_NODE_NAME = 'dependency_node'
+DEPENDENCY_NODE_INSTANCE_NAME = 'dependency_node_instance'
+DEPENDENT_NODE_NAME = 'dependent_node'
+DEPENDENT_NODE_INSTANCE_NAME = 'dependent_node_instance'
+RELATIONSHIP_NAME = 'relationship'
+RELATIONSHIP_INSTANCE_NAME = 'relationship_instance'
+
 
 def get_dependency_node(deployment):
     return models.Node(
-        id=DEPENDENCY_NODE_ID,
-        host_id=DEPENDENCY_NODE_ID,
+        name=DEPENDENCY_NODE_NAME,
         type='test_node_type',
         type_hierarchy=[],
         number_of_instances=1,
@@ -46,18 +46,17 @@ def get_dependency_node(deployment):
         operations=dict((key, {}) for key in operations.NODE_OPERATIONS),
         min_number_of_instances=1,
         max_number_of_instances=1,
-        deployment_fk=deployment.storage_id
+        deployment_id=deployment.id
     )
 
 
 def get_dependency_node_instance(dependency_node):
     return models.NodeInstance(
-        id=DEPENDENCY_NODE_INSTANCE_ID,
-        host_id=DEPENDENCY_NODE_INSTANCE_ID,
+        name=DEPENDENCY_NODE_INSTANCE_NAME,
         runtime_properties={'ip': '1.1.1.1'},
         version=None,
-        node_fk=dependency_node.storage_id,
-        deployment_fk=dependency_node.deployment.storage_id,
+        node_id=dependency_node.id,
+        deployment_id=dependency_node.deployment.id,
         state='',
         scaling_groups={}
     )
@@ -65,9 +64,9 @@ def get_dependency_node_instance(dependency_node):
 
 def get_relationship(source=None, target=None):
     return models.Relationship(
-        id=RELATIONSHIP_ID,
-        source_node_fk=source.storage_id,
-        target_node_fk=target.storage_id,
+        name=RELATIONSHIP_NAME,
+        source_node_id=source.id,
+        target_node_id=target.id,
         source_interfaces={},
         source_operations=dict((key, {}) for key in operations.RELATIONSHIP_OPERATIONS),
         target_interfaces={},
@@ -80,18 +79,17 @@ def get_relationship(source=None, target=None):
 
 def get_relationship_instance(source_instance, target_instance, relationship):
     return models.RelationshipInstance(
-        id=RELATIONSHIP_INSTANCE_ID,
-        relationship_fk=relationship.storage_id,
-        target_node_instance_fk=target_instance.storage_id,
-        source_node_instance_fk=source_instance.storage_id,
+        name=RELATIONSHIP_INSTANCE_NAME,
+        relationship_id=relationship.id,
+        target_node_instance_id=target_instance.id,
+        source_node_instance_id=source_instance.id,
     )
 
 
 def get_dependent_node(deployment):
     return models.Node(
-        id=DEPENDENT_NODE_ID,
-        deployment_fk=deployment.storage_id,
-        host_id=DEPENDENT_NODE_ID,
+        name=DEPENDENT_NODE_NAME,
+        deployment_id=deployment.id,
         type='test_node_type',
         type_hierarchy=[],
         number_of_instances=1,
@@ -106,12 +104,11 @@ def get_dependent_node(deployment):
 
 def get_dependent_node_instance(dependent_node):
     return models.NodeInstance(
-        id=DEPENDENT_NODE_INSTANCE_ID,
-        host_id=DEPENDENT_NODE_INSTANCE_ID,
+        name=DEPENDENT_NODE_INSTANCE_NAME,
         runtime_properties={},
         version=None,
-        node_fk=dependent_node.storage_id,
-        deployment_fk=dependent_node.deployment.storage_id,
+        node_id=dependent_node.id,
+        deployment_id=dependent_node.deployment.id,
         state='',
         scaling_groups={}
     )
@@ -121,7 +118,7 @@ def get_blueprint():
     now = datetime.now()
     return models.Blueprint(
         plan={},
-        id=BLUEPRINT_ID,
+        name=BLUEPRINT_NAME,
         description=None,
         created_at=now,
         updated_at=now,
@@ -131,11 +128,11 @@ def get_blueprint():
 
 def get_execution(deployment):
     return models.Execution(
-        id=EXECUTION_ID,
-        deployment_fk=deployment.storage_id,
-        blueprint_fk=deployment.blueprint.storage_id,
+        name=EXECUTION_NAME,
+        deployment_id=deployment.id,
+        blueprint_id=deployment.blueprint.id,
         status=models.Execution.STARTED,
-        workflow_id=WORKFLOW_ID,
+        workflow_name=WORKFLOW_NAME,
         started_at=datetime.utcnow(),
         parameters=None
     )
@@ -144,8 +141,8 @@ def get_execution(deployment):
 def get_deployment(blueprint):
     now = datetime.utcnow()
     return models.Deployment(
-        id=DEPLOYMENT_ID,
-        blueprint_fk=blueprint.storage_id,
+        name=DEPLOYMENT_NAME,
+        blueprint_id=blueprint.id,
         description='',
         created_at=now,
         updated_at=now,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index ec13154..5e81f26 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -52,13 +52,13 @@ def executor():
 def test_node_operation_task_execution(ctx, executor):
     operation_name = 'aria.interfaces.lifecycle.create'
 
-    node = ctx.model.node.get(mock.models.DEPENDENCY_NODE_ID)
+    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
     node.operations[operation_name] = {
         'operation': op_path(my_operation, module_path=__name__)
 
     }
     ctx.model.node.update(node)
-    node_instance = ctx.model.node_instance.get(mock.models.DEPENDENCY_NODE_INSTANCE_ID)
+    node_instance = ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
 
     inputs = {'putput': True}
 
@@ -92,18 +92,20 @@ def test_node_operation_task_execution(ctx, executor):
 def test_relationship_operation_task_execution(ctx, executor):
     operation_name = 'aria.interfaces.relationship_lifecycle.postconfigure'
 
-    relationship = ctx.model.relationship.get(mock.models.RELATIONSHIP_ID)
+    relationship = ctx.model.relationship.get_by_name(mock.models.RELATIONSHIP_NAME)
     relationship.source_operations[operation_name] = {
         'operation': op_path(my_operation, module_path=__name__)
     }
     ctx.model.relationship.update(relationship)
-    relationship_instance = ctx.model.relationship_instance.get(
-        mock.models.RELATIONSHIP_INSTANCE_ID)
-
-    dependency_node = ctx.model.node.get(mock.models.DEPENDENCY_NODE_ID)
-    dependency_node_instance = ctx.model.node_instance.get(mock.models.DEPENDENCY_NODE_INSTANCE_ID)
-    dependent_node = ctx.model.node.get(mock.models.DEPENDENT_NODE_ID)
-    dependent_node_instance = ctx.model.node_instance.get(mock.models.DEPENDENT_NODE_INSTANCE_ID)
+    relationship_instance = ctx.model.relationship_instance.get_by_name(
+        mock.models.RELATIONSHIP_INSTANCE_NAME)
+
+    dependency_node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    dependency_node_instance = \
+        ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+    dependent_node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
+    dependent_node_instance = \
+        ctx.model.node_instance.get_by_name(mock.models.DEPENDENT_NODE_INSTANCE_NAME)
 
     inputs = {'putput': True}
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index 5cad219..6a742dc 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -48,15 +48,27 @@ def executor():
 
 
 def _get_elements(workflow_context):
-    dependency_node = workflow_context.model.node.get(mock.models.DEPENDENCY_NODE_ID)
-    dependency_node_instance = workflow_context.model.node_instance.get(
-        mock.models.DEPENDENCY_NODE_INSTANCE_ID)
-    dependent_node = workflow_context.model.node.get(mock.models.DEPENDENT_NODE_ID)
-    dependent_node_instance = workflow_context.model.node_instance.get(
-        mock.models.DEPENDENT_NODE_INSTANCE_ID)
-    relationship = workflow_context.model.relationship.get(mock.models.RELATIONSHIP_ID)
-    relationship_instance = workflow_context.model.relationship_instance.get(
-        mock.models.RELATIONSHIP_INSTANCE_ID)
+    dependency_node = workflow_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    dependency_node.host_id = dependency_node.id
+    workflow_context.model.node.update(dependency_node)
+
+    dependency_node_instance = workflow_context.model.node_instance.get_by_name(
+        mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+    dependency_node_instance.host_id = dependency_node_instance.id
+    workflow_context.model.node_instance.update(dependency_node_instance)
+
+    dependent_node = workflow_context.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
+    dependent_node.host_id = dependency_node.id
+    workflow_context.model.node.update(dependent_node)
+
+    dependent_node_instance = workflow_context.model.node_instance.get_by_name(
+        mock.models.DEPENDENT_NODE_INSTANCE_NAME)
+    dependent_node_instance.host_id = dependent_node_instance.id
+    workflow_context.model.node_instance.update(dependent_node_instance)
+
+    relationship = workflow_context.model.relationship.get_by_name(mock.models.RELATIONSHIP_NAME)
+    relationship_instance = workflow_context.model.relationship_instance.get_by_name(
+        mock.models.RELATIONSHIP_INSTANCE_NAME)
     return dependency_node, dependency_node_instance, dependent_node, dependent_node_instance, \
         relationship, relationship_instance
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/orchestrator/context/test_workflow.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_workflow.py b/tests/orchestrator/context/test_workflow.py
index fbe5d75..2cb24fd 100644
--- a/tests/orchestrator/context/test_workflow.py
+++ b/tests/orchestrator/context/test_workflow.py
@@ -28,11 +28,11 @@ class TestWorkflowContext(object):
 
     def test_execution_creation_on_workflow_context_creation(self, storage):
         self._create_ctx(storage)
-        execution = storage.execution.get(models.EXECUTION_ID)
-        assert execution.id == models.EXECUTION_ID
-        assert execution.deployment_id == models.DEPLOYMENT_ID
-        assert execution.workflow_id == models.WORKFLOW_ID
-        assert execution.blueprint_id == models.BLUEPRINT_ID
+        execution = storage.execution.get_by_name(models.EXECUTION_NAME)
+        assert execution.name == models.EXECUTION_NAME
+        assert execution.deployment == storage.deployment.get_by_name(models.DEPLOYMENT_NAME)
+        assert execution.workflow_name == models.WORKFLOW_NAME
+        assert execution.blueprint == storage.blueprint.get_by_name(models.BLUEPRINT_NAME)
         assert execution.status == storage.execution.model_cls.PENDING
         assert execution.parameters == {}
         assert execution.created_at <= datetime.utcnow()
@@ -47,9 +47,9 @@ class TestWorkflowContext(object):
             name='simple_context',
             model_storage=storage,
             resource_storage=None,
-            deployment_id=models.DEPLOYMENT_ID,
-            workflow_id=models.WORKFLOW_ID,
-            execution_id=models.EXECUTION_ID,
+            deployment_id=storage.deployment.get_by_name(models.DEPLOYMENT_NAME).id,
+            workflow_name=models.WORKFLOW_NAME,
+            execution_name=models.EXECUTION_NAME,
             task_max_attempts=models.TASK_MAX_ATTEMPTS,
             task_retry_interval=models.TASK_RETRY_INTERVAL
         )
@@ -57,9 +57,9 @@ class TestWorkflowContext(object):
 
 @pytest.fixture(scope='function')
 def storage():
-    api_params = test_storage.get_sqlite_api_params()
-    result = application_model_storage(SQLAlchemyModelAPI, api_params=api_params)
+    api_kwargs = test_storage.get_sqlite_api_kwargs()
+    result = application_model_storage(SQLAlchemyModelAPI, api_kwargs=api_kwargs)
     result.blueprint.put(models.get_blueprint())
-    blueprint = result.blueprint.get(models.BLUEPRINT_ID)
+    blueprint = result.blueprint.get_by_name(models.BLUEPRINT_NAME)
     result.deployment.put(models.get_deployment(blueprint))
     return result

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/orchestrator/workflows/api/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/api/test_task.py b/tests/orchestrator/workflows/api/test_task.py
index 3ae700e..6b89257 100644
--- a/tests/orchestrator/workflows/api/test_task.py
+++ b/tests/orchestrator/workflows/api/test_task.py
@@ -40,10 +40,11 @@ class TestOperationTask(object):
     def test_node_operation_task_creation(self, ctx):
         operation_name = 'aria.interfaces.lifecycle.create'
         op_details = {'operation': True}
-        node = ctx.model.node.get(mock.models.DEPENDENT_NODE_ID)
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
         node.operations[operation_name] = op_details
         ctx.model.node.update(node)
-        node_instance = ctx.model.node_instance.get(mock.models.DEPENDENT_NODE_INSTANCE_ID)
+        node_instance = \
+            ctx.model.node_instance.get_by_name(mock.models.DEPENDENT_NODE_INSTANCE_NAME)
         inputs = {'inputs': True}
         max_attempts = 10
         retry_interval = 10
@@ -69,10 +70,10 @@ class TestOperationTask(object):
     def test_relationship_operation_task_creation(self, ctx):
         operation_name = 'aria.interfaces.relationship_lifecycle.preconfigure'
         op_details = {'operation': True}
-        relationship = ctx.model.relationship.get(mock.models.RELATIONSHIP_ID)
+        relationship = ctx.model.relationship.get_by_name(mock.models.RELATIONSHIP_NAME)
         relationship.source_operations[operation_name] = op_details
-        relationship_instance = ctx.model.relationship_instance.get(
-            mock.models.RELATIONSHIP_INSTANCE_ID)
+        relationship_instance = ctx.model.relationship_instance.get_by_name(
+            mock.models.RELATIONSHIP_INSTANCE_NAME)
         inputs = {'inputs': True}
         max_attempts = 10
         retry_interval = 10
@@ -94,8 +95,8 @@ class TestOperationTask(object):
         assert api_task.max_attempts == max_attempts
 
     def test_operation_task_default_values(self, ctx):
-        dependency_node_instance = ctx.model.node_instance.get(
-            mock.models.DEPENDENCY_NODE_INSTANCE_ID)
+        dependency_node_instance = ctx.model.node_instance.get_by_name(
+            mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
         with context.workflow.current.push(ctx):
             task = api.task.OperationTask(
                 name='stub',

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/orchestrator/workflows/builtin/__init__.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/__init__.py b/tests/orchestrator/workflows/builtin/__init__.py
index 7649a2a..26ba82f 100644
--- a/tests/orchestrator/workflows/builtin/__init__.py
+++ b/tests/orchestrator/workflows/builtin/__init__.py
@@ -13,8 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import pytest
-
 from tests import mock
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/orchestrator/workflows/builtin/test_execute_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_execute_operation.py b/tests/orchestrator/workflows/builtin/test_execute_operation.py
index f034046..435ad0f 100644
--- a/tests/orchestrator/workflows/builtin/test_execute_operation.py
+++ b/tests/orchestrator/workflows/builtin/test_execute_operation.py
@@ -13,16 +13,26 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import pytest
+
 from aria.orchestrator.workflows.api import task
 from aria.orchestrator.workflows.builtin.execute_operation import execute_operation
 
 from tests import mock
 
 
-def test_execute_operation():
-    ctx = mock.context.simple()
+@pytest.fixture
+def ctx():
+    context = mock.context.simple()
+    yield context
+    context.model.drop()
+
+
+def test_execute_operation(ctx):
+    node_instance = ctx.model.node_instance.list(filters={
+        'name': mock.models.DEPENDENCY_NODE_INSTANCE_NAME
+    })[0]
     operation_name = mock.operations.NODE_OPERATIONS_INSTALL[0]
-    node_instance_id = 'dependency_node_instance'
 
     execute_tasks = list(
         task.WorkflowTask(
@@ -34,11 +44,13 @@ def test_execute_operation():
             run_by_dependency_order=False,
             type_names=[],
             node_ids=[],
-            node_instance_ids=[node_instance_id]
+            node_instance_ids=[node_instance.id]
         ).topological_order()
     )
 
     assert len(execute_tasks) == 1
-    assert execute_tasks[0].name == '{0}.{1}'.format(operation_name, node_instance_id)
+    assert execute_tasks[0].name == '{0}.{1}'.format(operation_name, node_instance.id)
+
+
 
 # TODO: add more scenarios

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/orchestrator/workflows/builtin/test_heal.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_heal.py b/tests/orchestrator/workflows/builtin/test_heal.py
index 7982f42..e874e9e 100644
--- a/tests/orchestrator/workflows/builtin/test_heal.py
+++ b/tests/orchestrator/workflows/builtin/test_heal.py
@@ -13,6 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import pytest
+
 from aria.orchestrator.workflows.api import task
 from aria.orchestrator.workflows.builtin.heal import heal
 
@@ -22,9 +24,19 @@ from . import (assert_node_install_operations,
                assert_node_uninstall_operations)
 
 
-def test_heal_dependent_node():
-    ctx = mock.context.simple()
-    heal_graph = task.WorkflowTask(heal, ctx=ctx, node_instance_id='dependent_node_instance')
+@pytest.fixture
+def ctx():
+    context = mock.context.simple()
+    yield context
+    context.model.drop()
+
+
+def test_heal_dependent_node(ctx):
+    dependent_node_instance = \
+        ctx.model.node_instance.get_by_name(mock.models.DEPENDENT_NODE_INSTANCE_NAME)
+    dependent_node_instance.host_id = dependent_node_instance.id
+    ctx.model.node_instance.update(dependent_node_instance)
+    heal_graph = task.WorkflowTask(heal, ctx=ctx, node_instance_id=dependent_node_instance.id)
 
     assert len(list(heal_graph.tasks)) == 2
     uninstall_subgraph, install_subgraph = list(heal_graph.topological_order(reverse=True))
@@ -47,10 +59,13 @@ def test_heal_dependent_node():
     assert_node_uninstall_operations(dependent_node_uninstall_tasks, with_relationships=True)
     assert_node_install_operations(dependent_node_install_tasks, with_relationships=True)
 
-def test_heal_dependency_node():
-    ctx = mock.context.simple()
 
-    heal_graph = task.WorkflowTask(heal, ctx=ctx, node_instance_id='dependency_node_instance')
+def test_heal_dependency_node(ctx):
+    dependency_node_instance = \
+        ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+    dependency_node_instance.host_id = dependency_node_instance.id
+    ctx.model.node_instance.update(dependency_node_instance)
+    heal_graph = task.WorkflowTask(heal, ctx=ctx, node_instance_id=dependency_node_instance.id)
     # both subgraphs should contain un\install for both the dependent and the dependency
     assert len(list(heal_graph.tasks)) == 2
     uninstall_subgraph, install_subgraph = list(heal_graph.topological_order(reverse=True))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/orchestrator/workflows/builtin/test_install.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_install.py b/tests/orchestrator/workflows/builtin/test_install.py
index e2e0e4c..d6c2a97 100644
--- a/tests/orchestrator/workflows/builtin/test_install.py
+++ b/tests/orchestrator/workflows/builtin/test_install.py
@@ -12,6 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import pytest
 
 from aria.orchestrator.workflows.api import task
 from aria.orchestrator.workflows.builtin.install import install
@@ -21,8 +22,14 @@ from tests import mock
 from . import assert_node_install_operations
 
 
-def test_install():
-    ctx = mock.context.simple()
+@pytest.fixture
+def ctx():
+    context = mock.context.simple()
+    yield context
+    context.model.drop()
+
+
+def test_install(ctx):
 
     install_tasks = list(task.WorkflowTask(install, ctx=ctx).topological_order(True))
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/orchestrator/workflows/builtin/test_uninstall.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_uninstall.py b/tests/orchestrator/workflows/builtin/test_uninstall.py
index 7d788f4..3b1ab4f 100644
--- a/tests/orchestrator/workflows/builtin/test_uninstall.py
+++ b/tests/orchestrator/workflows/builtin/test_uninstall.py
@@ -13,6 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import pytest
+
 from aria.orchestrator.workflows.api import task
 from aria.orchestrator.workflows.builtin.uninstall import uninstall
 
@@ -21,8 +23,14 @@ from tests import mock
 from . import assert_node_uninstall_operations
 
 
-def test_uninstall():
-    ctx = mock.context.simple()
+@pytest.fixture
+def ctx():
+    context = mock.context.simple()
+    yield context
+    context.model.drop()
+
+
+def test_uninstall(ctx):
 
     uninstall_tasks = list(task.WorkflowTask(uninstall, ctx=ctx).topological_order(True))
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index 43c3641..5864495 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -60,7 +60,8 @@ class BaseTest(object):
             max_attempts=None,
             retry_interval=None,
             ignore_failure=None):
-        node_instance = ctx.model.node_instance.get(mock.models.DEPENDENCY_NODE_INSTANCE_ID)
+        node_instance = \
+            ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
         node_instance.node.operations['aria.interfaces.lifecycle.create'] = {
             'operation': '{name}.{func.__name__}'.format(name=__name__, func=func)
         }
@@ -218,7 +219,7 @@ class TestCancel(BaseTest):
 
         @workflow
         def mock_workflow(ctx, graph):
-            return graph.sequence(*(self._op(mock_sleep_task, ctx, inputs={'seconds': 1})
+            return graph.sequence(*(self._op(mock_sleep_task, ctx, inputs={'seconds': 0.1})
                                     for _ in range(number_of_tasks)))
         eng = self._engine(workflow_func=mock_workflow,
                            workflow_context=workflow_context,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/orchestrator/workflows/core/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task.py b/tests/orchestrator/workflows/core/test_task.py
index 6d790f0..bc86576 100644
--- a/tests/orchestrator/workflows/core/test_task.py
+++ b/tests/orchestrator/workflows/core/test_task.py
@@ -48,9 +48,10 @@ class TestOperationTask(object):
         return api_task, core_task
 
     def test_operation_task_creation(self, ctx):
-        node_instance = ctx.model.node_instance.get(mock.models.DEPENDENCY_NODE_INSTANCE_ID)
+        node_instance = \
+            ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
         api_task, core_task = self._create_operation_task(ctx, node_instance)
-        storage_task = ctx.model.task.get(core_task.id)
+        storage_task = ctx.model.task.get_by_name(core_task.name)
 
         assert core_task.model_task == storage_task
         assert core_task.name == api_task.name
@@ -59,7 +60,8 @@ class TestOperationTask(object):
         assert core_task.inputs == api_task.inputs == storage_task.inputs
 
     def test_operation_task_edit_locked_attribute(self, ctx):
-        node_instance = ctx.model.node_instance.get(mock.models.DEPENDENCY_NODE_INSTANCE_ID)
+        node_instance = \
+            ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
 
         _, core_task = self._create_operation_task(ctx, node_instance)
         now = datetime.utcnow()
@@ -75,7 +77,8 @@ class TestOperationTask(object):
             core_task.due_at = now
 
     def test_operation_task_edit_attributes(self, ctx):
-        node_instance = ctx.model.node_instance.get(mock.models.DEPENDENCY_NODE_INSTANCE_ID)
+        node_instance = \
+            ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
 
         _, core_task = self._create_operation_task(ctx, node_instance)
         future_time = datetime.utcnow() + timedelta(seconds=3)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
index 5506c40..e452cc6 100644
--- a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
+++ b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
@@ -24,7 +24,8 @@ from tests import mock
 def test_task_graph_into_execution_graph():
     operation_name = 'aria.interfaces.lifecycle.create'
     task_context = mock.context.simple()
-    node_instance = task_context.model.node_instance.get(mock.models.DEPENDENCY_NODE_INSTANCE_ID)
+    node_instance = \
+        task_context.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
     def sub_workflow(name, **_):
         return api.task_graph.TaskGraph(name)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/storage/__init__.py
----------------------------------------------------------------------
diff --git a/tests/storage/__init__.py b/tests/storage/__init__.py
index 235c05c..2c848b4 100644
--- a/tests/storage/__init__.py
+++ b/tests/storage/__init__.py
@@ -18,6 +18,7 @@ from shutil import rmtree
 
 from sqlalchemy import (
     create_engine,
+    MetaData,
     orm)
 from sqlalchemy.pool import StaticPool
 
@@ -31,9 +32,10 @@ class TestFileSystem(object):
         rmtree(self.path, ignore_errors=True)
 
 
-def get_sqlite_api_params():
+def get_sqlite_api_kwargs():
     engine = create_engine('sqlite:///:memory:',
                            connect_args={'check_same_thread': False},
                            poolclass=StaticPool)
     session = orm.sessionmaker(bind=engine)()
+    MetaData().create_all(engine)
     return dict(engine=engine, session=session)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/storage/test_model_storage.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_model_storage.py b/tests/storage/test_model_storage.py
index 4d610f3..4b93a8b 100644
--- a/tests/storage/test_model_storage.py
+++ b/tests/storage/test_model_storage.py
@@ -13,12 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import tempfile
-import shutil
-
 import pytest
 
-
 from aria.storage import (
     ModelStorage,
     models,
@@ -26,23 +22,12 @@ from aria.storage import (
     sql_mapi,
 )
 from aria import application_model_storage
-from tests.storage import get_sqlite_api_params
-
-temp_dir = tempfile.mkdtemp()
+from tests.storage import get_sqlite_api_kwargs
 
 
 @pytest.fixture
 def storage():
-    return ModelStorage(sql_mapi.SQLAlchemyModelAPI, api_params=get_sqlite_api_params())
-
-
-@pytest.fixture(autouse=True)
-def cleanup():
-    yield
-    try:
-        shutil.rmtree(temp_dir, ignore_errors=True)
-    except BaseException:
-        pass
+    return ModelStorage(sql_mapi.SQLAlchemyModelAPI, api_kwargs=get_sqlite_api_kwargs())
 
 
 def test_storage_base(storage):
@@ -53,10 +38,10 @@ def test_storage_base(storage):
 def test_model_storage(storage):
     storage.register(models.ProviderContext)
 
-    pc = models.ProviderContext(context={}, name='context_name', id='id1')
+    pc = models.ProviderContext(context={}, name='context_name')
     storage.provider_context.put(pc)
 
-    assert storage.provider_context.get('id1') == pc
+    assert storage.provider_context.get_by_name('context_name') == pc
 
     assert [pc_from_storage for pc_from_storage in storage.provider_context.iter()] == [pc]
     assert [pc_from_storage for pc_from_storage in storage.provider_context] == [pc]
@@ -66,31 +51,31 @@ def test_model_storage(storage):
     storage.provider_context.update(pc)
     assert storage.provider_context.get(pc.id).context == new_context
 
-    storage.provider_context.delete('id1')
+    storage.provider_context.delete(pc)
     with pytest.raises(exceptions.StorageError):
-        storage.provider_context.get('id1')
+        storage.provider_context.get(pc.id)
 
 
 def test_storage_driver(storage):
     storage.register(models.ProviderContext)
 
-    pc = models.ProviderContext(context={}, name='context_name', id='id2')
+    pc = models.ProviderContext(context={}, name='context_name')
     storage.registered['provider_context'].put(entry=pc)
 
-    assert storage.registered['provider_context'].get(entry_id='id2') == pc
+    assert storage.registered['provider_context'].get_by_name('context_name') == pc
 
     assert next(i for i in storage.registered['provider_context'].iter()) == pc
     assert [i for i in storage.provider_context] == [pc]
 
-    storage.registered['provider_context'].delete('id2')
+    storage.registered['provider_context'].delete(pc)
 
     with pytest.raises(exceptions.StorageError):
-        storage.registered['provider_context'].get('id2')
+        storage.registered['provider_context'].get(pc.id)
 
 
 def test_application_storage_factory():
     storage = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
-                                        api_params=get_sqlite_api_params())
+                                        api_kwargs=get_sqlite_api_kwargs())
     assert storage.node
     assert storage.node_instance
     assert storage.plugin

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/storage/test_models.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_models.py b/tests/storage/test_models.py
index 82a1377..23a1549 100644
--- a/tests/storage/test_models.py
+++ b/tests/storage/test_models.py
@@ -26,36 +26,35 @@ from aria.storage.models import (
     Task)
 
 
-from tests.storage import get_sqlite_api_params
+from tests.storage import get_sqlite_api_kwargs
 
 
 @pytest.fixture
 def empty_storage():
     return application_model_storage(sql_mapi.SQLAlchemyModelAPI,
-                                     api_params=get_sqlite_api_params())
+                                     api_kwargs=get_sqlite_api_kwargs())
 
 
 @pytest.mark.parametrize(
-    'is_valid, plan, id, description, created_at, updated_at, main_file_name',
+    'is_valid, plan, description, created_at, updated_at, main_file_name',
     [
-        (False, None, 'id', 'description', datetime.utcnow(), datetime.utcnow(), '/path'),
-        (False, {}, type('Class'), 'description', datetime.utcnow(), datetime.utcnow(), '/path'),
-        (False, {}, 'id', {}, datetime.utcnow(), datetime.utcnow(), '/path'),
-        (False, {}, 'id', 'description', 'error', datetime.utcnow(), '/path'),
-        (False, {}, 'id', 'description', datetime.utcnow(), 'error', '/path'),
-        (False, {}, 'id', 'description', datetime.utcnow(), datetime.utcnow(), {}),
-        (True, {}, 'id', 'description', datetime.utcnow(), datetime.utcnow(), '/path'),
+        (False, None, 'description', datetime.utcnow(), datetime.utcnow(), '/path'),
+        (False, {}, {}, datetime.utcnow(), datetime.utcnow(), '/path'),
+        (False, {}, 'description', 'error', datetime.utcnow(), '/path'),
+        (False, {}, 'description', datetime.utcnow(), 'error', '/path'),
+        (False, {}, 'description', datetime.utcnow(), datetime.utcnow(), {}),
+        (True, {}, 'description', datetime.utcnow(), datetime.utcnow(), '/path'),
     ]
 )
-def test_blueprint_model(empty_storage, is_valid, plan, id, description,
+def test_blueprint_model(empty_storage, is_valid, plan, description,
                          created_at, updated_at, main_file_name):
     if not is_valid:
         with pytest.raises(exceptions.StorageError):
-            empty_storage.blueprint.put(Blueprint(plan=plan, id=id, description=description,
+            empty_storage.blueprint.put(Blueprint(plan=plan, description=description,
                                                   created_at=created_at, updated_at=updated_at,
                                                   main_file_name=main_file_name))
     else:
-        empty_storage.blueprint.put(Blueprint(plan=plan, id=id, description=description,
+        empty_storage.blueprint.put(Blueprint(plan=plan, description=description,
                                               created_at=created_at, updated_at=updated_at,
                                               main_file_name=main_file_name))
 
@@ -105,7 +104,7 @@ def test_execution_status_transition():
     def create_execution(status):
         execution = Execution(
             id='e_id',
-            workflow_id='w_id',
+            workflow_name='w_name',
             status=status,
             parameters={},
             created_at=datetime.utcnow(),
@@ -168,11 +167,11 @@ def test_execution_status_transition():
 
 def test_task_max_attempts_validation():
     def create_task(max_attempts):
-        return Task(execution_id='eid',
-                    name='name',
-                    operation_mapping='',
-                    inputs={},
-                    max_attempts=max_attempts)
+        Task(execution_id='eid',
+             name='name',
+             operation_mapping='',
+             inputs={},
+             max_attempts=max_attempts)
     create_task(max_attempts=1)
     create_task(max_attempts=2)
     create_task(max_attempts=Task.INFINITE_RETRIES)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/72a4e48c/tests/storage/test_resource_storage.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_resource_storage.py b/tests/storage/test_resource_storage.py
index 4347512..9b5f782 100644
--- a/tests/storage/test_resource_storage.py
+++ b/tests/storage/test_resource_storage.py
@@ -45,13 +45,13 @@ class TestResourceStorage(TestFileSystem):
 
     def _create_storage(self):
         return ResourceStorage(FileSystemResourceAPI,
-                               api_params=dict(directory=self.path))
+                               api_kwargs=dict(directory=self.path))
 
     def test_name(self):
         api = FileSystemResourceAPI
         storage = ResourceStorage(FileSystemResourceAPI,
                                   items=['blueprint'],
-                                  api_params=dict(directory=self.path))
+                                  api_kwargs=dict(directory=self.path))
         assert repr(storage) == 'ResourceStorage(api={api})'.format(api=api)
         assert 'directory={resource_dir}'.format(resource_dir=self.path) in \
                repr(storage.registered['blueprint'])
@@ -62,7 +62,7 @@ class TestResourceStorage(TestFileSystem):
         assert os.path.exists(os.path.join(self.path, 'blueprint'))
 
     def test_upload_file(self):
-        storage = ResourceStorage(FileSystemResourceAPI, api_params=dict(directory=self.path))
+        storage = ResourceStorage(FileSystemResourceAPI, api_kwargs=dict(directory=self.path))
         self._create(storage)
         tmpfile_path = tempfile.mkstemp(suffix=self.__class__.__name__, dir=self.path)[1]
         self._upload(storage, tmpfile_path, id='blueprint_id')


Mime
View raw message