ariatosca-dev mailing list archives

From mxm...@apache.org
Subject [2/6] incubator-ariatosca git commit: ARIA-44-Merge-parser-and-storage-models
Date Tue, 31 Jan 2017 13:04:30 GMT
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/55556793/tests/orchestrator/workflows/core/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task.py b/tests/orchestrator/workflows/core/test_task.py
index 061a3f2..8d07b09 100644
--- a/tests/orchestrator/workflows/core/test_task.py
+++ b/tests/orchestrator/workflows/core/test_task.py
@@ -28,30 +28,41 @@ from aria.orchestrator.workflows import (
 
 from tests import mock, storage
 
+OP_NAME = 'tosca.interfaces.node.lifecycle.Standard.create'
+RELATIONSHIP_OP_NAME = 'tosca.interfaces.relationship.Configure.pre_configure'
+
 
 @pytest.fixture
 def ctx(tmpdir):
     context = mock.context.simple(storage.get_sqlite_api_kwargs(str(tmpdir)))
+
+    relationship = context.model.relationship.list()[0]
+    relationship.source_interfaces = [mock.models.get_interface(RELATIONSHIP_OP_NAME)]
+    relationship.target_interfaces = [mock.models.get_interface(RELATIONSHIP_OP_NAME)]
+    context.model.relationship.update(relationship)
+
+    dependent_node = context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+    dependent_node.interfaces = [mock.models.get_interface(OP_NAME)]
+    context.model.node.update(dependent_node)
+
     yield context
     storage.release_sqlite_storage(context.model)
 
 
 class TestOperationTask(object):
 
-    def _create_node_operation_task(self, ctx, node_instance):
+    def _create_node_operation_task(self, ctx, node):
         with workflow_context.current.push(ctx):
             api_task = api.task.OperationTask.node_instance(
-                instance=node_instance,
+                instance=node,
                 name='tosca.interfaces.node.lifecycle.Standard.create')
             core_task = core.task.OperationTask(api_task=api_task)
         return api_task, core_task
 
-    def _create_relationship_operation_task(self, ctx, relationship_instance, operation_end):
+    def _create_relationship_operation_task(self, ctx, relationship, operation_name):
         with workflow_context.current.push(ctx):
             api_task = api.task.OperationTask.relationship_instance(
-                instance=relationship_instance,
-                name='tosca.interfaces.relationship.Configure.pre_configure_source',
-                operation_end=operation_end)
+                instance=relationship, name=operation_name)
             core_task = core.task.OperationTask(api_task=api_task)
         return api_task, core_task
 
@@ -60,45 +71,47 @@ class TestOperationTask(object):
         storage_plugin_other = mock.models.get_plugin(package_name='p0', package_version='0.0')
         ctx.model.plugin.put(storage_plugin_other)
         ctx.model.plugin.put(storage_plugin)
-        node_instance = ctx.model.node_instance.get_by_name(
-            mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-        node = node_instance.node
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        node_template = node.node_template
         plugin_name = 'plugin1'
-        node.plugins = [{'name': plugin_name,
-                         'package_name': 'p1',
-                         'package_version': '0.1'}]
-        node.operations['tosca.interfaces.node.lifecycle.Standard.create'] = {'plugin': plugin_name}
-        api_task, core_task = self._create_node_operation_task(ctx, node_instance)
+        node_template.plugins = [{'name': 'plugin1',
+                                  'package_name': 'p1',
+                                  'package_version': '0.1'}]
+        node.interfaces = [mock.models.get_interface(
+            'tosca.interfaces.node.lifecycle.Standard.create',
+            operation_kwargs=dict(plugin='plugin1')
+        )]
+        ctx.model.node_template.update(node_template)
+        ctx.model.node.update(node)
+        api_task, core_task = self._create_node_operation_task(ctx, node)
         storage_task = ctx.model.task.get_by_name(core_task.name)
         assert storage_task.plugin_name == plugin_name
         assert storage_task.execution_name == ctx.execution.name
-        assert storage_task.runs_on.id == core_task.context.node_instance.id
+        assert storage_task.runs_on == core_task.context.node
         assert core_task.model_task == storage_task
         assert core_task.name == api_task.name
-        assert core_task.operation_mapping == api_task.operation_mapping
-        assert core_task.actor == api_task.actor == node_instance
+        assert core_task.implementation == api_task.implementation
+        assert core_task.actor == api_task.actor == node
         assert core_task.inputs == api_task.inputs == storage_task.inputs
         assert core_task.plugin == storage_plugin
 
     def test_source_relationship_operation_task_creation(self, ctx):
-        relationship_instance = ctx.model.relationship_instance.list()[0]
+        relationship = ctx.model.relationship.list()[0]
+        ctx.model.relationship.update(relationship)
         _, core_task = self._create_relationship_operation_task(
-            ctx, relationship_instance,
-            api.task.OperationTask.SOURCE_OPERATION)
-        assert core_task.model_task.runs_on.id == relationship_instance.source_node_instance.id
+            ctx, relationship, '{0}_source'.format(RELATIONSHIP_OP_NAME))
+        assert core_task.model_task.runs_on == relationship.source_node
 
     def test_target_relationship_operation_task_creation(self, ctx):
-        relationship_instance = ctx.model.relationship_instance.list()[0]
+        relationship = ctx.model.relationship.list()[0]
         _, core_task = self._create_relationship_operation_task(
-            ctx, relationship_instance,
-            api.task.OperationTask.TARGET_OPERATION)
-        assert core_task.model_task.runs_on.id == relationship_instance.target_node_instance.id
+            ctx, relationship, '{0}_target'.format(RELATIONSHIP_OP_NAME))
+        assert core_task.model_task.runs_on == relationship.target_node
 
     def test_operation_task_edit_locked_attribute(self, ctx):
-        node_instance = \
-            ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
 
-        _, core_task = self._create_node_operation_task(ctx, node_instance)
+        _, core_task = self._create_node_operation_task(ctx, node)
         now = datetime.utcnow()
         with pytest.raises(exceptions.TaskException):
             core_task.status = core_task.STARTED
@@ -112,10 +125,9 @@ class TestOperationTask(object):
             core_task.due_at = now
 
     def test_operation_task_edit_attributes(self, ctx):
-        node_instance = \
-            ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
 
-        _, core_task = self._create_node_operation_task(ctx, node_instance)
+        _, core_task = self._create_node_operation_task(ctx, node)
         future_time = datetime.utcnow() + timedelta(seconds=3)
 
         with core_task._update():
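
The reworked ctx fixture above does its model setup before the yield and releases the SQLite storage afterwards -- the usual pytest yield-fixture setup/teardown shape. A minimal, self-contained sketch of that shape, where FakeStorage and its fields are hypothetical stand-ins rather than the ARIA test models:

import pytest


class FakeStorage(object):
    """Hypothetical stand-in for the test model storage."""
    def __init__(self):
        self.nodes = {'dependency_node': {'interfaces': []}}
        self.released = False

    def release(self):
        self.released = True


@pytest.fixture
def ctx():
    storage = FakeStorage()
    # setup: give the node the interface the tests expect to find
    storage.nodes['dependency_node']['interfaces'].append('Standard.create')
    yield storage               # the test body runs at this point
    storage.release()           # teardown runs once the test returns


def test_node_has_interface(ctx):
    assert 'Standard.create' in ctx.nodes['dependency_node']['interfaces']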

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/55556793/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
index cd37bde..acdcb1d 100644
--- a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
+++ b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
@@ -25,20 +25,22 @@ from tests import storage
 def test_task_graph_into_execution_graph():
     operation_name = 'tosca.interfaces.node.lifecycle.Standard.create'
     task_context = mock.context.simple(storage.get_sqlite_api_kwargs())
-    node_instance = \
-        task_context.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+    node = task_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+    node.interfaces = [mock.models.get_interface(operation_name)]
+    task_context.model.node.update(node)
+
     def sub_workflow(name, **_):
         return api.task_graph.TaskGraph(name)
 
     with context.workflow.current.push(task_context):
         test_task_graph = api.task.WorkflowTask(sub_workflow, name='test_task_graph')
-        simple_before_task = api.task.OperationTask.node_instance(instance=node_instance,
+        simple_before_task = api.task.OperationTask.node_instance(instance=node,
                                                                   name=operation_name)
-        simple_after_task = api.task.OperationTask.node_instance(instance=node_instance,
+        simple_after_task = api.task.OperationTask.node_instance(instance=node,
                                                                  name=operation_name)
 
         inner_task_graph = api.task.WorkflowTask(sub_workflow, name='test_inner_task_graph')
-        inner_task = api.task.OperationTask.node_instance(instance=node_instance,
+        inner_task = api.task.OperationTask.node_instance(instance=node,
                                                           name=operation_name)
         inner_task_graph.add_tasks(inner_task)
 
@@ -91,7 +93,7 @@ def test_task_graph_into_execution_graph():
 def _assert_execution_is_api_task(execution_task, api_task):
     assert execution_task.id == api_task.id
     assert execution_task.name == api_task.name
-    assert execution_task.operation_mapping == api_task.operation_mapping
+    assert execution_task.implementation == api_task.implementation
     assert execution_task.actor == api_task.actor
     assert execution_task.inputs == api_task.inputs
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/55556793/tests/orchestrator/workflows/executor/test_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_executor.py b/tests/orchestrator/workflows/executor/test_executor.py
index 2486a1e..97e1393 100644
--- a/tests/orchestrator/workflows/executor/test_executor.py
+++ b/tests/orchestrator/workflows/executor/test_executor.py
@@ -28,7 +28,7 @@ except ImportError:
     _celery = None
     app = None
 
-from aria.storage import model
+from aria.storage.modeling import model
 from aria.orchestrator import events
 from aria.orchestrator.workflows.executor import (
     thread,
@@ -43,7 +43,7 @@ def test_execute(executor):
     expected_value = 'value'
     successful_task = MockTask(mock_successful_task)
     failing_task = MockTask(mock_failing_task)
-    task_with_inputs = MockTask(mock_task_with_input, inputs={'input': expected_value})
+    task_with_inputs = MockTask(mock_task_with_input, inputs=dict(input='value'))
 
     for task in [successful_task, failing_task, task_with_inputs]:
         executor.execute(task)
@@ -98,8 +98,9 @@ class MockTask(object):
         self.exception = None
         self.id = str(uuid.uuid4())
         name = func.__name__
-        operation = 'tests.orchestrator.workflows.executor.test_executor.{name}'.format(name=name)
-        self.operation_mapping = operation
+        implementation = 'tests.orchestrator.workflows.executor.test_executor.{name}'.format(
+            name=name)
+        self.implementation = implementation
         self.logger = logging.getLogger()
         self.name = name
         self.inputs = inputs or {}
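
MockTask above identifies its operation by a dotted 'module.function' string stored on its implementation attribute. This diff does not show how that string is resolved; purely as an illustration, such a dotted path can be turned back into a callable with the standard library:

import importlib
import os.path


def resolve_implementation(path):
    # Illustration only -- the actual executors may resolve
    # implementation strings differently.
    module_name, _, func_name = path.rpartition('.')
    module = importlib.import_module(module_name)
    return getattr(module, func_name)


assert resolve_implementation('os.path.join') is os.path.join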

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/55556793/tests/orchestrator/workflows/executor/test_process_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor.py b/tests/orchestrator/workflows/executor/test_process_executor.py
index 687e245..ec01d60 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor.py
@@ -22,7 +22,7 @@ from contextlib import contextmanager
 import pytest
 
 from aria import application_model_storage
-from aria.storage import model as aria_model
+from aria.storage.modeling import model as aria_model
 from aria.utils.plugin import create as create_plugin
 from aria.storage.sql_mapi import SQLAlchemyModelAPI
 from aria.orchestrator import events
@@ -38,7 +38,7 @@ class TestProcessExecutor(object):
 
     def test_plugin_execution(self, executor, mock_plugin):
         task = MockTask(plugin=mock_plugin,
-                        operation='mock_plugin1.operation')
+                        implementation='mock_plugin1.operation')
 
         queue = Queue.Queue()
 
@@ -119,11 +119,11 @@ class MockTask(object):
 
     INFINITE_RETRIES = aria_model.Task.INFINITE_RETRIES
 
-    def __init__(self, plugin, operation):
+    def __init__(self, plugin, implementation):
         self.id = str(uuid.uuid4())
-        self.operation_mapping = operation
+        self.implementation = implementation
         self.logger = logging.getLogger()
-        self.name = operation
+        self.name = implementation
         self.inputs = {}
         self.context = MockContext()
         self.retry_count = 0
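
test_plugin_execution above hands the executor a task and then collects the outcome through a Queue.Queue, which is the usual way to assert on work finished by another thread or process. A tiny illustration of that wait-on-a-queue pattern (Python 2 style, matching the Queue import used by these tests):

import threading
import Queue


def worker(results):
    # stands in for an executor finishing a task on another thread
    results.put('task succeeded')


queue = Queue.Queue()
threading.Thread(target=worker, args=(queue,)).start()

# block (with a timeout) until the other thread reports back
assert queue.get(timeout=5) == 'task succeeded'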

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/55556793/tests/orchestrator/workflows/executor/test_process_executor_extension.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_extension.py b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
index 4a8ef57..3c3ffb0 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_extension.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
@@ -30,14 +30,17 @@ def test_decorate_extension(context, executor):
     inputs = {'input1': 1, 'input2': 2}
 
     def get_node_instance(ctx):
-        return ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        return ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
 
     @workflow
     def mock_workflow(ctx, graph):
         node_instance = get_node_instance(ctx)
         op = 'test.op'
-        op_dict = {'operation': '{0}.{1}'.format(__name__, _mock_operation.__name__)}
-        node_instance.node.operations['test.op'] = op_dict
+        node_instance.interfaces = [mock.models.get_interface(
+            op,
+            operation_kwargs=dict(implementation='{0}.{1}'.format(__name__,
+                                                                  _mock_operation.__name__))
+        )]
         task = api.task.OperationTask.node_instance(instance=node_instance, name=op, inputs=inputs)
         graph.add_tasks(task)
         return graph
@@ -55,7 +58,7 @@ class MockProcessExecutorExtension(object):
     def decorate(self):
         def decorator(function):
             def wrapper(ctx, **operation_inputs):
-                ctx.node_instance.runtime_properties['out'] = {'wrapper_inputs': operation_inputs}
+                ctx.node.runtime_properties['out'] = {'wrapper_inputs': operation_inputs}
                 function(ctx=ctx, **operation_inputs)
             return wrapper
         return decorator
@@ -63,7 +66,7 @@ class MockProcessExecutorExtension(object):
 
 @operation
 def _mock_operation(ctx, **operation_inputs):
-    ctx.node_instance.runtime_properties['out']['function_inputs'] = operation_inputs
+    ctx.node.runtime_properties['out']['function_inputs'] = operation_inputs
 
 
 @pytest.fixture
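
The extension above works by having decorate() return a decorator that wraps every operation function, letting it record data on the context before the real operation runs. A minimal sketch of that wrapping pattern, with a plain dict standing in for the operation context:

def make_decorator(marker):
    # Returns a decorator that records its inputs on the context
    # before delegating to the wrapped operation.
    def decorator(function):
        def wrapper(ctx, **operation_inputs):
            ctx.setdefault('out', {})[marker] = dict(operation_inputs)
            return function(ctx, **operation_inputs)
        return wrapper
    return decorator


@make_decorator('wrapper_inputs')
def my_operation(ctx, **operation_inputs):
    ctx['out']['function_inputs'] = dict(operation_inputs)


ctx = {}
my_operation(ctx, input1=1, input2=2)
assert ctx['out'] == {'wrapper_inputs': {'input1': 1, 'input2': 2},
                      'function_inputs': {'input1': 1, 'input2': 2}}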

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/55556793/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
index bd1fa96..af318c3 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
@@ -45,13 +45,13 @@ def test_track_changes_of_failed_operation(context, executor):
 
 
 def _assert_tracked_changes_are_applied(context):
-    instance = context.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+    instance = context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
     assert instance.runtime_properties == _TEST_RUNTIME_PROPERTIES
 
 
 def _update_runtime_properties(context):
-    context.node_instance.runtime_properties.clear()
-    context.node_instance.runtime_properties.update(_TEST_RUNTIME_PROPERTIES)
+    context.node.runtime_properties.clear()
+    context.node.runtime_properties.update(_TEST_RUNTIME_PROPERTIES)
 
 
 def test_refresh_state_of_tracked_attributes(context, executor):
@@ -66,7 +66,7 @@ def test_apply_tracked_changes_during_an_operation(context, executor):
         'changed_but_refreshed': {'some': 'newer', 'properties': 'right there'}
     }
 
-    expected_initial = context.model.node_instance.get_by_name(
+    expected_initial = context.model.node.get_by_name(
         mock.models.DEPENDENCY_NODE_INSTANCE_NAME).runtime_properties
 
     out = _run_workflow(context=context, executor=executor, op_func=_mock_updating_operation,
@@ -87,17 +87,18 @@ def test_apply_tracked_changes_during_an_operation(context, executor):
 def _run_workflow(context, executor, op_func, inputs=None):
     @workflow
     def mock_workflow(ctx, graph):
-        node_instance = ctx.model.node_instance.get_by_name(
-            mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-        node_instance.node.operations['test.op'] = {'operation': _operation_mapping(op_func)}
-        task = api.task.OperationTask.node_instance(instance=node_instance, name='test.op',
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        node.interfaces = [mock.models.get_interface(
+            'test.op', operation_kwargs=dict(implementation=_operation_mapping(op_func)))]
+        task = api.task.OperationTask.node_instance(instance=node,
+                                                    name='test.op',
                                                     inputs=inputs or {})
         graph.add_tasks(task)
         return graph
     graph = mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter
     eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
     eng.execute()
-    return context.model.node_instance.get_by_name(
+    return context.model.node.get_by_name(
         mock.models.DEPENDENCY_NODE_INSTANCE_NAME).runtime_properties.get('out')
 
 
@@ -114,25 +115,25 @@ def _mock_fail_operation(ctx):
 
 @operation
 def _mock_refreshing_operation(ctx):
-    out = {'initial': copy.deepcopy(ctx.node_instance.runtime_properties)}
-    ctx.node_instance.runtime_properties.update({'some': 'new', 'properties': 'right here'})
-    out['after_change'] = copy.deepcopy(ctx.node_instance.runtime_properties)
-    ctx.model.node_instance.refresh(ctx.node_instance)
-    out['after_refresh'] = copy.deepcopy(ctx.node_instance.runtime_properties)
-    ctx.node_instance.runtime_properties['out'] = out
+    out = {'initial': copy.deepcopy(ctx.node.runtime_properties)}
+    ctx.node.runtime_properties.update({'some': 'new', 'properties': 'right here'})
+    out['after_change'] = copy.deepcopy(ctx.node.runtime_properties)
+    ctx.model.node.refresh(ctx.node)
+    out['after_refresh'] = copy.deepcopy(ctx.node.runtime_properties)
+    ctx.node.runtime_properties['out'] = out
 
 
 @operation
 def _mock_updating_operation(ctx, committed, changed_but_refreshed):
-    out = {'initial': copy.deepcopy(ctx.node_instance.runtime_properties)}
-    ctx.node_instance.runtime_properties.update(committed)
-    ctx.model.node_instance.update(ctx.node_instance)
-    out['after_update'] = copy.deepcopy(ctx.node_instance.runtime_properties)
-    ctx.node_instance.runtime_properties.update(changed_but_refreshed)
-    out['after_change'] = copy.deepcopy(ctx.node_instance.runtime_properties)
-    ctx.model.node_instance.refresh(ctx.node_instance)
-    out['after_refresh'] = copy.deepcopy(ctx.node_instance.runtime_properties)
-    ctx.node_instance.runtime_properties['out'] = out
+    out = {'initial': copy.deepcopy(ctx.node.runtime_properties)}
+    ctx.node.runtime_properties.update(committed)
+    ctx.model.node.update(ctx.node)
+    out['after_update'] = copy.deepcopy(ctx.node.runtime_properties)
+    ctx.node.runtime_properties.update(changed_but_refreshed)
+    out['after_change'] = copy.deepcopy(ctx.node.runtime_properties)
+    ctx.model.node.refresh(ctx.node)
+    out['after_refresh'] = copy.deepcopy(ctx.node.runtime_properties)
+    ctx.node.runtime_properties['out'] = out
 
 
 def _operation_mapping(func):
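
The operations above snapshot runtime_properties with copy.deepcopy before and after each update/refresh because keeping a plain reference would only alias the same mutable dict, so later changes would leak into the "earlier" snapshot. A small illustration of that point:

import copy

runtime_properties = {'some': 'value'}

alias = runtime_properties                    # same object, not a copy
snapshot = copy.deepcopy(runtime_properties)  # independent copy

runtime_properties.update({'some': 'new', 'properties': 'right here'})

assert alias == runtime_properties            # the alias saw the change
assert snapshot == {'some': 'value'}          # the snapshot did not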

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/55556793/tests/resources/scripts/test_ssh.sh
----------------------------------------------------------------------
diff --git a/tests/resources/scripts/test_ssh.sh b/tests/resources/scripts/test_ssh.sh
index 6f18278..90202c7 100644
--- a/tests/resources/scripts/test_ssh.sh
+++ b/tests/resources/scripts/test_ssh.sh
@@ -4,7 +4,7 @@ set -u
 set -e
 
 test_run_script_basic() {
-    ctx node-instance runtime-properties test_value $test_value
+    ctx node runtime-properties test_value $test_value
 }
 
 test_run_script_as_sudo() {
@@ -12,7 +12,7 @@ test_run_script_as_sudo() {
 }
 
 test_run_script_default_base_dir() {
-    ctx node-instance runtime-properties work_dir $PWD
+    ctx node runtime-properties work_dir $PWD
 }
 
 test_run_script_with_hide() {
@@ -20,44 +20,44 @@ test_run_script_with_hide() {
 }
 
 test_run_script_process_config() {
-    ctx node-instance runtime-properties env_value $test_value_env
-    ctx node-instance runtime-properties bash_version $BASH_VERSION
-    ctx node-instance runtime-properties arg1_value $1
-    ctx node-instance runtime-properties arg2_value $2
-    ctx node-instance runtime-properties cwd $PWD
-    ctx node-instance runtime-properties ctx_path $(which ctx)
+    ctx node runtime-properties env_value $test_value_env
+    ctx node runtime-properties bash_version $BASH_VERSION
+    ctx node runtime-properties arg1_value $1
+    ctx node runtime-properties arg2_value $2
+    ctx node runtime-properties cwd $PWD
+    ctx node runtime-properties ctx_path $(which ctx)
 }
 
 test_run_script_command_prefix() {
-    ctx node-instance runtime-properties dollar_dash $-
+    ctx node runtime-properties dollar_dash $-
 }
 
 test_run_script_reuse_existing_ctx_1() {
-    ctx node-instance runtime-properties test_value1 $test_value1
+    ctx node runtime-properties test_value1 $test_value1
 }
 
 test_run_script_reuse_existing_ctx_2() {
-    ctx node-instance runtime-properties test_value2 $test_value2
+    ctx node runtime-properties test_value2 $test_value2
 }
 
 test_run_script_download_resource_plain() {
     local destination=$(mktemp)
     ctx download-resource ${destination} test_resource
-    ctx node-instance runtime-properties test_value "$(cat ${destination})"
+    ctx node runtime-properties test_value "$(cat ${destination})"
 }
 
 test_run_script_download_resource_and_render() {
     local destination=$(mktemp)
     ctx download-resource-and-render ${destination} test_resource
-    ctx node-instance runtime-properties test_value "$(cat ${destination})"
+    ctx node runtime-properties test_value "$(cat ${destination})"
 }
 
 test_run_script_inputs_as_env_variables_no_override() {
-    ctx node-instance runtime-properties test_value "$custom_env_var"
+    ctx node runtime-properties test_value "$custom_env_var"
 }
 
 test_run_script_inputs_as_env_variables_process_env_override() {
-    ctx node-instance runtime-properties test_value "$custom_env_var"
+    ctx node runtime-properties test_value "$custom_env_var"
 }
 
 test_run_script_error_in_script() {

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/55556793/tests/storage/__init__.py
----------------------------------------------------------------------
diff --git a/tests/storage/__init__.py b/tests/storage/__init__.py
index 3b3715e..66060b8 100644
--- a/tests/storage/__init__.py
+++ b/tests/storage/__init__.py
@@ -14,8 +14,8 @@
 # limitations under the License.
 import os
 import platform
-from tempfile import mkdtemp
 from shutil import rmtree
+from tempfile import mkdtemp
 
 from sqlalchemy import (
     create_engine,
@@ -29,12 +29,12 @@ from sqlalchemy import (
 
 from aria.storage import (
     model,
-    structure,
     type as aria_type,
+    structure
 )
 
 
-class MockModel(model.DeclarativeBase, structure.ModelMixin): #pylint: disable=abstract-method
+class MockModel(model.DB, structure.ModelMixin): #pylint: disable=abstract-method
     __tablename__ = 'mock_models'
     model_dict = Column(aria_type.Dict)
     model_list = Column(aria_type.List)
@@ -42,6 +42,8 @@ class MockModel(model.DeclarativeBase, structure.ModelMixin): #pylint: disable=a
     name = Column(Text)
 
 
+from aria.storage import modeling
+
 
 class TestFileSystem(object):
 
@@ -52,7 +54,8 @@ class TestFileSystem(object):
         rmtree(self.path, ignore_errors=True)
 
 
-def get_sqlite_api_kwargs(base_dir=None, filename='db.sqlite'):
+def get_sqlite_api_kwargs(base_dir=None,
+                          filename='db.sqlite'):
     """
     Create sql params. works in in-memory and in filesystem mode.
     If base_dir is passed, the mode will be filesystem mode. while the default mode is in-memory.
@@ -77,7 +80,7 @@ def get_sqlite_api_kwargs(base_dir=None, filename='db.sqlite'):
     session_factory = orm.sessionmaker(bind=engine)
     session = orm.scoped_session(session_factory=session_factory) if base_dir else session_factory()
 
-    model.DeclarativeBase.metadata.create_all(bind=engine)
+    modeling.declarative_base.metadata.create_all(bind=engine)
     return dict(engine=engine, session=session)
 
 
@@ -94,4 +97,4 @@ def release_sqlite_storage(storage):
             session.rollback()
             session.close()
         for engine in set(mapi._engine for mapi in mapis):
-            model.DeclarativeBase.metadata.drop_all(engine)
+            modeling.declarative_base.metadata.drop_all(engine)
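
get_sqlite_api_kwargs above builds either an in-memory or a file-backed SQLite engine plus a session, and creates all declared tables on it. A minimal standalone sketch of that SQLAlchemy setup; the names here (Base, MockModel, get_sqlite_kwargs) are simplified illustrations rather than the ARIA helpers:

from sqlalchemy import Column, Integer, Text, create_engine, orm
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class MockModel(Base):
    __tablename__ = 'mock_models'
    id = Column(Integer, primary_key=True)
    name = Column(Text)


def get_sqlite_kwargs(base_dir=None, filename='db.sqlite'):
    # file-backed when base_dir is given, otherwise purely in-memory
    uri = ('sqlite:///{0}/{1}'.format(base_dir, filename)
           if base_dir else 'sqlite:///:memory:')
    engine = create_engine(uri)
    session = orm.sessionmaker(bind=engine)()
    Base.metadata.create_all(bind=engine)
    return dict(engine=engine, session=session)


kwargs = get_sqlite_kwargs()
kwargs['session'].add(MockModel(name='example'))
kwargs['session'].commit()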

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/55556793/tests/storage/test_instrumentation.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_instrumentation.py b/tests/storage/test_instrumentation.py
index 9b4da4f..da74bb2 100644
--- a/tests/storage/test_instrumentation.py
+++ b/tests/storage/test_instrumentation.py
@@ -17,17 +17,17 @@ import pytest
 from sqlalchemy import Column, Text, Integer, event
 
 from aria.storage import (
-    model,
+    modeling,
     structure,
-    type as aria_type,
     ModelStorage,
     sql_mapi,
     instrumentation,
-    exceptions
+    exceptions,
+    type as aria_type,
+    model
 )
 from ..storage import get_sqlite_api_kwargs, release_sqlite_storage
 
-
 STUB = instrumentation._STUB
 Value = instrumentation._Value
 instruments_holder = []
@@ -347,15 +347,15 @@ class _MockModel(structure.ModelMixin):
     string2 = Column(Text)
 
 
-class MockModel1(model.DeclarativeBase, _MockModel):
-    __tablename__ = 'mock_model1'
+class MockModel1(modeling.declarative_base, _MockModel):
+    __tablename__ = 'mock_model_1'
 
 
-class MockModel2(model.DeclarativeBase, _MockModel):
-    __tablename__ = 'mock_model2'
+class MockModel2(modeling.declarative_base, _MockModel):
+    __tablename__ = 'mock_model_2'
 
 
-class StrictMockModel(model.DeclarativeBase):
+class StrictMockModel(model.DB):
     __tablename__ = 'strict_mock_model'
 
     strict_dict = Column(aria_type.StrictDict(basestring, basestring))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/55556793/tests/storage/test_model_storage.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_model_storage.py b/tests/storage/test_model_storage.py
index d1596e3..acddab1 100644
--- a/tests/storage/test_model_storage.py
+++ b/tests/storage/test_model_storage.py
@@ -17,9 +17,9 @@ import pytest
 
 from aria.storage import (
     ModelStorage,
-    model,
     exceptions,
     sql_mapi,
+    modeling,
 )
 from aria import application_model_storage
 from ..storage import get_sqlite_api_kwargs, release_sqlite_storage
@@ -37,7 +37,7 @@ def storage():
 
 @pytest.fixture(scope='module', autouse=True)
 def module_cleanup():
-    model.DeclarativeBase.metadata.remove(MockModel.__table__)  #pylint: disable=no-member
+    modeling.model.DB.metadata.remove(MockModel.__table__)  #pylint: disable=no-member
 
 
 def test_storage_base(storage):
@@ -62,14 +62,41 @@ def test_model_storage(storage):
 def test_application_storage_factory():
     storage = application_model_storage(sql_mapi.SQLAlchemyModelAPI,
                                         api_kwargs=get_sqlite_api_kwargs())
+
+    assert storage.parameter
+    assert storage.mapping_template
+    assert storage.substitution_template
+    assert storage.service_template
+    assert storage.node_template
+    assert storage.group_template
+    assert storage.interface_template
+    assert storage.operation_template
+    assert storage.artifact_template
+    assert storage.policy_template
+    assert storage.group_policy_template
+    assert storage.group_policy_trigger_template
+    assert storage.requirement_template
+    assert storage.capability_template
+
+    assert storage.mapping
+    assert storage.substitution
+    assert storage.service_instance
     assert storage.node
-    assert storage.node_instance
-    assert storage.plugin
-    assert storage.blueprint
-    assert storage.deployment
-    assert storage.deployment_update
-    assert storage.deployment_update_step
-    assert storage.deployment_modification
+    assert storage.group
+    assert storage.interface
+    assert storage.operation
+    assert storage.capability
+    assert storage.artifact
+    assert storage.policy
+    assert storage.group_policy
+    assert storage.group_policy_trigger
+    assert storage.relationship
+
     assert storage.execution
+    assert storage.service_instance_update
+    assert storage.service_instance_update_step
+    assert storage.service_instance_modification
+    assert storage.plugin
+    assert storage.task
 
     release_sqlite_storage(storage)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/55556793/tests/storage/test_models.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_models.py b/tests/storage/test_models.py
deleted file mode 100644
index 2088676..0000000
--- a/tests/storage/test_models.py
+++ /dev/null
@@ -1,919 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from datetime import datetime
-from contextlib import contextmanager
-
-import pytest
-
-from aria import application_model_storage
-from aria.storage import (
-    exceptions,
-    sql_mapi,
-)
-from aria.storage.model import (
-    DeploymentUpdateStep,
-    Blueprint,
-    Execution,
-    Task,
-    Plugin,
-    Deployment,
-    Node,
-    NodeInstance,
-    Relationship,
-    RelationshipInstance,
-    DeploymentUpdate,
-    DeploymentModification,
-)
-
-
-from tests import mock
-from tests.storage import get_sqlite_api_kwargs, release_sqlite_storage
-
-
-@contextmanager
-def sql_storage(storage_func):
-    storage = None
-    try:
-        storage = storage_func()
-        yield storage
-    finally:
-        if storage:
-            release_sqlite_storage(storage)
-
-
-def _empty_storage():
-    return application_model_storage(sql_mapi.SQLAlchemyModelAPI,
-                                     api_kwargs=get_sqlite_api_kwargs())
-
-
-def _blueprint_storage():
-    storage = _empty_storage()
-    blueprint = mock.models.get_blueprint()
-    storage.blueprint.put(blueprint)
-    return storage
-
-
-def _deployment_storage():
-    storage = _blueprint_storage()
-    deployment = mock.models.get_deployment(storage.blueprint.list()[0])
-    storage.deployment.put(deployment)
-    return storage
-
-
-def _deployment_update_storage():
-    storage = _deployment_storage()
-    deployment_update = DeploymentUpdate(
-        deployment=storage.deployment.list()[0],
-        created_at=now,
-        deployment_plan={},
-    )
-    storage.deployment_update.put(deployment_update)
-    return storage
-
-
-def _node_storage():
-    storage = _deployment_storage()
-    node = mock.models.get_dependency_node(storage.deployment.list()[0])
-    storage.node.put(node)
-    return storage
-
-
-def _nodes_storage():
-    storage = _deployment_storage()
-    dependent_node = mock.models.get_dependent_node(storage.deployment.list()[0])
-    dependency_node = mock.models.get_dependency_node(storage.deployment.list()[0])
-    storage.node.put(dependent_node)
-    storage.node.put(dependency_node)
-    return storage
-
-
-def _node_instances_storage():
-    storage = _nodes_storage()
-    dependent_node = storage.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
-    dependency_node = storage.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-    dependency_node_instance = mock.models.get_dependency_node_instance(dependency_node)
-    dependent_node_instance = mock.models.get_dependent_node_instance(dependent_node)
-    storage.node_instance.put(dependency_node_instance)
-    storage.node_instance.put(dependent_node_instance)
-    return storage
-
-
-def _execution_storage():
-    storage = _deployment_storage()
-    execution = mock.models.get_execution(storage.deployment.list()[0])
-    plugin = mock.models.get_plugin()
-    storage.execution.put(execution)
-    storage.plugin.put(plugin)
-    return storage
-
-
-@pytest.fixture
-def empty_storage():
-    with sql_storage(_empty_storage) as storage:
-        yield storage
-
-
-@pytest.fixture
-def blueprint_storage():
-    with sql_storage(_blueprint_storage) as storage:
-        yield storage
-
-
-@pytest.fixture
-def deployment_storage():
-    with sql_storage(_deployment_storage) as storage:
-        yield storage
-
-
-@pytest.fixture
-def deployment_update_storage():
-    with sql_storage(_deployment_update_storage) as storage:
-        yield storage
-
-
-@pytest.fixture
-def node_storage():
-    with sql_storage(_node_storage) as storage:
-        yield storage
-
-
-@pytest.fixture
-def nodes_storage():
-    with sql_storage(_nodes_storage) as storage:
-        yield storage
-
-
-@pytest.fixture
-def node_instances_storage():
-    with sql_storage(_node_instances_storage) as storage:
-        yield storage
-
-
-@pytest.fixture
-def execution_storage():
-    with sql_storage(_execution_storage) as storage:
-        yield storage
-
-
-m_cls = type('MockClass')
-now = datetime.utcnow()
-
-
-def _test_model(is_valid, storage, model_name, model_cls, model_kwargs):
-    if is_valid:
-        model = model_cls(**model_kwargs)
-        getattr(storage, model_name).put(model)
-        return model
-    else:
-        with pytest.raises(exceptions.StorageError):
-            getattr(storage, model_name).put(model_cls(**model_kwargs))
-
-
-class TestBlueprint(object):
-
-    @pytest.mark.parametrize(
-        'is_valid, plan, description, created_at, updated_at, main_file_name',
-        [
-            (False, None, 'description', now, now, '/path'),
-            (False, {}, {}, now, now, '/path'),
-            (False, {}, 'description', 'error', now, '/path'),
-            (False, {}, 'description', now, 'error', '/path'),
-            (False, {}, 'description', now, now, {}),
-            (True, {}, 'description', now, now, '/path'),
-        ]
-    )
-    def test_blueprint_model_creation(self, empty_storage, is_valid, plan, description, created_at,
-                                      updated_at, main_file_name):
-        _test_model(is_valid=is_valid,
-                    storage=empty_storage,
-                    model_name='blueprint',
-                    model_cls=Blueprint,
-                    model_kwargs=dict(plan=plan,
-                                      description=description,
-                                      created_at=created_at,
-                                      updated_at=updated_at,
-                                      main_file_name=main_file_name))
-
-
-class TestDeployment(object):
-
-    @pytest.mark.parametrize(
-        'is_valid, name, created_at, description, inputs, groups, permalink, policy_triggers, '
-        'policy_types, outputs, scaling_groups, updated_at, workflows',
-        [
-            (False, m_cls, now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
-            (False, 'name', m_cls, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
-            (False, 'name', now, m_cls, {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
-            (False, 'name', now, 'desc', m_cls, {}, 'perlnk', {}, {}, {}, {}, now, {}),
-            (False, 'name', now, 'desc', {}, m_cls, 'perlnk', {}, {}, {}, {}, now, {}),
-            (False, 'name', now, 'desc', {}, {}, m_cls, {}, {}, {}, {}, now, {}),
-            (False, 'name', now, 'desc', {}, {}, 'perlnk', m_cls, {}, {}, {}, now, {}),
-            (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, m_cls, {}, {}, now, {}),
-            (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, m_cls, {}, now, {}),
-            (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, m_cls, now, {}),
-            (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, m_cls, {}),
-            (False, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, m_cls),
-
-            (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
-            (True, None, now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
-            (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
-            (True, 'name', now, None, {}, {}, 'perlnk', {}, {}, {}, {}, now, {}),
-            (True, 'name', now, 'desc', None, {}, 'perlnk', {}, {}, {}, {}, now, {}),
-            (True, 'name', now, 'desc', {}, None, 'perlnk', {}, {}, {}, {}, now, {}),
-            (True, 'name', now, 'desc', {}, {}, None, {}, {}, {}, {}, now, {}),
-            (True, 'name', now, 'desc', {}, {}, 'perlnk', None, {}, {}, {}, now, {}),
-            (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, None, {}, {}, now, {}),
-            (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, None, {}, now, {}),
-            (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, None, now, {}),
-            (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, None, {}),
-            (True, 'name', now, 'desc', {}, {}, 'perlnk', {}, {}, {}, {}, now, None),
-        ]
-    )
-    def test_deployment_model_creation(self, deployment_storage, is_valid, name, created_at,
-                                       description, inputs, groups, permalink, policy_triggers,
-                                       policy_types, outputs, scaling_groups, updated_at,
-                                       workflows):
-        deployment = _test_model(is_valid=is_valid,
-                                 storage=deployment_storage,
-                                 model_name='deployment',
-                                 model_cls=Deployment,
-                                 model_kwargs=dict(
-                                     name=name,
-                                     blueprint=deployment_storage.blueprint.list()[0],
-                                     created_at=created_at,
-                                     description=description,
-                                     inputs=inputs,
-                                     groups=groups,
-                                     permalink=permalink,
-                                     policy_triggers=policy_triggers,
-                                     policy_types=policy_types,
-                                     outputs=outputs,
-                                     scaling_groups=scaling_groups,
-                                     updated_at=updated_at,
-                                     workflows=workflows
-                                 ))
-        if is_valid:
-            assert deployment.blueprint == deployment_storage.blueprint.list()[0]
-
-
-class TestExecution(object):
-
-    @pytest.mark.parametrize(
-        'is_valid, created_at, started_at, ended_at, error, is_system_workflow, parameters, '
-        'status, workflow_name',
-        [
-            (False, m_cls, now, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
-            (False, now, m_cls, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
-            (False, now, now, m_cls, 'error', False, {}, Execution.STARTED, 'wf_name'),
-            (False, now, now, now, m_cls, False, {}, Execution.STARTED, 'wf_name'),
-            (False, now, now, now, 'error', False, m_cls, Execution.STARTED, 'wf_name'),
-            (False, now, now, now, 'error', False, {}, m_cls, 'wf_name'),
-            (False, now, now, now, 'error', False, {}, Execution.STARTED, m_cls),
-
-            (True, now, now, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
-            (True, now, None, now, 'error', False, {}, Execution.STARTED, 'wf_name'),
-            (True, now, now, None, 'error', False, {}, Execution.STARTED, 'wf_name'),
-            (True, now, now, now, None, False, {}, Execution.STARTED, 'wf_name'),
-            (True, now, now, now, 'error', False, None, Execution.STARTED, 'wf_name'),
-        ]
-    )
-    def test_execution_model_creation(self, deployment_storage, is_valid, created_at, started_at,
-                                      ended_at, error, is_system_workflow, parameters, status,
-                                      workflow_name):
-        execution = _test_model(is_valid=is_valid,
-                                storage=deployment_storage,
-                                model_name='execution',
-                                model_cls=Execution,
-                                model_kwargs=dict(
-                                    deployment=deployment_storage.deployment.list()[0],
-                                    created_at=created_at,
-                                    started_at=started_at,
-                                    ended_at=ended_at,
-                                    error=error,
-                                    is_system_workflow=is_system_workflow,
-                                    parameters=parameters,
-                                    status=status,
-                                    workflow_name=workflow_name,
-                                ))
-        if is_valid:
-            assert execution.deployment == deployment_storage.deployment.list()[0]
-            assert execution.blueprint == deployment_storage.blueprint.list()[0]
-
-    def test_execution_status_transition(self):
-        def create_execution(status):
-            execution = Execution(
-                id='e_id',
-                workflow_name='w_name',
-                status=status,
-                parameters={},
-                created_at=now,
-            )
-            return execution
-
-        valid_transitions = {
-            Execution.PENDING: [Execution.STARTED,
-                                Execution.CANCELLED,
-                                Execution.PENDING],
-            Execution.STARTED: [Execution.FAILED,
-                                Execution.TERMINATED,
-                                Execution.CANCELLED,
-                                Execution.CANCELLING,
-                                Execution.STARTED],
-            Execution.CANCELLING: [Execution.FAILED,
-                                   Execution.TERMINATED,
-                                   Execution.CANCELLED,
-                                   Execution.CANCELLING],
-            Execution.FAILED: [Execution.FAILED],
-            Execution.TERMINATED: [Execution.TERMINATED],
-            Execution.CANCELLED: [Execution.CANCELLED]
-        }
-
-        invalid_transitions = {
-            Execution.PENDING: [Execution.FAILED,
-                                Execution.TERMINATED,
-                                Execution.CANCELLING],
-            Execution.STARTED: [Execution.PENDING],
-            Execution.CANCELLING: [Execution.PENDING,
-                                   Execution.STARTED],
-            Execution.FAILED: [Execution.PENDING,
-                               Execution.STARTED,
-                               Execution.TERMINATED,
-                               Execution.CANCELLED,
-                               Execution.CANCELLING],
-            Execution.TERMINATED: [Execution.PENDING,
-                                   Execution.STARTED,
-                                   Execution.FAILED,
-                                   Execution.CANCELLED,
-                                   Execution.CANCELLING],
-            Execution.CANCELLED: [Execution.PENDING,
-                                  Execution.STARTED,
-                                  Execution.FAILED,
-                                  Execution.TERMINATED,
-                                  Execution.CANCELLING],
-        }
-
-        for current_status, valid_transitioned_statues in valid_transitions.items():
-            for transitioned_status in valid_transitioned_statues:
-                execution = create_execution(current_status)
-                execution.status = transitioned_status
-
-        for current_status, invalid_transitioned_statues in invalid_transitions.items():
-            for transitioned_status in invalid_transitioned_statues:
-                execution = create_execution(current_status)
-                with pytest.raises(ValueError):
-                    execution.status = transitioned_status
-
-
-class TestDeploymentUpdate(object):
-    @pytest.mark.parametrize(
-        'is_valid, created_at, deployment_plan, deployment_update_node_instances, '
-        'deployment_update_deployment, deployment_update_nodes, modified_entity_ids, state',
-        [
-            (False, m_cls, {}, {}, {}, [], {}, 'state'),
-            (False, now, m_cls, {}, {}, [], {}, 'state'),
-            (False, now, {}, m_cls, {}, [], {}, 'state'),
-            (False, now, {}, {}, m_cls, [], {}, 'state'),
-            (False, now, {}, {}, {}, m_cls, {}, 'state'),
-            (False, now, {}, {}, {}, [], m_cls, 'state'),
-            (False, now, {}, {}, {}, [], {}, m_cls),
-
-            (True, now, {}, {}, {}, [], {}, 'state'),
-            (True, now, {}, None, {}, [], {}, 'state'),
-            (True, now, {}, {}, None, [], {}, 'state'),
-            (True, now, {}, {}, {}, None, {}, 'state'),
-            (True, now, {}, {}, {}, [], None, 'state'),
-            (True, now, {}, {}, {}, [], {}, None),
-        ]
-    )
-    def test_deployment_update_model_creation(self, deployment_storage, is_valid, created_at,
-                                              deployment_plan, deployment_update_node_instances,
-                                              deployment_update_deployment, deployment_update_nodes,
-                                              modified_entity_ids, state):
-        deployment_update = _test_model(
-            is_valid=is_valid,
-            storage=deployment_storage,
-            model_name='deployment_update',
-            model_cls=DeploymentUpdate,
-            model_kwargs=dict(
-                deployment=deployment_storage.deployment.list()[0],
-                created_at=created_at,
-                deployment_plan=deployment_plan,
-                deployment_update_node_instances=deployment_update_node_instances,
-                deployment_update_deployment=deployment_update_deployment,
-                deployment_update_nodes=deployment_update_nodes,
-                modified_entity_ids=modified_entity_ids,
-                state=state,
-            ))
-        if is_valid:
-            assert deployment_update.deployment == deployment_storage.deployment.list()[0]
-
-
-class TestDeploymentUpdateStep(object):
-
-    @pytest.mark.parametrize(
-        'is_valid, action, entity_id, entity_type',
-        [
-            (False, m_cls, 'id', DeploymentUpdateStep.ENTITY_TYPES.NODE),
-            (False, DeploymentUpdateStep.ACTION_TYPES.ADD, m_cls,
-             DeploymentUpdateStep.ENTITY_TYPES.NODE),
-            (False, DeploymentUpdateStep.ACTION_TYPES.ADD, 'id', m_cls),
-
-            (True, DeploymentUpdateStep.ACTION_TYPES.ADD, 'id',
-             DeploymentUpdateStep.ENTITY_TYPES.NODE)
-        ]
-    )
-    def test_deployment_update_step_model_creation(self, deployment_update_storage, is_valid,
-                                                   action, entity_id, entity_type):
-        deployment_update_step = _test_model(
-            is_valid=is_valid,
-            storage=deployment_update_storage,
-            model_name='deployment_update_step',
-            model_cls=DeploymentUpdateStep,
-            model_kwargs=dict(
-                deployment_update=deployment_update_storage.deployment_update.list()[0],
-                action=action,
-                entity_id=entity_id,
-                entity_type=entity_type
-            ))
-        if is_valid:
-            assert deployment_update_step.deployment_update == \
-                  deployment_update_storage.deployment_update.list()[0]
-
-    def test_deployment_update_step_order(self):
-        add_node = DeploymentUpdateStep(
-            id='add_step',
-            action='add',
-            entity_type='node',
-            entity_id='node_id')
-
-        modify_node = DeploymentUpdateStep(
-            id='modify_step',
-            action='modify',
-            entity_type='node',
-            entity_id='node_id')
-
-        remove_node = DeploymentUpdateStep(
-            id='remove_step',
-            action='remove',
-            entity_type='node',
-            entity_id='node_id')
-
-        for step in (add_node, modify_node, remove_node):
-            assert hash((step.id, step.entity_id)) == hash(step)
-
-        assert remove_node < modify_node < add_node
-        assert not remove_node > modify_node > add_node
-
-        add_rel = DeploymentUpdateStep(
-            id='add_step',
-            action='add',
-            entity_type='relationship',
-            entity_id='relationship_id')
-
-        remove_rel = DeploymentUpdateStep(
-            id='remove_step',
-            action='remove',
-            entity_type='relationship',
-            entity_id='relationship_id')
-
-        assert remove_rel < remove_node < add_node < add_rel
-        assert not add_node < None
-
-
-class TestDeploymentModification(object):
-    @pytest.mark.parametrize(
-        'is_valid, context, created_at, ended_at, modified_nodes, node_instances, status',
-        [
-            (False, m_cls, now, now, {}, {}, DeploymentModification.STARTED),
-            (False, {}, m_cls, now, {}, {}, DeploymentModification.STARTED),
-            (False, {}, now, m_cls, {}, {}, DeploymentModification.STARTED),
-            (False, {}, now, now, m_cls, {}, DeploymentModification.STARTED),
-            (False, {}, now, now, {}, m_cls, DeploymentModification.STARTED),
-            (False, {}, now, now, {}, {}, m_cls),
-
-            (True, {}, now, now, {}, {}, DeploymentModification.STARTED),
-            (True, {}, now, None, {}, {}, DeploymentModification.STARTED),
-            (True, {}, now, now, None, {}, DeploymentModification.STARTED),
-            (True, {}, now, now, {}, None, DeploymentModification.STARTED),
-        ]
-    )
-    def test_deployment_modification_model_creation(self, deployment_storage, is_valid, context,
-                                                    created_at, ended_at, modified_nodes,
-                                                    node_instances, status):
-        deployment_modification = _test_model(
-            is_valid=is_valid,
-            storage=deployment_storage,
-            model_name='deployment_modification',
-            model_cls=DeploymentModification,
-            model_kwargs=dict(
-                deployment=deployment_storage.deployment.list()[0],
-                context=context,
-                created_at=created_at,
-                ended_at=ended_at,
-                modified_nodes=modified_nodes,
-                node_instances=node_instances,
-                status=status,
-            ))
-        if is_valid:
-            assert deployment_modification.deployment == deployment_storage.deployment.list()[0]
-
-
-class TestNode(object):
-    @pytest.mark.parametrize(
-        'is_valid, name, deploy_number_of_instances, max_number_of_instances, '
-        'min_number_of_instances, number_of_instances, planned_number_of_instances, plugins, '
-        'properties, operations, type, type_hierarchy',
-        [
-            (False, m_cls, 1, 1, 1, 1, 1, [], {}, {}, 'type', []),
-            (False, 'name', m_cls, 1, 1, 1, 1, [], {}, {}, 'type', []),
-            (False, 'name', 1, m_cls, 1, 1, 1, [], {}, {}, 'type', []),
-            (False, 'name', 1, 1, m_cls, 1, 1, [], {}, {}, 'type', []),
-            (False, 'name', 1, 1, 1, m_cls, 1, [], {}, {}, 'type', []),
-            (False, 'name', 1, 1, 1, 1, m_cls, [], {}, {}, 'type', []),
-            (False, 'name', 1, 1, 1, 1, 1, m_cls, {}, {}, 'type', []),
-            (False, 'name', 1, 1, 1, 1, 1, [], m_cls, {}, 'type', []),
-            (False, 'name', 1, 1, 1, 1, 1, [], {}, m_cls, 'type', []),
-            (False, 'name', 1, 1, 1, 1, 1, [], {}, {}, m_cls, []),
-            (False, 'name', 1, 1, 1, 1, 1, [], {}, {}, 'type', m_cls),
-
-            (True, 'name', 1, 1, 1, 1, 1, [], {}, {}, 'type', []),
-            (True, 'name', 1, 1, 1, 1, 1, None, {}, {}, 'type', []),
-            (True, 'name', 1, 1, 1, 1, 1, [], None, {}, 'type', []),
-            (True, 'name', 1, 1, 1, 1, 1, [], {}, None, 'type', []),
-            (True, 'name', 1, 1, 1, 1, 1, [], {}, {}, 'type', None),
-        ]
-    )
-    def test_node_model_creation(self, deployment_storage, is_valid, name,
-                                 deploy_number_of_instances, max_number_of_instances,
-                                 min_number_of_instances, number_of_instances,
-                                 planned_number_of_instances, plugins,
-                                 properties, operations, type, type_hierarchy):
-        node = _test_model(
-            is_valid=is_valid,
-            storage=deployment_storage,
-            model_name='node',
-            model_cls=Node,
-            model_kwargs=dict(
-                name=name,
-                deploy_number_of_instances=deploy_number_of_instances,
-                max_number_of_instances=max_number_of_instances,
-                min_number_of_instances=min_number_of_instances,
-                number_of_instances=number_of_instances,
-                planned_number_of_instances=planned_number_of_instances,
-                plugins=plugins,
-                properties=properties,
-                operations=operations,
-                type=type,
-                type_hierarchy=type_hierarchy,
-                deployment=deployment_storage.deployment.list()[0]
-            ))
-        if is_valid:
-            assert node.deployment == deployment_storage.deployment.list()[0]
-
-
-class TestRelationship(object):
-    @pytest.mark.parametrize(
-        'is_valid, source_interfaces, source_operations, target_interfaces, target_operations, '
-        'type, type_hierarchy, properties',
-        [
-            (False, m_cls, {}, {}, {}, 'type', [], {}),
-            (False, {}, m_cls, {}, {}, 'type', [], {}),
-            (False, {}, {}, m_cls, {}, 'type', [], {}),
-            (False, {}, {}, {}, m_cls, 'type', [], {}),
-            (False, {}, {}, {}, {}, m_cls, [], {}),
-            (False, {}, {}, {}, {}, 'type', m_cls, {}),
-            (False, {}, {}, {}, {}, 'type', [], m_cls),
-
-            (True, {}, {}, {}, {}, 'type', [], {}),
-            (True, None, {}, {}, {}, 'type', [], {}),
-            (True, {}, {}, None, {}, 'type', [], {}),
-            (True, {}, {}, {}, {}, 'type', None, {}),
-            (True, {}, {}, {}, {}, 'type', [], None),
-        ]
-        )
-    def test_relationship_model_creation(self, nodes_storage, is_valid, source_interfaces,
-                                         source_operations, target_interfaces, target_operations,
-                                         type, type_hierarchy, properties):
-        relationship = _test_model(
-            is_valid=is_valid,
-            storage=nodes_storage,
-            model_name='relationship',
-            model_cls=Relationship,
-            model_kwargs=dict(
-                source_node=nodes_storage.node.list()[1],
-                target_node=nodes_storage.node.list()[0],
-                source_interfaces=source_interfaces,
-                source_operations=source_operations,
-                target_interfaces=target_interfaces,
-                target_operations=target_operations,
-                type=type,
-                type_hierarchy=type_hierarchy,
-                properties=properties,
-            ))
-        if is_valid:
-            assert relationship.source_node == nodes_storage.node.list()[1]
-            assert relationship.target_node == nodes_storage.node.list()[0]
-
-
-class TestNodeInstance(object):
-    @pytest.mark.parametrize(
-        'is_valid, name, runtime_properties, scaling_groups, state, version',
-        [
-            (False, m_cls, {}, [], 'state', 1),
-            (False, 'name', m_cls, [], 'state', 1),
-            (False, 'name', {}, m_cls, 'state', 1),
-            (False, 'name', {}, [], m_cls, 1),
-            (False, m_cls, {}, [], 'state', m_cls),
-
-            (True, 'name', {}, [], 'state', 1),
-            (True, None, {}, [], 'state', 1),
-            (True, 'name', None, [], 'state', 1),
-            (True, 'name', {}, None, 'state', 1),
-            (True, 'name', {}, [], 'state', None),
-        ]
-    )
-    def test_node_instance_model_creation(self, node_storage, is_valid, name, runtime_properties,
-                                          scaling_groups, state, version):
-        node_instance = _test_model(
-            is_valid=is_valid,
-            storage=node_storage,
-            model_name='node_instance',
-            model_cls=NodeInstance,
-            model_kwargs=dict(
-                node=node_storage.node.list()[0],
-                name=name,
-                runtime_properties=runtime_properties,
-                scaling_groups=scaling_groups,
-                state=state,
-                version=version,
-            ))
-        if is_valid:
-            assert node_instance.node == node_storage.node.list()[0]
-            assert node_instance.deployment == node_storage.deployment.list()[0]
-
-
-class TestNodeInstanceIP(object):
-
-    ip = '1.1.1.1'
-
-    def test_ip_on_none_hosted_node_instance(self, deployment_storage):
-        node = self._node(deployment_storage, ip='not considered')
-        node_instance = self._node_instance(deployment_storage, node,
-                                            is_host=False,
-                                            ip='not considered')
-        assert node_instance.ip is None
-
-    def test_property_ip_on_host_node_instance(self, deployment_storage):
-        node = self._node(deployment_storage, ip=self.ip)
-        node_instance = self._node_instance(deployment_storage, node,
-                                            is_host=True,
-                                            ip=None)
-        assert node_instance.ip == self.ip
-
-    def test_runtime_property_ip_on_host_node_instance(self, deployment_storage):
-        node = self._node(deployment_storage, ip='not considered')
-        node_instance = self._node_instance(deployment_storage, node,
-                                            is_host=True,
-                                            ip=self.ip)
-        assert node_instance.ip == self.ip
-
-    def test_no_ip_configured_on_host_node_instance(self, deployment_storage):
-        node = self._node(deployment_storage, ip=None)
-        node_instance = self._node_instance(deployment_storage, node,
-                                            is_host=True,
-                                            ip=None)
-        assert node_instance.ip is None
-
-    def test_runtime_property_on_hosted_node_instance(self, deployment_storage):
-        host_node = self._node(deployment_storage, ip=None)
-        host_node_instance = self._node_instance(deployment_storage, host_node,
-                                                 is_host=True,
-                                                 ip=self.ip)
-        node = self._node(deployment_storage, ip=None)
-        node_instance = self._node_instance(deployment_storage, node,
-                                            is_host=False,
-                                            ip=None,
-                                            host_fk=host_node_instance.id)
-        assert node_instance.ip == self.ip
-
-    def _node(self, storage, ip):
-        kwargs = dict(
-            name='node',
-            deploy_number_of_instances=1,
-            max_number_of_instances=1,
-            min_number_of_instances=1,
-            number_of_instances=1,
-            planned_number_of_instances=1,
-            properties={},
-            type='',
-            deployment=storage.deployment.list()[0]
-        )
-        if ip:
-            kwargs['properties']['ip'] = ip
-        node = Node(**kwargs)
-        storage.node.put(node)
-        return node
-
-    def _node_instance(self, storage, node, is_host, ip, host_fk=None):
-        kwargs = dict(
-            name='node_instance',
-            node=node,
-            runtime_properties={},
-            state=''
-        )
-        if ip:
-            kwargs['runtime_properties']['ip'] = ip
-        if is_host:
-            kwargs['host_fk'] = 1
-        elif host_fk:
-            kwargs['host_fk'] = host_fk
-        node_instance = NodeInstance(**kwargs)
-        storage.node_instance.put(node_instance)
-        return node_instance
-
-
-class TestRelationshipInstance(object):
-    def test_relationship_instance_model_creation(self, node_instances_storage):
-        relationship = mock.models.get_relationship(
-            source=node_instances_storage.node.get_by_name(mock.models.DEPENDENT_NODE_NAME),
-            target=node_instances_storage.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-        )
-        node_instances_storage.relationship.put(relationship)
-        node_instances = node_instances_storage.node_instance
-        source_node_instance = node_instances.get_by_name(mock.models.DEPENDENT_NODE_INSTANCE_NAME)
-        target_node_instance = node_instances.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-
-        relationship_instance = _test_model(
-            is_valid=True,
-            storage=node_instances_storage,
-            model_name='relationship_instance',
-            model_cls=RelationshipInstance,
-            model_kwargs=dict(
-                relationship=relationship,
-                source_node_instance=source_node_instance,
-                target_node_instance=target_node_instance
-            ))
-        assert relationship_instance.relationship == relationship
-        assert relationship_instance.source_node_instance == source_node_instance
-        assert relationship_instance.target_node_instance == target_node_instance
-
-
-class TestPlugin(object):
-    @pytest.mark.parametrize(
-        'is_valid, archive_name, distribution, distribution_release, '
-        'distribution_version, package_name, package_source, '
-        'package_version, supported_platform, supported_py_versions, uploaded_at, wheels',
-        [
-            (False, m_cls, 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (False, 'arc_name', m_cls, 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (False, 'arc_name', 'dis_name', m_cls, 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (False, 'arc_name', 'dis_name', 'dis_rel', m_cls, 'pak_name', 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', m_cls, 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', m_cls, 'pak_ver',
-             'sup_plat', [], now, []),
-            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', m_cls,
-             'sup_plat', [], now, []),
-            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
-             'pak_ver', m_cls, [], now, []),
-            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
-             'pak_ver', 'sup_plat', m_cls, now, []),
-            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
-             'pak_ver', 'sup_plat', [], m_cls, []),
-            (False, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
-             'pak_ver', 'sup_plat', [], now, m_cls),
-
-            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (True, 'arc_name', None, 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (True, 'arc_name', 'dis_name', None, 'dis_ver', 'pak_name', 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (True, 'arc_name', 'dis_name', 'dis_rel', None, 'pak_name', 'pak_src', 'pak_ver',
-             'sup_plat', [], now, []),
-            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
-             'pak_ver', 'sup_plat', [], now, []),
-            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', None, 'pak_ver',
-             'sup_plat', [], now, []),
-            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src', None,
-             'sup_plat', [], now, []),
-            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
-             'pak_ver', None, [], now, []),
-            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
-             'pak_ver', 'sup_plat', None, now, []),
-            (True, 'arc_name', 'dis_name', 'dis_rel', 'dis_ver', 'pak_name', 'pak_src',
-             'pak_ver', 'sup_plat', [], now, []),
-        ]
-    )
-    def test_plugin_model_creation(self, empty_storage, is_valid, archive_name, distribution,
-                                   distribution_release, distribution_version,
-                                   package_name, package_source, package_version,
-                                   supported_platform, supported_py_versions, uploaded_at, wheels):
-        _test_model(is_valid=is_valid,
-                    storage=empty_storage,
-                    model_name='plugin',
-                    model_cls=Plugin,
-                    model_kwargs=dict(
-                        archive_name=archive_name,
-                        distribution=distribution,
-                        distribution_release=distribution_release,
-                        distribution_version=distribution_version,
-                        package_name=package_name,
-                        package_source=package_source,
-                        package_version=package_version,
-                        supported_platform=supported_platform,
-                        supported_py_versions=supported_py_versions,
-                        uploaded_at=uploaded_at,
-                        wheels=wheels,
-                    ))
-
-
-class TestTask(object):
-
-    @pytest.mark.parametrize(
-        'is_valid, status, due_at, started_at, ended_at, max_attempts, retry_count, '
-        'retry_interval, ignore_failure, name, operation_mapping, inputs, plugin_id',
-        [
-            (False, m_cls, now, now, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
-            (False, Task.STARTED, m_cls, now, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
-            (False, Task.STARTED, now, m_cls, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
-            (False, Task.STARTED, now, now, m_cls, 1, 1, 1, True, 'name', 'map', {}, '1'),
-            (False, Task.STARTED, now, now, now, m_cls, 1, 1, True, 'name', 'map', {}, '1'),
-            (False, Task.STARTED, now, now, now, 1, m_cls, 1, True, 'name', 'map', {}, '1'),
-            (False, Task.STARTED, now, now, now, 1, 1, m_cls, True, 'name', 'map', {}, '1'),
-            (False, Task.STARTED, now, now, now, 1, 1, 1, True, m_cls, 'map', {}, '1'),
-            (False, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', m_cls, {}, '1'),
-            (False, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', 'map', m_cls, '1'),
-            (False, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', 'map', {}, m_cls),
-
-            (True, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
-            (True, Task.STARTED, None, now, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
-            (True, Task.STARTED, now, None, now, 1, 1, 1, True, 'name', 'map', {}, '1'),
-            (True, Task.STARTED, now, now, None, 1, 1, 1, True, 'name', 'map', {}, '1'),
-            (True, Task.STARTED, now, now, now, 1, None, 1, True, 'name', 'map', {}, '1'),
-            (True, Task.STARTED, now, now, now, 1, 1, None, True, 'name', 'map', {}, '1'),
-            (True, Task.STARTED, now, now, now, 1, 1, 1, None, 'name', 'map', {}, '1'),
-            (True, Task.STARTED, now, now, now, 1, 1, 1, True, None, 'map', {}, '1'),
-            (True, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', None, {}, '1'),
-            (True, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', 'map', None, '1'),
-            (True, Task.STARTED, now, now, now, 1, 1, 1, True, 'name', 'map', {}, None),
-        ]
-    )
-    def test_task_model_creation(self, execution_storage, is_valid, status, due_at, started_at,
-                                 ended_at, max_attempts, retry_count, retry_interval,
-                                 ignore_failure, name, operation_mapping, inputs, plugin_id):
-        task = _test_model(
-            is_valid=is_valid,
-            storage=execution_storage,
-            model_name='task',
-            model_cls=Task,
-            model_kwargs=dict(
-                status=status,
-                execution=execution_storage.execution.list()[0],
-                due_at=due_at,
-                started_at=started_at,
-                ended_at=ended_at,
-                max_attempts=max_attempts,
-                retry_count=retry_count,
-                retry_interval=retry_interval,
-                ignore_failure=ignore_failure,
-                name=name,
-                operation_mapping=operation_mapping,
-                inputs=inputs,
-                plugin_fk=plugin_id,
-            ))
-        if is_valid:
-            assert task.execution == execution_storage.execution.list()[0]
-            if task.plugin:
-                assert task.plugin == execution_storage.plugin.list()[0]
-
-    def test_task_max_attempts_validation(self):
-        def create_task(max_attempts):
-            Task(execution_fk='eid',
-                 name='name',
-                 operation_mapping='',
-                 inputs={},
-                 max_attempts=max_attempts)
-        create_task(max_attempts=1)
-        create_task(max_attempts=2)
-        create_task(max_attempts=Task.INFINITE_RETRIES)
-        with pytest.raises(ValueError):
-            create_task(max_attempts=0)
-        with pytest.raises(ValueError):
-            create_task(max_attempts=-2)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/55556793/tests/storage/test_structures.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_structures.py b/tests/storage/test_structures.py
index 0223a98..7908cb4 100644
--- a/tests/storage/test_structures.py
+++ b/tests/storage/test_structures.py
@@ -21,8 +21,8 @@ from aria.storage import (
     ModelStorage,
     sql_mapi,
     model,
-    type,
-    exceptions
+    exceptions,
+    type
 )
 
 from ..storage import get_sqlite_api_kwargs, release_sqlite_storage, structure
@@ -44,7 +44,7 @@ def storage():
 
 @pytest.fixture(scope='module', autouse=True)
 def module_cleanup():
-    model.DeclarativeBase.metadata.remove(MockModel.__table__)  #pylint: disable=no-member
+    model.DB.metadata.remove(MockModel.__table__)  #pylint: disable=no-member
 
 
 @pytest.fixture
@@ -225,7 +225,7 @@ def test_relationship_model_ordering(context):
     flip_and_assert(target_node_instance, 'inbound')
 
 
-class StrictClass(model.DeclarativeBase, structure.ModelMixin):
+class StrictClass(model.DB, structure.ModelMixin):
     __tablename__ = 'strict_class'
 
     strict_dict = sqlalchemy.Column(type.StrictDict(basestring, basestring))

