From: mxmrlv@apache.org
To: dev@ariatosca.incubator.apache.org
Date: Mon, 17 Apr 2017 15:50:44 -0000
Subject: [06/12] incubator-ariatosca git commit: ARIA-48 cli

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml
new file mode 100644
index 0000000..d0b0854
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml
@@ -0,0 +1,3 @@
+storage_snapshot_id: "snapshot-id"
+storage_location: /mnt
+cpus: 4
\ No newline at end of file
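
(Illustrative aside, not part of the commit: inputs files such as the one above are plain YAML maps, so they can be loaded with PyYAML, which is already a project dependency. The path below is only an assumption for the example.)

    import yaml  # PyYAML, already pinned in requirements.in

    # Assumed path for illustration; any of the new inputs.yaml files in this commit would do.
    with open('examples/tosca-simple-1.0/use-cases/block-storage-1/inputs.yaml') as f:
        inputs = yaml.safe_load(f)

    # The parsed result mirrors the three lines added in the hunk above.
    assert inputs == {'storage_snapshot_id': 'snapshot-id',
                      'storage_location': '/mnt',
                      'cpus': 4}
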
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/block-storage-2/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-2/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-2/inputs.yaml
new file mode 100644
index 0000000..d0b0854
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/block-storage-2/inputs.yaml
@@ -0,0 +1,3 @@
+storage_snapshot_id: "snapshot-id"
+storage_location: /mnt
+cpus: 4
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/block-storage-3/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-3/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-3/inputs.yaml
new file mode 100644
index 0000000..daca041
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/block-storage-3/inputs.yaml
@@ -0,0 +1,2 @@
+storage_location: /mnt
+cpus: 4
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/block-storage-4/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-4/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-4/inputs.yaml
new file mode 100644
index 0000000..18e457d
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/block-storage-4/inputs.yaml
@@ -0,0 +1,2 @@
+storage_snapshot_id: "snapshot-id"
+cpus: 4
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/block-storage-5/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-5/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-5/inputs.yaml
new file mode 100644
index 0000000..d0b0854
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/block-storage-5/inputs.yaml
@@ -0,0 +1,3 @@
+storage_snapshot_id: "snapshot-id"
+storage_location: /mnt
+cpus: 4
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/block-storage-6/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/block-storage-6/inputs.yaml b/examples/tosca-simple-1.0/use-cases/block-storage-6/inputs.yaml
new file mode 100644
index 0000000..d0b0854
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/block-storage-6/inputs.yaml
@@ -0,0 +1,3 @@
+storage_snapshot_id: "snapshot-id"
+storage_location: /mnt
+cpus: 4
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/compute-1/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/compute-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/compute-1/inputs.yaml
new file mode 100644
index 0000000..c1ee88a
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/compute-1/inputs.yaml
@@ -0,0 +1 @@
+cpus: 4
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/multi-tier-1/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/multi-tier-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/multi-tier-1/inputs.yaml
new file mode 100644
index 0000000..5302bbf
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/multi-tier-1/inputs.yaml
@@ -0,0 +1 @@
+my_cpus: 8
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/network-1/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/network-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/network-1/inputs.yaml
new file mode 100644
index 0000000..9687bb0
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/network-1/inputs.yaml
@@ -0,0 +1 @@
+network_name: "network"
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/network-2/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/network-2/inputs.yaml b/examples/tosca-simple-1.0/use-cases/network-2/inputs.yaml
new file mode 100644
index 0000000..9687bb0
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/network-2/inputs.yaml
@@ -0,0 +1 @@
+network_name: "network"
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/network-3/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/network-3/inputs.yaml b/examples/tosca-simple-1.0/use-cases/network-3/inputs.yaml
new file mode 100644
index 0000000..9687bb0
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/network-3/inputs.yaml
@@ -0,0 +1 @@
+network_name: "network"
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/object-storage-1/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/object-storage-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/object-storage-1/inputs.yaml
new file mode 100644
index 0000000..57f99a3
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/object-storage-1/inputs.yaml
@@ -0,0 +1 @@
+objectstore_name: "objectstore"
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/examples/tosca-simple-1.0/use-cases/software-component-1/inputs.yaml
----------------------------------------------------------------------
diff --git a/examples/tosca-simple-1.0/use-cases/software-component-1/inputs.yaml b/examples/tosca-simple-1.0/use-cases/software-component-1/inputs.yaml
new file mode 100644
index 0000000..c1ee88a
--- /dev/null
+++ b/examples/tosca-simple-1.0/use-cases/software-component-1/inputs.yaml
@@ -0,0 +1 @@
+cpus: 4
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
----------------------------------------------------------------------
diff --git a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
index 0e9177f..9576260 100644
--- a/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
+++ b/extensions/aria_extension_tosca/simple_v1_0/modeling/__init__.py
@@ -19,6 +19,7 @@
 Creates ARIA service template models based on the TOSCA presentation.
 Relies on many helper methods in the presentation classes.
 """
+import os
 import re
 from types import FunctionType
 from datetime import datetime
@@ -41,7 +42,7 @@ IMPLEMENTATION_PREFIX_REGEX = re.compile(r'(?')
 def create_service_template_model(context): # pylint: disable=too-many-locals,too-many-branches
     model = ServiceTemplate(created_at=datetime.now(),
-                            main_file_name=str(context.presentation.location))
+                            main_file_name=os.path.basename(str(context.presentation.location)))
     model.description = context.presentation.get('service_template', 'description', 'value')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/requirements.in
----------------------------------------------------------------------
diff --git a/requirements.in b/requirements.in
index bc27479..3950140 100644
--- a/requirements.in
+++ b/requirements.in
@@ -10,6 +10,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
+# In order to create the requirements.txt file, execute
+# pip-compile --output-file requirements.txt requirements.in (pip-tools package is needed).
+
 PyYAML<3.13
 requests>=2.3.0, <2.14.0
 networkx>=1.9, <1.10 # version 1.10 dropped support of python 2.6
@@ -25,6 +28,12 @@ SQLAlchemy>=1.1.0, <1.2 # version 1.2 dropped support of python 2.6
 wagon==0.6.0
 bottle>=0.12.0, <0.13
 Fabric>=1.13.0, <1.14
+click>=4.1, < 5.0
+colorama>=0.3.3, < 0.3.5
+PrettyTable>=0.7,<0.8
+click_didyoumean==0.0.3
+backports.shutil_get_terminal_size==1.0.0
+logutils==0.3.4.1

 # Since the tool we are using to generate our requirements.txt, `pip-tools`,
 # does not currently support conditional dependencies (;), we're adding our original

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/requirements.txt
----------------------------------------------------------------------
diff --git a/requirements.txt b/requirements.txt
index 901aa75..3accaa3 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,28 +4,30 @@
 #
 #    pip-compile --output-file requirements.txt requirements.in
 #
-
-# ----------------------------------------------------------------------------------
 # Since the tool we are using to generate our requirements.txt, `pip-tools`,
 # does not currently support conditional dependencies (;), we're adding our original
-# conditional dependencies here manually.
+# conditional dependencies here as comments, and manually adding them to our
+# generated requirements.txt file.
# The relevant pip-tools issue: https://github.com/jazzband/pip-tools/issues/435 -importlib==1.0.4 ; python_version < '2.7' -ordereddict==1.1 ; python_version < '2.7' -total-ordering==0.1.0 ; python_version < '2.7' +importlib ; python_version < '2.7' +ordereddict ; python_version < '2.7' +total-ordering ; python_version < '2.7' # only one version on pypi # Fabric makes use of this library, but doesn't bring it :( pypiwin32==219 ; sys_platform == 'win32' # ---------------------------------------------------------------------------------- -appdirs==1.4.3 # via setuptools args==0.1.0 # via clint asn1crypto==0.22.0 # via cryptography +backports.shutil_get_terminal_size==1.0.0 blinker==1.4 bottle==0.12.13 cachecontrol[filecache]==0.12.1 cffi==1.10.0 # via cryptography +click==4.1 +click_didyoumean==0.0.3 clint==0.5.1 +colorama==0.3.4 cryptography==1.8.1 # via paramiko decorator==4.0.11 # via networkx enum34==1.1.6 # via cryptography @@ -35,11 +37,13 @@ ipaddress==1.0.18 # via cryptography jinja2==2.8.1 jsonpickle==0.9.4 lockfile==0.12.2 # via cachecontrol +logutils==0.3.4.1 markupsafe==1.0 # via jinja2 msgpack-python==0.4.8 # via cachecontrol networkx==1.9.1 -packaging==16.8 # via cryptography, setuptools +packaging==16.8 # via cryptography paramiko==2.1.2 # via fabric +prettytable==0.7.2 pyasn1==0.2.3 # via paramiko pycparser==2.17 # via cffi pyparsing==2.2.0 # via packaging @@ -49,7 +53,7 @@ retrying==1.3.3 ruamel.ordereddict==0.4.9 # via ruamel.yaml ruamel.yaml==0.11.15 shortuuid==0.5.0 -six==1.10.0 # via cryptography, packaging, retrying, setuptools +six==1.10.0 # via cryptography, packaging, retrying sqlalchemy==1.1.6 wagon==0.6.0 wheel==0.29.0 # via wagon http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/setup.py ---------------------------------------------------------------------- diff --git a/setup.py b/setup.py index 3d72ebc..b64453a 100644 --- a/setup.py +++ b/setup.py @@ -61,7 +61,7 @@ except IOError: extras_require = {} -console_scripts = ['aria = aria.cli.cli:main'] +console_scripts = ['aria = aria.cli.main:main'] def _generate_user_options(command): http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/.pylintrc ---------------------------------------------------------------------- diff --git a/tests/.pylintrc b/tests/.pylintrc index 06409e9..eead6e8 100644 --- a/tests/.pylintrc +++ b/tests/.pylintrc @@ -77,7 +77,7 @@ confidence= # --enable=similarities". 
If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" -disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,redefined-builtin,no-self-use,missing-docstring,attribute-defined-outside-init,redefined-outer-name,import-error,redefined-variable-type,broad -except,protected-access,global-statement,too-many-locals,abstract-method,no-member +disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,redefined-builtin,no-self-use,missing-docstring,attribute-defined-outside-init,redefined-outer-name,import-error,redefined-variable-type,broad -except,protected-access,global-statement,too-many-locals,abstract-method,no-member,unused-argument [REPORTS] http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/cli/__init__.py ---------------------------------------------------------------------- diff --git a/tests/cli/__init__.py b/tests/cli/__init__.py new file mode 100644 index 0000000..ae1e83e --- /dev/null +++ b/tests/cli/__init__.py @@ -0,0 +1,14 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
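
(Illustrative aside, not part of the commit: the new CLI tests below drive commands in-process through click's test runner. A minimal, self-contained sketch of that mechanism is shown here; the `greet` command is made up purely for the example.)

    import click
    import click.testing

    @click.command()
    @click.argument('name')
    def greet(name):
        # Echo a greeting; stands in for a real CLI sub-command.
        click.echo('Hello %s' % name)

    # CliRunner invokes the command without spawning a process and captures its outcome.
    runner = click.testing.CliRunner()
    outcome = runner.invoke(greet, ['aria'])
    assert outcome.exit_code == 0
    assert 'Hello aria' in outcome.output
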
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/cli/base_test.py
----------------------------------------------------------------------
diff --git a/tests/cli/base_test.py b/tests/cli/base_test.py
new file mode 100644
index 0000000..9268f71
--- /dev/null
+++ b/tests/cli/base_test.py
@@ -0,0 +1,62 @@
+from StringIO import StringIO
+import logging
+
+import pytest
+
+import tests.cli.runner as runner
+from tests.cli.utils import setup_logger, MockStorage
+
+
+@pytest.fixture
+def mock_storage():
+    return MockStorage()
+
+
+@pytest.mark.usefixtures("redirect_logger")
+class TestCliBase(object):
+
+    @staticmethod
+    @pytest.fixture(scope="class")
+    def redirect_logger():
+
+        setup_logger(logger_name='aria.cli.main',
+                     handlers=[logging.StreamHandler(TestCliBase._logger_output)],
+                     logger_format='%(message)s')
+        yield
+        setup_logger(logger_name='aria.cli.main',
+                     handlers=_default_logger_config['handlers'],
+                     level=_default_logger_config['level'])
+
+    _logger_output = StringIO()
+
+    def invoke(self, command):
+        self._logger_output.truncate(0)
+        return runner.invoke(command)
+
+    @property
+    def logger_output_string(self):
+        return self._logger_output.getvalue()
+
+
+def assert_exception_raised(outcome, expected_exception, expected_msg=''):
+    assert isinstance(outcome.exception, expected_exception)
+    assert expected_msg == str(outcome.exception)
+
+
+# This exists because we wanted to use monkeypatch to replace a function with one that raises an
+# exception. Doing that with an in-place lambda turned out not to be trivial, so this helper
+# function is used instead.
+def raise_exception(exception, msg=''):
+
+    def inner(*args, **kwargs):
+        raise exception(msg)
+
+    return inner
+
+
+def get_default_logger_config():
+    logger = logging.getLogger('aria.cli.main')
+    return {'handlers': logger.handlers,
+            'level': logger.level}
+
+_default_logger_config = get_default_logger_config()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/cli/runner.py
----------------------------------------------------------------------
diff --git a/tests/cli/runner.py b/tests/cli/runner.py
new file mode 100644
index 0000000..1682f95
--- /dev/null
+++ b/tests/cli/runner.py
@@ -0,0 +1,11 @@
+import aria.cli.commands as commands
+import click.testing
+
+
+def invoke(command_string):
+    command_list = command_string.split()
+    command, sub, args = command_list[0], command_list[1], command_list[2:]
+    runner = click.testing.CliRunner()
+    outcome = runner.invoke(getattr(
+        getattr(commands, command), sub), args)
+    return outcome

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/cli/test_node_templates.py
----------------------------------------------------------------------
diff --git a/tests/cli/test_node_templates.py b/tests/cli/test_node_templates.py
new file mode 100644
index 0000000..f0ad539
--- /dev/null
+++ b/tests/cli/test_node_templates.py
@@ -0,0 +1,101 @@
+from mock import ANY
+import pytest
+
+from aria.cli.env import _Environment
+from tests.cli.base_test import TestCliBase, mock_storage  # pylint: disable=unused-import
+
+
+class TestNodeTemplatesShow(TestCliBase):
+
+    def test_no_properties_no_nodes(self, monkeypatch, mock_storage):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('node_templates show 1')
+        assert 'Showing node template 1' in self.logger_output_string
+        assert 'Node template properties:' in self.logger_output_string
+        assert 'No properties' in
self.logger_output_string + assert 'prop1' not in self.logger_output_string + assert 'value1' not in self.logger_output_string + assert 'No nodes' in self.logger_output_string + assert 'node1' not in self.logger_output_string + + def test_one_property_no_nodes(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('node_templates show 2') + assert 'Showing node template 2' in self.logger_output_string + assert 'Node template properties:' in self.logger_output_string + assert 'No properties' not in self.logger_output_string + assert 'prop1' in self.logger_output_string and 'value1' in self.logger_output_string + assert 'No nodes' in self.logger_output_string + assert 'node1' not in self.logger_output_string + + def test_no_properties_one_node(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('node_templates show 3') + assert 'Showing node template 3' in self.logger_output_string + assert 'Node template properties:' in self.logger_output_string + assert 'No properties' in self.logger_output_string + assert 'prop1' not in self.logger_output_string + assert 'value1' not in self.logger_output_string + assert 'No nodes' not in self.logger_output_string + assert 'node1' in self.logger_output_string + + def test_one_property_one_node(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('node_templates show 4') + assert 'Showing node template 4' in self.logger_output_string + assert 'Node template properties:' in self.logger_output_string + assert 'No properties' not in self.logger_output_string + assert 'prop1' in self.logger_output_string and 'value1' in self.logger_output_string + assert 'No nodes' not in self.logger_output_string + assert 'node1' in self.logger_output_string + + +class TestNodeTemplatesList(TestCliBase): + + @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [ + ('', '', 'service_template_name', 'asc'), + ('', ' --descending', 'service_template_name', 'desc'), + (' --sort-by name', '', 'name', 'asc'), + (' --sort-by name', ' --descending', 'name', 'desc') + ]) + def test_list_specified_service_template(self, monkeypatch, mock_storage, sort_by, order, + sort_by_in_output, order_in_output): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('node_templates list -t test_st{sort_by}{order}'.format(sort_by=sort_by, + order=order)) + assert 'Listing node templates for service template test_st...' in self.logger_output_string + assert 'Listing all node templates...' 
not in self.logger_output_string + + node_templates_list = mock_storage.node_template.list + node_templates_list.assert_called_once_with(sort={sort_by_in_output: order_in_output}, + filters={'service_template': ANY}) + assert 'Node templates:' in self.logger_output_string + assert 'test_st' in self.logger_output_string + assert 'test_nt' in self.logger_output_string + + @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [ + ('', '', 'service_template_name', 'asc'), + ('', ' --descending', 'service_template_name', 'desc'), + (' --sort-by name', '', 'name', 'asc'), + (' --sort-by name', ' --descending', 'name', 'desc') + ]) + def test_list_no_specified_service_template(self, monkeypatch, mock_storage, sort_by, order, + sort_by_in_output, order_in_output): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('node_templates list{sort_by}{order}'.format(sort_by=sort_by, order=order)) + assert 'Listing all node templates...' in self.logger_output_string + assert 'Listing node templates for service template test_st...' not in \ + self.logger_output_string + + node_templates_list = mock_storage.node_template.list + node_templates_list.assert_called_once_with(sort={sort_by_in_output: order_in_output}, + filters={}) + assert 'Node templates:' in self.logger_output_string + assert 'test_st' in self.logger_output_string + assert 'test_nt' in self.logger_output_string http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/cli/test_nodes.py ---------------------------------------------------------------------- diff --git a/tests/cli/test_nodes.py b/tests/cli/test_nodes.py new file mode 100644 index 0000000..9be97ca --- /dev/null +++ b/tests/cli/test_nodes.py @@ -0,0 +1,76 @@ +import pytest +from mock import ANY + +from aria.cli.env import _Environment +from tests.cli.base_test import TestCliBase, mock_storage # pylint: disable=unused-import + + +class TestNodesShow(TestCliBase): + + def test_no_attributes(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('nodes show 1') + assert 'Showing node 1' in self.logger_output_string + assert 'Node:' in self.logger_output_string + assert 'Node attributes:' in self.logger_output_string + assert 'No attributes' in self.logger_output_string + assert 'attribute1' not in self.logger_output_string + assert 'value1' not in self.logger_output_string + + def test_one_attribute(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('nodes show 2') + assert 'Showing node 2' in self.logger_output_string + assert 'Node:' in self.logger_output_string + assert 'Node attributes:' in self.logger_output_string + assert 'No attributes' not in self.logger_output_string + assert 'attribute1' in self.logger_output_string and 'value1' in self.logger_output_string + + +class TestNodesList(TestCliBase): + + @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [ + ('', '', 'service_name', 'asc'), + ('', ' --descending', 'service_name', 'desc'), + (' --sort-by name', '', 'name', 'asc'), + (' --sort-by name', ' --descending', 'name', 'desc') + ]) + def test_list_specified_service(self, monkeypatch, mock_storage, sort_by, order, + sort_by_in_output, order_in_output): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('nodes list -s test_s{sort_by}{order}'.format(sort_by=sort_by, + order=order)) + assert 'Listing nodes for 
service test_s...' in self.logger_output_string + assert 'Listing all nodes...' not in self.logger_output_string + + nodes_list = mock_storage.node.list + nodes_list.assert_called_once_with(sort={sort_by_in_output: order_in_output}, + filters={'service': ANY}) + assert 'Nodes:' in self.logger_output_string + assert 'test_s' in self.logger_output_string + assert 'test_n' in self.logger_output_string + + @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [ + ('', '', 'service_name', 'asc'), + ('', ' --descending', 'service_name', 'desc'), + (' --sort-by name', '', 'name', 'asc'), + (' --sort-by name', ' --descending', 'name', 'desc') + ]) + def test_list_no_specified_service(self, monkeypatch, mock_storage, sort_by, order, + sort_by_in_output, order_in_output): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('nodes list{sort_by}{order}'.format(sort_by=sort_by, + order=order)) + assert 'Listing nodes for service test_s...' not in self.logger_output_string + assert 'Listing all nodes...' in self.logger_output_string + + nodes_list = mock_storage.node.list + nodes_list.assert_called_once_with(sort={sort_by_in_output: order_in_output}, + filters={}) + assert 'Nodes:' in self.logger_output_string + assert 'test_s' in self.logger_output_string + assert 'test_n' in self.logger_output_string http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/cli/test_service_templates.py ---------------------------------------------------------------------- diff --git a/tests/cli/test_service_templates.py b/tests/cli/test_service_templates.py new file mode 100644 index 0000000..ef70c37 --- /dev/null +++ b/tests/cli/test_service_templates.py @@ -0,0 +1,174 @@ +import pytest + +from aria.cli import service_template_utils, csar +from aria.cli.env import _Environment +from aria.cli.exceptions import AriaCliError +from aria.core import Core +from aria.exceptions import AriaException +from aria.storage import exceptions as storage_exceptions +from tests.cli.base_test import TestCliBase, assert_exception_raised, raise_exception, mock_storage # pylint: disable=unused-import + + +class TestServiceTemplatesShow(TestCliBase): + + def test_show_no_services_no_description(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates show no_services_no_description') + + assert 'Showing service template no_services_no_description...' in self.logger_output_string + assert 'Description:' not in self.logger_output_string + assert 'Existing services:\n[]' in self.logger_output_string + + def test_show_no_services_yes_description(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates show no_services_yes_description') + + assert 'Showing service template no_services_yes_description...' in \ + self.logger_output_string + assert 'Description:\ntest_description' in self.logger_output_string + assert 'Existing services:\n[]' in self.logger_output_string + + def test_show_one_service_no_description(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates show one_service_no_description') + + assert 'Showing service template one_service_no_description...' 
in self.logger_output_string + assert 'Description:' not in self.logger_output_string + assert "Existing services:\n['test_s']" in self.logger_output_string + + def test_show_one_service_yes_description(self, monkeypatch, mock_storage): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates show one_service_yes_description') + + assert 'Showing service template one_service_yes_description...' in \ + self.logger_output_string + assert 'Description:\ntest_description' in self.logger_output_string + assert "Existing services:\n['test_s']" in self.logger_output_string + + +class TestServiceTemplatesList(TestCliBase): + + @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [ + ('', '', 'created_at', 'asc'), + ('', ' --descending', 'created_at', 'desc'), + (' --sort-by name', '', 'name', 'asc'), + (' --sort-by name', ' --descending', 'name', 'desc') + ]) + def test_all_sorting_combinations(self, monkeypatch, mock_storage, sort_by, order, + sort_by_in_output, order_in_output): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates list{sort_by}{order}'.format(sort_by=sort_by, order=order)) + + mock_storage.service_template.list.assert_called_with( + sort={sort_by_in_output: order_in_output}) + assert 'Listing all service templates...' in self.logger_output_string + assert 'test_st' in self.logger_output_string + + +class TestServiceTemplatesStore(TestCliBase): + + def test_store_no_exception(self, monkeypatch, mock_object): + + monkeypatch.setattr(Core, 'create_service_template', mock_object) + monkeypatch.setattr(service_template_utils, 'get', mock_object) + self.invoke('service_templates store stubpath test_st') + assert 'Storing service template test_st...' in self.logger_output_string + assert 'Service template test_st stored' in self.logger_output_string + + def test_store_raises_exception_resulting_from_name_uniqueness(self, monkeypatch, mock_object): + + monkeypatch.setattr(service_template_utils, 'get', mock_object) + monkeypatch.setattr(Core, + 'create_service_template', + raise_exception(storage_exceptions.NotFoundError, + msg='UNIQUE constraint failed')) + + assert_exception_raised( + self.invoke('service_templates store stubpath test_st'), + expected_exception=AriaCliError, + expected_msg='Could not store service template `test_st`\n' + 'There already a exists a service template with the same name') + assert 'Storing service template test_st...' in self.logger_output_string + + def test_store_raises_exception(self, monkeypatch, mock_object): + + monkeypatch.setattr(service_template_utils, 'get', mock_object) + monkeypatch.setattr(Core, + 'create_service_template', + raise_exception(storage_exceptions.NotFoundError)) + + assert_exception_raised( + self.invoke('service_templates store stubpath test_st'), + expected_exception=AriaCliError) + assert 'Storing service template test_st...' in self.logger_output_string + + +class TestServiceTemplatesDelete(TestCliBase): + + def test_delete_no_exception(self, monkeypatch, mock_object): + + monkeypatch.setattr(_Environment, 'model_storage', mock_object) + monkeypatch.setattr(Core, 'delete_service_template', mock_object) + self.invoke('service_templates delete test_st') + assert 'Deleting service template test_st...' 
in self.logger_output_string + assert 'Service template test_st deleted' in self.logger_output_string + + def test_delete_raises_exception(self, monkeypatch, mock_object): + + monkeypatch.setattr(_Environment, 'model_storage', mock_object) + monkeypatch.setattr(Core, + 'delete_service_template', + raise_exception(storage_exceptions.NotFoundError)) + + assert_exception_raised( + self.invoke('service_templates delete test_st'), + expected_exception=AriaCliError, + expected_msg='') + assert 'Deleting service template test_st...' in self.logger_output_string + + +class TestServiceTemplatesInputs(TestCliBase): + + def test_inputs_existing_inputs(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates inputs with_inputs') + assert 'Showing inputs for service template with_inputs...' in self.logger_output_string + assert 'input1' in self.logger_output_string and 'value1' in self.logger_output_string + + def test_inputs_no_inputs(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('service_templates inputs without_inputs') + assert 'Showing inputs for service template without_inputs...' in self.logger_output_string + assert 'No inputs' in self.logger_output_string + + +class TestServiceTemplatesValidate(TestCliBase): + + def test_validate_no_exception(self, monkeypatch, mock_object): + monkeypatch.setattr(Core, 'validate_service_template', mock_object) + monkeypatch.setattr(service_template_utils, 'get', mock_object) + self.invoke('service_templates validate stubpath') + assert 'Validating service template: stubpath' in self.logger_output_string + assert 'Service template validated successfully' in self.logger_output_string + + def test_validate_raises_exception(self, monkeypatch, mock_object): + monkeypatch.setattr(Core, 'validate_service_template', raise_exception(AriaException)) + monkeypatch.setattr(service_template_utils, 'get', mock_object) + assert_exception_raised( + self.invoke('service_templates validate stubpath'), + expected_exception=AriaCliError) + assert 'Validating service template: stubpath' in self.logger_output_string + + +class TestServiceTemplatesCreateArchive(TestCliBase): + + def test_create_archive_successful(self, monkeypatch, mock_object): + monkeypatch.setattr(csar, 'write', mock_object) + self.invoke('service_templates create_archive stubpath stubdest') + assert 'Creating a csar archive' in self.logger_output_string + assert 'Csar archive created at stubdest' in self.logger_output_string http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/cli/test_services.py ---------------------------------------------------------------------- diff --git a/tests/cli/test_services.py b/tests/cli/test_services.py new file mode 100644 index 0000000..116e449 --- /dev/null +++ b/tests/cli/test_services.py @@ -0,0 +1,178 @@ +import pytest +from mock import ANY +from aria.cli.exceptions import AriaCliError +from aria.cli.env import _Environment +from aria.core import Core +from aria.exceptions import (AriaException, DependentActiveExecutionsError, + DependentAvailableNodesError) +from aria.storage import exceptions as storage_exceptions +from tests.cli.base_test import TestCliBase, raise_exception, assert_exception_raised, mock_storage #pylint: disable=unused-import +from tests.mock.models import create_service, create_service_template + + +class TestServicesList(TestCliBase): + + @pytest.mark.parametrize('sort_by, order, 
sort_by_in_output, order_in_output', [ + ('', '', 'created_at', 'asc'), + ('', ' --descending', 'created_at', 'desc'), + (' --sort-by name', '', 'name', 'asc'), + (' --sort-by name', ' --descending', 'name', 'desc') + ]) + def test_list_no_specified_service_template(self, monkeypatch, mock_storage, sort_by, order, + sort_by_in_output, order_in_output): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('services list{sort_by}{order}'.format(sort_by=sort_by, order=order)) + assert 'Listing all services...' in self.logger_output_string + assert 'Listing services for service template' not in self.logger_output_string + + mock_storage.service.list.assert_called_once_with(sort={sort_by_in_output: order_in_output}, + filters={}) + assert 'Services:' in self.logger_output_string + assert 'test_st' in self.logger_output_string + assert 'test_s' in self.logger_output_string + + @pytest.mark.parametrize('sort_by, order, sort_by_in_output, order_in_output', [ + ('', '', 'created_at', 'asc'), + ('', ' --descending', 'created_at', 'desc'), + (' --sort-by name', '', 'name', 'asc'), + (' --sort-by name', ' --descending', 'name', 'desc') + ]) + def test_list_specified_service_template(self, monkeypatch, mock_storage, sort_by, order, + sort_by_in_output, order_in_output): + + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('services list -t test_st{sort_by}{order}'.format(sort_by=sort_by, order=order)) + assert 'Listing services for service template test_st...' in self.logger_output_string + assert 'Listing all services...' not in self.logger_output_string + + mock_storage.service.list.assert_called_once_with(sort={sort_by_in_output: order_in_output}, + filters={'service_template': ANY}) + assert 'Services:' in self.logger_output_string + assert 'test_st' in self.logger_output_string + assert 'test_s' in self.logger_output_string + + +class TestServicesCreate(TestCliBase): + + def test_create_no_exception(self, monkeypatch, mock_object): + + monkeypatch.setattr(_Environment, 'model_storage', mock_object) + + test_st = create_service_template('test_st') + mock_object.return_value = create_service(test_st, 'test_s') + monkeypatch.setattr(Core, 'create_service', mock_object) + self.invoke('services create -t test_st test_s') + + assert 'Creating new service from service template test_st...' in self.logger_output_string + assert "Service created. The service's name is test_s" in self.logger_output_string + + def test_store_raises_storage_error_resulting_from_name_uniqueness(self, monkeypatch, + mock_object): + monkeypatch.setattr(_Environment, 'model_storage', mock_object) + monkeypatch.setattr(Core, + 'create_service', + raise_exception(storage_exceptions.NotFoundError, + msg='UNIQUE constraint failed')) + assert_exception_raised( + self.invoke('services create -t test_st test_s'), + expected_exception=AriaCliError, + expected_msg='Could not store service `test_s`\n' + 'There already a exists a service with the same name') + + assert 'Creating new service from service template test_st...' in self.logger_output_string + assert "Service created. 
The service's name is test_s" not in self.logger_output_string + + def test_store_raises_other_storage_error(self, monkeypatch, mock_object): + monkeypatch.setattr(_Environment, 'model_storage', mock_object) + monkeypatch.setattr(Core, + 'create_service', + raise_exception(storage_exceptions.NotFoundError)) + + assert_exception_raised( + self.invoke('services create -t test_st test_s'), + expected_exception=AriaCliError) + + assert 'Creating new service from service template test_st...' in self.logger_output_string + assert "Service created. The service's name is test_s" not in self.logger_output_string + + def test_store_raises_aria_exception(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + monkeypatch.setattr(Core, + 'create_service', + raise_exception(AriaException, msg='error creating service `test_s`')) + + assert_exception_raised( + self.invoke('services create -t with_inputs test_s'), + expected_exception=AriaCliError, + expected_msg='error creating service `test_s`') + + assert 'Creating new service from service template with_inputs...' in \ + self.logger_output_string + assert 'error creating service `test_s`' in self.logger_output_string + assert 'input1' in self.logger_output_string and 'value1' in self.logger_output_string + assert "Service created. The service's name is test_s" not in self.logger_output_string + + +class TestServicesDelete(TestCliBase): + + def test_delete_no_exception(self, monkeypatch, mock_object): + + monkeypatch.setattr(_Environment, 'model_storage', mock_object) + monkeypatch.setattr(Core, 'delete_service', mock_object) + self.invoke('services delete test_s') + assert 'Deleting service test_s...' in self.logger_output_string + assert 'Service test_s deleted' in self.logger_output_string + + def test_delete_active_execution_error(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + assert_exception_raised( + self.invoke('services delete service_with_active_executions'), + expected_exception=DependentActiveExecutionsError, + expected_msg="Can't delete service test_s - there is an active " + "execution for this service. Active execution id: 1" + ) + assert 'Deleting service service_with_active_executions...' in self.logger_output_string + + def test_delete_available_nodes_error(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + assert_exception_raised( + self.invoke('services delete service_with_available_nodes'), + expected_exception=DependentAvailableNodesError, + expected_msg="Can't delete service test_s - " + "there are available nodes for this service. Available node ids: 1" + ) + assert 'Deleting service service_with_available_nodes...' in self.logger_output_string + + def test_delete_available_nodes_error_with_force(self, monkeypatch, mock_storage): + monkeypatch.setattr(_Environment, 'model_storage', mock_storage) + self.invoke('services delete service_with_available_nodes --force') + + assert mock_storage.service.delete.call_count == 1 + assert 'Deleting service service_with_available_nodes...' 
in self.logger_output_string
+        assert 'Service service_with_available_nodes deleted' in self.logger_output_string
+
+class TestServicesOutputs(TestCliBase):
+    pass
+
+
+class TestServicesInputs(TestCliBase):
+
+    def test_inputs_no_inputs(self, monkeypatch, mock_storage):
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('services inputs service_with_no_inputs')
+
+        assert 'Showing inputs for service service_with_no_inputs...' in self.logger_output_string
+        assert 'No inputs' in self.logger_output_string
+        assert 'input1' not in self.logger_output_string
+        assert 'value1' not in self.logger_output_string
+
+    def test_inputs_one_input(self, monkeypatch, mock_storage):
+
+        monkeypatch.setattr(_Environment, 'model_storage', mock_storage)
+        self.invoke('services inputs service_with_one_input')
+
+        assert 'Showing inputs for service service_with_one_input...' in self.logger_output_string
+        assert 'input1' in self.logger_output_string
+        assert 'value1' in self.logger_output_string
+        assert 'No inputs' not in self.logger_output_string

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/cli/utils.py
----------------------------------------------------------------------
diff --git a/tests/cli/utils.py b/tests/cli/utils.py
new file mode 100644
index 0000000..20fdb90
--- /dev/null
+++ b/tests/cli/utils.py
@@ -0,0 +1,175 @@
+import logging
+from aria.modeling import models
+from mock import MagicMock
+
+from tests.mock import models as mock_models
+
+
+def setup_logger(logger_name,
+                 level=logging.INFO,
+                 handlers=None,
+                 remove_existing_handlers=True,
+                 logger_format=None,
+                 propagate=True):
+    """
+    :param logger_name: Name of the logger.
+    :param level: Level for the logger (not for specific handler).
+    :param handlers: An optional list of handlers (formatter will be
+                     overridden); if None, only a StreamHandler for
+                     sys.stdout will be used.
+    :param remove_existing_handlers: Determines whether to remove existing
+                                     handlers before adding new ones.
+    :param logger_format: The format this logger will use.
+    :param propagate: Whether to propagate the message to the parent logger.
+    :return: A logger instance.
+ :rtype: logging.Logger + """ + + logger = logging.getLogger(logger_name) + + if remove_existing_handlers: + for handler in logger.handlers: + logger.removeHandler(handler) + + for handler in handlers: + if logger_format: + formatter = logging.Formatter(fmt=logger_format) + handler.setFormatter(formatter) + logger.addHandler(handler) + + logger.setLevel(level) + if not propagate: + logger.propagate = False + + return logger + + +class MockStorage(object): + + def __init__(self): + self.service_template = MockServiceTemplateStorage() + self.service = MockServiceStorage() + self.node_template = MockNodeTemplateStorage() + self.node = MockNodeStorage() + + +class MockServiceTemplateStorage(object): + + def __init__(self): + self.list = MagicMock(return_value=[mock_models.create_service_template('test_st')]) + + @staticmethod + def get_by_name(name): + st = mock_models.create_service_template('test_st') + if name == 'no_services_no_description': + pass + elif name == 'no_services_yes_description': + st.description = 'test_description' + elif name == 'one_service_no_description': + service = mock_models.create_service(st, 'test_s') + st.services = [service] + elif name == 'one_service_yes_description': + service = mock_models.create_service(st, 'test_s') + st.description = 'test_description' + st.services = [service] + elif name == 'with_inputs': + input = mock_models.create_parameter(name='input1', value='value1') + st.inputs = {'input1': input} + elif name == 'without_inputs': + st.inputs = {} + elif name == 'one_service': + service = mock_models.create_service(st, 'test_s') + st.services = [service] + return st + + +class MockServiceStorage(object): + + def __init__(self): + self.st = mock_models.create_service_template('test_st') + self.list = MagicMock(return_value=[mock_models.create_service(self.st, 'test_s')]) + self.delete = MagicMock() + + @staticmethod + def get(id): + test_st = mock_models.create_service_template('test_st') + test_s = mock_models.create_service(test_st, 'test_s') + if id == '1': + execution = mock_models.create_execution(test_s, status=models.Execution.STARTED) + execution.id = '1' + test_s.executions = [execution] + elif id == '2': + node_template = mock_models.create_node_template(service_template=test_st) + node = mock_models.create_node(name='test_node', + dependency_node_template=node_template, + service=test_s, + state=models.Node.STARTED) + node.id = '1' + return test_s + + @staticmethod + def get_by_name(name): + test_st = mock_models.create_service_template('test_st') + test_s = mock_models.create_service(test_st, 'test_s') + if name == 'service_with_active_executions': + m = MagicMock() + m.id = '1' + return m + elif name == 'service_with_available_nodes': + m = MagicMock() + m.id = '2' + return m + elif name == 'service_with_no_inputs': + pass + elif name == 'service_with_one_input': + input = mock_models.create_parameter(name='input1', value='value1') + test_s.inputs = {'input1': input} + + return test_s + + +class MockNodeTemplateStorage(object): + + def __init__(self): + self.st = mock_models.create_service_template('test_st') + self.list = MagicMock(return_value=[mock_models.create_node_template(self.st, 'test_nt')]) + + + @staticmethod + def get(id): + st = mock_models.create_service_template('test_st') + s = mock_models.create_service(st, 'test_s') + nt = mock_models.create_node_template(service_template=st, name='test_nt') + if id == '1': + pass + elif id == '2': + prop1 = mock_models.create_parameter('prop1', 'value1') + nt.properties = {'prop1': 
prop1} + elif id == '3': + mock_models.create_node('node1', nt, s) + elif id == '4': + prop1 = mock_models.create_parameter('prop1', 'value1') + nt.properties = {'prop1': prop1} + mock_models.create_node('node1', nt, s) + return nt + + +class MockNodeStorage(object): + + def __init__(self): + self.st = mock_models.create_service_template('test_st') + self.s = mock_models.create_service(self.st, 'test_s') + self.nt = mock_models.create_node_template(service_template=self.st, name='test_nt') + self.list = MagicMock(return_value=[mock_models.create_node('test_n', self.nt, self.s)]) + + @staticmethod + def get(id): + st = mock_models.create_service_template('test_st') + s = mock_models.create_service(st, 'test_s') + nt = mock_models.create_node_template(service_template=st, name='test_nt') + n = mock_models.create_node('test_n', nt, s) + if id == '1': + pass + elif id == '2': + n.runtime_properties = {'attribute1': 'value1'} + return n http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/conftest.py ---------------------------------------------------------------------- diff --git a/tests/conftest.py b/tests/conftest.py index c501eeb..8f2c273 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -18,6 +18,7 @@ import logging import pytest import aria +from aria import logger @pytest.fixture(scope='session', autouse=True) @@ -37,11 +38,10 @@ def logging_handler_cleanup(request): :return: """ def clear_logging_handlers(): - logged_ctx_names = [ - aria.orchestrator.context.workflow.WorkflowContext.__name__, - aria.orchestrator.context.operation.NodeOperationContext.__name__, - aria.orchestrator.context.operation.RelationshipOperationContext.__name__ - ] - for logger_name in logged_ctx_names: - logging.getLogger(logger_name).handlers = [] + logging.getLogger(logger.TASK_LOGGER_NAME).handlers = [] request.addfinalizer(clear_logging_handlers) + + +@pytest.fixture +def mock_object(mocker): + return mocker.MagicMock() http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/end2end/test_orchestrator.py ---------------------------------------------------------------------- diff --git a/tests/end2end/test_orchestrator.py b/tests/end2end/test_orchestrator.py deleted file mode 100644 index 4dfca44..0000000 --- a/tests/end2end/test_orchestrator.py +++ /dev/null @@ -1,63 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sys -import os - -from aria.orchestrator.runner import Runner -from aria.orchestrator.workflows.builtin import BUILTIN_WORKFLOWS -from aria.utils.imports import import_fullname -from aria.utils.collections import OrderedDict -from aria.cli.dry import convert_to_dry - -from tests.parser.service_templates import consume_node_cellar - - -WORKFLOW_POLICY_INTERNAL_PROPERTIES = ('implementation', 'dependencies') - - -def test_install(): - _workflow('install') - - -def test_custom(): - _workflow('maintenance_on') - - -def _workflow(workflow_name): - context, _ = consume_node_cellar() - - convert_to_dry(context.modeling.instance) - - # TODO: this logic will eventually stabilize and be part of the ARIA API, - # likely somewhere in aria.orchestrator.workflows - if workflow_name in BUILTIN_WORKFLOWS: - workflow_fn = import_fullname('aria.orchestrator.workflows.builtin.' + workflow_name) - inputs = {} - else: - workflow = context.modeling.instance.policies[workflow_name] - sys.path.append(os.path.dirname(str(context.presentation.location))) - workflow_fn = import_fullname(workflow.properties['implementation'].value) - inputs = OrderedDict([ - (k, v.value) for k, v in workflow.properties.iteritems() - if k not in WORKFLOW_POLICY_INTERNAL_PROPERTIES - ]) - - def _initialize_storage(model_storage): - context.modeling.store(model_storage) - - runner = Runner(workflow_name, workflow_fn, inputs, _initialize_storage, - lambda: context.modeling.instance.id) - runner.run() http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/end2end/test_tosca_simple_v1_0.py ---------------------------------------------------------------------- diff --git a/tests/end2end/test_tosca_simple_v1_0.py b/tests/end2end/test_tosca_simple_v1_0.py deleted file mode 100644 index 4658fc3..0000000 --- a/tests/end2end/test_tosca_simple_v1_0.py +++ /dev/null @@ -1,112 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from tests.parser.service_templates import (consume_use_case, consume_node_cellar) - - -# Use Cases - -def test_use_case_compute_1(): - consume_use_case('compute-1', 'instance') - - -def test_use_case_software_component_1(): - consume_use_case('software-component-1', 'instance') - - -def test_use_case_block_storage_1(): - consume_use_case('block-storage-1', 'instance') - - -def test_use_case_block_storage_2(): - consume_use_case('block-storage-2', 'instance') - - -def test_use_case_block_storage_3(): - consume_use_case('block-storage-3', 'instance') - - -def test_use_case_block_storage_4(): - consume_use_case('block-storage-4', 'instance') - - -def test_use_case_block_storage_5(): - consume_use_case('block-storage-5', 'instance') - - -def test_use_case_block_storage_6(): - consume_use_case('block-storage-6', 'instance') - - -def test_use_case_object_storage_1(): - consume_use_case('object-storage-1', 'instance') - - -def test_use_case_network_1(): - consume_use_case('network-1', 'instance') - - -def test_use_case_network_2(): - consume_use_case('network-2', 'instance') - - -def test_use_case_network_3(): - consume_use_case('network-3', 'instance') - - -def test_use_case_network_4(): - consume_use_case('network-4', 'instance') - - -def test_use_case_webserver_dbms_1(): - consume_use_case('webserver-dbms-1', 'template') - - -def test_use_case_webserver_dbms_2(): - consume_use_case('webserver-dbms-2', 'instance') - - -def test_use_case_multi_tier_1(): - consume_use_case('multi-tier-1', 'instance') - - -def test_use_case_container_1(): - consume_use_case('container-1', 'template') - - -# NodeCellar - -def test_node_cellar_validation(): - consume_node_cellar('validate') - - -def test_node_cellar_validation_no_cache(): - consume_node_cellar('validate', False) - - -def test_node_cellar_presentation(): - consume_node_cellar('presentation') - - -def test_node_cellar_model(): - consume_node_cellar('template') - - -def test_node_cellar_types(): - consume_node_cellar('types') - - -def test_node_cellar_instance(): - consume_node_cellar('instance') http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/fixtures.py ---------------------------------------------------------------------- diff --git a/tests/fixtures.py b/tests/fixtures.py new file mode 100644 index 0000000..3b1b9b5 --- /dev/null +++ b/tests/fixtures.py @@ -0,0 +1,70 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import shutil + +import pytest + +from aria import ( + application_model_storage, + application_resource_storage +) +from aria.orchestrator import plugin +from aria.storage import ( + sql_mapi, + filesystem_rapi +) + +from . 
import storage + + +@pytest.fixture +def inmemory_model(): + model = application_model_storage(sql_mapi.SQLAlchemyModelAPI, + initiator=storage.init_inmemory_model_storage) + yield model + storage.release_sqlite_storage(model) + + +@pytest.fixture +def fs_model(tmpdir): + result = application_model_storage(sql_mapi.SQLAlchemyModelAPI, + initiator_kwargs=dict(base_dir=str(tmpdir)), + initiator=sql_mapi.init_storage) + yield result + storage.release_sqlite_storage(result) + + +@pytest.fixture +def resource_storage(tmpdir): + result = tmpdir.join('resources') + result.mkdir() + resource_storage = application_resource_storage( + filesystem_rapi.FileSystemResourceAPI, + api_kwargs=dict(directory=str(result))) + yield resource_storage + shutil.rmtree(str(result)) + + +@pytest.fixture +def plugins_dir(tmpdir): + result = tmpdir.join('plugins') + result.mkdir() + return str(result) + + +@pytest.fixture +def plugin_manager(model, plugins_dir): + return plugin.PluginManager(model=model, plugins_dir=plugins_dir) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/mock/context.py ---------------------------------------------------------------------- diff --git a/tests/mock/context.py b/tests/mock/context.py index f943d7e..ac0a8a7 100644 --- a/tests/mock/context.py +++ b/tests/mock/context.py @@ -39,12 +39,17 @@ def simple(tmpdir, inmemory=False, context_kwargs=None, topology=None): api_kwargs=dict(directory=os.path.join(tmpdir, 'resources')) ) + service_id = topology(model_storage) + execution = models.create_execution(model_storage.service.get(service_id)) + model_storage.execution.put(execution) + final_kwargs = dict( name='simple_context', model_storage=model_storage, resource_storage=resource_storage, - service_id=topology(model_storage), + service_id=service_id, workflow_name=models.WORKFLOW_NAME, + execution_id=execution.id, task_max_attempts=models.TASK_MAX_ATTEMPTS, task_retry_interval=models.TASK_RETRY_INTERVAL ) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/mock/models.py ---------------------------------------------------------------------- diff --git a/tests/mock/models.py b/tests/mock/models.py index 1d29e2d..38c2b28 100644 --- a/tests/mock/models.py +++ b/tests/mock/models.py @@ -39,8 +39,8 @@ from aria.orchestrator.workflows.builtin.workflows import ( SERVICE_NAME = 'test_service_name' SERVICE_TEMPLATE_NAME = 'test_service_template_name' +NODE_TEMPLATE_NAME = 'test_node_template' WORKFLOW_NAME = 'test_workflow_name' -EXECUTION_NAME = 'test_execution_name' TASK_RETRY_INTERVAL = 1 TASK_MAX_ATTEMPTS = 1 @@ -81,6 +81,33 @@ def create_service(service_template, name=SERVICE_NAME): ) +def create_node_template(service_template, + name=NODE_TEMPLATE_NAME, + type=models.Type(variant='node', name='test_node_type'), + capability_templates=None, + requirement_templates=None, + interface_templates=None, + default_instances=1, + min_instances=1, + max_instances=1): + capability_templates = capability_templates or {} + requirement_templates = requirement_templates or [] + interface_templates = interface_templates or {} + node_template = models.NodeTemplate( + name=name, + type=type, + capability_templates=capability_templates, + requirement_templates=requirement_templates, + interface_templates=interface_templates, + default_instances=default_instances, + min_instances=min_instances, + max_instances=max_instances, + service_template=service_template) + + service_template.node_templates[node_template.name] = node_template + return node_template 
+ + def create_dependency_node_template(service_template, name=DEPENDENCY_NODE_TEMPLATE_NAME): node_type = service_template.node_types.get_descendant('test_node_type') capability_type = service_template.capability_types.get_descendant('test_capability_type') @@ -89,18 +116,12 @@ def create_dependency_node_template(service_template, name=DEPENDENCY_NODE_TEMPL name='capability', type=capability_type ) - - node_template = models.NodeTemplate( + return create_node_template( + service_template=service_template, name=name, type=node_type, - capability_templates=_dictify(capability_template), - default_instances=1, - min_instances=1, - max_instances=1, - service_template=service_template + capability_templates=_dictify(capability_template) ) - service_template.node_templates[node_template.name] = node_template - return node_template def create_dependent_node_template( @@ -111,29 +132,26 @@ def create_dependent_node_template( name='requirement', target_node_template=dependency_node_template ) - - node_template = models.NodeTemplate( + return create_node_template( + service_template=service_template, name=name, type=the_type, - default_instances=1, - min_instances=1, - max_instances=1, interface_templates=_dictify(get_standard_interface_template(service_template)), requirement_templates=[requirement_template], - service_template=service_template ) - service_template.node_templates[node_template.name] = node_template - return node_template -def create_node(name, dependency_node_template, service): +def create_node(name, dependency_node_template, service, state=models.Node.INITIAL, + runtime_properties=None): + runtime_properties = runtime_properties or {} + # tmp_runtime_properties = {'ip': '1.1.1.1'} node = models.Node( name=name, type=dependency_node_template.type, - runtime_properties={'ip': '1.1.1.1'}, + runtime_properties=runtime_properties, version=None, node_template=dependency_node_template, - state=models.Node.INITIAL, + state=state, scaling_groups=[], service=service, interfaces=get_standard_interface(service), @@ -168,6 +186,12 @@ def create_interface_template(service_template, interface_name, operation_name, def create_interface(service, interface_name, operation_name, operation_kwargs=None, interface_kwargs=None): the_type = service.service_template.interface_types.get_descendant('test_interface_type') + + if operation_kwargs and operation_kwargs.get('inputs'): + operation_kwargs['inputs'] = dict( + (input_name, models.Parameter.wrap(input_name, input_value)) + for input_name, input_value in operation_kwargs['inputs'].iteritems()) + operation = models.Operation( name=operation_name, **(operation_kwargs or {}) @@ -180,13 +204,14 @@ def create_interface(service, interface_name, operation_name, operation_kwargs=N ) -def create_execution(service): +def create_execution(service, status=models.Execution.PENDING): return models.Execution( service=service, - status=models.Execution.STARTED, + status=status, workflow_name=WORKFLOW_NAME, + created_at=datetime.utcnow(), started_at=datetime.utcnow(), - parameters=None + inputs={} ) @@ -214,6 +239,11 @@ def create_plugin_specification(name='test_plugin', version='0.1'): ) +def create_parameter(name, value): + p = models.Parameter() + return p.wrap(name, value) + + def _dictify(item): return dict(((item.name, item),)) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/mock/workflow.py ---------------------------------------------------------------------- diff --git a/tests/mock/workflow.py b/tests/mock/workflow.py new file 
mode 100644 index 0000000..b12b9fa --- /dev/null +++ b/tests/mock/workflow.py @@ -0,0 +1,26 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json + +from aria.orchestrator.decorators import workflow + + +@workflow +def mock_workflow(graph, ctx, output_path=None, **kwargs): # pylint: disable=unused-argument + if output_path: + # writes call arguments to the specified output file + with open(output_path, 'w') as f: + json.dump(kwargs, f) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/modeling/test_models.py ---------------------------------------------------------------------- diff --git a/tests/modeling/test_models.py b/tests/modeling/test_models.py index bd4eba4..d91249a 100644 --- a/tests/modeling/test_models.py +++ b/tests/modeling/test_models.py @@ -180,7 +180,7 @@ class TestServiceTemplate(object): @pytest.mark.parametrize( 'is_valid, description, created_at, updated_at, main_file_name', [ - (False, {}, now, now, '/path'), + (False, [], now, now, '/path'), (False, 'description', 'error', now, '/path'), (False, 'description', now, 'error', '/path'), (False, 'description', now, now, {}), @@ -253,7 +253,7 @@ class TestService(object): class TestExecution(object): @pytest.mark.parametrize( - 'is_valid, created_at, started_at, ended_at, error, is_system_workflow, parameters, ' + 'is_valid, created_at, started_at, ended_at, error, is_system_workflow, inputs, ' 'status, workflow_name', [ (False, m_cls, now, now, 'error', False, {}, Execution.STARTED, 'wf_name'), @@ -268,11 +268,11 @@ class TestExecution(object): (True, now, None, now, 'error', False, {}, Execution.STARTED, 'wf_name'), (True, now, now, None, 'error', False, {}, Execution.STARTED, 'wf_name'), (True, now, now, now, None, False, {}, Execution.STARTED, 'wf_name'), - (True, now, now, now, 'error', False, None, Execution.STARTED, 'wf_name'), + (True, now, now, now, 'error', False, {}, Execution.STARTED, 'wf_name'), ] ) def test_execution_model_creation(self, service_storage, is_valid, created_at, started_at, - ended_at, error, is_system_workflow, parameters, status, + ended_at, error, is_system_workflow, inputs, status, workflow_name): execution = _test_model( is_valid=is_valid, @@ -285,7 +285,7 @@ class TestExecution(object): ended_at=ended_at, error=error, is_system_workflow=is_system_workflow, - parameters=parameters, + inputs=inputs, status=status, workflow_name=workflow_name, )) @@ -299,7 +299,7 @@ class TestExecution(object): id='e_id', workflow_name='w_name', status=status, - parameters={}, + inputs={}, created_at=now, ) return execution http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/context/test_operation.py ---------------------------------------------------------------------- diff --git 
a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py index af8b454..c399474 100644 --- a/tests/orchestrator/context/test_operation.py +++ b/tests/orchestrator/context/test_operation.py @@ -69,16 +69,17 @@ def test_node_operation_task_execution(ctx, thread_executor): interface_name = 'Standard' operation_name = 'create' + inputs = {'putput': True} node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME) interface = mock.models.create_interface( node.service, interface_name, operation_name, - operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__)) + operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__), + inputs=inputs) ) node.interfaces[interface.name] = interface ctx.model.node.update(node) - inputs = {'putput': True} @workflow def basic_workflow(graph, **_): @@ -124,17 +125,18 @@ def test_relationship_operation_task_execution(ctx, thread_executor): interface_name = 'Configure' operation_name = 'post_configure' + inputs = {'putput': True} relationship = ctx.model.relationship.list()[0] interface = mock.models.create_interface( relationship.source_node.service, interface_name, operation_name, - operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__)), + operation_kwargs=dict(implementation=op_path(basic_operation, module_path=__name__), + inputs=inputs), ) relationship.interfaces[interface.name] = interface ctx.model.relationship.update(relationship) - inputs = {'putput': True} @workflow def basic_workflow(graph, **_): @@ -231,21 +233,21 @@ def test_plugin_workdir(ctx, thread_executor, tmpdir): plugin = mock.models.create_plugin() ctx.model.plugin.put(plugin) node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME) + filename = 'test_file' + content = 'file content' + inputs = {'filename': filename, 'content': content} interface = mock.models.create_interface( node.service, interface_name, operation_name, operation_kwargs=dict( implementation='{0}.{1}'.format(__name__, _test_plugin_workdir.__name__), - plugin=plugin) + plugin=plugin, + inputs=inputs) ) node.interfaces[interface.name] = interface ctx.model.node.update(node) - filename = 'test_file' - content = 'file content' - inputs = {'filename': filename, 'content': content} - @workflow def basic_workflow(graph, **_): graph.add_tasks(api.task.OperationTask.for_node(node=node, @@ -277,21 +279,22 @@ def test_node_operation_logging(ctx, executor): interface_name, operation_name = mock.operations.NODE_OPERATIONS_INSTALL[0] node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME) + + inputs = { + 'op_start': 'op_start', + 'op_end': 'op_end', + } interface = mock.models.create_interface( node.service, interface_name, operation_name, operation_kwargs=dict( - implementation=op_path(logged_operation, module_path=__name__)) + implementation=op_path(logged_operation, module_path=__name__), + inputs=inputs) ) node.interfaces[interface.name] = interface ctx.model.node.update(node) - inputs = { - 'op_start': 'op_start', - 'op_end': 'op_end', - } - @workflow def basic_workflow(graph, **_): graph.add_tasks( @@ -311,20 +314,20 @@ def test_relationship_operation_logging(ctx, executor): interface_name, operation_name = mock.operations.RELATIONSHIP_OPERATIONS_INSTALL[0] relationship = ctx.model.relationship.list()[0] + inputs = { + 'op_start': 'op_start', + 'op_end': 'op_end', + } interface = mock.models.create_interface( relationship.source_node.service, interface_name, operation_name, - 
operation_kwargs=dict(implementation=op_path(logged_operation, module_path=__name__)) + operation_kwargs=dict(implementation=op_path(logged_operation, module_path=__name__), + inputs=inputs) ) relationship.interfaces[interface.name] = interface ctx.model.relationship.update(relationship) - inputs = { - 'op_start': 'op_start', - 'op_end': 'op_end', - } - @workflow def basic_workflow(graph, **_): graph.add_tasks( http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/context/test_resource_render.py ---------------------------------------------------------------------- diff --git a/tests/orchestrator/context/test_resource_render.py b/tests/orchestrator/context/test_resource_render.py index 696e9b3..8249086 100644 --- a/tests/orchestrator/context/test_resource_render.py +++ b/tests/orchestrator/context/test_resource_render.py @@ -64,9 +64,9 @@ def resources(tmpdir, ctx): implicit_ctx_template_path.write(_IMPLICIT_CTX_TEMPLATE) variables_template_path = tmpdir.join(_VARIABLES_TEMPLATE_PATH) variables_template_path.write(_VARIABLES_TEMPLATE) - ctx.resource.deployment.upload(entry_id='1', - source=str(implicit_ctx_template_path), - path=_IMPLICIT_CTX_TEMPLATE_PATH) - ctx.resource.deployment.upload(entry_id='1', - source=str(variables_template_path), - path=_VARIABLES_TEMPLATE_PATH) + ctx.resource.service.upload(entry_id='1', + source=str(implicit_ctx_template_path), + path=_IMPLICIT_CTX_TEMPLATE_PATH) + ctx.resource.service.upload(entry_id='1', + source=str(variables_template_path), + path=_VARIABLES_TEMPLATE_PATH) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/context/test_serialize.py ---------------------------------------------------------------------- diff --git a/tests/orchestrator/context/test_serialize.py b/tests/orchestrator/context/test_serialize.py index 8b809b3..f4acc36 100644 --- a/tests/orchestrator/context/test_serialize.py +++ b/tests/orchestrator/context/test_serialize.py @@ -15,8 +15,6 @@ import pytest -import aria -from aria.storage import sql_mapi from aria.orchestrator.workflows import api from aria.orchestrator.workflows.core import engine from aria.orchestrator.workflows.executor import process @@ -34,7 +32,7 @@ def test_serialize_operation_context(context, executor, tmpdir): test_file = tmpdir.join(TEST_FILE_NAME) test_file.write(TEST_FILE_CONTENT) resource = context.resource - resource.blueprint.upload(TEST_FILE_ENTRY_ID, str(test_file)) + resource.service_template.upload(TEST_FILE_ENTRY_ID, str(test_file)) graph = _mock_workflow(ctx=context) # pylint: disable=no-value-for-parameter eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph) eng.execute() @@ -72,7 +70,7 @@ def _mock_operation(ctx): # a correct ctx.deployment.name tells us we kept the correct deployment_id assert ctx.service.name == mock.models.SERVICE_NAME # Here we test that the resource storage was properly re-created - test_file_content = ctx.resource.blueprint.read(TEST_FILE_ENTRY_ID, TEST_FILE_NAME) + test_file_content = ctx.resource.service_template.read(TEST_FILE_ENTRY_ID, TEST_FILE_NAME) assert test_file_content == TEST_FILE_CONTENT # a non empty plugin workdir tells us that we kept the correct base_workdir assert ctx.plugin_workdir is not None @@ -98,10 +96,3 @@ def context(tmpdir): yield result storage.release_sqlite_storage(result.model) - - -@pytest.fixture -def memory_model_storage(): - result = aria.application_model_storage(sql_mapi.SQLAlchemyModelAPI) - yield result - 
storage.release_sqlite_storage(result) http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/context/test_toolbelt.py ---------------------------------------------------------------------- diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py index cf82127..213d964 100644 --- a/tests/orchestrator/context/test_toolbelt.py +++ b/tests/orchestrator/context/test_toolbelt.py @@ -76,15 +76,16 @@ def test_host_ip(workflow_context, executor): interface_name = 'Standard' operation_name = 'create' _, dependency_node, _, _, _ = _get_elements(workflow_context) + inputs = {'putput': True} interface = mock.models.create_interface( dependency_node.service, interface_name=interface_name, operation_name=operation_name, - operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__)) + operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__), + inputs=inputs) ) dependency_node.interfaces[interface.name] = interface workflow_context.model.node.update(dependency_node) - inputs = {'putput': True} @workflow def basic_workflow(graph, **_): @@ -106,17 +107,17 @@ def test_relationship_tool_belt(workflow_context, executor): interface_name = 'Configure' operation_name = 'post_configure' _, _, _, _, relationship = _get_elements(workflow_context) + inputs = {'putput': True} interface = mock.models.create_interface( relationship.source_node.service, interface_name=interface_name, operation_name=operation_name, - operation_kwargs=dict(implementation=op_path(relationship_operation, module_path=__name__)) + operation_kwargs=dict(implementation=op_path(relationship_operation, module_path=__name__), + inputs=inputs) ) relationship.interfaces[interface.name] = interface workflow_context.model.relationship.update(relationship) - inputs = {'putput': True} - @workflow def basic_workflow(graph, **_): graph.add_tasks( http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/aaf66420/tests/orchestrator/context/test_workflow.py ---------------------------------------------------------------------- diff --git a/tests/orchestrator/context/test_workflow.py b/tests/orchestrator/context/test_workflow.py index fa1f387..3c35435 100644 --- a/tests/orchestrator/context/test_workflow.py +++ b/tests/orchestrator/context/test_workflow.py @@ -35,7 +35,7 @@ class TestWorkflowContext(object): assert execution.service_template == storage.service_template.get_by_name( models.SERVICE_TEMPLATE_NAME) assert execution.status == storage.execution.model_cls.PENDING - assert execution.parameters == {} + assert execution.inputs == {} assert execution.created_at <= datetime.utcnow() def test_subsequent_workflow_context_creation_do_not_fail(self, storage): @@ -49,11 +49,13 @@ class TestWorkflowContext(object): :param storage: :return WorkflowContext: """ + service = storage.service.get_by_name(models.SERVICE_NAME) return context.workflow.WorkflowContext( name='simple_context', model_storage=storage, resource_storage=None, - service_id=storage.service.get_by_name(models.SERVICE_NAME).id, + service_id=service, + execution_id=storage.execution.list(filters=dict(service=service))[0].id, workflow_name=models.WORKFLOW_NAME, task_max_attempts=models.TASK_MAX_ATTEMPTS, task_retry_interval=models.TASK_RETRY_INTERVAL @@ -66,6 +68,8 @@ def storage(): sql_mapi.SQLAlchemyModelAPI, initiator=test_storage.init_inmemory_model_storage) workflow_storage.service_template.put(models.create_service_template()) service_template = 
workflow_storage.service_template.get_by_name(models.SERVICE_TEMPLATE_NAME) - workflow_storage.service.put(models.create_service(service_template)) + service = models.create_service(service_template) + workflow_storage.service.put(service) + workflow_storage.execution.put(models.create_execution(service)) yield workflow_storage test_storage.release_sqlite_storage(workflow_storage)
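
For readers following the test changes above: the new fixtures and mock helpers introduced in this commit are meant to be combined through pytest's fixture injection. Below is a minimal, illustrative sketch of such a test; it is not part of the ARIA-48 change set. It assumes the helper signatures exactly as they appear in the diff (tests/fixtures.py's inmemory_model, and tests/mock/models.py's create_service_template, create_service, create_node_template, create_node and create_parameter), and it assumes the tests.mock / tests.fixtures import paths implied by the repository layout shown above.

# Illustrative sketch only -- not part of the commit above.
# Assumes the fixture and helper signatures shown in the diff.
from tests import mock
from tests.fixtures import inmemory_model  # noqa: F401 -- imported so pytest can inject it


def test_node_template_with_property(inmemory_model):  # noqa: F811
    # Build the usual mock hierarchy: service template -> service -> node template -> node.
    service_template = mock.models.create_service_template('test_st')
    service = mock.models.create_service(service_template, 'test_s')
    node_template = mock.models.create_node_template(service_template=service_template,
                                                     name='test_nt')

    # Properties are wrapped as Parameter models, mirroring create_parameter() in the diff.
    node_template.properties = {'prop1': mock.models.create_parameter('prop1', 'value1')}

    # create_node() now accepts runtime_properties explicitly instead of hard-coding them.
    node = mock.models.create_node('test_n', node_template, service,
                                   runtime_properties={'attribute1': 'value1'})

    # Persist everything through the in-memory model storage provided by the fixture.
    inmemory_model.service_template.put(service_template)
    inmemory_model.service.put(service)
    inmemory_model.node_template.put(node_template)
    inmemory_model.node.put(node)

    assert inmemory_model.node.get_by_name('test_n').runtime_properties == \
        {'attribute1': 'value1'}

Relatedly, note that create_interface() in tests/mock/models.py now wraps any operation inputs into Parameter objects (via models.Parameter.wrap), which is why the test_operation.py and test_toolbelt.py hunks above move the inputs dict into operation_kwargs at interface creation time rather than passing it to the task afterwards.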