From: jluniya@apache.org
To: commits@ambari.apache.org
Subject: ambari git commit: AMBARI-15655: Remove remaining hdp specific logic from resource_management library (jluniya)
Date: Mon, 4 Apr 2016 20:45:58 +0000 (UTC)

Repository: ambari
Updated Branches:
  refs/heads/trunk 825d557ae -> 62e406a61

AMBARI-15655: Remove remaining hdp specific logic from resource_management library (jluniya)

Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/62e406a6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/62e406a6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/62e406a6

Branch: refs/heads/trunk
Commit: 62e406a6143ef83991478c8518c688b2b31f0367
Parents: 825d557
Author: Jayush Luniya
Authored: Mon Apr 4 13:46:06 2016 -0700
Committer: Jayush Luniya
Committed: Mon Apr 4 13:46:06 2016 -0700

----------------------------------------------------------------------
 .../TestListAmbariManagedRepos.py | 8 +--
 .../libraries/functions/conf_select.py | 22 ++++----
 .../libraries/functions/get_lzo_packages.py | 11 ++--
 .../functions/list_ambari_managed_repos.py | 8 +--
 .../libraries/functions/stack_features.py | 6 +--
 .../libraries/functions/stack_select.py | 8 ++-
 .../libraries/functions/version_select_util.py | 53 +++++++++-----------
 .../libraries/script/script.py | 13 ++---
 .../1.0.0.2.3/package/scripts/mahout_client.py | 3 +-
 .../MAHOUT/1.0.0.2.3/package/scripts/params.py | 5 +-
 .../custom_actions/scripts/install_packages.py | 2 +-
 .../src/test/python/TestVersionSelectUtil.py | 16 +++---
 .../stacks/2.0.6/HIVE/test_hive_server.py | 4 +-
 .../stacks/2.0.6/YARN/test_historyserver.py | 3 +-
 .../stacks/2.1/FALCON/test_falcon_server.py | 2 +-
 .../stacks/2.2/RANGER/test_ranger_usersync.py | 4 +-
 16 files changed, 82 insertions(+), 86 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/62e406a6/ambari-agent/src/test/python/resource_management/TestListAmbariManagedRepos.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestListAmbariManagedRepos.py b/ambari-agent/src/test/python/resource_management/TestListAmbariManagedRepos.py
index 8c590da..d77e7ba 100644
--- 
a/ambari-agent/src/test/python/resource_management/TestListAmbariManagedRepos.py +++ b/ambari-agent/src/test/python/resource_management/TestListAmbariManagedRepos.py @@ -51,7 +51,7 @@ class TestListAmbariManagedRepos(TestCase): ], [] ] - res = list_ambari_managed_repos() + res = list_ambari_managed_repos('HDP') self.assertEquals(glob_mock.call_args_list[0][0][0], "/etc/apt/sources.list.d/HDP*") self.assertEquals(res, ['HDP-1.1.1', 'HDP-1.1.2', 'HDP-1.1.3', 'HDP-UTILS-1.1.3']) self.assertTrue(glob_mock.call_count > 1) @@ -78,7 +78,7 @@ class TestListAmbariManagedRepos(TestCase): ], [] ] - res = list_ambari_managed_repos() + res = list_ambari_managed_repos('HDP') self.assertEquals(glob_mock.call_args_list[0][0][0], "/etc/yum.repos.d/HDP*") self.assertEquals(res, ['HDP-1.1.1', 'HDP-1.1.2', 'HDP-1.1.3', 'HDP-UTILS-1.1.3']) self.assertTrue(glob_mock.call_count > 1) @@ -106,7 +106,7 @@ class TestListAmbariManagedRepos(TestCase): ], [] ] - res = list_ambari_managed_repos() + res = list_ambari_managed_repos('HDP') self.assertEquals(glob_mock.call_args_list[0][0][0], "/etc/zypp/repos.d/HDP*") self.assertEquals(res, ['HDP-1.1.1', 'HDP-1.1.2', 'HDP-1.1.3', 'HDP-UTILS-1.1.3']) self.assertTrue(glob_mock.call_count > 1) @@ -121,7 +121,7 @@ class TestListAmbariManagedRepos(TestCase): is_redhat_family_mock.return_value = False is_suse_family_mock.return_value = False try: - list_ambari_managed_repos() + list_ambari_managed_repos('HDP') self.fail("Should throw a Fail") except Fail: pass # Expected http://git-wip-us.apache.org/repos/asf/ambari/blob/62e406a6/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py ---------------------------------------------------------------------- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py index d4e88e7..a85befb 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py @@ -36,8 +36,9 @@ from resource_management.core.resources.system import Link from resource_management.libraries.functions.default import default from resource_management.libraries.functions import stack_tools from resource_management.core.exceptions import Fail -from resource_management.libraries.functions.version import compare_versions, format_stack_version from resource_management.core.shell import as_sudo +from resource_management.libraries.functions.stack_features import check_stack_feature +from resource_management.libraries.functions import StackFeature STACK_ROOT_PATTERN = "{{ stack_root }}" @@ -199,13 +200,7 @@ def _get_cmd(command, package, version): return ('ambari-python-wrap', conf_selector_path, command, '--package', package, '--stack-version', version, '--conf-version', '0') def _valid(stack_name, package, ver): - if stack_name != "HDP": - return False - - if version.compare_versions(version.format_stack_version(ver), "2.3.0.0") < 0: - return False - - return True + return (ver and check_stack_feature(StackFeature.CONFIG_VERSIONING, ver)) def get_package_dirs(): """ @@ -332,15 +327,16 @@ def get_hadoop_conf_dir(force_latest_on_upgrade=False): hadoop_conf_dir = "/etc/hadoop/conf" stack_name = None stack_root = Script.get_stack_root() + stack_version = Script.get_stack_version() version = None allow_setting_conf_select_symlink = False if not Script.in_stack_upgrade(): # During normal operation, the 
HDP stack must be 2.3 or higher - if Script.is_stack_greater_or_equal("2.2"): + if stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version): hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf") - if Script.is_stack_greater_or_equal("2.3"): + if stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, stack_version): hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf") stack_name = default("/hostLevelParams/stack_name", None) version = default("/commandParams/version", None) @@ -365,11 +361,11 @@ def get_hadoop_conf_dir(force_latest_on_upgrade=False): EU/RU | 2.3 | 2.3.* | Any | Use /$version/hadoop/conf, which should be a symlink destination ''' - # The method "is_stack_greater_or_equal" uses "stack_version" which is the desired stack, e.g., 2.2 or 2.3 + # The "stack_version" is the desired stack, e.g., 2.2 or 2.3 # In an RU, it is always the desired stack, and doesn't change even during the Downgrade! # In an RU Downgrade from HDP 2.3 to 2.2, the first thing we do is # rm /etc/[component]/conf and then mv /etc/[component]/conf.backup /etc/[component]/conf - if Script.is_stack_greater_or_equal("2.2"): + if stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version): hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf") # This contains the "version", including the build number, that is actually used during a stack upgrade and @@ -384,7 +380,7 @@ def get_hadoop_conf_dir(force_latest_on_upgrade=False): Logger.info("In the middle of a stack upgrade/downgrade for Stack {0} and destination version {1}, determining which hadoop conf dir to use.".format(stack_name, version)) # This is the version either upgrading or downgrading to. - if compare_versions(format_stack_version(version), "2.3.0.0") >= 0: + if version and check_stack_feature(StackFeature.CONFIG_VERSIONING, version): # Determine if has been run and if not, then use the current # hdp version until this component is upgraded. 
if not force_latest_on_upgrade: http://git-wip-us.apache.org/repos/asf/ambari/blob/62e406a6/ambari-common/src/main/python/resource_management/libraries/functions/get_lzo_packages.py ---------------------------------------------------------------------- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/get_lzo_packages.py b/ambari-common/src/main/python/resource_management/libraries/functions/get_lzo_packages.py index 870bb0c..e189d62 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/get_lzo_packages.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/get_lzo_packages.py @@ -22,9 +22,10 @@ Ambari Agent __all__ = ["get_lzo_packages"] from ambari_commons.os_check import OSCheck -from resource_management.libraries.functions.version import compare_versions, format_stack_version -from resource_management.libraries.functions.format import format +from resource_management.libraries.functions.stack_features import check_stack_feature +from resource_management.libraries.functions import StackFeature +# TODO: Make list of lzo packages stack driven def get_lzo_packages(stack_version_unformatted): lzo_packages = [] @@ -32,12 +33,8 @@ def get_lzo_packages(stack_version_unformatted): lzo_packages += ["lzo", "hadoop-lzo-native"] elif OSCheck.is_ubuntu_family(): lzo_packages += ["liblzo2-2"] - - underscored_version = stack_version_unformatted.replace('.', '_') - dashed_version = stack_version_unformatted.replace('.', '-') - stack_version_formatted = format_stack_version(stack_version_unformatted) - if stack_version_formatted != "" and compare_versions(stack_version_formatted, '2.2') >= 0: + if stack_version_unformatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_unformatted): lzo_packages += ["hadooplzo_*"] else: lzo_packages += ["hadoop-lzo"] http://git-wip-us.apache.org/repos/asf/ambari/blob/62e406a6/ambari-common/src/main/python/resource_management/libraries/functions/list_ambari_managed_repos.py ---------------------------------------------------------------------- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/list_ambari_managed_repos.py b/ambari-common/src/main/python/resource_management/libraries/functions/list_ambari_managed_repos.py index bab6374..dce6065 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/list_ambari_managed_repos.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/list_ambari_managed_repos.py @@ -26,14 +26,14 @@ import glob from ambari_commons.os_check import OSCheck from resource_management.core.exceptions import Fail -# TODO : get it dynamically from the server -repository_names = ["HDP", "HDP-UTILS"] - -def list_ambari_managed_repos(): +def list_ambari_managed_repos(stack_name): """ Lists all repositories that are present at host """ + stack_name = stack_name.upper() + # TODO : get it dynamically from the server + repository_names = [stack_name, stack_name + "-UTILS" ] if OSCheck.is_ubuntu_family(): repo_dir = '/etc/apt/sources.list.d/' elif OSCheck.is_redhat_family(): # Centos/RHEL 5/6 http://git-wip-us.apache.org/repos/asf/ambari/blob/62e406a6/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py ---------------------------------------------------------------------- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py 
b/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py index 41508b4..4fc3489 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py @@ -20,9 +20,6 @@ limitations under the License. # simplejson is much faster comparing to Python 2.6 json module and has the same functions set. import ambari_simplejson as json -from resource_management.libraries.script import Script -from resource_management.libraries.functions.default import default -from resource_management.libraries.functions.version import compare_versions _DEFAULT_STACK_FEATURES = { "stack_features": [ @@ -190,6 +187,9 @@ def check_stack_feature(stack_feature, stack_version): :param stack_version: Version of the stack :return: Will return True if successful, otherwise, False. """ + + from resource_management.libraries.functions.default import default + from resource_management.libraries.functions.version import compare_versions stack_features_config = default("/configurations/cluster-env/stack_features", None) data = _DEFAULT_STACK_FEATURES http://git-wip-us.apache.org/repos/asf/ambari/blob/62e406a6/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py ---------------------------------------------------------------------- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py index 02cd7ca..6308a81 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py @@ -32,6 +32,8 @@ from resource_management.libraries.functions import stack_tools from resource_management.core.shell import call from resource_management.libraries.functions.version import format_stack_version from resource_management.libraries.functions.version_select_util import get_versions_from_stack_root +from resource_management.libraries.functions.stack_features import check_stack_feature +from resource_management.libraries.functions import StackFeature STACK_SELECT_PREFIX = 'ambari-python-wrap' @@ -198,13 +200,15 @@ def get_hadoop_dir(target, force_latest_on_upgrade=False): without the stack version built into the path, such as /current/hadoop-client """ stack_root = Script.get_stack_root() + stack_version = Script.get_stack_version() if not target in HADOOP_DIR_DEFAULTS: raise Fail("Target {0} not defined".format(target)) hadoop_dir = HADOOP_DIR_DEFAULTS[target] - if Script.is_stack_greater_or_equal("2.2"): + formatted_stack_version = format_stack_version(stack_version) + if formatted_stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, formatted_stack_version): # home uses a different template if target == "home": hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, "current", "hadoop-client") @@ -248,7 +252,7 @@ def get_hadoop_dir_for_stack_version(target, stack_version): hadoop_dir = HADOOP_DIR_DEFAULTS[target] formatted_stack_version = format_stack_version(stack_version) - if Script.is_stack_greater_or_equal_to(formatted_stack_version, "2.2"): + if formatted_stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, formatted_stack_version): # home uses a different template if target == "home": hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, stack_version, "hadoop") 
http://git-wip-us.apache.org/repos/asf/ambari/blob/62e406a6/ambari-common/src/main/python/resource_management/libraries/functions/version_select_util.py ---------------------------------------------------------------------- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/version_select_util.py b/ambari-common/src/main/python/resource_management/libraries/functions/version_select_util.py index 9122a0e..615a0cd 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/version_select_util.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/version_select_util.py @@ -46,37 +46,34 @@ def get_component_version(stack_name, component_name): out = None code = -1 - if stack_name == "HDP": - tmpfile = tempfile.NamedTemporaryFile() - - get_stack_comp_version_cmd = "" + if not stack_name: + Logger.error("Stack name not provided") + elif not component_name: + Logger.error("Component name not provided") + else: (stack_selector_name, stack_selector_path, stack_selector_package) = stack_tools.get_stack_tool(stack_tools.STACK_SELECTOR_NAME) - try: - # This is necessary because Ubuntu returns "stdin: is not a tty", see AMBARI-8088 - with open(tmpfile.name, 'r') as file: - get_stack_comp_version_cmd = '%s status %s > %s' % (stack_selector_path, component_name, tmpfile.name) - code, stdoutdata = shell.call(get_stack_comp_version_cmd, quiet=True) - out = file.read() + if stack_selector_name and stack_selector_path and os.path.exists(stack_selector_path): + tmpfile = tempfile.NamedTemporaryFile() - if code != 0 or out is None: - raise Exception("Code is nonzero or output is empty") + get_stack_comp_version_cmd = "" + try: + # This is necessary because Ubuntu returns "stdin: is not a tty", see AMBARI-8088 + with open(tmpfile.name, 'r') as file: + get_stack_comp_version_cmd = '%s status %s > %s' % (stack_selector_path, component_name, tmpfile.name) + code, stdoutdata = shell.call(get_stack_comp_version_cmd, quiet=True) + out = file.read() - Logger.debug("Command: %s\nOutput: %s" % (get_stack_comp_version_cmd, str(out))) - matches = re.findall(r"([\d\.]+\-\d+)", out) - version = matches[0] if matches and len(matches) > 0 else None - except Exception, e: - Logger.error("Could not determine stack version for component %s by calling '%s'. Return Code: %s, Output: %s." % - (component_name, get_stack_comp_version_cmd, str(code), str(out))) - elif stack_name == "HDPWIN": - pass - elif stack_name == "GlusterFS": - pass - elif stack_name == "PHD": - pass - elif stack_name == "BIGTOP": - pass - else: - Logger.error("Could not find a stack for stack name: %s" % str(stack_name)) + if code != 0 or out is None: + raise Exception("Code is nonzero or output is empty") + + Logger.debug("Command: %s\nOutput: %s" % (get_stack_comp_version_cmd, str(out))) + matches = re.findall(r"([\d\.]+\-\d+)", out) + version = matches[0] if matches and len(matches) > 0 else None + except Exception, e: + Logger.error("Could not determine stack version for component %s by calling '%s'. Return Code: %s, Output: %s." 
% + (component_name, get_stack_comp_version_cmd, str(code), str(out))) + else: + Logger.error("Could not find stack selector for stack: %s" % str(stack_name)) return version http://git-wip-us.apache.org/repos/asf/ambari/blob/62e406a6/ambari-common/src/main/python/resource_management/libraries/script/script.py ---------------------------------------------------------------------- diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py index 4cbf2d7..31d7b2a 100644 --- a/ambari-common/src/main/python/resource_management/libraries/script/script.py +++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py @@ -49,6 +49,8 @@ from resource_management.libraries.functions import packages_analyzer from resource_management.libraries.script.config_dictionary import ConfigDictionary, UnknownConfiguration from resource_management.core.resources.system import Execute from contextlib import closing +from resource_management.libraries.functions.stack_features import check_stack_feature +from resource_management.libraries.functions.constants import StackFeature import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set. @@ -179,7 +181,7 @@ class Script(object): from resource_management.libraries.functions.default import default stack_version_unformatted = str(default("/hostLevelParams/stack_version", "")) stack_version_formatted = format_stack_version(stack_version_unformatted) - if stack_version_formatted != "" and compare_versions(stack_version_formatted, '2.2') >= 0: + if stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted): if command_name.lower() == "status": request_version = default("/commandParams/request_version", None) if request_version is not None: @@ -360,14 +362,10 @@ class Script(object): @staticmethod def get_stack_version(): """ - Gets the normalized version of the HDP stack in the form #.#.#.# if it is + Gets the normalized version of the stack in the form #.#.#.# if it is present on the configurations sent. 
- :return: a normalized HDP stack version or None + :return: a normalized stack version or None """ - stack_name = Script.get_stack_name() - if stack_name is None or stack_name.upper() not in ["HDP", "HDPWIN"]: - return None - config = Script.get_config() if 'hostLevelParams' not in config or 'stack_version' not in config['hostLevelParams']: return None @@ -379,7 +377,6 @@ class Script(object): return format_stack_version(stack_version_unformatted) - @staticmethod def in_stack_upgrade(): from resource_management.libraries.functions.default import default http://git-wip-us.apache.org/repos/asf/ambari/blob/62e406a6/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py index 38269cb..d39b66a 100644 --- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py +++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py @@ -29,7 +29,8 @@ from mahout import mahout class MahoutClient(Script): def get_stack_to_component(self): - return {"HDP": "mahout-client"} + import params + return {params.stack_name: "mahout-client"} def pre_upgrade_restart(self, env, upgrade_type=None): http://git-wip-us.apache.org/repos/asf/ambari/blob/62e406a6/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py index 4467114..b6fc153 100644 --- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py @@ -31,6 +31,7 @@ from resource_management.libraries.script.script import Script # server configurations config = Script.get_config() tmp_dir = Script.get_tmp_dir() +stack_root = Script.get_stack_root() stack_name = default("/hostLevelParams/stack_name", None) host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False) @@ -42,8 +43,8 @@ stack_version_formatted = format_stack_version(stack_version_unformatted) version = default("/commandParams/version", None) #mahout params -mahout_home = "/usr/hdp/current/mahout-client" -mahout_conf_dir = "/usr/hdp/current/mahout-client/conf" +mahout_home = format("{stack_root}/current/mahout-client") +mahout_conf_dir = format("{stack_root}/current/mahout-client/conf") mahout_user = config['configurations']['mahout-env']['mahout_user'] yarn_log_dir_prefix = config['configurations']['yarn-env']['yarn_log_dir_prefix'] http://git-wip-us.apache.org/repos/asf/ambari/blob/62e406a6/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py index 4020ae0..2e5cffc 100644 --- a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py +++ 
b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py @@ -129,7 +129,7 @@ class InstallPackages(Script): self.current_repo_files.add(repo_file) append_to_file = True - installed_repositories = list_ambari_managed_repos() + installed_repositories = list_ambari_managed_repos(stack_name) except Exception, err: Logger.logger.exception("Cannot distribute repositories. Error: {0}".format(str(err))) num_errors += 1 http://git-wip-us.apache.org/repos/asf/ambari/blob/62e406a6/ambari-server/src/test/python/TestVersionSelectUtil.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/TestVersionSelectUtil.py b/ambari-server/src/test/python/TestVersionSelectUtil.py index 9fa24cc..807a0e8 100644 --- a/ambari-server/src/test/python/TestVersionSelectUtil.py +++ b/ambari-server/src/test/python/TestVersionSelectUtil.py @@ -38,7 +38,8 @@ class TestVersionSelectUtil(TestCase): @patch('__builtin__.open') @patch("resource_management.core.shell.call") - def test_get_component_version(self, call_mock, open_mock): + @patch('os.path.exists') + def test_get_component_version(self, os_path_exists_mock, call_mock, open_mock): stack_expected_version = "2.2.1.0-2175" # Mock classes for reading from a file @@ -68,21 +69,22 @@ class TestVersionSelectUtil(TestCase): def read(self): return super(MagicFile3, self).read("hadoop-hdfs-datanode") + os_path_exists_mock.side_effect = [False, True, True, True] open_mock.side_effect = [MagicFile1(), MagicFile2(), MagicFile3()] call_mock.side_effect = [(0, "value will come from MagicFile"), ] * 3 - # Missing Stack + # Missing stack name version = self.module.get_component_version(None, "hadoop-hdfs-datanode") self.assertEquals(version, None) - version = self.module.get_component_version("StackDoesNotExist", "hadoop-hdfs-datanode") - self.assertEquals(version, None) - - # Invalid request + # Missing component name version = self.module.get_component_version("HDP", None) self.assertEquals(version, None) - # Incorrect name + # Invalid stack name + version = self.module.get_component_version("StackDoesNotExist", "hadoop-hdfs-datanode") + self.assertEquals(version, None) + # Invalid component name version = self.module.get_component_version("HDP", "hadoop-nonexistent-component-name") self.assertEquals(version, None) http://git-wip-us.apache.org/repos/asf/ambari/blob/62e406a6/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py index 338d3d3..8d0467a 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py +++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py @@ -910,7 +910,6 @@ From source with checksum 150f554beae04f76f814f59549dead8b""" ) put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"}) - @patch.object(Script, "is_stack_greater_or_equal", new = MagicMock(return_value=True)) @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs") def test_pre_upgrade_restart(self, copy_to_hdfs_mock): copy_to_hdfs_mock.return_value = True @@ -920,6 +919,7 @@ From source with checksum 150f554beae04f76f814f59549dead8b""" json_content = json.load(f) version = '2.2.1.0-3242' json_content['commandParams']['version'] = version + json_content['hostLevelParams']['stack_version'] = '2.2' 
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py", classname = "HiveServer", command = "pre_upgrade_restart", @@ -947,7 +947,6 @@ From source with checksum 150f554beae04f76f814f59549dead8b""" @patch("os.path.exists") @patch("resource_management.core.shell.call") - @patch.object(Script, "is_stack_greater_or_equal", new = MagicMock(return_value=True)) @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs") def test_pre_upgrade_restart_23(self, copy_to_hdfs_mock, call_mock, os_path__exists_mock): config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json" @@ -956,6 +955,7 @@ From source with checksum 150f554beae04f76f814f59549dead8b""" json_content = json.load(f) version = '2.3.0.0-1234' json_content['commandParams']['version'] = version + json_content['hostLevelParams']['stack_version'] = '2.3' copy_to_hdfs_mock.return_value = True mocks_dict = {} http://git-wip-us.apache.org/repos/asf/ambari/blob/62e406a6/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py index b6a19ec..19253b2 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py +++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py @@ -862,7 +862,6 @@ class TestHistoryServer(RMFTestCase): action = ["delete"]) self.assertResourceCalled("Link", "/etc/hadoop/conf", to="/usr/hdp/current/hadoop-client/conf") - @patch.object(Script, "is_stack_greater_or_equal", new = MagicMock(return_value="2.3.0")) @patch.object(functions, "get_stack_version", new = MagicMock(return_value="2.3.0.0-1234")) @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs") def test_pre_upgrade_restart_23(self, copy_to_hdfs_mock): @@ -871,7 +870,7 @@ class TestHistoryServer(RMFTestCase): json_content = json.load(f) version = '2.3.0.0-1234' json_content['commandParams']['version'] = version - + json_content['hostLevelParams']['stack_version'] = '2.3' copy_to_hdfs_mock.return_value = True mocks_dict = {} self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/historyserver.py", http://git-wip-us.apache.org/repos/asf/ambari/blob/62e406a6/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py index 8c61b0b..6dfb609 100644 --- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py +++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py @@ -200,7 +200,7 @@ class TestFalconServer(RMFTestCase): def test_upgrade(self, isfile_mock, exists_mock, isdir_mock): isdir_mock.return_value = True - exists_mock.side_effect = [False,False,True, True] + exists_mock.side_effect = [False,False,True, True,True] isfile_mock.return_value = True self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/falcon_server.py", http://git-wip-us.apache.org/repos/asf/ambari/blob/62e406a6/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_usersync.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_usersync.py 
b/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_usersync.py index 67bbb9d..5ca6c62 100644 --- a/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_usersync.py +++ b/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_usersync.py @@ -128,12 +128,14 @@ class TestRangerUsersync(RMFTestCase): self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'ranger-usersync', '2.2.2.0-2399'), sudo=True) @patch("setup_ranger.setup_usersync") - def test_upgrade_23(self, setup_usersync_mock): + @patch("os.path.exists") + def test_upgrade_23(self, os_path_exists_mock, setup_usersync_mock): config_file = self.get_src_folder()+"/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json" with open(config_file, "r") as f: json_content = json.load(f) json_content['commandParams']['version'] = '2.3.0.0-1234' + os_path_exists_mock.return_value = True mocks_dict = {} self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/ranger_usersync.py", classname = "RangerUsersync",
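----------------------------------------------------------------------
For reference, the pattern this commit converges on: instead of comparing against a
hard-coded stack name and version (e.g. Script.is_stack_greater_or_equal("2.2")), callers
ask whether the running stack advertises a feature via check_stack_feature. The short
sketch below is illustrative only and is not part of the commit; the function name is
hypothetical, while the imports and calls mirror those used in the diff above.

import os

from resource_management.libraries.script.script import Script
from resource_management.libraries.functions.constants import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature


def pick_hadoop_conf_dir():
    # Hypothetical helper: chooses the hadoop conf dir the way
    # conf_select.get_hadoop_conf_dir does after this change, without
    # hard-coding an HDP version such as "2.2".
    stack_root = Script.get_stack_root()
    stack_version = Script.get_stack_version()

    # Feature check replaces the old numeric comparison against "2.2".
    if stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version):
        return os.path.join(stack_root, "current", "hadoop-client", "conf")
    return "/etc/hadoop/conf"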