From: ababiichuk@apache.org
To: commits@ambari.apache.org
Reply-To: ambari-dev@ambari.apache.org
Date: Thu, 17 Aug 2017 10:16:31 -0000
Message-Id: <2c01546877bf45848037dd8cbfd1b938@git.apache.org>
In-Reply-To: <747363a9155d44f1826ff474626d9791@git.apache.org>
References: <747363a9155d44f1826ff474626d9791@git.apache.org>
Subject: [25/32] ambari git commit: AMBARI-21722 - Begin Using Service Versions In Python stack_feature Code (jonathanhurley)

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/params_linux.py
index b8e8f78..34c6b9a 100644
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/params_linux.py
@@ -38,7 +38,6 @@ stack_version_formatted = status_params.stack_version_formatted
 stack_root = status_params.stack_root
 stack_name = status_params.stack_name

-current_version = default("/hostLevelParams/current_version", None)
 component_directory = status_params.component_directory

 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
index 973d9dd..86de20d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_features.json
@@ -104,12 +104,6 @@
       "min_version": "2.2.0.0"
     },
     {
-      "name": "create_kafka_broker_id",
-      "description": "Ambari should create Kafka Broker Id (AMBARI-12678)",
-      "min_version": "2.2.0.0",
-      "max_version": "2.3.0.0"
-    },
-    {
       "name": "kafka_listeners",
       "description": "Kafka listeners (AMBARI-10984)",
       "min_version": "2.3.0.0"

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/after-INSTALL/scripts/params.py
index 4a422ec..ddc6100 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/after-INSTALL/scripts/params.py
@@ -44,9 +44,6 @@ sudo = AMBARI_SUDO_BINARY
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)

-# current host stack version
-current_version = default("/hostLevelParams/current_version", None)
-
 # service name
 service_name = config['serviceName']

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_features.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_features.json b/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_features.json
index 9422cbc..afd5183 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_features.json
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_features.json
@@ -104,12 +104,6 @@
       "min_version": "2.2.0.0"
     },
     {
-      "name": "create_kafka_broker_id",
-      "description": "Ambari should create Kafka Broker Id (AMBARI-12678)",
-      "min_version": "2.2.0.0",
-      "max_version": "2.3.0.0"
-    },
-    {
       "name": "kafka_listeners",
       "description": "Kafka listeners (AMBARI-10984)",
       "min_version": "2.3.0.0"

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/test/java/org/apache/ambari/server/StateRecoveryManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/StateRecoveryManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/StateRecoveryManagerTest.java
index b048d04..8b94338 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/StateRecoveryManagerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/StateRecoveryManagerTest.java
@@ -89,8 +89,8 @@ public class StateRecoveryManagerTest {
         getHostVersionMock("install_failed_version", RepositoryVersionState.INSTALL_FAILED, installFailedHostVersionCapture),
         getHostVersionMock("installing_version", RepositoryVersionState.INSTALLING, installingHostVersionCapture),
         getHostVersionMock("installed_version", RepositoryVersionState.INSTALLED, installedHostVersionCapture),
-        getHostVersionMock("out_of_sync_version", RepositoryVersionState.OUT_OF_SYNC, outOfSyncHostVersionCapture),
-        getHostVersionMock("current_version", RepositoryVersionState.CURRENT, currentHostVersionCapture)));
+        getHostVersionMock("out_of_sync_version", RepositoryVersionState.OUT_OF_SYNC,
+            outOfSyncHostVersionCapture)));

     // Adding all possible cluster version states

@@ -101,14 +101,13 @@ public class StateRecoveryManagerTest {
     final Capture upgradeFailedClusterVersionCapture = EasyMock.newCapture();
     final Capture upgradingClusterVersionCapture = EasyMock.newCapture();
     final Capture upgradedClusterVersionCapture = EasyMock.newCapture();
-    final Capture currentClusterVersionCapture = EasyMock.newCapture();

     expect(serviceComponentDesiredStateDAOMock.findAll()).andReturn(Lists.newArrayList(
         getDesiredStateEntityMock("install_failed_version", RepositoryVersionState.INSTALL_FAILED, installFailedClusterVersionCapture),
         getDesiredStateEntityMock("installing_version", RepositoryVersionState.INSTALLING, installingClusterVersionCapture),
         getDesiredStateEntityMock("installed_version", RepositoryVersionState.INSTALLED, installedClusterVersionCapture),
-        getDesiredStateEntityMock("out_of_sync_version", RepositoryVersionState.OUT_OF_SYNC, outOfSyncClusterVersionCapture),
-        getDesiredStateEntityMock("current_version", RepositoryVersionState.CURRENT, currentClusterVersionCapture)));
+        getDesiredStateEntityMock("out_of_sync_version", RepositoryVersionState.OUT_OF_SYNC,
+            outOfSyncClusterVersionCapture)));

     replay(hostVersionDAOMock, serviceComponentDesiredStateDAOMock);

@@ -132,7 +131,6 @@ public class StateRecoveryManagerTest {
     assertFalse(upgradeFailedClusterVersionCapture.hasCaptured());
     assertFalse(upgradingClusterVersionCapture.hasCaptured());
     assertFalse(upgradedClusterVersionCapture.hasCaptured());
-    assertFalse(currentClusterVersionCapture.hasCaptured());
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
index 1a4935a..76160cc 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
@@ -298,7 +298,7 @@ public class ExecutionCommandWrapperTest {
     executionCommand.setRequestAndStage(1, 1);
     executionCommand.setHostname(HOST1);
     executionCommand.setRole("NAMENODE");
-    executionCommand.setRoleParams(Collections.emptyMap());
+    executionCommand.setRoleParams(Collections.emptyMap());
     executionCommand.setRoleCommand(RoleCommand.INSTALL);
     executionCommand.setServiceName("HDFS");
     executionCommand.setCommandType(AgentCommandType.EXECUTION_COMMAND);
@@ -310,7 +310,7 @@ public class ExecutionCommandWrapperTest {
     ExecutionCommand processedExecutionCommand = execCommWrap.getExecutionCommand();
     commandParams = processedExecutionCommand.getCommandParams();

-    Assert.assertTrue(commandParams.containsKey(KeyNames.VERSION));
+    Assert.assertFalse(commandParams.containsKey(KeyNames.VERSION));

     // now try with a START command which should populate the version even
     // though the state is INSTALLING
@@ -322,7 +322,7 @@ public class ExecutionCommandWrapperTest {
     executionCommand.setRequestAndStage(1, 1);
     executionCommand.setHostname(HOST1);
     executionCommand.setRole("NAMENODE");
-    executionCommand.setRoleParams(Collections.emptyMap());
+    executionCommand.setRoleParams(Collections.emptyMap());
     executionCommand.setRoleCommand(RoleCommand.START);
     executionCommand.setServiceName("HDFS");
     executionCommand.setCommandType(AgentCommandType.EXECUTION_COMMAND);
@@ -335,7 +335,7 @@ public class ExecutionCommandWrapperTest {
     processedExecutionCommand = execCommWrap.getExecutionCommand();
     commandParams = processedExecutionCommand.getCommandParams();
     Assert.assertEquals("0.1-0000", commandParams.get(KeyNames.VERSION));
-  }
+  }

   @AfterClass
   public static void tearDown() throws AmbariException, SQLException {

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java b/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
index c3b820b..b90295d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/utils/StageUtilsTest.java
@@ -57,12 +57,14 @@ import org.apache.ambari.server.actionmanager.StageFactoryImpl;
 import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.dao.HostDAO;
 import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.encryption.CredentialStoreService;
 import org.apache.ambari.server.stack.StackManagerFactory;
+import org.apache.ambari.server.stageplanner.RoleGraphFactory;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
@@ -74,6 +76,7 @@ import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentHost;
 import org.apache.ambari.server.state.ServiceComponentHostFactory;
+import org.apache.ambari.server.state.UpgradeContextFactory;
 import org.apache.ambari.server.state.cluster.ClusterFactory;
 import org.apache.ambari.server.state.host.HostFactory;
 import org.apache.ambari.server.state.stack.OsFamily;
@@ -127,10 +130,13 @@ public class StageUtilsTest extends EasyMockSupport {
         bind(HostDAO.class).toInstance(createNiceMock(HostDAO.class));
         bind(PersistedState.class).toInstance(createNiceMock(PersistedState.class));
         bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class));
+        bind(AmbariManagementController.class).toInstance(createNiceMock(AmbariManagementController.class));

         install(new FactoryModuleBuilder().build(ExecutionCommandWrapperFactory.class));
         install(new FactoryModuleBuilder().implement(Config.class, ConfigImpl.class).build(ConfigFactory.class));
         install(new FactoryModuleBuilder().build(ConfigureClusterTaskFactory.class));
+        install(new FactoryModuleBuilder().build(UpgradeContextFactory.class));
+        install(new FactoryModuleBuilder().build(RoleGraphFactory.class));
       }
     });

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/test/python/TestComponentVersionMapping.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestComponentVersionMapping.py b/ambari-server/src/test/python/TestComponentVersionMapping.py
new file mode 100644
index 0000000..76fd8ed
--- /dev/null
+++ b/ambari-server/src/test/python/TestComponentVersionMapping.py
@@ -0,0 +1,84 @@
+# !/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+
+from resource_management.core.logger import Logger
+from resource_management.libraries.functions import component_version
+from resource_management.libraries.script import Script
+from unittest import TestCase
+
+Logger.initialize_logger()
+
+class TestComponentVersionMapping(TestCase):
+
+  def test_get_component_versions(self):
+    """
+    Tests that the component version map can be parsed
+    :return:
+    """
+    command_json = TestComponentVersionMapping._get_component_version_mappings()
+    Script.config = command_json
+
+    version = component_version.get_component_repository_version(service_name="HDFS",
+      component_name="DATANODE")
+
+    self.assertEqual(version, "2.5.0.0-1234")
+
+    version = component_version.get_component_repository_version(service_name = "ZOOKEEPER",
+      component_name = "ZOOKEEPER_SERVER")
+
+    self.assertEqual(version, "2.6.0.0-9999")
+
+
+  def test_get_component_version_by_service_name(self):
+    """
+    Tests that the component version map can be parsed using only the service name
+    :return:
+    """
+    command_json = TestComponentVersionMapping._get_component_version_mappings()
+    Script.config = command_json
+
+    version = component_version.get_component_repository_version(service_name="HDFS")
+    self.assertEqual(version, "2.5.0.0-1234")
+
+    version = component_version.get_component_repository_version(service_name = "ZOOKEEPER")
+    self.assertEqual(version, "2.6.0.0-9999")
+
+
+  @staticmethod
+  def _get_component_version_mappings():
+    """
+    A typical component version mapping structure
+    :return:
+    """
+    return {
+      "componentVersionMap": {
+        "HDFS": {
+          "NAMENODE": "2.5.0.0-1234",
+          "SECONDARY_NAMENODE": "2.5.0.0-1234",
+          "DATANODE": "2.5.0.0-1234",
+          "HDFS_CLIENT": "2.5.0.0-1234"
+        },
+        "ZOOKEEPER": {
+          "ZOOKEEPER_SERVER": "2.6.0.0-9999",
+          "ZOOKEEPER_CLIENT": "2.6.0.0-9999"
+        }
+      },
+    }

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/test/python/TestStackFeature.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestStackFeature.py b/ambari-server/src/test/python/TestStackFeature.py
index 6e8bcec..ddd3f72 100644
--- a/ambari-server/src/test/python/TestStackFeature.py
+++ b/ambari-server/src/test/python/TestStackFeature.py
@@ -36,24 +36,20 @@ class TestStackFeature(TestCase):
   - STOP
     hostLevelParams/stack_name = HDP
     hostLevelParams/stack_version = 2.5
-    hostLevelParams/current_version = 2.5.0.0-1237
     commandParams/version = 2.5.0.0-1237
   - START
     hostLevelParams/stack_name = HDP
     hostLevelParams/stack_version = 2.6
-    hostLevelParams/current_version = 2.5.0.0-1237
     commandParams/version = 2.6.0.0-334

   EU Downgrade (HDP 2.6 to HDP 2.5)
   - STOP
     hostLevelParams/stack_name = HDP
     hostLevelParams/stack_version = 2.6
-    hostLevelParams/current_version = 2.5.0.0-1237
     commandParams/version = 2.6.0.0-334
   - START
     hostLevelParams/stack_name = HDP
     hostLevelParams/stack_version = 2.5
-    hostLevelParams/current_version = 2.5.0.0-1237
     commandParams/version = 2.5.0.0-1237
   """
@@ -153,6 +149,7 @@ class TestStackFeature(TestCase):
     :return:
     """
     return {
+      "serviceName":"HDFS",
       "roleCommand": "ACTIONEXECUTE",
       "hostLevelParams": {
         "stack_name": "HDP",
@@ -172,17 +169,33 @@ class TestStackFeature(TestCase):
     :return:
     """
     return {
+      "serviceName":"HDFS",
       "roleCommand":"ACTIONEXECUTE",
       "hostLevelParams": {
         "stack_name": "HDP",
         "stack_version": "2.4",
-        "current_version": "2.4.0.0-1234"
       },
       "commandParams": {
         "source_stack": "2.4",
         "target_stack": "2.5",
         "upgrade_direction": "upgrade",
         "version": "2.5.9.9-9999"
+      },
+      "upgradeSummary": {
+        "services":{
+          "HDFS":{
+            "sourceRepositoryId":1,
+            "sourceStackId":"HDP-2.4",
+            "sourceVersion":"2.4.0.0-1234",
+            "targetRepositoryId":2,
+            "targetStackId":"HDP-2.5",
+            "targetVersion":"2.5.9.9-9999"
+          }
+        },
+        "direction":"UPGRADE",
+        "type":"rolling_upgrade",
+        "isRevert":False,
+        "orchestration":"STANDARD"
       }
     }

@@ -193,18 +206,33 @@ class TestStackFeature(TestCase):
     :return:
     """
     return {
+      "serviceName":"HDFS",
       "roleCommand":"ACTIONEXECUTE",
       "hostLevelParams":{
         "stack_name":"HDP",
-        "stack_version":"2.4",
-        "current_version":"2.4.0.0-1234"
+        "stack_version":"2.4"
       },
       "commandParams":{
         "source_stack":"2.5",
         "target_stack":"2.4",
         "upgrade_direction":"downgrade",
-        "version":"2.4.0.0-1234",
-        "downgrade_from_version": "2.5.9.9-9999"
+        "version":"2.4.0.0-1234"
+      },
+      "upgradeSummary":{
+        "services":{
+          "HDFS":{
+            "sourceRepositoryId":2,
+            "sourceStackId":"HDP-2.5",
+            "sourceVersion":"2.5.9.9-9999",
+            "targetRepositoryId":1,
+            "targetStackId":"HDP-2.4",
+            "targetVersion":"2.4.0.0-1234"
+          }
+        },
+        "direction":"DOWNGRADE",
+        "type":"rolling_upgrade",
+        "isRevert":False,
+        "orchestration":"STANDARD"
       }
     }

@@ -216,18 +244,33 @@ class TestStackFeature(TestCase):
     :return:
     """
     return {
+      "serviceName":"HDFS",
      "roleCommand":"STOP",
       "hostLevelParams":{
         "stack_name":"HDP",
         "stack_version":"2.5",
-        "current_version":"2.4.0.0-1234"
       },
       "commandParams":{
         "source_stack":"2.5",
         "target_stack":"2.4",
         "upgrade_direction":"downgrade",
-        "version":"2.5.9.9-9999",
-        "downgrade_from_version":"2.5.9.9-9999"
+        "version":"2.5.9.9-9999"
+      },
+      "upgradeSummary":{
+        "services":{
+          "HDFS":{
+            "sourceRepositoryId":2,
+            "sourceStackId":"HDP-2.5",
+            "sourceVersion":"2.5.9.9-9999",
+            "targetRepositoryId":1,
+            "targetStackId":"HDP-2.4",
+            "targetVersion":"2.4.0.0-1234"
+          }
+        },
+        "direction":"DOWNGRADE",
+        "type":"rolling_upgrade",
+        "isRevert":False,
+        "orchestration":"STANDARD"
      }
     }

@@ -238,19 +281,34 @@ class TestStackFeature(TestCase):
     :return:
     """
     return {
+      "serviceName":"HDFS",
       "roleCommand":"CUSTOM_COMMAND",
       "hostLevelParams":{
         "stack_name":"HDP",
         "stack_version":"2.5",
-        "current_version":"2.4.0.0-1234",
         "custom_command":"STOP"
       },
       "commandParams":{
         "source_stack":"2.5",
         "target_stack":"2.4",
         "upgrade_direction":"downgrade",
-        "version":"2.5.9.9-9999",
-        "downgrade_from_version":"2.5.9.9-9999"
+        "version":"2.5.9.9-9999"
+      },
+      "upgradeSummary":{
+        "services":{
+          "HDFS":{
+            "sourceRepositoryId":2,
+            "sourceStackId":"HDP-2.5",
+            "sourceVersion":"2.5.9.9-9999",
+            "targetRepositoryId":1,
+            "targetStackId":"HDP-2.4",
+            "targetVersion":"2.4.0.0-1234"
+          }
+        },
+        "direction":"DOWNGRADE",
+        "type":"rolling_upgrade",
+        "isRevert":False,
+        "orchestration":"STANDARD"
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/test/python/TestUpgradeSummary.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestUpgradeSummary.py b/ambari-server/src/test/python/TestUpgradeSummary.py
new file mode 100644
index 0000000..7606867
--- /dev/null
+++ b/ambari-server/src/test/python/TestUpgradeSummary.py
@@ -0,0 +1,137 @@
+# !/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+
+from resource_management.core.logger import Logger
+from resource_management.libraries.functions import upgrade_summary
+from resource_management.libraries.script import Script
+from unittest import TestCase
+
+Logger.initialize_logger()
+
+class TestUpgradeSummary(TestCase):
+
+  def test_get_stack_feature_version_missing_params(self):
+    """
+    Tests that simple upgrade information can be extracted from JSON
+    :return:
+    """
+    command_json = TestUpgradeSummary._get_cluster_simple_upgrade_json()
+    Script.config = command_json
+
+    summary = upgrade_summary.get_upgrade_summary()
+    self.assertEqual(False, summary.is_revert)
+    self.assertEqual("UPGRADE", summary.direction)
+    self.assertEqual("STANDARD", summary.orchestration)
+    self.assertEqual("rolling_upgrade", summary.type)
+
+    services = summary.services
+    self.assertEqual("2.4.0.0-1234", services["HDFS"].source_version)
+    self.assertEqual("2.5.9.9-9999", services["HDFS"].target_version)
+
+    self.assertEqual("2.4.0.0-1234", upgrade_summary.get_source_version("HDFS"))
+    self.assertEqual("2.5.9.9-9999", upgrade_summary.get_target_version("HDFS"))
+
+    self.assertIsNone(upgrade_summary.get_downgrade_from_version("HDFS"))
+
+
+  def test_get_downgrade_from_version(self):
+    """
+    Tests that simple downgrade returns the correct version
+    :return:
+    """
+    command_json = TestUpgradeSummary._get_cluster_simple_downgrade_json()
+    Script.config = command_json
+
+    self.assertIsNone(upgrade_summary.get_downgrade_from_version("FOO"))
+    self.assertEqual("2.5.9.9-9999", upgrade_summary.get_downgrade_from_version("HDFS"))
+
+
+  @staticmethod
+  def _get_cluster_simple_upgrade_json():
+    """
+    A restart command during an upgrade.
+    :return:
+    """
+    return {
+      "roleCommand":"ACTIONEXECUTE",
+      "hostLevelParams": {
+        "stack_name": "HDP",
+        "stack_version": "2.4",
+      },
+      "commandParams": {
+        "source_stack": "2.4",
+        "target_stack": "2.5",
+        "upgrade_direction": "upgrade",
+        "version": "2.5.9.9-9999"
+      },
+      "upgradeSummary": {
+        "services":{
+          "HDFS":{
+            "sourceRepositoryId":1,
+            "sourceStackId":"HDP-2.4",
+            "sourceVersion":"2.4.0.0-1234",
+            "targetRepositoryId":2,
+            "targetStackId":"HDP-2.5",
+            "targetVersion":"2.5.9.9-9999"
+          }
+        },
+        "direction":"UPGRADE",
+        "type":"rolling_upgrade",
+        "isRevert":False,
+        "orchestration":"STANDARD"
+      }
+    }
+
+  @staticmethod
+  def _get_cluster_simple_downgrade_json():
+    """
+    A restart command during a downgrade.
+    :return:
+    """
+    return {
+      "roleCommand":"ACTIONEXECUTE",
+      "hostLevelParams": {
+        "stack_name": "HDP",
+        "stack_version": "2.4",
+      },
+      "commandParams": {
+        "source_stack": "2.5",
+        "target_stack": "2.4",
+        "upgrade_direction": "downgrade",
+        "version": "2.4.0.0-1234"
+      },
+      "upgradeSummary": {
+        "services":{
+          "HDFS":{
+            "sourceRepositoryId":2,
+            "sourceStackId":"HDP-2.5",
+            "sourceVersion":"2.5.9.9-9999",
+            "targetRepositoryId":1,
+            "targetStackId":"HDP-2.4",
+            "targetVersion":"2.4.0.0-1234"
+          }
+        },
+        "direction":"DOWNGRADE",
+        "type":"rolling_upgrade",
+        "isRevert":False,
+        "orchestration":"STANDARD"
+      }
+    }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 033680c..8c04a79 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -255,22 +255,24 @@ class TestHiveServer(RMFTestCase):
   @patch("hive_service.check_fs_root")
   @patch("socket.socket")
   def test_start_secured(self, socket_mock, check_fs_root_mock, copy_to_hfds_mock):
+    config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/secured.json"
+    with open(config_file, "r") as f:
+      json_content = json.load(f)
+
+    json_content['commandParams']['version'] = '2.3.0.0-1234'
+
     s = socket_mock.return_value
     copy_to_hfds_mock.return_value = None
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                        classname = "HiveServer",
                        command = "start",
-                       config_file="secured.json",
+                       config_dict = json_content,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )

     self.assert_configure_secured()
-    self.assertResourceCalled('Execute',
-        '/usr/bin/kinit -kt /etc/security/keytabs/hive.service.keytab hive/c6401.ambari.apache.org@EXAMPLE.COM; ',
-        user = 'hive',
-    )
     self.assertResourceCalled('Execute',
         '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
         environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
                        'HIVE_BIN': 'hive',
@@ -299,10 +301,6 @@ class TestHiveServer(RMFTestCase):
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('Execute',
-        '/usr/bin/kinit -kt /etc/security/keytabs/hive.service.keytab hive/c6401.ambari.apache.org@EXAMPLE.COM; ',
-        user = 'hive',
-    )
     self.assertResourceCalled('Execute',
         "ambari-sudo.sh kill 123",
        not_if = "! (ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1)",
@@ -853,7 +851,6 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
         ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', '2.2.1.0-2065'),
         sudo=True)
-    self.assertNoMoreResources()

   @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
   def test_pre_upgrade_restart(self, copy_to_hdfs_mock):

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
index 0d87315..66a5e54 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
@@ -34,11 +34,15 @@ class TestServiceCheck(RMFTestCase):
   def test_service_check_default(self, socket_mock):

+    config_file = "default.json"
+
+    base_path, configs_path = self._get_test_paths(RMFTestCase.TARGET_COMMON_SERVICES, self.STACK_VERSION)
+    json_content = self.get_config_file(configs_path, config_file)
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
                        classname="HiveServiceCheck",
                        command="service_check",
-                       config_file="default.json",
+                       config_dict = json_content,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
@@ -146,11 +150,15 @@ class TestServiceCheck(RMFTestCase):
   def test_service_check_secured(self, socket_mock):

+    config_file = "secured.json"
+    base_path, configs_path = self._get_test_paths(RMFTestCase.TARGET_COMMON_SERVICES, self.STACK_VERSION)
+    json_content = self.get_config_file(configs_path, config_file)
+    del json_content["commandParams"]["version"]
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
                        classname="HiveServiceCheck",
                        command="service_check",
-                       config_file="secured.json",
+                       config_dict = json_content,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu.json b/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu.json
index 3aadf2c..3440085 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu.json
@@ -42,8 +42,7 @@
         "stack_name": "HDP",
         "group_list": "[\"hadoop\",\"users\"]",
         "host_sys_prepped": "false",
-        "ambari_db_rca_username": "mapred",
-        "current_version": "2.2.7.0-2816",
+        "ambari_db_rca_username": "mapred",
         "jdk_name": "jdk-7u45-linux-x64.tar.gz",
         "mysql_jdbc_url": "http://10.0.0.28:8080/resources//mysql-connector-java.jar",
         "repo_info": "[{\"baseUrl\":\"http://repos.ambari.apache.org/hdp/HDP-2.2.7.0-2816\",\"osType\":\"redhat6\",\"repoId\":\"HDP-2.2\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.2.6.0\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.2.8.0\",\"baseSaved\":true},{\"baseUrl\":\"http://repos.ambari.apache.org/hdp/HDP-UTILS-1.1.0.20\",\"osType\":\"redhat6\",\"repoId\":\"HDP-UTILS-1.1.0.20\",\"repoName\":\"HDP-UTILS\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"baseSaved\":true}]",
@@ -70,7 +69,23 @@
     "role": "NAMENODE",
     "requestId": 22,
     "taskId": 147,
-    "public_hostname": "c6402.ambari.apache.org",
+    "public_hostname": "c6402.ambari.apache.org",
+    "upgradeSummary": {
+        "services": {
+            "HDFS": {
+                "sourceRepositoryId": 1,
+                "sourceStackId": "HDP-2.2",
+                "sourceVersion": "2.2.7.0-2816",
+                "targetRepositoryId": 2,
+                "targetStackId": "HDP-2.3",
+                "targetVersion": "2.3.2.0-2844"
+            }
+        },
+        "direction": "UPGRADE",
+        "type": "nonrolling_upgrade",
+        "isRevert": false,
+        "orchestration": "STANDARD"
+    },
     "configurations": {
         "hdfs-site": {
             "dfs.namenode.http-address.nn1.nn1": "c6401.ambari.apache.org:50070",

http://git-wip-us.apache.org/repos/asf/ambari/blob/330a61cd/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu_standby.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu_standby.json b/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu_standby.json
index 2d48ff6..90b2493 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu_standby.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/nn_eu_standby.json
@@ -42,8 +42,7 @@
         "stack_name": "HDP",
         "group_list": "[\"hadoop\",\"users\"]",
         "host_sys_prepped": "false",
-        "ambari_db_rca_username": "mapred",
-        "current_version": "2.2.7.0-2816",
+        "ambari_db_rca_username": "mapred",
         "jdk_name": "jdk-7u45-linux-x64.tar.gz",
         "mysql_jdbc_url": "http://10.0.0.28:8080/resources//mysql-connector-java.jar",
         "repo_info": "[{\"baseUrl\":\"http://repos.ambari.apache.org/hdp/HDP-2.2.7.0-2816\",\"osType\":\"redhat6\",\"repoId\":\"HDP-2.2\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.2.6.0\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.2.8.0\",\"baseSaved\":true},{\"baseUrl\":\"http://repos.ambari.apache.org/hdp/HDP-UTILS-1.1.0.20\",\"osType\":\"redhat6\",\"repoId\":\"HDP-UTILS-1.1.0.20\",\"repoName\":\"HDP-UTILS\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"baseSaved\":true}]",
@@ -70,7 +69,23 @@
     "role": "NAMENODE",
     "requestId": 22,
     "taskId": 147,
-    "public_hostname": "c6402.ambari.apache.org",
+    "public_hostname": "c6402.ambari.apache.org",
+    "upgradeSummary": {
+        "services": {
+            "HDFS": {
+                "sourceRepositoryId": 1,
+                "sourceStackId": "HDP-2.2",
+                "sourceVersion": "2.2.7.0-2816",
+                "targetRepositoryId": 2,
+                "targetStackId": "HDP-2.3",
+                "targetVersion": "2.3.2.0-2844"
+            }
+        },
+        "direction": "UPGRADE",
+        "type": "nonrolling_upgrade",
+        "isRevert": false,
+        "orchestration": "STANDARD"
+    },
     "configurations": {
         "hdfs-site": {
             "dfs.namenode.http-address.nn1.nn1": "c6401.ambari.apache.org:50070",