ambari-commits mailing list archives

From: jonathanhur...@apache.org
Subject: [4/4] ambari git commit: AMBARI-21722 - Begin Using Service Versions In Python stack_feature Code (jonathanhurley)
Date: Wed, 16 Aug 2017 04:05:38 GMT
AMBARI-21722 - Begin Using Service Versions In Python stack_feature Code (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e7f413d3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e7f413d3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e7f413d3

Branch: refs/heads/branch-feature-AMBARI-21450
Commit: e7f413d321e7c74463a6c1e5898b5739bef8f82f
Parents: 086e368
Author: Jonathan Hurley <jhurley@hortonworks.com>
Authored: Mon Aug 14 16:58:52 2017 -0400
Committer: Jonathan Hurley <jhurley@hortonworks.com>
Committed: Tue Aug 15 16:56:44 2017 -0400

----------------------------------------------------------------------
 .../libraries/functions/component_version.py    |   63 +
 .../libraries/functions/copy_tarball.py         |   38 +-
 .../libraries/functions/stack_features.py       |   33 +-
 .../libraries/functions/upgrade_summary.py      |  107 ++
 .../actionmanager/ExecutionCommandWrapper.java  |   18 +-
 .../ambari/server/agent/ExecutionCommand.java   |   35 +-
 .../AmbariManagementControllerImpl.java         |    3 +-
 .../ambari/server/state/UpgradeContext.java     |   89 +-
 .../FALCON/0.5.0.2.1/package/scripts/falcon.py  |    4 +-
 .../0.5.0.2.1/package/scripts/params_linux.py   |   25 +-
 .../package/scripts/hive_metastore.py           |    6 +-
 .../package/scripts/hive_server_upgrade.py      |   12 +-
 .../0.12.0.2.0/package/scripts/hive_service.py  |    2 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |   11 +-
 .../KAFKA/0.8.1/package/scripts/kafka.py        |    4 +-
 .../KAFKA/0.8.1/package/scripts/kafka_broker.py |   15 +-
 .../KAFKA/0.8.1/package/scripts/params.py       |    8 -
 .../KAFKA/0.8.1/package/scripts/upgrade.py      |    7 +-
 .../0.5.0.2.2/package/scripts/params_linux.py   |    3 +-
 .../OOZIE/4.0.0.2.0/package/scripts/oozie.py    |    4 +-
 .../RANGER/0.4.0/package/scripts/params.py      |    1 -
 .../3.4.5/package/scripts/params_linux.py       |    1 -
 .../HIVE/package/scripts/hive_metastore.py      |    8 +-
 .../HIVE/package/scripts/hive_server_upgrade.py |    5 +-
 .../4.0/services/HIVE/package/scripts/params.py |   12 +-
 .../KAFKA/package/scripts/kafka_broker.py       |   15 +-
 .../services/KAFKA/package/scripts/params.py    |    7 +-
 .../services/KAFKA/package/scripts/upgrade.py   |    7 +-
 .../4.0/services/SOLR/package/scripts/params.py |    3 +-
 .../SOLR/package/scripts/solr_service.py        |    2 +-
 .../SOLR/package/scripts/solr_upgrade.py        |    4 +-
 .../ZOOKEEPER/package/scripts/params.py         |    1 -
 .../ZOOKEEPER/package/scripts/zookeeper.py      |    4 +-
 .../package/scripts/zookeeper_service.py        |    4 +-
 .../4.2.5/hooks/after-INSTALL/scripts/params.py |    3 -
 .../services/SOLR/package/scripts/params.py     |    2 -
 .../services/HBASE/package/scripts/params.py    |    3 +-
 .../HIVE/package/scripts/hive_metastore.py      |    6 +-
 .../HIVE/package/scripts/hive_server_upgrade.py |   16 +-
 .../HIVE/package/scripts/params_linux.py        |   11 +-
 .../KAFKA/package/scripts/kafka_broker.py       |   15 +-
 .../services/KAFKA/package/scripts/params.py    |    9 +-
 .../services/KAFKA/package/scripts/upgrade.py   |    6 -
 .../4.2/services/KNOX/package/scripts/params.py |    3 +-
 .../services/RANGER/package/scripts/params.py   |    1 -
 .../4.2/services/SOLR/package/scripts/params.py |    3 +-
 .../SOLR/package/scripts/solr_upgrade.py        |    4 +-
 .../services/SPARK/package/scripts/params.py    |    4 +-
 .../ZOOKEEPER/package/scripts/params.py         |    1 -
 .../ZOOKEEPER/package/scripts/zookeeper.py      |    4 +-
 .../package/scripts/zookeeper_service.py        |    4 +-
 .../ambari/server/StateRecoveryManagerTest.java |   10 +-
 .../ambari/server/utils/StageUtilsTest.java     |    6 +
 .../test/python/TestComponentVersionMapping.py  |   84 ++
 .../src/test/python/TestStackFeature.py         |   88 +-
 .../src/test/python/TestUpgradeSummary.py       |   87 ++
 .../stacks/2.0.6/HIVE/test_hive_server.py       |   17 +-
 .../2.0.6/HIVE/test_hive_service_check.py       |   12 +-
 .../test/python/stacks/2.0.6/configs/nn_eu.json |   21 +-
 .../stacks/2.0.6/configs/nn_eu_standby.json     |   21 +-
 .../python/stacks/2.0.6/configs/secured.json    | 1210 +++++++++---------
 .../hooks/after-INSTALL/test_after_install.py   |    2 -
 .../stacks/2.1/FALCON/test_falcon_client.py     |    5 +-
 .../stacks/2.1/FALCON/test_falcon_server.py     |   15 +-
 .../stacks/2.1/FALCON/test_service_check.py     |    5 +-
 .../stacks/2.1/HIVE/test_hive_metastore.py      |   20 +-
 .../2.1/configs/hive-metastore-upgrade.json     |   21 +-
 .../python/stacks/2.2/KNOX/test_knox_gateway.py |   36 +-
 .../python/stacks/2.2/configs/knox_upgrade.json |   21 +-
 .../stacks/2.3/configs/storm_default.json       |    6 +-
 .../2.3/configs/storm_default_secure.json       |    6 +-
 .../2.5/configs/ranger-admin-default.json       |    1 -
 .../2.5/configs/ranger-admin-secured.json       |    1 -
 .../stacks/2.5/configs/ranger-kms-default.json  |    1 -
 .../stacks/2.5/configs/ranger-kms-secured.json  |    1 -
 .../2.6/configs/ranger-admin-default.json       |    1 -
 .../2.6/configs/ranger-admin-secured.json       |    1 -
 .../src/test/python/stacks/utils/RMFTestCase.py |   66 +-
 .../2.0/hooks/after-INSTALL/scripts/hook.py     |    2 +-
 .../2.0/hooks/after-INSTALL/scripts/params.py   |    3 -
 .../scripts/shared_initialization.py            |   26 +-
 .../HIVE/package/scripts/hive_metastore.py      |    7 +-
 .../HIVE/package/scripts/hive_server_upgrade.py |   21 +-
 .../HIVE/package/scripts/hive_service.py        |    2 +-
 .../HIVE/package/scripts/params_linux.py        |   11 +-
 85 files changed, 1507 insertions(+), 986 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py b/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
new file mode 100644
index 0000000..a1fd6b2
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.libraries.script.script import Script
+
+def get_component_repository_version(service_name, component_name = None):
+  """
+  Gets the version associated with the specified component from the structure in the command.
+  Every command should contain a mapping of service/component to the repository version it is
+  set to.
+
+  :param service_name: the name of the service
+  :param component_name: the name of the component
+  """
+  versions = _get_component_repositories()
+  if versions is None:
+    return None
+
+  if service_name not in versions:
+    return None
+
+  component_versions = versions[service_name]
+  if len(component_versions) == 0:
+    return None
+
+  if component_name is None:
+    for component in component_versions:
+      return component_versions[component]
+
+  if component_name not in component_versions:
+    return None
+
+  return component_versions[component_name]
+
+
+def _get_component_repositories():
+  """
+  Gets an initialized dictionary from the value in componentVersionMap. This structure is
+  sent on every command by Ambari and should contain each service & component's desired repository.
+  :return:
+  """
+  config = Script.get_config()
+  if "componentVersionMap" not in config or config["componentVersionMap"] is "":
+    return None
+
+  return config["componentVersionMap"]
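
As a rough usage sketch (the service and component names below are illustrative, not taken from this commit), a service script could ask for its mapped repository version like this:

    from resource_management.libraries.functions import component_version

    # version mapped to a specific component of a service
    hive_server_version = component_version.get_component_repository_version("HIVE", "HIVE_SERVER")

    # with no component name, the version of the first component found for the service is returned
    hive_version = component_version.get_component_repository_version("HIVE")

    if hive_version is None:
      # the command carried no componentVersionMap entry for this service
      pass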

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
index 31a9be4..e0dd96d 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
@@ -21,7 +21,6 @@ limitations under the License.
 __all__ = ["copy_to_hdfs", "get_sysprep_skip_copy_tarballs_hdfs"]
 
 import os
-import uuid
 import tempfile
 import re
 
@@ -30,7 +29,7 @@ from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions.default import default
 from resource_management.core import shell
 from resource_management.core.logger import Logger
-from resource_management.libraries.functions import stack_tools
+from resource_management.libraries.functions import stack_tools, stack_features, stack_select
 
 STACK_NAME_PATTERN = "{{ stack_name }}"
 STACK_ROOT_PATTERN = "{{ stack_root }}"
@@ -129,32 +128,23 @@ def get_current_version(use_upgrading_version_during_upgrade=True):
   :param use_upgrading_version_during_upgrade: True, except when the RU/EU hasn't started yet.
   :return: Version, or False if an error occurred.
   """
-  upgrade_direction = default("/commandParams/upgrade_direction", None)
-  is_stack_upgrade = upgrade_direction is not None
-  current_version = default("/hostLevelParams/current_version", None)
-  Logger.info("Default version is {0}".format(current_version))
-  if is_stack_upgrade:
-    if use_upgrading_version_during_upgrade:
-      # This is the version going to. In the case of a downgrade, it is the lower version.
-      current_version = default("/commandParams/version", None)
-      Logger.info("Because this is a Stack Upgrade, will use version {0}".format(current_version))
-    else:
-      Logger.info("This is a Stack Upgrade, but keep the version unchanged.")
-  else:
-    if current_version is None:
-      # During normal operation, the first installation of services won't yet know about the version, so must rely
-      # on <stack-selector> to get it.
-      stack_version = _get_single_version_from_stack_select()
-      if stack_version:
-        Logger.info("Will use stack version {0}".format(stack_version))
-        current_version = stack_version
+  # get the version for this command
+  version = stack_features.get_stack_feature_version(Script.get_config())
 
+  # if there is no upgrade, then use the command's version
+  if not Script.in_stack_upgrade() or use_upgrading_version_during_upgrade:
+    Logger.info("Tarball version was calcuated as {0}. Use Command Version: {1}".format(
+      version, use_upgrading_version_during_upgrade))
+
+    return version
+
+  # we're in an upgrade and we need to use an older version
+  current_version = stack_select.get_role_component_current_stack_version()
   if current_version is None:
-    message_suffix = "during stack %s" % str(upgrade_direction) if is_stack_upgrade else ""
-    Logger.warning("Cannot copy tarball because unable to determine current version {0}.".format(message_suffix))
+    Logger.warning("Unable to determine the current version of the component for this command; unable to copy the tarball")
     return False
 
-  return current_version
+  return current_version;
 
 
 def _get_single_version_from_stack_select():
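
A minimal sketch of calling the reworked helper (purely illustrative; copy_to_hdfs remains the public entry point that uses this value):

    from resource_management.libraries.functions import copy_tarball

    # by default the command's calculated version is used; passing False during
    # an upgrade falls back to the component's currently selected stack version
    version = copy_tarball.get_current_version(use_upgrading_version_during_upgrade=False)
    if version is False:
      # the version could not be determined, so the tarball copy is skipped
      pass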

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py b/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
index 3fcce82..6e1caa4 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/stack_features.py
@@ -109,10 +109,6 @@ def get_stack_feature_version(config):
   command_version = default("/commandParams/version", None)
   command_stack = default("/commandParams/target_stack", None)
 
-  # something like 2.4.0.0-1234
-  # (or None if this is a cluster install and it hasn't been calculated yet)
-  current_cluster_version = default("/hostLevelParams/current_version", None)
-
   # UPGRADE or DOWNGRADE (or None)
   upgrade_direction = default("/commandParams/upgrade_direction", None)
 
@@ -122,8 +118,8 @@ def get_stack_feature_version(config):
   # if this is not an upgrade, then we take the simple path
   if upgrade_direction is None:
     Logger.info(
-      "Stack Feature Version Info: Cluster Stack={0}, Cluster Current Version={1}, Command Stack={2}, Command Version={3} -> {4}".format(
-        stack_version, current_cluster_version, command_stack, command_version, version_for_stack_feature_checks))
+      "Stack Feature Version Info: Cluster Stack={0}, Command Stack={1}, Command Version={2} -> {3}".format(
+        stack_version, command_stack, command_version, version_for_stack_feature_checks))
 
     return version_for_stack_feature_checks
 
@@ -132,33 +128,24 @@ def get_stack_feature_version(config):
   is_stop_command = _is_stop_command(config)
   if not is_stop_command:
     Logger.info(
-      "Stack Feature Version Info: Cluster Stack={0}, Cluster Current Version={1}, Command Stack={2}, Command Version={3}, Upgrade Direction={4} -> {5}".format(
-        stack_version, current_cluster_version, command_stack, command_version, upgrade_direction,
+      "Stack Feature Version Info: Cluster Stack={0}, Command Stack={1}, Command Version={2}, Upgrade Direction={3} -> {4}".format(
+        stack_version, command_stack, command_version, upgrade_direction,
         version_for_stack_feature_checks))
 
     return version_for_stack_feature_checks
 
-  # something like 2.5.0.0-5678 (or None)
-  downgrade_from_version = default("/commandParams/downgrade_from_version", None)
-
+  is_downgrade = upgrade_direction.lower() == Direction.DOWNGRADE.lower()
   # guaranteed to have a STOP command now during an UPGRADE/DOWNGRADE, check direction
-  if upgrade_direction.lower() == Direction.DOWNGRADE.lower():
-    if downgrade_from_version is None:
-      Logger.warning(
-        "Unable to determine the version being downgraded when stopping services, using {0}".format(
-          version_for_stack_feature_checks))
-    else:
-      version_for_stack_feature_checks = downgrade_from_version
+  if is_downgrade:
+    from resource_management.libraries.functions import upgrade_summary
+    version_for_stack_feature_checks = upgrade_summary.get_source_version(default_version = version_for_stack_feature_checks)
   else:
     # UPGRADE
-    if current_cluster_version is not None:
-      version_for_stack_feature_checks = current_cluster_version
-    else:
       version_for_stack_feature_checks = command_version if command_version is not None else stack_version
 
   Logger.info(
-    "Stack Feature Version Info: Cluster Stack={0}, Cluster Current Version={1}, Command Stack={2}, Command Version={3}, Upgrade Direction={4}, stop_command={5} -> {6}".format(
-      stack_version, current_cluster_version, command_stack, command_version, upgrade_direction,
+    "Stack Feature Version Info: Cluster Stack={0}, Command Stack={1}, Command Version={2}, Upgrade Direction={3}, stop_command={4} -> {5}".format(
+      stack_version, command_stack, command_version, upgrade_direction,
       is_stop_command, version_for_stack_feature_checks))
 
   return version_for_stack_feature_checks
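
The service scripts touched below converge on the same pattern: compute this value once in params and feed it to check_stack_feature. Roughly (StackFeature.ROLLING_UPGRADE is just one existing feature constant, used here as an example):

    from resource_management.libraries.script.script import Script
    from resource_management.libraries.functions import StackFeature
    from resource_management.libraries.functions.stack_features import check_stack_feature, get_stack_feature_version

    config = Script.get_config()
    version_for_stack_feature_checks = get_stack_feature_version(config)

    if check_stack_feature(StackFeature.ROLLING_UPGRADE, version_for_stack_feature_checks):
      # version-gated behavior goes here
      pass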

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-common/src/main/python/resource_management/libraries/functions/upgrade_summary.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/upgrade_summary.py b/ambari-common/src/main/python/resource_management/libraries/functions/upgrade_summary.py
new file mode 100644
index 0000000..f2f5e42
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/upgrade_summary.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from collections import namedtuple
+from resource_management.libraries.script.script import Script
+
+UpgradeSummary = namedtuple("UpgradeSummary", "type direction orchestration is_revert services")
+UpgradeServiceSummary = namedtuple("UpgradeServiceSummary", "service_name source_stack source_version target_stack target_version")
+
+def get_source_version(service_name = None, default_version=None):
+  """
+  Gets the source (from) version of a service participating in an upgrade. If there is no
+  upgrade or the specific service is not participating, the default_version is returned.
+  :param service_name: the service name to check for, or None to extract it from the command
+  :param default_version: if the version of the service can't be calculated, this optional
+  default value is returned
+  :return: the version that the service is upgrading from, or default_version if there is no
+  upgrade or the service is not included in the upgrade.
+  """
+  service_summary = _get_service_summary(service_name)
+  if service_summary is None:
+    return default_version
+
+  return service_summary.source_version
+
+
+def get_target_version(service_name = None, default_version=None):
+  """
+  Gets the target (to) version of a service participating in an upgrade. If there is no
+  upgrade or the specific service is not participating, the default_version is returned.
+  :param service_name: the service name to check for, or None to extract it from the command
+  :param default_version: if the version of the service can't be calculated, this optional
+  default value is returned
+  :return: the version that the service is upgrading to, or default_version if there is no
+  upgrade or the service is not included in the upgrade.
+  """
+  service_summary = _get_service_summary(service_name)
+  if service_summary is None:
+    return default_version
+
+  return service_summary.target_version
+
+
+
+def get_upgrade_summary():
+  """
+  Gets a summary of an upgrade in progress, including type, direction, orchestration and from/to
+  repository versions.
+  """
+  config = Script.get_config()
+  if "upgradeSummary" not in config or not config["upgradeSummary"]:
+    return None
+
+  upgrade_summary = config["upgradeSummary"]
+  service_summary_dict = {}
+
+  service_summaries = upgrade_summary["services"]
+  for service_name, service_summary_json in service_summaries.iteritems():
+    service_summary = UpgradeServiceSummary(service_name = service_name,
+      source_stack = service_summary_json["sourceStackId"],
+      source_version = service_summary_json["sourceVersion"],
+      target_stack = service_summary_json["targetStackId"],
+      target_version = service_summary_json["targetVersion"])
+
+    service_summary_dict[service_name] = service_summary
+
+  return UpgradeSummary(type=upgrade_summary["type"], direction=upgrade_summary["direction"],
+    orchestration=upgrade_summary["orchestration"], is_revert = upgrade_summary["isRevert"],
+    services = service_summary_dict)
+
+def _get_service_summary(service_name):
+  """
+  Gets the service summary for the upgrade/downgrade for the given service, or None if
+  the service isn't participating.
+  :param service_name:  the service name
+  :return:  the service summary or None
+  """
+  upgrade_summary = get_upgrade_summary()
+  if upgrade_summary is None:
+    return None
+
+  if service_name is None:
+    config = Script.get_config()
+    service_name = config['serviceName']
+
+  service_summary = upgrade_summary.services
+  if service_name not in service_summary:
+    return None
+
+  return service_summary[service_name]
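
A short, illustrative sketch of querying the new module from a service script (HIVE is only an example service name):

    from resource_management.libraries.functions import upgrade_summary

    summary = upgrade_summary.get_upgrade_summary()
    if summary is not None:
      # summary.direction, summary.type, summary.orchestration, summary.is_revert, summary.services
      source_version = upgrade_summary.get_source_version("HIVE")
      target_version = upgrade_summary.get_target_version("HIVE")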

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
index e4b2540..7b20259 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
@@ -34,6 +34,7 @@ import org.apache.ambari.server.agent.ExecutionCommand.KeyNames;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.orm.entities.UpgradeEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ConfigHelper;
@@ -43,6 +44,9 @@ import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.state.UpgradeContext;
+import org.apache.ambari.server.state.UpgradeContext.UpgradeSummary;
+import org.apache.ambari.server.state.UpgradeContextFactory;
 import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
@@ -71,6 +75,9 @@ public class ExecutionCommandWrapper {
   @Inject
   private Gson gson;
 
+  @Inject
+  private UpgradeContextFactory upgradeContextFactory;
+
   /**
    * Used for injecting hooks and common-services into the command.
    */
@@ -230,8 +237,6 @@ public class ExecutionCommandWrapper {
           if (!commandParams.containsKey(KeyNames.VERSION)
               && executionCommand.getRoleCommand() != RoleCommand.INSTALL) {
             commandParams.put(KeyNames.VERSION, repositoryVersion.getVersion());
-            executionCommand.getHostLevelParams().put(KeyNames.CURRENT_VERSION, repositoryVersion.getVersion());
-
           }
 
           StackId stackId = repositoryVersion.getStackId();
@@ -263,6 +268,15 @@ public class ExecutionCommandWrapper {
       // we are "loading-late": components that have not yet upgraded in an EU will have the correct versions.
       executionCommand.setComponentVersions(cluster);
 
+      // provide some basic information about a cluster upgrade if there is one
+      // in progress
+      UpgradeEntity upgrade = cluster.getUpgradeInProgress();
+      if (null != upgrade) {
+        UpgradeContext upgradeContext = upgradeContextFactory.create(cluster, upgrade);
+        UpgradeSummary upgradeSummary = upgradeContext.getUpgradeSummary();
+        executionCommand.setUpgradeSummary(upgradeSummary);
+      }
+
     } catch (ClusterNotFoundException cnfe) {
       // it's possible that there are commands without clusters; in such cases,
       // just return the de-serialized command and don't try to read configs

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
index 9b6b2f5..0906044 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
@@ -31,6 +31,7 @@ import org.apache.ambari.server.RoleCommand;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
+import org.apache.ambari.server.state.UpgradeContext.UpgradeSummary;
 import org.apache.ambari.server.utils.StageUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -156,6 +157,9 @@ public class ExecutionCommand extends AgentCommand {
   @SerializedName("componentVersionMap")
   private Map<String, Map<String, String>> componentVersionMap = new HashMap<>();
 
+  @SerializedName("upgradeSummary")
+  private UpgradeSummary upgradeSummary;
+
   public void setConfigurationCredentials(Map<String, Map<String, String>> configurationCredentials) {
     this.configurationCredentials = configurationCredentials;
   }
@@ -497,23 +501,13 @@ public class ExecutionCommand extends AgentCommand {
     /**
      * The version of the component to send down with the command. Normally,
      * this is simply the repository version of the component. However, during
-     * ugprades, this value may change depending on the progress of the upgrade
+     * upgrades, this value may change depending on the progress of the upgrade
      * and the type/direction.
      */
     @Experimental(
         feature = ExperimentalFeature.PATCH_UPGRADES,
         comment = "Change this to reflect the component version")
     String VERSION = "version";
-
-    /**
-     * Put on hostLevelParams to indicate the version that the component should
-     * be.
-     */
-    @Deprecated
-    @Experimental(
-        feature = ExperimentalFeature.PATCH_UPGRADES,
-        comment = "This should be replaced by a map of all service component versions")
-    String CURRENT_VERSION = "current_version";
   }
 
   /**
@@ -549,4 +543,23 @@ public class ExecutionCommand extends AgentCommand {
 
     this.componentVersionMap = componentVersionMap;
   }
+
+  /**
+   * Sets the upgrade summary if there is an active upgrade in the cluster.
+   *
+   * @param upgradeSummary
+   *          the upgrade or {@code null} for none.
+   */
+  public void setUpgradeSummary(UpgradeSummary upgradeSummary) {
+    this.upgradeSummary = upgradeSummary;
+  }
+
+  /**
+   * Gets the upgrade summary if there is an active upgrade in the cluster.
+   *
+   * @return the upgrade or {@code null} for none.
+   */
+  public UpgradeSummary getUpgradeSummary() {
+    return upgradeSummary;
+  }
 }
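
For reference, componentVersionMap is serialized as a service -> component -> version mapping; the agent-side component_version module above reads config["componentVersionMap"] shaped roughly like the following (service, component and version values are illustrative only):

    component_version_map = {
      "HDFS": {
        "NAMENODE": "2.6.0.0-1234",
        "DATANODE": "2.6.0.0-1234"
      },
      "ZOOKEEPER": {
        "ZOOKEEPER_SERVER": "2.6.0.0-1234"
      }
    }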

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 7e26fd7..60f5cce 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -2541,7 +2541,6 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     execCmd.setCommandParams(commandParams);
 
     execCmd.setRepositoryFile(customCommandExecutionHelper.getCommandRepository(cluster, component, host));
-    hostParams.put(KeyNames.CURRENT_VERSION, repoVersion.getVersion());
 
     if ((execCmd != null) && (execCmd.getConfigurationTags().containsKey("cluster-env"))) {
       LOG.debug("AmbariManagementControllerImpl.createHostAction: created ExecutionCommand for host {}, role {}, roleCommand {}, and command ID {}, with cluster-env tags {}",
@@ -3632,7 +3631,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
               clusterServiceMasterForDecommissionMap.put(componentHost.getClusterName(), serviceMasterMap);
 
               Map<String, Set<String>> masterSlaveHostsMap = new HashMap<>();
-              masterSlaveHostsMap.put(masterComponentName, new HashSet<String>(Collections.singletonList(componentHost.getHostName())));
+              masterSlaveHostsMap.put(masterComponentName, new HashSet<>(Collections.singletonList(componentHost.getHostName())));
               clusterMasterSlaveHostsMap.put(componentHost.getClusterName(), masterSlaveHostsMap);
             }
           }

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
index e6a420e..3d7f533 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
@@ -84,6 +84,7 @@ import org.slf4j.LoggerFactory;
 import com.google.common.base.Objects;
 import com.google.gson.Gson;
 import com.google.gson.JsonElement;
+import com.google.gson.annotations.SerializedName;
 import com.google.inject.Inject;
 import com.google.inject.assistedinject.Assisted;
 import com.google.inject.assistedinject.AssistedInject;
@@ -179,8 +180,9 @@ public class UpgradeContext {
   private boolean m_autoSkipManualVerification = false;
 
   /**
-   * A set of services which are included in this upgrade. If this is empty,
-   * then all cluster services are included.
+   * A set of services which are included in this upgrade. This will never be
+   * empty - if all services of a cluster are included, then the cluster's
+   * current list of services is populated.
    */
   private final Set<String> m_services = new HashSet<>();
 
@@ -856,6 +858,44 @@ public class UpgradeContext {
   }
 
   /**
+   * Gets a POJO of the upgrade suitable to serialize.
+   *
+   * @return the upgrade summary as a POJO.
+   */
+  public UpgradeSummary getUpgradeSummary() {
+    UpgradeSummary summary = new UpgradeSummary();
+    summary.direction = m_direction;
+    summary.type = m_type;
+    summary.orchestration = m_orchestration;
+    summary.isRevert = m_isRevert;
+
+    summary.services = new HashMap<>();
+
+    for (String serviceName : m_services) {
+      RepositoryVersionEntity sourceRepositoryVersion = m_sourceRepositoryMap.get(serviceName);
+      RepositoryVersionEntity targetRepositoryVersion = m_targetRepositoryMap.get(serviceName);
+      if (null == sourceRepositoryVersion || null == targetRepositoryVersion) {
+        LOG.warn("Unable to get the source/target repositories for {} for the upgrade summary",
+            serviceName);
+        continue;
+      }
+
+      UpgradeServiceSummary serviceSummary = new UpgradeServiceSummary();
+      serviceSummary.sourceRepositoryId = sourceRepositoryVersion.getId();
+      serviceSummary.sourceStackId = sourceRepositoryVersion.getStackId().getStackId();
+      serviceSummary.sourceVersion = sourceRepositoryVersion.getVersion();
+
+      serviceSummary.targetRepositoryId = targetRepositoryVersion.getId();
+      serviceSummary.targetStackId = targetRepositoryVersion.getStackId().getStackId();
+      serviceSummary.targetVersion = targetRepositoryVersion.getVersion();
+
+      summary.services.put(serviceName, serviceSummary);
+    }
+
+    return summary;
+  }
+
+  /**
    * Gets the set of services which will participate in the upgrade. The
    * services available in the repository are compared against those installed
    * in the cluster to arrive at the final subset.
@@ -1243,4 +1283,49 @@ public class UpgradeContext {
       return hostOrderItems;
     }
   }
+
+  /**
+   * The {@link UpgradeSummary} class is a simple POJO used to serialize the
+   * information about an upgrade.
+   */
+  public static class UpgradeSummary {
+    @SerializedName("direction")
+    public Direction direction;
+
+    @SerializedName("type")
+    public UpgradeType type;
+
+    @SerializedName("orchestration")
+    public RepositoryType orchestration;
+
+    @SerializedName("isRevert")
+    public boolean isRevert = false;
+
+    @SerializedName("services")
+    public Map<String, UpgradeServiceSummary> services;
+  }
+
+  /**
+   * The {@link UpgradeServiceSummary} class is used as a way to encapsulate the
+   * service source and target versions during an upgrade.
+   */
+  public static class UpgradeServiceSummary {
+    @SerializedName("sourceRepositoryId")
+    public long sourceRepositoryId;
+
+    @SerializedName("targetRepositoryId")
+    public long targetRepositoryId;
+
+    @SerializedName("sourceStackId")
+    public String sourceStackId;
+
+    @SerializedName("targetStackId")
+    public String targetStackId;
+
+    @SerializedName("sourceVersion")
+    public String sourceVersion;
+
+    @SerializedName("targetVersion")
+    public String targetVersion;
+  }
 }
\ No newline at end of file
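
Serialized by Gson onto the execution command, this summary reaches the agent as the upgradeSummary block that the Python upgrade_summary module reads; config["upgradeSummary"] is shaped roughly like the following (the enum and version values are illustrative only, the key names come from the @SerializedName annotations above):

    upgrade_summary = {
      "direction": "UPGRADE",
      "type": "ROLLING",
      "orchestration": "STANDARD",
      "isRevert": False,
      "services": {
        "HIVE": {
          "sourceRepositoryId": 1,
          "sourceStackId": "HDP-2.5",
          "sourceVersion": "2.5.0.0-1234",
          "targetRepositoryId": 2,
          "targetStackId": "HDP-2.6",
          "targetVersion": "2.6.0.0-5678"
        }
      }
    }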

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
index c9c66ac..933515b 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
@@ -125,7 +125,7 @@ def falcon(type, action = None, upgrade_type=None):
     # Generate atlas-application.properties.xml file
     if params.falcon_atlas_support and params.enable_atlas_hook:
       # If Atlas is added later than Falcon, this package will be absent.
-      if check_stack_feature(StackFeature.ATLAS_INSTALL_HOOK_PACKAGE_SUPPORT,params.current_version_formatted):
+      if check_stack_feature(StackFeature.ATLAS_INSTALL_HOOK_PACKAGE_SUPPORT,params.version):
         install_atlas_hook_packages(params.atlas_plugin_package, params.atlas_ubuntu_plugin_package, params.host_sys_prepped,
                                     params.agent_stack_retry_on_unavailability, params.agent_stack_retry_count)
 
@@ -134,7 +134,7 @@ def falcon(type, action = None, upgrade_type=None):
 
       # Falcon 0.10 uses FALCON_EXTRA_CLASS_PATH.
       # Setup symlinks for older versions.
-      if params.current_version_formatted and check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT_2_3, params.current_version_formatted):
+      if check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT_2_3, params.version):
         setup_atlas_jar_symlinks("falcon", params.falcon_webinf_lib)
 
   if type == 'server':

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
index 3773918..b8e709a 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
@@ -27,10 +27,9 @@ from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
 import os
 from resource_management.libraries.functions.expect import expect
+from resource_management.libraries.functions import stack_features
 from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions import StackFeature
-from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster
 
 config = Script.get_config()
 stack_root = status_params.stack_root
@@ -39,17 +38,13 @@ stack_name = status_params.stack_name
 agent_stack_retry_on_unavailability = config['hostLevelParams']['agent_stack_retry_on_unavailability']
 agent_stack_retry_count = expect("/hostLevelParams/agent_stack_retry_count", int)
 
-# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
-version = default("/commandParams/version", None)
+version = stack_features.get_stack_feature_version(config)
 
 stack_version_unformatted = status_params.stack_version_unformatted
 stack_version_formatted = status_params.stack_version_formatted
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 jdk_location = config['hostLevelParams']['jdk_location']
 
-# current host stack version
-current_version = default("/hostLevelParams/current_version", None)
-current_version_formatted = format_stack_version(current_version)
 
 etc_prefix_dir = "/etc/falcon"
 
@@ -57,7 +52,7 @@ etc_prefix_dir = "/etc/falcon"
 hadoop_home_dir = stack_select.get_hadoop_dir("home")
 hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
 
-if stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted):
+if check_stack_feature(StackFeature.ROLLING_UPGRADE, version):
   # if this is a server action, then use the server binaries; smoke tests
   # use the client binaries
   server_role_dir_mapping = { 'FALCON_SERVER' : 'falcon-server',
@@ -135,7 +130,7 @@ kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executab
 supports_hive_dr = config['configurations']['falcon-env']['supports_hive_dr']
 # HDP 2.4 still supported the /usr/$STACK/$VERSION/falcon/data-mirroring folder, which had to be copied to HDFS
 # In HDP 2.5, an empty data-mirroring folder has to be created, and the extensions folder has to be uploaded to HDFS.
-supports_data_mirroring = supports_hive_dr and (stack_version_formatted and not check_stack_feature(StackFeature.FALCON_EXTENSIONS, stack_version_formatted))
+supports_data_mirroring = supports_hive_dr and not check_stack_feature(StackFeature.FALCON_EXTENSIONS, version)
 
 local_data_mirroring_dir = format('{stack_root}/current/falcon-server/data-mirroring')
 dfs_data_mirroring_dir = "/apps/data-mirroring"
@@ -156,22 +151,22 @@ falcon_atlas_support = False
 atlas_hook_cp = ""
 if enable_atlas_hook:
 
-  # stack_version doesn't contain a minor number of the stack (only first two numbers: 2.3). Get it from current_version_formatted
-  falcon_atlas_support = current_version_formatted and check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT_2_3, current_version_formatted) \
-      or check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT, stack_version_formatted)
+  # stack_version doesn't contain a minor number of the stack (only first two numbers: 2.3). Get it from the command version
+  falcon_atlas_support = check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT_2_3, version) \
+      or check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT, version)
 
-  if check_stack_feature(StackFeature.ATLAS_CONF_DIR_IN_PATH, stack_version_formatted):
+  if check_stack_feature(StackFeature.ATLAS_CONF_DIR_IN_PATH, version):
     atlas_conf_dir = format('{stack_root}/current/atlas-server/conf')
     atlas_home_dir = format('{stack_root}/current/atlas-server')
     atlas_hook_cp = atlas_conf_dir + os.pathsep + os.path.join(atlas_home_dir, "hook", "falcon", "*") + os.pathsep
-  elif check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, stack_version_formatted):
+  elif check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, version):
     atlas_hook_cp = format('{stack_root}/current/atlas-client/hook/falcon/*') + os.pathsep
 
 atlas_application_class_addition = ""
 if falcon_atlas_support:
   # Some stack versions do not support Atlas Falcon hook. See stack_features.json
   # Packaging was different in older versions.
-  if current_version_formatted and check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT_2_3, current_version_formatted):
+  if check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT_2_3, version):
     atlas_application_class_addition = ",\\\norg.apache.falcon.atlas.service.AtlasService"
     atlas_plugin_package = "atlas-metadata*-falcon-plugin"
     atlas_ubuntu_plugin_package = "atlas-metadata.*-falcon-plugin"

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
index f94248b..b75ff13 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
@@ -180,10 +180,8 @@ class HiveMetastoreDefault(HiveMetastore):
     # since the configurations have not been written out yet during an upgrade
     # we need to choose the original legacy location
     schematool_hive_server_conf_dir = params.hive_server_conf_dir
-    if params.current_version is not None:
-      current_version = format_stack_version(params.current_version)
-      if not(check_stack_feature(StackFeature.CONFIG_VERSIONING, current_version)):
-        schematool_hive_server_conf_dir = LEGACY_HIVE_SERVER_CONF
+    if not(check_stack_feature(StackFeature.CONFIG_VERSIONING, params.version_for_stack_feature_checks)):
+      schematool_hive_server_conf_dir = LEGACY_HIVE_SERVER_CONF
 
     env_dict = {
       'HIVE_CONF_DIR': schematool_hive_server_conf_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
index 17db489..9121923 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
@@ -70,10 +70,7 @@ def deregister():
   # By now <stack-selector-tool> has been called to set 'current' to target-stack
   if "downgrade" == params.upgrade_direction:
     # hive_bin
-    downgrade_version = params.current_version
-    if params.downgrade_from_version:
-      downgrade_version = params.downgrade_from_version
-    hive_execute_path = _get_hive_execute_path(downgrade_version)
+    hive_execute_path = _get_hive_execute_path(params.version_for_stack_feature_checks)
 
   command = format('hive --config {hive_server_conf_dir} --service hiveserver2 --deregister ' + current_hiveserver_version)
   Execute(command, user=params.hive_user, path=hive_execute_path, tries=1 )
@@ -114,12 +111,7 @@ def _get_current_hiveserver_version():
 
   try:
     # When downgrading the source version should be the version we are downgrading from
-    if "downgrade" == params.upgrade_direction:
-      if not params.downgrade_from_version:
-        raise Fail('The version from which we are downgrading from should be provided in \'downgrade_from_version\'')
-      source_version = params.downgrade_from_version
-    else:
-      source_version = params.current_version
+    source_version = params.version_for_stack_feature_checks
     hive_execute_path = _get_hive_execute_path(source_version)
     version_hive_bin = params.hive_bin
     formatted_source_version = format_stack_version(source_version)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
index 22b4061..05b395d 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
@@ -64,7 +64,7 @@ def hive_service(name, action='start', upgrade_type=None):
     cmd = format("{start_hiveserver2_path} {hive_log_dir}/hive-server2.out {hive_log_dir}/hive-server2.err {pid_file} {hive_server_conf_dir} {hive_log_dir}")
 
 
-    if params.security_enabled and params.current_version and check_stack_feature(StackFeature.HIVE_SERVER2_KERBERIZED_ENV, params.current_version):
+    if params.security_enabled and check_stack_feature(StackFeature.HIVE_SERVER2_KERBERIZED_ENV, params.version_for_stack_feature_checks):
       hive_kinit_cmd = format("{kinit_path_local} -kt {hive_server2_keytab} {hive_principal}; ")
       Execute(hive_kinit_cmd, user=params.hive_user)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index d46b6ce..886156a 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -39,6 +39,7 @@ from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.stack_features import get_stack_feature_version
 from resource_management.libraries.functions.get_port_from_url import get_port_from_url
+from resource_management.libraries.functions import upgrade_summary
 from resource_management.libraries.functions.expect import expect
 from resource_management.libraries import functions
 from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster
@@ -88,13 +89,6 @@ major_stack_version = get_major_version(stack_version_formatted_major)
 # It cannot be used during the initial Cluster Install because the version is not yet known.
 version = default("/commandParams/version", None)
 
-# current host stack version
-current_version = default("/hostLevelParams/current_version", None)
-
-# When downgrading the 'version' and 'current_version' are both pointing to the downgrade-target version
-# downgrade_from_version provides the source-version the downgrade is happening from
-downgrade_from_version = default("/commandParams/downgrade_from_version", None)
-
 # get the correct version to use for checking stack features
 version_for_stack_feature_checks = get_stack_feature_version(config)
 
@@ -317,7 +311,8 @@ driver_curl_source = format("{jdk_location}/{jdbc_jar_name}")
 # normally, the JDBC driver would be referenced by <stack-root>/current/.../foo.jar
 # but in RU if <stack-selector-tool> is called and the restart fails, then this means that current pointer
 # is now pointing to the upgraded version location; that's bad for the cp command
-source_jdbc_file = format("{stack_root}/{current_version}/hive/lib/{jdbc_jar_name}")
+version_for_source_jdbc_file = upgrade_summary.get_source_version(default_version = version_for_stack_feature_checks)
+source_jdbc_file = format("{stack_root}/{version_for_source_jdbc_file}/hive/lib/{jdbc_jar_name}")
 
 check_db_connection_jar_name = "DBConnectionVerification.jar"
 check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka.py
index e6d7339..101cd40 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka.py
@@ -52,8 +52,8 @@ def kafka(upgrade_type=None):
     # similarly we need preserve port as well during the upgrade
 
     if upgrade_type is not None and params.upgrade_direction == Direction.UPGRADE and \
-      check_stack_feature(StackFeature.CREATE_KAFKA_BROKER_ID, params.current_version) and \
-      check_stack_feature(StackFeature.KAFKA_LISTENERS, params.version):
+      check_stack_feature(StackFeature.CREATE_KAFKA_BROKER_ID, params.version_for_stack_feature_checks) and \
+      check_stack_feature(StackFeature.KAFKA_LISTENERS, params.version_for_stack_feature_checks):
       if len(params.kafka_hosts) > 0 and params.hostname in params.kafka_hosts:
         brokerid = str(sorted(params.kafka_hosts).index(params.hostname))
         kafka_server_config['broker.id'] = brokerid

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
index 266bb42..769e6ec 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
@@ -47,19 +47,16 @@ class KafkaBroker(Script):
     import params
     env.set_params(params)
 
+    # grab the current version of the component
+    pre_upgrade_version = stack_select.get_role_component_current_stack_version()
+
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       stack_select.select_packages(params.version)
 
     # This is extremely important since it should only be called if crossing the HDP 2.3.4.0 boundary.
-    if params.current_version and params.version and params.upgrade_direction:
-      src_version = dst_version = None
-      if params.upgrade_direction == Direction.UPGRADE:
-        src_version = format_stack_version(params.current_version)
-        dst_version = format_stack_version(params.version)
-      else:
-        # These represent the original values during the UPGRADE direction
-        src_version = format_stack_version(params.version)
-        dst_version = format_stack_version(params.downgrade_from_version)
+    if pre_upgrade_version and params.version_for_stack_feature_checks and params.upgrade_direction:
+      src_version = format_stack_version(pre_upgrade_version)
+      dst_version = format_stack_version(params.version_for_stack_feature_checks)
 
       if not check_stack_feature(StackFeature.KAFKA_ACL_MIGRATION_SUPPORT, src_version) and check_stack_feature(StackFeature.KAFKA_ACL_MIGRATION_SUPPORT, dst_version):
         # Calling the acl migration script requires the configs to be present.

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
index 8aa4fc2..349ff5c 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
@@ -46,10 +46,6 @@ retryAble = default("/commandParams/command_retry_enabled", False)
 # Version being upgraded/downgraded to
 version = default("/commandParams/version", None)
 
-# Version that is CURRENT.
-current_version = default("/hostLevelParams/current_version", None)
-
-
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
@@ -61,10 +57,6 @@ stack_supports_ranger_kerberos = check_stack_feature(StackFeature.RANGER_KERBERO
 stack_supports_ranger_audit_db = check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, version_for_stack_feature_checks)
 stack_supports_core_site_for_ranger_plugin = check_stack_feature(StackFeature.CORE_SITE_FOR_RANGER_PLUGINS_SUPPORT, version_for_stack_feature_checks)
 
-# When downgrading the 'version' and 'current_version' are both pointing to the downgrade-target version
-# downgrade_from_version provides the source-version the downgrade is happening from
-downgrade_from_version = default("/commandParams/downgrade_from_version", None)
-
 hostname = config['hostname']
 
 # default kafka parameters

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/upgrade.py
index b6e4046..e79a8ad 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/upgrade.py
@@ -43,9 +43,6 @@ def run_migration(env, upgrade_type):
   if params.upgrade_direction is None:
     raise Fail('Parameter "upgrade_direction" is missing.')
 
-  if params.upgrade_direction == Direction.DOWNGRADE and params.downgrade_from_version is None:
-    raise Fail('Parameter "downgrade_from_version" is missing.')
-
   if not params.security_enabled:
     Logger.info("Skip running the Kafka ACL migration script since cluster security is not enabled.")
     return
@@ -53,13 +50,11 @@ def run_migration(env, upgrade_type):
   Logger.info("Upgrade type: {0}, direction: {1}".format(str(upgrade_type), params.upgrade_direction))
 
   # If the schema upgrade script exists in the version upgrading to, then attempt to upgrade/downgrade it while still using the present bits.
-  kafka_acls_script = None
+  kafka_acls_script = format("{stack_root}/{version_for_stack_feature_checks}/kafka/bin/kafka-acls.sh")
   command_suffix = ""
   if params.upgrade_direction == Direction.UPGRADE:
-    kafka_acls_script = format("{stack_root}/{version}/kafka/bin/kafka-acls.sh")
     command_suffix = "--upgradeAcls"
   elif params.upgrade_direction == Direction.DOWNGRADE:
-    kafka_acls_script = format("{stack_root}/{downgrade_from_version}/kafka/bin/kafka-acls.sh")
     command_suffix = "--downgradeAcls"
 
   if kafka_acls_script is not None:
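
With downgrade_from_version removed, the kafka-acls.sh path is resolved once from version_for_stack_feature_checks and only the CLI flag varies with the upgrade direction. A condensed sketch of the resulting flow, assuming the Ambari command context (env.set_params(params) has run, as it does earlier in run_migration) and the usual resource_management import paths:

from resource_management.libraries.functions import format
from resource_management.libraries.functions.constants import Direction

import params

# format() can resolve {stack_root} and {version_for_stack_feature_checks}
# because env.set_params(params) has already published the params module
kafka_acls_script = format("{stack_root}/{version_for_stack_feature_checks}/kafka/bin/kafka-acls.sh")

command_suffix = ""
if params.upgrade_direction == Direction.UPGRADE:
  command_suffix = "--upgradeAcls"
elif params.upgrade_direction == Direction.DOWNGRADE:
  command_suffix = "--downgradeAcls"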

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
index 9b0bbfc..c9b229f 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
@@ -35,6 +35,7 @@ from status_params import *
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions import upgrade_summary
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.stack_features import get_stack_feature_version
@@ -66,7 +67,7 @@ stack_supports_core_site_for_ranger_plugin = check_stack_feature(StackFeature.CO
 
 # This is the version whose state is CURRENT. During an RU, this is the source version.
 # DO NOT format it since we need the build number too.
-upgrade_from_version = default("/hostLevelParams/current_version", None)
+upgrade_from_version = upgrade_summary.get_source_version()
 
 source_stack = default("/commandParams/source_stack", None)
 source_stack_name = get_stack_name(source_stack)
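
The Knox change shows the replacement for the removed /hostLevelParams/current_version: the upgrade source version now comes from the upgrade_summary helper added by this patch. A small sketch of its use, assuming an Ambari command context; the fallback keyword follows the HIVE params change further down, and the fallback name here is only a placeholder:

from resource_management.libraries.functions import upgrade_summary

# source version of the upgrade in progress; do not format it, the build number is needed
upgrade_from_version = upgrade_summary.get_source_version()

# commands that can also run outside of an upgrade may supply a fallback version, e.g.
# upgrade_from_version = upgrade_summary.get_source_version(default_version=fallback_version)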

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
index aa5bc30..64e1cc1 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
@@ -424,8 +424,8 @@ def copy_atlas_hive_hook_to_dfs_share_lib(upgrade_type=None, upgrade_direction=N
                  "and performing a Downgrade.")
     return
 
-  current_version = get_current_version()
-  atlas_hive_hook_dir = format("{stack_root}/{current_version}/atlas/hook/hive/")
+  effective_version = get_current_version()
+  atlas_hive_hook_dir = format("{stack_root}/{effective_version}/atlas/hook/hive/")
   if not os.path.exists(atlas_hive_hook_dir):
     Logger.error(format("ERROR. Atlas is installed in cluster but this Oozie server doesn't "
                         "contain directory {atlas_hive_hook_dir}"))

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
index 65ee1a7..bd45558 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
@@ -76,7 +76,6 @@ stack_supports_ranger_tagsync_ssl_xml_support = check_stack_feature(StackFeature
 stack_supports_ranger_solr_configs = check_stack_feature(StackFeature.RANGER_SOLR_CONFIG_SUPPORT, version_for_stack_feature_checks)
 stack_supports_secure_ssl_password = check_stack_feature(StackFeature.SECURE_RANGER_SSL_PASSWORD, version_for_stack_feature_checks)
 
-downgrade_from_version = default("/commandParams/downgrade_from_version", None)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 
 ranger_conf    = '/etc/ranger/admin/conf'

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/params_linux.py
index b8e8f78..34c6b9a 100644
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/params_linux.py
@@ -38,7 +38,6 @@ stack_version_formatted = status_params.stack_version_formatted
 stack_root = status_params.stack_root
 
 stack_name = status_params.stack_name
-current_version = default("/hostLevelParams/current_version", None)
 component_directory = status_params.component_directory
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_metastore.py
index 88bb2e0..7728ac6 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_metastore.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_metastore.py
@@ -158,7 +158,7 @@ class HiveMetastoreDefault(HiveMetastore):
     # ensure that the JDBC driver is present for the schema tool; if it's not
     # present, then download it first
     if params.hive_jdbc_driver in params.hive_jdbc_drivers_list and params.hive_use_existing_db:
-      source = format("/usr/iop/{current_version}/hive/lib/{jdbc_jar_name}")
+      source = params.source_jdbc_file
       target_directory = format("/usr/iop/{version}/hive/lib")
       if not os.path.exists(source):
         # download it
@@ -178,9 +178,9 @@ class HiveMetastoreDefault(HiveMetastore):
     # since the configurations have not been written out yet during an upgrade
     # we need to choose the original legacy location
     schematool_hive_server_conf_dir = params.hive_server_conf_dir
-    if params.current_version is not None:
-      current_version = format_stack_version(params.current_version)
-      if compare_versions(current_version, "4.1.0.0") < 0:
+    if params.version_for_stack_feature_checks is not None:
+      version_for_stack_feature_checks = format_stack_version(params.version_for_stack_feature_checks)
+      if compare_versions(version_for_stack_feature_checks, "4.1.0.0") < 0:
         schematool_hive_server_conf_dir = LEGACY_HIVE_SERVER_CONF
 
     env_dict = {

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_server_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_server_upgrade.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_server_upgrade.py
index 318fcca..0105a7d 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_server_upgrade.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/hive_server_upgrade.py
@@ -142,10 +142,7 @@ def post_upgrade_deregister():
   # By now hdp-select has been called to set 'current' to target-stack
   if "downgrade" == params.upgrade_direction:
     # hive_bin
-    downgrade_version = params.current_version
-    if params.downgrade_from_version:
-      downgrade_version = params.downgrade_from_version
-    hive_execute_path = _get_hive_execute_path(downgrade_version)
+    hive_execute_path = _get_hive_execute_path(params.version_for_stack_feature_checks)
 
   command = format('hive --config {hive_server_conf_dir} --service hiveserver2 --deregister ' + current_hiveserver_version)
   Execute(command, user=params.hive_user, path=hive_execute_path, tries=1 )

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/params.py
index e5fe128..e9d62b1 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/HIVE/package/scripts/params.py
@@ -34,6 +34,8 @@ from resource_management.libraries.functions.copy_tarball import STACK_VERSION_P
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import upgrade_summary
+from resource_management.libraries.functions.stack_features import get_stack_feature_version
 from resource_management.libraries.functions.get_port_from_url import get_port_from_url
 from resource_management.libraries import functions
 
@@ -56,15 +58,11 @@ stack_is_21 = False
 # It cannot be used during the initial Cluster Install because the version is not yet known.
 version = default("/commandParams/version", None)
 
-# current host stack version
-current_version = default("/hostLevelParams/current_version", None)
-
 # Upgrade direction
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 
-# When downgrading the 'version' and 'current_version' are both pointing to the downgrade-target version
-# downgrade_from_version provides the source-version the downgrade is happening from
-downgrade_from_version = default("/commandParams/downgrade_from_version", None)
+# get the correct version to use for checking stack features
+version_for_stack_feature_checks = get_stack_feature_version(config)
 
 component_directory = status_params.component_directory
 hadoop_bin_dir = "/usr/bin"
@@ -160,6 +158,8 @@ execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_
 hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
 hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
 
+version_for_source_jdbc_file = upgrade_summary.get_source_version(default_version = version_for_stack_feature_checks)
+
 hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
 hive_metastore_db_type = config['configurations']['hive-env']['hive_database_type']
 #HACK Temporarily use dbType=azuredb while invoking schematool
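
Taken together, the params changes above show the two lookups that replace current_version and downgrade_from_version in the BigInsights HIVE scripts: a feature-check version derived from the command, and a source version for locating the existing JDBC driver that falls back to it when no upgrade is running. A condensed sketch under the same assumptions (Ambari command context, config obtained from Script.get_config() as elsewhere in this params module):

from resource_management.libraries.script.script import Script
from resource_management.libraries.functions import upgrade_summary
from resource_management.libraries.functions.stack_features import get_stack_feature_version

config = Script.get_config()

# the correct version to use for stack feature checks (install, upgrade, or downgrade)
version_for_stack_feature_checks = get_stack_feature_version(config)

# source version used to find the existing JDBC driver; falls back to the
# feature-check version when the command is not part of an upgrade
version_for_source_jdbc_file = upgrade_summary.get_source_version(
    default_version=version_for_stack_feature_checks)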

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/kafka_broker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/kafka_broker.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/kafka_broker.py
index cb5954e..b8d44f0 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/kafka_broker.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/kafka_broker.py
@@ -52,19 +52,16 @@ class KafkaBroker(Script):
     import params
     env.set_params(params)
 
+    # grab the current version of the component
+    pre_upgrade_version = stack_select.get_role_component_current_stack_version()
+
     if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
       stack_select.select_packages(params.version)
 
     # This is extremely important since it should only be called if crossing the IOP 4.2 boundary.
-    if params.current_version and params.version and params.upgrade_direction:
-      src_version = dst_version = None
-      if params.upgrade_direction == Direction.UPGRADE:
-        src_version = format_stack_version(params.current_version)
-        dst_version = format_stack_version(params.version)
-      else:
-        # These represent the original values during the UPGRADE direction
-        src_version = format_stack_version(params.version)
-        dst_version = format_stack_version(params.downgrade_from_version)
+    if pre_upgrade_version and params.version_for_stack_feature_checks and params.upgrade_direction:
+      src_version = format_stack_version(pre_upgrade_version)
+      dst_version = format_stack_version(params.version_for_stack_feature_checks)
 
       if compare_versions(src_version, '4.2.0.0') < 0 and compare_versions(dst_version, '4.2.0.0') >= 0:
         # Upgrade from IOP 4.1 to 4.2, Calling the acl migration script requires the configs to be present.
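
Since current_version is no longer delivered in hostLevelParams, the BigInsights broker reads the component's current version back through stack_select and compares it with the feature-check version to decide whether the IOP 4.2 boundary is being crossed. A trimmed-down sketch of that check, with variable names following the diff and the Ambari command context assumed:

from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.version import format_stack_version, compare_versions

import params

# version the kafka-broker bits currently point at (e.g. "4.1.0.0-123", value illustrative)
pre_upgrade_version = stack_select.get_role_component_current_stack_version()

if pre_upgrade_version and params.version_for_stack_feature_checks and params.upgrade_direction:
  src_version = format_stack_version(pre_upgrade_version)
  dst_version = format_stack_version(params.version_for_stack_feature_checks)

  # run the ACL migration only when moving from below IOP 4.2 to 4.2 or later
  crosses_42_boundary = (compare_versions(src_version, "4.2.0.0") < 0
                         and compare_versions(dst_version, "4.2.0.0") >= 0)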

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/params.py
index bc19704..799866a 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/params.py
@@ -21,6 +21,7 @@ from resource_management.libraries.functions import format
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.version import format_stack_version, compare_versions
 from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.stack_features import get_stack_feature_version
 from utils import get_bare_principal
 
 from resource_management.libraries.functions.get_stack_version import get_stack_version
@@ -40,8 +41,7 @@ tmp_dir = Script.get_tmp_dir()
 stack_name = default("/hostLevelParams/stack_name", None)
 
 version = default("/commandParams/version", None)
-# Version that is CURRENT.
-current_version = default("/hostLevelParams/current_version", None)
+version_for_stack_feature_checks = get_stack_feature_version(config)
 
 host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
 
@@ -49,9 +49,6 @@ stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 iop_stack_version = format_stack_version(stack_version_unformatted)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 
-# When downgrading the 'version' and 'current_version' are both pointing to the downgrade-target version
-# downgrade_from_version provides the source-version the downgrade is happening from
-downgrade_from_version = default("/commandParams/downgrade_from_version", None)
 
 # default kafka parameters
 kafka_home = '/usr/iop/current/kafka-broker'

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/upgrade.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/upgrade.py
index ba6396e..e405cb5 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/upgrade.py
@@ -44,9 +44,6 @@ def run_migration(env, upgrade_type):
   if params.upgrade_direction is None:
     raise Fail('Parameter "upgrade_direction" is missing.')
 
-  if params.upgrade_direction == Direction.DOWNGRADE and params.downgrade_from_version is None:
-    raise Fail('Parameter "downgrade_from_version" is missing.')
-
   if not params.security_enabled:
     Logger.info("Skip running the Kafka ACL migration script since cluster security is not enabled.")
     return
@@ -54,13 +51,11 @@ def run_migration(env, upgrade_type):
   Logger.info("Upgrade type: {0}, direction: {1}".format(str(upgrade_type), params.upgrade_direction))
 
   # If the schema upgrade script exists in the version upgrading to, then attempt to upgrade/downgrade it while still using the present bits.
-  kafka_acls_script = None
+  kafka_acls_script = format("/usr/hdp/{version_for_stack_feature_checks}/kafka/bin/kafka-acls.sh")
   command_suffix = ""
   if params.upgrade_direction == Direction.UPGRADE:
-    kafka_acls_script = format("/usr/hdp/{version}/kafka/bin/kafka-acls.sh")
     command_suffix = "--upgradeAcls"
   elif params.upgrade_direction == Direction.DOWNGRADE:
-    kafka_acls_script = format("/usr/hdp/{downgrade_from_version}/kafka/bin/kafka-acls.sh")
     command_suffix = "--downgradeAcls"
 
   if kafka_acls_script is not None:

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/params.py
index d5d90f6..62dfa49 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/params.py
@@ -42,7 +42,6 @@ java64_home = config['hostLevelParams']['java_home']
 version = default("/commandParams/version", None)
 
 # current host stack version
-current_version = default("/hostLevelParams/current_version", None)
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 iop_stack_version = format_stack_version(stack_version_unformatted)
 
@@ -96,7 +95,7 @@ for host in config['clusterHostInfo']['zookeeper_hosts']:
   if index < len(config['clusterHostInfo']['zookeeper_hosts']):
     zookeeper_quorum += ","
 
-if compare_versions(format_stack_version(current_version), '4.2.0.0') >= 0:
+if compare_versions(format_stack_version(version), '4.2.0.0') >= 0:
   if upgrade_direction is not None and upgrade_direction == Direction.DOWNGRADE and version is not None and compare_versions(format_stack_version(version), '4.2.0.0') < 0:
     lib_dir=default("/configurations/solr-env/solr_lib_dir", None)
   else:

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_service.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_service.py
index 105aac6..2363f92 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_service.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_service.py
@@ -37,7 +37,7 @@ def solr_service(action='start'):
 
     Execute (params.solr_home+'/server/scripts/cloud-scripts/zkcli.sh -zkhost ' + params.zookeeper_hosts_list + ' -cmd makepath ' + params.zookeeper_chroot, user=params.solr_user, ignore_failures=True )
 
-    if (params.upgrade_direction is not None and params.upgrade_direction == Direction.UPGRADE) or (compare_versions(format_stack_version(params.current_version), '4.2.0.0') >= 0):
+    if (params.upgrade_direction is not None and params.upgrade_direction == Direction.UPGRADE) or (compare_versions(format_stack_version(params.version), '4.2.0.0') >= 0):
       solr_home_dir = params.solr_data_dir
     else:
       solr_home_dir = params.lib_dir + "/data"

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_upgrade.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_upgrade.py
index fa2feb0..7a1864d 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_upgrade.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/SOLR/package/scripts/solr_upgrade.py
@@ -73,13 +73,13 @@ class SolrServerUpgrade(Script):
     import params
     env.set_params(params)
 
-    if compare_versions(format_stack_version(params.current_version), '4.2.0.0') >= 0:
+    if compare_versions(format_stack_version(params.version), '4.2.0.0') >= 0:
       solr_home_dir=params.solr_data_dir
     else: #4.1.0.0
       solr_home_dir=params.old_lib_dir + "/data"
 
     unique = get_unique_id_and_date()
-    backup_solr_dir="/tmp/upgrades/{0}/solr_{1}".format(params.current_version, unique)
+    backup_solr_dir="/tmp/upgrades/{0}/solr_{1}".format(params.version, unique)
     backup_solr_cores="/tmp/solr/cores"
 
     if os.path.isdir(solr_home_dir) and not os.path.isdir(backup_solr_dir):

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/params.py
index e9b8144..6af29dd 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/params.py
@@ -38,7 +38,6 @@ stack_name = default("/hostLevelParams/stack_name", None)
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
-current_version = default("/hostLevelParams/current_version", None)
 
 #hadoop params
 role_root = "zookeeper-client"

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper.py
index 0cb62bf..985ded1 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper.py
@@ -72,8 +72,8 @@ def zookeeper(type = None, upgrade_type=None):
          content = myid
     )
     # This path may be missing after Ambari upgrade. We need to create it.
-    if (upgrade_type == "rolling") and (not os.path.exists("/usr/iop/current/zookeeper-server")) and params.current_version:
-      conf_select(params.stack_name, "zookeeper", params.current_version)
+    if (upgrade_type == "rolling") and (not os.path.exists("/usr/iop/current/zookeeper-server")) and params.version:
+      conf_select(params.stack_name, "zookeeper", params.version)
       stack_select.select_packages(params.version)
       #Execute(format("stack-select set zookeeper-server {version}"))
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper_service.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper_service.py
index 1dc24cd..0727970 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper_service.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/ZOOKEEPER/package/scripts/zookeeper_service.py
@@ -27,9 +27,9 @@ def zookeeper_service(action='start', upgrade_type=None):
   import params
 
   # This path may be missing after Ambari upgrade. We need to create it.
-  if upgrade_type is None and not os.path.exists("/usr/iop/current/zookeeper-server") and params.current_version \
+  if upgrade_type is None and not os.path.exists("/usr/iop/current/zookeeper-server") and params.version \
     and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
-    conf_select.select(params.stack_name, "zookeeper", params.current_version)
+    conf_select.select(params.stack_name, "zookeeper", params.version)
     stack_select.select("zookeeper-server", params.version)
 
   cmd = format("env ZOOCFGDIR={config_dir} ZOOCFG=zoo.cfg {zk_bin}/zkServer.sh")

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/after-INSTALL/scripts/params.py
index 4f589f8..c497054 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/hooks/after-INSTALL/scripts/params.py
@@ -39,9 +39,6 @@ sudo = AMBARI_SUDO_BINARY
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
 
-# current host stack version
-current_version = default("/hostLevelParams/current_version", None)
-
 # default hadoop params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/SOLR/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/SOLR/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/SOLR/package/scripts/params.py
index b708026..3230c3f 100644
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/SOLR/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/SOLR/package/scripts/params.py
@@ -48,8 +48,6 @@ stack_name = default("/hostLevelParams/stack_name", None)
 retryAble = default("/commandParams/command_retry_enabled", False)
 version = default("/commandParams/version", None)
 
-
-current_version = default("/hostLevelParams/current_version", None)
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 stack_version = default("/commandParams/version", None)
 sudo = AMBARI_SUDO_BINARY

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/params.py
index 2c672aa..c0c314d 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HBASE/package/scripts/params.py
@@ -99,8 +99,7 @@ hbase_javaopts_properties = config['configurations']['hbase-javaopts-properties'
 
 hbase_javaopts_properties = str(hbase_javaopts_properties)	
 if hbase_javaopts_properties.find('-Diop.version') == -1:
-  current_version = default("/hostLevelParams/current_version", None)
-  iop_full_version = format_stack_version(current_version)
+  iop_full_version = format_stack_version(version)
   hbase_javaopts_properties = hbase_javaopts_properties+ ' -Diop.version=' + str(iop_full_version)
 
 regionserver_heapsize = ensure_unit_for_memory(config['configurations']['hbase-env']['hbase_regionserver_heapsize'])

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7f413d3/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_metastore.py
index adfbead..2705e5a 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_metastore.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/HIVE/package/scripts/hive_metastore.py
@@ -213,9 +213,9 @@ class HiveMetastoreDefault(HiveMetastore):
     # since the configurations have not been written out yet during an upgrade
     # we need to choose the original legacy location
     schematool_hive_server_conf_dir = params.hive_server_conf_dir
-    if params.current_version is not None:
-      current_version = format_stack_version(params.current_version)
-      if compare_versions(current_version, "4.1.0.0") < 0:
+    if params.version_for_stack_feature_checks:
+      version = format_stack_version(params.version_for_stack_feature_checks)
+      if compare_versions(version, "4.1.0.0") < 0:
         schematool_hive_server_conf_dir = LEGACY_HIVE_SERVER_CONF
 
     env_dict = {

