ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From srima...@apache.org
Subject ambari git commit: AMBARI-12280. HiveServer2 upgrade/downgrade should deregister old server after new one is started (srimanth)
Date Tue, 07 Jul 2015 20:04:38 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 6f2b5c2f9 -> 23cdd769a


AMBARI-12280. HiveServer2 upgrade/downgrade should deregister old server after new one is
started (srimanth)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/23cdd769
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/23cdd769
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/23cdd769

Branch: refs/heads/trunk
Commit: 23cdd769ae1ef056b2afc770fea672ca4421f447
Parents: 6f2b5c2
Author: Srimanth Gunturi <sgunturi@hortonworks.com>
Authored: Sat Jul 4 19:11:06 2015 -0700
Committer: Srimanth Gunturi <sgunturi@hortonworks.com>
Committed: Tue Jul 7 13:04:25 2015 -0700

----------------------------------------------------------------------
 .../libraries/functions/hdp_select.py           | 29 +++++++++-
 .../libraries/script/script.py                  | 16 ++++-
 .../internal/UpgradeResourceProvider.java       | 14 +++++
 .../ambari/server/orm/dao/UpgradeDAO.java       | 17 ++++++
 .../server/orm/entities/UpgradeEntity.java      |  2 +
 .../ambari/server/state/UpgradeContext.java     | 17 ++++++
 .../0.12.0.2.0/package/scripts/hive_server.py   |  9 ++-
 .../package/scripts/hive_server_upgrade.py      | 61 +++++++++++++++-----
 .../0.12.0.2.0/package/scripts/params_linux.py  |  4 ++
 .../ambari/server/orm/dao/UpgradeDAOTest.java   | 37 ++++++++++++
 .../stacks/2.0.6/HIVE/test_hive_server.py       | 11 ++--
 11 files changed, 187 insertions(+), 30 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/23cdd769/ambari-common/src/main/python/resource_management/libraries/functions/hdp_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/hdp_select.py
b/ambari-common/src/main/python/resource_management/libraries/functions/hdp_select.py
index 4583808..d0ee9ad 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/hdp_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/hdp_select.py
@@ -26,6 +26,7 @@ from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.get_hdp_version import get_hdp_version
 from resource_management.libraries.script.script import Script
 from resource_management.core.shell import call
+from resource_management.libraries.functions.version import format_hdp_stack_version
 
 # hdp-select set oozie-server 2.2.0.0-1234
 TEMPLATE = ('hdp-select', 'set')
@@ -149,7 +150,7 @@ def get_role_component_current_hdp_version():
   return current_hdp_version
 
 
-def get_hadoop_dir(target, force_latest_on_upgrade=False, upgrade_stack_only=False):
+def get_hadoop_dir(target, force_latest_on_upgrade=False):
   """
   Return the hadoop shared directory in the following override order
   1. Use default for 2.1 and lower
@@ -160,7 +161,6 @@ def get_hadoop_dir(target, force_latest_on_upgrade=False, upgrade_stack_only=Fal
   :target: the target directory
   :force_latest_on_upgrade: if True, then this will return the "current" directory
   without the HDP version built into the path, such as /usr/hdp/current/hadoop-client
-  :upgrade_stack_only: if True, provides upgrade stack target if present and not current
   """
 
   if not target in HADOOP_DIR_DEFAULTS:
@@ -186,7 +186,7 @@ def get_hadoop_dir(target, force_latest_on_upgrade=False, upgrade_stack_only=Fal
         # determine if hdp-select has been run and if not, then use the current
         # hdp version until this component is upgraded
         current_hdp_version = get_role_component_current_hdp_version()
-        if current_hdp_version is not None and stack_version != current_hdp_version and not
upgrade_stack_only:
+        if current_hdp_version is not None and stack_version != current_hdp_version:
           stack_version = current_hdp_version
 
         if target == "home":
@@ -197,6 +197,29 @@ def get_hadoop_dir(target, force_latest_on_upgrade=False, upgrade_stack_only=Fal
 
   return hadoop_dir
 
+def get_hadoop_dir_for_stack_version(target, stack_version):
+  """
+  Return the hadoop shared directory for the provided stack version. This is necessary
+  when folder paths of downgrade-source stack-version are needed after hdp-select. 
+  :target: the target directory
+  :stack_version: stack version to get hadoop dir for
+  """
+
+  if not target in HADOOP_DIR_DEFAULTS:
+    raise Fail("Target {0} not defined".format(target))
+
+  hadoop_dir = HADOOP_DIR_DEFAULTS[target]
+
+  formatted_stack_version = format_hdp_stack_version(stack_version)
+  if Script.is_hdp_stack_greater_or_equal_to(formatted_stack_version, "2.2"):
+    # home uses a different template
+    if target == "home":
+      hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_version, "hadoop")
+    else:
+      hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_version, "hadoop", target)
+
+  return hadoop_dir
+
 
 def _get_upgrade_stack():
   """

http://git-wip-us.apache.org/repos/asf/ambari/blob/23cdd769/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py
b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index b074fe5..c422ac8 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -304,11 +304,21 @@ class Script(object):
     :param compare_to_version: the version to compare to
     :return: True if the command's stack is greater than the specified version
     """
-    hdp_stack_version = Script.get_hdp_stack_version()
-    if hdp_stack_version is None or hdp_stack_version == "":
+    return Script.is_hdp_stack_greater_or_equal_to(Script.get_hdp_stack_version(), compare_to_version)
+
+  @staticmethod
+  def is_hdp_stack_greater_or_equal_to(formatted_hdp_stack_version, compare_to_version):
+    """
+    Gets whether the provided formatted_hdp_stack_version (normalized)
+    is greater than or equal to the specified stack version
+    :param formatted_hdp_stack_version: the version of stack to compare
+    :param compare_to_version: the version of stack to compare to
+    :return: True if the command's stack is greater than the specified version
+    """
+    if formatted_hdp_stack_version is None or formatted_hdp_stack_version == "":
       return False
 
-    return compare_versions(hdp_stack_version, compare_to_version) >= 0
+    return compare_versions(formatted_hdp_stack_version, compare_to_version) >= 0
 
   @staticmethod
   def is_hdp_stack_less_than(compare_to_version):

http://git-wip-us.apache.org/repos/asf/ambari/blob/23cdd769/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index 65e9eaf..cd0a709 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -141,6 +141,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
   private static final String COMMAND_PARAM_RESTART_TYPE = "restart_type";
   private static final String COMMAND_PARAM_TASKS = "tasks";
   private static final String COMMAND_PARAM_STRUCT_OUT = "structured_out";
+  private static final String COMMAND_DOWNGRADE_FROM_VERSION = "downgrade_from_version";
 
   /**
    * The original "current" stack of the cluster before the upgrade started.
@@ -569,6 +570,15 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     UpgradeContext ctx = new UpgradeContext(resolver, sourceStackId, targetStackId, version,
         direction);
 
+    if (direction.isDowngrade()) {
+      if (requestMap.containsKey(UPGRADE_FROM_VERSION)) {
+        ctx.setDowngradeFromVersion((String) requestMap.get(UPGRADE_FROM_VERSION));
+      } else {
+        UpgradeEntity lastUpgradeItemForCluster = s_upgradeDAO.findLastUpgradeForCluster(cluster.getClusterId());
+        ctx.setDowngradeFromVersion(lastUpgradeItemForCluster.getToVersion());
+      }
+    }
+
     List<UpgradeGroupHolder> groups = s_upgradeHelper.createSequence(pack, ctx);
 
     if (groups.isEmpty()) {
@@ -844,6 +854,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     params.put(COMMAND_PARAM_DIRECTION, context.getDirection().name().toLowerCase());
     params.put(COMMAND_PARAM_ORIGINAL_STACK, context.getOriginalStackId().getStackId());
     params.put(COMMAND_PARAM_TARGET_STACK, context.getTargetStackId().getStackId());
+    params.put(COMMAND_DOWNGRADE_FROM_VERSION, context.getDowngradeFromVersion());
 
     // Because custom task may end up calling a script/function inside a
     // service, it is necessary to set the
@@ -919,6 +930,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     restartCommandParams.put(COMMAND_PARAM_DIRECTION, context.getDirection().name().toLowerCase());
     restartCommandParams.put(COMMAND_PARAM_ORIGINAL_STACK,context.getOriginalStackId().getStackId());
     restartCommandParams.put(COMMAND_PARAM_TARGET_STACK, context.getTargetStackId().getStackId());
+    restartCommandParams.put(COMMAND_DOWNGRADE_FROM_VERSION, context.getDowngradeFromVersion());
 
     ActionExecutionContext actionContext = new ActionExecutionContext(cluster.getClusterName(),
         "RESTART", filters, restartCommandParams);
@@ -968,6 +980,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     commandParams.put(COMMAND_PARAM_DIRECTION, context.getDirection().name().toLowerCase());
     commandParams.put(COMMAND_PARAM_ORIGINAL_STACK, context.getOriginalStackId().getStackId());
     commandParams.put(COMMAND_PARAM_TARGET_STACK, context.getTargetStackId().getStackId());
+    commandParams.put(COMMAND_DOWNGRADE_FROM_VERSION, context.getDowngradeFromVersion());
 
     ActionExecutionContext actionContext = new ActionExecutionContext(cluster.getClusterName(),
         "SERVICE_CHECK", filters, commandParams);
@@ -1012,6 +1025,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
     commandParams.put(COMMAND_PARAM_DIRECTION, context.getDirection().name().toLowerCase());
     commandParams.put(COMMAND_PARAM_ORIGINAL_STACK, context.getOriginalStackId().getStackId());
     commandParams.put(COMMAND_PARAM_TARGET_STACK, context.getTargetStackId().getStackId());
+    commandParams.put(COMMAND_DOWNGRADE_FROM_VERSION, context.getDowngradeFromVersion());
 
     String itemDetail = entity.getText();
     String stageText = StringUtils.abbreviate(entity.getText(), 255);

http://git-wip-us.apache.org/repos/asf/ambari/blob/23cdd769/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
index e6ba152..bc0652c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/UpgradeDAO.java
@@ -26,6 +26,7 @@ import org.apache.ambari.server.orm.RequiresSession;
 import org.apache.ambari.server.orm.entities.UpgradeEntity;
 import org.apache.ambari.server.orm.entities.UpgradeGroupEntity;
 import org.apache.ambari.server.orm.entities.UpgradeItemEntity;
+import org.apache.ambari.server.state.stack.upgrade.Direction;
 import org.eclipse.persistence.config.HintValues;
 import org.eclipse.persistence.config.QueryHints;
 
@@ -155,6 +156,22 @@ public class UpgradeDAO {
     return daoUtils.selectSingle(query);
   }
 
+  /**
+   * @param clusterId the id of the cluster whose latest upgrade is requested
+   * @return the upgrade entity, or {@code null} if not found
+   */
+  @RequiresSession
+  public UpgradeEntity findLastUpgradeForCluster(long clusterId) {
+    TypedQuery<UpgradeEntity> query = entityManagerProvider.get().createNamedQuery(
+        "UpgradeEntity.findLatestForCluster", UpgradeEntity.class);
+    query.setMaxResults(1);
+    query.setParameter("clusterId", clusterId);
+    query.setParameter("direction", Direction.UPGRADE);
+
+    query.setHint(QueryHints.REFRESH, HintValues.TRUE);
 
+    return daoUtils.selectSingle(query);
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/23cdd769/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
index e8723e0..802ea03 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/UpgradeEntity.java
@@ -48,6 +48,8 @@ import org.apache.ambari.server.state.stack.upgrade.Direction;
       query = "SELECT u FROM UpgradeEntity u WHERE u.clusterId = :clusterId"),
   @NamedQuery(name = "UpgradeEntity.findUpgrade",
       query = "SELECT u FROM UpgradeEntity u WHERE u.upgradeId = :upgradeId"),
+  @NamedQuery(name = "UpgradeEntity.findLatestForCluster",
+      query = "SELECT u FROM UpgradeEntity u WHERE u.clusterId = :clusterId AND u.direction
= :direction ORDER BY u.upgradeId DESC"),
 })
 public class UpgradeEntity {
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/23cdd769/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
index 6dbde09..86dbccd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
@@ -53,6 +53,7 @@ public class UpgradeContext {
   private List<ServiceComponentHost> m_unhealthy = new ArrayList<ServiceComponentHost>();
   private Map<String, String> m_serviceNames = new HashMap<String, String>();
   private Map<String, String> m_componentNames = new HashMap<String, String>();
+  private String m_downgradeFromVersion = null;
 
   /**
    * Constructor.
@@ -220,6 +221,22 @@ public class UpgradeContext {
     m_componentNames.put(key, displayName);
   }
 
+  /**
+   * This method returns the non-finalized version we are downgrading from.
+   * 
+   * @return version cluster is downgrading from
+   */
+  public String getDowngradeFromVersion() {
+    return m_downgradeFromVersion;
+  }
 
+  /**
+   * Set the HDP stack version we are downgrading from.
+   *  
+   * @param downgradeFromVersion
+   */
+  public void setDowngradeFromVersion(String downgradeFromVersion) {
+    this.m_downgradeFromVersion = downgradeFromVersion;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/23cdd769/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
index b20c461..3ec1747 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -84,14 +84,17 @@ class HiveServerDefault(HiveServer):
 
     setup_ranger_hive(rolling_upgrade=rolling_restart)
     hive_service( 'hiveserver2', action = 'start', rolling_restart=rolling_restart)
+    if rolling_restart:
+      hive_server_upgrade.post_upgrade_deregister()
 
   def stop(self, env, rolling_restart=False):
     import params
     env.set_params(params)
 
-    if rolling_restart:
-      hive_server_upgrade.pre_upgrade_deregister()
-    else:
+    # During rolling upgrade, HiveServer2 should not be stopped before new server is available.
+    # Once new server is started, old one is stopped by the --deregister command which is

+    # invoked by the 'hive_server_upgrade.post_upgrade_deregister()' method
+    if not rolling_restart:
       hive_service( 'hiveserver2', action = 'stop' )
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/23cdd769/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
index 7533dc9..d60e961 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
@@ -29,7 +29,7 @@ from resource_management.libraries.functions.version import format_hdp_stack_ver
 from resource_management.libraries.functions.version import compare_versions
 
 
-def pre_upgrade_deregister():
+def post_upgrade_deregister():
   """
   Runs the "hive --service hiveserver2 --deregister <version>" command to
 de-provision the server once the upgrade has completed. This will contact
@@ -42,7 +42,7 @@ def pre_upgrade_deregister():
   """
   import params
 
-  Logger.info('HiveServer2 executing "deregister" command in preparation for upgrade...')
+  Logger.info('HiveServer2 executing "deregister" command to complete upgrade...')
 
   if params.security_enabled:
     kinit_command=format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};
")
@@ -62,23 +62,42 @@ def pre_upgrade_deregister():
   hive_execute_path = params.execute_path
   # If upgrading, the upgrade-target hive binary should be used to call the --deregister
command.
   # If downgrading, the downgrade-source hive binary should be used to call the --deregister
command.
-  if "upgrade" == params.upgrade_direction:
+  # By now hdp-select has been called to set 'current' to target-stack
+  if "downgrade" == params.upgrade_direction:
     # hive_bin
-    upgrade_target_version = format_hdp_stack_version(params.version)
-    if upgrade_target_version and compare_versions(upgrade_target_version, "2.2") >= 0:
-      upgrade_target_hive_bin = format('/usr/hdp/{version}/hive/bin')
-      if (os.pathsep + params.hive_bin) in hive_execute_path:
-        hive_execute_path = hive_execute_path.replace(os.pathsep + params.hive_bin, os.pathsep
+ upgrade_target_hive_bin)
-    # hadoop_bin_dir
-    upgrade_target_hadoop_bin = hdp_select.get_hadoop_dir("bin", upgrade_stack_only=True)
-    upgrade_source_hadoop_bin = params.hadoop_bin_dir
-    if upgrade_target_hadoop_bin and len(upgrade_target_hadoop_bin) > 0 and (os.pathsep
+ upgrade_source_hadoop_bin) in hive_execute_path:
-      hive_execute_path = hive_execute_path.replace(os.pathsep + upgrade_source_hadoop_bin,
os.pathsep + upgrade_target_hadoop_bin)
+    downgrade_version = params.current_version
+    if params.downgrade_from_version:
+      downgrade_version = params.downgrade_from_version
+    hive_execute_path = _get_hive_execute_path(downgrade_version)
 
   command = format('hive --config {hive_server_conf_dir} --service hiveserver2 --deregister
' + current_hiveserver_version)
   Execute(command, user=params.hive_user, path=hive_execute_path, tries=1 )
 
 
+def _get_hive_execute_path(hdp_stack_version):
+  """
+  Returns the exact execute path to use for the given stack-version.
+  This method does not return the "current" path
+  :param hdp_stack_version: Exact stack-version to use in the new path
+  :return: Hive execute path for the exact hdp stack-version
+  """
+  import params
+
+  hive_execute_path = params.execute_path
+  formatted_stack_version = format_hdp_stack_version(hdp_stack_version)
+  if formatted_stack_version and compare_versions(formatted_stack_version, "2.2") >= 0:
+    # hive_bin
+    new_hive_bin = format('/usr/hdp/{hdp_stack_version}/hive/bin')
+    if (os.pathsep + params.hive_bin) in hive_execute_path:
+      hive_execute_path = hive_execute_path.replace(os.pathsep + params.hive_bin, os.pathsep
+ new_hive_bin)
+    # hadoop_bin_dir
+    new_hadoop_bin = hdp_select.get_hadoop_dir_for_stack_version("bin", hdp_stack_version)
+    old_hadoop_bin = params.hadoop_bin_dir
+    if new_hadoop_bin and len(new_hadoop_bin) > 0 and (os.pathsep + old_hadoop_bin) in
hive_execute_path:
+      hive_execute_path = hive_execute_path.replace(os.pathsep + old_hadoop_bin, os.pathsep
+ new_hadoop_bin)
+  return hive_execute_path
+
+
 def _get_current_hiveserver_version():
   """
   Runs "hive --version" and parses the result in order
@@ -89,8 +108,20 @@ def _get_current_hiveserver_version():
   import params
 
   try:
-    command = 'hive --version'
-    return_code, hdp_output = shell.call(command, user=params.hive_user, path=params.execute_path)
+    # When downgrading the source version should be the version we are downgrading from
+    if "downgrade" == params.upgrade_direction:
+      if not params.downgrade_from_version:
+        raise Fail('The version from which we are downgrading should be provided in
\'downgrade_from_version\'')
+      source_version = params.downgrade_from_version
+    else:
+      source_version = params.current_version
+    hive_execute_path = _get_hive_execute_path(source_version)
+    version_hive_bin = params.hive_bin
+    formatted_source_version = format_hdp_stack_version(source_version)
+    if formatted_source_version and compare_versions(formatted_source_version, "2.2") >=
0:
+      version_hive_bin = format('/usr/hdp/{source_version}/hive/bin')
+    command = format('{version_hive_bin}/hive --version')
+    return_code, hdp_output = shell.call(command, user=params.hive_user, path=hive_execute_path)
   except Exception, e:
     Logger.error(str(e))
     raise Fail('Unable to execute hive --version command to retrieve the hiveserver2 version.')

http://git-wip-us.apache.org/repos/asf/ambari/blob/23cdd769/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index db9d9da..bee455f 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -60,6 +60,10 @@ version = default("/commandParams/version", None)
 # current host stack version
 current_version = default("/hostLevelParams/current_version", None)
 
+# When downgrading the 'version' and 'current_version' are both pointing to the downgrade-target
version
+# downgrade_from_version provides the source-version the downgrade is happening from 
+downgrade_from_version = default("/commandParams/downgrade_from_version", None)
+
 # Upgrade direction
 upgrade_direction = default("/commandParams/upgrade_direction", None)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/23cdd769/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
index e5e78ef..0b12e97 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/UpgradeDAOTest.java
@@ -34,6 +34,7 @@ import org.apache.ambari.server.orm.entities.UpgradeEntity;
 import org.apache.ambari.server.orm.entities.UpgradeGroupEntity;
 import org.apache.ambari.server.orm.entities.UpgradeItemEntity;
 import org.apache.ambari.server.state.UpgradeState;
+import org.apache.ambari.server.state.stack.upgrade.Direction;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -128,4 +129,40 @@ public class UpgradeDAOTest {
     assertEquals("group title", group.getTitle());
   }
 
+  /**
+   * Creates upgrades and downgrades and verifies that only the latest upgrade is returned
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testFindLastUpgradeForCluster() throws Exception {
+    // create upgrade entities
+    UpgradeEntity entity1 = new UpgradeEntity();
+    entity1.setId(11L);
+    entity1.setClusterId(Long.valueOf(1));
+    entity1.setDirection(Direction.UPGRADE);
+    entity1.setRequestId(Long.valueOf(1));
+    entity1.setFromVersion("2.2.0.0-1234");
+    entity1.setToVersion("2.3.0.0-4567");
+    dao.create(entity1);
+    UpgradeEntity entity2 = new UpgradeEntity();
+    entity2.setId(22L);
+    entity2.setClusterId(Long.valueOf(1));
+    entity2.setDirection(Direction.DOWNGRADE);
+    entity2.setRequestId(Long.valueOf(1));
+    entity2.setFromVersion("2.3.0.0-4567");
+    entity2.setToVersion("2.2.0.0-1234");
+    dao.create(entity2);
+    UpgradeEntity entity3 = new UpgradeEntity();
+    entity3.setId(33L);
+    entity3.setClusterId(Long.valueOf(1));
+    entity3.setDirection(Direction.UPGRADE);
+    entity3.setRequestId(Long.valueOf(1));
+    entity3.setFromVersion("2.2.0.0-1234");
+    entity3.setToVersion("2.3.1.1-4567");
+    dao.create(entity3);
+    UpgradeEntity lastUpgradeForCluster = dao.findLastUpgradeForCluster(1);
+    assertNotNull(lastUpgradeForCluster);
+    assertEquals(33L, (long)lastUpgradeForCluster.getId());
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/23cdd769/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index e1d4a6f..7ca3d78 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -693,11 +693,11 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
      call_mocks = call_side_effects
     )
 
-    self.assertResourceCalled('Execute', 'hive --config /usr/hdp/current/hive-server2/conf/conf.server
--service hiveserver2 --deregister 1.2.1.2.3.0.0-2434',
+    self.assertResourceCalled('Execute', ('hdp-select', 'set', 'hive-server2', '2.2.1.0-2065'),
sudo=True,)
+    self.assertResourceCalledByIndex(33, 'Execute', 'hive --config /usr/hdp/current/hive-server2/conf/conf.server
--service hiveserver2 --deregister 1.2.1.2.3.0.0-2434',
       path=['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'],
       tries=1, user='hive')
 
-    self.assertResourceCalled('Execute', ('hdp-select', 'set', 'hive-server2', '2.2.1.0-2065'),
sudo=True,)
 
   @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
   @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=True))
@@ -717,12 +717,11 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
      call_mocks = call_side_effects
     )
 
-    self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service hiveserver2
--deregister 1.2.1.2.3.0.0-2434',
+    self.assertResourceCalled('Execute', ('hdp-select', 'set', 'hive-server2', '2.2.1.0-2065'),
sudo=True,)
+    self.assertResourceCalledByIndex(33, 'Execute', 'hive --config /etc/hive/conf.server
--service hiveserver2 --deregister 1.2.1.2.3.0.0-2434',
       path=['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'],
       tries=1, user='hive')
 
-    self.assertResourceCalled('Execute', ('hdp-select', 'set', 'hive-server2', '2.2.1.0-2065'),
sudo=True,)
-
   def test_stop_during_upgrade_bad_hive_version(self):
     try:
       self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
@@ -730,11 +729,11 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
        hdp_stack_version = self.UPGRADE_STACK_VERSION,
        target = RMFTestCase.TARGET_COMMON_SERVICES,
        call_mocks = [(0,"BAD VERSION")])
-
       self.fail("Invalid hive version should have caused an exception")
     except:
       pass
 
+    self.assertResourceCalled('Execute', ('hdp-select', 'set', 'hive-server2', '2.2.1.0-2065'),
sudo=True,)
     self.assertNoMoreResources()
 
   @patch("resource_management.libraries.functions.security_commons.build_expectations")


Mime
View raw message