ambari-commits mailing list archives

From alejan...@apache.org
Subject ambari git commit: AMBARI-14307. RU: Kafka broker restart failed on downgrade from HDP 2.3 to 2.2 due to circular symlink of /etc/hadoop/conf (alejandro)
Date Mon, 14 Dec 2015 20:22:43 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 155ae8554 -> 55b84c55b


AMBARI-14307. RU: Kafka broker restart failed on downgrade from HDP 2.3 to 2.2 due to circular symlink of /etc/hadoop/conf (alejandro)
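
The failure mode: after the downgrade, /etc/hadoop/conf can end up in a
symlink chain that points back to itself, so resolving the conf dir fails.
A minimal sketch of detecting such a cycle (find_symlink_cycle is a
hypothetical helper, not part of this commit):

import os

def find_symlink_cycle(path, max_hops=10):
    """Follow symlinks from `path`; return the chain if it loops back."""
    seen = [os.path.normpath(os.path.abspath(path))]
    for _ in range(max_hops):
        current = seen[-1]
        if not os.path.islink(current):
            return None  # chain ends at a real directory, no cycle
        target = os.readlink(current)
        if not os.path.isabs(target):
            target = os.path.join(os.path.dirname(current), target)
        target = os.path.normpath(target)
        if target in seen:
            # e.g. /etc/hadoop/conf -> /usr/hdp/current/hadoop-client/conf
            #      -> /etc/hadoop/conf
            return seen + [target]
        seen.append(target)
    return None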


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/55b84c55
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/55b84c55
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/55b84c55

Branch: refs/heads/trunk
Commit: 55b84c55bcaa79df8955a2249abb4b1f63b0364c
Parents: 155ae85
Author: Alejandro Fernandez <afernandez@hortonworks.com>
Authored: Mon Dec 14 11:08:05 2015 -0800
Committer: Alejandro Fernandez <afernandez@hortonworks.com>
Committed: Mon Dec 14 12:22:36 2015 -0800

----------------------------------------------------------------------
 .../libraries/functions/conf_select.py          | 166 +++++++++++++------
 .../libraries/script/script.py                  |  10 ++
 .../0.8.1.2.2/package/scripts/kafka_broker.py   |   4 +-
 .../custom_actions/scripts/install_packages.py  |  10 +-
 .../custom_actions/scripts/ru_set_all.py        |  21 ++-
 .../python/custom_actions/test_ru_set_all.py    |  11 +-
 .../2.0.6/HBASE/test_phoenix_queryserver.py     |  45 ++++-
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |   7 +-
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |   8 +-
 .../stacks/2.0.6/YARN/test_historyserver.py     |  18 +-
 .../hooks/after-INSTALL/test_after_install.py   |   5 +-
 .../stacks/2.2/KAFKA/test_kafka_broker.py       |   2 +
 12 files changed, 222 insertions(+), 85 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/55b84c55/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index c9c70de..fd83c53 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -32,6 +32,9 @@ from resource_management.core.logger import Logger
 from resource_management.core.resources.system import Directory
 from resource_management.core.resources.system import Execute
 from resource_management.core.resources.system import Link
+from resource_management.libraries.functions.default import default
+from resource_management.core.exceptions import Fail
+from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
 from resource_management.core.shell import as_sudo
 
 
@@ -198,13 +201,15 @@ def _valid(stack_name, package, ver):
 def create(stack_name, package, version, dry_run = False):
   """
   Creates a config version for the specified package
-  :stack_name: the name of the stack
-  :package: the name of the package, as-used by conf-select
-  :version: the version number to create
+  :param stack_name: the name of the stack
+  :param package: the name of the package, as-used by conf-select
+  :param version: the version number to create
+  :return List of directories created
   """
-
+  Logger.info("Checking if need to create versioned conf dir /etc/{0}/{1}/0".format(package,
version))
   if not _valid(stack_name, package, version):
-    return
+    Logger.info("Will not create it since parameters are not valid.")
+    return []
 
   command = "dry-run-create" if dry_run else "create-conf-dir"
 
@@ -287,36 +292,90 @@ def get_hadoop_conf_dir(force_latest_on_upgrade=False):
   before the hdp-select/conf-select would have been called.
   """
   hadoop_conf_dir = "/etc/hadoop/conf"
-
-  if Script.is_hdp_stack_greater_or_equal("2.2"):
-    hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
-
-    stack_info = hdp_select._get_upgrade_stack()
-
-    # if upgrading to >= HDP 2.3
-    if stack_info is not None and Script.is_hdp_stack_greater_or_equal("2.3"):
-      stack_name = stack_info[0]
-      stack_version = stack_info[1]
-
-      # determine if hdp-select has been run and if not, then use the current
-      # hdp version until this component is upgraded
-      if not force_latest_on_upgrade:
-        current_hdp_version = hdp_select.get_role_component_current_hdp_version()
-        if current_hdp_version is not None and stack_version != current_hdp_version:
-          stack_version = current_hdp_version
-
-      # only change the hadoop_conf_dir path, don't conf-select this older version
-      hadoop_conf_dir = "/usr/hdp/{0}/hadoop/conf".format(stack_version)
-
-      # ensure the new HDP stack is conf-selected, but only if it exists
-      # there are cases where hadoop might not be installed, such as on a host with only ZK
-      if os.path.exists(hadoop_conf_dir):
-        select(stack_name, "hadoop", stack_version)
-
+  stack_name = None
+  version = None
+  allow_setting_conf_select_symlink = False
+
+  if not Script.in_stack_upgrade():
+    # During normal operation, the HDP stack must be 2.2 or higher
+    if Script.is_hdp_stack_greater_or_equal("2.2"):
+      hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+
+    if Script.is_hdp_stack_greater_or_equal("2.3"):
+      hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+      stack_name = default("/hostLevelParams/stack_name", None)
+      version = default("/commandParams/version", None)
+
+      if stack_name and version:
+        version = str(version)
+        allow_setting_conf_select_symlink = True
+  else:
+    # During an upgrade/downgrade, which can be a Rolling or Express Upgrade, need to calculate it based on the version
+    '''
+    Whenever upgrading to HDP 2.2, or downgrading back to 2.2, need to use /etc/hadoop/conf
+    Whenever upgrading to HDP 2.3, or downgrading back to 2.3, need to use a versioned hadoop conf dir
+
+    Type__|_Source_|_Target_|_Direction_____________|_Comment_____________________________________________________________
+    Normal|        | 2.2    |                       | Use /etc/hadoop/conf
+    Normal|        | 2.3    |                       | Use /etc/hadoop/conf, which should be a symlink to /usr/hdp/current/hadoop-client/conf
+    EU    | 2.1    | 2.3    | Upgrade               | Use versioned /usr/hdp/current/hadoop-client/conf
+          |        |        | No Downgrade Allowed  | Invalid
+    EU/RU | 2.2    | 2.2.*  | Any                   | Use /usr/hdp/current/hadoop-client/conf
+    EU/RU | 2.2    | 2.3    | Upgrade               | Use /usr/hdp/$version/hadoop/conf, which should be a symlink destination
+          |        |        | Downgrade             | Use /usr/hdp/current/hadoop-client/conf
+    EU/RU | 2.3    | 2.3.*  | Any                   | Use /usr/hdp/$version/hadoop/conf, which should be a symlink destination
+    '''
+
+    # The method "is_hdp_stack_greater_or_equal" uses "stack_version" which is the desired stack, e.g., 2.2 or 2.3
+    # In an RU, it is always the desired stack, and doesn't change even during the Downgrade!
+    # In an RU Downgrade from HDP 2.3 to 2.2, the first thing we do is 
+    # rm /etc/[component]/conf and then mv /etc/[component]/conf.backup /etc/[component]/conf
+    if Script.is_hdp_stack_greater_or_equal("2.2"):
+      hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+
+      # This contains the "version", including the build number, that is actually used during a stack upgrade and
+      # is the version upgrading/downgrading to.
+      stack_info = hdp_select._get_upgrade_stack()
+
+      if stack_info is not None:
+        stack_name = stack_info[0]
+        version = stack_info[1]
+      else:
+        raise Fail("Unable to get parameter 'version'")
+      
+      Logger.info("In the middle of a stack upgrade/downgrade for Stack {0} and destination
version {1}, determining which hadoop conf dir to use.".format(stack_name, version))
+      # This is the version either upgrading or downgrading to.
+      if compare_versions(format_hdp_stack_version(version), "2.3.0.0") >= 0:
+        # Determine if hdp-select has been run and if not, then use the current
+        # hdp version until this component is upgraded.
+        if not force_latest_on_upgrade:
+          current_hdp_version = hdp_select.get_role_component_current_hdp_version()
+          if current_hdp_version is not None and version != current_hdp_version:
+            version = current_hdp_version
+            Logger.info("hdp-select has not yet been called to update the symlink for this
component, keep using version {0}".format(current_hdp_version))
+
+        # Only change the hadoop_conf_dir path, don't conf-select this older version
+        hadoop_conf_dir = "/usr/hdp/{0}/hadoop/conf".format(version)
+        Logger.info("Hadoop conf dir: {0}".format(hadoop_conf_dir))
+
+        allow_setting_conf_select_symlink = True
+
+  if allow_setting_conf_select_symlink:
+    # If not in the middle of an upgrade and on HDP 2.3 or higher, or if
+    # upgrading stack to version 2.3.0.0 or higher (which may be upgrade or downgrade), then consider setting the
+    # symlink for /etc/hadoop/conf.
+    # If a host does not have any HDFS or YARN components (e.g., only ZK), then it will not contain /etc/hadoop/conf
+    # Therefore, any calls to conf-select will fail.
+    # For that reason, if the hadoop conf directory exists, then make sure it is set.
+    if os.path.exists(hadoop_conf_dir):
+      Logger.info("The hadoop conf dir {0} exists, will call conf-select on it for version
{1}".format(hadoop_conf_dir, version))
+      select(stack_name, "hadoop", version)
+
+  Logger.info("Using hadoop conf dir: {0}".format(hadoop_conf_dir))
   return hadoop_conf_dir
 
 
-def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_links=True, link_to_conf_install=False):
+def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_links=True, link_to="current"):
 
   """
   Assumes HDP 2.3+, moves around directories and creates the conf symlink for the given package.
@@ -335,7 +394,7 @@ def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_l
   :param version: the version number to use with conf-select (2.3.0.0-1234)
   :param dirs: the directories associated with the package (from PACKAGE_DIRS)
   :param skip_existing_links: True to not do any work if already a symlink
-  :param link_to_conf_install:
+  :param link_to: link to "current" or "backup"
   """
   bad_dirs = []
   for dir_def in dirs:
@@ -360,17 +419,21 @@ def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_l
     return
 
   # make backup dir and copy everything in case configure() was called after install()
+  backup_dir = None
   for dir_def in dirs:
     old_conf = dir_def['conf_dir']
     old_parent = os.path.abspath(os.path.join(old_conf, os.pardir))
-    conf_install_dir = os.path.join(old_parent, "conf.backup")
-    Execute(("cp", "-R", "-p", old_conf, conf_install_dir),
-      not_if = format("test -e {conf_install_dir}"), sudo = True)
+    backup_dir = os.path.join(old_parent, "conf.backup")
+    Logger.info("Backing up {0} to {1} if destination doesn't exist already.".format(old_conf,
backup_dir))
+    Execute(("cp", "-R", "-p", old_conf, backup_dir),
+      not_if = format("test -e {backup_dir}"), sudo = True)
 
   # we're already in the HDP stack
+  # Create the versioned /etc/[component]/[version]/0 folder.
+  # The component must be installed on the host.
   versioned_confs = create("HDP", package, version, dry_run = True)
 
-  Logger.info("New conf directories: {0}".format(", ".join(versioned_confs)))
+  Logger.info("Package {0} will have new conf directories: {1}".format(package, ", ".join(versioned_confs)))
 
   need_dirs = []
   for d in versioned_confs:
@@ -396,26 +459,31 @@ def convert_conf_directories_to_symlinks(package, version, dirs, skip_existing_l
         only_if = format("ls -d {old_conf}/*"))
 
 
-  # make /usr/hdp/[version]/[component]/conf point to the versioned config.
-  # /usr/hdp/current is already set
+  # Make /usr/hdp/[version]/[component]/conf point to the versioned config.
+  # /usr/hdp/current/[component] is already set to the correct version, e.g., /usr/hdp/[version]/[component]
   try:
     select("HDP", package, version)
+  except Exception, e:
+    Logger.warning("Could not select the directory for package {0}. Error: {1}".format(package,
e))
 
-    # no more references to /etc/[component]/conf
+  # Symlink /etc/[component]/conf to /etc/[component]/conf.backup
+  try:
+    # No more references to /etc/[component]/conf
     for dir_def in dirs:
+      # E.g., /etc/[component]/conf
       new_symlink = dir_def['conf_dir']
 
-      # remove new_symlink to pave the way, but only if it's a directory
+      # Remove new_symlink to pave the way, but only if it's a directory
       if not os.path.islink(new_symlink):
         Directory(new_symlink, action="delete")
 
-      # link /etc/[component]/conf -> /usr/hdp/current/[component]-client/conf
-      if link_to_conf_install:
-        Link(new_symlink, to = conf_install_dir)
+      if link_to in ["current", "backup"]:
+        # link /etc/[component]/conf -> /usr/hdp/current/[component]-client/conf
+        if link_to == "backup":
+          Link(new_symlink, to = backup_dir)
+        else:
+          Link(new_symlink, to = dir_def['current_dir'])
       else:
-        Link(new_symlink, to = dir_def['current_dir'])
+        Logger.error("Unsupported 'link_to' argument. Could not link package {0}".format(package))
   except Exception, e:
-    Logger.warning("Could not select the directory: {0}".format(e.message))
-
-  # should conf.backup be removed?
-
+    Logger.warning("Could not change symlink for package {0} to point to {1} directory. Error:
{2}".format(package, link_to, e))

http://git-wip-us.apache.org/repos/asf/ambari/blob/55b84c55/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py
b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index 9dc402d..09ca118 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -42,6 +42,7 @@ from resource_management.core.resources.packaging import Package
 from resource_management.libraries.functions.version_select_util import get_component_version
 from resource_management.libraries.functions.version import compare_versions
 from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.script.config_dictionary import ConfigDictionary, UnknownConfiguration
 from resource_management.core.resources.system import Execute
 from contextlib import closing
@@ -303,6 +304,15 @@ class Script(object):
 
     return format_hdp_stack_version(stack_version_unformatted)
 
+
+  @staticmethod
+  def in_stack_upgrade():
+    from resource_management.libraries.functions.default import default
+
+    upgrade_direction = default("/commandParams/upgrade_direction", None)
+    return upgrade_direction is not None and upgrade_direction in [Direction.UPGRADE, Direction.DOWNGRADE]
+
+
   @staticmethod
   def is_hdp_stack_greater(formatted_hdp_stack_version, compare_to_version):
     """

http://git-wip-us.apache.org/repos/asf/ambari/blob/55b84c55/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka_broker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka_broker.py
b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka_broker.py
index 6fcf08a..6c7a776 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka_broker.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka_broker.py
@@ -48,9 +48,11 @@ class KafkaBroker(Script):
     env.set_params(params)
 
    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
-      conf_select.select(params.stack_name, "kafka", params.version)
       hdp_select.select("kafka-broker", params.version)
 
+    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.3.0.0') >= 0:
+      conf_select.select(params.stack_name, "kafka", params.version)
+
    # This is extremely important since it should only be called if crossing the HDP 2.3.4.0 boundary.
     if params.current_version and params.version and params.upgrade_direction:
       src_version = dst_version = None

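The restart now runs hdp-select for any version at or above 2.2.0.0 but
defers conf-select to 2.3.0.0 and higher, matching the install_packages.py
check below that config symlinks only exist on HDP 2.3+. A sketch of the
gate, assuming compare_versions returns a negative/zero/positive integer
and format_hdp_stack_version normalizes short versions such as "2.3":

from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version

def at_least(version, boundary):
    # True when `version` is at or above the given stack boundary
    return compare_versions(format_hdp_stack_version(version), boundary) >= 0

# hdp_select.select("kafka-broker", version)        when at_least(version, '2.2.0.0')
# conf_select.select(stack_name, "kafka", version)  when at_least(version, '2.3.0.0')
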
http://git-wip-us.apache.org/repos/asf/ambari/blob/55b84c55/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
index 470661a..a253337 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
@@ -33,6 +33,7 @@ from ambari_commons.os_check import OSCheck, OSConst
 from resource_management.libraries.functions.packages_analyzer import allInstalledPackages
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.hdp_select import get_hdp_versions
+from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
 from resource_management.libraries.functions.repo_version_history \
   import read_actual_version_from_history_file, write_actual_version_to_history_file, REPO_VERSION_HISTORY_FILE
 
@@ -185,14 +186,19 @@ class InstallPackages(Script):
     if args[0] != "HDP":
       Logger.info("Unrecognized stack name {0}, cannot create config links".format(args[0]))
 
-    if version.compare_versions(version.format_hdp_stack_version(args[1]), "2.3.0.0") < 0:
+    if compare_versions(format_hdp_stack_version(args[1]), "2.3.0.0") < 0:
       Logger.info("Configuration symlinks are not needed for {0}, only HDP-2.3+".format(stack_version))
       return
 
+    # if already on HDP 2.3, then there's nothing to do in terms of linking configs
+    if self.current_hdp_stack_version and compare_versions(self.current_hdp_stack_version, '2.3') >= 0:
+      Logger.info("The current cluster stack of {0} does not require linking configurations".format(stack_version))
+      return
+
     # link configs for all known packages
     for package_name, directories in conf_select.PACKAGE_DIRS.iteritems():
       conf_select.convert_conf_directories_to_symlinks(package_name, stack_version, directories,
-        skip_existing_links = False, link_to_conf_install = True)
+        skip_existing_links = False, link_to = "backup")
 
 
   def compute_actual_version(self):

http://git-wip-us.apache.org/repos/asf/ambari/blob/55b84c55/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py b/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
index 885d650..a573420 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
@@ -32,7 +32,7 @@ from resource_management.libraries.functions.version import format_hdp_stack_ver
 from resource_management.core import shell
 from resource_management.core.exceptions import Fail
 from resource_management.core.logger import Logger
-from resource_management.core.resources.system import Execute, Link
+from resource_management.core.resources.system import Execute, Link, Directory
 
 HDP_SELECT = '/usr/bin/hdp-select'
 
@@ -94,6 +94,7 @@ class UpgradeSetAll(Script):
       Logger.warning("Both 'commandParams/version' and 'commandParams/downgrade_from_version'
must be specified to unlink configs on downgrade.")
       return
 
+    Logger.info("Unlinking all configs when downgrading from HDP 2.3 to 2.2")
 
     # normalize the versions
     stack_23 = format_hdp_stack_version("2.3")
@@ -131,16 +132,22 @@ class UpgradeSetAll(Script):
     # calculate the parent and backup directories
    original_conf_parent_directory = os.path.abspath(os.path.join(original_conf_directory, os.pardir))
     backup_conf_directory = os.path.join(original_conf_parent_directory, "conf.backup")
-    if not os.path.isdir(backup_conf_directory):
-      Logger.info("Skipping restoring config from backup {0} since it does not exist".format(backup_conf_directory))
-    elif not os.path.islink(original_conf_directory):
-      Logger.info("Skipping the unlink of {0}; it is not a symlink or does not exist".format(original_conf_directory))
-    else:
-      Logger.info("Unlinking {0} and restoring {1}".format(original_conf_directory, backup_conf_directory))
+    Logger.info("Analyzing potential link {0}".format(original_conf_directory))
+
+    if os.path.islink(original_conf_directory):
       # remove the old symlink
       Execute(("rm", original_conf_directory), sudo=True)
+    elif os.path.isdir(original_conf_directory):
+      Directory(original_conf_directory, action="delete")
+    else:
+      Logger.info("  Skipping the unlink of {0}; it is not a symlink or does not exist".format(original_conf_directory))
+
+    if os.path.isdir(backup_conf_directory):
       # rename the backup to the original name
+      Logger.info("  Unlinking {0} and restoring {1}".format(original_conf_directory, backup_conf_directory))
       Execute(("mv", backup_conf_directory, original_conf_directory), sudo=True)
+    else:
+      Logger.info("  Skipping restoring config from backup {0} since it does not exist".format(backup_conf_directory))
 
 
 def link_config(old_conf, link_conf):

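The reworked _unlink_config handles /etc/[component]/conf being either a
stale symlink or a real directory (the circular-symlink leftover) before
restoring conf.backup. A plain-Python equivalent of the sequence, as a
sketch rather than the Ambari resource-based implementation:

import os
import shutil

def unlink_config(original_conf_directory):
    parent = os.path.abspath(os.path.join(original_conf_directory, os.pardir))
    backup = os.path.join(parent, "conf.backup")
    if os.path.islink(original_conf_directory):
        os.remove(original_conf_directory)          # drop the stale symlink
    elif os.path.isdir(original_conf_directory):
        shutil.rmtree(original_conf_directory)      # drop the stale directory
    if os.path.isdir(backup):
        os.rename(backup, original_conf_directory)  # restore the backup
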
http://git-wip-us.apache.org/repos/asf/ambari/blob/55b84c55/ambari-server/src/test/python/custom_actions/test_ru_set_all.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/custom_actions/test_ru_set_all.py b/ambari-server/src/test/python/custom_actions/test_ru_set_all.py
index 2c39614..3090f6b 100644
--- a/ambari-server/src/test/python/custom_actions/test_ru_set_all.py
+++ b/ambari-server/src/test/python/custom_actions/test_ru_set_all.py
@@ -248,7 +248,6 @@ class TestRUSetAll(RMFTestCase):
     # ensure it wasn't called this time
     self.assertFalse(islink_mock.called)
 
-
   @patch("os.path.isdir")
   @patch("os.path.islink")
   def test_unlink_configs_missing_backup(self, islink_mock, isdir_mock):
@@ -267,7 +266,7 @@ class TestRUSetAll(RMFTestCase):
     isdir_mock.return_value = True
     islink_mock.return_value = False
     ru_execute._unlink_config("/fake/config")
-    self.assertEqual(len(env.resource_list), 0)
+    self.assertEqual(len(env.resource_list), 2)
     # Case: missing symlink
     isdir_mock.reset_mock()
     isdir_mock.return_value = True
@@ -276,10 +275,10 @@ class TestRUSetAll(RMFTestCase):
 
     ru_execute._unlink_config("/fake/config")
     self.assertEqual(pprint.pformat(env.resource_list),
-                     "[Execute[('rm', '/fake/config')],\n"
-                     " Execute[('mv', '/fake/conf.backup', "
-                     "'/fake/config')]]")
-
+                     "[Directory['/fake/config'],\n "
+                     "Execute[('mv', '/fake/conf.backup', '/fake/config')],\n "
+                     "Execute[('rm', '/fake/config')],\n "
+                     "Execute[('mv', '/fake/conf.backup', '/fake/config')]]")
 
   @patch("os.path.exists")
   @patch("os.path.islink")

http://git-wip-us.apache.org/repos/asf/ambari/blob/55b84c55/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
index 3260715..6ae5785 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
@@ -38,7 +38,8 @@ class TestPhoenixQueryServer(RMFTestCase):
       command = "configure",
       config_file = "hbase_default.json",
       hdp_stack_version = self.STACK_VERSION,
-      target = RMFTestCase.TARGET_COMMON_SERVICES
+      target = RMFTestCase.TARGET_COMMON_SERVICES,
+      call_mocks = [(0, None, None)]
     )
 
     self.assert_configure_default()
@@ -51,7 +52,8 @@ class TestPhoenixQueryServer(RMFTestCase):
       command = "start",
       config_file = "hbase_default.json",
       hdp_stack_version = self.STACK_VERSION,
-      target = RMFTestCase.TARGET_COMMON_SERVICES
+      target = RMFTestCase.TARGET_COMMON_SERVICES,
+      call_mocks = [(0, None, None)]
     )
     self.assert_configure_default()
     self.assertResourceCalled('Execute',
@@ -69,9 +71,12 @@ class TestPhoenixQueryServer(RMFTestCase):
       command = "stop",
       config_file = "hbase_default.json",
       hdp_stack_version = self.STACK_VERSION,
-      target = RMFTestCase.TARGET_COMMON_SERVICES
+      target = RMFTestCase.TARGET_COMMON_SERVICES,
+      call_mocks = [(0, None, None)]
     )
 
+    self.assert_call_to_get_hadoop_conf_dir()
+
     self.assertResourceCalled('Execute',
       '/usr/hdp/current/phoenix-server/bin/queryserver.py stop',
      on_timeout = '! ( ls /var/run/hbase/phoenix-hbase-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/phoenix-hbase-server.pid` >/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `cat /var/run/hbase/phoenix-hbase-server.pid`',
@@ -93,7 +98,8 @@ class TestPhoenixQueryServer(RMFTestCase):
       command = "configure",
       config_file = "hbase_secure.json",
       hdp_stack_version = self.STACK_VERSION,
-      target = RMFTestCase.TARGET_COMMON_SERVICES
+      target = RMFTestCase.TARGET_COMMON_SERVICES,
+      call_mocks = [(0, None, None)]
     )
 
     self.assert_configure_secured()
@@ -106,7 +112,8 @@ class TestPhoenixQueryServer(RMFTestCase):
       command = "start",
       config_file = "hbase_secure.json",
       hdp_stack_version = self.STACK_VERSION,
-      target = RMFTestCase.TARGET_COMMON_SERVICES
+      target = RMFTestCase.TARGET_COMMON_SERVICES,
+      call_mocks = [(0, None, None)]
     )
     self.assert_configure_secured()
     self.assertResourceCalled('Execute',
@@ -124,9 +131,12 @@ class TestPhoenixQueryServer(RMFTestCase):
       command = "stop",
       config_file = "hbase_secure.json",
       hdp_stack_version = self.STACK_VERSION,
-      target = RMFTestCase.TARGET_COMMON_SERVICES
+      target = RMFTestCase.TARGET_COMMON_SERVICES,
+      call_mocks = [(0, None, None)]
     )
 
+    self.assert_call_to_get_hadoop_conf_dir()
+
     self.assertResourceCalled('Execute',
       '/usr/hdp/current/phoenix-server/bin/queryserver.py stop',
      on_timeout = '! ( ls /var/run/hbase/phoenix-hbase-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/phoenix-hbase-server.pid` >/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `cat /var/run/hbase/phoenix-hbase-server.pid`',
@@ -141,7 +151,6 @@ class TestPhoenixQueryServer(RMFTestCase):
     )
     self.assertNoMoreResources()
 
-
   def test_start_default_24(self):
     raise SkipTest("there's nothing to upgrade to yet")
 
@@ -207,7 +216,18 @@ class TestPhoenixQueryServer(RMFTestCase):
 
     self.assertNoMoreResources()
 
+  def assert_call_to_get_hadoop_conf_dir(self):
+    # From call to conf_select.get_hadoop_conf_dir()
+    self.assertResourceCalled("Execute", ("cp", "-R", "-p", "/etc/hadoop/conf", "/etc/hadoop/conf.backup"),
+                              not_if = "test -e /etc/hadoop/conf.backup",
+                              sudo = True)
+    self.assertResourceCalled("Directory", "/etc/hadoop/conf",
+                              action = ["delete"])
+    self.assertResourceCalled("Link", "/etc/hadoop/conf", to="/usr/hdp/current/hadoop-client/conf")
+
   def assert_configure_default(self):
+    self.assert_call_to_get_hadoop_conf_dir()
+
     self.assertResourceCalled('Directory', '/etc/hbase',
       mode = 0755
     )
@@ -319,6 +339,8 @@ class TestPhoenixQueryServer(RMFTestCase):
     )
 
   def assert_configure_secured(self):
+    self.assert_call_to_get_hadoop_conf_dir()
+
     self.assertResourceCalled('Directory', '/etc/hbase',
       mode = 0755
     )
@@ -446,7 +468,7 @@ class TestPhoenixQueryServer(RMFTestCase):
       classname = "PhoenixQueryServer",
       command = "pre_upgrade_restart",
       config_dict = json_content,
-      call_mocks = [(0, "/etc/hbase/2.3.0.0-1234/0", '')],
+      call_mocks = [(0, "/etc/hbase/2.3.0.0-1234/0", ''), (0, None, None), (0, None, None)],
       hdp_stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES)
 
@@ -457,4 +479,9 @@ class TestPhoenixQueryServer(RMFTestCase):
     )
    self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'phoenix-server', '2.3.0.0-1234'), sudo=True)
 
-    self.assertNoMoreResources()
+    self.assertResourceCalled("Execute", ("cp", "-R", "-p", "/etc/hadoop/conf", "/etc/hadoop/conf.backup"),
+                              not_if = "test -e /etc/hadoop/conf.backup",
+                              sudo = True)
+    self.assertResourceCalled("Directory", "/etc/hadoop/conf", action = ["delete"])
+    self.assertResourceCalled("Link", "/etc/hadoop/conf", to="/usr/hdp/current/hadoop-client/conf")
+    self.assertNoMoreResources()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/55b84c55/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index 75aa4a1..2f6ae48 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -1690,6 +1690,7 @@ class TestNamenode(RMFTestCase):
     del json_content['commandParams']['version']
     json_content['hostLevelParams']['stack_name'] = 'HDP'
     json_content['hostLevelParams']['stack_version'] = '2.2'
+    json_content['commandParams']['version'] = version
 
     mocks_dict = {}
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
@@ -1702,9 +1703,9 @@ class TestNamenode(RMFTestCase):
                        mocks_dict = mocks_dict)
     import sys
     self.assertEquals("/usr/hdp/current/hadoop-client/conf", sys.modules["params"].hadoop_conf_dir)
-    self.assertEquals("/usr/hdp/current/hadoop-client/libexec", sys.modules["params"].hadoop_libexec_dir)
-    self.assertEquals("/usr/hdp/current/hadoop-client/bin", sys.modules["params"].hadoop_bin_dir)
-    self.assertEquals("/usr/hdp/current/hadoop-client/sbin", sys.modules["params"].hadoop_bin)
+    self.assertEquals("/usr/hdp/{0}/hadoop/libexec".format(version), sys.modules["params"].hadoop_libexec_dir)
+    self.assertEquals("/usr/hdp/{0}/hadoop/bin".format(version), sys.modules["params"].hadoop_bin_dir)
+    self.assertEquals("/usr/hdp/{0}/hadoop/sbin".format(version), sys.modules["params"].hadoop_bin)
 
   @patch.object(shell, "call")
   def test_pre_upgrade_restart_23_params(self, call_mock):

http://git-wip-us.apache.org/repos/asf/ambari/blob/55b84c55/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
index 7edd867..03e8934 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
@@ -1358,7 +1358,7 @@ class TestOozieServer(RMFTestCase):
       recursive_chmod = True,
       owner = 'oozie',
       group = 'hadoop',
-      hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+      hadoop_conf_dir = '/usr/hdp/2.3.0.0-1234/hadoop/conf',
       type = 'directory',
       action = ['create_on_execute'],
       mode = 0755 )
@@ -1374,7 +1374,7 @@ class TestOozieServer(RMFTestCase):
       user = 'hdfs', 
       dfs_type = '',
       action = ['execute'],
-      hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf' )
+      hadoop_conf_dir = '/usr/hdp/2.3.0.0-1234/hadoop/conf' )
 
    self.assertResourceCalled('Execute', '/usr/hdp/2.3.0.0-1234/oozie/bin/oozie-setup.sh sharelib create -fs hdfs://c6401.ambari.apache.org:8020',
       user='oozie', logoutput = True)
@@ -1431,7 +1431,7 @@ class TestOozieServer(RMFTestCase):
       recursive_chmod = True,
       owner = 'oozie',
       group = 'hadoop',
-      hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+      hadoop_conf_dir = '/usr/hdp/2.3.0.0-1234/hadoop/conf',
       type = 'directory',
       action = ['create_on_execute'],
       mode = 0755 )
@@ -1447,7 +1447,7 @@ class TestOozieServer(RMFTestCase):
       user = 'hdfs',
       dfs_type = '',
       action = ['execute'],
-      hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf' )
+      hadoop_conf_dir = '/usr/hdp/2.3.0.0-1234/hadoop/conf' )
 
    self.assertResourceCalled('Execute', '/usr/hdp/2.3.0.0-1234/oozie/bin/oozie-setup.sh sharelib create -fs hdfs://c6401.ambari.apache.org:8020',
       user='oozie', logoutput = True)

http://git-wip-us.apache.org/repos/asf/ambari/blob/55b84c55/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index 48d55b1..1108d8f 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -800,6 +800,15 @@ class TestHistoryServer(RMFTestCase):
     )
     put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
 
+  def assert_call_to_get_hadoop_conf_dir(self):
+    # From call to conf_select.get_hadoop_conf_dir()
+    self.assertResourceCalled("Execute", ("cp", "-R", "-p", "/etc/hadoop/conf", "/etc/hadoop/conf.backup"),
+                              not_if = "test -e /etc/hadoop/conf.backup",
+                              sudo = True)
+    self.assertResourceCalled("Directory", "/etc/hadoop/conf",
+                              action = ["delete"])
+    self.assertResourceCalled("Link", "/etc/hadoop/conf", to="/usr/hdp/current/hadoop-client/conf")
+
   @patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value="2.3.0"))
   @patch.object(functions, "get_hdp_version", new = MagicMock(return_value="2.3.0.0-1234"))
   @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
@@ -818,13 +827,16 @@ class TestHistoryServer(RMFTestCase):
                        config_dict = json_content,
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None), (0, None)],
+                       call_mocks = [(0, None, ''), (0, None, None), (0, None, None), (0, None, None), (0, None, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select',
'set', 'hadoop-mapreduce-historyserver', version), sudo=True)
     self.assertTrue(call("tez", "hadoop", "hdfs", host_sys_prepped=False) in copy_to_hdfs_mock.call_args_list)
     self.assertTrue(call("slider", "hadoop", "hdfs", host_sys_prepped=False) in copy_to_hdfs_mock.call_args_list)
 
+    # From call to conf_select.get_hadoop_conf_dir()
+    self.assert_call_to_get_hadoop_conf_dir()
+    self.assert_call_to_get_hadoop_conf_dir()
 
     self.assertResourceCalled('HdfsResource', None,
         security_enabled = False,
@@ -839,8 +851,8 @@ class TestHistoryServer(RMFTestCase):
 
     self.assertNoMoreResources()
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
+    self.assertEquals(5, mocks_dict['call'].call_count)
+    self.assertEquals(5, mocks_dict['checked_call'].call_count)
     self.assertEquals(
      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
        mocks_dict['checked_call'].call_args_list[0][0][0])

http://git-wip-us.apache.org/repos/asf/ambari/blob/55b84c55/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
index 490b3bf..74698e1 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
@@ -340,6 +340,10 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/pig/conf', '/etc/pig/conf.backup'),
         not_if = 'test -e /etc/pig/conf.backup',
         sudo = True,)
+    self.assertResourceCalled('Directory', '/etc/pig/conf',
+                              action=['delete'])
+    self.assertResourceCalled("Link", "/etc/pig/conf",
+                              to="/usr/hdp/current/pig-client/conf")
     # pig fails, so no Directory/Link combo
 
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/tez/conf', '/etc/tez/conf.backup'),
@@ -487,7 +491,6 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('Link', '/etc/falcon/conf',
         to = '/usr/hdp/current/falcon-client/conf')
 
-
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/spark/conf', '/etc/spark/conf.backup'),
         not_if = 'test -e /etc/spark/conf.backup',
         sudo = True)

http://git-wip-us.apache.org/repos/asf/ambari/blob/55b84c55/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py b/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py
index 1c782bd..9be8198 100644
--- a/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py
+++ b/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py
@@ -136,6 +136,8 @@ class TestKafkaBroker(RMFTestCase):
 
     self.assertResourceCalledIgnoreEarlier('Execute',
                              ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'kafka-broker', version), sudo=True,)
+
+    self.assertResourceCalled("Link", "/etc/kafka/conf", to="/usr/hdp/current/kafka-broker/conf")
     self.assertNoMoreResources()
 
     self.assertEquals(1, mocks_dict['call'].call_count)

