ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From nc...@apache.org
Subject ambari git commit: AMBARI-12201. Config directories for HDP-2.3 should use conf-select (ncole)
Date Mon, 29 Jun 2015 20:25:41 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 5afeaea3b -> 841e99b05


AMBARI-12201. Config directories for HDP-2.3 should use conf-select (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/841e99b0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/841e99b0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/841e99b0

Branch: refs/heads/trunk
Commit: 841e99b05fa34e3bfa370cce6bf7c13119c76b94
Parents: 5afeaea
Author: Nate Cole <ncole@hortonworks.com>
Authored: Mon Jun 29 09:29:26 2015 -0400
Committer: Nate Cole <ncole@hortonworks.com>
Committed: Mon Jun 29 16:22:41 2015 -0400

----------------------------------------------------------------------
 .../python/resource_management/core/shell.py    |   7 +-
 .../libraries/functions/conf_select.py          | 118 ++++++++++++++++++-
 .../0.12.0.2.0/package/scripts/hive_client.py   |   1 +
 .../TEZ/0.4.0.2.1/package/scripts/tez_client.py |   1 +
 .../custom_actions/scripts/install_packages.py  |   6 +
 .../custom_actions/scripts/ru_set_all.py        |  41 +++++++
 .../2.0.6/hooks/after-INSTALL/scripts/hook.py   |  11 +-
 .../scripts/shared_initialization.py            |  96 ++++++++++++++-
 .../python/custom_actions/test_ru_set_all.py    |  36 ++++++
 .../stacks/2.0.6/HIVE/test_hive_client.py       |  14 ++-
 .../hooks/after-INSTALL/test_after_install.py   |  50 +++++++-
 .../python/stacks/2.1/TEZ/test_tez_client.py    |  14 ++-
 12 files changed, 376 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/841e99b0/ambari-common/src/main/python/resource_management/core/shell.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/shell.py b/ambari-common/src/main/python/resource_management/core/shell.py
index 75d4e56..14d65c2 100644
--- a/ambari-common/src/main/python/resource_management/core/shell.py
+++ b/ambari-common/src/main/python/resource_management/core/shell.py
@@ -356,8 +356,11 @@ def _get_environment_str(env):
   return reduce(lambda str,x: '{0} {1}={2}'.format(str,x,quote_bash_args(env[x])), env, '')
 
 def string_cmd_from_args_list(command, auto_escape=True):
-  escape_func = lambda x:quote_bash_args(x) if auto_escape else lambda x:x
-  return ' '.join(escape_func(x) for x in command)
+  if auto_escape:
+    escape_func = lambda x:quote_bash_args(x)
+    return ' '.join(escape_func(x) for x in command)
+  else:
+    return ' '.join(command)
 
 def _print(line):
   sys.stdout.write(line)

http://git-wip-us.apache.org/repos/asf/ambari/blob/841e99b0/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index 37b455d..df2b8c3 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -20,11 +20,92 @@ limitations under the License.
 
 __all__ = ["select", "create", "get_hadoop_conf_dir", "get_hadoop_dir"]
 
+import os
 import version
 import hdp_select
 
 from resource_management.core import shell
 from resource_management.libraries.script.script import Script
+from resource_management.core.logger import Logger
+
+PACKAGE_DIRS = {
+  "accumulo": {
+    "conf_dir": "/etc/accumulo/conf",
+    "current_dir": "/usr/hdp/current/accumulo-client/conf"
+  },
+  "falcon": {
+    "conf_dir": "/etc/falcon/conf",
+    "current_dir": "/usr/hdp/current/falcon-client/conf"
+  },
+  "hadoop": {
+    "conf_dir": "/etc/hadoop/conf",
+    "current_dir": "/usr/hdp/current/hadoop-client/conf"
+  },
+  "hbase": {
+    "conf_dir": "/etc/hbase/conf",
+    "current_dir": "/usr/hdp/current/hbase-client/conf"
+  },
+  "hive": {
+    "conf_dir": "/etc/hive/conf",
+    "current_dir": "/usr/hdp/current/hive-client/conf"
+  },
+  "kafka": {
+    "conf_dir": "/etc/kafka/conf",
+    "current_dir": "/usr/hdp/current/kafka-broker/conf"
+  },
+  "knox": {
+    "conf_dir": "/etc/knox/conf",
+    "current_dir": "/usr/hdp/current/knox-server/conf"
+  },
+  "mahout": {
+    "conf_dir": "/etc/mahout/conf",
+    "current_dir": "/usr/hdp/current/mahout-client/conf"
+  },
+  "oozie": {
+    "conf_dir": "/etc/oozie/conf",
+    "current_dir": "/usr/hdp/current/oozie-client/conf"
+  },
+  "phoenix": {
+    "conf_dir": "/etc/phoenix/conf",
+    "current_dir": "/usr/hdp/current/phoenix-client/conf"
+  },
+  "ranger-admin": {
+    "conf_dir": "/etc/ranger/admin/conf",
+    "current_dir": "/usr/hdp/current/ranger-admin/conf"
+  },
+  "ranger-kms": {
+    "conf_dir": "/etc/ranger/kms/conf",
+    "current_dir": "/usr/hdp/current/ranger-kms/conf"
+  },
+  "ranger-usersync": {
+    "conf_dir": "/etc/ranger/kms/usersync",
+    "current_dir": "/usr/hdp/current/ranger-usersync/conf"
+  },
+  "slider": {
+    "conf_dir": "/etc/slider/conf",
+    "current_dir": "/usr/hdp/current/slider-client/conf"
+  },
+  "spark": {
+    "conf_dir": "/etc/spark/conf",
+    "current_dir": "/usr/hdp/current/spark-client/conf"
+  },
+  "sqoop": {
+    "conf_dir": "/etc/sqoop/conf",
+    "current_dir": "/usr/hdp/current/sqoop-client/conf"
+  },
+  "storm": {
+    "conf_dir": "/etc/storm/conf",
+    "current_dir": "/usr/hdp/current/storm-client/conf"
+  },
+  "tez": {
+    "conf_dir": "/etc/tez/conf",
+    "current_dir": "/usr/hdp/current/tez-client/conf"
+  },
+  "zookeeper": {
+    "conf_dir": "/etc/zookeeper/conf",
+    "current_dir": "/usr/hdp/current/zookeeper-client/conf"
+  }
+}
 
 TEMPLATE = "conf-select {0} --package {1} --stack-version {2} --conf-version 0"
 
@@ -38,7 +119,7 @@ def _valid(stack_name, package, ver):
   return True
 
 
-def create(stack_name, package, version):
+def create(stack_name, package, version, dry_run = False):
   """
   Creates a config version for the specified package
   :stack_name: the name of the stack
@@ -49,7 +130,11 @@ def create(stack_name, package, version):
   if not _valid(stack_name, package, version):
     return
 
-  shell.call(TEMPLATE.format("create-conf-dir", package, version), logoutput=False, quiet=True)
+  command = "dry-run-create" if dry_run else "create-conf-dir"
+
+  code, stdout = shell.call(TEMPLATE.format(command, package, version), logoutput=False, quiet=True)
+
+  return stdout
 
 
 def select(stack_name, package, version, try_create=True):
@@ -115,5 +200,34 @@ def get_hadoop_conf_dir(force_latest_on_upgrade=False):
   return hadoop_conf_dir
 
 
+def create_config_links(stack_id, stack_version):
+  """
+  Creates config links
+  stack_id:  stack id, ie HDP-2.3
+  stack_version:  version to set, ie 2.3.0.0-1234
+  """
+
+  if stack_id is None:
+    Logger.info("Cannot create config links when stack_id is not defined")
+    return
+
+  args = stack_id.upper().split('-')
+  if len(args) != 2:
+    Logger.info("Unrecognized stack id {0}".format(stack_id))
+    return
+
+  if args[0] != "HDP":
+    Logger.info("Unrecognized stack name {0}".format(args[0]))
+
+  if version.compare_versions(version.format_hdp_stack_version(args[1]), "2.3.0.0") < 0:
+    Logger.info("Cannot link configs unless HDP-2.3 or higher")
+    return
+
+  for k, v in PACKAGE_DIRS.iteritems():
+    if os.path.exists(v['conf_dir']):
+      new_conf_dir = create(args[0], k, stack_version, dry_run = True)
 
+      if not os.path.exists(new_conf_dir):
+        Logger.info("Creating conf {0} for {1}".format(new_conf_dir, k))
+        select(args[0], k, stack_version)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/841e99b0/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
index d2855b1..0cacb07 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
@@ -54,6 +54,7 @@ class HiveClientDefault(HiveClient):
     import params
     env.set_params(params)
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+      conf_select.select(params.stack_name, "hive", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
       hdp_select.select("hadoop-client", params.version)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/841e99b0/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py
index 1bd82ae..0a01921 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py
@@ -56,6 +56,7 @@ class TezClientLinux(TezClient):
     env.set_params(params)
 
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+      conf_select.select(params.stack_name, "tez", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
       hdp_select.select("hadoop-client", params.version)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/841e99b0/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
index 8d1c07f..9c1b3d7 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
@@ -32,6 +32,7 @@ from resource_management import *
 from resource_management.libraries.functions.list_ambari_managed_repos import list_ambari_managed_repos
 from ambari_commons.os_check import OSCheck, OSConst
 from resource_management.libraries.functions.packages_analyzer import allInstalledPackages
+from resource_management.libraries.functions import conf_select
 from resource_management.core.shell import call
 
 from resource_management.core.logger import Logger
@@ -143,6 +144,11 @@ class InstallPackages(Script):
     if num_errors > 0:
       raise Fail("Failed to distribute repositories/install packages")
 
+    if 'package_installation_result' in self.structured_output and \
+      'actual_version' in self.structured_output and \
+      self.structured_output['package_installation_result'] == 'SUCCESS':
+      conf_select.create_config_links(stack_id, self.structured_output['actual_version'])
+
   def get_actual_version_from_file(self):
     """
     Search the repository version history file for a line that contains repository_version,actual_version

http://git-wip-us.apache.org/repos/asf/ambari/blob/841e99b0/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py b/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
index 2f2a518..88c798d 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
@@ -20,14 +20,19 @@ Ambari Agent
 
 """
 
+import os
+import shutil
 from ambari_commons.os_check import OSCheck
 from resource_management.libraries.script import Script
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.version import compare_versions
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.core import shell
 from resource_management.core.exceptions import Fail
 from resource_management.core.logger import Logger
+from resource_management.core.resources.system import Execute
+from resource_management.core.shell import as_sudo
 
 class UpgradeSetAll(Script):
   """
@@ -58,5 +63,41 @@ class UpgradeSetAll(Script):
         code, out = shell.call(cmd)
         Logger.info("Command: {0}\nCode: {1}, Out: {2}".format(cmd, str(code), str(out)))
 
+      if compare_versions(real_ver, format_hdp_stack_version("2.3")) >= 0:
+        # backup the old and symlink /etc/[component]/conf to /usr/hdp/current/[component]
+        for k, v in conf_select.PACKAGE_DIRS.iteritems():
+          link_config(v['conf_dir'], v['current_dir'])
+
+def link_config(old_conf, link_conf):
+  """
+  Creates a config link following:
+  1. Checks if the old_conf location exists
+  2. If it does, check if it's a link already
+  3. Make a copy to /etc/[component]/conf.backup
+  4. Remove the old directory and create a symlink to link_conf
+  :old_conf: the old config directory, ie /etc/[component]/config
+  :link_conf: the new target for the config directory, ie /usr/hdp/current/[component-dir]/conf
+  """
+  if not os.path.exists(old_conf):
+    Logger.debug("Skipping {0}; it does not exist".format(old_conf))
+    return
+  
+  if os.path.islink(old_conf):
+    Logger.debug("Skipping {0}; it is already a link".format(old_conf))
+    return
+
+  old_parent = os.path.abspath(os.path.join(old_conf, os.pardir))
+
+  Logger.info("Linking {0} to {1}".format(old_conf, link_conf))
+
+  old_conf_copy = os.path.join(old_parent, "conf.backup")
+  if not os.path.exists(old_conf_copy):
+    Execute(as_sudo(["cp", "-R", "-p", old_conf, old_conf_copy]), logoutput=True)
+
+  shutil.rmtree(old_conf, ignore_errors=True)
+
+  # link /etc/[component]/conf -> /usr/hdp/current/[component]-client/conf
+  os.symlink(link_conf, old_conf)
+
 if __name__ == "__main__":
   UpgradeSetAll().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/841e99b0/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py
index 71ac3df..d41a889 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py
@@ -17,11 +17,12 @@ limitations under the License.
 
 """
 
-import sys
-from resource_management import *
-from shared_initialization import *
+from resource_management.libraries.script.hook import Hook
+from shared_initialization import link_configs
+from shared_initialization import setup_config
+from shared_initialization import setup_hdp_install_directory
+from resource_management.libraries.script import Script
 
-#Hook for hosts with only client without other components
 class AfterInstallHook(Hook):
 
   def hook(self, env):
@@ -31,5 +32,7 @@ class AfterInstallHook(Hook):
     setup_hdp_install_directory()
     setup_config()
 
+    link_configs(self.stroutfile)
+
 if __name__ == "__main__":
   AfterInstallHook().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/841e99b0/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
index 170f72e..fede024 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -17,7 +17,17 @@ limitations under the License.
 
 """
 import os
-from resource_management import *
+import shutil
+import ambari_simplejson as json
+from resource_management.core.shell import as_sudo
+from resource_management.core.logger import Logger
+from resource_management.core.resources.system import Execute
+from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions import conf_select
+from resource_management.libraries.resources.xml_config import XmlConfig
+from resource_management.libraries.script import Script
+
 
 def setup_hdp_install_directory():
   # This is a name of marker file.
@@ -42,3 +52,87 @@ def setup_config():
               owner=params.hdfs_user,
               group=params.user_group,
               only_if=format("ls {hadoop_conf_dir}"))
+
+
+def load_version(struct_out_file):
+  """
+  Load version from file.  Made a separate method for testing
+  """
+  json_version = None
+  try:
+    if os.path.exists(struct_out_file):
+      with open(struct_out_file, 'r') as fp:
+        json_info = json.load(fp)
+        json_version = json_info['version']
+  except:
+    pass
+
+  return json_version
+  
+
+def link_configs(struct_out_file):
+  """
+  Links configs, only on a fresh install of HDP-2.3 and higher
+  """
+
+  if not Script.is_hdp_stack_greater_or_equal("2.3"):
+    Logger.info("Can only link configs for HDP-2.3 and higher.")
+    return
+
+  json_version = load_version(struct_out_file)
+
+  if not json_version:
+    Logger.info("Could not load 'version' from {0}".format(struct_out_file))
+    return
+
+  for k, v in conf_select.PACKAGE_DIRS.iteritems():
+    _link_configs(k, json_version, v['conf_dir'], v['current_dir'])
+
+def _link_configs(package, version, old_conf, link_conf):
+  """
+  Link a specific package's configuration directory
+  """
+
+  if not os.path.exists(old_conf):
+    Logger.debug("Skipping {0} as it does not exist.".format(old_conf))
+    return
+
+  # check if conf is a link to the target already
+  if os.path.islink(old_conf):
+    Logger.debug("{0} is already a link to {1}".format(old_conf, os.path.realpath(old_conf)))
+    return
+
+  # make backup dir and copy everything in case configure() was called after install()
+  old_parent = os.path.abspath(os.path.join(old_conf, os.pardir))
+  old_conf_copy = os.path.join(old_parent, "conf.install")
+  if not os.path.exists(old_conf_copy):
+    try:
+      Execute(as_sudo(["cp", "-R", "-p", old_conf, old_conf_copy]), logoutput=True)
+    except:
+      pass
+
+  versioned_conf = conf_select.create("HDP", package, version, dry_run = True)
+
+  Logger.info("New conf directory is {0}".format(versioned_conf))
+
+  # make new conf dir and copy everything in case configure() was called after install()
+  if not os.path.exists(versioned_conf):
+    conf_select.create("HDP", package, version)
+    try:
+      Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
+        logoutput=True)
+    except:
+      pass
+
+  # make /usr/hdp/<version>/hadoop/conf point to the versioned config.
+  # /usr/hdp/current is already set
+  conf_select.select("HDP", package, version)
+
+  # no more references to /etc/[component]/conf
+  shutil.rmtree(old_conf, ignore_errors=True)
+
+  # link /etc/[component]/conf -> /usr/hdp/current/[component]-client/conf
+  os.symlink(link_conf, old_conf)
+      
+  # should conf.install be removed?
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/841e99b0/ambari-server/src/test/python/custom_actions/test_ru_set_all.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/custom_actions/test_ru_set_all.py b/ambari-server/src/test/python/custom_actions/test_ru_set_all.py
index d9e8c70..e0e203b 100644
--- a/ambari-server/src/test/python/custom_actions/test_ru_set_all.py
+++ b/ambari-server/src/test/python/custom_actions/test_ru_set_all.py
@@ -92,3 +92,39 @@ class TestRUSetAll(RMFTestCase):
 
     call_mock.assert_called_with("hdp-select set all 2.2.1.0-2260")
 
+  @patch("resource_management.core.shell.call")
+  @patch.object(Script, 'get_config')
+  @patch.object(OSCheck, 'is_redhat_family')
+  @patch("ru_set_all.link_config")
+  def test_execution_23(self, link_mock, family_mock, get_config_mock, call_mock):
+    # Mock the config objects
+    json_file_path = os.path.join(self.CUSTOM_ACTIONS_DIR, "ru_execute_tasks_namenode_prepare.json")
+    self.assertTrue(os.path.isfile(json_file_path))
+    with open(json_file_path, "r") as json_file:
+      json_payload = json.load(json_file)
+
+    json_payload['hostLevelParams']['stack_version'] = "2.3"
+    json_payload['commandParams']['version'] = "2.3.0.0-1234"
+
+    config_dict = ConfigDictionary(json_payload)
+
+    family_mock.return_value = True
+    get_config_mock.return_value = config_dict
+    call_mock.side_effect = fake_call   # echo the command
+
+    # Ensure that the json file was actually read.
+    stack_name = default("/hostLevelParams/stack_name", None)
+    stack_version = default("/hostLevelParams/stack_version", None)
+    service_package_folder = default('/roleParams/service_package_folder', None)
+
+    self.assertEqual(stack_name, "HDP")
+    self.assertEqual(stack_version, 2.3)
+    self.assertEqual(service_package_folder, "common-services/HDFS/2.1.0.2.0/package")
+
+    # Begin the test
+    ru_execute = UpgradeSetAll()
+    ru_execute.actionexecute(None)
+
+    self.assertTrue(link_mock.called)
+    call_mock.assert_called_with("hdp-select set all 2.3.0.0-1234")
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/841e99b0/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
index ded432b..07feda0 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
@@ -206,17 +206,23 @@ class TestHiveClient(RMFTestCase):
                        config_dict = json_content,
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None), (0, None)],
+                       call_mocks = [(0, None), (0, None), (0, None), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalled('Execute',
                               ('hdp-select', 'set', 'hadoop-client', version), sudo=True,)
     self.assertNoMoreResources()
 
-    self.assertEquals(2, mocks_dict['call'].call_count)
+    self.assertEquals(4, mocks_dict['call'].call_count)
     self.assertEquals(
-      "conf-select create-conf-dir --package hadoop --stack-version 2.3.0.0-1234 --conf-version 0",
+      "conf-select create-conf-dir --package hive --stack-version 2.3.0.0-1234 --conf-version 0",
        mocks_dict['call'].call_args_list[0][0][0])
     self.assertEquals(
-      "conf-select set-conf-dir --package hadoop --stack-version 2.3.0.0-1234 --conf-version 0",
+      "conf-select set-conf-dir --package hive --stack-version 2.3.0.0-1234 --conf-version 0",
        mocks_dict['call'].call_args_list[1][0][0])
+    self.assertEquals(
+      "conf-select create-conf-dir --package hadoop --stack-version 2.3.0.0-1234 --conf-version 0",
+       mocks_dict['call'].call_args_list[2][0][0])
+    self.assertEquals(
+      "conf-select set-conf-dir --package hadoop --stack-version 2.3.0.0-1234 --conf-version 0",
+       mocks_dict['call'].call_args_list[3][0][0])

http://git-wip-us.apache.org/repos/asf/ambari/blob/841e99b0/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
index fa3f717..2979990 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
@@ -18,13 +18,15 @@ See the License for the specific language governing permissions and
 limitations under the License.
 '''
 
+import json
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
 
-
 @patch("os.path.exists", new = MagicMock(return_value=True))
 class TestHookAfterInstall(RMFTestCase):
+
   def test_hook_default(self):
+
     self.executeScript("2.0.6/hooks/after-INSTALL/scripts/hook.py",
                        classname="AfterInstallHook",
                        command="hook",
@@ -38,4 +40,48 @@ class TestHookAfterInstall(RMFTestCase):
                               configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
                               only_if="ls /etc/hadoop/conf")
 
-    self.assertNoMoreResources()
\ No newline at end of file
+    self.assertNoMoreResources()
+
+
+  @patch("shared_initialization.load_version", new = MagicMock(return_value="2.3.0.0-1243"))
+  @patch("resource_management.libraries.functions.conf_select.create")
+  @patch("resource_management.libraries.functions.conf_select.select")
+  @patch("os.symlink")
+  @patch("shutil.rmtree")
+  def test_hook_default_conf_select(self, rmtree_mock, symlink_mock, conf_select_select_mock, conf_select_create_mock):
+
+    def mocked_conf_select(arg1, arg2, arg3, dry_run = False):
+      return "/etc/{0}/{1}/0".format(arg2, arg3)
+
+    conf_select_create_mock.side_effect = mocked_conf_select
+
+    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
+    with open(config_file, "r") as f:
+      json_content = json.load(f)
+
+    version = '2.3.0.0-1234'
+    json_content['commandParams']['version'] = version
+    json_content['hostLevelParams']['stack_version'] = "2.3"
+
+    self.executeScript("2.0.6/hooks/after-INSTALL/scripts/hook.py",
+                       classname="AfterInstallHook",
+                       command="hook",
+                       config_dict = json_content)
+
+
+    self.assertResourceCalled("Execute",
+      "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E touch /var/lib/ambari-agent/data/hdp-select-set-all.performed ; " \
+      "ambari-sudo.sh /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^2.3 | tail -1`",
+      only_if = "ls -d /usr/hdp/2.3*",
+      not_if = "test -f /var/lib/ambari-agent/data/hdp-select-set-all.performed")
+
+
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+      owner = 'hdfs',
+      group = 'hadoop',
+      conf_dir = "/usr/hdp/current/hadoop-client/conf",
+      configurations = self.getConfig()['configurations']['core-site'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
+      only_if="ls /usr/hdp/current/hadoop-client/conf")
+
+    self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/841e99b0/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py b/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
index d26813a..eecb52c 100644
--- a/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
+++ b/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
@@ -105,16 +105,22 @@ class TestTezClient(RMFTestCase):
                        config_dict = json_content,
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None), (0, None)],
+                       call_mocks = [(0, None), (0, None), (0, None), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalled('Execute', ('hdp-select', 'set', 'hadoop-client', version), sudo=True)
     self.assertNoMoreResources()
 
-    self.assertEquals(2, mocks_dict['call'].call_count)
+    self.assertEquals(4, mocks_dict['call'].call_count)
     self.assertEquals(
-      "conf-select create-conf-dir --package hadoop --stack-version 2.3.0.0-1234 --conf-version 0",
+      "conf-select create-conf-dir --package tez --stack-version 2.3.0.0-1234 --conf-version 0",
        mocks_dict['call'].call_args_list[0][0][0])
     self.assertEquals(
-      "conf-select set-conf-dir --package hadoop --stack-version 2.3.0.0-1234 --conf-version 0",
+      "conf-select set-conf-dir --package tez --stack-version 2.3.0.0-1234 --conf-version 0",
        mocks_dict['call'].call_args_list[1][0][0])
+    self.assertEquals(
+      "conf-select create-conf-dir --package hadoop --stack-version 2.3.0.0-1234 --conf-version 0",
+       mocks_dict['call'].call_args_list[2][0][0])
+    self.assertEquals(
+      "conf-select set-conf-dir --package hadoop --stack-version 2.3.0.0-1234 --conf-version 0",
+       mocks_dict['call'].call_args_list[3][0][0])


Mime
View raw message