ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jonathanhur...@apache.org
Subject ambari git commit: AMBARI-11220 - Tez Upgrade Pack For HDP-2.2 to HDP-2.3 (jonathanhurley)
Date Mon, 18 May 2015 20:31:01 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 8b0c964a8 -> 67ca6364d


AMBARI-11220 - Tez Upgrade Pack For HDP-2.2 to HDP-2.3 (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/67ca6364
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/67ca6364
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/67ca6364

Branch: refs/heads/trunk
Commit: 67ca6364d5917eba79fd2ead5ba0df6ba876f631
Parents: 8b0c964
Author: Jonathan Hurley <jhurley@hortonworks.com>
Authored: Mon May 18 12:51:53 2015 -0400
Committer: Jonathan Hurley <jhurley@hortonworks.com>
Committed: Mon May 18 16:30:32 2015 -0400

----------------------------------------------------------------------
 .../libraries/functions/conf_select.py          | 54 ++++++++++++++---
 .../0.4.0.2.1/package/scripts/params_linux.py   |  7 +--
 .../0.4.0.2.1/package/scripts/service_check.py  | 61 ++++++++++----------
 .../YARN/2.1.0.2.0/package/scripts/service.py   | 33 +++++------
 .../2.0.6/hooks/after-INSTALL/scripts/params.py |  2 +-
 .../2.0.6/hooks/before-ANY/scripts/params.py    |  2 +-
 .../2.0.6/hooks/before-START/scripts/params.py  |  2 +-
 .../stacks/HDP/2.2/upgrades/upgrade-2.3.xml     | 12 ++++
 8 files changed, 105 insertions(+), 68 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/67ca6364/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index 7c5c005..31b1625 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -64,6 +64,13 @@ SERVER_ROLE_DIRECTORY_MAP = {
   'ZOOKEEPER_SERVER' : 'zookeeper-server'
 }
 
+# mapping of service check to hdp-select component
+SERVICE_CHECK_DIRECTORY_MAP = {
+  "HDFS_SERVICE_CHECK" : "hadoop-client",
+  "TEZ_SERVICE_CHECK" : "hadoop-client",
+  "PIG_SERVICE_CHECK" : "hadoop-client"
+}
+
 TEMPLATE = "conf-select {0} --package {1} --stack-version {2} --conf-version 0"
 HADOOP_DIR_TEMPLATE = "/usr/hdp/{0}/{1}/{2}"
 HADOOP_DIR_DEFAULTS = {
@@ -129,14 +136,21 @@ def select(stack_name, package, version, try_create=True):
 
   shell.call(TEMPLATE.format("set-conf-dir", package, version), logoutput=False, quiet=False)
 
-def get_hadoop_conf_dir():
+def get_hadoop_conf_dir(force_latest_on_upgrade=False):
   """
   Gets the shared hadoop conf directory using:
   1.  Start with /etc/hadoop/conf
   2.  When the stack is greater than HDP-2.2, use /usr/hdp/current/hadoop-client/conf
   3.  Only when doing a RU and HDP-2.3 or higher, use the value as computed
       by conf-select.  This is in the form /usr/hdp/VERSION/hadoop/conf to make sure
-      the configs are written in the correct place
+      the configs are written in the correct place. However, if the component itself has
+      not yet been upgraded, it should use the hadoop configs from the prior version.
+      This will perform an hdp-select status to determine which version to use.
+  :param force_latest_on_upgrade:  if True, then force the returned path to always
+  be that of the upgrade target version, even if hdp-select has not been called. This
+  is primarily used by hooks like before-ANY to ensure that hadoop environment
+  configurations are written to the correct location since they are written out
+  before the hdp-select/conf-select would have been called.
   """
   hadoop_conf_dir = "/etc/hadoop/conf"
 
@@ -145,11 +159,22 @@ def get_hadoop_conf_dir():
 
     stack_info = _get_upgrade_stack()
 
+    # if upgrading to >= HDP 2.3
     if stack_info is not None and Script.is_hdp_stack_greater_or_equal("2.3"):
       stack_name = stack_info[0]
       stack_version = stack_info[1]
 
+      # ensure the new HDP stack is conf-selected
       select(stack_name, "hadoop", stack_version)
+
+      # determine if hdp-select has been run and if not, then use the current
+      # hdp version until this component is upgraded
+      if not force_latest_on_upgrade:
+        current_hdp_version = get_role_component_current_hdp_version()
+        if current_hdp_version is not None and stack_version != current_hdp_version:
+          stack_version = current_hdp_version
+
+      # only change the hadoop_conf_dir path, don't conf-select this older version
       hadoop_conf_dir = "/usr/hdp/{0}/hadoop/conf".format(stack_version)
 
   return hadoop_conf_dir
@@ -194,14 +219,25 @@ def get_role_component_current_hdp_version():
   Gets the current HDP version of the component that this role command is for.
   :return:  the current HDP version of the specified component or None
   """
-  command_role = default("/role", "")
-  if command_role in SERVER_ROLE_DIRECTORY_MAP:
-    hdp_select_component = SERVER_ROLE_DIRECTORY_MAP[command_role]
-    current_hdp_version = get_hdp_version(hdp_select_component)
+  hdp_select_component = None
+  role = default("/role", "")
+  role_command =  default("/roleCommand", "")
+
+  if role in SERVER_ROLE_DIRECTORY_MAP:
+    hdp_select_component = SERVER_ROLE_DIRECTORY_MAP[role]
+  elif role_command == "SERVICE_CHECK" and role in SERVICE_CHECK_DIRECTORY_MAP:
+    hdp_select_component = SERVICE_CHECK_DIRECTORY_MAP[role]
+
+  if hdp_select_component is None:
+    return None
 
+  current_hdp_version = get_hdp_version(hdp_select_component)
+
+  if current_hdp_version is None:
+    Logger.warning("Unable to determine hdp-select version for {0}".format(
+      hdp_select_component))
+  else:
     Logger.info("{0} is currently at version {1}".format(
       hdp_select_component, current_hdp_version))
-    
-    return current_hdp_version
 
-  return None
+  return current_hdp_version

http://git-wip-us.apache.org/repos/asf/ambari/blob/67ca6364/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
index 1e183de..ab29a95 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
@@ -19,11 +19,10 @@ limitations under the License.
 """
 import os
 
-from resource_management import *
+from resource_management.libraries.resources import HdfsResource
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
-from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
 
@@ -47,11 +46,11 @@ hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 tez_etc_dir = "/etc/tez"
 config_dir = "/etc/tez/conf"
-path_to_tez_examples_jar = "/usr/lib/tez/tez-mapreduce-examples*.jar"
+tez_examples_jar = "/usr/lib/tez/tez-mapreduce-examples*.jar"
 
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
-  path_to_tez_examples_jar = "/usr/hdp/{hdp_version}/tez/tez-examples*.jar"
+  tez_examples_jar = "/usr/hdp/current/tez-client/tez-examples*.jar"
 
 # tez only started linking /usr/hdp/x.x.x.x/tez-client/conf in HDP 2.3+
 if Script.is_hdp_stack_greater_or_equal("2.3"):

http://git-wip-us.apache.org/repos/asf/ambari/blob/67ca6364/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
index abff479..98973ae 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
@@ -33,46 +33,43 @@ class TezServiceCheckLinux(TezServiceCheck):
     import params
     env.set_params(params)
 
-    if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
-      hdp_version = functions.get_hdp_version("hadoop-client")
-
-    path_to_tez_jar = format(params.path_to_tez_examples_jar)
-    wordcount_command = format("jar {path_to_tez_jar} orderedwordcount "
-                               "/tmp/tezsmokeinput/sample-tez-test /tmp/tezsmokeoutput/")
+    path_to_tez_jar = format(params.tez_examples_jar)
+    wordcount_command = format("jar {path_to_tez_jar} orderedwordcount /tmp/tezsmokeinput/sample-tez-test /tmp/tezsmokeoutput/")
     test_command = format("fs -test -e /tmp/tezsmokeoutput/_SUCCESS")
-    
-    File( format("{tmp_dir}/sample-tez-test"),
-          content = "foo\nbar\nfoo\nbar\nfoo",
-          mode = 0755
+
+    File(format("{tmp_dir}/sample-tez-test"),
+      content = "foo\nbar\nfoo\nbar\nfoo",
+      mode = 0755
     )
-    
+
     params.HdfsResource("/tmp/tezsmokeinput",
-                        action="create_on_execute",
-                        type="directory",
-                        owner=params.smokeuser,
+      action = "create_on_execute",
+      type = "directory",
+      owner = params.smokeuser,
     )
     params.HdfsResource("/tmp/tezsmokeinput/sample-tez-test",
-                        action="create_on_execute",
-                        type="file",
-                        owner=params.smokeuser,
-                        source=format("{tmp_dir}/sample-tez-test"),
+      action = "create_on_execute",
+      type = "file",
+      owner = params.smokeuser,
+      source = format("{tmp_dir}/sample-tez-test"),
     )
-    params.HdfsResource(None, action="execute")
-
-    ExecuteHadoop( wordcount_command,
-                   tries = 3,
-                   try_sleep = 5,
-                   user = params.smokeuser,
-                   conf_dir = params.hadoop_conf_dir,
-                   bin_dir = params.hadoop_bin_dir
+
+    params.HdfsResource(None, action = "execute")
+
+    ExecuteHadoop(wordcount_command,
+      tries = 3,
+      try_sleep = 5,
+      user = params.smokeuser,
+      conf_dir = params.hadoop_conf_dir,
+      bin_dir = params.hadoop_bin_dir
     )
 
-    ExecuteHadoop( test_command,
-                   tries = 10,
-                   try_sleep = 6,
-                   user = params.smokeuser,
-                   conf_dir = params.hadoop_conf_dir,
-                   bin_dir = params.hadoop_bin_dir
+    ExecuteHadoop(test_command,
+      tries = 10,
+      try_sleep = 6,
+      user = params.smokeuser,
+      conf_dir = params.hadoop_conf_dir,
+      bin_dir = params.hadoop_bin_dir
     )
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/67ca6364/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py
index c827529..3d6b8d1 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py
@@ -39,10 +39,14 @@ def service(componentName, action='start', serviceName='yarn'):
   import params
 
   if serviceName == 'mapreduce' and componentName == 'historyserver':
+    delete_pid_file = True
     daemon = format("{mapred_bin}/mr-jobhistory-daemon.sh")
     pid_file = format("{mapred_pid_dir}/mapred-{mapred_user}-{componentName}.pid")
     usr = params.mapred_user
   else:
+    # !!! yarn-daemon.sh deletes the PID for us; if we remove it the script
+    # may not work correctly when stopping the service
+    delete_pid_file = False
     daemon = format("{yarn_bin}/yarn-daemon.sh")
     pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-{componentName}.pid")
     usr = params.yarn_user
@@ -54,35 +58,24 @@ def service(componentName, action='start', serviceName='yarn'):
     check_process = format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1")
 
     # Remove the pid file if its corresponding process is not running.
-    File(pid_file,
-         action="delete",
-         not_if=check_process)
+    File(pid_file, action = "delete", not_if = check_process)
 
     # Attempt to start the process. Internally, this is skipped if the process is already running.
-    Execute(daemon_cmd,
-            user=usr,
-            not_if=check_process
-    )
+    Execute(daemon_cmd, user = usr, not_if = check_process)
 
     # Ensure that the process with the expected PID exists.
-    Execute(check_process,
-            user=usr,
-            not_if=check_process,
-            initial_wait=5
-    )
+    Execute(check_process, user = usr, not_if = check_process, initial_wait = 5)
 
   elif action == 'stop':
     daemon_cmd = format("{cmd} stop {componentName}")
-    Execute(daemon_cmd,
-            user=usr)
+    Execute(daemon_cmd, user=usr)
+
+    # !!! yarn-daemon doesn't need us to delete PIDs
+    if delete_pid_file is True:
+      File(pid_file, action="delete")
 
-    File(pid_file,
-         action="delete")
 
   elif action == 'refreshQueues':
     rm_kinit_cmd = params.rm_kinit_cmd
     refresh_cmd = format("{rm_kinit_cmd} export HADOOP_LIBEXEC_DIR={hadoop_libexec_dir} && {yarn_container_bin}/yarn rmadmin -refreshQueues")
-
-    Execute(refresh_cmd,
-            user=usr,
-    )
+    Execute(refresh_cmd, user=usr)

http://git-wip-us.apache.org/repos/asf/ambari/blob/67ca6364/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
index 3031a9f..d4a0400 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
@@ -33,7 +33,7 @@ hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 # default hadoop params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 hadoop_libexec_dir = conf_select.get_hadoop_dir("libexec")
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 
 # HDP 2.2+ params

http://git-wip-us.apache.org/repos/asf/ambari/blob/67ca6364/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index 9d57c18..e0934ec 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -76,7 +76,7 @@ hadoop_home = "/usr/lib/hadoop"
 hadoop_secure_dn_user = hdfs_user
 hadoop_dir = "/etc/hadoop"
 versioned_hdp_root = '/usr/hdp/current'
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
 hadoop_libexec_dir = conf_select.get_hadoop_dir("libexec")
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/67ca6364/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index 889dbd1..c6d632d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -36,7 +36,7 @@ hadoop_lib_home = conf_select.get_hadoop_dir("lib")
 hadoop_bin = conf_select.get_hadoop_dir("sbin")
 hadoop_home = '/usr'
 create_lib_snappy_symlinks = True
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
 default_topology_script_file_path = "/etc/hadoop/conf/topology_script.py"
 
 # HDP 2.2+ params

http://git-wip-us.apache.org/repos/asf/ambari/blob/67ca6364/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
index 69f7b93..e954af2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
@@ -448,6 +448,18 @@
 
     <service name="TEZ">
       <component name="TEZ_CLIENT">
+        <pre-upgrade>
+          <task xsi:type="configure">
+            <type>tez-site</type>
+            <key>tez.am.view-acls</key>
+            <value>*</value>
+          </task>
+          <task xsi:type="configure">
+            <type>tez-site</type>
+            <key>tez.task.generate.counters.per.io</key>
+            <value>true</value>
+          </task>
+        </pre-upgrade>
         <upgrade>
           <task xsi:type="restart" />
         </upgrade>


Mime
View raw message