ambari-commits mailing list archives

From alejan...@apache.org
Subject git commit: AMBARI-7842. Ambari to manage tarballs on HDFS (alejandro)
Date Wed, 29 Oct 2014 00:24:28 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 552792b93 -> d6652e8dc


AMBARI-7842. Ambari to manage tarballs on HDFS (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d6652e8d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d6652e8d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d6652e8d

Branch: refs/heads/trunk
Commit: d6652e8dc8fd5642ce89cfb683f62c4c98cd3bff
Parents: 552792b
Author: Alejandro Fernandez <afernandez@hortonworks.com>
Authored: Mon Oct 27 17:37:56 2014 -0700
Committer: Alejandro Fernandez <afernandez@hortonworks.com>
Committed: Tue Oct 28 17:23:45 2014 -0700

----------------------------------------------------------------------
 .../dynamic_variable_interpretation.py          | 117 +++++--------------
 .../HIVE/package/scripts/hive_server.py         |   3 +-
 .../services/HIVE/package/scripts/webhcat.py    |   8 +-
 .../YARN/package/scripts/historyserver.py       |   2 +-
 .../HDP/2.2/configuration/cluster-env.xml       |  28 ++---
 .../HIVE/configuration/webhcat-site.xml         |  20 ++--
 .../2.2/services/TEZ/configuration/tez-site.xml |   2 +-
 .../YARN/configuration-mapred/mapred-site.xml   |  17 ++-
 .../services/YARN/configuration/yarn-site.xml   |   2 +-
 .../test/python/stacks/2.2/configs/default.json |   8 +-
 .../test/python/stacks/2.2/configs/secured.json |   8 +-
 ambari-web/app/data/HDP2/site_properties.js     |  26 +++++
 12 files changed, 114 insertions(+), 127 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d6652e8d/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py b/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
index 728620e..4410fef 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
@@ -30,16 +30,16 @@ from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop
 from resource_management.core.resources.system import Execute
 from resource_management.core.exceptions import Fail
 from resource_management.core.logger import Logger
+from resource_management.core import shell
 
 """
 This file provides helper methods needed for the versioning of RPMs. Specifically, it does dynamic variable
-interpretation to replace strings like {{ hdp_stack_version }} and {{ component_version }} where the value of the
+interpretation to replace strings like {{ hdp_stack_version }} where the value of the
 variables cannot be determined ahead of time, but rather, depends on what files are found.
 
 It assumes that {{ hdp_stack_version }} is constructed as ${major.minor.patch.rev}-${build_number}
 E.g., 998.2.2.1.0-998
 Please note that "-${build_number}" is optional.
-Whereas {{ component_version }} is up to the Component to define, may be 3.0.1 or 301.
 """
 
 # These values must be the suffix of the properties in cluster-env.xml
@@ -53,7 +53,7 @@ def _get_tar_source_and_dest_folder(tarball_prefix):
   :return: Returns a tuple of (x, y) after verifying the properties
   """
   component_tar_source_file = default("/configurations/cluster-env/%s%s" % (tarball_prefix.lower(), TAR_SOURCE_SUFFIX), None)
-  # E.g., /usr/hdp/current/hadoop-client/tez-{{ component_version }}.{{ hdp_stack_version }}.tar.gz
+  # E.g., /usr/hdp/current/hadoop-client/tez-{{ hdp_stack_version }}.tar.gz
 
   component_tar_destination_folder = default("/configurations/cluster-env/%s%s" % (tarball_prefix.lower(), TAR_DESTINATION_FOLDER_SUFFIX), None)
   # E.g., hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/
@@ -76,77 +76,12 @@ def _get_tar_source_and_dest_folder(tarball_prefix):
   return component_tar_source_file, component_tar_destination_folder
 
 
-def _create_regex_pattern(file_path, hdp_stack_version):
-  """
-  :param file_path: Input file path
-  :param hdp_stack_version: Stack version, such as 2.2.0.0
-  :return: Returns an expression that uses file system regex that can be used with ls and hadoop fs -ls
-  """
-  # Perform the variable interpretation
-  file_path_pattern = file_path
-  if "{{ component_version }}" in file_path_pattern:
-    file_path_pattern = file_path_pattern.replace("{{ component_version }}", "*")
-
-  # IMPORTANT, the build version was used in HDP 2.2, but may not be needed in future versions.
-  if "{{ hdp_stack_version }}" in file_path_pattern:
-    file_path_pattern = file_path_pattern.replace("{{ hdp_stack_version }}", hdp_stack_version + "*")   # the trailing "*" is the optional build number
-  return file_path_pattern
-
-
-def _populate_source_and_dests(tarball_prefix, source_file_pattern, component_tar_destination_folder, hdp_stack_version):
-  """
-  :param tarball_prefix: Prefix of the tarball must be one of tez, hive, mr, pig
-  :param source_file_pattern: Regex pattern of the source file from the local file system
-  :param component_tar_destination_folder: Destination folder to copy the file to in HDFS
-  :param hdp_stack_version: Stack version number without the build version. E.g., 2.2.0.0
-  :return: Returns a list of tuples (x, y), where x is the source file in the local file system,
-  and y is the destination file path in HDFS
-  """
-  source_and_dest_pairs = []
-
-  for file in glob.glob(source_file_pattern):
-    file_base_name = os.path.basename(file)
-    component_version = None
-    hdp_build_version = None
-
-    # Attempt to retrieve the hdp_build_version and component_version.
-    # In case the build number (which is optional) has dots, attempt to match as many as possible.
-    pattern = "%s-(.*)\\.%s-?([0-9\\.]*)\\..*" % (tarball_prefix, str(hdp_stack_version).replace(".", "\\."))
-    m = re.search(pattern, file_base_name)
-    if m and len(m.groups()) == 2:
-      component_version = str(m.group(1))
-      hdp_build_version = str(m.group(2))   # optional, so may be empty.
-
-    missing_a_variable = False
-    # The destination_file_path will be interpreted as well.
-    destination_file_path = os.path.join(component_tar_destination_folder, file_base_name)
-
-    if "{{ component_version }}" in destination_file_path:
-      if component_version:
-      destination_file_path = destination_file_path.replace("{{ component_version }}", component_version)
-      else:
-        missing_a_variable = True
-
-    if "{{ hdp_stack_version }}" in destination_file_path:
-      if hdp_build_version and hdp_build_version.strip() != "":
-        destination_file_path = destination_file_path.replace("{{ hdp_stack_version }}", "%s-%s" %
-                                                              (hdp_stack_version, hdp_build_version))
-      else:
-        destination_file_path = destination_file_path.replace("{{ hdp_stack_version }}", "%s" % hdp_stack_version)
-
-    if missing_a_variable:
-      print("WARNING. Could not identify Component version in file %s , "
-            "so will not copy to HDFS." % str(file))
-    else:
-      source_and_dest_pairs.append((file, destination_file_path))
-  return source_and_dest_pairs
-
-
-def _copy_files(source_and_dest_pairs, file_owner, kinit_if_needed):
+def _copy_files(source_and_dest_pairs, file_owner, group_owner, kinit_if_needed):
   """
   :param source_and_dest_pairs: List of tuples (x, y), where x is the source file in the local file system,
   and y is the destination file path in HDFS
-  :param file_owner: Owner to set for the file copied to HDFS
+  :param file_owner: Owner to set for the file copied to HDFS (typically hdfs account)
+  :param group_owner: Owning group to set for the file copied to HDFS (typically hadoop group)
   :param kinit_if_needed: kinit command if it is needed, otherwise an empty string
   :return: Returns 0 if at least one file was copied and no exceptions occurred, and 1 otherwise.
 
@@ -164,12 +99,13 @@ def _copy_files(source_and_dest_pairs, file_owner, kinit_if_needed):
         params.HdfsDirectory(destination_dir,
                              action="create",
                              owner=file_owner,
-                             mode=0777
+                             mode=0555
         )
 
         CopyFromLocal(source,
-                      mode=0755,
+                      mode=0444,
                       owner=file_owner,
+                      group=group_owner,
                       dest_dir=destination_dir,
                       kinnit_if_needed=kinit_if_needed,
                       hdfs_user=params.hdfs_user,
@@ -181,11 +117,12 @@ def _copy_files(source_and_dest_pairs, file_owner, kinit_if_needed):
   return return_value
 
 
-def copy_tarballs_to_hdfs(tarball_prefix, component_user, file_owner):
+def copy_tarballs_to_hdfs(tarball_prefix, component_user, file_owner, group_owner):
   """
   :param tarball_prefix: Prefix of the tarball must be one of tez, hive, mr, pig
   :param component_user: User that will execute the Hadoop commands
-  :param file_owner: Owner of the files copied to HDFS
+  :param file_owner: Owner of the files copied to HDFS (typically hdfs account)
+  :param group_owner: Group owner of the files copied to HDFS (typically hadoop group)
   :return: Returns 0 on success, 1 if no files were copied, and in some cases may raise an exception.
 
   In order to call this function, params.py must have all of the following,
@@ -200,16 +137,27 @@ def copy_tarballs_to_hdfs(tarball_prefix, component_user, file_owner):
 
   component_tar_source_file, component_tar_destination_folder = _get_tar_source_and_dest_folder(tarball_prefix)
   if not component_tar_source_file or not component_tar_destination_folder:
+    Logger.warning("Could not retrieve properties for tarball with prefix: %s" % str(tarball_prefix))
+    return 1
+
+  if not os.path.exists(component_tar_source_file):
+    Logger.warning("Could not find file: %s" % str(component_tar_source_file))
     return 1
 
-  source_file_pattern = _create_regex_pattern(component_tar_source_file, params.hdp_stack_version)
-  # This is just the last segment
-  file_name_pattern = source_file_pattern.split('/')[-1:][0]
-  tar_destination_folder_pattern = _create_regex_pattern(component_tar_destination_folder, params.hdp_stack_version)
+  get_hdp_version_cmd = "/usr/bin/hdp-select versions"
+  code, out = shell.call(get_hdp_version_cmd)
+  if code != 0 or not out.startswith(params.hdp_stack_version):
+    Logger.warning("Could not verify HDP version by calling '%s'. Return Code: %s, Output: %s." %
+                   (get_hdp_version_cmd, str(code), str(out)))
+    return 1
+
+  hdp_version = out.strip() # this should include the build number
+
+  file_name = os.path.basename(component_tar_source_file)
+  destination_file = os.path.join(component_tar_destination_folder, file_name)
+  destination_file = destination_file.replace("{{ hdp_stack_version }}", hdp_version)
 
-  # Pattern for searching the file in HDFS. E.g. value, hdfs:///hdp/apps/2.2.0.0*/tez/tez-*.2.2.0.0*.tar.gz
-  hdfs_file_pattern = os.path.join(tar_destination_folder_pattern, file_name_pattern)
-  does_hdfs_file_exist_cmd = "fs -ls %s" % hdfs_file_pattern
+  does_hdfs_file_exist_cmd = "fs -ls %s" % destination_file
 
   kinit_if_needed = ""
   if params.security_enabled:
@@ -234,7 +182,6 @@ def copy_tarballs_to_hdfs(tarball_prefix, component_user, file_owner):
     pass
 
   if not does_hdfs_file_exist:
-    source_and_dest_pairs = _populate_source_and_dests(tarball_prefix, source_file_pattern,
-                                                        component_tar_destination_folder, params.hdp_stack_version)
-    return _copy_files(source_and_dest_pairs, file_owner, kinit_if_needed)
+    source_and_dest_pairs = [(component_tar_source_file, destination_file), ]
+    return _copy_files(source_and_dest_pairs, file_owner, group_owner, kinit_if_needed)
   return 1
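
The rewrite above drops the glob/regex discovery in favor of a single resolved path: verify the installed stack version via hdp-select, substitute it into the destination template, then copy exactly one file. A minimal standalone sketch of the resolution step, with plain subprocess standing in for resource_management's shell module (the single-line hdp-select output and the example paths are assumptions for illustration):

import os
import subprocess

def resolve_destination(source_file, dest_folder_template, stack_version):
    # Ask hdp-select for the installed version; unlike stack_version
    # ("2.2.0.0"), this includes the build number (e.g., "2.2.0.0-2041").
    proc = subprocess.Popen(["/usr/bin/hdp-select", "versions"],
                            stdout=subprocess.PIPE)
    out, _ = proc.communicate()
    hdp_version = out.strip()
    if proc.returncode != 0 or not hdp_version.startswith(stack_version):
        return None  # cannot verify the installed version, so bail out
    # The destination keeps the source's base name; only the folder
    # template carries the {{ hdp_stack_version }} placeholder.
    file_name = os.path.basename(source_file)
    destination = os.path.join(dest_folder_template, file_name)
    return destination.replace("{{ hdp_stack_version }}", hdp_version)

# Hypothetical call:
#   resolve_destination("/usr/hdp/current/tez-client/lib/tez.tar.gz",
#                       "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/",
#                       "2.2.0.0")
#   -> "hdfs:///hdp/apps/2.2.0.0-2041/tez/tez.tar.gz"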

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6652e8d/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py
index 5e2000d..4e55cfb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py
@@ -41,7 +41,8 @@ class HiveServer(Script):
     self.configure(env) # FOR SECURITY
 
     # This function is needed in HDP 2.2, but it is safe to call in earlier versions.
-    copy_tarballs_to_hdfs('tez', params.tez_user, params.hdfs_user)
+    copy_tarballs_to_hdfs('mapreduce', params.tez_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('tez', params.tez_user, params.hdfs_user, params.user_group)
 
     hive_service( 'hiveserver2',
                   action = 'start'

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6652e8d/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py
index 62d37a8..6156ccf 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py
@@ -81,10 +81,10 @@ def webhcat():
 
   # TODO, these checks that are specific to HDP 2.2 and greater should really be in a script specific to that stack.
   if compare_versions(params.hdp_stack_version, "2.2.0.0") >= 0:
-    copy_tarballs_to_hdfs('hive', params.webhcat_user, params.hdfs_user)
-    copy_tarballs_to_hdfs('pig', params.webhcat_user, params.hdfs_user)
-    copy_tarballs_to_hdfs('hadoop-streaming', params.webhcat_user, params.hdfs_user)
-    copy_tarballs_to_hdfs('sqoop', params.webhcat_user, params.hdfs_user)
+    copy_tarballs_to_hdfs('hive', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('pig', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('hadoop-streaming', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('sqoop', params.webhcat_user, params.hdfs_user, params.user_group)
   else:
     CopyFromLocal(params.hadoop_streeming_jars,
                   owner=params.webhcat_user,
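
The compare_versions guard above gates the HDFS copy to HDP 2.2 and later. It comes from resource_management; as a rough stand-in, a numeric dotted-version compare that ignores any -build suffix (an assumption, not necessarily the library's exact semantics):

def compare_versions(v1, v2):
    # Compare dotted version strings numerically; returns -1, 0, or 1.
    def normalize(v):
        return [int(x) for x in v.split("-")[0].split(".")]
    n1, n2 = normalize(v1), normalize(v2)
    return (n1 > n2) - (n1 < n2)

assert compare_versions("2.2.0.0", "2.2.0.0") == 0  # takes the HDFS-copy path
assert compare_versions("2.1.0.0", "2.2.0.0") < 0   # falls back to CopyFromLocal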

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6652e8d/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
index 29692fc..2485d05 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
@@ -38,7 +38,7 @@ class HistoryServer(Script):
     import params
     env.set_params(params)
     self.configure(env) # FOR SECURITY
-    copy_tarballs_to_hdfs('mr', params.mapred_user, params.hdfs_user)
+    copy_tarballs_to_hdfs('mapreduce', params.mapred_user, params.hdfs_user, params.user_group)
     service('historyserver', action='start', serviceName='mapreduce')
 
   def stop(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6652e8d/ambari-server/src/main/resources/stacks/HDP/2.2/configuration/cluster-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/configuration/cluster-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/configuration/cluster-env.xml
index da15055..281b821 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/configuration/cluster-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/configuration/cluster-env.xml
@@ -23,17 +23,19 @@
 <configuration>
 
   <!-- The properties that end in tar_source describe the pattern of where the tar.gz files come from.
-  They will replace {{ hdp_stack_version }} with the "#.#.#.#" value followed by -* (which is the build number in HDP 2.2),
-  and treat {{ component_version }} as a wildcard.
+  They will replace {{ hdp_stack_version }} with the "#.#.#.#" value followed by -* (which is the build number in HDP 2.2).
   When copying those tarballs, Ambari will look up the corresponding tar_destination_folder property to know where it
   should be copied to.
   All of the destination folders must begin with hdfs://
   Please note that the spaces inside of {{ ... }} are important.
+
+  IMPORTANT: Any properties included here must also be declared in site_properties.js
+
   -->
-  <!-- Tez tarball is needed by Hive Server when using the Tez execution egine. -->
+  <!-- Tez tarball is needed by Hive Server when using the Tez execution engine. -->
   <property>
     <name>tez_tar_source</name>
-    <value>/usr/hdp/current/tez-client/lib/tez-{{ component_version }}.{{ hdp_stack_version }}.tar.gz</value>
+    <value>/usr/hdp/current/tez-client/lib/tez.tar.gz</value>
     <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
   </property>
   <property>
@@ -45,7 +47,7 @@
   <!-- Hive tarball is needed by WebHCat. -->
   <property>
     <name>hive_tar_source</name>
-    <value>/usr/hdp/current/hive-client/hive-{{ component_version }}.{{ hdp_stack_version }}.tar.gz</value>
+    <value>/usr/hdp/current/hive-client/hive.tar.gz</value>
     <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
   </property>
   <property>
@@ -57,7 +59,7 @@
   <!-- Pig tarball is needed by WebHCat. -->
   <property>
     <name>pig_tar_source</name>
-    <value>/usr/hdp/current/pig-client/pig-{{ component_version }}.{{ hdp_stack_version }}.tar.gz</value>
+    <value>/usr/hdp/current/pig-client/pig.tar.gz</value>
     <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
   </property>
   <property>
@@ -69,19 +71,19 @@
   <!-- Hadoop Streaming jar is needed by WebHCat. -->
   <property>
     <name>hadoop-streaming_tar_source</name>
-    <value>/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming-{{ component_version }}.{{ hdp_stack_version }}.jar</value>
+    <value>/usr/hdp/current/hadoop-mapreduce-client/hadoop-streaming.jar</value>
     <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
   </property>
   <property>
     <name>hadoop-streaming_tar_destination_folder</name>
-    <value>hdfs:///hdp/apps/{{ hdp_stack_version }}/mr/</value>
+    <value>hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/</value>
     <description>Destination HDFS folder for the file.</description>
   </property>
 
   <!-- Sqoop tarball is needed by WebHCat. -->
   <property>
     <name>sqoop_tar_source</name>
-    <value>/usr/hdp/current/sqoop-client/sqoop-{{ component_version }}.{{ hdp_stack_version }}.tar.gz</value>
+    <value>/usr/hdp/current/sqoop-client/sqoop.tar.gz</value>
     <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
   </property>
   <property>
@@ -92,13 +94,13 @@
 
   <!-- MapReduce2 tarball -->
   <property>
-    <name>mr_tar_source</name>
-    <value>/usr/hdp/current/hadoop-client/mr-{{ component_version }}.{{ hdp_stack_version }}.tar.gz</value>
+    <name>mapreduce_tar_source</name>
+    <value>/usr/hdp/current/hadoop-client/mapreduce.tar.gz</value>
     <description>Source file path that uses dynamic variables and regex to copy the file to HDFS.</description>
   </property>
   <property>
-    <name>mr_tar_destination_folder</name>
-    <value>hdfs:///hdp/apps/{{ hdp_stack_version }}/mr/</value>
+    <name>mapreduce_tar_destination_folder</name>
+    <value>hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/</value>
     <description>Destination HDFS folder for the file.</description>
   </property>
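
For reference, the copy logic pairs each tarball prefix with these properties by name: the prefix plus the fixed suffixes _tar_source and _tar_destination_folder. A small sketch of that lookup, assuming a plain dict of cluster-env values (the real code reads them through resource_management's default()):

TAR_SOURCE_SUFFIX = "_tar_source"
TAR_DESTINATION_FOLDER_SUFFIX = "_tar_destination_folder"

def get_tar_props(cluster_env, tarball_prefix):
    # e.g., "tez" -> ("tez_tar_source", "tez_tar_destination_folder")
    source = cluster_env.get(tarball_prefix.lower() + TAR_SOURCE_SUFFIX)
    dest = cluster_env.get(tarball_prefix.lower() + TAR_DESTINATION_FOLDER_SUFFIX)
    return source, dest

# get_tar_props(env, "tez") ->
#   ("/usr/hdp/current/tez-client/lib/tez.tar.gz",
#    "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/")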
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6652e8d/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
index 5182d82..0454c1c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/webhcat-site.xml
@@ -43,13 +43,13 @@ limitations under the License.
 
   <property>
     <name>templeton.pig.archive</name>
-    <value>hdfs:///hdp/apps/${hdp.version}/pig/pig-0.14.0.${hdp.version}.tar.gz</value>
+    <value>hdfs:///hdp/apps/${hdp.version}/pig/pig.tar.gz</value>
     <description>The path to the Pig archive in HDFS.</description>
   </property>
 
   <property>
     <name>templeton.pig.path</name>
-    <value>pig-0.14.0.${hdp.version}.tar.gz/pig/bin/pig</value>
+    <value>pig.tar.gz/pig/bin/pig</value>
     <description>The path to the Pig executable.</description>
   </property>
 
@@ -61,43 +61,43 @@ limitations under the License.
 
   <property>
     <name>templeton.hive.archive</name>
-    <value>hdfs:///hdp/apps/${hdp.version}/hive/hive-0.14.0.${hdp.version}.tar.gz</value>
+    <value>hdfs:///hdp/apps/${hdp.version}/hive/hive.tar.gz</value>
     <description>The path to the Hive archive.</description>
   </property>
 
   <property>
     <name>templeton.hive.home</name>
-    <value>hive-0.14.0.${hdp.version}.tar.gz/hive</value>
+    <value>hive.tar.gz/hive</value>
     <description>The path to the Hive home within the tar. Has no effect if templeton.hive.archive is not set.</description>
   </property>
 
   <property>
     <name>templeton.hcat.home</name>
-    <value>hive-0.14.0.${hdp.version}.tar.gz/hive/hcatalog</value>
+    <value>hive.tar.gz/hive/hcatalog</value>
     <description>The path to the HCat home within the tar. Has no effect if templeton.hive.archive is not set.</description>
   </property>
 
   <property>
     <name>templeton.hive.path</name>
-    <value>hive-0.14.0.${hdp.version}.tar.gz/hive/bin/hive</value>
+    <value>hive.tar.gz/hive/bin/hive</value>
     <description>The path to the Hive executable.</description>
   </property>
 
   <property>
     <name>templeton.sqoop.archive</name>
-    <value>hdfs:///hdp/apps/${hdp.version}/sqoop/sqoop-1.4.5.${hdp.version}.tar.gz</value>
+    <value>hdfs:///hdp/apps/${hdp.version}/sqoop/sqoop.tar.gz</value>
     <description>The path to the Sqoop archive in HDFS.</description>
   </property>
 
   <property>
     <name>templeton.sqoop.path</name>
-    <value>sqoop-1.4.5.${hdp.version}.tar.gz/sqoop/bin/sqoop</value>
+    <value>sqoop.tar.gz/sqoop/bin/sqoop</value>
     <description>The path to the Sqoop executable.</description>
   </property>
 
   <property>
     <name>templeton.sqoop.home</name>
-    <value>sqoop-1.4.5.${hdp.version}.tar.gz/sqoop</value>
+    <value>sqoop.tar.gz/sqoop</value>
     <description>The path to the Sqoop home within the tar. Has no effect if
       templeton.sqoop.archive is not set.
     </description>
@@ -105,7 +105,7 @@ limitations under the License.
 
   <property>
     <name>templeton.streaming.jar</name>
-    <value>hdfs:///hdp/apps/${hdp.version}/mr/hadoop-streaming-2.6.0.${hdp.version}.jar</value>
+    <value>hdfs:///hdp/apps/${hdp.version}/mapreduce/hadoop-streaming.jar</value>
     <description>The hdfs path to the Hadoop streaming jar file.</description>
   </property>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6652e8d/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/configuration/tez-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/configuration/tez-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/configuration/tez-site.xml
index 9bac52a..b988f4a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/configuration/tez-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/TEZ/configuration/tez-site.xml
@@ -21,7 +21,7 @@
 
   <property>
     <name>tez.lib.uris</name>
-    <value>hdfs:///hdp/apps/${hdp.version}/tez/tez-0.6.0.${hdp.version}.tar.gz</value>
+    <value>hdfs:///hdp/apps/${hdp.version}/tez/tez.tar.gz</value>
     <description>Comma-delimited list of the location of the Tez libraries which will be localized for DAGs.
       Specifying a single .tar.gz or .tgz assumes that a compressed version of the tez libs is being used. This is uncompressed into a tezlibs directory when running containers, and tezlibs/;tezlibs/lib/ are added to the classpath (after . and .*).
       If multiple files are specified - files are localized as regular files, contents of directories are localized as regular files (non-recursive).

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6652e8d/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
index 10b621f..79707bb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-site.xml
@@ -34,7 +34,7 @@
 
   <property>
     <name>mapreduce.application.classpath</name>
-    <value>$PWD/mr-framework/hadoop-2.6.0.${hdp.version}/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop-2.6.0.${hdp.version}/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop-2.6.0.${hdp.version}/share/hadoop/common/*:$PWD/mr-framework/hadoop-2.6.0.${hdp.version}/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop-2.6.0.${hdp.version}/share/hadoop/yarn/*:$PWD/mr-framework/hadoop-2.6.0.${hdp.version}/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop-2.6.0.${hdp.version}/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop-2.6.0.${hdp.version}/share/hadoop/hdfs/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.jar</value>
+    <value>$PWD/mr-framework/hadoop-${hdp.version}/share/hadoop/mapreduce/*:$PWD/mr-framework/hadoop-${hdp.version}/share/hadoop/mapreduce/lib/*:$PWD/mr-framework/hadoop-${hdp.version}/share/hadoop/common/*:$PWD/mr-framework/hadoop-${hdp.version}/share/hadoop/common/lib/*:$PWD/mr-framework/hadoop-${hdp.version}/share/hadoop/yarn/*:$PWD/mr-framework/hadoop-${hdp.version}/share/hadoop/yarn/lib/*:$PWD/mr-framework/hadoop-${hdp.version}/share/hadoop/hdfs/*:$PWD/mr-framework/hadoop-${hdp.version}/share/hadoop/hdfs/lib/*:/usr/hdp/${hdp.version}/hadoop/lib/hadoop-lzo-0.6.0.${hdp.version}.jar:/etc/hadoop/conf/secure</value>
     <description>
       CLASSPATH for MR applications. A comma-separated list of CLASSPATH
       entries.
@@ -43,13 +43,13 @@
 
   <property>
     <name>mapreduce.application.framework.path</name>
-    <value>hdfs:///hdp/apps/${hdp.version}/mr/mr-2.6.0.${hdp.version}.tar.gz#mr-framework</value>
+    <value>/hdp/apps/${hdp.version}/mapreduce/mapreduce.tar.gz#mr-framework</value>
     <description></description>
   </property>
 
   <property>
     <name>yarn.app.mapreduce.am.admin-command-opts</name>
-    <value>-Xmx256m -Dhdp.version=${hdp.version}</value>
+    <value>-Dhdp.version=${hdp.version}</value>
     <description>
       Java opts for the MR App Master processes.
       The following symbol, if present, will be interpolated: @taskid@ is replaced
@@ -65,5 +65,16 @@
     </description>
   </property>
 
+  <property>
+    <name>mapreduce.admin.map.child.java.opts</name>
+    <value>-server -XX:NewRatio=8 -Djava.net.preferIPv4Stack=true -Dhdp.version=${hdp.version}</value>
+    <description></description>
+  </property>
+
+  <property>
+    <name>mapreduce.admin.reduce.child.java.opts</name>
+    <value>-server -XX:NewRatio=8 -Djava.net.preferIPv4Stack=true -Dhdp.version=${hdp.version}</value>
+    <description></description>
+  </property>
 
 </configuration>
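
A note on how the two mapred-site values above stay aligned: the #mr-framework fragment on mapreduce.application.framework.path is the alias YARN localizes the archive under, so the classpath's $PWD/mr-framework/hadoop-${hdp.version}/... entries resolve inside the unpacked tarball once ${hdp.version} is substituted. A quick illustration with an assumed build number:

hdp_version = "2.2.0.0-2041"  # assumed value, for illustration only
framework_path = "/hdp/apps/${hdp.version}/mapreduce/mapreduce.tar.gz#mr-framework"
classpath_entry = "$PWD/mr-framework/hadoop-${hdp.version}/share/hadoop/mapreduce/*"

print(framework_path.replace("${hdp.version}", hdp_version))
# /hdp/apps/2.2.0.0-2041/mapreduce/mapreduce.tar.gz#mr-framework
print(classpath_entry.replace("${hdp.version}", hdp_version))
# $PWD/mr-framework/hadoop-2.2.0.0-2041/share/hadoop/mapreduce/*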

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6652e8d/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
index 6ba2c95..d32348b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration/yarn-site.xml
@@ -23,7 +23,7 @@
 
   <property>
     <name>yarn.application.classpath</name>
-    <value>/etc/hadoop/conf,/usr/hdp/current/hadoop-client/*,/usr/hdp/current/hadoop-client/lib/*,/usr/hdp/current/hadoop-hdfs-client/*,/usr/hdp/current/hadoop-hdfs-client/lib/*,/usr/hdp/current/hadoop-yarn-client/*,/usr/hdp/current/hadoop-yarn-client/lib/*,/usr/hdp/current/hadoop-mapreduce-client/*,/usr/hdp/current/hadoop-mapreduce-client/lib/*</value>
+    <value>$HADOOP_CONF_DIR,/usr/hdp/${hdp.version}/hadoop/*,/usr/hdp/${hdp.version}/hadoop/lib/*,/usr/hdp/${hdp.version}/hadoop-hdfs/*,/usr/hdp/${hdp.version}/hadoop-hdfs/lib/*,/usr/hdp/${hdp.version}/hadoop-yarn/*,/usr/hdp/${hdp.version}/hadoop-yarn/lib/*,/usr/hdp/${hdp.version}/hadoop-mapreduce/*,/usr/hdp/${hdp.version}/hadoop-mapreduce/lib/*</value>
     <description>Classpath for typical applications.</description>
   </property>
   <property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6652e8d/ambari-server/src/test/python/stacks/2.2/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/default.json b/ambari-server/src/test/python/stacks/2.2/configs/default.json
index 888b0ca..2862970 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/default.json
@@ -53,10 +53,10 @@
         },
         "webhcat-site": {
             "templeton.jar": "/usr/hdp/current/hive-webhcat/share/webhcat/svr/lib/hive-webhcat-*.jar",
-            "templeton.pig.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/pig-{{ component_version }}.{{ hdp_stack_version }}.tar.gz",
-            "templeton.hive.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/hive-{{ component_version }}.{{ hdp_stack_version }}.tar.gz",
-            "templeton.sqoop.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/sqoop-{{ component_version }}.{{ hdp_stack_version }}.tar.gz",
-            "templeton.streaming.jar": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mr/hadoop-streaming-{{ component_version }}.{{ hdp_stack_version }}.jar"
+            "templeton.pig.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/pig.tar.gz",
+            "templeton.hive.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/hive.tar.gz",
+            "templeton.sqoop.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/sqoop.tar.gz",
+            "templeton.streaming.jar": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mr/hadoop-streaming.jar"
         },
         "slider-log4j": {
             "content": "log4jproperties\nline2"

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6652e8d/ambari-server/src/test/python/stacks/2.2/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/secured.json b/ambari-server/src/test/python/stacks/2.2/configs/secured.json
index 7607a5d..deea45f 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/secured.json
@@ -48,10 +48,10 @@
         },
         "webhcat-site": {
             "templeton.jar": "/usr/hdp/current/hive-webhcat/share/webhcat/svr/lib/hive-webhcat-*.jar",
-            "templeton.pig.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/pig-{{ component_version }}.{{ hdp_stack_version }}.tar.gz",
-            "templeton.hive.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/hive-{{ component_version }}.{{ hdp_stack_version }}.tar.gz",
-            "templeton.sqoop.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/sqoop-{{ component_version }}.{{ hdp_stack_version }}.tar.gz",
-            "templeton.streaming.jar": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mr/hadoop-streaming-{{ component_version }}.{{ hdp_stack_version }}.jar"
+            "templeton.pig.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/pig.tar.gz",
+            "templeton.hive.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/hive.tar.gz",
+            "templeton.sqoop.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/sqoop.tar.gz",
+            "templeton.streaming.jar": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/hadoop-streaming.jar"
         },
         "slider-log4j": {
             "content": "log4jproperties\nline2"

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6652e8d/ambari-web/app/data/HDP2/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/site_properties.js b/ambari-web/app/data/HDP2/site_properties.js
index a13e94a..cfc9a4d 100644
--- a/ambari-web/app/data/HDP2/site_properties.js
+++ b/ambari-web/app/data/HDP2/site_properties.js
@@ -2212,6 +2212,32 @@ module.exports =
       "serviceName": "MISC",
       "filename": "cluster-env.xml"
     },
+    {
+      "id": "puppet var",
+      "name": "mapreduce_tar_source",
+      "displayName": "Mapreduce tarball source",
+      "description": "Source file path that uses dynamic variables and regex to copy the file to HDFS.",
+      "defaultValue": '',
+      "isRequired": true,
+      "isOverridable": false,
+      "isVisible": false,
+      "isEditable": false,
+      "serviceName": "MISC",
+      "filename": "cluster-env.xml"
+    },
+    {
+      "id": "puppet var",
+      "name": "mapreduce_tar_destination_folder",
+      "displayName": "Mapreduce tarball destination folder",
+      "description": "Destination HDFS folder for the file.",
+      "defaultValue": '',
+      "isRequired": true,
+      "isOverridable": false,
+      "isVisible": false,
+      "isEditable": false,
+      "serviceName": "MISC",
+      "filename": "cluster-env.xml"
+    },
 
   /**********************************************MAPREDUCE2***************************************/
     {

