ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From odiache...@apache.org
Subject ambari git commit: AMBARI-14029. HAWQ support on Namenode HA (mithmatt via odiachenko).
Date Mon, 28 Dec 2015 18:29:58 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.2 fc5ac34d2 -> 029f529b6


AMBARI-14029. HAWQ support on Namenode HA (mithmatt via odiachenko).


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/029f529b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/029f529b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/029f529b

Branch: refs/heads/branch-2.2
Commit: 029f529b65cd3b6dfd2d88785e7986f6a68827a7
Parents: fc5ac34
Author: Oleksandr Diachenko <odiachenko@pivotal.io>
Authored: Mon Dec 28 10:29:53 2015 -0800
Committer: Oleksandr Diachenko <odiachenko@pivotal.io>
Committed: Mon Dec 28 10:29:53 2015 -0800

----------------------------------------------------------------------
 .../HAWQ/2.0.0/package/scripts/common.py        | 70 +++++++++++---------
 .../HAWQ/2.0.0/package/scripts/params.py        | 30 ++++-----
 2 files changed, 53 insertions(+), 47 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/029f529b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/common.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/common.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/common.py
index abe210d..8a62172 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/common.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/common.py
@@ -27,7 +27,6 @@ from resource_management.core.logger import Logger
 from resource_management.core.system import System
 from resource_management.core.exceptions import Fail
 from resource_management.core.resources.accounts import Group, User
-from resource_management.core.source import Template
 import xml.etree.ElementTree as ET
 
 import utils
@@ -79,19 +78,42 @@ def setup_common_configurations():
   """
   Sets up the config files common to master, standby and segment nodes.
   """
+  __update_hdfs_client()
+  __update_yarn_client()
+  __update_hawq_site()
+  __set_osparams()
+
+def __update_hdfs_client():
+  """
+  Writes hdfs-client.xml on the local filesystem on hawq nodes.
+  If hdfs ha is enabled, appends related parameters to hdfs-client.xml
+  """
   import params
 
-  substituted_conf_dict = __substitute_hostnames_in_hawq_site()
-  XmlConfig("hawq-site.xml",
+  hdfs_client_dict = params.hdfs_client.copy()
+  dfs_nameservice = params.hdfs_site.get('dfs.nameservices')
+
+  # Adds additional parameters required for HDFS HA, if HDFS HA is enabled
+  # Temporary logic, this logic will be moved to ambari-web to expose these parameters on UI once HDFS HA is enabled
+  if dfs_nameservice:
+    ha_namenodes = 'dfs.ha.namenodes.{0}'.format(dfs_nameservice)
+    ha_nn_list = [ha_nn.strip() for ha_nn in params.hdfs_site[ha_namenodes].split(',')]
+    required_keys = ('dfs.nameservices', ha_namenodes,
+                     'dfs.namenode.rpc-address.{0}.{1}'.format(dfs_nameservice, ha_nn_list[0]),
+                     'dfs.namenode.http-address.{0}.{1}'.format(dfs_nameservice, ha_nn_list[0]),
+                     'dfs.namenode.rpc-address.{0}.{1}'.format(dfs_nameservice, ha_nn_list[1]),
+                     'dfs.namenode.http-address.{0}.{1}'.format(dfs_nameservice, ha_nn_list[1]))
+
+    for key in required_keys:
+      hdfs_client_dict[key] = params.hdfs_site[key]
+
+  XmlConfig("hdfs-client.xml",
             conf_dir=constants.hawq_config_dir,
-            configurations=substituted_conf_dict,
-            configuration_attributes=params.config['configuration_attributes']['hawq-site'],
+            configurations=ConfigDictionary(hdfs_client_dict),
+            configuration_attributes=params.config['configuration_attributes']['hdfs-client'],
             owner=constants.hawq_user,
             group=constants.hawq_group,
             mode=0644)
-  if "yarn-site" in params.config["configurations"]:
-    __update_yarn_client()
-  __set_osparams()
 
 
 def __update_yarn_client():
@@ -146,33 +168,19 @@ def __update_yarn_client():
             mode=0644)
 
 
-def __substitute_hostnames_in_hawq_site():
+def __update_hawq_site():
   """
-  Temporary function to replace localhost with actual HAWQ component hostnames.
-  This function will be in place till the entire HAWQ plugin code along with the UI
-  changes are submitted to the trunk.
+  Sets up hawq-site.xml
   """
   import params
 
-  LOCALHOST = "localhost"
-  
-  # in case there is no standby
-  hawqstandby_host_desired_value = params.hawqstandby_host if params.hawqstandby_host is not None else 'none' 
-  
-  substituted_hawq_site = params.hawq_site.copy()
-  hawq_site_property_map = {"hawq_master_address_host": params.hawqmaster_host,
-                            "hawq_standby_address_host": hawqstandby_host_desired_value,
-                            "hawq_rm_yarn_address": params.rm_host,
-                            "hawq_rm_yarn_scheduler_address": params.rm_host,
-                            "hawq_dfs_url": params.namenode_host
-                            }
-
-  for property, desired_value in hawq_site_property_map.iteritems():
-    if desired_value is not None:
-      # Replace localhost with required component hostname
-      substituted_hawq_site[property] = re.sub(LOCALHOST, desired_value, substituted_hawq_site[property])
-
-  return substituted_hawq_site
+  XmlConfig("hawq-site.xml",
+            conf_dir=constants.hawq_config_dir,
+            configurations=params.hawq_site,
+            configuration_attributes=params.config['configuration_attributes']['hawq-site'],
+            owner=constants.hawq_user,
+            group=constants.hawq_group,
+            mode=0644)
 
 
 def __set_osparams():

http://git-wip-us.apache.org/repos/asf/ambari/blob/029f529b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
index eb8a26c..62870f0 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
@@ -34,19 +34,6 @@ def __get_component_host(component):
   return component_host
 
 
-def __get_namenode_host():
-  """
-  Gets the namenode host; active namenode in case of HA
-  """
-  namenode_host = __get_component_host('namenode_host')
-  
-  # hostname of the active HDFS HA Namenode (only used when HA is enabled)
-  dfs_ha_namenode_active = default('/configurations/hadoop-env/dfs_ha_initial_namenode_active', None)
-  if dfs_ha_namenode_active is not None:
-    namenode_host = dfs_ha_namenode_active
-  return namenode_host
-
-
 hostname = config['hostname']
 
 # Users and Groups
@@ -54,34 +41,45 @@ hdfs_superuser = config['configurations']['hadoop-env']['hdfs_user']
 user_group = config['configurations']['cluster-env']['user_group']
 hawq_password = config['configurations']['hawq-env']['hawq_password']
 
+
 # HAWQ Hostnames
 hawqmaster_host = __get_component_host('hawqmaster_hosts')
 hawqstandby_host = __get_component_host('hawqstandby_hosts')
 hawqsegment_hosts = default('/clusterHostInfo/hawqsegment_hosts', [])
 
+
 # HDFS
 hdfs_site = config['configurations']['hdfs-site']
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 
 # HDFSResource partial function
-HdfsResource = functools.partial(HdfsResource, user=hdfs_superuser, hdfs_site=hdfs_site, default_fs=default_fs)
+HdfsResource = functools.partial(HdfsResource,
+                                 user=hdfs_superuser,
+                                 hdfs_site=hdfs_site,
+                                 default_fs=default_fs)
 
-namenode_host= __get_namenode_host()
 
 # YARN
 # Note: YARN is not mandatory for HAWQ. It is required only when the users set HAWQ to use YARN as resource manager
 rm_host = __get_component_host('rm_host')
 yarn_ha_enabled = default('/configurations/yarn-site/yarn.resourcemanager.ha.enabled', False)
 
+
 # Config files
 gpcheck_content = config['configurations']['gpcheck-env']['content']
 # database user limits
 hawq_limits = config['configurations']['hawq-limits-env']
 # sysctl parameters
 hawq_sysctl = config['configurations']['hawq-sysctl-env']
-
+# hawq config
 hawq_site = config['configurations']['hawq-site']
+# hdfs-client for enabling HAWQ to work with HDFS namenode HA
+hdfs_client = config['configurations']['hdfs-client']
+# yarn-client for enabling HAWQ to work with YARN resource manager HA
 yarn_client = config['configurations']['yarn-client']
+
+
+# Directories and ports
 hawq_master_dir = hawq_site.get('hawq_master_directory')
 hawq_segment_dir = hawq_site.get('hawq_segment_directory')
 hawq_master_temp_dir = hawq_site.get('hawq_master_temp_directory')


Mime
View raw message