ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From aonis...@apache.org
Subject [1/2] git commit: AMBARI-4209. Fix HA fails on 2.0.8 (Eugene Chekanskiy via aonishuk)
Date Thu, 02 Jan 2014 18:01:17 GMT
Updated Branches:
  refs/heads/trunk e66947e81 -> afa0d7baf


AMBARI-4209. Fix HA fails on 2.0.8 (Eugene Chekanskiy via aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/afa0d7ba
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/afa0d7ba
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/afa0d7ba

Branch: refs/heads/trunk
Commit: afa0d7baf239ee68be69ec3fac8a2628ca2d5a72
Parents: 72d92b8
Author: Andrew Onischuk <aonishuk@hortonworks.com>
Authored: Thu Jan 2 09:58:33 2014 -0800
Committer: Andrew Onischuk <aonishuk@hortonworks.com>
Committed: Thu Jan 2 10:01:02 2014 -0800

----------------------------------------------------------------------
 .../HDFS/package/scripts/hdfs_namenode.py       |  3 ++-
 .../services/HDFS/package/scripts/namenode.py   |  2 ++
 .../services/HDFS/package/scripts/params.py     | 11 ++++------
 .../HDFS/package/scripts/service_check.py       | 21 ++++++++++----------
 .../services/HDFS/package/scripts/utils.py      |  6 +++---
 5 files changed, 22 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/afa0d7ba/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/hdfs_namenode.py
index 1f2abc8..aafaf72 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/hdfs_namenode.py
@@ -25,7 +25,8 @@ import urlparse
 
 def namenode(action=None, format=True):
   import params
-
+  #we need this directory to be present before any action(HA manual steps for
+  #additional namenode)
   if action == "configure":
     create_name_dirs(params.dfs_name_dir)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/afa0d7ba/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/namenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/namenode.py
index 9b0fe43..9a1dba36 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/namenode.py
@@ -27,6 +27,8 @@ class NameNode(Script):
 
     self.install_packages(env)
     env.set_params(params)
+    #TODO remove when config action will be implemented
+    self.config(env)
 
   def start(self, env):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/afa0d7ba/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/params.py
index a51134b..568a8a8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/params.py
@@ -161,13 +161,14 @@ dfs_data_dir = config['configurations']['hdfs-site']['dfs.datanode.data.dir']#,"
 # HDFS High Availability properties
 dfs_ha_enabled = False
 dfs_ha_nameservices = default("/configurations/hdfs-site/dfs.nameservices", None)
-dfs_ha_namenode_ids = default(format("hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None)
+dfs_ha_namenode_ids = default(format("/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None)
 if dfs_ha_namenode_ids:
-  dfs_ha_namenode_ids_array_len = len(dfs_ha_namenode_ids.split(","))
+  dfs_ha_namemodes_ids_list = dfs_ha_namenode_ids.split(",")
+  dfs_ha_namenode_ids_array_len = len(dfs_ha_namemodes_ids_list)
   if dfs_ha_namenode_ids_array_len > 1:
     dfs_ha_enabled = True
 if dfs_ha_enabled:
-  for nn_id in dfs_ha_namenode_ids:
+  for nn_id in dfs_ha_namemodes_ids_list:
     nn_host = config['configurations']['hdfs-site'][format('dfs.namenode.rpc-address.{dfs_ha_nameservices}.{nn_id}')]
     if hostname in nn_host:
       namenode_id = nn_id
@@ -177,7 +178,3 @@ journalnode_address = default('/configurations/hdfs-site/dfs.journalnode.http-ad
 if journalnode_address:
   journalnode_port = journalnode_address.split(":")[1]
 
-
-
-
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/afa0d7ba/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/service_check.py
index 88077a4..d27b13a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/service_check.py
@@ -90,16 +90,17 @@ class HdfsServiceCheck(Script):
               tries=5
       )
 
-    if params.has_zkfc_hosts:
-      pid_dir = format("{hadoop_pid_dir_prefix}/{hdfs_user}")
-      pid_file = format("{pid_dir}/hadoop-{hdfs_user}-zkfc.pid")
-      check_zkfc_process_cmd = format(
-        "ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1")
-      Execute(check_zkfc_process_cmd,
-              logoutput=True,
-              try_sleep=3,
-              tries=5
-      )
+    if params.is_namenode_master:
+      if params.has_zkfc_hosts:
+        pid_dir = format("{hadoop_pid_dir_prefix}/{hdfs_user}")
+        pid_file = format("{pid_dir}/hadoop-{hdfs_user}-zkfc.pid")
+        check_zkfc_process_cmd = format(
+          "ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1")
+        Execute(check_zkfc_process_cmd,
+                logoutput=True,
+                try_sleep=3,
+                tries=5
+        )
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/afa0d7ba/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/utils.py b/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/utils.py
index cc878a8..225cd2e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.8/services/HDFS/package/scripts/utils.py
@@ -102,7 +102,7 @@ def hdfs_directory(name=None, owner=None, group=None,
                 try_sleep=try_sleep,
                 tries=tries,
                 not_if=format(
-                  "{dir_absent_in_stub} && {dfs_check_nn_status_cmd} && "
+                  "! {dir_absent_in_stub} && {dfs_check_nn_status_cmd} && "
                   "{dir_exists} && ! {namenode_safe_mode_off}"),
                 only_if=format(
                   "{dir_absent_in_stub} && {dfs_check_nn_status_cmd} && "
@@ -112,7 +112,7 @@ def hdfs_directory(name=None, owner=None, group=None,
   )
   Execute(record_dir_in_stub,
           user=params.hdfs_user,
-          only_if=format("! {dir_absent_in_stub}")
+          only_if=format("{dir_absent_in_stub}")
   )
 
   recursive = "-R" if recursive_chown else ""
@@ -128,7 +128,7 @@ def hdfs_directory(name=None, owner=None, group=None,
   for cmd in perm_cmds:
     ExecuteHadoop(cmd,
                   user=params.hdfs_user,
-                  only_if=format("{dir_absent_in_stub} && {dfs_check_nn_status_cmd} && {namenode_safe_mode_off} && {dir_exists}"),
+                  only_if=format("! {dir_absent_in_stub} && {dfs_check_nn_status_cmd} && {namenode_safe_mode_off} && {dir_exists}"),
                   try_sleep=try_sleep,
                   tries=tries,
                   conf_dir=params.hadoop_conf_dir


Mime
View raw message