ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From aonis...@apache.org
Subject [1/2] ambari git commit: AMBARI-10118. Enabling ResourceManager HA failed in Service Check for HDFS (aonishuk)
Date Wed, 18 Mar 2015 12:39:55 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.0 862566914 -> e542dfcf1
  refs/heads/trunk bef5a71de -> 365dc3b54


AMBARI-10118. Enabling ResourceManager HA failed in Service Check for HDFS (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/365dc3b5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/365dc3b5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/365dc3b5

Branch: refs/heads/trunk
Commit: 365dc3b540b166fc87aaa9ab556f4a39a2b1c008
Parents: bef5a71
Author: Andrew Onishuk <aonishuk@hortonworks.com>
Authored: Wed Mar 18 14:39:46 2015 +0200
Committer: Andrew Onishuk <aonishuk@hortonworks.com>
Committed: Wed Mar 18 14:39:46 2015 +0200

----------------------------------------------------------------------
 .../HDFS/2.1.0.2.0/package/scripts/service_check.py | 16 ++++++++--------
 .../python/stacks/2.0.6/HDFS/test_service_check.py  | 12 ++++++------
 2 files changed, 14 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/365dc3b5/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
index 0196300..ffbe658 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
@@ -33,7 +33,7 @@ class HdfsServiceCheck(Script):
 
     create_dir_cmd = format("fs -mkdir {dir}")
     chmod_command = format("fs -chmod 777 {dir}")
-    test_dir_exists = as_user(format("{hadoop_bin_dir}/hadoop --config {hadoop_conf_dir} fs -test -e {dir}"), params.smoke_user)
+    test_dir_exists = as_user(format("{hadoop_bin_dir}/hadoop --config {hadoop_conf_dir} fs -test -e {dir}"), params.hdfs_user)
     cleanup_cmd = format("fs -rm {tmp_file}")
     #cleanup put below to handle retries; if retrying there wil be a stale file
     #that needs cleanup; exit code is fn of second command
@@ -41,11 +41,11 @@ class HdfsServiceCheck(Script):
       "{cleanup_cmd}; hadoop --config {hadoop_conf_dir} fs -put /etc/passwd {tmp_file}")
     test_cmd = format("fs -test -e {tmp_file}")
     if params.security_enabled:
-      Execute(format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal}"),
-        user=params.smoke_user
+      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
+        user=params.hdfs_user
       )
     ExecuteHadoop(safemode_command,
-                  user=params.smoke_user,
+                  user=params.hdfs_user,
                   logoutput=True,
                   conf_dir=params.hadoop_conf_dir,
                   try_sleep=3,
@@ -53,7 +53,7 @@ class HdfsServiceCheck(Script):
                   bin_dir=params.hadoop_bin_dir
     )
     ExecuteHadoop(create_dir_cmd,
-                  user=params.smoke_user,
+                  user=params.hdfs_user,
                   logoutput=True,
                   not_if=test_dir_exists,
                   conf_dir=params.hadoop_conf_dir,
@@ -62,7 +62,7 @@ class HdfsServiceCheck(Script):
                   bin_dir=params.hadoop_bin_dir
     )
     ExecuteHadoop(chmod_command,
-                  user=params.smoke_user,
+                  user=params.hdfs_user,
                   logoutput=True,
                   conf_dir=params.hadoop_conf_dir,
                   try_sleep=3,
@@ -70,7 +70,7 @@ class HdfsServiceCheck(Script):
                   bin_dir=params.hadoop_bin_dir
     )
     ExecuteHadoop(create_file_cmd,
-                  user=params.smoke_user,
+                  user=params.hdfs_user,
                   logoutput=True,
                   conf_dir=params.hadoop_conf_dir,
                   try_sleep=3,
@@ -78,7 +78,7 @@ class HdfsServiceCheck(Script):
                   bin_dir=params.hadoop_bin_dir
     )
     ExecuteHadoop(test_cmd,
-                  user=params.smoke_user,
+                  user=params.hdfs_user,
                   logoutput=True,
                   conf_dir=params.hadoop_conf_dir,
                   try_sleep=3,

http://git-wip-us.apache.org/repos/asf/ambari/blob/365dc3b5/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
index e24ff8d..43ddf6c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
@@ -57,16 +57,16 @@ class TestServiceCheck(RMFTestCase):
         conf_dir = '/etc/hadoop/conf',
         try_sleep = 3,
         bin_dir = '/usr/bin',
-        user = 'ambari-qa',
+        user = 'hdfs',
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp',
         conf_dir = '/etc/hadoop/conf',
         logoutput = True,
-        not_if = "ambari-sudo.sh su ambari-qa -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]/usr/bin/hadoop --config /etc/hadoop/conf fs -test -e /tmp'",
+        not_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]/usr/bin/hadoop --config /etc/hadoop/conf fs -test -e /tmp'",
         try_sleep = 3,
         tries = 5,
         bin_dir = '/usr/bin',
-        user = 'ambari-qa',
+        user = 'hdfs',
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -chmod 777 /tmp',
         conf_dir = '/etc/hadoop/conf',
@@ -74,7 +74,7 @@ class TestServiceCheck(RMFTestCase):
         try_sleep = 3,
         tries = 5,
         bin_dir = '/usr/bin',
-        user = 'ambari-qa',
+        user = 'hdfs',
     )
    self.assertResourceCalled('ExecuteHadoop', 'fs -rm /tmp/; hadoop --config /etc/hadoop/conf fs -put /etc/passwd /tmp/',
         logoutput = True,
@@ -82,7 +82,7 @@ class TestServiceCheck(RMFTestCase):
         conf_dir = '/etc/hadoop/conf',
         bin_dir = '/usr/bin',
         try_sleep = 3,
-        user = 'ambari-qa',
+        user = 'hdfs',
     )
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /tmp/',
         logoutput = True,
@@ -90,5 +90,5 @@ class TestServiceCheck(RMFTestCase):
         conf_dir = '/etc/hadoop/conf',
         bin_dir = '/usr/bin',
         try_sleep = 3,
-        user = 'ambari-qa',
+        user = 'hdfs',
     )


Mime
View raw message