ambari-commits mailing list archives

From: swa...@apache.org
Subject: [3/3] ambari git commit: Revert "AMBARI-8174. Reverting all patches. (swagle)"
Date: Fri, 07 Nov 2014 04:59:49 GMT
Revert "AMBARI-8174. Reverting all patches. (swagle)"

This reverts commit a4b8c9b30f2756cb24ee372e5741d76db7b517a1.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/37af1a64
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/37af1a64
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/37af1a64

Branch: refs/heads/trunk
Commit: 37af1a64f4eb32c839933ad1b436eae5da3136a8
Parents: be0a3c1
Author: Siddharth Wagle <swagle@hortonworks.com>
Authored: Thu Nov 6 20:59:11 2014 -0800
Committer: Siddharth Wagle <swagle@hortonworks.com>
Committed: Thu Nov 6 20:59:11 2014 -0800

----------------------------------------------------------------------
 .../2.0.6/hooks/before-ANY/scripts/params.py    |  2 -
 .../2.0.6/services/HDFS/package/scripts/hdfs.py | 22 --------
 .../services/HDFS/package/scripts/params.py     |  2 -
 .../services/HDFS/package/scripts/utils.py      | 12 +----
 .../services/HDFS/configuration/hadoop-env.xml  |  7 ---
 .../python/stacks/2.0.6/HDFS/test_datanode.py   | 57 --------------------
 6 files changed, 2 insertions(+), 100 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/37af1a64/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index ddb2b0b..fa3b118 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -136,5 +136,3 @@ if has_nagios:
 
 user_list = json.loads(config['hostLevelParams']['user_list'])
 group_list = json.loads(config['hostLevelParams']['group_list'])
-
-dn_proc_user=hdfs_user

http://git-wip-us.apache.org/repos/asf/ambari/blob/37af1a64/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
index c192682..25c1067 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
@@ -81,25 +81,3 @@ def hdfs(name=None):
   
   if params.lzo_enabled:
     Package(params.lzo_packages_for_current_host)
-
-def setup_hadoop_env(replace=False):
-  import params
-
-  if params.security_enabled:
-    tc_owner = "root"
-  else:
-    tc_owner = params.hdfs_user
-  Directory(params.hadoop_conf_empty_dir,
-            recursive=True,
-            owner='root',
-            group='root'
-  )
-  Link(params.hadoop_conf_dir,
-       to=params.hadoop_conf_empty_dir,
-       not_if=format("ls {hadoop_conf_dir}")
-  )
-  File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
-       owner=tc_owner,
-       content=InlineTemplate(params.hadoop_env_sh_template),
-       replace=replace
-  )
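
For context, the setup_hadoop_env() helper removed above declared three resources. A rough plain-Python sketch of the same steps, assuming ordinary filesystem semantics and omitting the owner/group handling that the resource_management DSL performs, is shown below; the assertion blocks deleted from test_datanode.py further down checked exactly these three resources.

# Rough plain-Python equivalent of the removed setup_hadoop_env() helper, for
# illustration only; the helper name and arguments here are placeholders.
import os

def setup_hadoop_env_sketch(hadoop_conf_dir, hadoop_conf_empty_dir,
                            hadoop_env_content, replace=False):
    # Directory(hadoop_conf_empty_dir, recursive=True): ensure the directory exists.
    if not os.path.isdir(hadoop_conf_empty_dir):
        os.makedirs(hadoop_conf_empty_dir)

    # Link(hadoop_conf_dir, to=..., not_if="ls {hadoop_conf_dir}"): create the
    # symlink only when the conf path is not already present.
    if not os.path.exists(hadoop_conf_dir):
        os.symlink(hadoop_conf_empty_dir, hadoop_conf_dir)

    # File('hadoop-env.sh', replace=replace): write the rendered template, but
    # leave an existing file untouched unless replace=True.
    target = os.path.join(hadoop_conf_dir, 'hadoop-env.sh')
    if replace or not os.path.exists(target):
        with open(target, 'w') as f:
            f.write(hadoop_env_content)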

http://git-wip-us.apache.org/repos/asf/ambari/blob/37af1a64/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index 7e446f1..22ce519 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -253,5 +253,3 @@ ttnode_heapsize = "1024m"
 dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
 mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
 mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
-
-dn_proc_user=hdfs_user
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/37af1a64/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
index 08358f5..a52dc77 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
@@ -20,7 +20,7 @@ import os
 
 from resource_management import *
 import re
-import hdfs
+
 
 def service(action=None, name=None, user=None, create_pid_dir=False,
             create_log_dir=False):
@@ -100,15 +100,7 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
           pass  # Pid file content is invalid
         except OSError:
           pass  # Process is not running
-    pass
-
-    # Set HADOOP_SECURE_DN_USER correctly in hadoop-env if DN is running as root
-    # in secure mode.
-    if user == 'root':
-      params.dn_proc_user = 'root'
-      hdfs.setup_hadoop_env(replace=True)
-    pass
-  pass
+
 
   hadoop_env_exports_str = ''
   for exp in hadoop_env_exports.items():

http://git-wip-us.apache.org/repos/asf/ambari/blob/37af1a64/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
index 9b43c20..4c60fb2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
@@ -134,13 +134,6 @@ export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
 export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}
 
 export HADOOP_OPTS="-Dhdp.version=$HDP_VERSION $HADOOP_OPTS"
-
-HDFS_DN_PROC_USER={{dn_proc_user}}
-if [ $HDFS_DN_PROC_USER="root" ]; then
-  export HADOOP_SECURE_DN_USER="{{hdfs_user}}"
-else
-  export HADOOP_SECURE_DN_USER=""
-fi
     </value>
   </property>
   

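A note on the shell fragment removed above: written as [ $HDFS_DN_PROC_USER="root" ], the expansion collapses to a single non-empty word, so the test is effectively always true and HADOOP_SECURE_DN_USER would be exported regardless of the user. A correct POSIX test needs quoting and spaces around the operator, i.e. [ "$HDFS_DN_PROC_USER" = "root" ].
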
http://git-wip-us.apache.org/repos/asf/ambari/blob/37af1a64/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index b376dfb..6b5fe6a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -19,7 +19,6 @@ limitations under the License.
 '''
 from ambari_commons import OSCheck
 import json
-import os
 from mock.mock import MagicMock, patch
 from stacks.utils.RMFTestCase import *
 
@@ -109,20 +108,6 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              recursive=True,
-                              owner='root',
-                              group='root'
-    )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to='/etc/hadoop/conf.empty',
-                              not_if='ls /etc/hadoop/conf'
-    )
-    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
-                              owner='root',
-                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
-                              replace=True
-    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -153,20 +138,6 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              recursive=True,
-                              owner='root',
-                              group='root'
-    )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to='/etc/hadoop/conf.empty',
-                              not_if='ls /etc/hadoop/conf'
-    )
-    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
-                              owner='root',
-                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
-                              replace=True
-    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -224,20 +195,6 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              recursive=True,
-                              owner='root',
-                              group='root'
-    )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to='/etc/hadoop/conf.empty',
-                              not_if='ls /etc/hadoop/conf'
-    )
-    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
-                              owner='root',
-                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
-                              replace=True
-    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -272,20 +229,6 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              recursive=True,
-                              owner='root',
-                              group='root'
-    )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to='/etc/hadoop/conf.empty',
-                              not_if='ls /etc/hadoop/conf'
-    )
-    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
-                              owner='root',
-                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
-                              replace=True
-    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',

