ambari-commits mailing list archives

From: aonis...@apache.org
Subject: [1/2] git commit: AMBARI-5592. Ambari attempts to format HDFS on every reboot on Ubuntu (aonishuk)
Date: Wed, 07 May 2014 12:57:35 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 5c11ca71b -> afce48094


AMBARI-5592. Ambari attempts to format HDFS on every reboot on Ubuntu (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f3e60556
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f3e60556
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f3e60556

Branch: refs/heads/trunk
Commit: f3e605566cb0fec80ca631c2cffe90fd72573d15
Parents: 5c11ca7
Author: Andrew Onishuk <aonishuk@hortonworks.com>
Authored: Wed May 7 15:49:34 2014 +0300
Committer: Andrew Onishuk <aonishuk@hortonworks.com>
Committed: Wed May 7 15:49:34 2014 +0300

----------------------------------------------------------------------
 .../services/HDFS/package/files/checkForFormat.sh    |  6 ++++++
 .../services/HDFS/package/scripts/hdfs_namenode.py   | 15 ++++++++++-----
 .../2.0.6/services/HDFS/package/scripts/params.py    |  3 ++-
 .../test/python/stacks/2.0.6/HDFS/test_namenode.py   | 14 ++++++++------
 4 files changed, 26 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f3e60556/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/files/checkForFormat.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/files/checkForFormat.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/files/checkForFormat.sh
index f92f613..d22d901 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/files/checkForFormat.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/files/checkForFormat.sh
@@ -24,6 +24,8 @@ export hdfs_user=$1
 shift
 export conf_dir=$1
 shift
+export old_mark_dir=$1
+shift
 export mark_dir=$1
 shift
 export name_dirs=$*
@@ -38,6 +40,10 @@ if [[ -f ${mark_file} ]] ; then
   mkdir -p ${mark_dir}
 fi
 
+if [[ -d $old_mark_dir ]] ; then
+  mv ${old_mark_dir} ${mark_dir}
+fi
+
 if [[ ! -d $mark_dir ]] ; then
   for dir in `echo $name_dirs | tr ',' ' '` ; do
     echo "NameNode Dirname = $dir"

http://git-wip-us.apache.org/repos/asf/ambari/blob/f3e60556/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
index 9138d66..e7ed33b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
@@ -101,6 +101,7 @@ def create_hdfs_directories(check):
 def format_namenode(force=None):
   import params
 
+  old_mark_dir = params.namenode_formatted_old_mark_dir
   mark_dir = params.namenode_formatted_mark_dir
   dfs_name_dir = params.dfs_name_dir
   hdfs_user = params.hdfs_user
@@ -115,11 +116,15 @@ def format_namenode(force=None):
            content=StaticFile("checkForFormat.sh"),
            mode=0755)
       Execute(format(
-        "/tmp/checkForFormat.sh {hdfs_user} {hadoop_conf_dir} {mark_dir} "
-        "{dfs_name_dir}"),
-              not_if=format("test -d {mark_dir}"),
-              path="/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin")
-    Execute(format("mkdir -p {mark_dir}"))
+        "/tmp/checkForFormat.sh {hdfs_user} {hadoop_conf_dir} {old_mark_dir} "
+        "{mark_dir} {dfs_name_dir}"),
+              not_if=format("test -d {old_mark_dir} || test -d {mark_dir}"),
+              path="/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin"
+      )
+    
+      Directory(mark_dir,
+        recursive = True
+      )
 
 
 def decommission():
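
format_namenode() now passes both marker directories to the check script, skips the check when either one already exists, and replaces the old Execute("mkdir -p {mark_dir}") with a declarative, idempotent Directory resource. With the default values from the test below, the not_if guard expands to nothing more than a shell test of the two locations, roughly:

    # the format check is skipped when either marker directory exists
    test -d /var/run/hadoop/hdfs/namenode/formatted/ \
      || test -d /var/lib/hdfs/namenode/formatted/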

http://git-wip-us.apache.org/repos/asf/ambari/blob/f3e60556/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index cd42a7a..7bc8ca7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -124,7 +124,8 @@ namenode_dirs_stub_filename = "namenode_dirs_created"
 smoke_hdfs_user_dir = format("/user/{smoke_user}")
 smoke_hdfs_user_mode = 0770
 
-namenode_formatted_mark_dir = format("{hadoop_pid_dir_prefix}/hdfs/namenode/formatted/")
+namenode_formatted_old_mark_dir = format("{hadoop_pid_dir_prefix}/hdfs/namenode/formatted/")
+namenode_formatted_mark_dir = format("/var/lib/hdfs/namenode/formatted/")
 
 fs_checkpoint_dir = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir']
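
The marker directory itself moves out of {hadoop_pid_dir_prefix} (/var/run/hadoop in the test configuration below) into /var/lib/hdfs: on Ubuntu /var/run lives on a tmpfs that is recreated empty at boot, which is why the "formatted" marker vanished and Ambari tried to reformat HDFS after every reboot, while /var/lib persists across reboots. A quick sanity check on a stock Ubuntu host (a sketch, not part of this commit) would be:

    ls -ld /var/run        # '/var/run -> /run' on recent Ubuntu releases
    mount | grep ' /run '  # tmpfs on /run ... (emptied at every boot)
    ls -ld /var/lib        # regular on-disk directory, survives reboots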
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f3e60556/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index a7d4171..54d2175 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -43,11 +43,12 @@ class TestNamenode(RMFTestCase):
                               content = StaticFile('checkForFormat.sh'),
                               mode = 0755,
                               )
-    self.assertResourceCalled('Execute', '/tmp/checkForFormat.sh hdfs /etc/hadoop/conf /var/run/hadoop/hdfs/namenode/formatted/ /hadoop/hdfs/namenode',
+    self.assertResourceCalled('Execute', '/tmp/checkForFormat.sh hdfs /etc/hadoop/conf /var/run/hadoop/hdfs/namenode/formatted/ /var/lib/hdfs/namenode/formatted/ /hadoop/hdfs/namenode',
                               path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
-                              not_if = 'test -d /var/run/hadoop/hdfs/namenode/formatted/',
+                              not_if = 'test -d /var/run/hadoop/hdfs/namenode/formatted/ || test -d /var/lib/hdfs/namenode/formatted/',
                               )
-    self.assertResourceCalled('Execute', 'mkdir -p /var/run/hadoop/hdfs/namenode/formatted/',
+    self.assertResourceCalled('Directory', '/var/lib/hdfs/namenode/formatted/',
+                              recursive = True,
                               )
     self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
                               owner = 'hdfs',
@@ -148,11 +149,12 @@ class TestNamenode(RMFTestCase):
                               content = StaticFile('checkForFormat.sh'),
                               mode = 0755,
                               )
-    self.assertResourceCalled('Execute', '/tmp/checkForFormat.sh hdfs /etc/hadoop/conf /var/run/hadoop/hdfs/namenode/formatted/ /hadoop/hdfs/namenode',
+    self.assertResourceCalled('Execute', '/tmp/checkForFormat.sh hdfs /etc/hadoop/conf /var/run/hadoop/hdfs/namenode/formatted/ /var/lib/hdfs/namenode/formatted/ /hadoop/hdfs/namenode',
                               path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
-                              not_if = 'test -d /var/run/hadoop/hdfs/namenode/formatted/',
+                              not_if = 'test -d /var/run/hadoop/hdfs/namenode/formatted/ || test -d /var/lib/hdfs/namenode/formatted/',
                               )
-    self.assertResourceCalled('Execute', 'mkdir -p /var/run/hadoop/hdfs/namenode/formatted/',
+    self.assertResourceCalled('Directory', '/var/lib/hdfs/namenode/formatted/',
+                              recursive = True
                               )
     self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
                               owner = 'hdfs',

