ambari-commits mailing list archives

From d...@apache.org
Subject ambari git commit: AMBARI-17764 Oozie service check failed after deployment (dsen)
Date Mon, 18 Jul 2016 10:00:40 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 492396bd3 -> 24706f2e2


AMBARI-17764 Oozie service check failed after deployment (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/24706f2e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/24706f2e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/24706f2e

Branch: refs/heads/trunk
Commit: 24706f2e26c11c9b7a7af771fcb77fbcc81acdff
Parents: 492396b
Author: Dmytro Sen <dsen@apache.org>
Authored: Mon Jul 18 13:00:25 2016 +0300
Committer: Dmytro Sen <dsen@apache.org>
Committed: Mon Jul 18 13:00:25 2016 +0300

----------------------------------------------------------------------
 .../stacks/HDP/2.0.6/services/stack_advisor.py  | 34 +++++++++++++-------
 .../stacks/2.0.6/common/test_stack_advisor.py   | 10 +++---
 .../stacks/2.1/common/test_stack_advisor.py     |  4 +--
 .../stacks/2.2/common/test_stack_advisor.py     | 13 ++++----
 4 files changed, 36 insertions(+), 25 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/24706f2e/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index ee37fe0..5806e52 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -307,22 +307,32 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
       putHDFSSiteProperty('dfs.datanode.data.dir', dataDirs)
     else:
       dataDirs = hdfsSiteProperties['dfs.datanode.data.dir'].split(",")
-    #dfs.datanode.du.reserved should be set to 10-15% of volume size
-    mountPoints = []
-    mountPointDiskAvailableSpace = [] #kBytes
+
+    # dfs.datanode.du.reserved should be set to 10-15% of volume size
+    # For each host selects maximum size of the volume. Then gets minimum for all hosts.
+    # This ensures that each host will have at least one data dir with available space.
+    reservedSizeRecommendation = 0l #kBytes
     for host in hosts["items"]:
+      mountPoints = []
+      mountPointDiskAvailableSpace = [] #kBytes
       for diskInfo in host["Hosts"]["disk_info"]:
         mountPoints.append(diskInfo["mountpoint"])
         mountPointDiskAvailableSpace.append(long(diskInfo["size"]))
-    maxFreeVolumeSize = 0l #kBytes
-    for dataDir in dataDirs:
-      mp = getMountPointForDir(dataDir, mountPoints)
-      for i in range(len(mountPoints)):
-        if mp == mountPoints[i]:
-          if mountPointDiskAvailableSpace[i] > maxFreeVolumeSize:
-            maxFreeVolumeSize = mountPointDiskAvailableSpace[i]
-
-    putHDFSSiteProperty('dfs.datanode.du.reserved', maxFreeVolumeSize * 1024 / 8) #Bytes
+
+      maxFreeVolumeSizeForHost = 0l #kBytes
+      for dataDir in dataDirs:
+        mp = getMountPointForDir(dataDir, mountPoints)
+        for i in range(len(mountPoints)):
+          if mp == mountPoints[i]:
+            if mountPointDiskAvailableSpace[i] > maxFreeVolumeSizeForHost:
+              maxFreeVolumeSizeForHost = mountPointDiskAvailableSpace[i]
+
+      if not reservedSizeRecommendation or maxFreeVolumeSizeForHost and maxFreeVolumeSizeForHost < reservedSizeRecommendation:
+        reservedSizeRecommendation = maxFreeVolumeSizeForHost
+
+    if reservedSizeRecommendation:
+      reservedSizeRecommendation = max(reservedSizeRecommendation * 1024 / 8, 1073741824) # At least 1Gb is reserved
+      putHDFSSiteProperty('dfs.datanode.du.reserved', reservedSizeRecommendation) #Bytes
 
     # recommendations for "hadoop.proxyuser.*.hosts", "hadoop.proxyuser.*.groups" properties in core-site
     self.recommendHadoopProxyUsers(configurations, services, hosts)
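
A minimal standalone sketch of the recommendation logic in the hunk above (illustrative only, not part of the commit): per host it finds the largest free volume backing a configured data dir, keeps the smallest of those maxima across all hosts, and reserves roughly 12.5% of it (kB * 1024 / 8 bytes) with a 1 GiB floor. It is written for Python 3, and longest_mount_prefix is a simplified stand-in for Ambari's getMountPointForDir.

ONE_GIB = 1073741824  # bytes

def longest_mount_prefix(path, mount_points):
    # Simplified stand-in for getMountPointForDir: pick the mount point that
    # is the longest prefix of the data dir path.
    best = None
    for mp in mount_points:
        if path.startswith(mp) and (best is None or len(mp) > len(best)):
            best = mp
    return best

def recommend_du_reserved(hosts, data_dirs):
    """Return a dfs.datanode.du.reserved recommendation in bytes, or None."""
    reserved_kb = 0  # minimum over hosts of each host's largest data volume (kB)
    for host in hosts["items"]:
        mount_points = [d["mountpoint"] for d in host["Hosts"]["disk_info"]]
        free_kb = [int(d["size"]) for d in host["Hosts"]["disk_info"]]

        # Largest free volume on this host that backs one of the data dirs.
        max_for_host = 0
        for data_dir in data_dirs:
            mp = longest_mount_prefix(data_dir, mount_points)
            for i, candidate in enumerate(mount_points):
                if mp == candidate and free_kb[i] > max_for_host:
                    max_for_host = free_kb[i]

        # Keep the smallest per-host maximum so every host has enough room.
        if not reserved_kb or (max_for_host and max_for_host < reserved_kb):
            reserved_kb = max_for_host

    if not reserved_kb:
        return None
    # Reserve ~12.5% of the volume (kB * 1024 / 8 = bytes), never less than 1 GiB.
    return max(reserved_kb * 1024 // 8, ONE_GIB)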

http://git-wip-us.apache.org/repos/asf/ambari/blob/24706f2e/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
index 1feeed2..380861f 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
@@ -983,7 +983,7 @@ class TestHDP206StackAdvisor(TestCase):
             "rack_info": "/default-rack",
             "total_mem": 2097152,
             "disk_info": [{
-              "size": '8',
+              "size": '80000000',
               "mountpoint": "/"
             }]
           }
@@ -1000,7 +1000,7 @@ class TestHDP206StackAdvisor(TestCase):
             "rack_info": "/default-rack",
             "total_mem": 1048576,
             "disk_info": [{
-              "size": '8',
+              "size": '800000000',
               "mountpoint": "/"
             }]
           }
@@ -1106,7 +1106,7 @@ class TestHDP206StackAdvisor(TestCase):
                 'hdfs-site':
                   {'properties': 
                      {'dfs.datanode.data.dir': '/hadoop/hdfs/data',
-                      'dfs.datanode.du.reserved': '1024'}},
+                      'dfs.datanode.du.reserved': '10240000000'}},
                 'hive-env':
                   {'properties':
                      {'hive_user': 'hive',
@@ -1162,7 +1162,7 @@ class TestHDP206StackAdvisor(TestCase):
                 'hdfs-site':
                   {'properties': 
                      {'dfs.datanode.data.dir': '/hadoop/hdfs/data',
-                      'dfs.datanode.du.reserved': '1024'}},
+                      'dfs.datanode.du.reserved': '10240000000'}},
                 'hadoop-env':
                   {'properties':
                      {'hdfs_user': 'hdfs1',
@@ -1182,7 +1182,7 @@ class TestHDP206StackAdvisor(TestCase):
     expected["hdfs-site"] = {
       'properties': {
         'dfs.datanode.data.dir': '/hadoop/hdfs/data',
-        'dfs.datanode.du.reserved': '1024',
+        'dfs.datanode.du.reserved': '10240000000',
         'dfs.internal.nameservices': 'mycluster',
         'dfs.ha.namenodes.mycluster': 'nn1,nn2'
       },
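
A worked example of the arithmetic behind these expected values (illustrative, not part of the commit): the smallest per-host maximum volume in the updated fixtures is 80,000,000 kB, so the recommendation is 80,000,000 * 1024 / 8 = 10,240,000,000 bytes, hence '1024' becomes '10240000000'. The previous 8 kB disks would now yield only 8 * 1024 / 8 = 1,024 bytes and be overridden by the 1 GiB (1,073,741,824 byte) floor, which appears to be why the fixture disk sizes were raised.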

http://git-wip-us.apache.org/repos/asf/ambari/blob/24706f2e/ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py
index 1232d97..e174461 100644
--- a/ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py
@@ -364,7 +364,7 @@ class TestHDP21StackAdvisor(TestCase):
         {
           "Hosts": {
             "disk_info": [{
-              "size": '8',
+              "size": '80000000',
               "mountpoint": "/"
             }]
           }
@@ -404,7 +404,7 @@ class TestHDP21StackAdvisor(TestCase):
       "hdfs-site": {
         "properties": {
           'dfs.datanode.data.dir': '/hadoop/hdfs/data',
-          'dfs.datanode.du.reserved': '1024'
+          'dfs.datanode.du.reserved': '10240000000'
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/24706f2e/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
index 91acb91..e4afc30 100644
--- a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
@@ -2968,7 +2968,7 @@ class TestHDP22StackAdvisor(TestCase):
       },
       'hdfs-site': {
         'properties': {
-          'dfs.datanode.du.reserved': '1024',
+          'dfs.datanode.du.reserved': '10240000000',
           'dfs.datanode.max.transfer.threads': '16384',
           'dfs.namenode.safemode.threshold-pct': '1.000',
           'dfs.datanode.failed.volumes.tolerated': '1',
@@ -3086,6 +3086,7 @@ class TestHDP22StackAdvisor(TestCase):
                 "configurations": configurations,
                 "ambari-server-properties": {"ambari-server.user":"ambari_user"}
                 }
+    # One host has bigger volume size. Minimum should be used for the calculations of dfs.datanode.du.reserved
     hosts = {
       "items" : [
         {
@@ -3100,7 +3101,7 @@ class TestHDP22StackAdvisor(TestCase):
             "rack_info" : "/default-rack",
             "total_mem" : 2097152,
             "disk_info": [{
-              "size": '8',
+              "size": '80000000',
               "mountpoint": "/"
             }]
           }
@@ -3117,7 +3118,7 @@ class TestHDP22StackAdvisor(TestCase):
             "rack_info" : "/default-rack",
             "total_mem" : 10485760,
             "disk_info": [{
-              "size": '8',
+              "size": '80000000000',
               "mountpoint": "/"
             }]
           }
@@ -3145,7 +3146,7 @@ class TestHDP22StackAdvisor(TestCase):
             "rack_info" : "/default-rack",
             "total_mem" : 2097152,
             "disk_info": [{
-              "size": '8',
+              "size": '80000000',
               "mountpoint": "/"
             }]
           }
@@ -3172,7 +3173,7 @@ class TestHDP22StackAdvisor(TestCase):
             "rack_info" : "/default-rack",
             "total_mem" : 2097152,
             "disk_info": [{
-              "size": '8',
+              "size": '80000000',
               "mountpoint": "/"
             }]
           }
@@ -3201,7 +3202,7 @@ class TestHDP22StackAdvisor(TestCase):
             "rack_info" : "/default-rack",
             "total_mem" : 2097152,
             "disk_info": [{
-              "size": '8',
+              "size": '80000000',
               "mountpoint": "/"
             }]
           }

