ambari-commits mailing list archives

From: adenis...@apache.org
Subject: ambari git commit: AMBARI-17939. Validations are missing for HDFS properties recommended by HAWQ. (adenissov)
Date: Thu, 28 Jul 2016 23:19:00 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.4 9eb8fb96f -> 7a3c28ee7


AMBARI-17939. Validations are missing for HDFS properties recommended by HAWQ. (adenissov)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7a3c28ee
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7a3c28ee
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7a3c28ee

Branch: refs/heads/branch-2.4
Commit: 7a3c28ee7aad660b68f68b0f3e4eee20b6232a64
Parents: 9eb8fb9
Author: Alexander Denissov <adenissov@pivotal.io>
Authored: Thu Jul 28 16:18:51 2016 -0700
Committer: Alexander Denissov <adenissov@pivotal.io>
Committed: Thu Jul 28 16:18:51 2016 -0700

----------------------------------------------------------------------
 .../HAWQ/2.0.0/service_advisor.py               |  77 ++++++++----
 .../stacks/HDP/2.3/services/stack_advisor.py    |  12 --
 .../HAWQ/test_service_advisor.py                | 120 ++++++++++++++++---
 .../stacks/2.3/common/test_stack_advisor.py     | 111 -----------------
 4 files changed, 158 insertions(+), 162 deletions(-)
----------------------------------------------------------------------
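For readers skimming the diff: the change boils down to a desired-value comparison, where each hdfs-site / core-site property HAWQ cares about is checked against the value the advisor recommends, producing a WARN item (or an ERROR item for dfs.allow.truncate) when the cluster value is missing or different. The standalone sketch below (Python 3, not the Ambari ServiceAdvisor API) only illustrates that idea; get_hdfs_site_desired_values mirrors getHDFSSiteDesiredValues() from the diff, while validate_site is a hypothetical stand-in for the new validateHDFSSiteConfigurations/validateCORESiteConfigurations methods and their getWarnItem/getErrorItem/toConfigurationValidationProblems helpers.

# Standalone sketch (not the Ambari ServiceAdvisor API); Python 3 for brevity.

def get_hdfs_site_desired_values(is_secure):
    # Mirrors getHDFSSiteDesiredValues() from the diff below.
    return {
        "dfs.allow.truncate": "true",
        "dfs.block.access.token.enable": str(is_secure).lower(),
        "dfs.block.local-path-access.user": "gpadmin",
        "dfs.client.read.shortcircuit": "true",
        "dfs.client.use.legacy.blockreader.local": "false",
        "dfs.datanode.data.dir.perm": "750",
        "dfs.datanode.handler.count": "60",
        "dfs.datanode.max.transfer.threads": "40960",
        "dfs.namenode.accesstime.precision": "0",
        "dfs.support.append": "true",
    }

def validate_site(site_properties, desired_values):
    # Hypothetical stand-in for the new validate*SiteConfigurations methods:
    # flag every property that is missing or differs from HAWQ's recommendation.
    problems = []
    for prop, desired in desired_values.items():
        if site_properties.get(prop) != desired:
            problems.append({
                "config-name": prop,
                "level": "ERROR" if prop == "dfs.allow.truncate" else "WARN",
                "message": "HAWQ requires this property to be set to the recommended value of " + desired,
            })
    return problems

if __name__ == "__main__":
    # Example: an hdfs-site that is missing dfs.allow.truncate yields one ERROR-level problem.
    hdfs_site = get_hdfs_site_desired_values(False)
    hdfs_site.pop("dfs.allow.truncate")
    for p in validate_site(hdfs_site, get_hdfs_site_desired_values(False)):
        print(p["level"], p["config-name"], "-", p["message"])
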


http://git-wip-us.apache.org/repos/asf/ambari/blob/7a3c28ee/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/service_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/service_advisor.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/service_advisor.py
index 9f85518..0777dc6 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/service_advisor.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/service_advisor.py
@@ -139,22 +139,8 @@ class HAWQ200ServiceAdvisor(service_advisor.ServiceAdvisor):
     if "hdfs-site" in services["configurations"]:
       hdfs_site = services["configurations"]["hdfs-site"]["properties"]
       putHdfsSiteProperty = self.putProperty(configurations, "hdfs-site", services)
-      putHdfsSitePropertyAttribute = self.putPropertyAttribute(configurations, "hdfs-site")
-
-      hdfs_site_desired_values = {
-        "dfs.allow.truncate" : "true",
-        "dfs.block.access.token.enable" : is_secured,
-        "dfs.block.local-path-access.user" : "gpadmin",
-        "dfs.client.read.shortcircuit" : "true",
-        "dfs.client.use.legacy.blockreader.local" : "false",
-        "dfs.datanode.data.dir.perm" : "750",
-        "dfs.datanode.handler.count" : "60",
-        "dfs.datanode.max.transfer.threads" : "40960",
-        "dfs.namenode.accesstime.precision" : "0",
-        "dfs.namenode.handler.count" : "200",
-        "dfs.support.append" : "true"
-      }
-      for property, desired_value in hdfs_site_desired_values.iteritems():
+
+      for property, desired_value in self.getHDFSSiteDesiredValues(self.isSecurityEnabled(services)).iteritems():
         if property not in hdfs_site or hdfs_site[property] != desired_value:
           putHdfsSiteProperty(property, desired_value)
 
@@ -163,11 +149,7 @@ class HAWQ200ServiceAdvisor(service_advisor.ServiceAdvisor):
       core_site = services["configurations"]["core-site"]["properties"]
       putCoreSiteProperty = self.putProperty(configurations, "core-site", services)
 
-      core_site_desired_values = {
-        "ipc.client.connection.maxidletime" : "3600000",
-        "ipc.server.listen.queue.size" : "3300"
-      }
-      for property, desired_value in core_site_desired_values.iteritems():
+      for property, desired_value in self.getCORESiteDesiredValues().iteritems():
         if property not in core_site or core_site[property] != desired_value:
           putCoreSiteProperty(property, desired_value)
 
@@ -303,6 +285,19 @@ class HAWQ200ServiceAdvisor(service_advisor.ServiceAdvisor):
     method = self.validateHAWQHdfsClientConfigurations
     resultItems = self.validateConfigurationsForSite(configurations, recommendedDefaults, services, hosts, siteName, method)
     items.extend(resultItems)
+
+    # validate recommended properties in hdfs-site
+    siteName = "hdfs-site"
+    method = self.validateHDFSSiteConfigurations
+    resultItems = self.validateConfigurationsForSite(configurations, recommendedDefaults, services, hosts, siteName, method)
+    items.extend(resultItems)
+
+    # validate recommended properties in core-site
+    siteName = "core-site"
+    method = self.validateCORESiteConfigurations
+    resultItems = self.validateConfigurationsForSite(configurations, recommendedDefaults, services, hosts, siteName, method)
+    items.extend(resultItems)
+
     return items
 
   def isHawqMasterPortConflict(self, configurations):
@@ -416,3 +411,43 @@ class HAWQ200ServiceAdvisor(service_advisor.ServiceAdvisor):
         validationItems.append({"config-name": PROP_NAME, "item": self.getWarnItem(message.format(PROP_NAME,
str(MIN_NUM_SEGMENT_THRESHOLD)))})
 
     return self.toConfigurationValidationProblems(validationItems, "hdfs-client")
+
+  def validateHDFSSiteConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
+    hdfs_site = properties
+    validationItems = []
+    for property, desired_value in self.getHDFSSiteDesiredValues(self.isSecurityEnabled(services)).iteritems():
+      if property not in hdfs_site or hdfs_site[property] != desired_value:
+        message = "HAWQ requires this property to be set to the recommended value of " +
desired_value
+        item = self.getErrorItem(message) if property == "dfs.allow.truncate" else self.getWarnItem(message)
+        validationItems.append({"config-name": property, "item": item})
+    return self.toConfigurationValidationProblems(validationItems, "hdfs-site")
+
+  def validateCORESiteConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
+    core_site = properties
+    validationItems = []
+    for property, desired_value in self.getCORESiteDesiredValues().iteritems():
+      if property not in core_site or core_site[property] != desired_value:
+        message = "HAWQ requires this property to be set to the recommended value of " +
desired_value
+        validationItems.append({"config-name": property, "item": self.getWarnItem(message)})
+    return self.toConfigurationValidationProblems(validationItems, "core-site")
+
+  def getHDFSSiteDesiredValues(self, is_secure):
+    hdfs_site_desired_values = {
+      "dfs.allow.truncate" : "true",
+      "dfs.block.access.token.enable" : str(is_secure).lower(),
+      "dfs.block.local-path-access.user" : "gpadmin",
+      "dfs.client.read.shortcircuit" : "true",
+      "dfs.client.use.legacy.blockreader.local" : "false",
+      "dfs.datanode.data.dir.perm" : "750",
+      "dfs.datanode.handler.count" : "60",
+      "dfs.datanode.max.transfer.threads" : "40960",
+      "dfs.namenode.accesstime.precision" : "0",
+      "dfs.support.append" : "true"
+    }
+    return hdfs_site_desired_values
+
+  def getCORESiteDesiredValues(self):
+    core_site_desired_values = {
+      "ipc.server.listen.queue.size" : "3300"
+    }
+    return core_site_desired_values
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a3c28ee/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
index c2c2ff6..94c3117 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
@@ -299,11 +299,6 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
     putHdfsSiteProperty = self.putProperty(configurations, "hdfs-site", services)
     putHdfsSitePropertyAttribute = self.putPropertyAttribute(configurations, "hdfs-site")
 
-    servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
-    if "HAWQ" in servicesList:
-      # Set dfs.allow.truncate to true
-      putHdfsSiteProperty('dfs.allow.truncate', 'true')
-
    if ('ranger-hdfs-plugin-properties' in services['configurations']) and ('ranger-hdfs-plugin-enabled' in services['configurations']['ranger-hdfs-plugin-properties']['properties']):
       rangerPluginEnabled = ''
      if 'ranger-hdfs-plugin-properties' in configurations and 'ranger-hdfs-plugin-enabled' in  configurations['ranger-hdfs-plugin-properties']['properties']:
@@ -812,13 +807,6 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
                                     "item": self.getWarnItem(
                                       "dfs.namenode.inode.attributes.provider.class needs
to be set to 'org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer' if Ranger HDFS
Plugin is enabled.")})
 
-    # Check if dfs.allow.truncate is true
-    if "HAWQ" in servicesList and \
-        not ("dfs.allow.truncate" in services["configurations"]["hdfs-site"]["properties"] and \
-        services["configurations"]["hdfs-site"]["properties"]["dfs.allow.truncate"].lower() == 'true'):
-        validationItems.append({"config-name": "dfs.allow.truncate",
-                                "item": self.getWarnItem("HAWQ requires dfs.allow.truncate in hdfs-site.xml set to True.")})
-
     validationProblems = self.toConfigurationValidationProblems(validationItems, "hdfs-site")
     validationProblems.extend(parentValidationProblems)
     return validationProblems

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a3c28ee/ambari-server/src/test/python/common-services/HAWQ/test_service_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/common-services/HAWQ/test_service_advisor.py b/ambari-server/src/test/python/common-services/HAWQ/test_service_advisor.py
index b3edae4..7ececa0 100644
--- a/ambari-server/src/test/python/common-services/HAWQ/test_service_advisor.py
+++ b/ambari-server/src/test/python/common-services/HAWQ/test_service_advisor.py
@@ -79,6 +79,21 @@ class TestHAWQ200ServiceAdvisor(TestCase):
         service["StackServices"]["advisor_name"] = "HAWQ200ServiceAdvisor"
         service["StackServices"]["advisor_path"] = self.hawq200ServiceAdvisorPath
 
+  def getDesiredHDFSSiteValues(self, is_secure):
+    hdfs_site_desired_values = {
+      "dfs.allow.truncate" : "true",
+      "dfs.block.access.token.enable" : str(is_secure).lower(),
+      "dfs.block.local-path-access.user" : "gpadmin",
+      "dfs.client.read.shortcircuit" : "true",
+      "dfs.client.use.legacy.blockreader.local" : "false",
+      "dfs.datanode.data.dir.perm" : "750",
+      "dfs.datanode.handler.count" : "60",
+      "dfs.datanode.max.transfer.threads" : "40960",
+      "dfs.namenode.accesstime.precision" : "0",
+      "dfs.support.append" : "true"
+    }
+    return hdfs_site_desired_values
+
   @patch("socket.getfqdn")
   def test_getHostsForMasterComponent(self, getfqdn_mock):
     getfqdn_mock.return_value = "c6401.ambari.apache.org"
@@ -265,27 +280,14 @@ class TestHAWQ200ServiceAdvisor(TestCase):
     }
 
     ## Test that HDFS parameters required by HAWQ are recommended
-    self.serviceAdvisor.getServiceConfigurationRecommendations(configurations, None, services, hosts)
-    self.assertEquals(configurations["hdfs-site"]["properties"]["dfs.allow.truncate"], "true")
-    self.assertEquals(configurations["hdfs-site"]["properties"]["dfs.block.access.token.enable"], "false")
-    self.assertEquals(configurations["hdfs-site"]["properties"]["dfs.block.local-path-access.user"], "gpadmin")
-    self.assertEquals(configurations["hdfs-site"]["properties"]["dfs.client.read.shortcircuit"], "true")
-    self.assertEquals(configurations["hdfs-site"]["properties"]["dfs.client.use.legacy.blockreader.local"], "false")
-    self.assertEquals(configurations["hdfs-site"]["properties"]["dfs.datanode.data.dir.perm"], "750")
-    self.assertEquals(configurations["hdfs-site"]["properties"]["dfs.datanode.handler.count"], "60")
-    self.assertEquals(configurations["hdfs-site"]["properties"]["dfs.datanode.max.transfer.threads"], "40960")
-    self.assertEquals(configurations["hdfs-site"]["properties"]["dfs.namenode.accesstime.precision"], "0")
-    self.assertEquals(configurations["hdfs-site"]["properties"]["dfs.namenode.handler.count"], "200")
-    self.assertEquals(configurations["hdfs-site"]["properties"]["dfs.support.append"], "true")
-
-    self.assertEquals(configurations["core-site"]["properties"]["ipc.client.connection.maxidletime"], "3600000")
-    self.assertEquals(configurations["core-site"]["properties"]["ipc.server.listen.queue.size"], "3300")
-
-
     configurations["cluster-env"]["properties"]["security_enabled"]="false"
    self.serviceAdvisor.getServiceConfigurationRecommendations(configurations, None, services, hosts)
-    self.assertEquals(configurations["hdfs-site"]["properties"]["dfs.block.access.token.enable"], "false")
+    hdfs_site_desired_values = self.getDesiredHDFSSiteValues(False)
+    for property, value in hdfs_site_desired_values.iteritems():
+      self.assertEquals(configurations["hdfs-site"]["properties"][property], value)
+    self.assertEquals(configurations["core-site"]["properties"]["ipc.server.listen.queue.size"],
"3300")
 
+    # Kerberos causes 1 property to be recommended differently
     configurations["cluster-env"]["properties"]["security_enabled"]="true"
    self.serviceAdvisor.getServiceConfigurationRecommendations(configurations, None, services, hosts)
    self.assertEquals(configurations["hdfs-site"]["properties"]["dfs.block.access.token.enable"], "true")
@@ -1166,3 +1168,85 @@ class TestHAWQ200ServiceAdvisor(TestCase):
    problems = self.serviceAdvisor.validateHAWQHdfsClientConfigurations(properties, defaults, configurations, services, hosts)
     self.assertEqual(len(problems), 1)
     self.assertEqual(problems[0], expected)
+
+  def test_validateHDFSSiteConfigurations(self):
+    services = {
+      "services":  [
+        { "StackServices": {"service_name": "HAWQ"},
+          "components": [{
+            "StackServiceComponents": {
+              "component_name": "HAWQSEGMENT",
+              "hostnames": []
+            }}]
+          }],
+      "configurations": {"hdfs-site": {}, "core-site": {}}
+    }
+
+    # setup default configuration values for non-kerberos case
+    configurations = services["configurations"]
+    configurations["cluster-env"] = {"properties": {"security_enabled": "false"}}
+    defaults = {}
+    hosts = {}
+    desired_values = self.getDesiredHDFSSiteValues(False)
+
+    # check all properties setup correctly in hdfs-site
+    configurations["hdfs-site"]["properties"] = desired_values.copy()
+    problems = self.serviceAdvisor.validateHDFSSiteConfigurations(configurations["hdfs-site"]["properties"], defaults, configurations, services, hosts)
+    self.assertEqual(len(problems), 0)
+
+    # check overall number of validations for hdfs-site
+    configurations["hdfs-site"]["properties"] = {}
+    problems = self.serviceAdvisor.validateHDFSSiteConfigurations(configurations["hdfs-site"]["properties"], defaults, configurations, services, hosts)
+    self.assertEqual(len(problems), 10)
+
+    # check individual properties
+    for property in desired_values.keys():
+      # populate all properties as to desired configuration
+      configurations["hdfs-site"]["properties"] = desired_values.copy()
+      # test when the given property is missing
+      configurations["hdfs-site"]["properties"].pop(property)
+      expected = {
+        'config-type': 'hdfs-site',
+        'message': 'HAWQ requires this property to be set to the recommended value of ' + desired_values[property],
+        'type': 'configuration',
+        'config-name': property,
+        'level': 'ERROR' if property == 'dfs.allow.truncate' else 'WARN'
+      }
+      problems = self.serviceAdvisor.validateHDFSSiteConfigurations(configurations["hdfs-site"]["properties"], defaults, configurations, services, hosts)
+      self.assertEqual(len(problems), 1)
+      self.assertEqual(problems[0], expected)
+
+      # test when the given property has a non-desired value
+      configurations["hdfs-site"]["properties"][property] = "foo"
+      problems = self.serviceAdvisor.validateHDFSSiteConfigurations(configurations["hdfs-site"]["properties"], defaults, configurations, services, hosts)
+      self.assertEqual(len(problems), 1)
+      self.assertEqual(problems[0], expected)
+
+    # check all properties setup correctly in core-site
+    configurations["core-site"]["properties"] = {"ipc.server.listen.queue.size" : "3300"}
+    problems = self.serviceAdvisor.validateCORESiteConfigurations(configurations["core-site"]["properties"], defaults, configurations, services, hosts)
+    self.assertEqual(len(problems), 0)
+
+    # check overall number of validations for core-site
+    configurations["core-site"]["properties"] = {}
+    problems = self.serviceAdvisor.validateCORESiteConfigurations(configurations["core-site"]["properties"], defaults, configurations, services, hosts)
+    self.assertEqual(len(problems), 1)
+
+    # check incorrect core-site property
+    expected = {
+      'config-type': 'core-site',
+      'message': 'HAWQ requires this property to be set to the recommended value of 3300',
+      'type': 'configuration',
+      'config-name': 'ipc.server.listen.queue.size',
+      'level': 'WARN'
+    }
+    configurations["core-site"]["properties"] = {"ipc.server.listen.queue.size" : "0"}
+    problems = self.serviceAdvisor.validateCORESiteConfigurations(configurations["core-site"]["properties"], defaults, configurations, services, hosts)
+    self.assertEqual(len(problems), 1)
+    self.assertEqual(problems[0], expected)
+
+    # check missing core-site property
+    configurations["core-site"]["properties"].pop("ipc.server.listen.queue.size")
+    problems = self.serviceAdvisor.validateCORESiteConfigurations(configurations["core-site"]["properties"], defaults, configurations, services, hosts)
+    self.assertEqual(len(problems), 1)
+    self.assertEqual(problems[0], expected)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/7a3c28ee/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
index ff87adb..15c19d1 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
@@ -186,26 +186,6 @@ class TestHDP23StackAdvisor(TestCase):
    self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services, hosts)
    self.assertEquals(configurations['hdfs-site']['properties']['dfs.namenode.inode.attributes.provider.class'], 'org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer', "Test with Ranger HDFS plugin is enabled")
 
-    # Test 1 for dfs.allow.truncate with no HAWQ and dfs.allow.truncate not set
-    self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services, hosts)
-    self.assertTrue('dfs.allow.truncate' not in configurations['hdfs-site']['properties'])
-
-    # Test 2 for dfs.allow.truncate with HAWQ and dfs.allow.truncate not set
-    services["services"].append({"StackServices" : {"service_name" : "HAWQ"}, "components":[]})
-    self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services, hosts)
-    self.assertEquals(configurations['hdfs-site']['properties']['dfs.allow.truncate'], 'true')
-
-    # Test 3 for dfs.allow.truncate with no HAWQ and dfs.allow.truncate set to false
-    services["services"].remove({"StackServices" : {"service_name" : "HAWQ"}, "components":[]})
-    configurations['hdfs-site']['properties']['dfs.allow.truncate'] = 'false'
-    self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services, hosts)
-    self.assertEquals(configurations['hdfs-site']['properties']['dfs.allow.truncate'], 'false')
-
-    # Test 4 for dfs.allow.truncate with HAWQ and dfs.allow.truncate set to false
-    services["services"].append({"StackServices" : {"service_name" : "HAWQ"}, "components":[]})
-    self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services, hosts)
-    self.assertEquals(configurations['hdfs-site']['properties']['dfs.allow.truncate'], 'true')
-
   def test_recommendYARNConfigurations(self):
     configurations = {}
     servicesList = ["YARN"]
@@ -1561,97 +1541,6 @@ class TestHDP23StackAdvisor(TestCase):
    self.stackAdvisor.recommendRangerConfigurations(recommendedConfigurations, clusterData, services, None)
    self.assertEquals(recommendedConfigurations['ranger-admin-site']['properties']['ranger.audit.solr.zookeepers'], 'NONE')
 
-  def test_validateHDFSConfigurations(self):
-    properties = {'dfs.datanode.data.dir': '/hadoop/hdfs/data'}
-    recommendedDefaults = {}
-    configurations = {
-      "core-site": {"properties": {}},
-    }
-    services = {
-      "services": [
-        {
-          "StackServices": {
-            "service_name": "HDFS"
-          },
-          "components": [
-            {
-              "StackServiceComponents": {
-                "component_name": "NAMENODE",
-                "hostnames": ["c6401.ambari.apache.org"]
-              }
-            }
-          ]
-        },
-        {
-          "StackServices": {
-            "service_name": "HAWQ"
-          },
-          "components": [
-            {
-              "StackServiceComponents": {
-                "component_name": "HAWQMASTER",
-                "hostnames": ["c6401.ambari.apache.org"]
-              }
-            }
-          ]
-        }
-      ],
-      "configurations": configurations
-    }
-    services["configurations"]["hdfs-site"] = {}
-    services["configurations"]["hdfs-site"]["properties"] = {}
-    hosts = {}
-
-    expected_warning = {
-        'config-type':'hdfs-site',
-        'message':'HAWQ requires dfs.allow.truncate in hdfs-site.xml set to True.',
-        'type':'configuration',
-        'config-name':'dfs.allow.truncate',
-        'level':'WARN'
-    }
-
-    # Test following cases:
-    # when HAWQ is being installed and dfs.allow.truncate is not set at all, warning
-    # when HAWQ is being installed and dfs.allow.truncate is set to True, no warning
-    # when HAWQ is being installed and dfs.allow.truncate is not set to True, warning
-    # when HAWQ is not installed and dfs.allow.truncate is not set at all, no warning
-    # when HAWQ is not installed and dfs.allow.truncate is set to True, no warning
-    # when HAWQ is not installed and dfs.allow.truncate is not set to True, no warning
-    # 1
-    problems = self.stackAdvisor.validateHDFSConfigurations(properties, recommendedDefaults, configurations, services, hosts)
-    self.assertEqual(len(problems), 1)
-    self.assertEqual(problems[0], expected_warning)
-
-    # 2
-    services["configurations"]["hdfs-site"]["properties"]["dfs.allow.truncate"] = "True"
-    problems = self.stackAdvisor.validateHDFSConfigurations(properties, recommendedDefaults, configurations, services, hosts)
-    self.assertEqual(len(problems), 0)
-
-    # 3
-    services["configurations"]["hdfs-site"]["properties"]["dfs.allow.truncate"] = "false"
-    problems = self.stackAdvisor.validateHDFSConfigurations(properties, recommendedDefaults, configurations, services, hosts)
-    self.assertEqual(len(problems), 1)
-    self.assertEqual(problems[0], expected_warning)
-
-    del services["configurations"]["hdfs-site"]["properties"]["dfs.allow.truncate"]
-    servicesElementWithoutHAWQ = [service for service in services["services"] if service["StackServices"]["service_name"] != "HAWQ"]
-    services["services"] = servicesElementWithoutHAWQ
-
-    # 4
-    problems = self.stackAdvisor.validateHDFSConfigurations(properties, recommendedDefaults, configurations, services, hosts)
-    self.assertEqual(len(problems), 0)
-
-    # 5
-    services["configurations"]["hdfs-site"]["properties"]["dfs.allow.truncate"] = "True"
-    problems = self.stackAdvisor.validateHDFSConfigurations(properties, recommendedDefaults, configurations, services, hosts)
-    self.assertEqual(len(problems), 0)
-
-    # 6
-    services["configurations"]["hdfs-site"]["properties"]["dfs.allow.truncate"] = "false"
-    problems = self.stackAdvisor.validateHDFSConfigurations(properties, recommendedDefaults, configurations, services, hosts)
-    self.assertEqual(len(problems), 0)
-
-
   def test_recommendRangerKMSConfigurations(self):
     clusterData = {}
     services = {

