ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From dmitriu...@apache.org
Subject [1/2] ambari git commit: AMBARI-17873. STS goes down after EU when Yarn is configured for custom queues (dgrinenko via dlysnichenko)
Date Mon, 25 Jul 2016 16:20:08 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.4 78f3a060a -> 807e56a3b
  refs/heads/trunk 8b041be4a -> b19714343


AMBARI-17873. STS goes down after EU when Yarn is configured for custom queues (dgrinenko via dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/807e56a3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/807e56a3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/807e56a3

Branch: refs/heads/branch-2.4
Commit: 807e56a3bddfaa40a30c524898241c47c5ae3df7
Parents: 78f3a06
Author: Lisnichenko Dmitro <dlysnichenko@hortonworks.com>
Authored: Mon Jul 25 19:18:47 2016 +0300
Committer: Lisnichenko Dmitro <dlysnichenko@hortonworks.com>
Committed: Mon Jul 25 19:18:47 2016 +0300

----------------------------------------------------------------------
 .../1.2.1/configuration/spark-defaults.xml      |  6 +++
 .../configuration/spark-thrift-sparkconf.xml    |  6 +++
 .../2.0.0/configuration/spark2-defaults.xml     |  6 +++
 .../configuration/spark2-thrift-sparkconf.xml   |  6 +++
 .../stacks/HDP/2.2/services/stack_advisor.py    | 43 +++++++++++++++-
 .../stacks/HDP/2.5/services/stack_advisor.py    | 38 +++++++++++++-
 .../stacks/2.2/common/test_stack_advisor.py     | 49 ++++++++++++++++++
 .../stacks/2.5/common/test_stack_advisor.py     | 52 ++++++++++++++++++++
 8 files changed, 202 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/807e56a3/ambari-server/src/main/resources/common-services/SPARK/1.2.1/configuration/spark-defaults.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/configuration/spark-defaults.xml
b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/configuration/spark-defaults.xml
index c53dcd7..646239e 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/configuration/spark-defaults.xml
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/configuration/spark-defaults.xml
@@ -69,6 +69,12 @@
     <description>
       The name of the YARN queue to which the application is submitted.
     </description>
+    <depends-on>
+      <property>
+        <type>capacity-scheduler</type>
+        <name>yarn.scheduler.capacity.root.queues</name>
+      </property>
+    </depends-on>
     <on-ambari-upgrade add="true"/>
   </property>
   <property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/807e56a3/ambari-server/src/main/resources/common-services/SPARK/1.5.2/configuration/spark-thrift-sparkconf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.5.2/configuration/spark-thrift-sparkconf.xml
b/ambari-server/src/main/resources/common-services/SPARK/1.5.2/configuration/spark-thrift-sparkconf.xml
index 67120d3..e27c986 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.5.2/configuration/spark-thrift-sparkconf.xml
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.5.2/configuration/spark-thrift-sparkconf.xml
@@ -69,6 +69,12 @@
     <description>
       The name of the YARN queue to which the application is submitted.
     </description>
+    <depends-on>
+      <property>
+        <type>capacity-scheduler</type>
+        <name>yarn.scheduler.capacity.root.queues</name>
+      </property>
+    </depends-on>
     <on-ambari-upgrade add="true"/>
   </property>
   <property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/807e56a3/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-defaults.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-defaults.xml
b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-defaults.xml
index c1ddd87..2f9334e 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-defaults.xml
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-defaults.xml
@@ -25,6 +25,12 @@
     <description>
       The name of the YARN queue to which the application is submitted.
     </description>
+    <depends-on>
+      <property>
+        <type>capacity-scheduler</type>
+        <name>yarn.scheduler.capacity.root.queues</name>
+      </property>
+    </depends-on>
     <on-ambari-upgrade add="true"/>
   </property>
   <property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/807e56a3/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-thrift-sparkconf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-thrift-sparkconf.xml
b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-thrift-sparkconf.xml
index fd339c3..dfe87cb 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-thrift-sparkconf.xml
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-thrift-sparkconf.xml
@@ -25,6 +25,12 @@
     <description>
       The name of the YARN queue to which the application is submitted.
     </description>
+    <depends-on>
+      <property>
+        <type>capacity-scheduler</type>
+        <name>yarn.scheduler.capacity.root.queues</name>
+      </property>
+    </depends-on>
     <on-ambari-upgrade add="true"/>
   </property>
   <property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/807e56a3/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
index 5b7b7a4..862aab7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
@@ -26,6 +26,7 @@ import socket
 import re
 import xml.etree.ElementTree as ET
 
+
 class HDP22StackAdvisor(HDP21StackAdvisor):
 
   def getServiceConfigurationRecommenderDict(self):
@@ -41,11 +42,29 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
       "STORM": self.recommendStormConfigurations,
       "KNOX": self.recommendKnoxConfigurations,
       "RANGER": self.recommendRangerConfigurations,
-      "LOGSEARCH" : self.recommendLogsearchConfigurations
+      "LOGSEARCH" : self.recommendLogsearchConfigurations,
+      "SPARK": self.recommendSparkConfigurations,
     }
     parentRecommendConfDict.update(childRecommendConfDict)
     return parentRecommendConfDict
 
+
+  def recommendSparkConfigurations(self, configurations, clusterData, services, hosts):
+    """
+    :type configurations dict
+    :type clusterData dict
+    :type services dict
+    :type hosts dict
+    """
+    putSparkProperty = self.putProperty(configurations, "spark-defaults", services)
+    putSparkProperty("spark.yarn.queue", self.recommendYarnQueue(services))
+
+    # add only if spark supports this config
+    if "configurations" in services and "spark-thrift-sparkconf" in services["configurations"]:
+      putSparkThriftSparkConf = self.putProperty(configurations, "spark-thrift-sparkconf", services)
+      putSparkThriftSparkConf("spark.yarn.queue", self.recommendYarnQueue(services))
+
+
   def recommendYARNConfigurations(self, configurations, clusterData, services, hosts):
    super(HDP22StackAdvisor, self).recommendYARNConfigurations(configurations, clusterData, services, hosts)
     putYarnProperty = self.putProperty(configurations, "yarn-site", services)
@@ -960,7 +979,9 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
       "STORM": {"ranger-storm-plugin-properties": self.validateStormRangerPluginConfigurations},
       "MAPREDUCE2": {"mapred-site": self.validateMapReduce2Configurations},
       "TEZ": {"tez-site": self.validateTezConfigurations},
-      "RANGER": {"ranger-env": self.validateRangerConfigurationsEnv}
+      "RANGER": {"ranger-env": self.validateRangerConfigurationsEnv},
+      "SPARK": {"spark-defaults": self.validateSparkDefaults,
+                "spark-thrift-sparkconf": self.validateSparkThriftSparkConf}
     }
     self.mergeValidators(parentValidators, childValidators)
     return parentValidators
@@ -1367,6 +1388,24 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
     configurationValidationProblems.extend(parentValidationProblems)
     return configurationValidationProblems
 
+  def validateSparkDefaults(self, properties, recommendedDefaults, configurations, services, hosts):
+    validationItems = [
+      {
+        "config-name": 'spark.yarn.queue',
+        "item": self.validatorYarnQueue(properties, recommendedDefaults, 'spark.yarn.queue', services)
+      }
+    ]
+    return self.toConfigurationValidationProblems(validationItems, "spark-defaults")
+
+  def validateSparkThriftSparkConf(self, properties, recommendedDefaults, configurations, services, hosts):
+    validationItems = [
+      {
+        "config-name": 'spark.yarn.queue',
+        "item": self.validatorYarnQueue(properties, recommendedDefaults, 'spark.yarn.queue', services)
+      }
+    ]
+    return self.toConfigurationValidationProblems(validationItems, "spark-thrift-sparkconf")
+
  def validateHBASEConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
     hbase_site = properties
     validationItems = []

http://git-wip-us.apache.org/repos/asf/ambari/blob/807e56a3/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index d2b7371..4972972 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -106,7 +106,9 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
                "hive-interactive-site": self.validateHiveInteractiveSiteConfigurations},
       "YARN": {"yarn-site": self.validateYarnConfigurations},
       "RANGER": {"ranger-tagsync-site": self.validateRangerTagsyncConfigurations,
-                "ranger-admin-site": self.validateRangerAdminConfigurations}
+                "ranger-admin-site": self.validateRangerAdminConfigurations},
+      "SPARK2": {"spark2-defaults": self.validateSpark2Defaults,
+                 "spark2-thrift-sparkconf": self.validateSpark2ThriftSparkConf}
     }
     self.mergeValidators(parentValidators, childValidators)
     return parentValidators
@@ -199,6 +201,24 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
     validationProblems = self.toConfigurationValidationProblems(validationItems, "application-properties")
     return validationProblems
 
+  def validateSpark2Defaults(self, properties, recommendedDefaults, configurations, services, hosts):
+    validationItems = [
+      {
+        "config-name": 'spark.yarn.queue',
+        "item": self.validatorYarnQueue(properties, recommendedDefaults, 'spark.yarn.queue', services)
+      }
+    ]
+    return self.toConfigurationValidationProblems(validationItems, "spark2-defaults")
+
+  def validateSpark2ThriftSparkConf(self, properties, recommendedDefaults, configurations, services, hosts):
+    validationItems = [
+      {
+        "config-name": 'spark.yarn.queue',
+        "item": self.validatorYarnQueue(properties, recommendedDefaults, 'spark.yarn.queue', services)
+      }
+    ]
+    return self.toConfigurationValidationProblems(validationItems, "spark2-thrift-sparkconf")
+
  def validateYarnConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
    parentValidationProblems = super(HDP25StackAdvisor, self).validateYARNConfigurations(properties, recommendedDefaults, configurations, services, hosts)
     yarn_site_properties = getSiteProperties(configurations, "yarn-site")
@@ -371,11 +391,25 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
       "ATLAS": self.recommendAtlasConfigurations,
       "RANGER_KMS": self.recommendRangerKMSConfigurations,
       "STORM": self.recommendStormConfigurations,
-      "OOZIE": self.recommendOozieConfigurations
+      "OOZIE": self.recommendOozieConfigurations,
+      "SPARK2": self.recommendSpark2Configurations
     }
     parentRecommendConfDict.update(childRecommendConfDict)
     return parentRecommendConfDict
 
+  def recommendSpark2Configurations(self, configurations, clusterData, services, hosts):
+    """
+    :type configurations dict
+    :type clusterData dict
+    :type services dict
+    :type hosts dict
+    """
+    putSparkProperty = self.putProperty(configurations, "spark2-defaults", services)
+    putSparkThriftSparkConf = self.putProperty(configurations, "spark2-thrift-sparkconf", services)
+
+    putSparkProperty("spark.yarn.queue", self.recommendYarnQueue(services))
+    putSparkThriftSparkConf("spark.yarn.queue", self.recommendYarnQueue(services))
+
   def recommendStormConfigurations(self, configurations, clusterData, services, hosts):
    super(HDP25StackAdvisor, self).recommendStormConfigurations(configurations, clusterData, services, hosts)
     storm_site = getServicesSiteProperties(services, "storm-site")

http://git-wip-us.apache.org/repos/asf/ambari/blob/807e56a3/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
index ab5ae8b..a3e0e16 100644
--- a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
@@ -910,6 +910,32 @@ class TestHDP22StackAdvisor(TestCase):
    self.stackAdvisor.recommendYARNConfigurations(configurations, clusterData, services, None)
     self.assertEquals(configurations, expected)
 
+  def test_recommendSPARKConfigurations(self):
+    configurations = {}
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "SPARK"
+        },
+      }
+    ]
+    clusterData = {
+      "cpu": 4,
+      "containers": 5,
+      "ramPerContainer": 256
+    }
+    expected = {
+      "spark-defaults": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendSparkConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected)
+
   def test_recommendYARNConfigurationAttributes(self):
     configurations = {
       "yarn-env": {
@@ -4105,3 +4131,26 @@ class TestHDP22StackAdvisor(TestCase):
 
    res = self.stackAdvisor.validateRangerConfigurationsEnv(properties, recommendedDefaults, configurations, services, {})
     self.assertEquals(res, res_expected)
+
+  def test_validateSparkDefaults(self):
+    properties = {}
+    recommendedDefaults = {
+      "spark.yarn.queue": "default",
+    }
+    configurations = {}
+    services = {
+      "services":
+        [
+          {
+            "StackServices": {
+              "service_name": "SPARK"
+            }
+          }
+        ]
+    }
+
+    # spark.yarn.queue is absent from properties, so validation must fail
+    res_expected = [{'config-type': 'spark-defaults', 'message': 'Value should be set', 'type': 'configuration', 'config-name': 'spark.yarn.queue', 'level': 'ERROR'}]
+
+    res = self.stackAdvisor.validateSparkDefaults(properties, recommendedDefaults, configurations, services, {})
+    self.assertEquals(res, res_expected)

http://git-wip-us.apache.org/repos/asf/ambari/blob/807e56a3/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
index 64f63d9..ebd2f2a 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
@@ -361,6 +361,36 @@ class TestHDP25StackAdvisor(TestCase):
     self.expected_visibility_false = {'visible': 'false'}
     self.expected_visibility_true = {'visible': 'true'}
 
+  def test_recommendSPARK2Configurations(self):
+    configurations = {}
+    services = {"configurations": configurations}
+    services['services'] = [
+      {
+        "StackServices": {
+          "service_name": "SPARK2"
+        },
+      }
+    ]
+    clusterData = {
+      "cpu": 4,
+      "containers": 5,
+      "ramPerContainer": 256
+    }
+    expected = {
+      "spark2-defaults": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      },
+      "spark2-thrift-sparkconf": {
+        "properties": {
+          "spark.yarn.queue": "default"
+        }
+      }
+    }
+
+    self.stackAdvisor.recommendSpark2Configurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected)
 
   def load_json(self, filename):
     file = os.path.join(self.testDirectory, filename)
@@ -7542,6 +7572,28 @@ class TestHDP25StackAdvisor(TestCase):
    self.stackAdvisor.recommendStormConfigurations(configurations, clusterData, services, None)
    self.assertEquals(configurations['storm-site']['properties']['nimbus.authorizer'], 'org.apache.storm.security.auth.authorizer.SimpleACLAuthorizer', "Test nimbus.authorizer with Ranger Storm plugin being disabled in kerberos environment")
 
+  def test_validateSpark2Defaults(self):
+    properties = {}
+    recommendedDefaults = {
+      "spark.yarn.queue": "default",
+    }
+    configurations = {}
+    services = {
+      "services":
+        [
+          {
+            "StackServices": {
+              "service_name": "SPARK2"
+            }
+          }
+        ]
+    }
+
+    # spark.yarn.queue is absent from properties, so validation must fail
+    res_expected = [{'config-type': 'spark2-defaults', 'message': 'Value should be set', 'type': 'configuration', 'config-name': 'spark.yarn.queue', 'level': 'ERROR'}]
+
+    res = self.stackAdvisor.validateSpark2Defaults(properties, recommendedDefaults, configurations, services, {})
+    self.assertEquals(res, res_expected)
 
 """
 Given a comma-separated string, split the items, sort them, and re-join the elements


Mime
View raw message