ambari-commits mailing list archives

From d...@apache.org
Subject [2/2] ambari git commit: AMBARI-13936 Add support for ATS v1.5 for HDP versions 2.3 and higher (dsen)
Date Wed, 18 Nov 2015 13:43:19 GMT
AMBARI-13936 Add support for ATS v1.5 for HDP versions 2.3 and higher (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8769f418
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8769f418
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8769f418

Branch: refs/heads/trunk
Commit: 8769f4187e7e56c2bd23ea722fae0255c8697908
Parents: 0fdf876
Author: Dmytro Sen <dsen@apache.org>
Authored: Wed Nov 18 15:42:59 2015 +0200
Committer: Dmytro Sen <dsen@apache.org>
Committed: Wed Nov 18 15:42:59 2015 +0200

----------------------------------------------------------------------
 .../2.1.0.2.0/package/scripts/params_linux.py   |    6 +
 .../YARN/2.1.0.2.0/package/scripts/yarn.py      |   36 +
 .../2.3/services/TEZ/configuration/tez-site.xml |    5 +
 .../services/YARN/configuration/yarn-site.xml   |   48 +-
 .../stacks/2.1/YARN/test_apptimelineserver.py   |   14 +-
 .../test/python/stacks/2.3/YARN/test_ats_1_5.py |  303 +++++
 .../test/python/stacks/2.3/configs/ats_1_5.json | 1111 ++++++++++++++++++
 7 files changed, 1521 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8769f418/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index aa71751..14336ec 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -98,6 +98,12 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   # Timeline Service property that was added in 2.2
   ats_leveldb_state_store_dir = config['configurations']['yarn-site']['yarn.timeline-service.leveldb-state-store.path']
 
+# ats 1.5 properties
+entity_groupfs_active_dir = config['configurations']['yarn-site']['yarn.timeline-service.entity-group-fs-store.active-dir']
+entity_groupfs_active_dir_mode = 01777
+entity_groupfs_store_dir = config['configurations']['yarn-site']['yarn.timeline-service.entity-group-fs-store.done-dir']
+entity_groupfs_store_dir_mode = 0700
+
 hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
 
 limits_conf_dir = "/etc/security/limits.d"
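The two parameters added here read the ATS v1.5 active and done directories from yarn-site and pair each with an octal mode: 01777 (sticky bit, world-writable) for the active directory that running applications write into, and 0700 for the done directory that is private to the yarn user. A minimal sketch, not part of the commit, of the same modes written as Python 3 octal literals (the hunk uses Python 2 literal syntax):

# Sketch only: the two directory modes above as Python 3 octal literals.
entity_groupfs_active_dir_mode = 0o1777  # sticky + rwx for all, so applications can write their entity data
entity_groupfs_store_dir_mode = 0o700    # done-dir readable and writable only by the yarn user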

http://git-wip-us.apache.org/repos/asf/ambari/blob/8769f418/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
index 81809aa..af28ce9 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
@@ -256,6 +256,42 @@ def yarn(name = None):
        recursive=True,
        cd_access="a",
       )
+    # app timeline server 1.5 directories
+    if not is_empty(params.entity_groupfs_store_dir):
+      parent_path = os.path.dirname(params.entity_groupfs_store_dir)
+      params.HdfsResource(parent_path,
+                          type="directory",
+                          action="create_on_execute",
+                          change_permissions_for_parents=True,
+                          owner=params.yarn_user,
+                          group=params.user_group,
+                          mode=0755
+                          )
+      params.HdfsResource(params.entity_groupfs_store_dir,
+                          type="directory",
+                          action="create_on_execute",
+                          owner=params.yarn_user,
+                          group=params.user_group,
+                          mode=params.entity_groupfs_store_dir_mode
+                          )
+    if not is_empty(params.entity_groupfs_active_dir):
+      parent_path = os.path.dirname(params.entity_groupfs_active_dir)
+      params.HdfsResource(parent_path,
+                          type="directory",
+                          action="create_on_execute",
+                          change_permissions_for_parents=True,
+                          owner=params.yarn_user,
+                          group=params.user_group,
+                          mode=0755
+                          )
+      params.HdfsResource(params.entity_groupfs_active_dir,
+                          type="directory",
+                          action="create_on_execute",
+                          owner=params.yarn_user,
+                          group=params.user_group,
+                          mode=params.entity_groupfs_active_dir_mode
+                          )
+    params.HdfsResource(None, action="execute")
 
   File(params.rm_nodes_exclude_path,
        owner=params.yarn_user,
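Each HdfsResource(...) call above with action="create_on_execute" only queues a directory request; the trailing HdfsResource(None, action="execute") flushes the queued requests against HDFS in one batch. The flush sits outside the two is_empty() guards, which lines up with the extra 'execute' assertion added to the existing 2.1 App Timeline Server test further down. A minimal sketch, not part of the commit, using a hypothetical FakeHdfsResource stand-in to show the queue-and-flush shape:

# Sketch only: FakeHdfsResource is a hypothetical stand-in that mimics the
# create_on_execute / execute pattern used by HdfsResource in yarn.py above.
class FakeHdfsResource(object):
    queued = []

    def __init__(self, path, action, **kwargs):
        if action == "create_on_execute":
            # defer the HDFS operation; nothing touches the filesystem yet
            FakeHdfsResource.queued.append((path, kwargs))
        elif action == "execute":
            # apply every queued operation in one batch, then clear the queue
            for queued_path, queued_kwargs in FakeHdfsResource.queued:
                print("mkdir %s mode=%s" % (queued_path, oct(queued_kwargs.get("mode", 0o755))))
            FakeHdfsResource.queued = []

FakeHdfsResource("/ats/done", action="create_on_execute", mode=0o700)
FakeHdfsResource("/ats/active", action="create_on_execute", mode=0o1777)
FakeHdfsResource(None, action="execute")  # single flush, mirroring the last call in the hunk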

http://git-wip-us.apache.org/repos/asf/ambari/blob/8769f418/ambari-server/src/main/resources/stacks/HDP/2.3/services/TEZ/configuration/tez-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/TEZ/configuration/tez-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/TEZ/configuration/tez-site.xml
index 37db69f..a80da09 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/TEZ/configuration/tez-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/TEZ/configuration/tez-site.xml
@@ -98,4 +98,9 @@
     </depends-on>
   </property>
 
+  <property>
+    <name>tez.history.logging.service.class</name>
+    <value>org.apache.tez.dag.history.logging.ats.ATSV15HistoryLoggingService</value>
+  </property>
+
 </configuration>
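On the Tez side, history logging is pointed at the ATS v1.5 logging service; this is meant to work together with the yarn-site properties added below (timeline-service version 1.5, the EntityGroupFSTimelineStore store class, and Tez's TimelineCachePluginImpl as the group-id plugin). A sketch, not part of the commit, collecting the settings from this diff that have to agree:

# Sketch only: ATS v1.5 settings from this commit that must stay consistent
# across tez-site and yarn-site (values copied from the diff hunks).
ats_v15_settings = {
    "tez-site": {
        "tez.history.logging.service.class":
            "org.apache.tez.dag.history.logging.ats.ATSV15HistoryLoggingService",
    },
    "yarn-site": {
        "yarn.timeline-service.version": "1.5",
        "yarn.timeline-service.store-class":
            "org.apache.hadoop.yarn.server.timeline.EntityGroupFSTimelineStore",
        "yarn.timeline-service.entity-group-fs-store.group-id-plugin-classes":
            "org.apache.tez.dag.history.logging.ats.TimelineCachePluginImpl",
    },
}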

http://git-wip-us.apache.org/repos/asf/ambari/blob/8769f418/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-site.xml
index c9e617b..8f2ecf9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-site.xml
@@ -62,5 +62,51 @@
       <empty-value-valid>true</empty-value-valid>
     </value-attributes>
   </property>
-
+  <!--ats v1.5 properties-->
+  <property>
+    <name>yarn.timeline-service.version</name>
+    <value>1.5</value>
+    <description>Timeline service version we're currently using.</description>
+  </property>
+  <property>
+    <name>yarn.timeline-service.store-class</name>
+    <value>org.apache.hadoop.yarn.server.timeline.EntityGroupFSTimelineStore</value>
+    <description>Main storage class for YARN timeline server.</description>
+  </property>
+  <property>
+    <name>yarn.timeline-service.entity-group-fs-store.active-dir</name>
+    <value>/ats/active/</value>
+    <description>DFS path to store active application's timeline data</description>
+  </property>
+  <property>
+    <name>yarn.timeline-service.entity-group-fs-store.done-dir</name>
+    <value>/ats/done/</value>
+    <description>DFS path to store done application's timeline data</description>
+  </property>
+  <property>
+    <name>yarn.timeline-service.entity-group-fs-store.group-id-plugin-classes</name>
+    <value>org.apache.tez.dag.history.logging.ats.TimelineCachePluginImpl</value>
+    <description>Plugins that can translate a timeline entity read request into a list of timeline cache ids, separated by commas. </description>
+  </property>
+  <!-- advanced ats v1.5 properties-->
+  <property>
+    <name>yarn.timeline-service.entity-group-fs-store.summary-store</name>
+    <!-- Use rolling leveldb, advanced -->
+    <value>org.apache.hadoop.yarn.server.timeline.RollingLevelDBTimelineStore</value>
+  </property>
+  <property>
+    <name>yarn.timeline-service.entity-group-fs-store.scan-interval-seconds</name>
+    <!-- Default is 60 seconds, advanced -->
+    <value>60</value>
+  </property>
+  <property>
+    <name>yarn.timeline-service.entity-group-fs-store.cleaner-interval-seconds</name>
+    <!-- 3600 is default, advanced -->
+    <value>3600</value>
+  </property>
+  <property>
+    <name>yarn.timeline-service.entity-group-fs-store.retain-seconds</name>
+    <!-- 7 days is default, advanced -->
+    <value>604800</value>
+  </property>
 </configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/8769f418/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py b/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
index 2c1184a..f0b1cb8 100644
--- a/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
+++ b/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
@@ -41,7 +41,6 @@ class TestAppTimelineServer(RMFTestCase):
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-
     self.assert_configure_default()
     self.assertNoMoreResources()
 
@@ -186,6 +185,19 @@ class TestAppTimelineServer(RMFTestCase):
                               recursive = True,
                               cd_access='a'
                               )
+    self.assertResourceCalled('HdfsResource', None,
+                              security_enabled = False,
+                              hadoop_bin_dir = '/usr/bin',
+                              keytab = UnknownConfigurationMock(),
+                              default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+                              dfs_type = '',
+                              hdfs_site = self.getConfig()['configurations']['hdfs-site'],
+                              kinit_path_local = '/usr/bin/kinit',
+                              principal_name = UnknownConfigurationMock(),
+                              user = 'hdfs',
+                              action = ['execute'],
+                              hadoop_conf_dir = '/etc/hadoop/conf',
+                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/yarn.exclude',
                               owner = 'yarn',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/8769f418/ambari-server/src/test/python/stacks/2.3/YARN/test_ats_1_5.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/YARN/test_ats_1_5.py b/ambari-server/src/test/python/stacks/2.3/YARN/test_ats_1_5.py
new file mode 100644
index 0000000..e94d960
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.3/YARN/test_ats_1_5.py
@@ -0,0 +1,303 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+import json
+import os
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+from resource_management.libraries.functions import version
+from resource_management.libraries.script.script import Script
+from resource_management.libraries import functions
+
+origin_exists = os.path.exists
+@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
+@patch.object(os.path, "exists", new=MagicMock(
+  side_effect=lambda *args: origin_exists(args[0])
+  if args[0][-2:] == "j2" else True))
+
+@patch.object(Script, "is_hdp_stack_greater_or_equal", new = MagicMock(return_value=False))
+@patch.object(functions, "get_hdp_version", new = MagicMock(return_value="2.0.0.0-1234"))
+class TestAts(RMFTestCase):
+  COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
+  STACK_VERSION = "2.3"
+
+  def test_configure_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/application_timeline_server.py",
+                       classname="ApplicationTimelineServer",
+                       command="configure",
+                       config_file="ats_1_5.json",
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES
+                       )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              recursive = True,
+                              cd_access = 'a',
+                              )
+    self.assertResourceCalled('Directory', '/var/run/hadoop-yarn/yarn',
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              recursive = True,
+                              cd_access = 'a',
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/yarn',
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              recursive = True,
+                              cd_access = 'a',
+                              )
+    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              recursive = True,
+                              cd_access = 'a',
+                              )
+    self.assertResourceCalled('Directory', '/var/run/hadoop-mapreduce/mapred',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              recursive = True,
+                              cd_access = 'a',
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              recursive = True,
+                              cd_access = 'a',
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-mapreduce/mapred',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              recursive = True,
+                              cd_access = 'a',
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn',
+                              owner = 'yarn',
+                              ignore_failures = True,
+                              recursive = True,
+                              cd_access = 'a',
+                              )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              mode = 0644,
+                              configuration_attributes = {u'final': {u'hadoop.proxyuser.hive.groups': u'true',
+                                                                     u'hadoop.proxyuser.oozie.hosts': u'true',
+                                                                     u'webinterface.private.actions': u'true'}},
+                              owner = 'hdfs',
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              mode = 0644,
+                              configuration_attributes = {u'final': {u'dfs.cluster.administrators': u'true',
+                                                                     u'dfs.support.append': u'true',
+                                                                     u'dfs.web.ugi': u'true'}},
+                              owner = 'hdfs',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              mode = 0644,
+                              configuration_attributes = {u'final': {u'mapred.healthChecker.script.path': u'true',
+                                                                     u'mapreduce.jobtracker.staging.root.dir': u'true'}},
+                              owner = 'yarn',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'yarn-site.xml',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              mode = 0644,
+                              configuration_attributes = {u'final': {u'yarn.nodemanager.container-executor.class': u'true',
+                                                                     u'yarn.nodemanager.disk-health-checker.min-healthy-disks': u'true',
+                                                                     u'yarn.nodemanager.local-dirs': u'true'}},
+                              owner = 'yarn',
+                              configurations = self.getConfig()['configurations']['yarn-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              mode = 0644,
+                              configuration_attributes = {u'final': {u'yarn.scheduler.capacity.node-locality-delay': u'true'}},
+                              owner = 'yarn',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/timeline',
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              recursive = True,
+                              cd_access = 'a',
+                              )
+    self.assertResourceCalled('HdfsResource', '/ats',
+                              security_enabled = False,
+                              hadoop_bin_dir = '/usr/bin',
+                              keytab = UnknownConfigurationMock(),
+                              default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+                              dfs_type = '',
+                              hdfs_site = self.getConfig()['configurations']['hdfs-site'],
+                              kinit_path_local = '/usr/bin/kinit',
+                              principal_name = UnknownConfigurationMock(),
+                              user = 'hdfs',
+                              change_permissions_for_parents = True,
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              hadoop_conf_dir = '/etc/hadoop/conf',
+                              type = 'directory',
+                              action = ['create_on_execute'],
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('HdfsResource', '/ats/done',
+                              security_enabled = False,
+                              hadoop_bin_dir = '/usr/bin',
+                              keytab = UnknownConfigurationMock(),
+                              default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+                              dfs_type = '',
+                              hdfs_site = self.getConfig()['configurations']['hdfs-site'],
+                              kinit_path_local = '/usr/bin/kinit',
+                              principal_name = UnknownConfigurationMock(),
+                              user = 'hdfs',
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              hadoop_conf_dir = '/etc/hadoop/conf',
+                              type = 'directory',
+                              action = ['create_on_execute'],
+                              mode = 0700,
+                              )
+    self.assertResourceCalled('HdfsResource', '/ats',
+                              security_enabled = False,
+                              hadoop_bin_dir = '/usr/bin',
+                              keytab = UnknownConfigurationMock(),
+                              default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+                              dfs_type = '',
+                              hdfs_site = self.getConfig()['configurations']['hdfs-site'],
+                              kinit_path_local = '/usr/bin/kinit',
+                              principal_name = UnknownConfigurationMock(),
+                              user = 'hdfs',
+                              change_permissions_for_parents = True,
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              hadoop_conf_dir = '/etc/hadoop/conf',
+                              type = 'directory',
+                              action = ['create_on_execute'],
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('HdfsResource', '/ats/active',
+                              security_enabled = False,
+                              hadoop_bin_dir = '/usr/bin',
+                              keytab = UnknownConfigurationMock(),
+                              dfs_type = '',
+                              default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+                              hdfs_site = self.getConfig()['configurations']['hdfs-site'],
+                              kinit_path_local = '/usr/bin/kinit',
+                              principal_name = UnknownConfigurationMock(),
+                              user = 'hdfs',
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              hadoop_conf_dir = '/etc/hadoop/conf',
+                              type = 'directory',
+                              action = ['create_on_execute'],
+                              mode = 01777,
+                              )
+    self.assertResourceCalled('HdfsResource', None,
+                              security_enabled = False,
+                              hadoop_bin_dir = '/usr/bin',
+                              keytab = UnknownConfigurationMock(),
+                              default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+                              dfs_type = '',
+                              hdfs_site = self.getConfig()['configurations']['hdfs-site'],
+                              kinit_path_local = '/usr/bin/kinit',
+                              principal_name = UnknownConfigurationMock(),
+                              user = 'hdfs',
+                              action = ['execute'],
+                              hadoop_conf_dir = '/etc/hadoop/conf',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn.exclude',
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
+                              content = Template('yarn.conf.j2'),
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('File', '/etc/security/limits.d/mapreduce.conf',
+                              content = Template('mapreduce.conf.j2'),
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/yarn-env.sh',
+                              content = InlineTemplate(self.getConfig()['configurations']['yarn-env']['content']),
+                              owner = 'yarn',
+                              group = 'hadoop',
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('File', '/usr/lib/hadoop-yarn/bin/container-executor',
+                              group = 'hadoop',
+                              mode = 02050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/container-executor.cfg',
+                              content = Template('container-executor.cfg.j2'),
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('Directory', '/cgroups_test/cpu',
+                              mode = 0755,
+                              group = 'hadoop',
+                              recursive = True,
+                              cd_access = 'a',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-env.sh',
+                              content = InlineTemplate(self.getConfig()['configurations']['mapred-env']['content']),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configuration_attributes = {u'final': {u'mapred.healthChecker.script.path': u'true',
+                                                                     u'mapreduce.jobtracker.staging.root.dir': u'true'}},
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configuration_attributes = {u'final': {u'yarn.scheduler.capacity.node-locality-delay': u'true'}},
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )

