ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From alejan...@apache.org
Subject ambari git commit: AMBARI-14561. Configuration changes of spark thrift server for Spark 1.6 (Jeff Zhang via alejandro)
Date Wed, 20 Jan 2016 00:45:30 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 2e6aaf58e -> f42a1a81c


AMBARI-14561. Configuration changes of spark thrift server for Spark 1.6 (Jeff Zhang via alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f42a1a81
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f42a1a81
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f42a1a81

Branch: refs/heads/trunk
Commit: f42a1a81c371fee1f9d830d0064d403b47082f88
Parents: 2e6aaf5
Author: Alejandro Fernandez <afernandez@hortonworks.com>
Authored: Tue Jan 19 16:45:00 2016 -0800
Committer: Alejandro Fernandez <afernandez@hortonworks.com>
Committed: Tue Jan 19 16:45:21 2016 -0800

----------------------------------------------------------------------
 .../libraries/functions/copy_tarball.py         |   5 +-
 .../package/scripts/job_history_server.py       |   8 +-
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |  12 ++
 .../1.2.0.2.2/package/scripts/setup_spark.py    |  18 +-
 .../1.2.0.2.2/package/scripts/spark_client.py   |   4 +-
 .../1.2.0.2.2/package/scripts/spark_service.py  |  22 ++-
 .../package/scripts/spark_thrift_server.py      |   8 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml |   6 +
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     |   6 +
 .../SPARK/configuration/spark-defaults.xml      |  53 ++++++
 .../spark-thrift-fairscheduler.xml              |  37 +++++
 .../configuration/spark-thrift-sparkconf.xml    | 164 +++++++++++++++++++
 .../stacks/HDP/2.4/services/SPARK/metainfo.xml  |  13 ++
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  |  43 +++++
 14 files changed, 385 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f42a1a81/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
index c3ffc7b..b4c8bc8 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
@@ -53,7 +53,10 @@ TARBALL_MAP = {
                   "/hdp/apps/{0}/sqoop/sqoop.tar.gz".format(STACK_VERSION_PATTERN)),
 
     "mapreduce": ("/usr/hdp/{0}/hadoop/mapreduce.tar.gz".format(STACK_VERSION_PATTERN),
-                  "/hdp/apps/{0}/mapreduce/mapreduce.tar.gz".format(STACK_VERSION_PATTERN))
+                  "/hdp/apps/{0}/mapreduce/mapreduce.tar.gz".format(STACK_VERSION_PATTERN)),
+
+    "spark": ("/usr/hdp/{0}/spark/lib/spark-hdp-assembly.jar".format(STACK_VERSION_PATTERN),
+                  "/hdp/apps/{0}/spark/spark-hdp-assembly.jar".format(STACK_VERSION_PATTERN))
   }
 }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f42a1a81/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
index 943664a..24b86e3 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
@@ -41,24 +41,24 @@ class JobHistoryServer(Script):
     
     self.install_packages(env)
     
-  def configure(self, env):
+  def configure(self, env, upgrade_type=None):
     import params
     env.set_params(params)
     
-    setup_spark(env, 'server', action = 'config')
+    setup_spark(env, 'server', upgrade_type=upgrade_type, action = 'config')
     
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)
     
     self.configure(env)
-    spark_service('jobhistoryserver', action='start')
+    spark_service('jobhistoryserver', upgrade_type=upgrade_type, action='start')
 
   def stop(self, env, upgrade_type=None):
     import params
     env.set_params(params)
     
-    spark_service('jobhistoryserver', action='stop')
+    spark_service('jobhistoryserver', upgrade_type=upgrade_type, action='stop')
 
   def status(self, env):
     import status_params

http://git-wip-us.apache.org/repos/asf/ambari/blob/f42a1a81/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index 68240bd..bc3e898 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -79,12 +79,15 @@ java_home = config['hostLevelParams']['java_home']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
+user_group = config['configurations']['cluster-env']['user_group']
 
 spark_user = status_params.spark_user
 hive_user = status_params.hive_user
 spark_group = status_params.spark_group
 user_group = status_params.user_group
 spark_hdfs_user_dir = format("/user/{spark_user}")
+spark_history_dir = 'hdfs:///spark-history'
+
 spark_history_server_pid_file = status_params.spark_history_server_pid_file
 spark_thrift_server_pid_file = status_params.spark_thrift_server_pid_file
 
@@ -153,6 +156,12 @@ if security_enabled:
 # thrift server support - available on HDP 2.3 or higher
 spark_thrift_sparkconf = None
 spark_thrift_cmd_opts_properties = ''
+spark_thrift_fairscheduler_content = None
+spark_thrift_master = "yarn-client"
+if 'nm_hosts' in config['clusterHostInfo'] and len(config['clusterHostInfo']['nm_hosts']) == 1:
+  # use local mode when there's only one nodemanager
+  spark_thrift_master = "local[4]"
+
 if has_spark_thriftserver and 'spark-thrift-sparkconf' in config['configurations']:
   spark_thrift_sparkconf = config['configurations']['spark-thrift-sparkconf']
   spark_thrift_cmd_opts_properties = config['configurations']['spark-env']['spark_thrift_cmd_opts']
@@ -164,6 +173,9 @@ if has_spark_thriftserver and 'spark-thrift-sparkconf' in config['configurations
     })
     spark_hive_properties.update(config['configurations']['spark-hive-site-override'])
 
+  if 'spark-thrift-fairscheduler' in config['configurations'] and 'fairscheduler_content' in config['configurations']['spark-thrift-fairscheduler']:
+    spark_thrift_fairscheduler_content = config['configurations']['spark-thrift-fairscheduler']['fairscheduler_content']
+
 default_fs = config['configurations']['core-site']['fs.defaultFS']
 hdfs_site = config['configurations']['hdfs-site']
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f42a1a81/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
index 4b38572..14c78f6 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
@@ -26,9 +26,10 @@ from resource_management import *
 from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.core.logger import Logger
 from resource_management.core import shell
+from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions.version import format_hdp_stack_version
 
-
-def setup_spark(env, type, action = None):
+def setup_spark(env, type, upgrade_type = None, action = None):
   import params
 
   Directory([params.spark_pid_dir, params.spark_log_dir],
@@ -89,3 +90,16 @@ def setup_spark(env, type, action = None):
       group = params.user_group,
       key_value_delimiter = " ",
     )
+
+  effective_version = params.version if upgrade_type is not None else params.hdp_stack_version
+  if effective_version:
+    effective_version = format_hdp_stack_version(effective_version)
+
+  if params.spark_thrift_fairscheduler_content and effective_version and compare_versions(effective_version, '2.4.0.0') >= 0:
+    # create spark-thrift-fairscheduler.xml
+    File(os.path.join(params.spark_conf,"spark-thrift-fairscheduler.xml"),
+      owner=params.spark_user,
+      group=params.spark_group,
+      mode=0755,
+      content=InlineTemplate(params.spark_thrift_fairscheduler_content)
+    )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/f42a1a81/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_client.py
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_client.py
index d4b8f85..fe8cfc4 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_client.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_client.py
@@ -34,11 +34,11 @@ class SparkClient(Script):
     self.install_packages(env)
     self.configure(env)
 
-  def configure(self, env):
+  def configure(self, env, upgrade_type=None):
     import params
     env.set_params(params)
     
-    setup_spark(env, 'client', action = 'config')
+    setup_spark(env, 'client', upgrade_type=upgrade_type, action = 'config')
 
   def status(self, env):
     raise ClientComponentHasNoStatus()

http://git-wip-us.apache.org/repos/asf/ambari/blob/f42a1a81/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
index d4c6732..a428209 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
@@ -25,11 +25,31 @@ from resource_management.libraries.functions.version import compare_versions
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import File, Execute
+from resource_management.libraries.functions.version import format_hdp_stack_version
 
-def spark_service(name, action):
+def spark_service(name, upgrade_type=None, action=None):
   import params
 
   if action == 'start':
+
+    effective_version = params.version if upgrade_type is not None else params.hdp_stack_version
+    if effective_version:
+      effective_version = format_hdp_stack_version(effective_version)
+
+    if effective_version and compare_versions(effective_version, '2.4.0.0') >= 0:
+      # copy spark-hdp-assembly.jar to hdfs
+      copy_to_hdfs("spark", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
+      # create spark history directory
+      params.HdfsResource(params.spark_history_dir,
+                          type="directory",
+                          action="create_on_execute",
+                          owner=params.spark_user,
+                          group=params.user_group,
+                          mode=0777,
+                          recursive_chmod=True
+                          )
+      params.HdfsResource(None, action="execute")
+
     if params.security_enabled:
       spark_kinit_cmd = format("{kinit_path_local} -kt {spark_kerberos_keytab} {spark_principal}; ")
       Execute(spark_kinit_cmd, user=params.spark_user)

http://git-wip-us.apache.org/repos/asf/ambari/blob/f42a1a81/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_thrift_server.py
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_thrift_server.py
index d2145e2..86b4010 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_thrift_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_thrift_server.py
@@ -41,22 +41,22 @@ class SparkThriftServer(Script):
 
     self.install_packages(env)
 
-  def configure(self, env):
+  def configure(self, env ,upgrade_type=None):
     import params
     env.set_params(params)
-    setup_spark(env, 'server', action = 'config')
+    setup_spark(env, 'server', upgrade_type = upgrade_type, action = 'config')
 
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     self.configure(env)
-    spark_service('sparkthriftserver',action='start')
+    spark_service('sparkthriftserver', upgrade_type=upgrade_type, action='start')
 
   def stop(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    spark_service('sparkthriftserver',action='stop')
+    spark_service('sparkthriftserver', upgrade_type=upgrade_type, action='stop')
 
   def status(self, env):
     import status_params

http://git-wip-us.apache.org/repos/asf/ambari/blob/f42a1a81/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
index a3880b3..45bf744 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
@@ -854,11 +854,17 @@
 
     <service name="SPARK">
       <component name="SPARK_JOBHISTORYSERVER">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_4_0_0_spark_jobhistoryserver"/>
+        </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task"/>
         </upgrade>
       </component>
       <component name="SPARK_THRIFTSERVER">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_4_0_0_spark_thriftserver"/>
+        </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task"/>
         </upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f42a1a81/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
index e579ee0..682815f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
@@ -642,11 +642,17 @@
 
     <service name="SPARK">
       <component name="SPARK_JOBHISTORYSERVER">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_4_0_0_spark_jobhistoryserver"/>
+        </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>
       </component>
       <component name="SPARK_THRIFTSERVER">
+        <pre-upgrade>
+          <task xsi:type="configure" id="hdp_2_4_0_0_spark_thriftserver"/>
+        </pre-upgrade>
         <upgrade>
           <task xsi:type="restart-task"/>
         </upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f42a1a81/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
new file mode 100644
index 0000000..d8af790
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
@@ -0,0 +1,53 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration supports_final="true">
+    <property>
+        <name>spark.yarn.services</name>
+        <deleted>true</deleted>
+    </property>
+    <property>
+        <name>spark.history.provider</name>
+        <value>org.apache.spark.deploy.history.FsHistoryProvider</value>
+        <description>Name of history provider class</description>
+    </property>
+    <property>
+        <name>spark.history.fs.logDirectory</name>
+        <value>{{spark_history_dir}}</value>
+        <description>
+            Base directory for history spark application log.
+        </description>
+    </property>
+    <property>
+        <name>spark.eventLog.enabled</name>
+        <value>true</value>
+        <description>
+            Whether to log Spark events, useful for reconstructing the Web UI after the application has finished.
+        </description>
+    </property>
+    <property>
+        <name>spark.eventLog.dir</name>
+        <value>{{spark_history_dir}}</value>
+        <description>
+            Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
+        </description>
+    </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/f42a1a81/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-fairscheduler.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-fairscheduler.xml
b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-fairscheduler.xml
new file mode 100644
index 0000000..2dda4bb
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-fairscheduler.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<configuration supports_final="true">
+    <property>
+        <name>fairscheduler_content</name>
+        <description>This is the jinja template for the spark-thrift-fairscheduler.xml file.</description>
+        <value>&lt;?xml version="1.0"?&gt;
+            &lt;allocations&gt;
+            &lt;pool name="default"&gt;
+            &lt;schedulingMode&gt;FAIR&lt;/schedulingMode&gt;
+            &lt;weight&gt;1&lt;/weight&gt;
+            &lt;minShare&gt;2&lt;/minShare&gt;
+            &lt;/pool&gt;
+            &lt;/allocations&gt;
+        </value>
+        <value-attributes>
+            <type>content</type>
+        </value-attributes>
+    </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/f42a1a81/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
new file mode 100644
index 0000000..3b13496
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
@@ -0,0 +1,164 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration supports_final="true">
+
+  <property>
+    <name>spark.yarn.executor.memoryOverhead</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.yarn.driver.memoryOverhead</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.yarn.scheduler.heartbeat.interval-ms</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.yarn.max.executor.failures</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.yarn.containerLauncherMaxThreads</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.yarn.submit.file.replication</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.yarn.preserve.staging.files</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.yarn.max.executor.failures</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.yarn.services</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.history.provider</name>
+    <value>org.apache.spark.deploy.history.FsHistoryProvider</value>
+    <description>Name of history provider class</description>
+  </property>
+
+  <property>
+    <name>spark.history.fs.logDirectory</name>
+    <value>{{spark_history_dir}}</value>
+    <description>
+      Base directory for history spark application log.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.eventLog.enabled</name>
+    <value>true</value>
+    <description>
+      Whether to log Spark events, useful for reconstructing the Web UI after the application has finished.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.eventLog.dir</name>
+    <value>{{spark_history_dir}}</value>
+    <description>
+      Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.master</name>
+    <value>{{spark_thrift_master}}</value>
+    <description>
+      The deploy mode of the Spark application; by default it is yarn-client for the
+      thrift server, but local mode is used when there's only one NodeManager.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.scheduler.allocation.file</name>
+    <value>{{spark_conf}}/spark-thrift-fairscheduler.xml</value>
+    <description>
+      Scheduler configuration file for thriftserver.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.scheduler.mode</name>
+    <value>FAIR</value>
+    <description>
+      The scheduling mode between jobs submitted to the same SparkContext.
+    </description>
+  </property>
+  
+  <property>
+    <name>spark.shuffle.service.enabled</name>
+    <value>true</value>
+    <description>
+      Enables the external shuffle service.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.dynamicAllocation.enabled</name>
+    <value>true</value>
+    <description>
+      Whether to use dynamic resource allocation, which scales the number of executors registered
with this application up and down based on the workload.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.executor.instances</name>
+    <value>2</value>
+    <description>
+      The number of executors.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.yarn.am.memory</name>
+    <value>512m</value>
+    <description>
+      Amount of memory to use for the YARN Application Master in client mode.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.executor.memory</name>
+    <value>1g</value>
+    <description>
+      Amount of memory to use per executor process.
+    </description>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f42a1a81/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/metainfo.xml
index 6c0e393..ef08b3a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/metainfo.xml
@@ -24,6 +24,19 @@
         <service>
           <name>SPARK</name>
           <version>1.6.0.2.4</version>
+          <configuration-dependencies>
+            <config-type>spark-defaults</config-type>
+            <config-type>spark-env</config-type>
+            <config-type>spark-log4j-properties</config-type>
+            <config-type>spark-metrics-properties</config-type>
+            <config-type>spark-thrift-sparkconf</config-type>
+            <config-type>spark-hive-site-override</config-type>
+            <config-type>spark-thrift-fairscheduler</config-type>
+          </configuration-dependencies>
+          <requiredServices>
+            <service>HDFS</service>
+            <service>YARN</service>
+          </requiredServices>
         </service>
     </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f42a1a81/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
index d5e4f78..6b41c84 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
@@ -98,6 +98,49 @@
         </changes>
       </component>
     </service>
+
+    <service name="SPARK">
+      <component name="SPARK_JOBHISTORYSERVER">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_4_0_0_spark_jobhistoryserver">
+            <type>spark-defaults</type>
+            <transfer operation="delete" delete-key="spark.yarn.services" />
+            <set key="spark.history.provider" value="org.apache.spark.deploy.history.FsHistoryProvider"/>
+            <set key="spark.history.fs.logDirectory" value="{{spark_history_dir}}"/>
+            <set key="spark.eventLog.enabled" value="true"/>
+            <set key="spark.eventLog.dir" value="{{spark_history_dir}}"/>
+          </definition>
+        </changes>
+      </component>
+      <component name="SPARK_THRIFTSERVER">
+        <changes>
+          <definition xsi:type="configure" id="hdp_2_4_0_0_spark_thriftserver">
+            <type>spark-thrift-sparkconf</type>
+            <transfer operation="delete" delete-key="spark.yarn.executor.memoryOverhead" />
+            <transfer operation="delete" delete-key="spark.yarn.driver.memoryOverhead" />
+            <transfer operation="delete" delete-key="spark.yarn.scheduler.heartbeat.interval-ms" />
+            <transfer operation="delete" delete-key="spark.yarn.max.executor.failures" />
+            <transfer operation="delete" delete-key="spark.yarn.containerLauncherMaxThreads" />
+            <transfer operation="delete" delete-key="spark.yarn.submit.file.replication" />
+            <transfer operation="delete" delete-key="spark.yarn.preserve.staging.files" />
+            <transfer operation="delete" delete-key="spark.yarn.max.executor.failures" />
+            <set key="spark.history.provider" value="org.apache.spark.deploy.history.FsHistoryProvider"/>
+            <set key="spark.history.fs.logDirectory" value="{{spark_history_dir}}"/>
+            <set key="spark.eventLog.enabled" value="true"/>
+            <set key="spark.eventLog.dir" value="{{spark_history_dir}}"/>
+            <set key="spark.master" value="{{spark_thrift_master}}"/>
+            <set key="spark.scheduler.allocation.file" value="{{spark_conf}}/spark-thrift-fairscheduler.xml"/>
+            <set key="spark.scheduler.mode" value="FAIR"/>
+            <set key="spark.shuffle.service.enabled" value="true"/>
+            <set key="spark.dynamicAllocation.enabled" value="true"/>
+            <set key="spark.executor.instances" value="2"/>
+            <set key="spark.yarn.am.memory" value="512m"/>
+            <set key="spark.executor.memory" value="1g"/>
+          </definition>
+        </changes>
+      </component>
+    </service>
   </services>
 
+
 </upgrade-config-changes>


Mime
View raw message