From: alejandro@apache.org
To: commits@ambari.apache.org
Message-Id: <4b9282fd94c4453abf241be38054e239@git.apache.org>
Subject: ambari git commit: AMBARI-13513. Add cmd option config to spark thrift server (Judy Nash via alejandro)
Date: Mon, 26 Oct 2015 20:56:29 +0000 (UTC)

Repository: ambari
Updated Branches:
  refs/heads/branch-2.1 b74b2dc05 -> 15c0ce192


AMBARI-13513. Add cmd option config to spark thrift server (Judy Nash via alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/15c0ce19
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/15c0ce19
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/15c0ce19

Branch: refs/heads/branch-2.1
Commit: 15c0ce1929fc30f55552836ad02f174b1ee8ba6e
Parents: b74b2dc
Author: Alejandro Fernandez
Authored: Mon Oct 26 13:56:14 2015 -0700
Committer: Alejandro Fernandez
Committed: Mon Oct 26 13:56:14 2015 -0700

----------------------------------------------------------------------
 .../SPARK/1.2.0.2.2/configuration/spark-env.xml               | 6 ++++++
 .../common-services/SPARK/1.2.0.2.2/package/scripts/params.py | 3 +++
 .../SPARK/1.2.0.2.2/package/scripts/spark_service.py          | 2 +-
 .../test/python/stacks/2.3/SPARK/test_spark_thrift_server.py  | 2 +-
 .../src/test/python/stacks/2.3/configs/spark_default.json     | 5 +++--
 5 files changed, 14 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/15c0ce19/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-env.xml b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-env.xml
index 79e3b52..25fcc6b 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-env.xml
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/configuration/spark-env.xml
@@ -100,4 +100,10 @@
 fi
     </value>
   </property>
+  <property>
+    <name>spark_thrift_cmd_opts</name>
+    <description>additional spark thrift server commandline options</description>
+    <value> </value>
+  </property>
+
 </configuration>
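
The new spark-env property reaches the service scripts through the "configurations" dictionary of the command JSON that the Ambari server delivers to the agent. A minimal sketch of that lookup, using a hand-built dictionary in place of the real command JSON (the .get() fallback shown here is an illustrative safety net, not the code from this commit):

# Sketch: how spark_thrift_cmd_opts surfaces in the config dict that
# params.py reads. The dictionary below is illustrative, not real agent input.
config = {
    'configurations': {
        'spark-env': {
            'spark_user': 'spark',
            'spark_thrift_cmd_opts': '--driver-memory 1g',  # operator-supplied
        }
    }
}

# params.py in the diff below does a direct key lookup; .get() with an empty
# default is a defensive variant for illustration only.
spark_thrift_cmd_opts_properties = \
    config['configurations']['spark-env'].get('spark_thrift_cmd_opts', '')
print(spark_thrift_cmd_opts_properties)  # --driver-memory 1g
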
http://git-wip-us.apache.org/repos/asf/ambari/blob/15c0ce19/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index be81f59..50b241f 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -161,9 +161,12 @@ if security_enabled:
 
 # thrift server support - available on HDP 2.3 or higher
 spark_thrift_sparkconf = None
+spark_thrift_cmd_opts_properties = ''
 if version and compare_versions(format_hdp_stack_version(version), '2.3.2.0') >= 0 \
     and 'spark-thrift-sparkconf' in config['configurations']:
   spark_thrift_sparkconf = config['configurations']['spark-thrift-sparkconf']
+  spark_thrift_cmd_opts_properties = config['configurations']['spark-env']['spark_thrift_cmd_opts']
+
 if is_hive_installed:
   spark_hive_properties.update(config['configurations']['spark-hive-site-override'])


http://git-wip-us.apache.org/repos/asf/ambari/blob/15c0ce19/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
index 68a395b..b274c61 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
@@ -51,7 +51,7 @@ def spark_service(name, action):
   elif name == 'sparkthriftserver':
     thriftserver_no_op_test = format(
       'ls {spark_thrift_server_pid_file} >/dev/null 2>&1 && ps -p `cat {spark_thrift_server_pid_file}` >/dev/null 2>&1')
-    Execute(format('{spark_thrift_server_start} --properties-file {spark_thrift_server_conf_file}'),
+    Execute(format('{spark_thrift_server_start} --properties-file {spark_thrift_server_conf_file} {spark_thrift_cmd_opts_properties}'),
             user=params.spark_user,
             environment={'JAVA_HOME': params.java_home},
             not_if=thriftserver_no_op_test
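
The effect of the one-line Execute() change is easiest to see with the values asserted in the test below. A minimal sketch, using plain str.format() in place of resource_management's format() helper (which interpolates the same names from the caller's scope):

# Sketch: the start command Execute() runs after this commit. The paths are
# the ones asserted in test_spark_thrift_server.py; str.format() stands in
# for Ambari's format() helper.
spark_thrift_server_start = '/usr/hdp/current/spark-client/sbin/start-thriftserver.sh'
spark_thrift_server_conf_file = '/usr/hdp/current/spark-client/conf/spark-thrift-sparkconf.conf'
spark_thrift_cmd_opts_properties = '--driver-memory 1g'

cmd = '{0} --properties-file {1} {2}'.format(
    spark_thrift_server_start,
    spark_thrift_server_conf_file,
    spark_thrift_cmd_opts_properties)
print(cmd)
# /usr/hdp/current/spark-client/sbin/start-thriftserver.sh --properties-file
#   /usr/hdp/current/spark-client/conf/spark-thrift-sparkconf.conf --driver-memory 1g

With the property left at its default (empty) value, the command only gains a trailing space, so existing deployments keep the old behavior.
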
http://git-wip-us.apache.org/repos/asf/ambari/blob/15c0ce19/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
index f5f12f2..d6ac468 100644
--- a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
+++ b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
@@ -53,7 +53,7 @@ class TestSparkThriftServer(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
     self.assert_configure_default()
-    self.assertResourceCalled('Execute', '/usr/hdp/current/spark-client/sbin/start-thriftserver.sh --properties-file /usr/hdp/current/spark-client/conf/spark-thrift-sparkconf.conf',
+    self.assertResourceCalled('Execute', '/usr/hdp/current/spark-client/sbin/start-thriftserver.sh --properties-file /usr/hdp/current/spark-client/conf/spark-thrift-sparkconf.conf --driver-memory 1g',
         environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = 'ls /var/run/spark/spark-spark-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid >/dev/null 2>&1 && ps -p `cat /var/run/spark/spark-spark-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid` >/dev/null 2>&1',
         user = 'spark',


http://git-wip-us.apache.org/repos/asf/ambari/blob/15c0ce19/ambari-server/src/test/python/stacks/2.3/configs/spark_default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/configs/spark_default.json b/ambari-server/src/test/python/stacks/2.3/configs/spark_default.json
index 9f3fb90..84cd31f 100644
--- a/ambari-server/src/test/python/stacks/2.3/configs/spark_default.json
+++ b/ambari-server/src/test/python/stacks/2.3/configs/spark_default.json
@@ -21,7 +21,7 @@
     "serviceName": "SLIDER",
     "role": "SLIDER",
     "commandParams": {
-        "version": "2.3.1.0-2067",
+        "version": "2.3.2.0-2067",
         "command_timeout": "300",
         "service_package_folder": "OOZIE",
         "script_type": "PYTHON",
@@ -160,7 +160,8 @@
         "spark_pid_dir": "/var/run/spark",
         "spark_log_dir": "/var/log/spark",
         "spark_group": "spark",
-        "spark_user": "spark"
+        "spark_user": "spark",
+        "spark_thrift_cmd_opts": "--driver-memory 1g"
     },
     "spark-metrics-properties": {
         "content": "\n# syntax: [instance].sink|source.[name].[options]=[value]\n\n# This file configures Spark's internal metrics system. The metrics system is\n# divided into instances which correspond to internal components.\n# Each instance can be configured to report its metrics to one or more sinks.\n# Accepted values for [instance] are \"master\", \"worker\", \"executor\", \"driver\",\n# and \"applications\". A wild card \"*\" can be used as an instance name, in\n# which case all instances will inherit the supplied property.\n#\n# Within an instance, a \"source\" specifies a particular set of grouped metrics.\n# there are two kinds of sources:\n# 1. Spark internal sources, like MasterSource, WorkerSource, etc, which will\n# collect a Spark component's internal state. Each instance is paired with a\n# Spark source that is added automatically.\n# 2. Common sources, like JvmSource, which will collect low level state.\n# These can be added through configuration options and are then loaded\n# using reflection.\n#\n# A \"sink\" specifies where metrics are delivered to. Each instance can be\n# assigned one or more sinks.\n#\n# The sink|source field specifies whether the property relates to a sink or\n# source.\n#\n# The [name] field specifies the name of source or sink.\n#\n# The [options] field is the specific property of this source or sink. The\n# source or sink is responsible for parsing this property.\n#\n# Notes:\n# 1. To add a new sink, set the \"class\" option to a fully qualified class\n# name (see examples below).\n# 2. Some sinks involve a polling period. The minimum allowed polling period\n# is 1 second.\n# 3. Wild card properties can be overridden by more specific properties.\n# For example, master.sink.console.period takes precedence over\n# *.sink.console.period.\n# 4. A metrics specific configuration\n# \"spark.metrics.conf=${SPARK_HOME}/conf/metrics.properties\" should be\n# added to Java properties using -Dspark.metrics.conf=xxx if you want to\n# customize metrics system. You can also put the file in ${SPARK_HOME}/conf\n# and it will be loaded automatically.\n# 5. MetricsServlet is added by default as a sink in master, worker and client\n# driver, you can send http request \"/metrics/json\" to get a snapshot of all the\n# registered metrics in json format. For master, requests \"/metrics/master/json\" and\n# \"/metrics/applications/json\" can be sent seperately to get metrics snapshot of\n# instance master and applications. MetricsServlet may not be configured by self.\n#\n\n## List of available sinks and their properties.\n\n# org.apache.spark.metrics.sink.ConsoleSink\n# Name: Default: Description:\n# period 10 Poll period\n# unit seconds Units of poll period\n\n# org.apache.spark.metrics.sink.CSVSink\n# Name: Default: Description:\n# period 10 Poll period\n# unit seconds Units of poll period\n# directory /tmp Where to store CSV files\n\n# org.apache.spark.metrics.sink.GangliaSink\n# Name: Default: Description:\n# host NONE Hostname or multicast group of Ganglia server\n# port NONE Port of Ganglia server(s)\n# period 10 Poll period\n# unit seconds Units of poll period\n# ttl 1 TTL of messages sent by Ganglia\n# mode multicast Ganglia network mode ('unicast' or 'multicast')\n\n# org.apache.spark.metrics.sink.JmxSink\n\n# org.apache.spark.metrics.sink.MetricsServlet\n# Name: Default: Description:\n# path VARIES* Path prefix from the web server root\n# sample false Whether to show entire set of samples for histograms ('false' or 'true')\n#\n# * Default path is /metrics/json for all instances except the master. The master has two paths:\n# /metrics/aplications/json # App information\n# /metrics/master/json # Master information\n\n# org.apache.spark.metrics.sink.GraphiteSink\n# Name: Default: Description:\n# host NONE Hostname of Graphite server\n# port NONE Port of Graphite server\n# period 10 Poll period\n# unit seconds Units of poll period\n# prefix EMPTY STRING Prefix to prepend to metric name \n\n## Examples\n# Enable JmxSink for all instances by class name\n#*.sink.jmx.class=org.apache.spark.metrics.sink.JmxSink\n\n# Enable ConsoleSink for all instances by class name\n#*.sink.console.class=org.apache.spark.metrics.sink.ConsoleSink\n\n# Polling period for ConsoleSink\n#*.sink.console.period=10\n\n#*.sink.console.unit=seconds\n\n# Master instance overlap polling period\n#master.sink.console.period=15\n\n#master.sink.console.unit=seconds\n\n# Enable CsvSink for all instances\n#*.sink.csv.class=org.apache.spark.metrics.sink.CsvSink\n\n# Polling period for CsvSink\n#*.sink.csv.period=1\n\n#*.sink.csv.unit=minutes\n\n# Polling directory for CsvSink\n#*.sink.csv.directory=/tmp/\n\n# Worker instance overlap polling period\n#worker.sink.csv.period=10\n\n#worker.sink.csv.unit=minutes\n\n# Enable jvm source for instance master, worker, driver and executor\n#master.source.jvm.class=org.apache.spark.metrics.source.JvmSource\n\n#worker.source.jvm.class=org.apache.spark.metrics.source.JvmSource\n\n#driver.source.jvm.class=org.apache.spark.metrics.source.JvmSource\n\n#executor.source.jvm.class=org.apache.spark.metrics.source.JvmSource"
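
Note the "version" bump in spark_default.json from 2.3.1.0-2067 to 2.3.2.0-2067: params.py only reads spark_thrift_cmd_opts when the stack version compares >= 2.3.2.0, so the test config has to clear that gate. A minimal sketch of the comparison, with simplified stand-ins for the resource_management helpers format_hdp_stack_version and compare_versions:

# Simplified stand-ins for the real helpers; assumption: the stack version is
# the dotted prefix before the build number, compared component-wise.
def format_hdp_stack_version(version):
    return version.split('-')[0]           # '2.3.2.0-2067' -> '2.3.2.0'

def compare_versions(a, b):
    pa = [int(x) for x in a.split('.')]
    pb = [int(x) for x in b.split('.')]
    return (pa > pb) - (pa < pb)           # -1, 0, or 1

for version in ('2.3.1.0-2067', '2.3.2.0-2067'):
    gated_in = compare_versions(format_hdp_stack_version(version), '2.3.2.0') >= 0
    print(version, '->', 'spark_thrift_cmd_opts read' if gated_in else 'gate not cleared')

With the old test version the new config would never be read and the updated assertResourceCalled() string above could not match.
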