ambari-commits mailing list archives

From nc...@apache.org
Subject [35/50] [abbrv] ambari git commit: AMBARI-19834 HDP 3.0 support for Spark with configs, kerberos, widgets, metrics, quicklinks, and themes (dsen)
Date Thu, 23 Mar 2017 13:24:27 GMT
AMBARI-19834 HDP 3.0 support for Spark with configs, kerberos, widgets, metrics, quicklinks, and themes (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1aba730c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1aba730c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1aba730c

Branch: refs/heads/branch-feature-AMBARI-12556
Commit: 1aba730c672ca6728e676ca259ca6ddfb1ac5ae3
Parents: 075cecb
Author: Dmytro Sen <dsen@apache.org>
Authored: Wed Mar 22 18:32:06 2017 +0200
Committer: Dmytro Sen <dsen@apache.org>
Committed: Wed Mar 22 18:32:06 2017 +0200

----------------------------------------------------------------------
 .../common-services/SPARK/2.2.0/alerts.json     |  32 +++
 .../SPARK/2.2.0/configuration/livy-conf.xml     | 114 ++++++++
 .../SPARK/2.2.0/configuration/livy-env.xml      |  97 +++++++
 .../configuration/livy-log4j-properties.xml     |  42 +++
 .../configuration/livy-spark-blacklist.xml      |  52 ++++
 .../2.2.0/configuration/spark-defaults.xml      | 115 ++++++++
 .../SPARK/2.2.0/configuration/spark-env.xml     | 147 ++++++++++
 .../configuration/spark-hive-site-override.xml  |  59 ++++
 .../configuration/spark-log4j-properties.xml    |  46 +++
 .../configuration/spark-logsearch-conf.xml      |  98 +++++++
 .../configuration/spark-metrics-properties.xml  | 165 +++++++++++
 .../spark-thrift-fairscheduler.xml              |  36 +++
 .../configuration/spark-thrift-sparkconf.xml    | 160 +++++++++++
 .../common-services/SPARK/2.2.0/kerberos.json   | 126 +++++++++
 .../common-services/SPARK/2.2.0/metainfo.xml    | 282 +++++++++++++++++++
 .../SPARK/2.2.0/quicklinks/quicklinks.json      |  27 ++
 .../SPARK/2.2.0/scripts/job_history_server.py   | 106 +++++++
 .../SPARK/2.2.0/scripts/livy_server.py          | 149 ++++++++++
 .../SPARK/2.2.0/scripts/livy_service.py         |  48 ++++
 .../SPARK/2.2.0/scripts/params.py               | 266 +++++++++++++++++
 .../SPARK/2.2.0/scripts/service_check.py        |  62 ++++
 .../SPARK/2.2.0/scripts/setup_livy.py           |  88 ++++++
 .../SPARK/2.2.0/scripts/setup_spark.py          | 116 ++++++++
 .../SPARK/2.2.0/scripts/spark_client.py         |  60 ++++
 .../SPARK/2.2.0/scripts/spark_service.py        | 145 ++++++++++
 .../SPARK/2.2.0/scripts/spark_thrift_server.py  |  89 ++++++
 .../SPARK/2.2.0/scripts/status_params.py        |  45 +++
 .../stacks/HDP/3.0/services/SPARK/metainfo.xml  |  32 +++
 28 files changed, 2804 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/alerts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/alerts.json b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/alerts.json
new file mode 100644
index 0000000..3ccae35
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/alerts.json
@@ -0,0 +1,32 @@
+{
+  "SPARK": {
+    "service": [],
+    "SPARK_JOBHISTORYSERVER": [
+      {
+        "name": "SPARK_JOBHISTORYSERVER_PROCESS",
+        "label": "Spark History Server",
+        "description": "This host-level alert is triggered if the Spark History Server cannot be determined to be up.",
+        "interval": 1,
+        "scope": "HOST",
+        "source": {
+          "type": "PORT",
+          "uri": "{{spark-defaults/spark.history.ui.port}}",
+          "default_port": 18081,
+          "reporting": {
+            "ok": {
+              "text": "TCP OK - {0:.3f}s response on port {1}"
+            },
+            "warning": {
+              "text": "TCP OK - {0:.3f}s response on port {1}",
+              "value": 1.5
+            },
+            "critical": {
+              "text": "Connection failed: {0} to {1}:{2}",
+              "value": 5
+            }
+          }
+        }
+      }
+    ]
+  }
+}
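
A minimal sketch of how a PORT-type alert such as the one above can be evaluated: open a TCP connection, time it, and map the elapsed time onto the OK/WARNING/CRITICAL thresholds from the "reporting" block. The host name and helper function are illustrative, not Ambari's actual alert runner.

    import socket
    import time

    def check_history_server(host, port=18081, warn_s=1.5, crit_s=5.0):
        """Mirror the SPARK_JOBHISTORYSERVER_PROCESS alert semantics."""
        start = time.time()
        try:
            with socket.create_connection((host, port), timeout=crit_s):
                elapsed = time.time() - start
        except OSError as exc:
            return "CRITICAL", "Connection failed: {0} to {1}:{2}".format(exc, host, port)
        state = "WARNING" if elapsed >= warn_s else "OK"
        return state, "TCP OK - {0:.3f}s response on port {1}".format(elapsed, port)

    print(check_history_server("history-server.example.com"))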

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/livy-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/livy-conf.xml b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/livy-conf.xml
new file mode 100644
index 0000000..61cbda3
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/livy-conf.xml
@@ -0,0 +1,114 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="true">
+    <property>
+        <name>livy.environment</name>
+        <value>production</value>
+        <description>
+            Specifies Livy's environment. May be either "production" or "development". In "development"
+            mode, Livy will enable debugging options, such as reporting possible routes on a 404.
+            Defaults to "development".
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.server.port</name>
+        <value>8999</value>
+        <description>
+            What port to start the server on. Defaults to 8999.
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.server.session.timeout</name>
+        <value>3600000</value>
+        <description>
+            Time in milliseconds that Livy will wait before timing out an idle session.
+            Defaults to one hour.
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.impersonation.enabled</name>
+        <value>true</value>
+        <description>
+            Whether Livy should use proxy users when submitting a job.
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.server.csrf_protection.enabled</name>
+        <value>true</value>
+        <description>
+            Whether to enable CSRF protection for Livy's REST API.
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.spark.master</name>
+        <value>yarn-cluster</value>
+        <description>
+            The spark.master property for the Spark engine.
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.repl.enableHiveContext</name>
+        <value>true</value>
+        <description>
+            Whether to enable HiveContext in the Livy interpreter.
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.server.recovery.mode</name>
+        <value>recovery</value>
+        <description>
+            Recovery mode for Livy; either "off" or "recovery".
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.server.recovery.state-store</name>
+        <value>filesystem</value>
+        <description>
+            Backend where Livy should store state for recovery.
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy.server.recovery.state-store.url</name>
+        <value>/livy-recovery</value>
+        <description>
+            Path where Livy should store state for recovery.
+        </description>
+        <on-ambari-upgrade add="false"/>
+    </property>
+</configuration>
+
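
Because livy.server.csrf_protection.enabled is true above, state-changing REST calls must carry an X-Requested-By header or Livy rejects them. A minimal session-creation sketch against the configured port (the host name and the third-party requests package are assumptions):

    import json
    import requests

    livy_url = "http://livy-host.example.com:8999"  # livy.server.port above
    headers = {"Content-Type": "application/json",
               "X-Requested-By": "doc-example"}     # required when CSRF protection is on

    resp = requests.post(livy_url + "/sessions",
                         data=json.dumps({"kind": "spark"}),
                         headers=headers)
    resp.raise_for_status()
    session = resp.json()
    print(session["id"], session["state"])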

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/livy-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/livy-env.xml b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/livy-env.xml
new file mode 100644
index 0000000..cec8054
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/livy-env.xml
@@ -0,0 +1,97 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_adding_forbidden="true">
+    <property>
+        <name>livy_user</name>
+        <display-name>Livy User</display-name>
+        <value>livy</value>
+        <property-type>USER</property-type>
+        <value-attributes>
+            <type>user</type>
+            <overridable>false</overridable>
+        </value-attributes>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy_group</name>
+        <display-name>Livy Group</display-name>
+        <value>livy</value>
+        <property-type>GROUP</property-type>
+        <description>Livy group</description>
+        <value-attributes>
+            <type>user</type>
+        </value-attributes>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy_log_dir</name>
+        <value>/var/log/livy</value>
+        <description>Livy Log Dir</description>
+        <value-attributes>
+            <type>directory</type>
+        </value-attributes>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>livy_pid_dir</name>
+        <value>/var/run/livy</value>
+        <value-attributes>
+            <type>directory</type>
+        </value-attributes>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <property>
+        <name>spark_home</name>
+        <value>/usr/hdp/current/spark-client</value>
+        <value-attributes>
+            <type>directory</type>
+        </value-attributes>
+        <on-ambari-upgrade add="false"/>
+    </property>
+    <!-- livy-env.sh -->
+    <property>
+        <name>content</name>
+        <description>This is the Jinja template for the livy-env.sh file</description>
+        <value>
+            #!/usr/bin/env bash
+
+            # - SPARK_HOME      Spark which you would like to use in livy
+            # - SPARK_CONF_DIR  Directory containing the Spark configuration to use.
+            # - HADOOP_CONF_DIR Directory containing the Hadoop / YARN configuration to use.
+            # - LIVY_LOG_DIR    Where log files are stored.  (Default: ${LIVY_HOME}/logs)
+            # - LIVY_PID_DIR    Where the pid file is stored. (Default: /tmp)
+            # - LIVY_SERVER_JAVA_OPTS  Java Opts for running livy server (You can set jvm related setting here, like jvm memory/gc algorithm and etc.)
+            export SPARK_HOME=/usr/hdp/current/spark-client
+            export SPARK_CONF_DIR=/etc/spark/conf
+            export JAVA_HOME={{java_home}}
+            export HADOOP_CONF_DIR=/etc/hadoop/conf
+            export LIVY_LOG_DIR={{livy_log_dir}}
+            export LIVY_PID_DIR={{livy_pid_dir}}
+            export LIVY_SERVER_JAVA_OPTS="-Xmx2g"
+        </value>
+        <value-attributes>
+            <type>content</type>
+        </value-attributes>
+        <on-ambari-upgrade add="false"/>
+    </property>
+</configuration>
+
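
Ambari renders the "content" property above as a Jinja template, substituting values such as {{livy_log_dir}} from this same config type before writing livy-env.sh. A stand-alone approximation of that substitution using the jinja2 package (the values are placeholders matching the defaults above):

    from jinja2 import Template

    content = ("export LIVY_LOG_DIR={{livy_log_dir}}\n"
               "export LIVY_PID_DIR={{livy_pid_dir}}\n")

    print(Template(content).render(livy_log_dir="/var/log/livy",
                                   livy_pid_dir="/var/run/livy"))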

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/livy-log4j-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/livy-log4j-properties.xml b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/livy-log4j-properties.xml
new file mode 100644
index 0000000..41ecd48
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/livy-log4j-properties.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+    <property>
+        <name>content</name>
+        <description>Livy-log4j-Properties</description>
+        <value>
+            # Set everything to be logged to the console
+            log4j.rootCategory=INFO, console
+            log4j.appender.console=org.apache.log4j.ConsoleAppender
+            log4j.appender.console.target=System.err
+            log4j.appender.console.layout=org.apache.log4j.PatternLayout
+            log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+
+            log4j.logger.org.eclipse.jetty=WARN
+        </value>
+        <value-attributes>
+            <type>content</type>
+            <show-property-name>false</show-property-name>
+        </value-attributes>
+        <on-ambari-upgrade add="false"/>
+    </property>
+</configuration>
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/livy-spark-blacklist.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/livy-spark-blacklist.xml b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/livy-spark-blacklist.xml
new file mode 100644
index 0000000..553c436
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/livy-spark-blacklist.xml
@@ -0,0 +1,52 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+    <property>
+        <name>content</name>
+        <description>spark-blacklist.properties</description>
+        <value>
+            #
+            # Configuration override / blacklist. Defines a list of properties that users are not allowed
+            # to override when starting Spark sessions.
+            #
+            # This file takes a list of property names (one per line). Empty lines and lines starting with "#"
+            # are ignored.
+            #
+
+            # Disallow overriding the master and the deploy mode.
+            spark.master
+            spark.submit.deployMode
+
+            # Disallow overriding the location of Spark cached jars.
+            spark.yarn.jar
+            spark.yarn.jars
+            spark.yarn.archive
+
+            # Don't allow users to override the RSC timeout.
+            livy.rsc.server.idle_timeout
+        </value>
+        <value-attributes>
+            <type>content</type>
+            <show-property-name>false</show-property-name>
+        </value-attributes>
+        <on-ambari-upgrade add="false"/>
+    </property>
+</configuration>
\ No newline at end of file
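
The blacklist format described above is simply one property name per line, with blank lines and "#" comments ignored. A small parser matching that description, usable for rejecting a user override before it reaches Spark (the config path is an assumption):

    def load_blacklist(path="/etc/livy/conf/spark-blacklist.conf"):
        names = set()
        with open(path) as f:
            for line in f:
                line = line.strip()
                if line and not line.startswith("#"):
                    names.add(line)
        return names

    blacklist = load_blacklist()
    print("spark.master" in blacklist)  # True with the template above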

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-defaults.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-defaults.xml b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-defaults.xml
new file mode 100644
index 0000000..5d52a84
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-defaults.xml
@@ -0,0 +1,115 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="true">
+  <property>
+    <name>spark.yarn.queue</name>
+    <value>default</value>
+    <description>
+      The name of the YARN queue to which the application is submitted.
+    </description>
+    <depends-on>
+      <property>
+        <type>capacity-scheduler</type>
+        <name>yarn.scheduler.capacity.root.queues</name>
+      </property>
+    </depends-on>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.driver.extraLibraryPath</name>
+    <value>{{spark_hadoop_lib_native}}</value>
+    <description>
+      Set a special library path to use when launching the driver JVM.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.executor.extraLibraryPath</name>
+    <value>{{spark_hadoop_lib_native}}</value>
+    <description>
+      Set a special library path to use when launching the executor JVM.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.history.provider</name>
+    <value>org.apache.spark.deploy.history.FsHistoryProvider</value>
+    <description>
+      Name of the history provider class
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.history.ui.port</name>
+    <value>18081</value>
+    <description>
+      The port to which the web interface of the History Server binds.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.history.fs.logDirectory</name>
+    <value>hdfs:///spark-history/</value>
+    <description>
+      Base directory for Spark application history logs.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.history.kerberos.principal</name>
+    <value>none</value>
+    <description>
+      Kerberos principal name for the Spark History Server.
+    </description>
+    <property-type>KERBEROS_PRINCIPAL</property-type>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.history.kerberos.keytab</name>
+    <value>none</value>
+    <description>
+      Location of the Kerberos keytab file for the Spark History Server.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.eventLog.enabled</name>
+    <value>true</value>
+    <description>
+        Whether to log Spark events, useful for reconstructing the Web UI after the application has finished.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.eventLog.dir</name>
+    <value>hdfs:///spark-history/</value>
+    <description>
+        Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.yarn.historyServer.address</name>
+    <value>{{spark_history_server_host}}:{{spark_history_ui_port}}</value>
+    <description>The address of the Spark history server (i.e. host.com:18081). The address should not contain a scheme (http://). Defaults to not being set since the history server is an optional service. This address is given to the YARN ResourceManager when the Spark application finishes to link the application from the ResourceManager UI to the Spark history server UI.</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>
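
The rendered spark-defaults.conf is a sequence of "key<whitespace>value" lines. A small reader mirroring that format, handy for checking the values above after deployment (the path is an assumption):

    def read_spark_defaults(path="/etc/spark/conf/spark-defaults.conf"):
        conf = {}
        with open(path) as f:
            for line in f:
                line = line.strip()
                if not line or line.startswith("#"):
                    continue
                parts = line.split(None, 1)  # key and value separated by whitespace
                if len(parts) == 2:
                    conf[parts[0]] = parts[1].strip()
        return conf

    conf = read_spark_defaults()
    print(conf.get("spark.history.ui.port"))  # expected: 18081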

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-env.xml b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-env.xml
new file mode 100644
index 0000000..f980f9a
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-env.xml
@@ -0,0 +1,147 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_adding_forbidden="true">
+  <property>
+    <name>spark_user</name>
+    <display-name>Spark User</display-name>
+    <value>spark</value>
+    <property-type>USER</property-type>
+    <value-attributes>
+      <type>user</type>
+      <overridable>false</overridable>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark_group</name>
+    <display-name>Spark Group</display-name>
+    <value>spark</value>
+    <property-type>GROUP</property-type>
+    <description>Spark group</description>
+    <value-attributes>
+      <type>user</type>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark_log_dir</name>
+    <value>/var/log/spark</value>
+    <description>Spark Log Dir</description>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark_pid_dir</name>
+    <value>/var/run/spark</value>
+    <value-attributes>
+      <type>directory</type>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark_daemon_memory</name>
+    <value>1024</value>
+    <description>Memory for Master, Worker and history server (default: 1G)</description>
+    <value-attributes>
+      <type>int</type>
+      <unit>MB</unit>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>hive_kerberos_keytab</name>
+    <value>{{hive_kerberos_keytab}}</value>
+    <description>Hive keytab for the Spark Thrift Server</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>hive_kerberos_principal</name>
+    <value>{{hive_kerberos_principal}}</value>
+    <description>Hive principal for the Spark Thrift Server</description>
+    <property-type>KERBEROS_PRINCIPAL</property-type>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <!-- spark-env.sh -->
+  <property>
+    <name>content</name>
+    <description>This is the Jinja template for the spark-env.sh file</description>
+    <value>
+#!/usr/bin/env bash
+
+# This file is sourced when running various Spark programs.
+# Copy it as spark-env.sh and edit that to configure Spark for your site.
+
+# Options read in YARN client mode
+#SPARK_EXECUTOR_INSTANCES="2" #Number of workers to start (Default: 2)
+#SPARK_EXECUTOR_CORES="1" #Number of cores for the workers (Default: 1).
+#SPARK_EXECUTOR_MEMORY="1G" #Memory per Worker (e.g. 1000M, 2G) (Default: 1G)
+#SPARK_DRIVER_MEMORY="512M" #Memory for the driver (e.g. 1000M, 2G) (Default: 512 MB)
+#SPARK_YARN_APP_NAME="spark" #The name of your application (Default: Spark)
+#SPARK_YARN_QUEUE="default" #The hadoop queue to use for allocation requests (Default: default)
+#SPARK_YARN_DIST_FILES="" #Comma separated list of files to be distributed with the job.
+#SPARK_YARN_DIST_ARCHIVES="" #Comma separated list of archives to be distributed with the job.
+
+# Generic options for the daemons used in the standalone deploy mode
+
+# Alternate conf dir. (Default: ${SPARK_HOME}/conf)
+export SPARK_CONF_DIR=${SPARK_CONF_DIR:-{{spark_home}}/conf}
+
+# Where log files are stored. (Default: ${SPARK_HOME}/logs)
+#export SPARK_LOG_DIR=${SPARK_HOME:-{{spark_home}}}/logs
+export SPARK_LOG_DIR={{spark_log_dir}}
+
+# Where the pid file is stored. (Default: /tmp)
+export SPARK_PID_DIR={{spark_pid_dir}}
+
+#Memory for Master, Worker and history server (default: 1024MB)
+export SPARK_DAEMON_MEMORY={{spark_daemon_memory}}m
+
+# A string representing this instance of spark. (Default: $USER)
+SPARK_IDENT_STRING=$USER
+
+# The scheduling priority for daemons. (Default: 0)
+SPARK_NICENESS=0
+
+export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
+export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}
+
+# The java implementation to use.
+export JAVA_HOME={{java_home}}
+
+</value>
+    <value-attributes>
+      <type>content</type>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark_thrift_cmd_opts</name>
+    <description>additional spark thrift server commandline options</description>
+    <value/>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-hive-site-override.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-hive-site-override.xml b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-hive-site-override.xml
new file mode 100644
index 0000000..f189952
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-hive-site-override.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<configuration supports_final="true">
+  <property>
+    <name>hive.server2.enable.doAs</name>
+    <value>false</value>
+    <description>
+      Disable impersonation in Hive Server 2.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>hive.metastore.client.socket.timeout</name>
+    <value>1800</value>
+    <description>MetaStore Client socket timeout in seconds</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>hive.metastore.client.connect.retry.delay</name>
+    <value>5</value>
+    <description>
+      Expects a time value - number of seconds for the client to wait between consecutive connection attempts
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>hive.server2.thrift.port</name>
+    <value>10016</value>
+    <description>
+      TCP port number to listen on; default 10016.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>hive.server2.transport.mode</name>
+    <value>binary</value>
+    <description>
+      Expects one of [binary, http].
+      Transport mode of HiveServer2.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>
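
With hive.server2.thrift.port set to 10016 and binary transport, a Thrift client can talk to the Spark Thrift Server directly. A connection sketch using the third-party pyhive package (the host name is a placeholder, and an unsecured cluster is assumed):

    from pyhive import hive

    conn = hive.connect(host="thrift-server.example.com", port=10016)
    cur = conn.cursor()
    cur.execute("SHOW TABLES")
    for row in cur.fetchall():
        print(row)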

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-log4j-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-log4j-properties.xml b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-log4j-properties.xml
new file mode 100644
index 0000000..3253750
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-log4j-properties.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>content</name>
+    <description>Spark-log4j-Properties</description>
+    <value>
+# Set everything to be logged to the console
+log4j.rootCategory=INFO, console
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+
+# Settings to quiet third party logs that are too verbose
+log4j.logger.org.eclipse.jetty=WARN
+log4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR
+log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
+log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
+
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-logsearch-conf.xml
new file mode 100644
index 0000000..6d3af8a
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-logsearch-conf.xml
@@ -0,0 +1,98 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Spark</value>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>SPARK_JOBHISTORYSERVER:spark_jobhistory_server;SPARK_THRIFTSERVER:spark_thriftserver;LIVY_SERVER:livy_server</value>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+   "input":[
+      {
+       "type":"spark_jobhistory_server",
+       "rowtype":"service",
+       "path":"{{default('/configurations/spark-env/spark_log_dir', '/var/log/spark')}}/spark-*-org.apache.spark.deploy.history.HistoryServer*.out"
+     },
+     {
+       "type":"spark_thriftserver",
+       "rowtype":"service",
+       "path":"{{default('/configurations/spark-env/spark_log_dir', '/var/log/spark')}}/spark-*-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2*.out"
+     },
+     {
+       "type":"livy_server",
+       "rowtype":"service",
+       "path":"{{default('/configurations/livy-env/livy_log_dir', '/var/log/livy')}}/livy-livy-server.out"
+     }
+   ],
+   "filter":[
+      {
+          "filter":"grok",
+          "conditions":{
+            "fields":{
+              "type":[
+                "spark_jobhistory_server",
+                "spark_thriftserver",
+                "livy_server"
+              ]
+             }
+          },
+          "log4j_format":"",
+          "multiline_pattern":"^(%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level})",
+          "message_pattern":"(?m)^%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVAFILE:file}:%{SPACE}%{GREEDYDATA:log_message}",
+          "post_map_values":{
+            "logtime":{
+              "map_date":{
+                "target_date_pattern":"yy/MM/dd HH:mm:ss"
+              }
+             },
+            "level":{
+              "map_fieldvalue":{
+                "pre_value":"WARNING",
+                "post_value":"WARN"
+              }
+             }
+           }
+      }
+   ]
+}
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>
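
A rough Python equivalent of the grok message_pattern above, assuming the SPARK_DATESTAMP pattern resolves to the "yy/MM/dd HH:mm:ss" layout produced by the log4j ConversionPattern elsewhere in this commit:

    import re

    pattern = re.compile(
        r"^(?P<logtime>\d{2}/\d{2}/\d{2} \d{2}:\d{2}:\d{2})\s+"
        r"(?P<level>[A-Z]+)\s+(?P<file>\S+):\s+(?P<log_message>.*)$",
        re.DOTALL)

    line = "17/03/22 18:32:06 INFO HistoryServer: Bound HistoryServer to 0.0.0.0"
    m = pattern.match(line)
    print(m.group("logtime"), m.group("level"), m.group("log_message"))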

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-metrics-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-metrics-properties.xml b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-metrics-properties.xml
new file mode 100644
index 0000000..d543afb
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-metrics-properties.xml
@@ -0,0 +1,165 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="true">
+  <property>
+    <name>content</name>
+    <description>Spark-metrics-properties</description>
+    <value>
+# syntax: [instance].sink|source.[name].[options]=[value]
+
+# This file configures Spark's internal metrics system. The metrics system is
+# divided into instances which correspond to internal components.
+# Each instance can be configured to report its metrics to one or more sinks.
+# Accepted values for [instance] are "master", "worker", "executor", "driver",
+# and "applications". A wild card "*" can be used as an instance name, in
+# which case all instances will inherit the supplied property.
+#
+# Within an instance, a "source" specifies a particular set of grouped metrics.
+# There are two kinds of sources:
+# 1. Spark internal sources, like MasterSource, WorkerSource, etc, which will
+# collect a Spark component's internal state. Each instance is paired with a
+# Spark source that is added automatically.
+# 2. Common sources, like JvmSource, which will collect low level state.
+# These can be added through configuration options and are then loaded
+# using reflection.
+#
+# A "sink" specifies where metrics are delivered to. Each instance can be
+# assigned one or more sinks.
+#
+# The sink|source field specifies whether the property relates to a sink or
+# source.
+#
+# The [name] field specifies the name of source or sink.
+#
+# The [options] field is the specific property of this source or sink. The
+# source or sink is responsible for parsing this property.
+#
+# Notes:
+# 1. To add a new sink, set the "class" option to a fully qualified class
+# name (see examples below).
+# 2. Some sinks involve a polling period. The minimum allowed polling period
+# is 1 second.
+# 3. Wild card properties can be overridden by more specific properties.
+# For example, master.sink.console.period takes precedence over
+# *.sink.console.period.
+# 4. A metrics specific configuration
+# "spark.metrics.conf=${SPARK_HOME}/conf/metrics.properties" should be
+# added to Java properties using -Dspark.metrics.conf=xxx if you want to
+# customize metrics system. You can also put the file in ${SPARK_HOME}/conf
+# and it will be loaded automatically.
+# 5. MetricsServlet is added by default as a sink in master, worker and client
+# driver, you can send http request "/metrics/json" to get a snapshot of all the
+# registered metrics in json format. For master, requests "/metrics/master/json" and
+# "/metrics/applications/json" can be sent seperately to get metrics snapshot of
+# instance master and applications. MetricsServlet may not be configured by self.
+#
+
+## List of available sinks and their properties.
+
+# org.apache.spark.metrics.sink.ConsoleSink
+# Name: Default: Description:
+# period 10 Poll period
+# unit seconds Units of poll period
+
+# org.apache.spark.metrics.sink.CSVSink
+# Name: Default: Description:
+# period 10 Poll period
+# unit seconds Units of poll period
+# directory /tmp Where to store CSV files
+
+# org.apache.spark.metrics.sink.GangliaSink
+# Name: Default: Description:
+# host NONE Hostname or multicast group of Ganglia server
+# port NONE Port of Ganglia server(s)
+# period 10 Poll period
+# unit seconds Units of poll period
+# ttl 1 TTL of messages sent by Ganglia
+# mode multicast Ganglia network mode ('unicast' or 'multicast')
+
+# org.apache.spark.metrics.sink.JmxSink
+
+# org.apache.spark.metrics.sink.MetricsServlet
+# Name: Default: Description:
+# path VARIES* Path prefix from the web server root
+# sample false Whether to show entire set of samples for histograms ('false' or 'true')
+#
+# * Default path is /metrics/json for all instances except the master. The master has two paths:
+# /metrics/applications/json # App information
+# /metrics/master/json # Master information
+
+# org.apache.spark.metrics.sink.GraphiteSink
+# Name: Default: Description:
+# host NONE Hostname of Graphite server
+# port NONE Port of Graphite server
+# period 10 Poll period
+# unit seconds Units of poll period
+# prefix EMPTY STRING Prefix to prepend to metric name
+
+## Examples
+# Enable JmxSink for all instances by class name
+#*.sink.jmx.class=org.apache.spark.metrics.sink.JmxSink
+
+# Enable ConsoleSink for all instances by class name
+#*.sink.console.class=org.apache.spark.metrics.sink.ConsoleSink
+
+# Polling period for ConsoleSink
+#*.sink.console.period=10
+
+#*.sink.console.unit=seconds
+
+# Master instance overlap polling period
+#master.sink.console.period=15
+
+#master.sink.console.unit=seconds
+
+# Enable CsvSink for all instances
+#*.sink.csv.class=org.apache.spark.metrics.sink.CsvSink
+
+# Polling period for CsvSink
+#*.sink.csv.period=1
+
+#*.sink.csv.unit=minutes
+
+# Polling directory for CsvSink
+#*.sink.csv.directory=/tmp/
+
+# Worker instance overlap polling period
+#worker.sink.csv.period=10
+
+#worker.sink.csv.unit=minutes
+
+# Enable jvm source for instance master, worker, driver and executor
+#master.source.jvm.class=org.apache.spark.metrics.source.JvmSource
+
+#worker.source.jvm.class=org.apache.spark.metrics.source.JvmSource
+
+#driver.source.jvm.class=org.apache.spark.metrics.source.JvmSource
+
+#executor.source.jvm.class=org.apache.spark.metrics.source.JvmSource
+
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>
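
Note 5 above says MetricsServlet is enabled by default in the driver, so a running application already serves its registered metrics as JSON. A quick probe of a driver UI (the host and the default driver UI port 4040 are assumptions about the deployment):

    import json
    import urllib.request

    url = "http://driver-host.example.com:4040/metrics/json"
    with urllib.request.urlopen(url) as resp:
        metrics = json.load(resp)

    print(sorted(metrics.get("gauges", {}))[:5])  # first few gauge names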

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-thrift-fairscheduler.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-thrift-fairscheduler.xml b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-thrift-fairscheduler.xml
new file mode 100644
index 0000000..ab20354
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-thrift-fairscheduler.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration supports_final="true">
+  <property>
+    <name>fairscheduler_content</name>
+    <description>This is the Jinja template for the spark-thrift-fairscheduler.xml file.</description>
+    <value>&lt;?xml version="1.0"?&gt;
+            &lt;allocations&gt;
+            &lt;pool name="default"&gt;
+            &lt;schedulingMode&gt;FAIR&lt;/schedulingMode&gt;
+            &lt;weight&gt;1&lt;/weight&gt;
+            &lt;minShare&gt;2&lt;/minShare&gt;
+            &lt;/pool&gt;
+            &lt;/allocations&gt;
+        </value>
+    <value-attributes>
+      <type>content</type>
+    </value-attributes>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>
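
Sessions on the Thrift Server can be pinned to one of the pools defined in the fair scheduler file above with a SET command in the JDBC session. A sketch reusing the pyhive connection style shown earlier (the host name is a placeholder):

    from pyhive import hive

    cur = hive.connect(host="thrift-server.example.com", port=10016).cursor()
    cur.execute("SET spark.sql.thriftserver.scheduler.pool=default")
    cur.execute("SELECT 1")
    print(cur.fetchall())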

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-thrift-sparkconf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-thrift-sparkconf.xml b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-thrift-sparkconf.xml
new file mode 100644
index 0000000..7155eb9
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/configuration/spark-thrift-sparkconf.xml
@@ -0,0 +1,160 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="true">
+  <property>
+    <name>spark.yarn.queue</name>
+    <value>default</value>
+    <description>
+      The name of the YARN queue to which the application is submitted.
+    </description>
+    <depends-on>
+      <property>
+        <type>capacity-scheduler</type>
+        <name>yarn.scheduler.capacity.root.queues</name>
+      </property>
+    </depends-on>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.driver.extraLibraryPath</name>
+    <value>{{spark_hadoop_lib_native}}</value>
+    <description>
+       Set a special library path to use when launching the driver JVM.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.executor.extraLibraryPath</name>
+    <value>{{spark_hadoop_lib_native}}</value>
+    <description>
+      Set a special library path to use when launching the executor JVM.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.history.provider</name>
+    <value>org.apache.spark.deploy.history.FsHistoryProvider</value>
+    <description>Name of history provider class</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.history.fs.logDirectory</name>
+    <value>{{spark_history_dir}}</value>
+    <final>true</final>
+    <description>
+      Base directory for Spark application history logs. It is the same value
+      as in spark-defaults.xml.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.eventLog.enabled</name>
+    <value>true</value>
+    <final>true</final>
+    <description>
+      Whether to log Spark events, useful for reconstructing the Web UI after the application has finished.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.eventLog.dir</name>
+    <value>{{spark_history_dir}}</value>
+    <final>true</final>
+    <description>
+      Base directory in which Spark events are logged, if spark.eventLog.enabled is true. It is the same value
+      as in spark-defaults.xml.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.master</name>
+    <value>{{spark_thrift_master}}</value>
+    <description>
+      The deploy mode of the Spark application; by default it is yarn-client for the
+      Thrift Server, but local mode when there is only one NodeManager.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.scheduler.allocation.file</name>
+    <value>{{spark_conf}}/spark-thrift-fairscheduler.xml</value>
+    <description>
+      Scheduler configuration file for thriftserver.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.scheduler.mode</name>
+    <value>FAIR</value>
+    <description>
+      The scheduling mode between jobs submitted to the same SparkContext.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.shuffle.service.enabled</name>
+    <value>true</value>
+    <description>
+      Enables the external shuffle service.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.hadoop.cacheConf</name>
+    <value>false</value>
+    <description>
+      Specifies whether HadoopRDD caches the Hadoop configuration object.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.dynamicAllocation.enabled</name>
+    <value>true</value>
+    <description>
+      Whether to use dynamic resource allocation, which scales the number of executors registered with this application up and down based on the workload.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.dynamicAllocation.initialExecutors</name>
+    <value>0</value>
+    <description>
+      Initial number of executors to run if dynamic allocation is enabled.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.dynamicAllocation.maxExecutors</name>
+    <value>10</value>
+    <description>
+      Upper bound for the number of executors if dynamic allocation is enabled.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
+    <name>spark.dynamicAllocation.minExecutors</name>
+    <value>0</value>
+    <description>
+      Lower bound for the number of executors if dynamic allocation is enabled.
+    </description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/kerberos.json b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/kerberos.json
new file mode 100644
index 0000000..9912af1
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/kerberos.json
@@ -0,0 +1,126 @@
+{
+  "services": [
+    {
+      "name": "SPARK",
+      "identities": [
+        {
+          "name": "/smokeuser"
+        },
+        {
+          "name": "sparkuser",
+          "principal": {
+            "value": "${spark-env/spark_user}${principal_suffix}@${realm}",
+            "type" : "user",
+            "configuration": "spark-defaults/spark.history.kerberos.principal",
+            "local_username" : "${spark-env/spark_user}"
+          },
+          "keytab": {
+            "file": "${keytab_dir}/spark.headless.keytab",
+            "owner": {
+              "name": "${spark-env/spark_user}",
+              "access": "r"
+            },
+            "group": {
+              "name": "${cluster-env/user_group}",
+              "access": ""
+            },
+            "configuration": "spark-defaults/spark.history.kerberos.keytab"
+          }
+        }
+      ],
+      "configurations": [
+        {
+          "spark-defaults": {
+            "spark.history.kerberos.enabled": "true"
+          }
+        },
+        {
+          "spark-thrift-sparkconf": {
+            "spark.yarn.keytab": "${spark-env/hive_kerberos_keytab}",
+            "spark.yarn.principal": "${spark-env/hive_kerberos_principal}"
+          }
+        },
+        {
+          "livy-conf": {
+            "livy.server.auth.type": "kerberos",
+            "livy.impersonation.enabled": "true",
+            "livy.superusers": "zeppelin${principal_suffix}"
+          }
+        },
+        {
+          "core-site": {
+            "hadoop.proxyuser.${livy-env/livy_user}.groups": "*",
+            "hadoop.proxyuser.${livy-env/livy_user}.hosts": "*"
+          }
+        }
+      ],
+      "components": [
+        {
+          "name": "SPARK_JOBHISTORYSERVER",
+          "identities": [
+            {
+              "name": "hdfs",
+              "reference": "/HDFS/NAMENODE/hdfs"
+            }
+          ]
+        },
+        {
+          "name": "SPARK_CLIENT"
+        },
+        {
+          "name": "SPARK_THRIFTSERVER",
+          "identities": [
+            {
+              "name": "hdfs",
+              "reference": "/HDFS/NAMENODE/hdfs"
+            },
+            {
+              "name": "hive_server_hive",
+              "reference": "/HIVE/HIVE_SERVER/hive_server_hive"
+            }
+          ]
+        },
+        {
+          "name": "LIVY_SERVER",
+          "identities": [
+            {
+              "name": "hdfs",
+              "reference": "/HDFS/NAMENODE/hdfs"
+            },
+            {
+              "name": "livyuser",
+              "principal": {
+                "value": "${livy-env/livy_user}/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "livy-conf/livy.server.launch.kerberos.principal",
+                "local_username": "${livy-env/livy_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/livy.service.keytab",
+                "owner": {
+                  "name": "${livy-env/livy_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "livy-conf/livy.server.launch.kerberos.keytab"
+              }
+            },
+            {
+              "name": "livy_spnego",
+              "reference": "/spnego",
+              "principal": {
+                "configuration": "livy-conf/livy.server.auth.kerberos.principal"
+              },
+              "keytab": {
+                "configuration": "livy-conf/livy.server.auth.kerberos.keytab"
+              }
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}
\ No newline at end of file
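
The ${...} tokens in this descriptor are resolved by Ambari against cluster configuration (for ${config-type/property} references) and against Kerberos environment values such as ${realm}, ${keytab_dir}, and ${principal_suffix}. A simplified sketch of that substitution with hypothetical inputs (the real resolution happens server-side in Ambari's Kerberos helper):

    import re

    def resolve(template, configs, variables):
        def sub(match):
            key = match.group(1)
            if "/" in key:  # a ${config-type/property} reference
                config_type, prop = key.split("/", 1)
                return configs[config_type][prop]
            return variables[key]  # e.g. realm, principal_suffix
        return re.sub(r"\$\{([^}]+)\}", sub, template)

    configs = {"spark-env": {"spark_user": "spark"}}               # hypothetical values
    variables = {"principal_suffix": "-c1", "realm": "EXAMPLE.COM"}
    print(resolve("${spark-env/spark_user}${principal_suffix}@${realm}", configs, variables))
    # -> spark-c1@EXAMPLE.COM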

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/metainfo.xml b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/metainfo.xml
new file mode 100644
index 0000000..25a6855
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/metainfo.xml
@@ -0,0 +1,282 @@
+<?xml version="1.0"?>
+<!--Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>SPARK</name>
+      <displayName>Spark</displayName>
+      <comment>Apache Spark is a fast and general engine for large-scale data processing.</comment>
+      <version>2.2.0</version>
+      <components>
+        <component>
+          <name>SPARK_JOBHISTORYSERVER</name>
+          <displayName>Spark History Server</displayName>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
+          <versionAdvertised>true</versionAdvertised>
+          <dependencies>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>YARN/YARN_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/job_history_server.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+          <logs>
+            <log>
+              <logId>spark_jobhistory_server</logId>
+              <primary>true</primary>
+            </log>
+          </logs>
+        </component>
+        <component>
+          <name>SPARK_THRIFTSERVER</name>
+          <displayName>Spark Thrift Server</displayName>
+          <category>SLAVE</category>
+          <cardinality>0+</cardinality>
+          <versionAdvertised>true</versionAdvertised>
+          <dependencies>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>YARN/YARN_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>HIVE/HIVE_METASTORE</name>
+              <scope>cluster</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/spark_thrift_server.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+          <logs>
+            <log>
+              <logId>spark_thriftserver</logId>
+              <primary>true</primary>
+            </log>
+          </logs>
+        </component>
+        <component>
+          <name>SPARK_CLIENT</name>
+          <displayName>Spark Client</displayName>
+          <category>CLIENT</category>
+          <cardinality>1+</cardinality>
+          <versionAdvertised>true</versionAdvertised>
+          <dependencies>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>YARN/YARN_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/spark_client.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+          <configFiles>
+            <configFile>
+              <type>env</type>
+              <fileName>spark-log4j.properties</fileName>
+              <dictionaryName>spark-log4j-properties</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>spark-env.sh</fileName>
+              <dictionaryName>spark-env</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>spark-metrics.properties</fileName>
+              <dictionaryName>spark-metrics-properties</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>properties</type>
+              <fileName>spark-defaults.conf</fileName>
+              <dictionaryName>spark-defaults</dictionaryName>
+            </configFile>
+          </configFiles>
+        </component>
+        <component>
+          <name>LIVY_SERVER</name>
+          <displayName>Livy for Spark Server</displayName>
+          <category>SLAVE</category>
+          <cardinality>0+</cardinality>
+          <versionAdvertised>true</versionAdvertised>
+          <dependencies>
+            <dependency>
+              <name>SPARK/SPARK_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>YARN/YARN_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/livy_server.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+          <logs>
+            <log>
+              <logId>livy_server</logId>
+              <primary>true</primary>
+            </log>
+          </logs>
+        </component>
+      </components>
+
+      <configuration-dependencies>
+        <config-type>spark-defaults</config-type>
+        <config-type>spark-env</config-type>
+        <config-type>spark-log4j-properties</config-type>
+        <config-type>spark-metrics-properties</config-type>
+        <config-type>spark-thrift-sparkconf</config-type>
+        <config-type>spark-hive-site-override</config-type>
+        <config-type>spark-thrift-fairscheduler</config-type>
+        <config-type>livy-conf</config-type>
+        <config-type>livy-env</config-type>
+        <config-type>livy-log4j-properties</config-type>
+        <config-type>livy-spark-blacklist</config-type>
+      </configuration-dependencies>
+
+      <commandScript>
+        <script>scripts/service_check.py</script>
+        <scriptType>PYTHON</scriptType>
+        <timeout>300</timeout>
+      </commandScript>
+
+      <requiredServices>
+        <service>HDFS</service>
+        <service>YARN</service>
+        <service>HIVE</service>
+      </requiredServices>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>redhat7,amazon2015,redhat6,suse11,suse12</osFamily>
+          <packages>
+            <package>
+              <name>spark_${stack_version}</name>
+            </package>
+            <package>
+              <name>spark_${stack_version}-python</name>
+            </package>
+            <package>
+              <name>livy_${stack_version}</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>debian7,ubuntu12,ubuntu14,ubuntu16</osFamily>
+          <packages>
+            <package>
+              <name>spark-${stack_version}</name>
+            </package>
+            <package>
+              <name>spark-${stack_version}-python</name>
+            </package>
+            <package>
+              <name>livy-${stack_version}</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+      <quickLinksConfigurations>
+        <quickLinksConfiguration>
+          <fileName>quicklinks.json</fileName>
+          <default>true</default>
+        </quickLinksConfiguration>
+      </quickLinksConfigurations>
+    </service>
+  </services>
+</metainfo>
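
The ${stack_version} token in the package names above is expanded when Ambari looks up stack packages; roughly, the installed stack version is normalized (dots and dashes become underscores) and substituted into the pattern. A hedged sketch of that expansion with a hypothetical version string (the exact matching rules live in Ambari's server-side package lookup):

    def resolve_package(pattern, stack_version):
        # normalize "2.6.0.0" -> "2_6_0_0" before substitution
        normalized = stack_version.replace(".", "_").replace("-", "_")
        return pattern.replace("${stack_version}", normalized)

    print(resolve_package("spark_${stack_version}", "2.6.0.0"))  # spark_2_6_0_0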

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/quicklinks/quicklinks.json b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/quicklinks/quicklinks.json
new file mode 100644
index 0000000..4639272
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/quicklinks/quicklinks.json
@@ -0,0 +1,27 @@
+{
+  "name": "default",
+  "description": "default quick links configuration",
+  "configuration": {
+    "protocol":
+    {
+      "type":"HTTP_ONLY"
+    },
+
+    "links": [
+      {
+        "name": "spark_history_server_ui",
+        "label": "Spark History Server UI",
+        "requires_user_name": "false",
+        "url": "%@://%@:%@",
+        "port":{
+          "http_property": "spark.history.ui.port",
+          "http_default_port": "18081",
+          "https_property": "spark.history.ui.port",
+          "https_default_port": "18081",
+          "regex": "^(\\d+)$",
+          "site": "spark-defaults"
+        }
+      }
+    ]
+  }
+}
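
The %@://%@:%@ template is filled positionally with protocol, host, and port; the port is read from spark.history.ui.port in spark-defaults, validated against the regex, and falls back to 18081. A simplified sketch of that resolution (names here are illustrative; the real work is done by Ambari Web):

    import re

    def build_link(template, protocol, host, site, port_cfg):
        port = site.get(port_cfg["http_property"], port_cfg["http_default_port"])
        if not re.match(port_cfg["regex"], str(port)):
            port = port_cfg["http_default_port"]  # fall back on a malformed value
        for part in (protocol, host, str(port)):
            template = template.replace("%@", part, 1)
        return template

    port_cfg = {"http_property": "spark.history.ui.port",
                "http_default_port": "18081", "regex": r"^(\d+)$"}
    site = {"spark.history.ui.port": "18081"}  # contents of spark-defaults
    print(build_link("%@://%@:%@", "http", "shs.example.com", site, port_cfg))
    # -> http://shs.example.com:18081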

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/job_history_server.py b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/job_history_server.py
new file mode 100644
index 0000000..16a2224
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/job_history_server.py
@@ -0,0 +1,106 @@
+#!/usr/bin/python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+import os
+
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
+from resource_management.libraries.functions.check_process_status import check_process_status
+from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions.constants import StackFeature
+from resource_management.core.logger import Logger
+from resource_management.core import shell
+from setup_spark import setup_spark
+from spark_service import spark_service
+
+
+class JobHistoryServer(Script):
+
+  def install(self, env):
+    import params
+    env.set_params(params)
+    
+    self.install_packages(env)
+    
+  def configure(self, env, upgrade_type=None, config_dir=None):
+    import params
+    env.set_params(params)
+    
+    setup_spark(env, 'server', upgrade_type=upgrade_type, action='config')
+    
+  def start(self, env, upgrade_type=None):
+    import params
+    env.set_params(params)
+    
+    self.configure(env)
+    spark_service('jobhistoryserver', upgrade_type=upgrade_type, action='start')
+
+  def stop(self, env, upgrade_type=None):
+    import params
+    env.set_params(params)
+    
+    spark_service('jobhistoryserver', upgrade_type=upgrade_type, action='stop')
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+
+    check_process_status(status_params.spark_history_server_pid_file)
+    
+
+  def get_component_name(self):
+    return "spark-historyserver"
+
+  def pre_upgrade_restart(self, env, upgrade_type=None):
+    import params
+
+    env.set_params(params)
+    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
+      Logger.info("Executing Spark Job History Server Stack Upgrade pre-restart")
+      conf_select.select(params.stack_name, "spark", params.version)
+      stack_select.select("spark-historyserver", params.version)
+
+      # Spark 1.3.1.2.3 and higher (first shipped in HDP 2.3) has no dependency on Tez,
+      # so the Tez tarball only needs to be copied to HDFS on older stacks.
+      if params.version and check_stack_feature(StackFeature.TEZ_FOR_SPARK, params.version):
+        resource_created = copy_to_hdfs(
+          "tez",
+          params.user_group,
+          params.hdfs_user,
+          skip=params.sysprep_skip_copy_tarballs_hdfs)
+        if resource_created:
+          params.HdfsResource(None, action="execute")
+          
+  def get_log_folder(self):
+    import params
+    return params.spark_log_dir
+  
+  def get_user(self):
+    import params
+    return params.spark_user
+
+  def get_pid_files(self):
+    import status_params
+    return [status_params.spark_history_server_pid_file]
+
+if __name__ == "__main__":
+  JobHistoryServer().execute()
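
Ambari drives this class through Script.execute(): the agent passes the command name (INSTALL, START, STATUS, and so on) and execute() dispatches to the matching method. The STATUS path relies on check_process_status, which behaves roughly like the sketch below (simplified; the stand-in exception mirrors the one raised by resource_management):

    import os

    class ComponentIsNotRunning(Exception):
        pass  # stand-in for resource_management.core.exceptions.ComponentIsNotRunning

    def check_process_status_sketch(pid_file):
        try:
            with open(pid_file) as f:
                pid = int(f.read().strip())
            os.kill(pid, 0)  # signal 0 probes existence without touching the process
        except (IOError, ValueError, OSError):
            raise ComponentIsNotRunning()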

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/livy_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/livy_server.py b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/livy_server.py
new file mode 100644
index 0000000..b09d9a9
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/livy_server.py
@@ -0,0 +1,149 @@
+#!/usr/bin/python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.check_process_status import check_process_status
+from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions.constants import StackFeature
+from resource_management.core.exceptions import Fail
+from resource_management.core.resources.system import Execute
+from resource_management.libraries.providers.hdfs_resource import WebHDFSUtil
+from resource_management.libraries.providers.hdfs_resource import HdfsResourceProvider
+from resource_management import is_empty
+from resource_management import shell
+from resource_management.libraries.functions.decorator import retry
+from resource_management.core.logger import Logger
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions import conf_select, stack_select
+
+from livy_service import livy_service
+from setup_livy import setup_livy
+
+class LivyServer(Script):
+
+  def install(self, env):
+    import params
+    env.set_params(params)
+
+    self.install_packages(env)
+
+  def configure(self, env, upgrade_type=None, config_dir=None):
+    import params
+    env.set_params(params)
+
+    setup_livy(env, 'server', upgrade_type=upgrade_type, action='config')
+
+  def start(self, env, upgrade_type=None):
+    import params
+    env.set_params(params)
+
+    if params.has_ats and params.has_livyserver:
+      Logger.info("Verifying DFS directories where ATS stores time line data for active and completed applications.")
+      self.wait_for_dfs_directories_created([params.entity_groupfs_store_dir, params.entity_groupfs_active_dir])
+
+    self.configure(env)
+    livy_service('server', upgrade_type=upgrade_type, action='start')
+
+  def stop(self, env, upgrade_type=None):
+    import params
+    env.set_params(params)
+
+    livy_service('server', upgrade_type=upgrade_type, action='stop')
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+
+    check_process_status(status_params.livy_server_pid_file)
+
+  #  TODO move out and compose with similar method in resourcemanager.py
+  def wait_for_dfs_directories_created(self, dirs):
+    import params
+
+    ignored_dfs_dirs = HdfsResourceProvider.get_ignored_resources_list(params.hdfs_resource_ignore_file)
+
+    if params.security_enabled:
+      Execute(format("{kinit_path_local} -kt {livy_kerberos_keytab} {livy_principal}"),
+              user=params.livy_user
+              )
+      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
+              user=params.hdfs_user
+              )
+
+    for dir_path in dirs:
+      self.wait_for_dfs_directory_created(dir_path, ignored_dfs_dirs)
+
+  def get_pid_files(self):
+    import status_params
+    return [status_params.livy_server_pid_file]
+
+
+  @retry(times=8, sleep_time=20, backoff_factor=1, err_class=Fail)
+  def wait_for_dfs_directory_created(self, dir_path, ignored_dfs_dirs):
+    import params
+
+    if not is_empty(dir_path):
+      dir_path = HdfsResourceProvider.parse_path(dir_path)
+
+      if dir_path in ignored_dfs_dirs:
+        Logger.info("Skipping DFS directory '" + dir_path + "' as it's marked to be ignored.")
+        return
+
+      Logger.info("Verifying if DFS directory '" + dir_path + "' exists.")
+
+      dir_exists = None
+
+      if WebHDFSUtil.is_webhdfs_available(params.is_webhdfs_enabled, params.default_fs):
+        # checking via WebHDFS is much faster than shelling out to hdfs dfs -test
+        util = WebHDFSUtil(params.hdfs_site, params.hdfs_user, params.security_enabled)
+        list_status = util.run_command(dir_path, 'GETFILESTATUS', method='GET', ignore_status_codes=['404'], assertable_result=False)
+        dir_exists = ('FileStatus' in list_status)
+      else:
+        # fall back to the slower hdfs dfs -test -d check
+        dfs_ret_code = shell.call(format("hdfs --config {hadoop_conf_dir} dfs -test -d " + dir_path), user=params.livy_user)[0]
+        dir_exists = not dfs_ret_code  # dfs -test -d returns 0 if the directory exists
+
+      if not dir_exists:
+        raise Fail("DFS directory '" + dir_path + "' does not exist !")
+      else:
+        Logger.info("DFS directory '" + dir_path + "' exists.")
+
+  def get_component_name(self):
+    return "livy-server"
+
+  def pre_upgrade_restart(self, env, upgrade_type=None):
+    import params
+
+    env.set_params(params)
+    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
+      Logger.info("Executing Livy Server Stack Upgrade pre-restart")
+      conf_select.select(params.stack_name, "spark", params.version)
+      stack_select.select("livy-server", params.version)
+
+  def get_log_folder(self):
+    import params
+    return params.livy_log_dir
+
+  def get_user(self):
+    import params
+    return params.livy_user
+
+if __name__ == "__main__":
+  LivyServer().execute()
+
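
The @retry decorator above keeps wait_for_dfs_directory_created polling while the ATS directories are still being created: up to 8 attempts, 20 seconds apart (backoff_factor=1 keeps the interval constant), re-raising Fail only once the attempts are exhausted. A simplified sketch of that contract (the real implementation is resource_management.libraries.functions.decorator.retry):

    import time
    from functools import wraps

    def retry(times, sleep_time, backoff_factor, err_class):
        def decorator(fn):
            @wraps(fn)
            def wrapper(*args, **kwargs):
                delay = sleep_time
                for attempt in range(times):
                    try:
                        return fn(*args, **kwargs)
                    except err_class:
                        if attempt == times - 1:
                            raise  # attempts exhausted: surface the failure
                        time.sleep(delay)
                        delay *= backoff_factor
            return wrapper
        return decorator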

http://git-wip-us.apache.org/repos/asf/ambari/blob/1aba730c/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/livy_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/livy_service.py b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/livy_service.py
new file mode 100644
index 0000000..45201db
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/2.2.0/scripts/livy_service.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from resource_management.libraries.functions import format
+from resource_management.core.resources.system import File, Execute
+import threading
+
+def livy_service(name, upgrade_type=None, action=None):
+  import params
+
+  if action == 'start':
+    livyserver_no_op_test = format(
+      'ls {livy_server_pid_file} >/dev/null 2>&1 && ps -p `cat {livy_server_pid_file}` >/dev/null 2>&1')
+    Execute(format('{livy_server_start}'),
+            user=params.livy_user,
+            environment={'JAVA_HOME': params.java_home},
+            not_if=livyserver_no_op_test
+    )
+
+  elif action == 'stop':
+    Execute(format('{livy_server_stop}'),
+            user=params.livy_user,
+            environment={'JAVA_HOME': params.java_home}
+            )
+    File(params.livy_server_pid_file,
+        action="delete"
+        )
+
+
+
+
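
The not_if guard is what makes a repeated START a no-op: Execute skips the command whenever the shell test succeeds, i.e. when the pid file exists and the recorded process is alive. The same pattern in isolation, with hypothetical command, user, and pid-file values:

    from resource_management.core.resources.system import Execute

    Execute('/usr/bin/mydaemon start',   # hypothetical start command
            user='mydaemon',             # hypothetical service user
            not_if='ls /var/run/mydaemon.pid >/dev/null 2>&1 '
                   '&& ps -p `cat /var/run/mydaemon.pid` >/dev/null 2>&1')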

