ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jlun...@apache.org
Subject [1/3] ambari git commit: AMBARI-16293: Spark service fails to start (jluniya)
Date Fri, 06 May 2016 22:59:37 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 766ad0a70 -> bd6eecce6


http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/main/resources/common-services/SPARK/1.6.0/configuration/spark-thrift-fairscheduler.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.6.0/configuration/spark-thrift-fairscheduler.xml
b/ambari-server/src/main/resources/common-services/SPARK/1.6.0/configuration/spark-thrift-fairscheduler.xml
new file mode 100644
index 0000000..2dda4bb
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.6.0/configuration/spark-thrift-fairscheduler.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<configuration supports_final="true">
+    <property>
+        <name>fairscheduler_content</name>
+        <description>This is the jinja template for spark-thrift-fairscheduler.xml file.</description>
+        <value>&lt;?xml version="1.0"?&gt;
+            &lt;allocations&gt;
+            &lt;pool name="default"&gt;
+            &lt;schedulingMode&gt;FAIR&lt;/schedulingMode&gt;
+            &lt;weight&gt;1&lt;/weight&gt;
+            &lt;minShare&gt;2&lt;/minShare&gt;
+            &lt;/pool&gt;
+            &lt;/allocations&gt;
+        </value>
+        <value-attributes>
+            <type>content</type>
+        </value-attributes>
+    </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/main/resources/common-services/SPARK/1.6.0/configuration/spark-thrift-sparkconf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.6.0/configuration/spark-thrift-sparkconf.xml
b/ambari-server/src/main/resources/common-services/SPARK/1.6.0/configuration/spark-thrift-sparkconf.xml
new file mode 100644
index 0000000..4f2563b
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.6.0/configuration/spark-thrift-sparkconf.xml
@@ -0,0 +1,193 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration supports_final="true">
+
+  <property>
+    <name>spark.yarn.executor.memoryOverhead</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.yarn.driver.memoryOverhead</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.yarn.scheduler.heartbeat.interval-ms</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.yarn.max.executor.failures</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.yarn.containerLauncherMaxThreads</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.yarn.submit.file.replication</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.yarn.preserve.staging.files</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.yarn.max.executor.failures</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.yarn.services</name>
+    <deleted>true</deleted>
+  </property>
+
+  <property>
+    <name>spark.history.provider</name>
+    <value>org.apache.spark.deploy.history.FsHistoryProvider</value>
+    <description>Name of history provider class</description>
+  </property>
+
+  <property>
+    <name>spark.history.fs.logDirectory</name>
+    <value>{{spark_history_dir}}</value>
+    <final>true</final>
+    <description>
+      Base directory for history spark application log. It is the same value
+      as in spark-defaults.xml.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.eventLog.enabled</name>
+    <value>true</value>
+    <final>true</final>
+    <description>
+      Whether to log Spark events, useful for reconstructing the Web UI after the application has finished.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.eventLog.dir</name>
+    <value>{{spark_history_dir}}</value>
+    <final>true</final>
+    <description>
+      Base directory in which Spark events are logged, if spark.eventLog.enabled is true. It is the same value
+      as in spark-defaults.xml.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.master</name>
+    <value>{{spark_thrift_master}}</value>
+    <description>
+      The deploying mode of spark application, by default it is yarn-client for thrift-server but local mode for there's
+      only one nodemanager.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.scheduler.allocation.file</name>
+    <value>{{spark_conf}}/spark-thrift-fairscheduler.xml</value>
+    <description>
+      Scheduler configuration file for thriftserver.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.scheduler.mode</name>
+    <value>FAIR</value>
+    <description>
+      The scheduling mode between jobs submitted to the same SparkContext.
+    </description>
+  </property>
+  
+  <property>
+    <name>spark.shuffle.service.enabled</name>
+    <value>true</value>
+    <description>
+      Enables the external shuffle service.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.hadoop.cacheConf</name>
+    <value>false</value>
+    <description>
+      Specifies whether HadoopRDD caches the Hadoop configuration object
+    </description>
+  </property>
+
+  <property>
+    <name>spark.dynamicAllocation.enabled</name>
+    <value>true</value>
+    <description>
+      Whether to use dynamic resource allocation, which scales the number of executors registered with this application up and down based on the workload.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.dynamicAllocation.initialExecutors</name>
+    <value>0</value>
+    <description>
+      Initial number of executors to run if dynamic allocation is enabled.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.dynamicAllocation.maxExecutors</name>
+    <value>10</value>
+    <description>
+      Upper bound for the number of executors if dynamic allocation is enabled.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.dynamicAllocation.minExecutors</name>
+    <value>0</value>
+    <description>
+      Lower bound for the number of executors if dynamic allocation is enabled.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.yarn.am.memory</name>
+    <value>512m</value>
+    <description>
+      Amount of memory to use for the YARN Application Master in client mode.
+    </description>
+  </property>
+
+  <property>
+    <name>spark.executor.memory</name>
+    <value>1g</value>
+    <description>
+      Amount of memory to use per executor process.
+    </description>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/main/resources/common-services/SPARK/1.6.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.6.0/metainfo.xml b/ambari-server/src/main/resources/common-services/SPARK/1.6.0/metainfo.xml
new file mode 100644
index 0000000..2bd79d5
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.6.0/metainfo.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<metainfo>
+    <schemaVersion>2.0</schemaVersion>
+    <services>
+        <service>
+          <name>SPARK</name>
+          <extends>common-services/SPARK/1.5.2</extends>
+          <version>1.6.0</version>
+          <configuration-dependencies>
+            <config-type>spark-defaults</config-type>
+            <config-type>spark-env</config-type>
+            <config-type>spark-log4j-properties</config-type>
+            <config-type>spark-metrics-properties</config-type>
+            <config-type>spark-thrift-sparkconf</config-type>
+            <config-type>spark-hive-site-override</config-type>
+            <config-type>spark-thrift-fairscheduler</config-type>
+          </configuration-dependencies>
+          <requiredServices>
+            <service>HDFS</service>
+            <service>YARN</service>
+            <service>HIVE</service>
+          </requiredServices>
+        </service>
+    </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/main/resources/stacks/HDP/2.2/services/SPARK/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/SPARK/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/SPARK/metainfo.xml
index c0b43c2..8f2ca38 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/SPARK/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/SPARK/metainfo.xml
@@ -23,38 +23,8 @@
   <services>
     <service>
       <name>SPARK</name>
-      <extends>common-services/SPARK/1.2.0.2.2</extends>
+      <extends>common-services/SPARK/1.2.1</extends>
       <version>1.2.1.2.2</version>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>redhat7,amazon2015,redhat6,suse11,suse12</osFamily>
-          <packages>
-            <package>
-              <name>spark_${stack_version}</name>
-            </package>
-            <package>
-              <name>spark_${stack_version}-python</name>
-            </package>
-          </packages>
-        </osSpecific>
-        <osSpecific>
-          <osFamily>debian7,ubuntu12,ubuntu14</osFamily>
-          <packages>
-            <package>
-              <name>spark-${stack_version}</name>
-            </package>
-            <package>
-              <name>spark-${stack_version}-python</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <quickLinksConfigurations>
-        <quickLinksConfiguration>
-          <fileName>quicklinks.json</fileName>
-          <default>true</default>
-        </quickLinksConfiguration>
-      </quickLinksConfigurations>
     </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/main/resources/stacks/HDP/2.2/services/SPARK/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/SPARK/quicklinks/quicklinks.json
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/SPARK/quicklinks/quicklinks.json
deleted file mode 100644
index 685665a..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/SPARK/quicklinks/quicklinks.json
+++ /dev/null
@@ -1,27 +0,0 @@
-{
-  "name": "default",
-  "description": "default quick links configuration",
-  "configuration": {
-    "protocol":
-    {
-      "type":"HTTP_ONLY"
-    },
-
-    "links": [
-      {
-        "name": "spark_history_server_ui",
-        "label": "Spark History Server UI",
-        "requires_user_name": "false",
-        "url": "%@://%@:%@",
-        "port":{
-          "http_property": "spark.history.ui.port",
-          "http_default_port": "18080",
-          "https_property": "spark.history.ui.port",
-          "https_default_port": "18080",
-          "regex": "^(\\d+)$",
-          "site": "spark-defaults"
-        }
-      }
-    ]
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/configuration/spark-hive-site-override.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/configuration/spark-hive-site-override.xml
b/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/configuration/spark-hive-site-override.xml
deleted file mode 100644
index 7472908..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/configuration/spark-hive-site-override.xml
+++ /dev/null
@@ -1,55 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements. See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License. You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<configuration supports_final="true">
-  <property>
-    <name>hive.server2.enable.doAs</name>
-    <value>false</value>
-    <description>
-      Disable impersonation in Hive Server 2.
-    </description>
-  </property>
-  <property>
-    <name>hive.metastore.client.socket.timeout</name>
-    <value>1800</value>
-    <description>MetaStore Client socket timeout in seconds</description>
-  </property>
-  <property>
-  	<name>hive.metastore.client.connect.retry.delay</name>
-  	<value>5</value>
-    <description>
-      Expects a time value - number of seconds for the client to wait between consecutive connection attempts
-    </description>  	
-  </property>
-  <property>
-    <name>hive.server2.thrift.port</name>
-    <value>10015</value>
-    <description>
-      TCP port number to listen on, default 10015.
-    </description>
-  </property>
-  <property>
-    <name>hive.server2.transport.mode</name>
-    <value>binary</value>
-    <description>
-      Expects one of [binary, http].
-      Transport mode of HiveServer2.
-    </description>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/configuration/spark-thrift-sparkconf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/configuration/spark-thrift-sparkconf.xml
b/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/configuration/spark-thrift-sparkconf.xml
deleted file mode 100644
index b5742ea..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/configuration/spark-thrift-sparkconf.xml
+++ /dev/null
@@ -1,100 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>spark.yarn.executor.memoryOverhead</name>
-    <value>384</value>
-    <description>
-      The amount of off heap memory (in megabytes) to be allocated per executor.
-      This is memory that accounts for things like VM overheads, interned strings,
-      other native overheads, etc.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.driver.memoryOverhead</name>
-    <value>384</value>
-    <description>
-      The amount of off heap memory (in megabytes) to be allocated per driver.
-      This is memory that accounts for things like VM overheads, interned strings,
-      other native overheads, etc.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.scheduler.heartbeat.interval-ms</name>
-    <value>5000</value>
-    <description>
-      The interval in ms in which the Spark application master heartbeats into the YARN ResourceManager.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.max.executor.failures</name>
-    <value>3</value>
-    <description>
-      The maximum number of executor failures before failing the application.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.queue</name>
-    <value>default</value>
-    <description>
-      The name of the YARN queue to which the application is submitted.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.containerLauncherMaxThreads</name>
-    <value>25</value>
-    <description>
-      The maximum number of threads to use in the application master for launching executor containers.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.submit.file.replication</name>
-    <value>3</value>
-    <description>
-      HDFS replication level for the files uploaded into HDFS for the application.
-      These include things like the Spark jar, the app jar, and any distributed cache files/archives.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.preserve.staging.files</name>
-    <value>false</value>
-    <description>
-      Set to true to preserve the staged files (Spark jar, app jar, distributed cache files) at the
-      end of the job rather then delete them.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.max.executor.failures</name>
-    <value>3</value>
-    <description>The maximum number of executor failures before failing the application.</description>
-  </property>
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/metainfo.xml
index bb3b6ce..29272a9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/metainfo.xml
@@ -24,25 +24,7 @@
         <service>
           <name>SPARK</name>
           <version>1.5.2.2.3</version>
-          <extends>common-services/SPARK/1.4.1.2.3</extends>
-          <requiredServices>
-            <service>YARN</service>
-          </requiredServices>
-          <!-- No new components compared to 1.4.1 -->
-          <configuration-dependencies>
-            <config-type>spark-defaults</config-type>
-            <config-type>spark-env</config-type>
-            <config-type>spark-log4j-properties</config-type>
-            <config-type>spark-metrics-properties</config-type>
-            <config-type>spark-thrift-sparkconf</config-type>
-            <config-type>spark-hive-site-override</config-type>
-          </configuration-dependencies>
-	      <quickLinksConfigurations>
-	          <quickLinksConfiguration>
-	          <fileName>quicklinks.json</fileName>
-	          <default>true</default>
-	        </quickLinksConfiguration>
-	      </quickLinksConfigurations>
+          <extends>common-services/SPARK/1.5.2</extends>
         </service>
     </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/quicklinks/quicklinks.json
b/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/quicklinks/quicklinks.json
deleted file mode 100644
index 685665a..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/quicklinks/quicklinks.json
+++ /dev/null
@@ -1,27 +0,0 @@
-{
-  "name": "default",
-  "description": "default quick links configuration",
-  "configuration": {
-    "protocol":
-    {
-      "type":"HTTP_ONLY"
-    },
-
-    "links": [
-      {
-        "name": "spark_history_server_ui",
-        "label": "Spark History Server UI",
-        "requires_user_name": "false",
-        "url": "%@://%@:%@",
-        "port":{
-          "http_property": "spark.history.ui.port",
-          "http_default_port": "18080",
-          "https_property": "spark.history.ui.port",
-          "https_default_port": "18080",
-          "regex": "^(\\d+)$",
-          "site": "spark-defaults"
-        }
-      }
-    ]
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
deleted file mode 100644
index 1a6552f..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-defaults.xml
+++ /dev/null
@@ -1,53 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_final="true">
-    <property>
-        <name>spark.yarn.services</name>
-        <deleted>true</deleted>
-    </property>
-    <property>
-        <name>spark.history.provider</name>
-        <value>org.apache.spark.deploy.history.FsHistoryProvider</value>
-        <description>Name of history provider class</description>
-    </property>
-    <property>
-        <name>spark.history.fs.logDirectory</name>
-        <value>hdfs:///spark-history</value>
-        <description>
-            Base directory for history spark application log.
-        </description>
-    </property>
-    <property>
-        <name>spark.eventLog.enabled</name>
-        <value>true</value>
-        <description>
-            Whether to log Spark events, useful for reconstructing the Web UI after the application has finished.
-        </description>
-    </property>
-    <property>
-        <name>spark.eventLog.dir</name>
-        <value>hdfs:///spark-history</value>
-        <description>
-            Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
-        </description>
-    </property>
-</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-fairscheduler.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-fairscheduler.xml
b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-fairscheduler.xml
deleted file mode 100644
index 2dda4bb..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-fairscheduler.xml
+++ /dev/null
@@ -1,37 +0,0 @@
-<?xml version="1.0"?>
-
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<configuration supports_final="true">
-    <property>
-        <name>fairscheduler_content</name>
-        <description>This is the jinja template for spark-thrift-fairscheduler.xml file.</description>
-        <value>&lt;?xml version="1.0"?&gt;
-            &lt;allocations&gt;
-            &lt;pool name="default"&gt;
-            &lt;schedulingMode&gt;FAIR&lt;/schedulingMode&gt;
-            &lt;weight&gt;1&lt;/weight&gt;
-            &lt;minShare&gt;2&lt;/minShare&gt;
-            &lt;/pool&gt;
-            &lt;/allocations&gt;
-        </value>
-        <value-attributes>
-            <type>content</type>
-        </value-attributes>
-    </property>
-</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
deleted file mode 100644
index 4f2563b..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/configuration/spark-thrift-sparkconf.xml
+++ /dev/null
@@ -1,193 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>spark.yarn.executor.memoryOverhead</name>
-    <deleted>true</deleted>
-  </property>
-
-  <property>
-    <name>spark.yarn.driver.memoryOverhead</name>
-    <deleted>true</deleted>
-  </property>
-
-  <property>
-    <name>spark.yarn.scheduler.heartbeat.interval-ms</name>
-    <deleted>true</deleted>
-  </property>
-
-  <property>
-    <name>spark.yarn.max.executor.failures</name>
-    <deleted>true</deleted>
-  </property>
-
-  <property>
-    <name>spark.yarn.containerLauncherMaxThreads</name>
-    <deleted>true</deleted>
-  </property>
-
-  <property>
-    <name>spark.yarn.submit.file.replication</name>
-    <deleted>true</deleted>
-  </property>
-
-  <property>
-    <name>spark.yarn.preserve.staging.files</name>
-    <deleted>true</deleted>
-  </property>
-
-  <property>
-    <name>spark.yarn.max.executor.failures</name>
-    <deleted>true</deleted>
-  </property>
-
-  <property>
-    <name>spark.yarn.services</name>
-    <deleted>true</deleted>
-  </property>
-
-  <property>
-    <name>spark.history.provider</name>
-    <value>org.apache.spark.deploy.history.FsHistoryProvider</value>
-    <description>Name of history provider class</description>
-  </property>
-
-  <property>
-    <name>spark.history.fs.logDirectory</name>
-    <value>{{spark_history_dir}}</value>
-    <final>true</final>
-    <description>
-      Base directory for history spark application log. It is the same value
-      as in spark-defaults.xml.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.eventLog.enabled</name>
-    <value>true</value>
-    <final>true</final>
-    <description>
-      Whether to log Spark events, useful for reconstructing the Web UI after the application
has finished.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.eventLog.dir</name>
-    <value>{{spark_history_dir}}</value>
-    <final>true</final>
-    <description>
-      Base directory in which Spark events are logged, if spark.eventLog.enabled is true.
It is the same value
-      as in spark-defaults.xml.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.master</name>
-    <value>{{spark_thrift_master}}</value>
-    <description>
-      The deploying mode of spark application, by default it is yarn-client for thrift-server
but local mode for there's
-      only one nodemanager.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.scheduler.allocation.file</name>
-    <value>{{spark_conf}}/spark-thrift-fairscheduler.xml</value>
-    <description>
-      Scheduler configuration file for thriftserver.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.scheduler.mode</name>
-    <value>FAIR</value>
-    <description>
-      The scheduling mode between jobs submitted to the same SparkContext.
-    </description>
-  </property>
-  
-  <property>
-    <name>spark.shuffle.service.enabled</name>
-    <value>true</value>
-    <description>
-      Enables the external shuffle service.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.hadoop.cacheConf</name>
-    <value>false</value>
-    <description>
-      Specifies whether HadoopRDD caches the Hadoop configuration object
-    </description>
-  </property>
-
-  <property>
-    <name>spark.dynamicAllocation.enabled</name>
-    <value>true</value>
-    <description>
-      Whether to use dynamic resource allocation, which scales the number of executors registered
with this application up and down based on the workload.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.dynamicAllocation.initialExecutors</name>
-    <value>0</value>
-    <description>
-      Initial number of executors to run if dynamic allocation is enabled.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.dynamicAllocation.maxExecutors</name>
-    <value>10</value>
-    <description>
-      Upper bound for the number of executors if dynamic allocation is enabled.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.dynamicAllocation.minExecutors</name>
-    <value>0</value>
-    <description>
-      Lower bound for the number of executors if dynamic allocation is enabled.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.am.memory</name>
-    <value>512m</value>
-    <description>
-      Amount of memory to use for the YARN Application Master in client mode.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.executor.memory</name>
-    <value>1g</value>
-    <description>
-      Amount of memory to use per executor process.
-    </description>
-  </property>
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/metainfo.xml
index a5a19f6..743a75a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/SPARK/metainfo.xml
@@ -23,21 +23,8 @@
     <services>
         <service>
           <name>SPARK</name>
+          <extends>common-services/SPARK/1.6.0</extends>
           <version>1.6.x.2.4</version>
-          <configuration-dependencies>
-            <config-type>spark-defaults</config-type>
-            <config-type>spark-env</config-type>
-            <config-type>spark-log4j-properties</config-type>
-            <config-type>spark-metrics-properties</config-type>
-            <config-type>spark-thrift-sparkconf</config-type>
-            <config-type>spark-hive-site-override</config-type>
-            <config-type>spark-thrift-fairscheduler</config-type>
-          </configuration-dependencies>
-          <requiredServices>
-            <service>HDFS</service>
-            <service>YARN</service>
-            <service>HIVE</service>
-          </requiredServices>
         </service>
     </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml
index 107ca93..d1129cd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml
@@ -23,7 +23,7 @@
   <services>
     <service>
       <name>SPARK</name>
-      <version>1.6.0.2.5</version>
+      <version>1.6.x.2.5</version>
     </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/test/java/org/apache/ambari/server/stack/KerberosDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/KerberosDescriptorTest.java
b/ambari-server/src/test/java/org/apache/ambari/server/stack/KerberosDescriptorTest.java
index 6bcc671..764118c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stack/KerberosDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/KerberosDescriptorTest.java
@@ -151,7 +151,7 @@ public class KerberosDescriptorTest {
 
   @Test
   public void testCommonSparkServiceDescriptor() throws IOException {
-    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "SPARK",
"1.2.0.2.2");
+    KerberosDescriptor descriptor = getKerberosDescriptor(commonServicesDirectory, "SPARK",
"1.2.1");
     Assert.notNull(descriptor);
     Assert.notNull(descriptor.getServices());
     Assert.notNull(descriptor.getService("SPARK"));

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
index cac8bf7..d7a2c84 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
@@ -26,7 +26,7 @@ from only_for_platform import not_for_platform, PLATFORM_WINDOWS
 @not_for_platform(PLATFORM_WINDOWS)
 @patch("resource_management.libraries.functions.get_stack_version", new=MagicMock(return_value="2.3.0.0-1597"))
 class TestJobHistoryServer(RMFTestCase):
-  COMMON_SERVICES_PACKAGE_DIR = "SPARK/1.2.0.2.2/package"
+  COMMON_SERVICES_PACKAGE_DIR = "SPARK/1.2.1/package"
   STACK_VERSION = "2.2"
   DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done',
'/app-logs', '/tmp']
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
index 68b0f26..98f09f6 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
@@ -26,7 +26,7 @@ from only_for_platform import not_for_platform, PLATFORM_WINDOWS
 @not_for_platform(PLATFORM_WINDOWS)
 @patch("resource_management.libraries.functions.get_stack_version", new=MagicMock(return_value="2.3.0.0-1597"))
 class TestSparkClient(RMFTestCase):
-  COMMON_SERVICES_PACKAGE_DIR = "SPARK/1.2.0.2.2/package"
+  COMMON_SERVICES_PACKAGE_DIR = "SPARK/1.2.1/package"
   STACK_VERSION = "2.2"
 
   def test_configure_default(self):

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_service_check.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_service_check.py
index 0987f7c..bcb0d21 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_service_check.py
@@ -25,7 +25,7 @@ from only_for_platform import not_for_platform, PLATFORM_WINDOWS
 @not_for_platform(PLATFORM_WINDOWS)
 @patch("resource_management.libraries.functions.get_stack_version", new=MagicMock(return_value="2.3.0.0-1597"))
 class TestServiceCheck(RMFTestCase):
-  COMMON_SERVICES_PACKAGE_DIR = "SPARK/1.2.0.2.2/package"
+  COMMON_SERVICES_PACKAGE_DIR = "SPARK/1.2.1/package"
   STACK_VERSION = "2.2"
 
   def test_service_check_default(self):
@@ -60,4 +60,4 @@ class TestServiceCheck(RMFTestCase):
         try_sleep = 3,
         logoutput = True
     )
-    self.assertNoMoreResources()
\ No newline at end of file
+    self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/test/python/stacks/2.2/configs/spark-job-history-server.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/spark-job-history-server.json
b/ambari-server/src/test/python/stacks/2.2/configs/spark-job-history-server.json
index a187d59..5769244 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/spark-job-history-server.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/spark-job-history-server.json
@@ -14,7 +14,7 @@
         "cluster-env": {}
     }, 
     "commandParams": {
-        "service_package_folder": "common-services/SPARK/1.2.0.2.2/package", 
+        "service_package_folder": "common-services/SPARK/1.2.1/package", 
         "script": "scripts/job_history_server.py", 
         "hooks_folder": "HDP/2.0.6/hooks", 
         "version": "2.2.2.0-2538", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/bd6eecce/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
index 674f30d..c00e18b 100644
--- a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
+++ b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
@@ -26,7 +26,7 @@ from only_for_platform import not_for_platform, PLATFORM_WINDOWS
 @not_for_platform(PLATFORM_WINDOWS)
 @patch("resource_management.libraries.functions.get_stack_version", new=MagicMock(return_value="2.3.2.0-1597"))
 class TestSparkThriftServer(RMFTestCase):
-  COMMON_SERVICES_PACKAGE_DIR = "SPARK/1.2.0.2.2/package"
+  COMMON_SERVICES_PACKAGE_DIR = "SPARK/1.2.1/package"
   STACK_VERSION = "2.3"
   DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done',
'/app-logs', '/tmp']
 


Mime
View raw message