ambari-commits mailing list archives

From: maha...@apache.org
Subject: [06/51] [partial] ambari git commit: Revert "[RTC 136620]: Introduce BigInsights stacks on Ambari 2.4 branch"
Date: Wed, 17 Aug 2016 05:40:55 GMT
http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/configuration/solr-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/configuration/solr-log4j.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/configuration/solr-log4j.xml
deleted file mode 100644
index 0496b0d..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/configuration/solr-log4j.xml
+++ /dev/null
@@ -1,82 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_final="false">
-
-  <property>
-    <name>content</name>
-    <description>Custom log4j.properties</description>
-    <value>
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-#
-
-#
-# Solr Logging Configuration
-#
-
-#  Logging level
-solr.log=${solr.log4j.dir}
-log4j.rootLogger=INFO, file, CONSOLE
-
-log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
-
-log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
-log4j.appender.CONSOLE.layout.ConversionPattern=%-4r [%t] %-5p %c %x [%X{collection} %X{shard} %X{replica} %X{core}] \u2013 %m%n
-
-#- size rotation with log cleanup.
-log4j.appender.file=org.apache.log4j.RollingFileAppender
-log4j.appender.file.MaxFileSize=4MB
-log4j.appender.file.MaxBackupIndex=9
-
-#- File to log to and log format
-log4j.appender.file.File=${solr.log}/solr.log
-log4j.appender.file.layout=org.apache.log4j.PatternLayout
-log4j.appender.file.layout.ConversionPattern=%-5p - %d{yyyy-MM-dd HH:mm:ss.SSS}; [%X{collection} %X{shard} %X{replica} %X{core}] %C; %m\n
-
-log4j.logger.org.apache.zookeeper=WARN
-log4j.logger.org.apache.hadoop=WARN
-
-# set to INFO to enable infostream log messages
-log4j.logger.org.apache.solr.update.LoggingInfoStream=OFF
-    </value>
-  </property>
-
-</configuration>
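For reference, Ambari materializes the `content` property above verbatim as log4j.properties; solr.py later in this commit does this via File(format("{solr_conf_dir}/log4j.properties"), content=params.log4j_props). A minimal plain-Python sketch of that step, with an assumed input file and target path:

    # Sketch only (Python 2, no Ambari imports); both paths are illustrative.
    # 'content' stands in for config['configurations']['solr-log4j']['content'].
    content = open('solr-log4j-content.txt').read()
    with open('/usr/iop/current/solr-server/conf/log4j.properties', 'w') as f:
        f.write(content)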

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/configuration/solr-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/configuration/solr-site.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/configuration/solr-site.xml
deleted file mode 100644
index e36e633..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/configuration/solr-site.xml
+++ /dev/null
@@ -1,44 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="true">
-  <property>
-    <name>solr.hdfs.security.kerberos.enabled</name>
-    <value>false</value>
-    <description>Set to true to enable Kerberos authentication</description>
-  </property>
-
-  <property>
-    <name>solr.hdfs.security.kerberos.keytabfile</name>
-    <value>/etc/security/keytabs/solr.service.keytab</value>
-    <description>A keytab file contains pairs of Kerberos principals and encrypted keys that allow password-less authentication when Solr authenticates with secure Hadoop.
-    This file must be present on all Solr servers at the path given in this parameter.
-    </description>
-  </property>
-
-  <property>
-    <name>solr.hdfs.security.kerberos.principal</name>
-    <value>solr/_HOST@EXAMPLE.COM</value>
-    <description>The Kerberos principal that Solr should use to authenticate to secure Hadoop; the format of a typical Kerberos V5 principal is: primary/instance@realm
-    </description>
-  </property>
-
-</configuration>
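The _HOST placeholder in the principal above is expanded to the local hostname on each Solr server. A one-line sketch of the substitution, mirroring the replace() call in params.py further down (the hostname source here is illustrative):

    # Sketch (Python 2): per-host expansion of the Kerberos principal.
    import socket
    principal = 'solr/_HOST@EXAMPLE.COM'
    hostname = socket.getfqdn()                    # e.g. node1.example.com
    print principal.replace('_HOST', hostname)     # solr/node1.example.com@EXAMPLE.COM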

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/kerberos.json b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/kerberos.json
deleted file mode 100644
index 8e9a5e2..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/kerberos.json
+++ /dev/null
@@ -1,53 +0,0 @@
-{
-  "services": [
-    {
-      "name": "SOLR",
-      "identities": [
-        {
-          "name": "/spnego"
-        },
-        {
-          "name": "/smokeuser"
-        },
-        {
-          "name": "/hdfs"
-        }
-      ],
-      "configurations": [
-        {
-          "solr-site": {
-              "solr.hdfs.security.kerberos.enabled":"true"
-          }
-        }
-      ],
-      "components": [
-        {
-          "name": "SOLR",
-          "identities": [
-            {
-              "name": "solr",
-              "principal": {
-                "value": "${solr-env/solr_user}/_HOST@${realm}",
-                "type": "service",
-                "configuration": "solr-site/solr.hdfs.security.kerberos.principal",
-                "local_username": "${solr-env/solr_user}"
-              },
-              "keytab": {
-                "file": "${keytab_dir}/solr.service.keytab",
-                "owner": {
-                  "name": "${solr-env/solr_user}",
-                  "access": "r"
-                },
-                "group": {
-                  "name": "${cluster-env/user_group}",
-                  "access": ""
-                },
-                "configuration": "solr-site/solr.hdfs.security.kerberos.keytabfile"
-              }
-            }
-          ]
-        }
-      ]
-    }
-  ]
-}
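The ${config-type/property} references above (e.g. ${solr-env/solr_user}, ${realm}) are resolved against the cluster configurations when the identities are materialized. A toy resolver, purely illustrative and not Ambari's actual implementation:

    # Sketch (Python 2): resolving ${config-type/property} and ${realm}.
    import re
    configs = {'solr-env': {'solr_user': 'solr'}}  # toy data
    def resolve(value, configs, realm):
        value = value.replace('${realm}', realm)
        return re.sub(r'\$\{([^}/]+)/([^}]+)\}',
                      lambda m: configs[m.group(1)][m.group(2)], value)
    print resolve('${solr-env/solr_user}/_HOST@${realm}', configs, 'EXAMPLE.COM')
    # -> solr/_HOST@EXAMPLE.COM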

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/metainfo.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/metainfo.xml
deleted file mode 100644
index 5811c00..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/metainfo.xml
+++ /dev/null
@@ -1,82 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>SOLR</name>
-      <displayName>Solr</displayName>
-      <comment>Solr is the popular, blazing-fast, open source enterprise search platform from the Apache Lucene project
-      </comment>
-      <version>5.5.0</version>
-
-      <components>
-        <component>
-          <name>SOLR</name>
-          <displayName>Solr</displayName>
-          <category>MASTER</category>
-          <cardinality>1+</cardinality>
-          <commandScript>
-            <script>scripts/solr_server.py</script>
-            <scriptType>PYTHON</scriptType>
-	    <timeout>600</timeout>
-          </commandScript>
-          <configFiles>
-            <configFile>
-              <type>xml</type>
-              <fileName>solr-site.xml</fileName>
-              <dictionaryName>solr-site</dictionaryName>
-            </configFile>
-          </configFiles>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>solr_4_2_*</name>
-            </package>
-            <package>
-              <name>titan_4_2_*</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-
-      <requiredServices>		
-        <service>HDFS</service>
-      </requiredServices>
-      
-      <configuration-dependencies>
-        <config-type>solr-log4j</config-type>
-        <config-type>solr-env</config-type>
-        <config-type>solr-site</config-type>
-        <config-type>titan-hbase-solr</config-type>
-        <restartRequiredAfterChange>true</restartRequiredAfterChange>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/metainfo.xml~
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/metainfo.xml~ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/metainfo.xml~
deleted file mode 100644
index e52e36b..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/metainfo.xml~
+++ /dev/null
@@ -1,82 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>SOLR</name>
-      <displayName>Solr</displayName>
-      <comment>Solr is the popular, blazing-fast, open source enterprise search platform from the Apache Lucene project
-      </comment>
-      <version>5.5.0.4.2</version>
-
-      <components>
-        <component>
-          <name>SOLR</name>
-          <displayName>Solr</displayName>
-          <category>MASTER</category>
-          <cardinality>1+</cardinality>
-          <commandScript>
-            <script>scripts/solr_server.py</script>
-            <scriptType>PYTHON</scriptType>
-	    <timeout>600</timeout>
-          </commandScript>
-          <configFiles>
-            <configFile>
-              <type>xml</type>
-              <fileName>solr-site.xml</fileName>
-              <dictionaryName>solr-site</dictionaryName>
-            </configFile>
-          </configFiles>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>solr_4_2_*</name>
-            </package>
-            <package>
-              <name>titan_4_2_*</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-
-      <requiredServices>		
-        <service>HDFS</service>
-      </requiredServices>
-      
-      <configuration-dependencies>
-        <config-type>solr-log4j</config-type>
-        <config-type>solr-env</config-type>
-        <config-type>solr-site</config-type>
-        <config-type>titan-hbase-solr</config-type>
-        <restartRequiredAfterChange>true</restartRequiredAfterChange>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/__init__.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/__init__.py
deleted file mode 100755
index 5561e10..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/params.py
deleted file mode 100644
index 36c27c0..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/params.py
+++ /dev/null
@@ -1,156 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
-from resource_management.libraries.resources import HdfsResource
-import status_params
-
-# server configurations
-config = Script.get_config()
-stack_name = default("/hostLevelParams/stack_name", None)
-
-zookeeper_hosts = config['clusterHostInfo']['zookeeper_hosts']
-zookeeper_hosts.sort()
-zookeeper_hosts_list=','.join(zookeeper_hosts)
-
-java64_home = config['hostLevelParams']['java_home']
-
-# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade.
-# This is the version being upgraded/downgraded to.
-# It cannot be used during the initial Cluster Install because the version is not yet known.
-version = default("/commandParams/version", None)
-
-# current host stack version
-current_version = default("/hostLevelParams/current_version", None)
-stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-iop_stack_version = format_hdp_stack_version(stack_version_unformatted)
-
-# Upgrade direction
-upgrade_direction = default("/commandParams/upgrade_direction", None)
-
-solr_user=config['configurations']['solr-env']['solr_user']
-user_group=config['configurations']['cluster-env']['user_group']
-hostname = config['hostname']
-solr_server_hosts = config['clusterHostInfo']['solr_hosts'] 
-solr_server_host = solr_server_hosts[0]
-
-fs_root = config['configurations']['core-site']['fs.defaultFS']
-
-solr_home = '/usr/iop/current/solr-server'
-solr_conf_dir='/usr/iop/current/solr-server/conf'
-cloud_scripts=solr_home+'/server/scripts/cloud-scripts'
-if (current_version is not None and compare_versions(format_hdp_stack_version(current_version), '4.2.0.0') >=0 ) or  compare_versions(iop_stack_version, '4.2.0.0')>= 0:
-  if upgrade_direction is not None and upgrade_direction == Direction.DOWNGRADE and version is not None and compare_versions(format_hdp_stack_version(version), '4.2.0.0') < 0:
-    solr_data_dir=default("/configurations/solr-env/solr_lib_dir", None)
-  else:
-    solr_data_dir=default("/configurations/solr-env/solr_data_dir", None)
-else: #IOP 4.1
-  if upgrade_direction is not None and upgrade_direction == Direction.UPGRADE:
-    solr_data_dir=default("/configurations/solr-env/solr_data_dir", None)
-    lib_dir=default("/configurations/solr-env/solr_data_dir", None)
-    old_lib_dir=default("/configurations/solr-env/solr_lib_dir", None)
-  else:
-    solr_data_dir=default("/configurations/solr-env/solr_lib_dir", None)
-    lib_dir=default("/configurations/solr-env/solr_lib_dir", None)
-log_dir=config['configurations']['solr-env']['solr_log_dir']
-pid_dir=config['configurations']['solr-env']['solr_pid_dir']
-solr_port=config['configurations']['solr-env']['solr_port']
-
-zookeeper_chroot=config['configurations']['solr-env']['ZOOKEEPER_CHROOT']
-
-solr_xms_minmem = config['configurations']['solr-env']['solr_xms_minmem']
-solr_xmx_maxmem = config['configurations']['solr-env']['solr_xmx_maxmem']
-
-solr_site = dict(config['configurations']['solr-site'])
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-solr_principal = solr_site['solr.hdfs.security.kerberos.principal']
-
-if security_enabled:
-  solr_principal = solr_principal.replace('_HOST',hostname)
-  solr_site['solr.hdfs.security.kerberos.principal']=solr_principal
-
-#kerberos
-sole_kerberos_enabled=config['configurations']['solr-site']['solr.hdfs.security.kerberos.enabled']
-solr_keytab=config['configurations']['solr-site']['solr.hdfs.security.kerberos.keytabfile']
-
-#log4j.properties
-log4j_props = config['configurations']['solr-log4j']['content']
-
-solr_in_sh_template = config['configurations']['solr-env']['content']
-
-solr_pid_file = status_params.solr_pid_file
-
-solr_hdfs_home_dir = config['configurations']['solr-env']['solr_hdfs_home_dir']
-solr_hdfs_user_mode = 0775
-
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-hdfs_site = config['configurations']['hdfs-site']
-default_fs = config['configurations']['core-site']['fs.defaultFS']
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = get_kinit_path()
-
-# parameters for integration with Titan
-configuration_tags = config['configurationTags']
-
-# Integrate with Titan
-# parse the value of the property 'index.search.solr.configset' in titan-hbase-solr
-titan_solr_configset = 'titan'
-if ('titan-hbase-solr' in configuration_tags):
-    titan_hbase_solr_props = config['configurations']['titan-hbase-solr']['content']
-    prop_list = titan_hbase_solr_props.split('\n')
-    for prop in prop_list:
-      if (prop.find('index.search.solr.configset') > -1):
-         titan_solr_configset_prop = prop.split('=')
-         titan_solr_configset = titan_solr_configset_prop[1]
-
-titan_solr_conf_dir = format('/usr/iop/current/titan-client/conf/solr')
-solr_conf_trg_dir = format('/usr/iop/current/solr-server/server/solr/configsets')
-solr_solr_conf_dir = format('/usr/iop/current/solr-server/server/solr/configsets/solr')
-solr_titan_conf_dir = format('/usr/iop/current/solr-server/server/solr/configsets/{titan_solr_configset}')
-titan_solr_jar_file = format('/usr/iop/current/titan-client/lib/jts-1.13.jar')
-solr_jar_trg_file =  format('/usr/iop/current/solr-server/server/solr-webapp/webapp/WEB-INF/lib/jts-1.13.jar')
-solr_conf_trg_file = format('/usr/iop/current/solr-server/server/solr/configsets/{titan_solr_configset}/solrconfig.xml')
-
-import functools
-#create a partial function with common arguments for every HdfsResource call;
-#to create an HDFS directory, code calls params.HdfsResource
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir,
-  principal_name = hdfs_principal_name,
-  hdfs_site = hdfs_site,
-  default_fs = default_fs
-)
-
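The functools.partial at the end means every subsequent HdfsResource call inherits the common HDFS and Kerberos arguments, so callers pass only what varies. For example, solr.py later in this commit uses it as:

    # From solr.py below -- only the per-call arguments appear:
    params.HdfsResource(params.solr_hdfs_home_dir,
                        type="directory",
                        action="create_on_execute",
                        owner=params.solr_user,
                        mode=params.solr_hdfs_user_mode)
    params.HdfsResource(None, action="execute")  # flush the queued operations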

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/service_check.py
deleted file mode 100755
index 9f6edfb..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/service_check.py
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-
-from resource_management import *
-from resource_management.libraries.functions.validate import call_and_match_output
-import subprocess
-import time
-
-class SolrServiceCheck(Script):
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-
-    command = "curl"
-    httpGssnegotiate = "--negotiate"
-    userpswd = "-u:"
-    insecure = "-k"
-    silent = "-s"
-    out = "-o /dev/null"
-    head = "-w'%{http_code}'"
-    url = "http://" + params.solr_server_host + ":" + str(params.solr_port) + "/solr/"
-    url_server_check = url + '#/'
-
-    command_with_flags = [command, silent, out, head, httpGssnegotiate, userpswd, insecure, url_server_check]
-
-    is_running = False
-    for i in range(1,11):
-      proc = subprocess.Popen(command_with_flags, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-      Logger.info("Try %d, command: %s" % (i, " ".join(command_with_flags)))
-      (stdout, stderr) = proc.communicate()
-      response = stdout
-      if '200' in response:
-        is_running = True
-        Logger.info('Solr Server up and running')
-        break
-      Logger.info("Response: %s" % str(response))
-      time.sleep(5)
-
-    if not is_running:
-      Logger.info('Solr Server not running.')
-      raise ComponentIsNotRunning()
-
-    if params.security_enabled:
-        kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
-        Execute(kinit_cmd,
-                user = params.smokeuser,
-                logoutput = True
-        )
-
-    create_collection_cmd = format("SOLR_INCLUDE={solr_conf_dir}/solr.in.sh solr create -c smokeuser_ExampleCollection -s 2 -d data_driven_schema_configs")
-    create_collection_output = "success"
-    create_collection_exists_output = "Collection 'smokeuser_ExampleCollection' already exists!"
-
-    Logger.info("Creating solr collection from example: %s" % create_collection_cmd)
-    call_and_match_output(create_collection_cmd, format("({create_collection_output})|({create_collection_exists_output})"), "Failed to create collection")
-
-    list_collection_cmd = "curl " + url + "admin/collections?action=list"
-    list_collection_output = "<str>smokeuser_ExampleCollection</str>"
-    Logger.info("List Collections: %s" % list_collection_cmd)
-    call_and_match_output(list_collection_cmd, format("({list_collection_output})"), "Failed to find collection \"smokeuser_ExampleCollection\" in the collection list")
-
-    delete_collection_cmd = format("SOLR_INCLUDE={solr_conf_dir}/solr.in.sh solr delete -c smokeuser_ExampleCollection")
-
-    Logger.info("Deleting solr collection : %s" % delete_collection_cmd)
-
-    Execute(delete_collection_cmd,
-      user = params.solr_user,
-      logoutput=True
-    )
-
-
-if __name__ == "__main__":
-  SolrServiceCheck().execute()
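The service check shells out to curl up to ten times with a five-second pause between attempts. Setting aside the SPNEGO negotiation that the curl flags add, the same probe can be sketched with Python 2's stdlib (host and port are illustrative; the real script reads params.solr_server_host and params.solr_port):

    # Sketch only: bare HTTP status probe of the Solr admin UI.
    import httplib
    conn = httplib.HTTPConnection('solr-host.example.com', 8983, timeout=10)
    conn.request('GET', '/solr/')
    print conn.getresponse().status  # 200 once Solr is up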

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr.py
deleted file mode 100644
index 175f92d..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import sys
-import os
-
-def solr(type = None, upgrade_type=None):
-  import params
-
-  if type == 'server':
-    effective_version = params.iop_stack_version if upgrade_type is None else format_hdp_stack_version(params.version)
-
-    params.HdfsResource(params.solr_hdfs_home_dir,
-                         type="directory",
-                         action="create_on_execute",
-                         owner=params.solr_user,
-                         mode=params.solr_hdfs_user_mode
-    )
-    params.HdfsResource(None, action="execute")
-
-    Directory([params.log_dir,params.pid_dir,params.solr_conf_dir,params.solr_data_dir],
-              mode=0755,
-              cd_access='a',
-              owner=params.solr_user,
-              recursive=True,
-              group=params.user_group
-      )
-
-    XmlConfig("solr-site.xml",
-              conf_dir=params.solr_conf_dir,
-              configurations=params.solr_site,
-              configuration_attributes=params.config['configuration_attributes']['solr-site'],
-              owner=params.solr_user,
-              group=params.user_group,
-              mode=0644
-    )
-
-    File(format("{solr_conf_dir}/solr.in.sh"),
-         content=InlineTemplate(params.solr_in_sh_template),
-         owner=params.solr_user,
-         group=params.user_group
-    )
-
-    File(format("{solr_conf_dir}/log4j.properties"),
-           mode=0644,
-           group=params.user_group,
-           owner=params.solr_user,
-           content=params.log4j_props
-    )
-
-    if effective_version is not None and effective_version != "" and compare_versions(effective_version, '4.2.0.0') >= 0:
-      File(format("{solr_data_dir}/solr.xml"),
-           mode=0644,
-           group=params.user_group,
-           owner=params.solr_user,
-           content=Template("solr.xml.j2")
-      )
-    else:
-      Directory(format("{solr_data_dir}/data"),
-           owner=params.solr_user,
-           recursive=True,
-           group=params.user_group
-      )
-
-      File(format("{solr_data_dir}/data/solr.xml"),
-           mode=0644,
-           group=params.user_group,
-           owner=params.solr_user,
-           content=Template("solr.xml.j2")
-      )
-
-    # solr-webapp is a temp dir; it needs to be owned by solr so Solr can write temp files into it.
-    Directory(format("{solr_home}"),
-              owner=params.solr_user,
-              recursive=True,
-    )
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_client.py
deleted file mode 100755
index 27c8235..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_client.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-
-class SolrClient(Script):
-  def install(self, env):
-    self.install_packages(env)
-    
-
-  def configure(self, env):
-    print 'Configure the solr client'
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-if __name__ == "__main__":
-  SolrClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_server.py
deleted file mode 100644
index 984ace8..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_server.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import iop_select
-from solr_service import solr_service
-from solr import solr
-
-class SolrServer(Script):
-  def install(self, env):
-    self.install_packages(env)
-
-  def configure(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-    solr(type='server', upgrade_type=upgrade_type)
-
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    Logger.info("Executing Stack Upgrade pre-restart")
-    import params
-    env.set_params(params)
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '4.1.0.0') >= 0:
-      iop_select.select("solr-server", params.version)
-      conf_select.select(params.stack_name, "solr", params.version)
-
-  def start(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-    self.configure(env)
-    solr_service(action = 'start')
-
-  def stop(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-    solr_service(action = 'stop')
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    check_process_status(status_params.solr_pid_file)
-
-  def security_status(self, env):
-    import status_params
-    env.set_params(status_params)
-    if status_params.security_enabled:
-      props_value_check = {"solr.hdfs.security.kerberos.enabled":"true"}
-      props_empty_check = ["solr.hdfs.security.kerberos.keytabfile",
-                           "solr.hdfs.security.kerberos.principal"]
-      props_read_check = ["solr.hdfs.security.kerberos.keytabfile"]
-      solr_site_props = build_expectations('solr-site', props_value_check, props_empty_check, props_read_check)
-
-      solr_expectations = {}
-      solr_expectations.update(solr_site_props)
-
-      security_params = get_params_from_filesystem(status_params.solr_conf_dir,
-                                                   {'solr-site.xml': FILE_TYPE_XML})
-      result_issues = validate_security_config_properties(security_params,solr_expectations)
-
-      if not result_issues: # If all validations passed successfully
-        try:
-          if 'solr-site' not in security_params \
-            or 'solr.hdfs.security.kerberos.keytabfile' not in security_params['solr-site'] \
-            or 'solr.hdfs.security.kerberos.principal' not in security_params['solr-site']:
-            self.put_structured_out({"securityState": "UNSECURED"})
-            self.put_structured_out({"securityIssuesFound": "Keytab file or principal are not set property."})
-            return
-          cached_kinit_executor(status_params.kinit_path_local,
-                                status_params.solr_user,
-                                security_params['solr-site']['solr.hdfs.security.kerberos.keytabfile'],
-                                security_params['solr-site']['solr.hdfs.security.kerberos.principal'],
-                                status_params.hostname,
-                                status_params.tmp_dir)
-          self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-        except Exception as e:
-          self.put_structured_out({"securityState": "ERROR"})
-          self.put_structured_out({"securityStateErrorInfo": str(e)})
-      else:
-        issues = []
-        for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-        self.put_structured_out({"securityState": "UNSECURED"})
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
-
-
-if __name__ == "__main__":
-  SolrServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_service.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_service.py
deleted file mode 100644
index 65c1473..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_service.py
+++ /dev/null
@@ -1,71 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-import os
-from resource_management import *
-from resource_management.libraries.functions.validate import call_and_match_output
-
-def solr_service(action='start'):
-  import params
-  cmd = format("{solr_home}/bin/solr")
-
-  if action == 'start':
-
-    if params.security_enabled:
-      if params.solr_principal is None:
-        solr_principal_with_host = 'missing_principal'
-      else:
-        solr_principal_with_host = params.solr_principal.replace("_HOST", params.hostname)
-      kinit_cmd = format("{kinit_path_local} -kt {solr_keytab} {solr_principal_with_host};")
-      Execute(kinit_cmd,user=params.solr_user)
-
-    Execute ('echo "Creating znode" ' + params.zookeeper_chroot)
-    Execute (params.cloud_scripts + '/zkcli.sh -zkhost ' + params.zookeeper_hosts_list + ' -cmd makepath ' + params.zookeeper_chroot, user=params.solr_user, ignore_failures=True )
-
-    # copy titan directory and jar for titan and solr integration
-    if (('titan-env' in params.configuration_tags) and not (os.path.exists(params.solr_conf_trg_file))):
-            Execute(("cp", "-r", params.titan_solr_conf_dir, params.solr_conf_trg_dir), sudo = True)
-            Execute(("cp", params.titan_solr_jar_file, params.solr_jar_trg_file), sudo = True)
-            Execute(("chmod", "644", params.solr_jar_trg_file), sudo=True)
-            Execute(("mv", params.solr_solr_conf_dir, params.solr_titan_conf_dir), sudo = True)
-
-    daemon_cmd = format("SOLR_INCLUDE={solr_conf_dir}/solr.in.sh {cmd} start -c -V")
-    no_op_test = format("ls {solr_pid_file} >/dev/null 2>&1 && ps `cat {solr_pid_file}` >/dev/null 2>&1")
-    Execute(daemon_cmd,
-            not_if=no_op_test,
-            user=params.solr_user
-    )
-
-    # create collection for titan and solr integration
-    if (('titan-env' in params.configuration_tags) and (os.path.exists(params.solr_conf_trg_file))):
-        create_collection_cmd = format("SOLR_INCLUDE={solr_conf_dir}/solr.in.sh solr create -c {titan_solr_configset} -s 2 -d {titan_solr_configset}")
-        create_collection_output = "success"
-        create_collection_exists_output = format("Collection '{titan_solr_configset}' already exists!")
-        call_and_match_output(create_collection_cmd, format("({create_collection_output})|({create_collection_exists_output})"), "Failed to create collection")
-
-  elif action == 'stop':
-    daemon_cmd = format("export SOLR_PID_DIR=" + params.pid_dir + "; SOLR_INCLUDE={solr_conf_dir}/solr.in.sh {cmd} stop -all")
-    no_op_test = format("! ((`SOLR_INCLUDE={solr_conf_dir}/solr.in.sh {cmd} status |grep process |wc -l`))")
-    rm_pid = format("rm -f {solr_pid_file}")
-    Execute(daemon_cmd,
-            not_if=no_op_test,
-            user=params.solr_user
-    )
-    Execute(rm_pid)
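The no_op_test guarding the start command is a shell one-liner meaning "the pid file exists and that pid is still alive". The same predicate in plain Python 2, with an illustrative pid-file path:

    # Sketch: what the no_op_test checks before Execute() skips the start.
    import os
    def solr_already_running(pid_file='/var/run/solr/solr-8983.pid'):
        if not os.path.isfile(pid_file):
            return False
        pid = int(open(pid_file).read().strip())
        try:
            os.kill(pid, 0)  # signal 0 probes existence without killing
            return True
        except OSError:
            return False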

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_upgrade.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_upgrade.py
deleted file mode 100644
index f3eab39..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/solr_upgrade.py
+++ /dev/null
@@ -1,135 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-import os
-from resource_management import *
-from resource_management.core.logger import Logger
-from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import get_unique_id_and_date
-
-class SolrServerUpgrade(Script):
-  def pre_upgrade_conf41(self, env):
-    """
-    Creates the /etc/solr/4.1.0.0/0 directory, copies the Solr config files
-    into it, and creates the symlinks accordingly.
-
-    conf-select create-conf-dir --package solr --stack-version 4.1.0.0 --conf-version 0
-    cp -r /usr/iop/4.1.0.0/solr/conf/* /etc/solr/4.1.0.0/0/.
-    unlink or rm -r /usr/iop/4.1.0.0/solr/conf
-    ln -s /etc/solr/4.1.0.0/0 /usr/iop/4.1.0.0/solr/conf
-    conf-select set-conf-dir --package solr --stack-version 4.1.0.0 --conf-version 0
-    """
-    import params
-    env.set_params(params)
-
-    solr41_conf_dir="/usr/iop/4.1.0.0/solr/conf"
-    solr41_etc_dir="/etc/solr/4.1.0.0/0"
-    if not os.path.exists(solr41_etc_dir):
-      conf_select.create(params.stack_name, "solr", "4.1.0.0")
-
-    content_path=solr41_conf_dir
-    if not os.path.isfile("/usr/iop/4.1.0.0/solr/conf/solr.in.sh"):
-      content_path = "/etc/solr/conf.backup"
-
-    for each in os.listdir(content_path):
-      File(os.path.join(solr41_etc_dir, each),
-           owner=params.solr_user,
-           content = StaticFile(os.path.join(content_path, each)))
-
-    if not os.path.islink(solr41_conf_dir):
-      Directory(solr41_conf_dir,
-                action="delete",
-                recursive=True)
-
-    if os.path.islink(solr41_conf_dir):
-      os.unlink(solr41_conf_dir)
-
-    if not os.path.islink(solr41_conf_dir):
-      Link(solr41_conf_dir,
-           to=solr41_etc_dir
-      )
-
-    conf_select.select(params.stack_name, "solr", "4.1.0.0")
-
-  def pre_stop_backup_cores(self, env):
-    """
-    Backs up the Solr cores under Solr's home directory.
-    cp -r /var/lib/solr/data/* /tmp/solr/cores
-    """
-    import params
-    env.set_params(params)
-
-    if compare_versions(format_hdp_stack_version(params.current_version), '4.2.0.0') >= 0:
-      solr_home_dir=params.solr_data_dir
-    else: #4.1.0.0
-      solr_home_dir=params.old_lib_dir + "/data"
-
-    unique = get_unique_id_and_date()
-    backup_solr_dir="/tmp/upgrades/{0}/solr_{1}".format(params.current_version, unique)
-    backup_solr_cores="/tmp/solr/cores"
-
-    if os.path.isdir(solr_home_dir) and not os.path.isdir(backup_solr_dir):
-      os.makedirs(backup_solr_dir)
-      Execute(('cp', '-r', solr_home_dir+"/.", backup_solr_dir),
-              sudo=True
-      )
-
-    if params.upgrade_direction is not None and params.upgrade_direction == Direction.UPGRADE:
-      Directory(backup_solr_cores,
-                action="delete",
-                recursive=True)
-
-      Directory(backup_solr_cores,
-                mode=0755,
-                cd_access='a',
-                owner=params.solr_user,
-                recursive=True,
-                group=params.user_group
-      )
-
-      Execute(('cp', '-r', solr_home_dir+"/.", backup_solr_cores),
-              user=params.solr_user
-      )
-
-  def pre_start_migrate_cores(self, env):
-    """
-    Copy the Solr cores from the previous version to the new Solr home directory if solr_home is a different directory.
-    cp -r /tmp/solr/cores/* /opt/solr/data/.
-    """
-    import params
-    env.set_params(params)
-
-    if params.upgrade_direction is not None and params.upgrade_direction == Direction.UPGRADE:
-      backup_solr_cores="/tmp/solr/cores"
-      solr_home_dir=params.solr_data_dir
-
-      Directory(format(solr_home_dir),
-                owner=params.solr_user,
-                recursive=True,
-                group=params.user_group
-      )
-
-      if os.path.isdir(solr_home_dir) and os.path.isdir(backup_solr_cores):
-        Execute(('cp', '-rn', backup_solr_cores+"/.", solr_home_dir),
-                 user=params.solr_user,
-                 logoutput=True
-        )
-
-if __name__ == "__main__":
-  SolrServerUpgrade().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/status_params.py
deleted file mode 100644
index 5bd0000..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/scripts/status_params.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-config = Script.get_config()
-
-solr_user = config['configurations']['solr-env']['solr_user']
-hostname = config['hostname']
-kinit_path_local = functions.get_kinit_path()
-tmp_dir = Script.get_tmp_dir()
-solr_pid_dir = config['configurations']['solr-env']['solr_pid_dir']
-solr_port = config['configurations']['solr-env']['solr_port']
-solr_pid_file = format("{solr_pid_dir}/solr-{solr_port}.pid")

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/templates/solr.xml.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/templates/solr.xml.j2 b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/templates/solr.xml.j2
deleted file mode 100644
index e247452..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SOLR/package/templates/solr.xml.j2
+++ /dev/null
@@ -1,51 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<!--
-   This is an example of a simple "solr.xml" file for configuring one or 
-   more Solr Cores, as well as allowing Cores to be added, removed, and 
-   reloaded via HTTP requests.
-
-   More information about options available in this configuration file, 
-   and Solr Core administration can be found online:
-   http://wiki.apache.org/solr/CoreAdmin
--->
-
-<solr>
-
-  <solrcloud>
-
-    <str name="host">${host:}</str>
-    <int name="hostPort">${jetty.port:8983}</int>
-    <str name="hostContext">${hostContext:solr}</str>
-
-    <bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
-
-    <int name="zkClientTimeout">${zkClientTimeout:30000}</int>
-    <int name="distribUpdateSoTimeout">${distribUpdateSoTimeout:600000}</int>
-    <int name="distribUpdateConnTimeout">${distribUpdateConnTimeout:60000}</int>
-
-  </solrcloud>
-
-  <shardHandlerFactory name="shardHandlerFactory"
-    class="HttpShardHandlerFactory">
-    <int name="socketTimeout">${socketTimeout:600000}</int>
-    <int name="connTimeout">${connTimeout:60000}</int>
-  </shardHandlerFactory>
-
-</solr>

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/alerts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/alerts.json b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/alerts.json
deleted file mode 100644
index 0e38f16..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/alerts.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
-  "SPARK": {
-    "service": [],
-    "SPARK_JOBHISTORYSERVER": [
-      {
-        "name": "SPARK_JOBHISTORYSERVER_PROCESS",
-        "label": "Spark History Server",
-        "description": "This host-level alert is triggered if the Spark History Server cannot be determined to be up.",
-        "interval": 1,
-        "scope": "HOST",
-        "source": {
-          "type": "PORT",
-          "uri": "{{spark-defaults/spark.history.ui.port}}",
-          "default_port": 18080,
-          "reporting": {
-            "ok": {
-              "text": "TCP OK - {0:.3f}s response on port {1}"
-            },
-            "warning": {
-              "text": "TCP OK - {0:.3f}s response on port {1}",
-              "value": 1.5
-            },
-            "critical": {
-              "text": "Connection failed: {0} to {1}:{2}",
-              "value": 5
-            }
-          }
-        }
-      }
-    ]
-  }
-}
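A PORT-type alert reduces to timing a TCP connect and comparing the elapsed time against the warning (1.5 s) and critical (5 s) thresholds above. A minimal sketch with an illustrative hostname:

    # Sketch (Python 2): the measurement behind the PORT alert.
    import socket, time
    start = time.time()
    sock = socket.create_connection(('history-server.example.com', 18080), timeout=5)
    sock.close()
    print 'TCP OK - %.3fs response on port %d' % (time.time() - start, 18080)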

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-defaults.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-defaults.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-defaults.xml
deleted file mode 100644
index 369b3da..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-defaults.xml
+++ /dev/null
@@ -1,159 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>spark.eventLog.enabled</name>
-    <value>true</value>
-    <description>Whether to log Spark events, useful for reconstructing the Web UI after the application has finished.</description>
-  </property>
-
-  <property>
-    <name>spark.eventLog.dir</name>
-    <value>/iop/apps/4.2.0.0/spark/logs/history-server</value>
-    <description>Base directory in which Spark events are logged, if spark.eventLog.enabled is true. Within this base directory, Spark creates a sub-directory for each application, and logs the events specific to the application in this directory. Users may want to set this to a unified location like an HDFS directory so history files can be read by the history server.</description>
-  </property>
-
-  <property>
-    <name>spark.yarn.executor.memoryOverhead</name>
-    <value>384</value>
-    <description>
-      The amount of off-heap memory (in megabytes) to be allocated per executor.
-      This is memory that accounts for things like VM overheads, interned strings,
-      other native overheads, etc.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.driver.memoryOverhead</name>
-    <value>384</value>
-    <description>
-      The amount of off-heap memory (in megabytes) to be allocated per driver.
-      This is memory that accounts for things like VM overheads, interned strings,
-      other native overheads, etc.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.applicationMaster.waitTries</name>
-    <value>10</value>
-    <description>
-      Set the number of times the ApplicationMaster waits for the Spark master, and
-      also the number of tries it waits for the SparkContext to be initialized.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.scheduler.heartbeat.interval-ms</name>
-    <value>5000</value>
-    <description>
-      The interval in ms in which the Spark application master heartbeats into the YARN ResourceManager.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.max.executor.failures</name>
-    <value>3</value>
-    <description>
-      The maximum number of executor failures before failing the application.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.queue</name>
-    <value>default</value>
-    <description>
-      The name of the YARN queue to which the application is submitted.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.containerLauncherMaxThreads</name>
-    <value>25</value>
-    <description>
-      The maximum number of threads to use in the application master for launching executor containers.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.submit.file.replication</name>
-    <value>3</value>
-    <description>
-      HDFS replication level for the files uploaded into HDFS for the application.
-      These include things like the Spark jar, the app jar, and any distributed cache files/archives.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.yarn.preserve.staging.files</name>
-    <value>false</value>
-    <description>
-      Set to true to preserve the staged files (Spark jar, app jar, distributed cache files) at the 
-      end of the job rather than delete them.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.history.ui.port</name>
-    <value>18080</value>
-    <description>
-      The port to which the web interface of the History Server binds.
-    </description>
-  </property>
-  <property>
-    <name>spark.driver.extraJavaOptions</name>
-    <value>-Diop.version={{iop_full_version}}</value>
-    <description>
-      Specifies parameters that are passed to the JVM of the Spark driver.
-    </description>
-    <value-attributes>
-      <empty-value-valid>true</empty-value-valid>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>spark.yarn.am.extraJavaOptions</name>
-    <value>-Diop.version={{iop_full_version}}</value>
-    <description>
-      Specifies the parameters that are passed to the JVM of the Spark Application Master.
-    </description>
-    <value-attributes>
-      <empty-value-valid>true</empty-value-valid>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>spark.history.kerberos.principal</name>
-    <value>none</value>
-    <description>
-      Kerberos principal name for the Spark History Server.
-    </description>
-  </property>
-
-  <property>
-    <name>spark.history.kerberos.keytab</name>
-    <value>none</value>
-    <description>
-      Location of the kerberos keytab file for the Spark History Server.
-    </description>
-  </property>
-</configuration>
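
Each <property> above becomes one "key value" line in the rendered spark-defaults.conf, with {{...}} placeholders such as {{iop_full_version}} filled in at deploy time. A simplified sketch of that rendering, assuming an already-resolved context dict (the version string is illustrative):

def render_defaults(props, context):
    # Substitute {{name}} placeholders, then emit one 'key value' line per property.
    lines = []
    for key, value in sorted(props.items()):
        for name, actual in context.items():
            value = value.replace("{{%s}}" % name, actual)
        lines.append("%s %s" % (key, value))
    return "\n".join(lines)

props = {
    "spark.eventLog.enabled": "true",
    "spark.history.ui.port": "18080",
    "spark.driver.extraJavaOptions": "-Diop.version={{iop_full_version}}",
}
print(render_defaults(props, {"iop_full_version": "4.2.0.0"}))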

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-env.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-env.xml
deleted file mode 100644
index 9a40cfc..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-env.xml
+++ /dev/null
@@ -1,114 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration>
-
-  <property>
-    <name>spark_thriftserver_port</name>
-    <value>10015</value>
-    <description>
-      TCP port number to listen on, default 10015.
-    </description>
-  </property>
-
-  <property>
-    <name>spark_user</name>
-    <value>spark</value>
-    <property-type>USER</property-type>
-    <description>Spark User.</description>
-  </property>
-
-  <property>
-    <name>spark_group</name>
-    <value>spark</value>
-    <property-type>GROUP</property-type>
-    <description>Spark group.</description>
-  </property>
-
-  <property>
-    <name>spark_log_dir</name>
-    <value>/var/log/spark</value>
-    <description>Spark Log Dir</description>
-  </property>
-
-  <property>
-    <name>spark_pid_dir</name>
-    <value>/var/run/spark</value>
-  </property>
-  
-  <!-- spark-env.sh -->
-  <property>
-    <name>content</name>
-    <description>This is the jinja template for the spark-env.sh file</description>
-    <value>
-#!/usr/bin/env bash
-
-# This file is sourced when running various Spark programs.
-# Copy it as spark-env.sh and edit that to configure Spark for your site.
-
-# Options read in YARN client mode
-#SPARK_EXECUTOR_INSTANCES="2" #Number of workers to start (Default: 2)
-#SPARK_EXECUTOR_CORES="1" #Number of cores for the workers (Default: 1).
-#SPARK_EXECUTOR_MEMORY="1G" #Memory per Worker (e.g. 1000M, 2G) (Default: 1G)
-#SPARK_DRIVER_MEMORY="512M" #Memory for Master (e.g. 1000M, 2G) (Default: 512M)
-#SPARK_YARN_APP_NAME="spark" #The name of your application (Default: Spark)
-#SPARK_YARN_QUEUE="default" #The hadoop queue to use for allocation requests (Default: 'default')
-#SPARK_YARN_DIST_FILES="" #Comma separated list of files to be distributed with the job.
-#SPARK_YARN_DIST_ARCHIVES="" #Comma separated list of archives to be distributed with the job.
-
-# Generic options for the daemons used in the standalone deploy mode
-
-# Alternate conf dir. (Default: ${SPARK_HOME}/conf)
-export SPARK_CONF_DIR=${SPARK_HOME:-{{spark_home}}}/conf
-
-# Where log files are stored. (Default: ${SPARK_HOME}/logs)
-#export SPARK_LOG_DIR=${SPARK_HOME:-{{spark_home}}}/logs
-export SPARK_LOG_DIR={{spark_log_dir}}
-
-# Where the pid file is stored. (Default: /tmp)
-export SPARK_PID_DIR={{spark_pid_dir}}
-
-# A string representing this instance of Spark. (Default: $USER)
-SPARK_IDENT_STRING=$USER
-
-# The scheduling priority for daemons. (Default: 0)
-SPARK_NICENESS=0
-
-export SPARK_PUBLIC_DNS={{spark_history_server_host}}
-export SPARK_HISTORY_OPTS="-Dspark.history.ui.port={{spark_history_ui_port}} -Dspark.history.fs.logDirectory={{spark_eventlog_dir_default}}"
-export HIVE_SERVER2_THRIFT_BIND_HOST={{spark_thrift_server_host}}
-export HIVE_SERVER2_THRIFT_PORT={{spark_thriftserver_port}}
-
-export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
-export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}
-
-export JAVA_LIBRARY_PATH=$JAVA_LIBRARY_PATH:/usr/iop/current/hadoop-client/lib/native
-export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/iop/current/hadoop-client/lib/native
-export SPARK_YARN_USER_ENV="JAVA_LIBRARY_PATH=$JAVA_LIBRARY_PATH,LD_LIBRARY_PATH=$LD_LIBRARY_PATH"
-
-# The java implementation to use.
-export JAVA_HOME={{java_home}}
-
-</value>
-  </property>
-
-</configuration>
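
The content property above is a Jinja template: Ambari substitutes the {{...}} references from cluster configuration before writing spark-env.sh. The effect can be reproduced with the jinja2 package, using illustrative values in place of Ambari's resolved configuration:

from jinja2 import Template

context = {
    "spark_log_dir": "/var/log/spark",  # illustrative; normally taken from spark-env
    "spark_pid_dir": "/var/run/spark",
}

template = Template(
    "export SPARK_LOG_DIR={{spark_log_dir}}\n"
    "export SPARK_PID_DIR={{spark_pid_dir}}\n"
)
print(template.render(**context))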

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-javaopts-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-javaopts-properties.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-javaopts-properties.xml
deleted file mode 100644
index c2db325..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-javaopts-properties.xml
+++ /dev/null
@@ -1,27 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="true">
-  <property>
-    <name>content</name>
-    <description>Spark-javaopts-properties</description>
-    <value> </value>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-log4j.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-log4j.xml
deleted file mode 100644
index 2ba64fb..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-log4j.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_final="false">
-  <property>
-    <name>content</name>
-    <description>Spark-log4j-Properties</description>
-    <value>
-# Set everything to be logged to the console
-log4j.rootCategory=INFO, console
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
-
-# Settings to quiet third party logs that are too verbose
-log4j.logger.org.eclipse.jetty=WARN
-log4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR
-log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
-log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
-
-    </value>
-  </property>
-</configuration>
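
For readers more at home in Python than log4j, the console pattern above, %d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n, corresponds roughly to the logging setup below. This is an analogy only, not part of the Spark configuration:

import logging

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)s %(name)s: %(message)s",
    datefmt="%y/%m/%d %H:%M:%S",
)
logging.getLogger("demo").info("roughly matches the log4j console pattern")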

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-metrics-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-metrics-properties.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-metrics-properties.xml
deleted file mode 100644
index 9af5f2e..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/configuration/spark-metrics-properties.xml
+++ /dev/null
@@ -1,160 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="true">
-  <property>
-    <name>content</name>
-    <description>Spark-metrics-properties</description>
-    <value>
-# syntax: [instance].sink|source.[name].[options]=[value]
-
-# This file configures Spark's internal metrics system. The metrics system is
-# divided into instances which correspond to internal components.
-# Each instance can be configured to report its metrics to one or more sinks.
-# Accepted values for [instance] are "master", "worker", "executor", "driver",
-# and "applications". A wild card "*" can be used as an instance name, in
-# which case all instances will inherit the supplied property.
-#
-# Within an instance, a "source" specifies a particular set of grouped metrics.
-# There are two kinds of sources:
-# 1. Spark internal sources, like MasterSource, WorkerSource, etc., which will
-# collect a Spark component's internal state. Each instance is paired with a
-# Spark source that is added automatically.
-# 2. Common sources, like JvmSource, which will collect low level state.
-# These can be added through configuration options and are then loaded
-# using reflection.
-#
-# A "sink" specifies where metrics are delivered to. Each instance can be
-# assigned one or more sinks.
-#
-# The sink|source field specifies whether the property relates to a sink or
-# source.
-#
-# The [name] field specifies the name of source or sink.
-#
-# The [options] field is the specific property of this source or sink. The
-# source or sink is responsible for parsing this property.
-#
-# Notes:
-# 1. To add a new sink, set the "class" option to a fully qualified class
-# name (see examples below).
-# 2. Some sinks involve a polling period. The minimum allowed polling period
-# is 1 second.
-# 3. Wild card properties can be overridden by more specific properties.
-# For example, master.sink.console.period takes precedence over
-# *.sink.console.period.
-# 4. A metrics specific configuration
-# "spark.metrics.conf=${SPARK_HOME}/conf/metrics.properties" should be
-# added to Java properties using -Dspark.metrics.conf=xxx if you want to
-# customize metrics system. You can also put the file in ${SPARK_HOME}/conf
-# and it will be loaded automatically.
-# 5. MetricsServlet is added by default as a sink in master, worker and client
-# driver, you can send http request "/metrics/json" to get a snapshot of all the
-# registered metrics in json format. For master, requests "/metrics/master/json" and
-# "/metrics/applications/json" can be sent seperately to get metrics snapshot of
-# instance master and applications. MetricsServlet may not be configured by self.
-#
-
-## List of available sinks and their properties.
-
-# org.apache.spark.metrics.sink.ConsoleSink
-# Name: Default: Description:
-# period 10 Poll period
-# unit seconds Units of poll period
-
-# org.apache.spark.metrics.sink.CSVSink
-# Name: Default: Description:
-# period 10 Poll period
-# unit seconds Units of poll period
-# directory /tmp Where to store CSV files
-
-# org.apache.spark.metrics.sink.GangliaSink
-# Name: Default: Description:
-# host NONE Hostname or multicast group of Ganglia server
-# port NONE Port of Ganglia server(s)
-# period 10 Poll period
-# unit seconds Units of poll period
-# ttl 1 TTL of messages sent by Ganglia
-# mode multicast Ganglia network mode ('unicast' or 'multicast')
-
-# org.apache.spark.metrics.sink.JmxSink
-
-# org.apache.spark.metrics.sink.MetricsServlet
-# Name: Default: Description:
-# path VARIES* Path prefix from the web server root
-# sample false Whether to show entire set of samples for histograms ('false' or 'true')
-#
-# * Default path is /metrics/json for all instances except the master. The master has two paths:
-# /metrics/applications/json # App information
-# /metrics/master/json # Master information
-
-# org.apache.spark.metrics.sink.GraphiteSink
-# Name: Default: Description:
-# host NONE Hostname of Graphite server
-# port NONE Port of Graphite server
-# period 10 Poll period
-# unit seconds Units of poll period
-# prefix EMPTY STRING Prefix to prepend to metric name
-
-## Examples
-# Enable JmxSink for all instances by class name
-#*.sink.jmx.class=org.apache.spark.metrics.sink.JmxSink
-
-# Enable ConsoleSink for all instances by class name
-#*.sink.console.class=org.apache.spark.metrics.sink.ConsoleSink
-
-# Polling period for ConsoleSink
-#*.sink.console.period=10
-
-#*.sink.console.unit=seconds
-
-# Master instance overlap polling period
-#master.sink.console.period=15
-
-#master.sink.console.unit=seconds
-
-# Enable CsvSink for all instances
-#*.sink.csv.class=org.apache.spark.metrics.sink.CsvSink
-
-# Polling period for CsvSink
-#*.sink.csv.period=1
-
-#*.sink.csv.unit=minutes
-
-# Polling directory for CsvSink
-#*.sink.csv.directory=/tmp/
-
-# Worker instance overlap polling period
-#worker.sink.csv.period=10
-
-#worker.sink.csv.unit=minutes
-
-# Enable jvm source for instance master, worker, driver and executor
-#master.source.jvm.class=org.apache.spark.metrics.source.JvmSource
-
-#worker.source.jvm.class=org.apache.spark.metrics.source.JvmSource
-
-#driver.source.jvm.class=org.apache.spark.metrics.source.JvmSource
-
-#executor.source.jvm.class=org.apache.spark.metrics.source.JvmSource
-
-    </value>
-  </property>
-</configuration>
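
The [instance].sink|source.[name].[options]=[value] syntax described above splits a key on its first three dots. A toy parser that illustrates the shape of a line, assuming well-formed input (Spark's own MetricsConfig does considerably more):

def parse_metrics_line(line):
    # Split '[instance].sink|source.[name].[options]=[value]' into its parts.
    key, _, value = line.partition("=")
    instance, kind, name, option = key.split(".", 3)
    return {"instance": instance, "kind": kind, "name": name,
            "option": option, "value": value}

print(parse_metrics_line("master.sink.console.period=15"))
# {'instance': 'master', 'kind': 'sink', 'name': 'console', 'option': 'period', 'value': '15'}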

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/kerberos.json b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/kerberos.json
deleted file mode 100644
index a8dfe16..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/kerberos.json
+++ /dev/null
@@ -1,55 +0,0 @@
-{
-  "services": [
-    {
-      "name": "SPARK",
-      "identities": [
-        {
-          "name": "/smokeuser"
-        },
-        {
-          "name": "/hdfs"
-        },
-        {
-          "name": "sparkuser",
-          "principal": {
-            "value": "${spark-env/spark_user}-${cluster_name}@${realm}",
-            "type" : "user",
-            "configuration": "spark-defaults/spark.history.kerberos.principal",
-            "local_username" : "${spark-env/spark_user}"
-          },
-          "keytab": {
-            "file": "${keytab_dir}/spark.headless.keytab",
-            "owner": {
-              "name": "${spark-env/spark_user}",
-              "access": "r"
-            },
-            "group": {
-              "name": "${cluster-env/user_group}",
-               "access": ""
-            },
-            "configuration": "spark-defaults/spark.history.kerberos.keytab"
-           }
-        }
-      ],
-      "components": [
-        {
-          "name": "SPARK_JOBHISTORYSERVER"
-        },
-        {
-          "name": "SPARK_CLIENT"
-        },
-        {
-          "name": "SPARK_THRIFTSERVER",
-          "identities": [
-            {
-              "name": "/HDFS/NAMENODE/hdfs"
-            },
-            {
-              "name": "/HIVE/HIVE_SERVER/hive_server_hive"
-            }
-          ]
-        }
-      ]
-    }
-  ]
-}
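
The ${config-type/property} and ${variable} references in the principal and keytab definitions above are resolved against cluster configuration before the identities are created. A minimal resolver sketch with illustrative values (not Ambari's actual variable-replacement code):

import re

def resolve(template, variables):
    # Replace each ${name} reference with its value from the lookup table.
    return re.sub(r"\$\{([^}]+)\}", lambda m: variables[m.group(1)], template)

variables = {
    "spark-env/spark_user": "spark",  # illustrative values
    "cluster_name": "c1",
    "realm": "EXAMPLE.COM",
}
print(resolve("${spark-env/spark_user}-${cluster_name}@${realm}", variables))
# spark-c1@EXAMPLE.COM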

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/metainfo.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/metainfo.xml
deleted file mode 100644
index a213260..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/metainfo.xml
+++ /dev/null
@@ -1,190 +0,0 @@
-<?xml version="1.0"?>
-<!--Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>SPARK</name>
-      <displayName>Spark</displayName>
-      <comment>Apache Spark is a fast and general engine for large-scale data processing</comment>
-      <version>1.6.1</version>
-      <components>
-        <component>
-          <name>SPARK_JOBHISTORYSERVER</name>
-          <displayName>Spark History Server</displayName>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <versionAdvertised>true</versionAdvertised>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-               <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
-               <scope>host</scope>
-               <auto-deploy>
-                 <enabled>true</enabled>
-               </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>YARN/YARN_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-             </auto-deploy>
-           </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/job_history_server.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-        <component>
-          <name>SPARK_THRIFTSERVER</name>
-          <displayName>Spark Thrift Server</displayName>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <versionAdvertised>true</versionAdvertised>
-          <dependencies>
-            <dependency>
-              <name>HIVE/HIVE_METASTORE</name>
-              <scope>cluster</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>HIVE/HIVE_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/thrift_server.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-        <component>
-          <name>SPARK_CLIENT</name>
-          <displayName>Spark Client</displayName>
-          <category>CLIENT</category>
-          <cardinality>1+</cardinality>
-          <versionAdvertised>true</versionAdvertised>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-               <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
-               <scope>host</scope>
-               <auto-deploy>
-                 <enabled>true</enabled>
-               </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>YARN/YARN_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-             </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/spark_client.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-          <configFiles>
-            <configFile>
-              <type>env</type>
-              <fileName>spark-env.sh</fileName>
-              <dictionaryName>spark-env</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>properties</type>
-              <fileName>spark-defaults.conf</fileName>
-              <dictionaryName>spark-defaults</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>log4j.properties</fileName>
-              <dictionaryName>spark-log4j</dictionaryName>
-            </configFile>
-          </configFiles>
-        </component>
-      </components>
-
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>redhat7,redhat6,suse11</osFamily>
-          <packages>
-            <package>
-              <name>spark-core_4_2_0*</name>
-            </package>
-            <package>
-              <name>spark_4_2_0*</name>
-            </package>
-          </packages>
-        </osSpecific>
-        <osSpecific>
-          <osFamily>ubuntu12</osFamily>
-          <packages>
-            <package>
-              <name>spark-core_4_2_0*</name>
-            </package>
-            <package>
-               <name>spark_4_2_0*</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-      <configuration-dependencies>
-        <config-type>spark-env</config-type>
-        <config-type>spark-defaults</config-type>
-        <config-type>spark-log4j</config-type>
-      </configuration-dependencies>
-
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-
-      <requiredServices>
-        <service>HDFS</service>
-        <service>YARN</service>
-        <service>HIVE</service>
-      </requiredServices>
-
-    </service>
-  </services>
-</metainfo>
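
The metainfo.xml above is what the stack loader reads to discover the service's components, their categories, and their command scripts. A quick way to inspect such a file with only the standard library (the local path is illustrative):

import xml.etree.ElementTree as ET

tree = ET.parse("metainfo.xml")  # a local copy of the file shown above
for component in tree.iter("component"):
    print("%-24s %-8s %s" % (component.findtext("name"),
                             component.findtext("category"),
                             component.findtext("commandScript/script")))
# SPARK_JOBHISTORYSERVER   MASTER   scripts/job_history_server.py
# SPARK_THRIFTSERVER       MASTER   scripts/thrift_server.py
# SPARK_CLIENT             CLIENT   scripts/spark_client.py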

