ambari-commits mailing list archives

From alejan...@apache.org
Subject [24/51] [partial] ambari git commit: AMBARI-21349. Create BigInsights Stack Skeleton in Ambari 2.5 (alejandro)
Date Wed, 28 Jun 2017 00:24:22 GMT
http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/params.py
new file mode 100755
index 0000000..bc19704
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/params.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+from resource_management.libraries.functions import format
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.version import format_stack_version, compare_versions
+from resource_management.libraries.functions.default import default
+from utils import get_bare_principal
+
+from resource_management.libraries.functions.get_stack_version import get_stack_version
+from resource_management.libraries.functions.is_empty import is_empty
+from resource_management.core.logger import Logger
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions import get_kinit_path
+
+import status_params
+
+
+# server configurations
+config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
+stack_name = default("/hostLevelParams/stack_name", None)
+
+version = default("/commandParams/version", None)
+# Version that is CURRENT.
+current_version = default("/hostLevelParams/current_version", None)
+
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+
+stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
+iop_stack_version = format_stack_version(stack_version_unformatted)
+upgrade_direction = default("/commandParams/upgrade_direction", None)
+
+# When downgrading, 'version' and 'current_version' both point to the downgrade-target version;
+# downgrade_from_version provides the source version the downgrade is happening from.
+downgrade_from_version = default("/commandParams/downgrade_from_version", None)
+
+# default kafka parameters
+kafka_home = '/usr/iop/current/kafka-broker'
+kafka_bin = kafka_home+'/bin/kafka'
+conf_dir = "/usr/iop/current/kafka-broker/config"
+
+kafka_user = config['configurations']['kafka-env']['kafka_user']
+kafka_log_dir = config['configurations']['kafka-env']['kafka_log_dir']
+kafka_pid_dir = status_params.kafka_pid_dir
+kafka_pid_file = kafka_pid_dir+"/kafka.pid"
+# This is hardcoded in the Kafka bash process lifecycle, over which we have no control
+kafka_managed_pid_dir = "/var/run/kafka"
+kafka_managed_log_dir = "/var/log/kafka"
+hostname = config['hostname']
+user_group = config['configurations']['cluster-env']['user_group']
+java64_home = config['hostLevelParams']['java_home']
+kafka_env_sh_template = config['configurations']['kafka-env']['content']
+kafka_hosts = config['clusterHostInfo']['kafka_broker_hosts']
+kafka_hosts.sort()
+
+zookeeper_hosts = config['clusterHostInfo']['zookeeper_hosts']
+zookeeper_hosts.sort()
+
+if (('kafka-log4j' in config['configurations']) and ('content' in config['configurations']['kafka-log4j'])):
+    log4j_props = config['configurations']['kafka-log4j']['content']
+else:
+    log4j_props = None
+
+kafka_metrics_reporters=""
+metric_collector_host = ""
+metric_collector_port = ""
+
+ams_collector_hosts = default("/clusterHostInfo/metrics_collector_hosts", [])
+has_metric_collector = not len(ams_collector_hosts) == 0
+
+if has_metric_collector:
+  metric_collector_host = ams_collector_hosts[0]
+  metric_collector_port = default("/configurations/ams-site/timeline.metrics.service.webapp.address", "0.0.0.0:6188")
+  if metric_collector_port and metric_collector_port.find(':') != -1:
+    metric_collector_port = metric_collector_port.split(':')[1]
+
+  if not len(kafka_metrics_reporters) == 0:
+      kafka_metrics_reporters = kafka_metrics_reporters + ','
+
+  kafka_metrics_reporters = kafka_metrics_reporters + "org.apache.hadoop.metrics2.sink.kafka.KafkaTimelineMetricsReporter"
+
+
+# Security-related params
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+kafka_kerberos_enabled = ('security.inter.broker.protocol' in config['configurations']['kafka-broker'] and
+                          config['configurations']['kafka-broker']['security.inter.broker.protocol'] == "SASL_PLAINTEXT")
+
+if security_enabled and iop_stack_version != "" and 'kafka_principal_name' in config['configurations']['kafka-env'] and compare_versions(iop_stack_version, '4.1') >= 0:
+    _hostname_lowercase = config['hostname'].lower()
+    _kafka_principal_name = config['configurations']['kafka-env']['kafka_principal_name']
+    kafka_jaas_principal = _kafka_principal_name.replace('_HOST',_hostname_lowercase)
+    kafka_keytab_path = config['configurations']['kafka-env']['kafka_keytab']
+    kafka_bare_jaas_principal = get_bare_principal(_kafka_principal_name)
+    kafka_kerberos_params = "-Djava.security.auth.login.config="+ conf_dir +"/kafka_jaas.conf"
+else:
+    kafka_kerberos_params = ''
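
For readers following the AMS wiring above, here is a minimal standalone sketch of the metrics-collector resolution with hypothetical values (the host name below is made up; the 0.0.0.0:6188 default mirrors the code above). It only illustrates the port-splitting and reporter-list logic and needs no resource_management imports:

  # Standalone sketch of the metrics-collector resolution above; values are hypothetical.
  ams_collector_hosts = ["ams-host-1.example.com"]   # stand-in for /clusterHostInfo/metrics_collector_hosts
  has_metric_collector = len(ams_collector_hosts) > 0

  kafka_metrics_reporters = ""
  metric_collector_host = ""
  metric_collector_port = ""

  if has_metric_collector:
    metric_collector_host = ams_collector_hosts[0]
    webapp_address = "0.0.0.0:6188"                  # default for timeline.metrics.service.webapp.address
    metric_collector_port = webapp_address.split(':')[1] if ':' in webapp_address else webapp_address
    kafka_metrics_reporters = "org.apache.hadoop.metrics2.sink.kafka.KafkaTimelineMetricsReporter"

  print("%s:%s -> %s" % (metric_collector_host, metric_collector_port, kafka_metrics_reporters))
  # ams-host-1.example.com:6188 -> org.apache.hadoop.metrics2.sink.kafka.KafkaTimelineMetricsReporter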

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/service_check.py
new file mode 100755
index 0000000..6f5fe72
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/service_check.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.validate import call_and_match_output
+from resource_management.libraries.functions.format import format
+from resource_management.core.logger import Logger
+
+class ServiceCheck(Script):
+  def service_check(self, env):
+    import params
+    env.set_params(params)
+
+    # TODO: Kafka, introduced in IOP 4.1, only tentatively supports running in a kerberized cluster.
+    # Kafka uses its own Zookeeper instance and does not yet have the capability of running in a secure mode.
+    kafka_config = self.read_kafka_config()
+
+    create_topic_cmd_created_output = "Created topic \"ambari_kafka_service_check\"."
+    create_topic_cmd_exists_output = "Topic \"ambari_kafka_service_check\" already exists."
+
+    source_cmd = format("source {conf_dir}/kafka-env.sh")
+    create_topic_cmd = format("{kafka_home}/bin/kafka-topics.sh --zookeeper {kafka_config[zookeeper.connect]} --create --topic ambari_kafka_service_check --partitions 1 --replication-factor 1")
+    command = source_cmd + " ; " + create_topic_cmd
+
+    Logger.info("Running kafka create topic command: %s" % command)
+    call_and_match_output(command, format("({create_topic_cmd_created_output})|({create_topic_cmd_exists_output})"), "Failed to check that topic exists")
+
+  def read_kafka_config(self):
+    import params
+
+    kafka_config = {}
+    with open(params.conf_dir+"/server.properties", "r") as conf_file:
+      for line in conf_file:
+        if line.startswith("#") or not line.strip():
+          continue
+
+        key, value = line.split("=", 1)
+        kafka_config[key] = value.replace("\n","")
+
+    return kafka_config
+
+if __name__ == "__main__":
+    ServiceCheck().execute()
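
The read_kafka_config() helper above is a flat key=value parser for server.properties. Below is an illustrative-only sketch of the same parsing run against an inline sample instead of {conf_dir}/server.properties (the broker settings are hypothetical, not values shipped by this stack):

  # Illustrative only: parse a sample server.properties the same way read_kafka_config() does.
  sample = """# Kafka broker settings
  broker.id=0
  zookeeper.connect=zk1.example.com:2181,zk2.example.com:2181
  log.dirs=/kafka-logs
  """

  kafka_config = {}
  for line in sample.splitlines():
    line = line.strip()
    if line.startswith("#") or not line:
      continue
    key, value = line.split("=", 1)   # split on the first '=' only, so values may contain '='
    kafka_config[key] = value.strip()

  print(kafka_config["zookeeper.connect"])   # zk1.example.com:2181,zk2.example.com:2181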

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/status_params.py
new file mode 100755
index 0000000..57bdf5e
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/status_params.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+from resource_management.libraries.functions import format
+from resource_management.libraries.script.script import Script
+
+config = Script.get_config()
+
+kafka_pid_dir = config['configurations']['kafka-env']['kafka_pid_dir']
+kafka_pid_file = format("{kafka_pid_dir}/kafka.pid")
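
Note that format() here is resource_management's helper, which (roughly speaking) interpolates names visible in the calling scope rather than taking keyword arguments. A rough stand-in using plain str.format and a hypothetical pid directory:

  # Approximation only; the real helper resolves kafka_pid_dir from the surrounding scope/params.
  kafka_pid_dir = "/var/run/kafka"   # hypothetical value of kafka-env/kafka_pid_dir
  kafka_pid_file = "{kafka_pid_dir}/kafka.pid".format(kafka_pid_dir=kafka_pid_dir)
  print(kafka_pid_file)              # /var/run/kafka/kafka.pid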

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/upgrade.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/upgrade.py
new file mode 100755
index 0000000..16fc526
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/upgrade.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+import os
+
+from resource_management import *
+from resource_management.core.resources.system import Execute
+from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions import Direction
+from resource_management.core.exceptions import Fail
+
+def run_migration(env, upgrade_type):
+  """
+  If the acl migration script is present, then run it for either upgrade or downgrade.
+  That script was introduced in HDP 2.3.4.0 and requires stopping all Kafka brokers first.
+  Requires configs to be present.
+  :param env: Environment.
+  :param upgrade_type: "rolling" or "nonrolling"
+  """
+  import params
+
+  if upgrade_type is None:
+    raise Fail('Parameter "upgrade_type" is missing.')
+
+  if params.upgrade_direction is None:
+    raise Fail('Parameter "upgrade_direction" is missing.')
+
+  if params.upgrade_direction == Direction.DOWNGRADE and params.downgrade_from_version is None:
+    raise Fail('Parameter "downgrade_from_version" is missing.')
+
+  if not params.security_enabled:
+    Logger.info("Skip running the Kafka ACL migration script since cluster security is not enabled.")
+    return
+
+  Logger.info("Upgrade type: {0}, direction: {1}".format(str(upgrade_type), params.upgrade_direction))
+
+  # If the acl migration script exists in the version being upgraded to, then attempt to run the upgrade/downgrade migration while still using the present bits.
+  kafka_acls_script = None
+  command_suffix = ""
+  if params.upgrade_direction == Direction.UPGRADE:
+    kafka_acls_script = format("/usr/hdp/{version}/kafka/bin/kafka-acls.sh")
+    command_suffix = "--upgradeAcls"
+  elif params.upgrade_direction == Direction.DOWNGRADE:
+    kafka_acls_script = format("/usr/hdp/{downgrade_from_version}/kafka/bin/kafka-acls.sh")
+    command_suffix = "--downgradeAcls"
+
+  if kafka_acls_script is not None:
+    if os.path.exists(kafka_acls_script):
+      Logger.info("Found Kafka acls script: {0}".format(kafka_acls_script))
+      if params.zookeeper_connect is None:
+        raise Fail("Could not retrieve property kafka-broker/zookeeper.connect")
+
+      acls_command = "{0} --authorizer kafka.security.auth.SimpleAclAuthorizer --authorizer-properties zookeeper.connect={1} {2}".\
+        format(kafka_acls_script, params.zookeeper_connect, command_suffix)
+
+      Execute(acls_command,
+              user=params.kafka_user,
+              logoutput=True)
+    else:
+      Logger.info("Did not find Kafka acls script: {0}".format(kafka_acls_script))
+
+
+def prestart(env, component):
+  import params
+
+  if params.version and compare_versions(format_stack_version(params.version), '4.1.0.0') >= 0:
+    conf_select.select(params.stack_name, "kafka", params.version)
+    stack_select.select(component, params.version)
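
The prestart() gate above only switches the conf_select/stack_select symlinks once the target version is at least 4.1.0.0. A dependency-free approximation of that comparison with hypothetical version strings (the real code relies on resource_management's format_stack_version/compare_versions, not this helper):

  # Rough illustration of the >= 4.1.0.0 gate; not the resource_management implementation.
  def version_tuple(v):
    return tuple(int(part) for part in v.split('.'))

  for candidate in ["4.0.0.0", "4.1.0.0", "4.2.0.0"]:
    selected = version_tuple(candidate) >= version_tuple("4.1.0.0")
    print("%s -> run conf_select/stack_select: %s" % (candidate, selected))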

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/utils.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/utils.py
new file mode 100755
index 0000000..2f1fa5e
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/scripts/utils.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+import re
+
+def get_bare_principal(normalized_principal_name):
+    """
+    Given a normalized principal name (nimbus/c6501.ambari.apache.org@EXAMPLE.COM) returns just the
+    primary component (nimbus)
+    :param normalized_principal_name: a string containing the principal name to process
+    :return: a string containing the primary component value or None if not valid
+    """
+
+    bare_principal = None
+
+    if normalized_principal_name:
+        match = re.match(r"([^/@]+)(?:/[^@])?(?:@.*)?", normalized_principal_name)
+
+        if match:
+            bare_principal = match.group(1)
+
+    return bare_principal
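
A quick usage sketch for get_bare_principal(), assuming utils.py is importable; the first principal is the example from the docstring, the other two inputs are hypothetical:

  from utils import get_bare_principal

  print(get_bare_principal("nimbus/c6501.ambari.apache.org@EXAMPLE.COM"))  # nimbus
  print(get_bare_principal("kafka@EXAMPLE.COM"))                           # kafka
  print(get_bare_principal(None))                                          # None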

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/templates/kafka_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/templates/kafka_jaas.conf.j2 b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/templates/kafka_jaas.conf.j2
new file mode 100755
index 0000000..56c558d
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KAFKA/package/templates/kafka_jaas.conf.j2
@@ -0,0 +1,41 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+KafkaServer {
+   com.sun.security.auth.module.Krb5LoginModule required
+   useKeyTab=true
+   keyTab="{{kafka_keytab_path}}"
+   storeKey=true
+   useTicketCache=false
+   serviceName="{{kafka_bare_jaas_principal}}"
+   principal="{{kafka_jaas_principal}}";
+};
+KafkaClient {
+   com.sun.security.auth.module.Krb5LoginModule required
+   useTicketCache=true
+   renewTicket=true
+   serviceName="{{kafka_bare_jaas_principal}}";
+};
+Client {
+   com.sun.security.auth.module.Krb5LoginModule required
+   useKeyTab=true
+   keyTab="{{kafka_keytab_path}}"
+   storeKey=true
+   useTicketCache=false
+   serviceName="zookeeper"
+   principal="{{kafka_jaas_principal}}";
+};
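
For illustration, this is how the KafkaServer section of the template renders with jinja2 and hypothetical keytab/principal values (in a real deployment the values come from params.py):

  from jinja2 import Template

  kafka_server_tpl = Template("""KafkaServer {
     com.sun.security.auth.module.Krb5LoginModule required
     useKeyTab=true
     keyTab="{{kafka_keytab_path}}"
     storeKey=true
     useTicketCache=false
     serviceName="{{kafka_bare_jaas_principal}}"
     principal="{{kafka_jaas_principal}}";
  };""")

  print(kafka_server_tpl.render(
    kafka_keytab_path="/etc/security/keytabs/kafka.service.keytab",   # hypothetical keytab path
    kafka_bare_jaas_principal="kafka",                                # e.g. get_bare_principal(...)
    kafka_jaas_principal="kafka/broker1.example.com@EXAMPLE.COM"))    # hypothetical principal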

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/configuration/kerberos-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/configuration/kerberos-env.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/configuration/kerberos-env.xml
new file mode 100755
index 0000000..1a70ac0
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/configuration/kerberos-env.xml
@@ -0,0 +1,308 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration supports_final="false">
+  <property require-input="true">
+    <name>kdc_type</name>
+    <display-name>KDC type</display-name>
+    <value>mit-kdc</value>
+    <description>
+      The type of KDC being used. Either mit-kdc or active-directory
+    </description>
+    <value-attributes>
+      <type>componentHost</type>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>manage_identities</name>
+    <description>
+      Indicates whether the Ambari user and service Kerberos identities (principals and keytab files)
+      should be managed (created, deleted, updated, etc...) by Ambari or managed manually.
+    </description>
+    <value>true</value>
+    <display-name>Manage Kerberos Identities</display-name>
+    <value-attributes>
+      <visible>false</visible>
+      <overridable>false</overridable>
+      <type>boolean</type>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>install_packages</name>
+    <display-name>Install OS-specific Kerberos client package(s)</display-name>
+    <description>
+      Indicates whether Ambari should install the Kerberos client package(s) or not. If not, it is
+      expected that Kerberos utility programs (such as kadmin, kinit, klist, and kdestroy) are
+      compatible with MIT Kerberos 5 version 1.10.3 in command line options and behaviors.
+    </description>
+    <value>true</value>
+    <value-attributes>
+      <type>boolean</type>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
+  <property require-input="true">
+    <name>ldap_url</name>
+    <display-name>LDAP url</display-name>
+    <description>
+      The URL to the Active Directory LDAP Interface
+    </description>
+    <value/>
+    <value-attributes>
+      <type>host</type>
+      <visible>false</visible>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
+  <property require-input="true">
+    <name>container_dn</name>
+    <display-name>Container DN</display-name>
+    <description>
+      The distinguished name (DN) of the container used to store service principals
+    </description>
+    <value-attributes>
+      <visible>false</visible>
+      <overridable>false</overridable>
+    </value-attributes>
+    <value/>
+  </property>
+
+  <property require-input="true">
+    <name>encryption_types</name>
+    <display-name>Encryption Types</display-name>
+    <description>
+      The supported list of session key encryption types that should be returned by the KDC.
+    </description>
+    <value>aes des3-cbc-sha1 rc4 des-cbc-md5</value>
+    <value-attributes>
+      <type>multiLine</type>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
+  <property require-input="true">
+    <name>realm</name>
+    <description>
+      The default realm to use when creating service principals
+    </description>
+    <display-name>Realm name</display-name>
+    <value/>
+    <value-attributes>
+      <type>host</type>
+      <editable-only-at-install>true</editable-only-at-install>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
+  <property require-input="true">
+    <name>kdc_hosts</name>
+    <description>
+      A comma-delimited list of IP addresses or FQDNs declaring the KDC hosts.
+      Optionally a port number may be included in each entry by separating each host and port by a
+      colon (:). Example:  kdc1.example.com:88, kdc2.example.com:88
+    </description>
+    <display-name>KDC hosts</display-name>
+    <value/>
+    <value-attributes>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>admin_server_host</name>
+    <display-name>Kadmin host</display-name>
+    <description>
+      The IP address or FQDN for the KDC Kerberos administrative host. Optionally a port number may be included.
+    </description>
+    <value/>
+    <value-attributes>
+      <type>host</type>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>executable_search_paths</name>
+    <display-name>Executable Search Paths</display-name>
+    <description>
+      A comma-delimited list of search paths to use to find Kerberos utilities like kadmin and kinit.
+    </description>
+    <value>/usr/bin, /usr/kerberos/bin, /usr/sbin, /usr/lib/mit/bin, /usr/lib/mit/sbin</value>
+    <value-attributes>
+      <overridable>false</overridable>
+      <type>multiLine</type>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>password_length</name>
+    <display-name>Password Length</display-name>
+    <description>
+      The required length for generated passwords.
+    </description>
+    <value>20</value>
+    <value-attributes>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>password_min_lowercase_letters</name>
+    <display-name>Password Minimum # Lowercase Letters</display-name>
+    <description>
+      The minimum number of lowercase letters (a-z) required in generated passwords
+    </description>
+    <value>1</value>
+    <value-attributes>
+      <type>int</type>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>password_min_uppercase_letters</name>
+    <display-name>Password Minimum # Uppercase Letters</display-name>
+    <description>
+      The minimum number of uppercase letters (A-Z) required in generated passwords
+    </description>
+    <value>1</value>
+    <value-attributes>
+      <type>int</type>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>password_min_digits</name>
+    <display-name>Password Minimum # Digits</display-name>
+    <description>
+      The minimum number of digits (0-9) required in generated passwords
+    </description>
+    <value>1</value>
+    <value-attributes>
+      <type>int</type>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>password_min_punctuation</name>
+    <display-name>Password Minimum # Punctuation Characters</display-name>
+    <description>
+      The minimum number of punctuation characters (?.!$%^*()-_+=~) required in generated passwords
+    </description>
+    <value>1</value>
+    <value-attributes>
+      <type>int</type>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>password_min_whitespace</name>
+    <display-name>Password Minimum # Whitespace Characters</display-name>
+    <description>
+      The minimum number of whitespace characters required in generated passwords
+    </description>
+    <value>0</value>
+    <value-attributes>
+      <type>int</type>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>service_check_principal_name</name>
+    <display-name>Test Kerberos Principal</display-name>
+    <description>
+      The principal name to use when executing the Kerberos service check
+    </description>
+    <value>${cluster_name}-${short_date}</value>
+    <value-attributes>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>case_insensitive_username_rules</name>
+    <display-name>Enable case insensitive username rules</display-name>
+    <description>
+      Force principal names to resolve to lowercase local usernames in auth-to-local rules
+    </description>
+    <value>false</value>
+    <value-attributes>
+      <overridable>false</overridable>
+      <type>boolean</type>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>ad_create_attributes_template</name>
+    <display-name>Account Attribute Template</display-name>
+    <description>
+      A Velocity template to use to generate a JSON-formatted document containing the set of
+      attribute names and values needed to create a new Kerberos identity in the relevant
+      Active Directory.
+      Variables include:
+      principal_name, principal_primary, principal_instance, realm, realm_lowercase,
+      normalized_principal, principal_digest, password, is_service, container_dn
+    </description>
+    <value>
+{
+  "objectClass": ["top", "person", "organizationalPerson", "user"],
+  "cn": "$principal_name",
+  #if( $is_service )
+  "servicePrincipalName": "$principal_name",
+  #end
+  "userPrincipalName": "$normalized_principal",
+  "unicodePwd": "$password",
+  "accountExpires": "0",
+  "userAccountControl": "66048"
+}
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <empty-value-valid>true</empty-value-valid>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>kdc_create_attributes</name>
+    <display-name>Principal Attributes</display-name>
+    <description>
+      The set of attributes to use when creating a new Kerberos identity in the relevant (MIT) KDC.
+    </description>
+    <value/>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/configuration/krb5-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/configuration/krb5-conf.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/configuration/krb5-conf.xml
new file mode 100755
index 0000000..ce5d4b4
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/configuration/krb5-conf.xml
@@ -0,0 +1,109 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration>
+  <property require-input="false">
+    <name>domains</name>
+    <display-name>Domains</display-name>
+    <description>
+      A comma-separated list of domain names used to map server host names to the Realm name (e.g. .example.com,example.com). This is optional
+    </description>
+    <value/>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>manage_krb5_conf</name>
+    <display-name>Manage Kerberos client krb5.conf</display-name>
+    <description>
+      Indicates whether your krb5.conf file should be managed by the wizard or whether you will manage it yourself
+    </description>
+    <value>true</value>
+    <value-attributes>
+      <overridable>false</overridable>
+      <type>boolean</type>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>conf_dir</name>
+    <display-name>krb5-conf directory path</display-name>
+    <description>The krb5.conf configuration directory</description>
+    <value>/etc</value>
+    <value-attributes>
+      <type>directory</type>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>krb5-conf template</display-name>
+    <description>Customizable krb5.conf template (Jinja template engine)</description>
+    <value>
+[libdefaults]
+  renew_lifetime = 7d
+  forwardable = true
+  default_realm = {{realm}}
+  ticket_lifetime = 24h
+  dns_lookup_realm = false
+  dns_lookup_kdc = false
+  #default_tgs_enctypes = {{encryption_types}}
+  #default_tkt_enctypes = {{encryption_types}}
+
+{% if domains %}
+[domain_realm]
+{% for domain in domains.split(',') %}
+  {{domain}} = {{realm}}
+{% endfor %}
+{% endif %}
+
+[logging]
+  default = FILE:/var/log/krb5kdc.log
+  admin_server = FILE:/var/log/kadmind.log
+  kdc = FILE:/var/log/krb5kdc.log
+
+[realms]
+  {{realm}} = {
+{%- if kdc_hosts -%}
+{%- set kdc_host_list = kdc_hosts.split(',')  -%}
+{%- if kdc_host_list and kdc_host_list|length &gt; 0 %}
+    admin_server = {{admin_server_host|default(kdc_host_list[0]|trim(), True)}}
+{%- if kdc_host_list -%}
+{% for kdc_host in kdc_host_list %}
+    kdc = {{kdc_host|trim()}}
+{%- endfor -%}
+{% endif %}
+{%- endif %}
+{%- endif %}
+  }
+
+{# Append additional realm declarations below #}
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <overridable>false</overridable>
+    </value-attributes>
+  </property>
+</configuration>
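
To make the Jinja logic in the krb5.conf template easier to follow, the snippet below renders a trimmed-down version of just the [realms] block with jinja2 and hypothetical realm/KDC values (the realm and host names are made up; the real rendering is driven by the kerberos-env properties):

  from jinja2 import Template

  realms_tpl = Template("""[realms]
    {{realm}} = {
  {%- if kdc_hosts %}
  {%- set kdc_host_list = kdc_hosts.split(',') %}
      admin_server = {{admin_server_host|default(kdc_host_list[0]|trim, True)}}
  {%- for kdc_host in kdc_host_list %}
      kdc = {{kdc_host|trim}}
  {%- endfor %}
  {%- endif %}
    }""")

  print(realms_tpl.render(realm="EXAMPLE.COM",                                  # hypothetical realm
                          kdc_hosts="kdc1.example.com:88, kdc2.example.com:88", # hypothetical KDCs
                          admin_server_host=""))                                # falls back to the first KDC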

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/kerberos.json b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/kerberos.json
new file mode 100755
index 0000000..6ab7610
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/kerberos.json
@@ -0,0 +1,17 @@
+{
+  "services": [
+    {
+      "name": "KERBEROS",
+      "identities": [
+        {
+          "name": "/smokeuser"
+        }
+      ],
+      "components": [
+        {
+          "name": "KERBEROS_CLIENT"
+        }
+      ]
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/metainfo.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/metainfo.xml
new file mode 100755
index 0000000..a9b6ca2
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/metainfo.xml
@@ -0,0 +1,147 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>KERBEROS</name>
+      <displayName>Kerberos</displayName>
+      <comment>A computer network authentication protocol which works on
+        the basis of 'tickets' to allow nodes communicating over a
+        non-secure network to prove their identity to one another in a
+        secure manner.
+      </comment>
+      <version>1.10.3</version>
+
+      <components>
+        <component>
+          <name>KERBEROS_CLIENT</name>
+          <displayName>Kerberos Client</displayName>
+          <category>CLIENT</category>
+          <cardinality>ALL</cardinality>
+          <versionAdvertised>false</versionAdvertised>
+          <auto-deploy>
+            <enabled>true</enabled>
+          </auto-deploy>
+          <commandScript>
+            <script>scripts/kerberos_client.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>1200</timeout>
+          </commandScript>
+          <customCommands>
+            <customCommand>
+              <name>SET_KEYTAB</name>
+              <commandScript>
+                <script>scripts/kerberos_client.py</script>
+                <scriptType>PYTHON</scriptType>
+                <timeout>1000</timeout>
+              </commandScript>
+            </customCommand>
+            <customCommand>
+              <name>REMOVE_KEYTAB</name>
+              <commandScript>
+                <script>scripts/kerberos_client.py</script>
+                <scriptType>PYTHON</scriptType>
+                <timeout>1000</timeout>
+              </commandScript>
+            </customCommand>
+          </customCommands>
+          <configFiles>
+            <configFile>
+              <type>env</type>
+              <fileName>krb5.conf</fileName>
+              <dictionaryName>krb5-conf</dictionaryName>
+            </configFile>
+          </configFiles>
+        </component>
+      </components>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>redhat7,redhat6</osFamily>
+          <packages>
+            <package>
+              <name>krb5-server</name>
+              <skipUpgrade>true</skipUpgrade>
+            </package>
+            <package>
+              <name>krb5-libs</name>
+              <skipUpgrade>true</skipUpgrade>
+            </package>
+            <package>
+              <name>krb5-workstation</name>
+              <skipUpgrade>true</skipUpgrade>
+            </package>
+          </packages>
+        </osSpecific>
+
+        <osSpecific>
+          <osFamily>debian7,ubuntu12,ubuntu14</osFamily>
+          <packages>
+            <package>
+              <name>krb5-kdc</name>
+              <skipUpgrade>true</skipUpgrade>
+            </package>
+            <package>
+              <name>krb5-admin-server</name>
+              <skipUpgrade>true</skipUpgrade>
+            </package>
+            <package>
+              <name>krb5-user</name>
+              <skipUpgrade>true</skipUpgrade>
+            </package>
+            <package>
+              <name>krb5-config</name>
+              <skipUpgrade>true</skipUpgrade>
+            </package>
+          </packages>
+        </osSpecific>
+
+        <osSpecific>
+          <osFamily>suse11</osFamily>
+          <packages>
+            <package>
+              <name>krb5</name>
+              <skipUpgrade>true</skipUpgrade>
+            </package>
+            <package>
+              <name>krb5-client</name>
+              <skipUpgrade>true</skipUpgrade>
+            </package>
+            <package>
+              <name>krb5-server</name>
+              <skipUpgrade>true</skipUpgrade>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+      <commandScript>
+        <script>scripts/service_check.py</script>
+        <scriptType>PYTHON</scriptType>
+        <timeout>300</timeout>
+      </commandScript>
+
+      <configuration-dependencies>
+        <config-type>krb5-conf</config-type>
+        <config-type>kerberos-env</config-type>
+      </configuration-dependencies>
+      <restartRequiredAfterChange>true</restartRequiredAfterChange>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/kerberos_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/kerberos_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/kerberos_client.py
new file mode 100755
index 0000000..8e2fa93
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/kerberos_client.py
@@ -0,0 +1,79 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from kerberos_common import *
+from resource_management.libraries.functions.security_commons import cached_kinit_executor
+
+class KerberosClient(KerberosScript):
+  def install(self, env):
+    install_packages = default('/configurations/kerberos-env/install_packages', "true")
+    if install_packages:
+      self.install_packages(env)
+    else:
+      print "Kerberos client packages are not being installed, manual installation is required."
+
+    self.configure(env)
+
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+    if params.manage_krb5_conf:
+      self.write_krb5_conf()
+    #delete krb cache to prevent using old krb tickets on fresh kerberos setup
+    self.clear_tmp_cache()
+
+    #self.setup_jce()
+
+  def status(self, env):
+    raise ClientComponentHasNoStatus()
+
+  def security_status(self, env):
+    import status_params
+    if status_params.security_enabled:
+      if status_params.smoke_user and status_params.smoke_user_keytab:
+        try:
+          cached_kinit_executor(status_params.kinit_path_local,
+                                status_params.smoke_user,
+                                status_params.smoke_user_keytab,
+                                status_params.smoke_user_principal,
+                                status_params.hostname,
+                                status_params.tmp_dir)
+          self.put_structured_out({"securityState": "SECURED_KERBEROS"})
+        except Exception as e:
+          self.put_structured_out({"securityState": "ERROR"})
+          self.put_structured_out({"securityStateErrorInfo": str(e)})
+      else:
+        self.put_structured_out({"securityState": "UNKNOWN"})
+        self.put_structured_out({"securityStateErrorInfo": "Missing smoke user credentials"})
+    else:
+      self.put_structured_out({"securityState": "UNSECURED"})
+
+  def set_keytab(self, env):
+    self.write_keytab_file()
+
+  def remove_keytab(self, env):
+    self.delete_keytab_file()
+
+  #def download_install_jce(self, env):
+  #  self.setup_jce()
+
+
+if __name__ == "__main__":
+  KerberosClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/kerberos_common.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/kerberos_common.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/kerberos_common.py
new file mode 100755
index 0000000..a05aead
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/kerberos_common.py
@@ -0,0 +1,473 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import base64
+import os
+import string
+import subprocess
+import sys
+import tempfile
+from tempfile import gettempdir
+
+from resource_management import *
+from utils import get_property_value
+from ambari_commons.os_utils import remove_file
+from ambari_agent import Constants
+
+class KerberosScript(Script):
+  KRB5_REALM_PROPERTIES = [
+    'kdc',
+    'admin_server',
+    'default_domain',
+    'master_kdc'
+  ]
+
+  KRB5_SECTION_NAMES = [
+    'libdefaults',
+    'logging',
+    'realms',
+    'domain_realm',
+    'capaths',
+    'ca_paths',
+    'appdefaults',
+    'plugins'
+  ]
+
+  @staticmethod
+  def create_random_password():
+    import random
+
+    chars = string.digits + string.ascii_letters
+    return ''.join(random.choice(chars) for x in range(13))
+
+  @staticmethod
+  def write_conf_section(output_file, section_name, section_data):
+    if section_name is not None:
+      output_file.write('[%s]\n' % section_name)
+
+      if section_data is not None:
+        for key, value in section_data.iteritems():
+          output_file.write(" %s = %s\n" % (key, value))
+
+
+  @staticmethod
+  def _write_conf_realm(output_file, realm_name, realm_data):
+    """ Writes out realm details
+
+    Example:
+
+     EXAMPLE.COM = {
+      kdc = kerberos.example.com
+      admin_server = kerberos.example.com
+     }
+
+    """
+    if realm_name is not None:
+      output_file.write(" %s = {\n" % realm_name)
+
+      if realm_data is not None:
+        for key, value in realm_data.iteritems():
+          if key in KerberosScript.KRB5_REALM_PROPERTIES:
+            output_file.write("  %s = %s\n" % (key, value))
+
+      output_file.write(" }\n")
+
+  @staticmethod
+  def write_conf_realms_section(output_file, section_name, realms_data):
+    if section_name is not None:
+      output_file.write('[%s]\n' % section_name)
+
+      if realms_data is not None:
+        for realm, realm_data in realms_data.iteritems():
+          KerberosScript._write_conf_realm(output_file, realm, realm_data)
+          output_file.write('\n')
+
+  @staticmethod
+  def write_krb5_conf():
+    import params
+
+    Directory(params.krb5_conf_dir,
+              owner='root',
+              create_parents=True,
+              group='root',
+              mode=0755
+    )
+
+    if (params.krb5_conf_template is None) or not params.krb5_conf_template.strip():
+      content = Template('krb5_conf.j2')
+    else:
+      content = InlineTemplate(params.krb5_conf_template)
+
+    File(params.krb5_conf_path,
+         content=content,
+         owner='root',
+         group='root',
+         mode=0644
+    )
+
+  @staticmethod
+  def invoke_kadmin(query, admin_identity=None, default_realm=None):
+    """
+    Executes the kadmin or kadmin.local command (depending on whether admin_identity is set or not)
+    and returns the command result code and standard out data.
+
+    :param query: the kadmin query to execute
+    :param admin_identity: the identity for the administrative user (optional)
+    :param default_realm: the default realm to assume
+    :return: return_code, out
+    """
+    if (query is not None) and (len(query) > 0):
+      auth_principal = None
+      auth_keytab_file = None
+
+      if admin_identity is not None:
+        auth_principal = get_property_value(admin_identity, 'principal')
+
+      if auth_principal is None:
+        kadmin = 'kadmin.local'
+        credential = ''
+      else:
+        kadmin = 'kadmin -p "%s"' % auth_principal
+
+        auth_password = get_property_value(admin_identity, 'password')
+
+        if auth_password is None:
+          auth_keytab = get_property_value(admin_identity, 'keytab')
+
+          if auth_keytab is not None:
+            (fd, auth_keytab_file) = tempfile.mkstemp()
+            os.write(fd, base64.b64decode(auth_keytab))
+            os.close(fd)
+
+          credential = '-k -t %s' % auth_keytab_file
+        else:
+          credential = '-w "%s"' % auth_password
+
+      if (default_realm is not None) and (len(default_realm) > 0):
+        realm = '-r %s' % default_realm
+      else:
+        realm = ''
+
+      try:
+        command = '%s %s %s -q "%s"' % (kadmin, credential, realm, query.replace('"', '\\"'))
+        return shell.checked_call(command)
+      except:
+        raise
+      finally:
+        if auth_keytab_file is not None:
+          os.remove(auth_keytab_file)
+
+  @staticmethod
+  def create_keytab_file(principal, path, auth_identity=None):
+    success = False
+
+    if (principal is not None) and (len(principal) > 0):
+      if (auth_identity is None) or (len(auth_identity) == 0):
+        norandkey = '-norandkey'
+      else:
+        norandkey = ''
+
+      if (path is not None) and (len(path) > 0):
+        keytab_file = '-k %s' % path
+      else:
+        keytab_file = ''
+
+      try:
+        result_code, output = KerberosScript.invoke_kadmin(
+          'ktadd %s %s %s' % (keytab_file, norandkey, principal),
+          auth_identity)
+
+        success = (result_code == 0)
+      except:
+        raise Fail("Failed to create keytab for principal: %s (in %s)" % (principal, path))
+
+    return success
+
+  @staticmethod
+  def create_keytab(principal, auth_identity=None):
+    keytab = None
+
+    (fd, temp_path) = tempfile.mkstemp()
+    os.remove(temp_path)
+
+    try:
+      if KerberosScript.create_keytab_file(principal, temp_path, auth_identity):
+        with open(temp_path, 'r') as f:
+          keytab = base64.b64encode(f.read())
+    finally:
+      if os.path.isfile(temp_path):
+        os.remove(temp_path)
+
+    return keytab
+
+  @staticmethod
+  def principal_exists(identity, auth_identity=None):
+    exists = False
+
+    if identity is not None:
+      principal = get_property_value(identity, 'principal')
+
+      if (principal is not None) and (len(principal) > 0):
+        try:
+          result_code, output = KerberosScript.invoke_kadmin('getprinc %s' % principal,
+                                                             auth_identity)
+          exists = (output is not None) and (("Principal: %s" % principal) in output)
+        except:
+          raise Fail("Failed to determine if principal exists: %s" % principal)
+
+    return exists
+
+  @staticmethod
+  def change_principal_password(identity, auth_identity=None):
+    success = False
+
+    if identity is not None:
+      principal = get_property_value(identity, 'principal')
+
+      if (principal is not None) and (len(principal) > 0):
+        password = get_property_value(identity, 'password')
+
+        if password is None:
+          credentials = '-randkey'
+        else:
+          credentials = '-pw "%s"' % password
+
+        try:
+          result_code, output = KerberosScript.invoke_kadmin(
+            'change_password %s %s' % (credentials, principal),
+            auth_identity)
+
+          success = (result_code == 0)
+        except:
+          raise Fail("Failed to create principal: %s" % principal)
+
+    return success
+
+  @staticmethod
+  def create_principal(identity, auth_identity=None):
+    success = False
+
+    if identity is not None:
+      principal = get_property_value(identity, 'principal')
+
+      if (principal is not None) and (len(principal) > 0):
+        password = get_property_value(identity, 'password')
+
+        if password is None:
+          credentials = '-randkey'
+        else:
+          credentials = '-pw "%s"' % password
+
+        try:
+          result_code, out = KerberosScript.invoke_kadmin(
+            'addprinc %s %s' % (credentials, principal),
+            auth_identity)
+
+          success = (result_code == 0)
+        except:
+          raise Fail("Failed to create principal: %s" % principal)
+
+    return success
+
+  @staticmethod
+  def clear_tmp_cache():
+    tmp_dir = Constants.AGENT_TMP_DIR
+    if tmp_dir is None:
+      tmp_dir = gettempdir()
+    curl_krb_cache_path = os.path.join(tmp_dir, "curl_krb_cache")
+    Directory(curl_krb_cache_path, action="delete")
+
+  @staticmethod
+  def create_principals(identities, auth_identity=None):
+    if identities is not None:
+      for identity in identities:
+        KerberosScript.create_principal(identity, auth_identity)
+
+  @staticmethod
+  def create_or_update_administrator_identity():
+    import params
+
+    if params.realm is not None:
+      admin_identity = params.get_property_value(params.realm, 'admin_identity')
+
+      if KerberosScript.principal_exists(admin_identity):
+        KerberosScript.change_principal_password(admin_identity)
+      else:
+        KerberosScript.create_principal(admin_identity)
+
+  @staticmethod
+  def test_kinit(identity, user=None):
+    principal = get_property_value(identity, 'principal')
+    kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+    kdestroy_path_local = functions.get_kdestroy_path(default('/configurations/kerberos-env/executable_search_paths', None))
+
+    if principal is not None:
+      keytab_file = get_property_value(identity, 'keytab_file')
+      keytab = get_property_value(identity, 'keytab')
+      password = get_property_value(identity, 'password')
+
+      # If a test keytab file is available, simply use it
+      if (keytab_file is not None) and (os.path.isfile(keytab_file)):
+        command = '%s -k -t %s %s' % (kinit_path_local, keytab_file, principal)
+        Execute(command,
+          user = user,
+        )
+        return shell.checked_call(kdestroy_path_local)
+
+      # If base64-encoded test keytab data is available; then decode it, write it to a temporary file
+      # use it, and then remove the temporary file
+      elif keytab is not None:
+        (fd, test_keytab_file) = tempfile.mkstemp()
+        os.write(fd, base64.b64decode(keytab))
+        os.close(fd)
+
+        try:
+          command = '%s -k -t %s %s' % (kinit_path_local, test_keytab_file, principal)
+          Execute(command,
+            user = user,
+          )
+          return shell.checked_call(kdestroy_path_local)
+        except:
+          raise
+        finally:
+          if test_keytab_file is not None:
+            os.remove(test_keytab_file)
+
+      # If no keytab data is available and a password was supplied, simply use it.
+      elif password is not None:
+        process = subprocess.Popen([kinit_path_local, principal], stdin=subprocess.PIPE)
+        stdout, stderr = process.communicate(password)
+        if process.returncode:
+          err_msg = Logger.filter_text("Execution of kinit returned %d. %s" % (process.returncode, stderr))
+          raise Fail(err_msg)
+        else:
+          return shell.checked_call(kdestroy_path_local)
+      else:
+        return 0, ''
+    else:
+      return 0, ''
+
+
+  def write_keytab_file(self):
+    import params
+    import stat
+
+    if params.kerberos_command_params is not None:
+      for item  in params.kerberos_command_params:
+        keytab_content_base64 = get_property_value(item, 'keytab_content_base64')
+        if (keytab_content_base64 is not None) and (len(keytab_content_base64) > 0):
+          keytab_file_path = get_property_value(item, 'keytab_file_path')
+          if (keytab_file_path is not None) and (len(keytab_file_path) > 0):
+            head, tail = os.path.split(keytab_file_path)
+            if head:
+              Directory(head, create_parents=True, mode=0755, owner="root", group="root")
+
+            owner = get_property_value(item, 'keytab_file_owner_name')
+            owner_access = get_property_value(item, 'keytab_file_owner_access')
+            group = get_property_value(item, 'keytab_file_group_name')
+            group_access = get_property_value(item, 'keytab_file_group_access')
+            mode = 0
+
+            if owner_access == 'rw':
+              mode |= stat.S_IREAD | stat.S_IWRITE
+            else:
+              mode |= stat.S_IREAD
+
+            if group_access == 'rw':
+              mode |= stat.S_IRGRP | stat.S_IWGRP
+            elif group_access == 'r':
+              mode |= stat.S_IRGRP
+
+            keytab_content = base64.b64decode(keytab_content_base64)
+
+            # Wrap the keytab bytes in a callable so the binary content is not logged in command output
+            def make_lambda(data):
+              return lambda: data
+
+            File(keytab_file_path,
+                 content=make_lambda(keytab_content),
+                 mode=mode,
+                 owner=owner,
+                 group=group)
+
+            principal = get_property_value(item, 'principal')
+            if principal is not None:
+              curr_content = Script.structuredOut
+
+              if "keytabs" not in curr_content:
+                curr_content['keytabs'] = {}
+
+              curr_content['keytabs'][principal.replace("_HOST", params.hostname)] = keytab_file_path
+
+              self.put_structured_out(curr_content)
+
+  def delete_keytab_file(self):
+    import params
+
+    if params.kerberos_command_params is not None:
+      for item in params.kerberos_command_params:
+        keytab_file_path = get_property_value(item, 'keytab_file_path')
+        if (keytab_file_path is not None) and (len(keytab_file_path) > 0):
+
+          # Delete the keytab file
+          File(keytab_file_path, action="delete")
+
+          principal = get_property_value(item, 'principal')
+          if principal is not None:
+            curr_content = Script.structuredOut
+
+            if "keytabs" not in curr_content:
+              curr_content['keytabs'] = {}
+
+            curr_content['keytabs'][principal.replace("_HOST", params.hostname)] = '_REMOVED_'
+
+            self.put_structured_out(curr_content)
+
+  def setup_jce(self):
+    import params
+
+    if not params.jdk_name:
+      return
+    jce_curl_target = None
+    if params.jce_policy_zip is not None:
+      jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
+      Directory(params.artifact_dir,
+                create_parents = True,
+                )
+      File(jce_curl_target,
+           content = DownloadSource(format("{jce_location}/{jce_policy_zip}")),
+           )
+    elif params.security_enabled:
+      # Inconsistent state: security is enabled but no JCE policy zip was supplied
+      raise Fail("Security is enabled, but JCE policy zip is not specified.")
+
+    # The JCE policy files are only extracted when security is enabled
+    if params.security_enabled:
+      security_dir = format("{java_home}/jre/lib/security")
+
+      File([format("{security_dir}/US_export_policy.jar"), format("{security_dir}/local_policy.jar")],
+           action = "delete",
+           )
+
+      extract_cmd = ("unzip", "-o", "-j", "-q", jce_curl_target, "-d", security_dir)
+      Execute(extract_cmd,
+              only_if = format("test -e {security_dir} && test -f {jce_curl_target}"),
+              path = ['/bin/','/usr/bin'],
+              sudo = True
+      )
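
A minimal standalone sketch of how write_keytab_file above composes the keytab file mode from the
'rw'/'r' access strings (the helper name and the sample values are illustrative, not from the patch):

    import stat

    def keytab_mode(owner_access, group_access):
        # Mirrors the bit composition in write_keytab_file: the owner always gets read,
        # 'rw' adds write; the group gets read and/or write only when requested.
        mode = 0
        if owner_access == 'rw':
            mode |= stat.S_IREAD | stat.S_IWRITE
        else:
            mode |= stat.S_IREAD
        if group_access == 'rw':
            mode |= stat.S_IRGRP | stat.S_IWGRP
        elif group_access == 'r':
            mode |= stat.S_IRGRP
        return mode

    # An owner-writable, group-readable keytab resolves to 0640; an owner-read-only one to 0400.
    assert keytab_mode('rw', 'r') == 0o640
    assert keytab_mode('r', '') == 0o400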

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/kerberos_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/kerberos_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/kerberos_server.py
new file mode 100755
index 0000000..b98b265
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/kerberos_server.py
@@ -0,0 +1,141 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from kerberos_common import *
+from ambari_commons.os_check import OSCheck
+
+class KerberosServer(KerberosScript):
+  @staticmethod
+  def write_kadm5_acl():
+    import params
+
+    Directory(params.kadm5_acl_dir,
+              owner='root',
+              create_parents=True,
+              group='root',
+              mode=0700
+    )
+
+    if (params.kadm5_acl_template is None) or not params.kadm5_acl_template.strip():
+      content = Template('kadm5_acl.j2')
+    else:
+      content = InlineTemplate(params.kadm5_acl_template)
+
+    File(params.kadm5_acl_path,
+         content=content,
+         owner='root',
+         group='root',
+         mode=0600
+    )
+
+  @staticmethod
+  def write_kdc_conf():
+    import params
+
+    Directory(params.kdc_conf_dir,
+              owner='root',
+              create_parents=True,
+              group='root',
+              mode=0700
+    )
+
+    if (params.kdc_conf_template is None) or not params.kdc_conf_template.strip():
+      content = Template('kdc_conf.j2')
+    else:
+      content = InlineTemplate(params.kdc_conf_template)
+
+    File(params.kdc_conf_path,
+         content=content,
+         owner='root',
+         group='root',
+         mode=0600
+    )
+
+  def install(self, env):
+    import params
+
+    self.install_packages(env)
+    self.configure(env)
+
+    # Create the Kerberos database (only on install, for now)
+    Execute(
+      "%s create -s -P '%s'" % (params.kdb5_util_path, KerberosScript.create_random_password()))
+
+    # Create or update the administrator account
+    KerberosScript.create_or_update_administrator_identity()
+
+
+  def start(self, env):
+    # Attempt to reconfigure the service before starting
+    self.configure(env)
+
+    # Create or update the administrator account
+    KerberosScript.create_or_update_administrator_identity()
+
+    if OSCheck.is_suse_family():
+      Execute('rckadmind start')
+      Execute('rckrb5kdc start')
+    elif OSCheck.is_ubuntu_family():
+      Execute('service krb5-kdc start')
+      Execute('service krb5-admin-server start')
+    else:
+      Execute('service krb5kdc start')
+      Execute('service kadmin start')
+
+  def stop(self, env):
+    if OSCheck.is_suse_family():
+      Execute('rckadmind stop')
+      Execute('rckrb5kdc stop')
+    elif OSCheck.is_ubuntu_family():
+      Execute('service krb5-kdc stop')
+      Execute('service krb5-admin-server stop')
+    else:
+      Execute('service krb5kdc stop')
+      Execute('service kadmin stop')
+
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+
+    KerberosServer.write_krb5_conf()
+    KerberosServer.write_kdc_conf()
+    KerberosServer.write_kadm5_acl()
+
+  def status(self, env):
+    import params
+
+    if OSCheck.is_suse_family():
+      try:
+        Execute('checkproc `which krb5kdc`')
+        Execute('checkproc `which kadmind`')
+      except Fail as ex:
+        raise ComponentIsNotRunning()
+
+    elif OSCheck.is_ubuntu_family():
+      check_process_status(params.kadmin_pid_path)
+      check_process_status(params.krb5kdc_pid_path)
+
+    else:
+      check_process_status(params.kadmin_pid_path)
+      check_process_status(params.krb5kdc_pid_path)
+
+
+if __name__ == "__main__":
+  KerberosServer().execute()
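
A small sketch of the OS-family dispatch used by KerberosServer.start/stop above, reduced to a
lookup table (the helper and the table are illustrative only; the service commands are the ones
the patch uses for the SUSE, Ubuntu and other families):

    KDC_SERVICE_COMMANDS = {
        'suse':   ['rckadmind {action}', 'rckrb5kdc {action}'],
        'ubuntu': ['service krb5-kdc {action}', 'service krb5-admin-server {action}'],
        'other':  ['service krb5kdc {action}', 'service kadmin {action}'],
    }

    def kdc_commands(os_family, action):
        # Return the shell commands to run for 'start' or 'stop' on the given OS family.
        commands = KDC_SERVICE_COMMANDS.get(os_family, KDC_SERVICE_COMMANDS['other'])
        return [c.format(action=action) for c in commands]

    print kdc_commands('ubuntu', 'start')
    # ['service krb5-kdc start', 'service krb5-admin-server start']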

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/params.py
new file mode 100755
index 0000000..03f208e
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/params.py
@@ -0,0 +1,200 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+from utils import get_property_value, get_unstructured_data
+from ambari_commons.os_check import OSCheck
+
+krb5_conf_dir = '/etc'
+krb5_conf_file = 'krb5.conf'
+krb5_conf_path = krb5_conf_dir + '/' + krb5_conf_file
+
+if OSCheck.is_suse_family():
+  kdc_conf_dir = '/var/lib/kerberos/krb5kdc'
+elif OSCheck.is_ubuntu_family():
+  kdc_conf_dir = '/etc/krb5kdc'
+else:
+  kdc_conf_dir = '/var/kerberos/krb5kdc'
+kdc_conf_file = 'kdc.conf'
+kdc_conf_path = kdc_conf_dir + '/' + kdc_conf_file
+
+kadm5_acl_dir = kdc_conf_dir  # Typically kadm5.acl and kdc.conf exist in the same directory
+kadm5_acl_file = 'kadm5.acl'
+kadm5_acl_path = kadm5_acl_dir + '/' + kadm5_acl_file
+
+config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+
+configurations = None
+keytab_details = None
+default_group = None
+kdc_server_host = None
+cluster_host_info = None
+
+hostname = config['hostname']
+
+kdb5_util_path = 'kdb5_util'
+
+kadmin_pid_path = '/var/run/kadmind.pid'
+krb5kdc_pid_path = '/var/run/krb5kdc.pid'
+
+smoke_test_principal = None
+smoke_test_keytab_file = None
+
+smoke_user = 'ambari-qa'
+
+manage_identities = 'true'
+
+artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
+jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
+jce_location = config['hostLevelParams']['jdk_location']
+jdk_name = default("/hostLevelParams/jdk_name", None)
+java_home = config['hostLevelParams']['java_home']
+java_version = int(config['hostLevelParams']['java_version'])
+
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+
+if config is not None:
+  kerberos_command_params = get_property_value(config, 'kerberosCommandParams')
+
+  cluster_host_info = get_property_value(config, 'clusterHostInfo')
+  if cluster_host_info is not None:
+    kdc_server_hosts = get_property_value(cluster_host_info, 'kdc_server_hosts')
+
+    if (kdc_server_hosts is not None) and (len(kdc_server_hosts) > 0):
+      kdc_server_host = kdc_server_hosts[0]
+
+  configurations = get_property_value(config, 'configurations')
+  if configurations is not None:
+    cluster_env = get_property_value(configurations, 'cluster-env')
+
+    if cluster_env is not None:
+      smoke_test_principal = get_property_value(cluster_env, 'smokeuser_principal_name', None, True, None)
+      smoke_test_keytab_file = get_property_value(cluster_env, 'smokeuser_keytab', None, True, None)
+      smoke_user = get_property_value(cluster_env, 'smokeuser', smoke_user, True, smoke_user)
+
+      default_group = get_property_value(cluster_env, 'user_group')
+
+      if default_group is None:
+        default_group = get_property_value(cluster_env, 'user-group')
+
+    # ##############################################################################################
+    # Get krb5.conf template data
+    # ##############################################################################################
+    realm = 'EXAMPLE.COM'
+    domains = ''
+    kdc_hosts = 'localhost'
+    admin_server_host = None
+    admin_principal = None
+    admin_password = None
+    admin_keytab = None
+    test_principal = None
+    test_password = None
+    test_keytab = None
+    test_keytab_file = None
+    encryption_types = None
+    manage_krb5_conf = "true"
+    krb5_conf_template = None
+
+    krb5_conf_data = get_property_value(configurations, 'krb5-conf')
+
+    kerberos_env = get_property_value(configurations, "kerberos-env")
+
+    if kerberos_env is not None:
+      manage_identities = get_property_value(kerberos_env, "manage_identities", "true", True, "true")
+      encryption_types = get_property_value(kerberos_env, "encryption_types", None, True, None)
+      realm = get_property_value(kerberos_env, "realm", None, True, None)
+      kdc_hosts = get_property_value(kerberos_env, 'kdc_hosts', kdc_hosts)
+      admin_server_host = get_property_value(kerberos_env, 'admin_server_host', admin_server_host)
+
+    if krb5_conf_data is not None:
+      realm = get_property_value(krb5_conf_data, 'realm', realm)
+      domains = get_property_value(krb5_conf_data, 'domains', domains)
+
+      admin_principal = get_property_value(krb5_conf_data, 'admin_principal', admin_principal, True, None)
+      admin_password = get_property_value(krb5_conf_data, 'admin_password', admin_password, True, None)
+      admin_keytab = get_property_value(krb5_conf_data, 'admin_keytab', admin_keytab, True, None)
+
+      test_principal = get_property_value(krb5_conf_data, 'test_principal', test_principal, True, None)
+      test_password = get_property_value(krb5_conf_data, 'test_password', test_password, True, None)
+      test_keytab = get_property_value(krb5_conf_data, 'test_keytab', test_keytab, True, None)
+      test_keytab_file = get_property_value(krb5_conf_data, 'test_keytab_file', test_keytab_file, True, None)
+
+      krb5_conf_template = get_property_value(krb5_conf_data, 'content', krb5_conf_template)
+      krb5_conf_dir = get_property_value(krb5_conf_data, 'conf_dir', krb5_conf_dir)
+      krb5_conf_file = get_property_value(krb5_conf_data, 'conf_file', krb5_conf_file)
+      krb5_conf_path = krb5_conf_dir + '/' + krb5_conf_file
+
+      manage_krb5_conf = get_property_value(krb5_conf_data, 'manage_krb5_conf', "true")
+
+    # For backward compatibility, ensure that kdc_host exists. This may be needed if the krb5.conf
+    # template in krb5-conf/content had not been updated during the Ambari upgrade to 2.4.0 - which
+    # will happen if the template was altered from its stack-default value.
+    kdc_host_parts = kdc_hosts.split(',')
+    if kdc_host_parts:
+      kdc_host = kdc_host_parts[0]
+    else:
+      kdc_host = kdc_hosts
+
+    # ##############################################################################################
+    # Get kdc.conf template data
+    # ##############################################################################################
+    kdcdefaults_kdc_ports = "88"
+    kdcdefaults_kdc_tcp_ports = "88"
+
+    kdc_conf_template = None
+
+    kdc_conf_data = get_property_value(configurations, 'kdc-conf')
+
+    if kdc_conf_data is not None:
+      kdcdefaults_kdc_ports = get_property_value(kdc_conf_data, 'kdcdefaults_kdc_ports', kdcdefaults_kdc_ports)
+      kdcdefaults_kdc_tcp_ports = get_property_value(kdc_conf_data, 'kdcdefaults_kdc_tcp_ports', kdcdefaults_kdc_tcp_ports)
+
+      kdc_conf_template = get_property_value(kdc_conf_data, 'content', kdc_conf_template)
+      kdc_conf_dir = get_property_value(kdc_conf_data, 'conf_dir', kdc_conf_dir)
+      kdc_conf_file = get_property_value(kdc_conf_data, 'conf_file', kdc_conf_file)
+      kdc_conf_path = kdc_conf_dir + '/' + kdc_conf_file
+
+    # ##############################################################################################
+    # Get kadm5.acl template data
+    # ##############################################################################################
+
+    kadm5_acl_template = None
+
+    kadm5_acl_data = get_property_value(configurations, 'kadm5-acl')
+
+    if kadm5_acl_data is not None:
+      kadm5_acl_template = get_property_value(kadm5_acl_data, 'content', kadm5_acl_template)
+      kadm5_acl_dir = get_property_value(kadm5_acl_data, 'conf_dir', kadm5_acl_dir)
+      kadm5_acl_file = get_property_value(kadm5_acl_data, 'conf_file', kadm5_acl_file)
+      kadm5_acl_path = kadm5_acl_dir + '/' + kadm5_acl_file
+
+  # ################################################################################################
+  # Get commandParams
+  # ################################################################################################
+  command_params = get_property_value(config, 'commandParams')
+  if command_params is not None:
+    keytab_details = get_unstructured_data(command_params, 'keytab')
+
+    if manage_identities:
+      smoke_test_principal = get_property_value(command_params, 'principal_name', smoke_test_principal)
+      smoke_test_keytab_file = get_property_value(command_params, 'keytab_file', smoke_test_keytab_file)
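
To illustrate the layered kdc_hosts/admin_server_host lookups above (hard-coded default, then
kerberos-env), here is a self-contained approximation; the configuration values are made up and
the simplified get_property_value stands in for the full helper defined in utils.py:

    def get_property_value(dictionary, name, default_value=None):
        # Simplified: behaves like dict.get, but also falls back when the stored value is None.
        value = dictionary.get(name)
        return default_value if value is None else value

    configurations = {
        'kerberos-env': {'kdc_hosts': 'kdc1.example.org,kdc2.example.org'},
    }

    kdc_hosts = 'localhost'      # hard-coded default, as above
    admin_server_host = None

    kerberos_env = get_property_value(configurations, 'kerberos-env')
    if kerberos_env is not None:
        kdc_hosts = get_property_value(kerberos_env, 'kdc_hosts', kdc_hosts)
        admin_server_host = get_property_value(kerberos_env, 'admin_server_host', admin_server_host)

    # As in the patch, the first entry of kdc_hosts is kept as kdc_host for backward compatibility.
    kdc_host = kdc_hosts.split(',')[0]

    print kdc_hosts            # kdc1.example.org,kdc2.example.org
    print kdc_host             # kdc1.example.org
    print admin_server_host    # None (not set in the made-up configuration)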

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/service_check.py
new file mode 100755
index 0000000..7c09171
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/service_check.py
@@ -0,0 +1,81 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from kerberos_common import *
+from resource_management import *
+
+# hashlib is supplied as of Python 2.5 as the replacement interface for md5
+# and other secure hashes.  In 2.6, md5 is deprecated.  Import hashlib if
+# available, avoiding a deprecation warning under 2.6.  Import md5 otherwise,
+# preserving 2.4 compatibility.
+try:
+  import hashlib
+  _md5 = hashlib.md5
+except ImportError:
+  import md5
+  _md5 = md5.new
+
+class KerberosServiceCheck(KerberosScript):
+  def service_check(self, env):
+    import params
+
+    # If Ambari IS managing Kerberos identities (kerberos-env/manage_identities = true), it is
+    # expected that a (smoke) test principal and its associated keytab file are available for use
+    # **  If not available, this service check will fail
+    # **  If available, this service check will execute
+    #
+    # If Ambari IS NOT managing Kerberos identities (kerberos-env/manage_identities = false), the
+    # smoke test principal and its associated keytab file may not be available
+    # **  If not available, this service check will execute
+    # **  If available, this service check will execute
+
+    if ((params.smoke_test_principal is not None) and
+          (params.smoke_test_keytab_file is not None) and
+          os.path.isfile(params.smoke_test_keytab_file)):
+      print "Performing kinit using %s" % params.smoke_test_principal
+
+      ccache_file_name = _md5("{0}|{1}".format(params.smoke_test_principal,params.smoke_test_keytab_file)).hexdigest()
+      ccache_file_path = "{0}{1}kerberos_service_check_cc_{2}".format(params.tmp_dir, os.sep, ccache_file_name)
+
+      kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+      kinit_command = "{0} -c {1} -kt {2} {3}".format(kinit_path_local, ccache_file_path, params.smoke_test_keytab_file, params.smoke_test_principal)
+
+      try:
+        # kinit
+        Execute(kinit_command,
+                user=params.smoke_user
+        )
+      finally:
+        File(ccache_file_path, # Always clean up the temporary credential cache, even if kinit failed before writing it
+             action = "delete",
+        )
+    elif params.manage_identities:
+      err_msg = Logger.filter_text("Failed to execute kinit test due to principal or keytab not found or available")
+      raise Fail(err_msg)
+    else:
+      # Ambari is not managing identities, so if the smoke user credentials do not exist, indicate why.
+      print "Skipping this service check since Ambari is not managing Kerberos identities and the smoke user " \
+            "credentials are not available. To execute this service check, the smoke user principal name " \
+            "and keytab file location must be set in cluster-env and the smoke user's keytab file must " \
+            "exist in the configured location."
+
+if __name__ == "__main__":
+  KerberosServiceCheck().execute()
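
The credential cache used by the check above is named from an MD5 digest of the principal and the
keytab path; a short Python 2 sketch with made-up values shows the resulting path layout:

    import hashlib
    import os

    smoke_test_principal = 'ambari-qa@EXAMPLE.COM'                              # made up
    smoke_test_keytab_file = '/etc/security/keytabs/smokeuser.headless.keytab'  # made up
    tmp_dir = '/var/lib/ambari-agent/tmp'                                       # made up

    ccache_file_name = hashlib.md5(
        "{0}|{1}".format(smoke_test_principal, smoke_test_keytab_file)).hexdigest()
    ccache_file_path = "{0}{1}kerberos_service_check_cc_{2}".format(tmp_dir, os.sep, ccache_file_name)

    print ccache_file_path
    # /var/lib/ambari-agent/tmp/kerberos_service_check_cc_<32-character hex digest>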

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/status_params.py
new file mode 100755
index 0000000..bbae4a3
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/status_params.py
@@ -0,0 +1,32 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
+
+hostname = config['hostname']
+kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+
+smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
+smoke_user = config['configurations']['cluster-env']['smokeuser']
+smoke_user_principal = config['configurations']['cluster-env']['smokeuser_principal_name']

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/utils.py b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/utils.py
new file mode 100755
index 0000000..199e6d7
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/scripts/utils.py
@@ -0,0 +1,105 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+def get_property_value(dictionary, property_name, default_value=None, trim_string=False,
+                       empty_value=""):
+  """
+  Get a property value from a dictionary, applying rules as necessary.
+
+  If dictionary does not contain a value for property_name or the value for property_name is None,
+  default_value is used as the value to return.  Then, if trim_string is True and the value is None
+  or the value is an empty string, empty_value will be returned; else the (current) value is returned.
+
+  Note: the property value will most likely be a string or a unicode string, however in the event
+  it is not (for example a number), this method will behave properly and return the value as is.
+
+  :param dictionary: a dictionary of values
+  :param property_name: the name of a dictionary item to retrieve
+  :param default_value: the value to use if the item is not in the dictionary or the value of the item is None
+  :param trim_string: a Boolean value indicating whether to strip whitespace from the value (True) or not (False)
+  :param empty_value: the value to use if the (current) value is None or an empty string, if trim_string is True
+  :return: the requested property value with rules applied
+  """
+  # If property_name is not in the dictionary, set value to default_value
+  if property_name in dictionary:
+    value = dictionary[property_name]
+    if value is None:
+      value = default_value
+  else:
+    value = default_value
+
+  if trim_string:
+    # If the value is None, consider it empty...
+    if value is None:
+      value = empty_value
+    elif (type(value) == str) or (type(value) == unicode):
+      value = value.strip()
+
+      if len(value) == 0:
+        value = empty_value
+
+  return value
+
+def get_unstructured_data(dictionary, property_name):
+  prefix = property_name + '/'
+  prefix_len = len(prefix)
+  return dict((k[prefix_len:], v) for k, v in dictionary.iteritems() if k.startswith(prefix))
+
+def split_host_and_port(host):
+  """
+  Splits a string into its host and port components
+
+  :param host: a string matching the following pattern: <host name | ip address>[:port]
+  :return: a Dictionary containing 'host' and 'port' entries for the input value
+  """
+
+  if host is None:
+    host_and_port = None
+  else:
+    host_and_port = {}
+    parts = host.split(":")
+
+    if parts is not None:
+      length = len(parts)
+
+      if length > 0:
+        host_and_port['host'] = parts[0]
+
+        if length > 1:
+          host_and_port['port'] = int(parts[1])
+
+  return host_and_port
+
+def set_port(host, port):
+  """
+  Sets the port for a host specification, potentially replacing an existing port declaration
+
+  :param host: a string matching the following pattern: <host name | ip address>[:port]
+  :param port: a string or integer declaring the (new) port
+  :return: a string declaring the new host/port specification
+  """
+  if port is None:
+    return host
+  else:
+    host_and_port = split_host_and_port(host)
+
+    if (host_and_port is not None) and ('host' in host_and_port):
+      return "%s:%s" % (host_and_port['host'], port)
+    else:
+      return host
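
A few usage examples for the helpers above (Python 2, values made up; assumes utils.py from this
patch is importable):

    from utils import get_property_value, split_host_and_port, set_port

    conf = {'realm': '  EXAMPLE.COM  ', 'domains': None}

    # The default applies when the key is missing or its stored value is None.
    print get_property_value(conf, 'domains', '.example.com')    # .example.com
    # trim_string strips whitespace; empty results are replaced by empty_value.
    print get_property_value(conf, 'realm', None, True, None)    # EXAMPLE.COM

    print split_host_and_port('kdc.example.com:88')              # e.g. {'host': 'kdc.example.com', 'port': 88}
    print set_port('kdc.example.com:749', 88)                    # kdc.example.com:88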

http://git-wip-us.apache.org/repos/asf/ambari/blob/1863c3b9/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/templates/kadm5_acl.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/templates/kadm5_acl.j2 b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/templates/kadm5_acl.j2
new file mode 100755
index 0000000..d82ae23
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/services/KERBEROS/package/templates/kadm5_acl.j2
@@ -0,0 +1,20 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+*/admin@{{realm}}	*
+
+{# Additional realm declarations should be placed below #}
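
Rendered with the default realm from params.py above, the single ACL entry grants full kadmin
privileges to any */admin principal in the realm; a quick standalone rendering check using
Jinja2 (illustration only):

    from jinja2 import Template

    print Template("*/admin@{{realm}}\t*").render(realm="EXAMPLE.COM")
    # */admin@EXAMPLE.COM	*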

