ambari-commits mailing list archives

From rle...@apache.org
Subject ambari git commit: AMBARI-9580. Set kdc_type in kerberos-env rather than krb5-conf configuration (rlevas)
Date Thu, 12 Feb 2015 00:21:27 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 52250d5e1 -> b9c9832cd


AMBARI-9580. Set kdc_type in kerberos-env rather than krb5-conf configuration (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b9c9832c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b9c9832c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b9c9832c

Branch: refs/heads/trunk
Commit: b9c9832cd71a621e63b90002c0e04bd282dec5dc
Parents: 52250d5
Author: Robert Levas <rlevas@hortonworks.com>
Authored: Wed Feb 11 19:21:15 2015 -0500
Committer: Robert Levas <rlevas@hortonworks.com>
Committed: Wed Feb 11 19:21:15 2015 -0500

----------------------------------------------------------------------
 .../server/controller/KerberosHelper.java       | 22 ++++++++++----------
 .../1.10.3-10/configuration/kerberos-env.xml    |  8 +++++++
 .../1.10.3-10/configuration/krb5-conf.xml       |  7 -------
 .../KERBEROS/configuration/krb5-conf.xml        |  7 -------
 .../server/controller/KerberosHelperTest.java   | 20 ++++--------------
 .../python/stacks/2.2/KERBEROS/use_cases.py     | 16 ++++++++++----
 .../journalnode-upgrade-hdfs-secure.json        |  8 +++----
 .../stacks/2.2/configs/journalnode-upgrade.json |  6 +++---
 .../wizard/stack/hdp/version2.0.1/KERBEROS.json |  2 +-
 ambari-web/app/data/HDP2/site_properties.js     |  2 +-
 10 files changed, 44 insertions(+), 54 deletions(-)
----------------------------------------------------------------------
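
For anyone tracking this change downstream: kdc_type now lives in the kerberos-env configuration type rather than krb5-conf, so configuration payloads (blueprints, desired configs, test fixtures) need to move the key accordingly. A minimal before/after sketch in the style of the Python test fixtures follows; the surrounding property names are illustrative, not a complete configuration.

# Illustrative only -- shows where kdc_type moves with this commit; the other
# keys are examples, not a complete krb5-conf or kerberos-env.
old_configurations = {
    'krb5-conf': {
        'kdc_type': 'mit-kdc',
        'kdc_host': 'c6401.ambari.apache.org',
        'realm': 'EXAMPLE.COM'
    }
}
new_configurations = {
    'kerberos-env': {
        'kdc_type': 'mit-kdc'   # 'mit-kdc' or 'active-directory'
    },
    'krb5-conf': {
        'kdc_host': 'c6401.ambari.apache.org',
        'realm': 'EXAMPLE.COM'
    }
}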


http://git-wip-us.apache.org/repos/asf/ambari/blob/b9c9832c/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
index ce319e6..2481f32 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java
@@ -705,18 +705,18 @@ public class KerberosHelper {
 
     KDCType kdcType = null;
     String kdcTypeProperty = kerberosEnvProperties.get("kdc_type");
-    if (kdcTypeProperty == null) {
-      // TODO: (rlevas) Only pull from kerberos-env, this is only for transitional purposes (AMBARI 9121)
-      kdcTypeProperty = krb5ConfProperties.get("kdc_type");
+    if(kdcTypeProperty == null) {
+      String message = "The 'kerberos-env/kdc_type' value must be set to a valid KDC type";
+      LOG.error(message);
+      throw new IllegalArgumentException(message);
     }
-    if (kdcTypeProperty != null) {
-      try {
-        kdcType = KDCType.translate(kdcTypeProperty);
-      } catch (IllegalArgumentException e) {
-        String message = String.format("Invalid 'kdc_type' value: %s", kdcTypeProperty);
-        LOG.error(message);
-        throw new AmbariException(message);
-      }
+
+    try {
+      kdcType = KDCType.translate(kdcTypeProperty);
+    } catch (IllegalArgumentException e) {
+      String message = String.format("Invalid 'kdc_type' value: %s", kdcTypeProperty);
+      LOG.error(message);
+      throw new AmbariException(message);
     }
 
     kerberosDetails.setSecurityType(cluster.getSecurityType());
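
The net behavioral change in KerberosHelper: the transitional fallback to krb5-conf/kdc_type is gone, so a missing kerberos-env/kdc_type now fails fast with an IllegalArgumentException, while an unrecognized value still surfaces as an AmbariException. A rough standalone model of that decision flow (plain Python, not Ambari code; the KDC_TYPES set stands in for KDCType.translate()):

# Sketch of the post-AMBARI-9580 lookup; not Ambari code.
KDC_TYPES = {'mit-kdc', 'active-directory'}  # stand-in for KDCType.translate()

def resolve_kdc_type(kerberos_env_properties):
    # krb5-conf is no longer consulted; the value must come from kerberos-env.
    kdc_type = kerberos_env_properties.get('kdc_type')
    if kdc_type is None:
        raise ValueError("The 'kerberos-env/kdc_type' value must be set to a valid KDC type")
    if kdc_type not in KDC_TYPES:
        raise ValueError("Invalid 'kdc_type' value: %s" % kdc_type)
    return kdc_type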

http://git-wip-us.apache.org/repos/asf/ambari/blob/b9c9832c/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
index 03b13d6..74b6f71 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
@@ -22,6 +22,14 @@
 
 <configuration supports_final="false">
   <property require-input="true">
+    <name>kdc_type</name>
+    <description>
+      The type of KDC being used. Either mit-kdc or active-directory
+    </description>
+    <value>mit-kdc</value>
+  </property>
+
+  <property require-input="true">
     <name>ldap_url</name>
     <description>
       The URL to the Active Directory LDAP Interface
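
Since kdc_type is now a required kerberos-env property (defaulting to mit-kdc), clusters that previously carried it in krb5-conf need the value re-applied under the new configuration type. One way is Ambari's configuration API; the sketch below assumes the usual desired_config PUT on the cluster resource, and the host, credentials, tag scheme, and property set are placeholders to adapt (desired_config replaces the whole type, so include every kerberos-env property your version expects).

# Assumed example of setting kerberos-env/kdc_type through Ambari's REST API.
# Host, credentials, and the exact payload are placeholders; verify against your Ambari version.
import json
import time
import requests

AMBARI_URL = 'http://ambari.example.com:8080'
CLUSTER = 'c1'

payload = {
    'Clusters': {
        'desired_config': {
            'type': 'kerberos-env',
            'tag': 'version%d' % int(time.time()),
            'properties': {
                'kdc_type': 'mit-kdc',   # or 'active-directory'
                'ldap_url': '',
                'container_dn': ''
            }
        }
    }
}

resp = requests.put(
    '%s/api/v1/clusters/%s' % (AMBARI_URL, CLUSTER),
    auth=('admin', 'admin'),
    headers={'X-Requested-By': 'ambari'},
    data=json.dumps(payload)
)
resp.raise_for_status()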

http://git-wip-us.apache.org/repos/asf/ambari/blob/b9c9832c/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/krb5-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/krb5-conf.xml b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/krb5-conf.xml
index 38f6ab2..3a2c81f 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/krb5-conf.xml
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/krb5-conf.xml
@@ -114,13 +114,6 @@
     <value/>
   </property>
   <property require-input="true">
-    <name>kdc_type</name>
-    <description>
-      The type of KDC being used. Either mit-kdc or active-directory
-    </description>
-    <value>mit-kdc</value>
-  </property>
-  <property require-input="true">
     <name>kdc_host</name>
     <description>
       The IP address or FQDN for the KDC host. Optionally a port number may be included.

http://git-wip-us.apache.org/repos/asf/ambari/blob/b9c9832c/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/krb5-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/krb5-conf.xml b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/krb5-conf.xml
index 8e6b9ba..44bb209 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/krb5-conf.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/krb5-conf.xml
@@ -92,13 +92,6 @@
     <value/>
   </property>
   <property require-input="true">
-    <name>kdc_type</name>
-    <description>
-      The type of KDC being used. Either mit-kdc or active-directory
-    </description>
-    <value>mit-kdc</value>
-  </property>
-  <property require-input="true">
     <name>kdc_host</name>
     <description>
      The IP address or FQDN of the KDC or Active Directory server, optionally a port number may be

http://git-wip-us.apache.org/repos/asf/ambari/blob/b9c9832c/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
index ad6128c..fd36d9d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
@@ -385,18 +385,15 @@ public class KerberosHelperTest extends EasyMockSupport {
     expectLastCall().once();
 
     final Map<String, String> kerberosEnvProperties = createNiceMock(Map.class);
-    // TODO: (rlevas) Add when AMBARI 9121 is complete
-    // expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").once();
+    expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").once();
 
     final Config kerberosEnvConfig = createNiceMock(Config.class);
     expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).once();
 
     final Map<String, String> krb5ConfProperties = createNiceMock(Map.class);
-    expect(krb5ConfProperties.get("kdc_type")).andReturn("mit-kdc").once();
     expect(krb5ConfProperties.get("realm")).andReturn("FOOBAR.COM").once();
 
     final Config krb5ConfConfig = createNiceMock(Config.class);
-    // TODO: (rlevas) Remove when AMBARI 9121 is complete
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).once();
 
     final Cluster cluster = createNiceMock(Cluster.class);
@@ -636,18 +633,15 @@ public class KerberosHelperTest extends EasyMockSupport {
     expectLastCall().once();
 
     final Map<String, String> kerberosEnvProperties = createNiceMock(Map.class);
-    // TODO: (rlevas) Add when AMBARI 9121 is complete
-    // expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").once();
+    expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").once();
 
     final Config kerberosEnvConfig = createNiceMock(Config.class);
     expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).once();
 
     final Map<String, String> krb5ConfProperties = createNiceMock(Map.class);
-    expect(krb5ConfProperties.get("kdc_type")).andReturn("mit-kdc").once();
     expect(krb5ConfProperties.get("realm")).andReturn("FOOBAR.COM").once();
 
     final Config krb5ConfConfig = createNiceMock(Config.class);
-    // TODO: (rlevas) Remove when AMBARI 9121 is complete
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).once();
 
     final Cluster cluster = createNiceMock(Cluster.class);
@@ -862,18 +856,15 @@ public class KerberosHelperTest extends EasyMockSupport {
         .once();
 
     final Map<String, String> kerberosEnvProperties = createNiceMock(Map.class);
-    // TODO: (rlevas) Add when AMBARI 9121 is complete
-    // expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").once();
+    expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").once();
 
     final Config kerberosEnvConfig = createNiceMock(Config.class);
     expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).once();
 
     final Map<String, String> krb5ConfProperties = createNiceMock(Map.class);
-    expect(krb5ConfProperties.get("kdc_type")).andReturn("mit-kdc").once();
     expect(krb5ConfProperties.get("realm")).andReturn("FOOBAR.COM").once();
 
     final Config krb5ConfConfig = createNiceMock(Config.class);
-    // TODO: (rlevas) Remove when AMBARI 9121 is complete
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).once();
 
     final Cluster cluster = createNiceMock(Cluster.class);
@@ -1143,18 +1134,15 @@ public class KerberosHelperTest extends EasyMockSupport {
         .once();
 
     final Map<String, String> kerberosEnvProperties = createNiceMock(Map.class);
-    // TODO: (rlevas) Add when AMBARI 9121 is complete
-    // expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").once();
+    expect(kerberosEnvProperties.get("kdc_type")).andReturn("mit-kdc").once();
 
     final Config kerberosEnvConfig = createNiceMock(Config.class);
     expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).once();
 
     final Map<String, String> krb5ConfProperties = createNiceMock(Map.class);
-    expect(krb5ConfProperties.get("kdc_type")).andReturn("mit-kdc").once();
     expect(krb5ConfProperties.get("realm")).andReturn("FOOBAR.COM").once();
 
     final Config krb5ConfConfig = createNiceMock(Config.class);
-    // TODO: (rlevas) Remove when AMBARI 9121 is complete
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).once();
 
     final Cluster cluster = createNiceMock(Cluster.class);

http://git-wip-us.apache.org/repos/asf/ambari/blob/b9c9832c/ambari-server/src/test/python/stacks/2.2/KERBEROS/use_cases.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/KERBEROS/use_cases.py b/ambari-server/src/test/python/stacks/2.2/KERBEROS/use_cases.py
index 0634a37..8b896c1 100644
--- a/ambari-server/src/test/python/stacks/2.2/KERBEROS/use_cases.py
+++ b/ambari-server/src/test/python/stacks/2.2/KERBEROS/use_cases.py
@@ -74,11 +74,13 @@ def get_manged_kdc_use_case():
     json_data = json.load(f)
 
   json_data['clusterHostInfo']['kdc_server_hosts'] = ['c6401.ambari.apache.org']
+  json_data['configurations']['kerberos-env'] = {
+    'kdc_type': 'mit-kdc'
+  }
   json_data['configurations']['krb5-conf'] = {
     'libdefaults_default_tgs_enctypes': 'aes256-cts-hmac-sha1-96',
     'libdefaults_default_tkt_enctypes': 'aes256-cts-hmac-sha1-96',
     'realm': 'MANAGED_REALM.COM',
-    'kdc_type': 'mit-kdc',
     'kdc_host': 'c6401.ambari.apache.org',
     'admin_principal': "admin/admin",
     'admin_password': "hadoop"
@@ -92,6 +94,9 @@ def get_unmanged_kdc_use_case():
   with open(config_file, "r") as f:
     json_data = json.load(f)
 
+  json_data['configurations']['kerberos-env'] = {
+    'kdc_type': 'mit-kdc'
+  }
   json_data['configurations']['krb5-conf'] = {
     'libdefaults_default_tgs_enctypes': 'aes256-cts-hmac-sha1-96',
     'libdefaults_default_tkt_enctypes': 'aes256-cts-hmac-sha1-96',
@@ -99,7 +104,6 @@ def get_unmanged_kdc_use_case():
     'conf_file': 'krb5_unmanaged.conf',
     'content': krb5_conf_template,
     'realm': 'OSCORPINDUSTRIES.COM',
-    'kdc_type': 'mit-kdc',
     'kdc_host': 'ad.oscorp_industries.com',
     'admin_principal': "admin/admin",
     'admin_password': "hadoop"
@@ -118,6 +122,9 @@ def get_unmanged_ad_use_case():
   with open(config_file, "r") as f:
     json_data = json.load(f)
 
+  json_data['configurations']['kerberos-env'] = {
+    'kdc_type': 'active-directory',
+  }
   json_data['configurations']['krb5-conf'] = {
     'libdefaults_default_tgs_enctypes': 'aes256-cts-hmac-sha1-96',
     'libdefaults_default_tkt_enctypes': 'aes256-cts-hmac-sha1-96',
@@ -125,7 +132,6 @@ def get_unmanged_ad_use_case():
     'conf_file': 'krb5_ad.conf',
     'content': krb5_conf_template,
     'realm': 'OSCORPINDUSTRIES.COM',
-    'kdc_type': 'active-directory',
     'kdc_host': 'ad.oscorp_industries.com',
     'admin_principal': "admin/admin",
     'admin_password': "hadoop"
@@ -151,12 +157,14 @@ def get_cross_realm_use_case():
                         '}\n'
 
   json_data['clusterHostInfo']['kdc_server_hosts'] = ['c6401.ambari.apache.org']
+  json_data['configurations']['kerberos-env'] = {
+    'kdc_type': 'mit-kdc'
+  }
   json_data['configurations']['krb5-conf'] = {
     'libdefaults_default_tgs_enctypes': 'aes256-cts-hmac-sha1-96',
     'libdefaults_default_tkt_enctypes': 'aes256-cts-hmac-sha1-96',
     'content': _krb5_conf_template,
     'realm': 'MANAGED_REALM.COM',
-    'kdc_type': 'mit-kdc',
     'kdc_host': 'c6401.ambari.apache.org',
     'admin_principal': "admin/admin",
     'admin_password': "hadoop"

http://git-wip-us.apache.org/repos/asf/ambari/blob/b9c9832c/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json b/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json
index 3511ae9..0e6ed83 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json
@@ -164,10 +164,11 @@
             "content": "\n# The file containing the running pid\nPID_FILE={{webhcat_pid_file}}\n\nTEMPLETON_LOG_DIR={{templeton_log_dir}}/\n\n\nWEBHCAT_LOG_DIR={{templeton_log_dir}}/\n\n#
The console error log\nERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log\n\n# The
console log\nCONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log\n\n#TEMPLETON_JAR=templeton_jar_name\n\n#HADOOP_PREFIX=hadoop_prefix\n\n#HCAT_PREFIX=hive_prefix\n\n#
Set HADOOP_HOME to point to a specific hadoop install directory\nexport HADOOP_HOME={{hadoop_home}}"
         }, 
         "kerberos-env": {
-            "ldap_url": "", 
+            "kdc_type": "mit-kdc",
+            "ldap_url": "",
             "create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\",
\"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n
 \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n
 \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n
   ", 
             "container_dn": ""
-        }, 
+        },
         "hive-log4j": {
             "content": "\n# Licensed to the Apache Software Foundation (ASF) under one\n#
or more contributor license agreements.  See the NOTICE file\n# distributed with this work
for additional information\n# regarding copyright ownership.  The ASF licenses this file\n#
to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file
except in compliance\n# with the License.  You may obtain a copy of the License at\n#\n# 
   http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed
to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n#
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License
for the specific language governing permissions and\n# limitations under the License.\n\n#
Define some default values that can be overridden by system properties\nhive.log.threshold=ALL\nhive.root.logger=INFO,DRFA\nhive.log.dir=${java.io.tmpdir}/${user.na
 me}\nhive.log.file=hive.log\n\n# Define the root logger to the system property \"hadoop.root.logger\".\nlog4j.rootLogger=${hive.root.logger},
EventCounter\n\n# Logging Threshold\nlog4j.threshold=${hive.log.threshold}\n\n#\n# Daily Rolling
File Appender\n#\n# Use the PidDailyerRollingFileAppend class instead if you want to use separate
log files\n# for different CLI session.\n#\n# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender\n\nlog4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender\n\nlog4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}\n\n#
Rollver at midnight\nlog4j.appender.DRFA.DatePattern=.yyyy-MM-dd\n\n# 30-day backup\n#log4j.appender.DRFA.MaxBackupIndex=30\nlog4j.appender.DRFA.layout=org.apache.log4j.PatternLayout\n\n#
Pattern format: Date LogLevel LoggerName LogMessage\n#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601}
%p %c: %m%n\n# Debugging Pattern format\nlog4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601}
%-5p [%t
 ]: %c{2} (%F:%M(%L)) - %m%n\n\n\n#\n# console\n# Add \"console\" to rootlogger above if you
want to use this\n#\n\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd
HH:mm:ss} [%t]: %p %c{2}: %m%n\nlog4j.appender.console.encoding=UTF-8\n\n#custom logging levels\n#log4j.logger.xxx=DEBUG\n\n#\n#
Event Counter Appender\n# Sends counts of logging messages at different severity levels to
Hadoop Metrics.\n#\nlog4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter\n\n\nlog4j.category.DataNucleus=ERROR,DRFA\nlog4j.category.Datastore=ERROR,DRFA\nlog4j.category.Datastore.Schema=ERROR,DRFA\nlog4j.category.JPOX.Datastore=ERROR,DRFA\nlog4j.category.JPOX.Plugin=ERROR,DRFA\nlog4j.category.JPOX.MetaData=ERROR,DRFA\nlog4j.category.JPOX.Query=ERROR,DRFA\nlog4j.category.JPOX.General=ERROR,DRFA\nlog4j.category.JPOX.En
 hancer=ERROR,DRFA\n\n\n# Silence useless ZK logs\nlog4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA\nlog4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA"
         }, 
@@ -1009,8 +1010,7 @@
             "logging_kdc": "FILE:/var/log/krb5kdc.log", 
             "domains": "", 
             "logging_default": "FILE:/var/log/krb5libs.log", 
-            "kdc_type": "mit-kdc", 
-            "libdefaults_dns_lookup_realm": "false", 
+            "libdefaults_dns_lookup_realm": "false",
             "libdefaults_renew_lifetime": "7d", 
             "libdefaults_default_tkt_enctypes": "\n      aes256-cts-hmac-sha1-96 aes128-cts-hmac-sha1-96
des3-cbc-sha1 arcfour-hmac-md5\n      camellia256-cts-cmac camellia128-cts-cmac des-cbc-crc
des-cbc-md5 des-cbc-md4\n    "
         }, 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b9c9832c/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json b/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
index 3dc3d42..d6c839d 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
@@ -164,7 +164,8 @@
             "content": "\n# The file containing the running pid\nPID_FILE={{webhcat_pid_file}}\n\nTEMPLETON_LOG_DIR={{templeton_log_dir}}/\n\n\nWEBHCAT_LOG_DIR={{templeton_log_dir}}/\n\n#
The console error log\nERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log\n\n# The
console log\nCONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log\n\n#TEMPLETON_JAR=templeton_jar_name\n\n#HADOOP_PREFIX=hadoop_prefix\n\n#HCAT_PREFIX=hive_prefix\n\n#
Set HADOOP_HOME to point to a specific hadoop install directory\nexport HADOOP_HOME={{hadoop_home}}"
         }, 
         "kerberos-env": {
-            "ldap_url": "", 
+            "kdc_type": "mit-kdc",
+            "ldap_url": "",
             "create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\",
\"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n
 \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n
 \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n
   ", 
             "container_dn": ""
         }, 
@@ -1009,8 +1010,7 @@
             "logging_kdc": "FILE:/var/log/krb5kdc.log", 
             "domains": "", 
             "logging_default": "FILE:/var/log/krb5libs.log", 
-            "kdc_type": "mit-kdc", 
-            "libdefaults_dns_lookup_realm": "false", 
+            "libdefaults_dns_lookup_realm": "false",
             "libdefaults_renew_lifetime": "7d", 
             "libdefaults_default_tkt_enctypes": "\n      aes256-cts-hmac-sha1-96 aes128-cts-hmac-sha1-96
des3-cbc-sha1 arcfour-hmac-md5\n      camellia256-cts-cmac camellia128-cts-cmac des-cbc-crc
des-cbc-md5 des-cbc-md4\n    "
         }, 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b9c9832c/ambari-web/app/assets/data/wizard/stack/hdp/version2.0.1/KERBEROS.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/wizard/stack/hdp/version2.0.1/KERBEROS.json b/ambari-web/app/assets/data/wizard/stack/hdp/version2.0.1/KERBEROS.json
index e0f837b..b34c7b4 100644
--- a/ambari-web/app/assets/data/wizard/stack/hdp/version2.0.1/KERBEROS.json
+++ b/ambari-web/app/assets/data/wizard/stack/hdp/version2.0.1/KERBEROS.json
@@ -139,7 +139,7 @@
         "service_name" : "KERBEROS",
         "stack_name" : "HDP",
         "stack_version" : "2.2",
-        "type" : "krb5-conf.xml"
+        "type" : "kerberos-env.xml"
       }
     },
     {

http://git-wip-us.apache.org/repos/asf/ambari/blob/b9c9832c/ambari-web/app/data/HDP2/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/site_properties.js b/ambari-web/app/data/HDP2/site_properties.js
index 2b2294c..d73d131 100644
--- a/ambari-web/app/data/HDP2/site_properties.js
+++ b/ambari-web/app/data/HDP2/site_properties.js
@@ -1964,7 +1964,7 @@ var hdp2properties = [
     "isRequiredByAgent": true,
     "displayType": "masterHost",
     "serviceName": "KERBEROS",
-    "filename": "krb5-conf.xml",
+    "filename": "kerberos-env.xml",
     "category": "KDC",
     "index": 0
   },

