ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From rle...@apache.org
Subject ambari git commit: AMBARI-12501. Kerberos: Allow setting/clearing attributes for MIT KDC identities (rlevas)
Date Fri, 24 Jul 2015 12:50:49 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.1 df43b426a -> 6e41d7ab2


AMBARI-12501. Kerberos: Allow setting/clearing attributes for MIT KDC identities (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6e41d7ab
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6e41d7ab
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6e41d7ab

Branch: refs/heads/branch-2.1
Commit: 6e41d7ab29e59a91e62a88c6f9ed3dd39b1e727b
Parents: df43b42
Author: Robert Levas <rlevas@hortonworks.com>
Authored: Fri Jul 24 08:50:23 2015 -0400
Committer: Robert Levas <rlevas@hortonworks.com>
Committed: Fri Jul 24 08:50:38 2015 -0400

----------------------------------------------------------------------
 .../kerberos/ADKerberosOperationHandler.java    |   2 +-
 .../kerberos/KerberosOperationHandler.java      |  10 +-
 .../kerberos/MITKerberosOperationHandler.java   | 137 +++++--
 .../server/upgrade/SchemaUpgradeHelper.java     |   1 +
 .../server/upgrade/UpgradeCatalog211.java       | 152 +++++++
 .../1.10.3-10/configuration/kerberos-env.xml    |  15 +-
 .../ADKerberosOperationHandlerTest.java         |   5 +-
 .../MITKerberosOperationHandlerTest.java        |  42 +-
 .../server/upgrade/UpgradeCatalog211Test.java   | 409 +++++++++++++++++++
 .../journalnode-upgrade-hdfs-secure.json        |   2 +-
 .../stacks/2.2/configs/journalnode-upgrade.json |   2 +-
 .../2.2/configs/pig-service-check-secure.json   |   2 +-
 .../2.2/configs/ranger-admin-upgrade.json       |   2 +-
 .../2.2/configs/ranger-usersync-upgrade.json    |   2 +-
 .../python/stacks/2.3/configs/hbase_secure.json |   2 +-
 15 files changed, 721 insertions(+), 64 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6e41d7ab/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
index 33350c0..fb21883 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
@@ -166,7 +166,7 @@ public class ADKerberosOperationHandler extends KerberosOperationHandler {
     this.ldapContext = createLdapContext();
     this.searchControls = createSearchControls();
 
-    this.createTemplate = kerberosConfiguration.get(KERBEROS_ENV_CREATE_ATTRIBUTES_TEMPLATE);
+    this.createTemplate = kerberosConfiguration.get(KERBEROS_ENV_AD_CREATE_ATTRIBUTES_TEMPLATE);
 
     this.gson = new Gson();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e41d7ab/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java
index 20426f0..425aa06 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosOperationHandler.java
@@ -18,7 +18,6 @@
 
 package org.apache.ambari.server.serveraction.kerberos;
 
-import org.apache.ambari.server.security.SecurePasswordHelper;
 import org.apache.ambari.server.utils.ShellCommandUtil;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.directory.server.kerberos.shared.crypto.encryption.KerberosKeyFactory;
@@ -64,9 +63,14 @@ public abstract class KerberosOperationHandler {
   public final static String KERBEROS_ENV_PRINCIPAL_CONTAINER_DN = "container_dn";
 
   /**
-   * Kerberos-env configuration property name: create_attributes_template
+   * Kerberos-env configuration property name: ad_create_attributes_template
    */
-  public final static String KERBEROS_ENV_CREATE_ATTRIBUTES_TEMPLATE = "create_attributes_template";
+  public final static String KERBEROS_ENV_AD_CREATE_ATTRIBUTES_TEMPLATE = "ad_create_attributes_template";
+
+  /**
+   * Kerberos-env configuration property name: kdc_create_attributes
+   */
+  public final static String KERBEROS_ENV_KDC_CREATE_ATTRIBUTES = "kdc_create_attributes";
 
   /**
    * Kerberos-env configuration property name: encryption_types

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e41d7ab/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
index 29fb4b5..d3e3fa4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandler.java
@@ -49,6 +49,11 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
 
   private final static Logger LOG = LoggerFactory.getLogger(MITKerberosOperationHandler.class);
 
+  /**
+   * A String containing user-specified attributes used when creating principals
+   */
+  private String createAttributes = null;
+
   private String adminServerHost = null;
 
   /**
@@ -89,6 +94,12 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
       setKeyEncryptionTypes(translateEncryptionTypes(kerberosConfiguration.get(KERBEROS_ENV_ENCRYPTION_TYPES), "\\s+"));
       setAdminServerHost(kerberosConfiguration.get(KERBEROS_ENV_ADMIN_SERVER_HOST));
       setExecutableSearchPaths(kerberosConfiguration.get(KERBEROS_ENV_EXECUTABLE_SEARCH_PATHS));
+      setCreateAttributes(kerberosConfiguration.get(KERBEROS_ENV_KDC_CREATE_ATTRIBUTES));
+    } else {
+      setKeyEncryptionTypes(null);
+      setAdminServerHost(null);
+      setExecutableSearchPaths((String) null);
+      setCreateAttributes(null);
     }
 
     // Pre-determine the paths to relevant Kerberos executables
@@ -170,8 +181,10 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
     } else if ((password == null) || password.isEmpty()) {
       throw new KerberosOperationException("Failed to create new principal - no password specified");
     } else {
-      // Create the kdamin query:  add_principal <-randkey|-pw <password>> <principal>
-      ShellCommandUtil.Result result = invokeKAdmin(String.format("add_principal -pw \"%s\" %s", password, principal));
+      String createAttributes = getCreateAttributes();
+      // Create the kadmin query:  add_principal <-randkey|-pw <password>> [<options>] <principal>
+      ShellCommandUtil.Result result = invokeKAdmin(String.format("add_principal -pw \"%s\" %s %s",
+          password, (createAttributes == null) ? "" : createAttributes, principal));
 
       // If there is data from STDOUT, see if the following string exists:
       //    Principal "<principal>" created
@@ -179,6 +192,8 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
       if ((stdOut != null) && stdOut.contains(String.format("Principal \"%s\" created", principal))) {
         return getKeyNumber(principal);
       } else {
+        LOG.error("Failed to execute kadmin query: add_principal -pw \"********\" {} {}\nSTDOUT: {}\nSTDERR: {}",
+            (createAttributes == null) ? "" : createAttributes, principal, stdOut, result.getStderr());
         throw new KerberosOperationException(String.format("Failed to create service principal for %s\nSTDOUT: %s\nSTDERR: %s",
             principal, stdOut, result.getStderr()));
       }
@@ -248,6 +263,42 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
   }
 
   /**
+   * Sets the KDC administrator server host address
+   *
+   * @param adminServerHost the ip address or FQDN of the KDC administrator server
+   */
+  public void setAdminServerHost(String adminServerHost) {
+    this.adminServerHost = adminServerHost;
+  }
+
+  /**
+   * Gets the IP address or FQDN of the KDC administrator server
+   *
+   * @return the IP address or FQDN of the KDC administrator server
+   */
+  public String getAdminServerHost() {
+    return this.adminServerHost;
+  }
+
+  /**
+   * Sets the (additional) principal creation attributes
+   *
+   * @param createAttributes the additional principal creation attributes
+   */
+  public void setCreateAttributes(String createAttributes) {
+    this.createAttributes = createAttributes;
+  }
+
+  /**
+   * Gets the (additional) principal creation attributes
+   *
+   * @return the additional principal creation attributes or null
+   */
+  public String getCreateAttributes() {
+    return createAttributes;
+  }
+
+  /**
    * Retrieves the current key number assigned to the identity identified by the specified principal
    *
    * @param principal a String declaring the principal to look up
@@ -310,7 +361,7 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
    * @throws KerberosRealmException               if the realm does not map to a KDC
    * @throws KerberosOperationException           if an unexpected error occurred
    */
-  private ShellCommandUtil.Result invokeKAdmin(String query)
+  protected ShellCommandUtil.Result invokeKAdmin(String query)
       throws KerberosOperationException {
     ShellCommandUtil.Result result = null;
 
@@ -381,40 +432,15 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
       command.add("-q");
       command.add(query);
 
+      if(LOG.isDebugEnabled()) {
+        LOG.debug(String.format("Executing: %s", createCleanCommand(command)));
+      }
+
       result = executeCommand(command.toArray(new String[command.size()]));
 
       if (!result.isSuccessful()) {
-        // Build command string, replacing administrator password with "********"
-        StringBuilder cleanCommand = new StringBuilder();
-        Iterator<String> iterator = command.iterator();
-
-        if (iterator.hasNext()) {
-          cleanCommand.append(iterator.next());
-        }
-
-        while (iterator.hasNext()) {
-          String part = iterator.next();
-
-          cleanCommand.append(' ');
-
-          if (part.contains(" ")) {
-            cleanCommand.append('"');
-            cleanCommand.append(part);
-            cleanCommand.append('"');
-          } else {
-            cleanCommand.append(part);
-          }
-
-          if ("-w".equals(part)) {
-            // Skip the password and use "********" instead
-            if (iterator.hasNext()) {
-              iterator.next();
-            }
-            cleanCommand.append(" ********");
-          }
-        }
         String message = String.format("Failed to execute kadmin:\n\tCommand: %s\n\tExitCode: %s\n\tSTDOUT: %s\n\tSTDERR: %s",
-            cleanCommand.toString(), result.getExitCode(), result.getStdout(), result.getStderr());
+            createCleanCommand(command), result.getExitCode(), result.getStdout(), result.getStderr());
         LOG.warn(message);
 
         // Test STDERR to see of any "expected" error conditions were encountered...
@@ -453,20 +479,41 @@ public class MITKerberosOperationHandler extends KerberosOperationHandler {
   }
 
   /**
-   * Sets the KDC administrator server host address
+   * Build the kadmin command string, replacing administrator password with "********"
    *
-   * @param adminServerHost the ip address or FQDN of the KDC administrator server
+   * @param command a List of items making up the command
+   * @return the cleaned command string
    */
-  public void setAdminServerHost(String adminServerHost) {
-    this.adminServerHost = adminServerHost;
-  }
+  private String createCleanCommand(List<String> command) {
+    StringBuilder cleanedCommand = new StringBuilder();
+    Iterator<String> iterator = command.iterator();
 
-  /**
-   * Gets the IP address or FQDN of the KDC administrator server
-   *
-   * @return the IP address or FQDN of the KDC administrator server
-   */
-  public String getAdminServerHost() {
-    return adminServerHost;
+    if (iterator.hasNext()) {
+      cleanedCommand.append(iterator.next());
+    }
+
+    while (iterator.hasNext()) {
+      String part = iterator.next();
+
+      cleanedCommand.append(' ');
+
+      if (part.contains(" ")) {
+        cleanedCommand.append('"');
+        cleanedCommand.append(part);
+        cleanedCommand.append('"');
+      } else {
+        cleanedCommand.append(part);
+      }
+
+      if ("-w".equals(part)) {
+        // Skip the password and use "********" instead
+        if (iterator.hasNext()) {
+          iterator.next();
+        }
+        cleanedCommand.append(" ********");
+      }
+    }
+
+    return cleanedCommand.toString();
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e41d7ab/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
index 5dc58e8..88e500b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
@@ -177,6 +177,7 @@ public class SchemaUpgradeHelper {
       catalogBinder.addBinding().to(UpgradeCatalog170.class);
       catalogBinder.addBinding().to(UpgradeCatalog200.class);
       catalogBinder.addBinding().to(UpgradeCatalog210.class);
+      catalogBinder.addBinding().to(UpgradeCatalog211.class);
       catalogBinder.addBinding().to(FinalUpgradeCatalog.class);
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e41d7ab/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog211.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog211.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog211.java
new file mode 100644
index 0000000..616d4c4
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog211.java
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.upgrade;
+
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.orm.dao.DaoUtils;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+
+import java.sql.SQLException;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+
+
+/**
+ * Upgrade catalog for version 2.1.1.
+ */
+public class UpgradeCatalog211 extends AbstractUpgradeCatalog {
+  @Inject
+  DaoUtils daoUtils;
+
+  // ----- Constructors ------------------------------------------------------
+
+  /**
+   * Don't forget to register new UpgradeCatalogs in {@link SchemaUpgradeHelper.UpgradeHelperModule#configure()}
+   *
+   * @param injector Guice injector to track dependencies and uses bindings to inject them.
+   */
+  @Inject
+  public UpgradeCatalog211(Injector injector) {
+    super(injector);
+
+    daoUtils = injector.getInstance(DaoUtils.class);
+  }
+
+  // ----- UpgradeCatalog ----------------------------------------------------
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public String getTargetVersion() {
+    return "2.1.1";
+  }
+
+  // ----- AbstractUpgradeCatalog --------------------------------------------
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public String getSourceVersion() {
+    return "2.1.0";
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected void executeDDLUpdates() throws AmbariException, SQLException {
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected void executePreDMLUpdates() throws AmbariException, SQLException {
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected void executeDMLUpdates() throws AmbariException, SQLException {
+    addNewConfigurationsFromXml();
+    updateExistingConfigurations();
+  }
+
+
+  // ----- UpgradeCatalog211 --------------------------------------------
+
+
+  /**
+   * Iterates over the set of clusters to call service-specific configuration update routines.
+   *
+   * @throws AmbariException if an error occurs while updating the configurations
+   */
+  protected void updateExistingConfigurations() throws AmbariException {
+    AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+    Clusters clusters = ambariManagementController.getClusters();
+
+    if (clusters != null) {
+      Map<String, Cluster> clusterMap = clusters.getClusters();
+
+      if ((clusterMap != null) && !clusterMap.isEmpty()) {
+        // Iterate through the clusters and perform any configuration updates
+        for (final Cluster cluster : clusterMap.values()) {
+          updateKerberosConfigurations(cluster);
+
+          /* *********************************************************
+           * Add additional configuration update methods here
+           * ********************************************************* */
+        }
+      }
+    }
+  }
+
+  /**
+   * Updates the Kerberos configurations for the given cluster
+   * <p/>
+   * Performs the following updates:
+   * <ul>
+   * <li>Rename <code>create_attributes_template</code> to <code>ad_create_attributes_template</code></li>
+   * </ul>
+   *
+   * @param cluster the cluster
+   * @throws AmbariException if an error occurs while updating the configurations
+   */
+  protected void updateKerberosConfigurations(Cluster cluster) throws AmbariException {
+    Config config = cluster.getDesiredConfigByType("kerberos-env");
+
+    if (config != null) {
+      // Rename create_attributes_template to ad_create_attributes_template
+      String value = config.getProperties().get("create_attributes_template");
+      Map<String, String> updates = Collections.singletonMap("ad_create_attributes_template", value);
+      Set<String> removes = Collections.singleton("create_attributes_template");
+
+      updateConfigurationPropertiesForCluster(cluster, "kerberos-env", updates, removes, true, false);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e41d7ab/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
index e9665f3..60df2e0 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
@@ -168,11 +168,12 @@
     <value>false</value>
   </property>
 
-  <property require-input="true">
-    <name>create_attributes_template</name>
+  <property>
+    <name>ad_create_attributes_template</name>
     <description>
       A Velocity template to use to generate a JSON-formatted document containing the set of
-      attribute names and values needed to create a new Kerberos identity in the relevant KDC.
+      attribute names and values needed to create a new Kerberos identity in the relevant
+      Active Directory.
       Variables include:
       principal_name, principal_primary, principal_instance, realm, realm_lowercase,
       normalized_principal, principal digest, password, is_service, container_dn
@@ -191,4 +192,12 @@
 }
     </value>
   </property>
+
+  <property>
+    <name>kdc_create_attributes</name>
+    <description>
+      The set of attributes to use when creating a new Kerberos identity in the relevant (MIT) KDC.
+    </description>
+    <value/>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e41d7ab/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandlerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandlerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandlerTest.java
index d7fffb2..9ad3da6 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandlerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandlerTest.java
@@ -27,7 +27,6 @@ import org.junit.Test;
 
 import javax.naming.AuthenticationException;
 import javax.naming.CommunicationException;
-import javax.naming.InvalidNameException;
 import javax.naming.Name;
 import javax.naming.NamingEnumeration;
 import javax.naming.directory.Attributes;
@@ -344,7 +343,7 @@ public class ADKerberosOperationHandlerTest extends KerberosOperationHandlerTest
       {
         put(ADKerberosOperationHandler.KERBEROS_ENV_LDAP_URL, DEFAULT_LDAP_URL);
         put(ADKerberosOperationHandler.KERBEROS_ENV_PRINCIPAL_CONTAINER_DN, DEFAULT_PRINCIPAL_CONTAINER_DN);
-        put(ADKerberosOperationHandler.KERBEROS_ENV_CREATE_ATTRIBUTES_TEMPLATE, "" +
+        put(ADKerberosOperationHandler.KERBEROS_ENV_AD_CREATE_ATTRIBUTES_TEMPLATE, "" +
             "#set( $user = \"${principal_primary}-${principal_digest}\" )" +
             "{" +
             "  \"objectClass\": [" +
@@ -508,7 +507,7 @@ public class ADKerberosOperationHandlerTest extends KerberosOperationHandlerTest
       handler.createPrincipal(evaluatedPrincipal, "some password", true);
     }
 
-    kerberosEnvMap.put(ADKerberosOperationHandler.KERBEROS_ENV_CREATE_ATTRIBUTES_TEMPLATE,
+    kerberosEnvMap.put(ADKerberosOperationHandler.KERBEROS_ENV_AD_CREATE_ATTRIBUTES_TEMPLATE,
         "#set( $user = \"${principal_primary}-${principal_digest}\" )" +
             "{" +
             "  \"objectClass\": [" +

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e41d7ab/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java
index 9b9a28c..8c096b0 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java
@@ -29,19 +29,22 @@ import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.apache.ambari.server.utils.ShellCommandUtil;
+import org.easymock.Capture;
 import org.easymock.EasyMock;
 import org.easymock.IAnswer;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 
+import java.lang.reflect.Method;
 import java.util.HashMap;
 import java.util.Map;
 
 import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.capture;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.verify;
 
 public class MITKerberosOperationHandlerTest extends KerberosOperationHandlerTest {
 
@@ -56,6 +59,8 @@ public class MITKerberosOperationHandlerTest extends KerberosOperationHandlerTes
       put(MITKerberosOperationHandler.KERBEROS_ENV_ENCRYPTION_TYPES, null);
       put(MITKerberosOperationHandler.KERBEROS_ENV_KDC_HOST, "localhost");
       put(MITKerberosOperationHandler.KERBEROS_ENV_ADMIN_SERVER_HOST, "localhost");
+      put(MITKerberosOperationHandler.KERBEROS_ENV_AD_CREATE_ATTRIBUTES_TEMPLATE, "AD Create Template");
+      put(MITKerberosOperationHandler.KERBEROS_ENV_KDC_CREATE_ATTRIBUTES, "-attr1 -attr2 foo=345");
     }
   };
 
@@ -64,7 +69,7 @@ public class MITKerberosOperationHandlerTest extends KerberosOperationHandlerTes
     injector = Guice.createInjector(new AbstractModule() {
       @Override
       protected void configure() {
-        Configuration configuration =EasyMock.createNiceMock(Configuration.class);
+        Configuration configuration = EasyMock.createNiceMock(Configuration.class);
         expect(configuration.getServerOsFamily()).andReturn("redhat6").anyTimes();
         replay(configuration);
 
@@ -112,7 +117,38 @@ public class MITKerberosOperationHandlerTest extends KerberosOperationHandlerTes
   }
 
   @Test
-  public void testCreateServicePrincipalExceptions() throws Exception {
+  public void testCreateServicePrincipal_AdditionalAttributes() throws Exception {
+    Method invokeKAdmin = MITKerberosOperationHandler.class.getDeclaredMethod("invokeKAdmin", String.class);
+
+    Capture<? extends String> query = new Capture<String>();
+
+    ShellCommandUtil.Result result1 = createNiceMock(ShellCommandUtil.Result.class);
+    expect(result1.getStderr()).andReturn("").anyTimes();
+    expect(result1.getStdout()).andReturn("Principal \"" + DEFAULT_ADMIN_PRINCIPAL + "\" created\"").anyTimes();
+
+    ShellCommandUtil.Result result2 = createNiceMock(ShellCommandUtil.Result.class);
+    expect(result2.getStderr()).andReturn("").anyTimes();
+    expect(result2.getStdout()).andReturn("Key: vno 1").anyTimes();
+
+    MITKerberosOperationHandler handler = createMockBuilder(MITKerberosOperationHandler.class)
+        .addMockedMethod(invokeKAdmin)
+        .createStrictMock();
+
+    expect(handler.invokeKAdmin(capture(query))).andReturn(result1).once();
+    expect(handler.invokeKAdmin("get_principal " + DEFAULT_ADMIN_PRINCIPAL)).andReturn(result2).once();
+
+    replay(handler, result1, result2);
+
+    handler.open(new KerberosCredential(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD, null), DEFAULT_REALM, KERBEROS_ENV_MAP);
+    handler.createPrincipal(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD, false);
+
+    verify(handler, result1, result2);
+
+    Assert.assertTrue(query.getValue().contains(" " + KERBEROS_ENV_MAP.get(MITKerberosOperationHandler.KERBEROS_ENV_KDC_CREATE_ATTRIBUTES) + " "));
+  }
+
+  @Test
+  public void testCreateServicePrincipal_Exceptions() throws Exception {
     MITKerberosOperationHandler handler = new MITKerberosOperationHandler();
     handler.open(new KerberosCredential(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD, null), DEFAULT_REALM, KERBEROS_ENV_MAP);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e41d7ab/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog211Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog211Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog211Test.java
new file mode 100644
index 0000000..2ba44bf
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog211Test.java
@@ -0,0 +1,409 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.upgrade;
+
+import com.google.inject.AbstractModule;
+import com.google.inject.Binder;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+import com.google.inject.Provider;
+import com.google.inject.persist.PersistService;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.ConfigurationRequest;
+import org.apache.ambari.server.controller.ConfigurationResponse;
+import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.GuiceJpaInitializer;
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.stack.OsFamily;
+import org.easymock.Capture;
+import org.easymock.EasyMockSupport;
+import org.junit.Assert;
+import org.junit.Test;
+
+import javax.persistence.EntityManager;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.Statement;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.easymock.EasyMock.*;
+
+
+/**
+ * {@link UpgradeCatalog211} unit tests.
+ */
+public class UpgradeCatalog211Test extends EasyMockSupport {
+
+  /**
+   * Runs {@code executeDDLUpdates()} end-to-end against fully mocked JDBC
+   * plumbing (DBAccessor/Connection/Statement/ResultSet are nice mocks) and
+   * an in-memory JPA injector, verifying the upgrade completes without error.
+   */
+  @Test
+  public void testExecuteDDLUpdates() throws Exception {
+    Injector injector = initInjector();
+
+    try {
+      Provider<EntityManager> entityManagerProvider = initEntityManagerProvider();
+
+      final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
+      final OsFamily osFamily = createNiceMock(OsFamily.class);
+      Configuration configuration = createNiceMock(Configuration.class);
+      Connection connection = createNiceMock(Connection.class);
+      Statement statement = createNiceMock(Statement.class);
+      ResultSet resultSet = createNiceMock(ResultSet.class);
+      // Route all database access through the mocks: any SQL query issued by
+      // the catalog returns the mock result set.
+      expect(configuration.getDatabaseUrl()).andReturn(Configuration.JDBC_IN_MEMORY_URL).anyTimes();
+      dbAccessor.getConnection();
+      expectLastCall().andReturn(connection).anyTimes();
+      connection.createStatement();
+      expectLastCall().andReturn(statement).anyTimes();
+      statement.executeQuery(anyObject(String.class));
+      expectLastCall().andReturn(resultSet).anyTimes();
+
+      // Create DDL sections with their own capture groups
+      // Example: AlertSectionDDL alertSectionDDL = new AlertSectionDDL();
+
+      // Execute any DDL schema changes
+      // Example: alertSectionDDL.execute(dbAccessor);
+
+      // Replay sections
+      replayAll();
+
+      AbstractUpgradeCatalog upgradeCatalog = getUpgradeCatalog(dbAccessor, osFamily, entityManagerProvider.get());
+      // Inject the mocked Configuration into the catalog via reflection.
+      Class<?> c = AbstractUpgradeCatalog.class;
+      Field f = c.getDeclaredField("configuration");
+      f.setAccessible(true);
+      f.set(upgradeCatalog, configuration);
+
+      upgradeCatalog.executeDDLUpdates();
+      verifyAll();
+
+      // Verify sections
+      // Example: alertSectionDDL.verify(dbAccessor);
+    } finally {
+      // Stop the persistence service even if the upgrade throws.
+      destroyInjector(injector);
+    }
+  }
+
+  /**
+   * Verifies {@code executePreDMLUpdates()} runs cleanly on a partial mock of
+   * {@link UpgradeCatalog211} with no methods mocked out (and therefore no
+   * expectations), bound into a minimal Guice module.
+   */
+  @Test
+  public void testExecutePreDMLUpdates() throws Exception {
+
+    final UpgradeCatalog211 upgradeCatalog211 = createMockBuilder(UpgradeCatalog211.class)
+        // Add mocked methods. Example: .addMockedMethod(cleanupStackUpdates)
+        .createMock();
+
+    final Injector injector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(UpgradeCatalog211.class).toInstance(upgradeCatalog211);
+        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+      }
+    });
+
+    // The catalog's injector field is populated reflectively (see setInjector).
+    setInjector(upgradeCatalog211, injector);
+
+    replayAll();
+
+    injector.getInstance(UpgradeCatalog211.class).executePreDMLUpdates();
+
+    verifyAll();
+  }
+
+  /**
+   * Verifies that {@code executeDMLUpdates()} invokes
+   * {@code addNewConfigurationsFromXml()} once and
+   * {@code updateKerberosConfigurations(Cluster)} once for the single cluster
+   * ("c1") returned by the mocked {@link Clusters}.
+   */
+  @Test
+  public void testExecuteDMLUpdates() throws Exception {
+    final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
+    final OsFamily osFamily = createNiceMock(OsFamily.class);
+    
+    final Cluster cluster = createMock(Cluster.class);
+
+    final Clusters clusters = createMock(Clusters.class);
+    expect(clusters.getClusters())
+        .andReturn(Collections.singletonMap("c1", cluster));
+
+    final AmbariManagementController controller = createNiceMock(AmbariManagementController.class);
+    expect(controller.getClusters())
+        .andReturn(clusters)
+        .once();
+
+    final Injector injector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(AmbariManagementController.class).toInstance(controller);
+        bind(DBAccessor.class).toInstance(dbAccessor);
+        bind(OsFamily.class).toInstance(osFamily);
+      }
+    });
+
+    // Look the two collaborator methods up reflectively so only they are
+    // mocked out on the partial mock below; the rest of the catalog runs real code.
+    Method addNewConfigurationsFromXml =
+        AbstractUpgradeCatalog.class.getDeclaredMethod("addNewConfigurationsFromXml");
+
+    Method updateKerberosConfigurations =
+        UpgradeCatalog211.class.getDeclaredMethod("updateKerberosConfigurations", Cluster.class);
+
+    UpgradeCatalog211 upgradeCatalog211 = createMockBuilder(UpgradeCatalog211.class)
+        .addMockedMethod(addNewConfigurationsFromXml)
+        .addMockedMethod(updateKerberosConfigurations)
+        .createMock();
+
+    setInjector(upgradeCatalog211, injector);
+
+    // Each collaborator must be called exactly once.
+    upgradeCatalog211.addNewConfigurationsFromXml();
+    expectLastCall().once();
+
+    upgradeCatalog211.updateKerberosConfigurations(anyObject(Cluster.class));
+    expectLastCall().once();
+
+    replayAll();
+
+    upgradeCatalog211.executeDMLUpdates();
+
+    verifyAll();
+  }
+
+  /**
+   * Verifies that {@code updateKerberosConfigurations(Cluster)} renames the
+   * kerberos-env property {@code create_attributes_template} to
+   * {@code ad_create_attributes_template} (value preserved) while leaving all
+   * other kerberos-env properties untouched, by capturing the
+   * {@link ConfigurationRequest} submitted to the controller.
+   */
+  @Test
+  public void testUpdateKerberosConfiguration() throws Exception {
+    final AmbariManagementController controller = createNiceMock(AmbariManagementController.class);
+    final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
+    final OsFamily osFamily = createNiceMock(OsFamily.class);
+
+    // Pre-upgrade kerberos-env config, still carrying the old property name.
+    final Map<String, String> propertiesKerberosEnv = new HashMap<String, String>() {
+      {
+        put("create_attributes_template", "create_attributes_template content");
+        put("realm", "EXAMPLE.COM");
+        put("container_dn", "");
+        put("ldap_url", "");
+        put("encryption_types", "aes des3-cbc-sha1 rc4 des-cbc-md5");
+        put("kdc_host", "c6407.ambari.apache.org");
+        put("admin_server_host", "c6407.ambari.apache.org");
+        put("kdc_type", "mit-kdc");
+      }
+    };
+
+    final Config configKerberosEnv = createNiceMock(Config.class);
+    expect(configKerberosEnv.getProperties()).andReturn(propertiesKerberosEnv).anyTimes();
+    expect(configKerberosEnv.getTag()).andReturn("tag1").anyTimes();
+
+    final Cluster cluster = createNiceMock(Cluster.class);
+    expect(cluster.getDesiredConfigByType("kerberos-env")).andReturn(configKerberosEnv).once();
+
+    final Injector injector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(AmbariManagementController.class).toInstance(controller);
+        bind(DBAccessor.class).toInstance(dbAccessor);
+        bind(OsFamily.class).toInstance(osFamily);
+      }
+    });
+
+    /* *********************************************************
+     * Expects for updateConfigurationPropertiesForCluster
+     * **** */
+    expect(cluster.getConfigsByType("kerberos-env"))
+        .andReturn(Collections.singletonMap("tag1", configKerberosEnv))
+        .once();
+
+    expect(cluster.getDesiredConfigByType("kerberos-env"))
+        .andReturn(configKerberosEnv)
+        .once();
+
+    // Capture the new configuration the upgrade submits so its properties
+    // can be inspected after the call.
+    Capture<ConfigurationRequest> captureCR = new Capture<ConfigurationRequest>();
+    expect(controller.createConfiguration(capture(captureCR)))
+        .andReturn(createNiceMock(ConfigurationResponse.class))
+        .once();
+
+    /* ****
+     * Expects for updateConfigurationPropertiesForCluster (end)
+     * ********************************************************* */
+
+    replayAll();
+
+    injector.getInstance(UpgradeCatalog211.class).updateKerberosConfigurations(cluster);
+
+    verifyAll();
+
+    ConfigurationRequest capturedCR = captureCR.getValue();
+    Assert.assertNotNull(capturedCR);
+
+    // The old key must be gone and the new key present...
+    Map<String, String> capturedCRProperties = capturedCR.getProperties();
+    Assert.assertNotNull(capturedCRProperties);
+    Assert.assertFalse(capturedCRProperties.containsKey("create_attributes_template"));
+    Assert.assertTrue(capturedCRProperties.containsKey("ad_create_attributes_template"));
+
+    // ...and every value must be preserved, with only the one key renamed.
+    for (String property : propertiesKerberosEnv.keySet()) {
+      if ("create_attributes_template".equals(property)) {
+        Assert.assertEquals("create_attributes_template/ad_create_attributes_template", propertiesKerberosEnv.get(property), capturedCRProperties.get("ad_create_attributes_template"));
+      } else {
+        Assert.assertEquals(property, propertiesKerberosEnv.get(property), capturedCRProperties.get(property));
+      }
+    }
+  }
+
+  /**
+   * The catalog must report source version 2.1.0 (the version it upgrades from).
+   */
+  @Test
+  public void testGetSourceVersion() {
+    final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
+    final OsFamily osFamily = createNiceMock(OsFamily.class);
+    Provider<EntityManager> entityManagerProvider = initEntityManagerProvider();
+
+    replayAll();
+
+    UpgradeCatalog upgradeCatalog = getUpgradeCatalog(dbAccessor, osFamily, entityManagerProvider.get());
+
+    Assert.assertEquals("2.1.0", upgradeCatalog.getSourceVersion());
+
+    verifyAll();
+  }
+
+  /**
+   * The catalog must report target version 2.1.1 (the version it upgrades to).
+   */
+  @Test
+  public void testGetTargetVersion() throws Exception {
+    final DBAccessor dbAccessor = createNiceMock(DBAccessor.class);
+    final OsFamily osFamily = createNiceMock(OsFamily.class);
+    Provider<EntityManager> entityManagerProvider = initEntityManagerProvider();
+
+    replayAll();
+
+    UpgradeCatalog upgradeCatalog = getUpgradeCatalog(dbAccessor, osFamily, entityManagerProvider.get());
+
+    Assert.assertEquals("2.1.1", upgradeCatalog.getTargetVersion());
+
+    verifyAll();
+  }
+
+  /**
+   * Builds a strict-mock {@link Provider} whose {@code get()} always returns
+   * the same nice-mock {@link EntityManager}.  The mocks are replayed later
+   * via {@code replayAll()} in the calling test.
+   */
+  private Provider<EntityManager> initEntityManagerProvider() {
+    Provider<EntityManager> entityManagerProvider = createStrictMock(Provider.class);
+
+    EntityManager entityManager = createNiceMock(EntityManager.class);
+    expect(entityManagerProvider.get())
+        .andReturn(entityManager)
+        .anyTimes();
+
+    return entityManagerProvider;
+  }
+
+  /**
+   * Creates an in-memory test injector, triggers JPA initialization and stack
+   * population, and touches the HDP-2.2.0 stack entity.  Callers must pair
+   * this with {@link #destroyInjector(Injector)}.
+   */
+  private Injector initInjector() {
+    Injector injector;
+
+    injector = Guice.createInjector(new InMemoryDefaultTestModule());
+    injector.getInstance(GuiceJpaInitializer.class);
+
+    // inject AmbariMetaInfo to ensure that stacks get populated in the DB
+    injector.getInstance(AmbariMetaInfo.class);
+
+    // load the stack entity
+    StackDAO stackDAO = injector.getInstance(StackDAO.class);
+    stackDAO.find("HDP", "2.2.0");
+
+    return injector;
+  }
+
+  /**
+   * Stops the Guice persistence service so the in-memory database created by
+   * {@link #initInjector()} is torn down between tests.
+   */
+  private void destroyInjector(Injector injector) {
+    injector.getInstance(PersistService.class).stop();
+  }
+
+  /**
+   * Builds a real {@link UpgradeCatalog211} from a minimal Guice module that
+   * binds the three supplied collaborators (DBAccessor, OsFamily, EntityManager).
+   */
+  private AbstractUpgradeCatalog getUpgradeCatalog(final DBAccessor dbAccessor, final OsFamily osFamily, final EntityManager entityManager) {
+    Module module = new Module() {
+      @Override
+      public void configure(Binder binder) {
+        binder.bind(DBAccessor.class).toInstance(dbAccessor);
+        binder.bind(OsFamily.class).toInstance(osFamily);
+        binder.bind(EntityManager.class).toInstance(entityManager);
+      }
+    };
+
+    Injector injector = Guice.createInjector(module);
+    return injector.getInstance(UpgradeCatalog211.class);
+  }
+
+  /**
+   * Reflectively sets the {@code injector} field declared on
+   * {@link AbstractUpgradeCatalog}, since the partial mocks used in these
+   * tests are not built through Guice injection.
+   *
+   * NOTE(review): getDeclaredField() never returns null (it throws
+   * NoSuchFieldException instead), so the null guard is effectively dead;
+   * also no setAccessible(true) is called before Field.set() — presumably the
+   * field is accessible from this package, confirm against AbstractUpgradeCatalog.
+   */
+  private void setInjector(UpgradeCatalog211 upgradeCatalog211, Injector injector) throws NoSuchFieldException, IllegalAccessException {
+    Field fieldInjector = AbstractUpgradeCatalog.class.getDeclaredField("injector");
+    if (fieldInjector != null) {
+      fieldInjector.set(upgradeCatalog211, injector);
+    }
+  }
+
+  // *********** Inner Classes that represent sections of the DDL ***********
+  // ************************************************************************
+
+  /**
+   * Example *SectionDDL class
+   */
+  /*
+  class AlertSectionDDL implements SectionDDL {
+    HashMap<String, Capture<String>> stringCaptures;
+    HashMap<String, Capture<Class>> classCaptures;
+
+
+    public AlertSectionDDL() {
+      stringCaptures = new HashMap<String, Capture<String>>();
+      classCaptures = new HashMap<String, Capture<Class>>();
+
+      Capture<String> textCaptureC = new Capture<String>();
+      Capture<String> textCaptureH = new Capture<String>();
+      Capture<Class>  classFromC = new Capture<Class>();
+      Capture<Class>  classFromH = new Capture<Class>();
+      Capture<Class>  classToC = new Capture<Class>();
+      Capture<Class>  classToH = new Capture<Class>();
+
+      stringCaptures.put("textCaptureC", textCaptureC);
+      stringCaptures.put("textCaptureH", textCaptureH);
+      classCaptures.put("classFromC", classFromC);
+      classCaptures.put("classFromH", classFromH);
+      classCaptures.put("classToC", classToC);
+      classCaptures.put("classToH", classToH);
+    }
+
+    @Override
+    public void execute(DBAccessor dbAccessor) throws SQLException {
+      Capture<String> textCaptureC = stringCaptures.get("textCaptureC");
+      Capture<String> textCaptureH = stringCaptures.get("textCaptureH");
+      Capture<Class>  classFromC = classCaptures.get("classFromC");
+      Capture<Class>  classFromH = classCaptures.get("classFromH");
+      Capture<Class>  classToC = classCaptures.get("classToC");
+      Capture<Class>  classToH = classCaptures.get("classToH");
+
+      dbAccessor.changeColumnType(eq("alert_current"), capture(textCaptureC), capture(classFromC), capture(classToC));
+      dbAccessor.changeColumnType(eq("alert_history"), capture(textCaptureH), capture(classFromH), capture(classToH));
+    }
+
+    @Override
+    public void verify(DBAccessor dbAccessor) throws SQLException {
+      Capture<String> textCaptureC = stringCaptures.get("textCaptureC");
+      Capture<String> textCaptureH = stringCaptures.get("textCaptureH");
+      Capture<Class>  classFromC = classCaptures.get("classFromC");
+      Capture<Class>  classFromH = classCaptures.get("classFromH");
+      Capture<Class>  classToC = classCaptures.get("classToC");
+      Capture<Class>  classToH = classCaptures.get("classToH");
+
+      Assert.assertEquals("latest_text", textCaptureC.getValue());
+      Assert.assertEquals(String.class, classFromC.getValue());
+      Assert.assertEquals(char[].class, classToC.getValue());
+
+      Assert.assertEquals("alert_text", textCaptureH.getValue());
+      Assert.assertEquals(String.class, classFromH.getValue());
+      Assert.assertEquals(char[].class, classToH.getValue());
+    }
+  }
+  */
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e41d7ab/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json b/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json
index d56d08e..e06882a 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade-hdfs-secure.json
@@ -169,7 +169,7 @@
             "kdc_host": "c6406.ambari.apache.org",
             "admin_server_host": "c6406.ambari.apache.org",
             "ldap_url": "",
-            "create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n  \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n  \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n    ", 
+            "ad_create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n  \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n  \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n    ",
             "container_dn": ""
         },
         "hive-log4j": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e41d7ab/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json b/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
index 8b10691..1f23f11 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/journalnode-upgrade.json
@@ -169,7 +169,7 @@
             "kdc_host": "c6406.ambari.apache.org",
             "admin_server_host": "c6406.ambari.apache.org",
             "ldap_url": "",
-            "create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n  \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n  \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n    ", 
+            "ad_create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n  \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n  \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n    ",
             "container_dn": ""
         }, 
         "hive-log4j": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e41d7ab/ambari-server/src/test/python/stacks/2.2/configs/pig-service-check-secure.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/pig-service-check-secure.json b/ambari-server/src/test/python/stacks/2.2/configs/pig-service-check-secure.json
index ca7e521..ad7a75a 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/pig-service-check-secure.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/pig-service-check-secure.json
@@ -139,7 +139,7 @@
             "REPOSITORY_CONFIG_PASSWORD": "hadoop"
         }, 
         "kerberos-env": {
-            "create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n  \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n  \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n    ", 
+            "ad_create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n  \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n  \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n    ",
             "realm": "EXAMPLE.COM", 
             "container_dn": "", 
             "ldap_url": "", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e41d7ab/ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json b/ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json
index 5776a1e..f47a247 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json
@@ -191,7 +191,7 @@
             "content": "\n# Set everything to be logged to the console\nlog4j.rootCategory=INFO, console\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n# Settings to quiet third party logs that are too verbose\nlog4j.logger.org.eclipse.jetty=WARN\nlog4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR\nlog4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO\nlog4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO"
         }, 
         "kerberos-env": {
-            "create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n  \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n  \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n    ", 
+            "ad_create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n  \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n  \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n    ",
             "realm": "EXAMPLE.COM", 
             "container_dn": "", 
             "ldap_url": "", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e41d7ab/ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json b/ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json
index 633ce75..fa4d614 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json
@@ -190,7 +190,7 @@
             "content": "\n# Set everything to be logged to the console\nlog4j.rootCategory=INFO, console\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n# Settings to quiet third party logs that are too verbose\nlog4j.logger.org.eclipse.jetty=WARN\nlog4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR\nlog4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO\nlog4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO"
         }, 
         "kerberos-env": {
-            "create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n  \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n  \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n    ",
+            "ad_create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n  \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n  \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n    ",
             "kdc_host": "c6407.ambari.apache.org",
             "admin_server_host": "c6407.ambari.apache.org",
             "realm": "EXAMPLE.COM",

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e41d7ab/ambari-server/src/test/python/stacks/2.3/configs/hbase_secure.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/configs/hbase_secure.json b/ambari-server/src/test/python/stacks/2.3/configs/hbase_secure.json
index 19e68d9..d31c621 100644
--- a/ambari-server/src/test/python/stacks/2.3/configs/hbase_secure.json
+++ b/ambari-server/src/test/python/stacks/2.3/configs/hbase_secure.json
@@ -136,7 +136,7 @@
         }, 
         "kerberos-env": {
             "kdc_host": "c6405.ambari.apache.org", 
-            "create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n  \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n  \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n    ", 
+            "ad_create_attributes_template": "\n{\n  \"objectClass\": [\"top\", \"person\", \"organizationalPerson\", \"user\"],\n  \"cn\": \"$principal_name\",\n  #if( $is_service )\n  \"servicePrincipalName\": \"$principal_name\",\n  #end\n  \"userPrincipalName\": \"$normalized_principal\",\n  \"unicodePwd\": \"$password\",\n  \"accountExpires\": \"0\",\n  \"userAccountControl\": \"66048\"\n}\n    ",
             "realm": "EXAMPLE.COM", 
             "container_dn": "", 
             "ldap_url": "", 


Mime
View raw message