Return-Path: X-Original-To: apmail-ambari-commits-archive@www.apache.org Delivered-To: apmail-ambari-commits-archive@www.apache.org Received: from mail.apache.org (hermes.apache.org [140.211.11.3]) by minotaur.apache.org (Postfix) with SMTP id 167201740D for ; Thu, 19 Feb 2015 21:17:18 +0000 (UTC) Received: (qmail 20419 invoked by uid 500); 19 Feb 2015 21:17:18 -0000 Delivered-To: apmail-ambari-commits-archive@ambari.apache.org Received: (qmail 20319 invoked by uid 500); 19 Feb 2015 21:17:17 -0000 Mailing-List: contact commits-help@ambari.apache.org; run by ezmlm Precedence: bulk List-Help: List-Unsubscribe: List-Post: List-Id: Reply-To: ambari-dev@ambari.apache.org Delivered-To: mailing list commits@ambari.apache.org Received: (qmail 20309 invoked by uid 99); 19 Feb 2015 21:17:17 -0000 Received: from git1-us-west.apache.org (HELO git1-us-west.apache.org) (140.211.11.23) by apache.org (qpsmtpd/0.29) with ESMTP; Thu, 19 Feb 2015 21:17:17 +0000 Received: by git1-us-west.apache.org (ASF Mail Server at git1-us-west.apache.org, from userid 33) id BA276E0664; Thu, 19 Feb 2015 21:17:17 +0000 (UTC) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit From: rlevas@apache.org To: commits@ambari.apache.org Date: Thu, 19 Feb 2015 21:17:18 -0000 Message-Id: In-Reply-To: <7de1a9b20c194a719978b763899cc1d1@git.apache.org> References: <7de1a9b20c194a719978b763899cc1d1@git.apache.org> X-Mailer: ASF-Git Admin Mailer Subject: [2/2] ambari git commit: AMBARI-9666. Kerberos: Adding a service to a Kerberized cluster requires Kerberos-related tasks occur before INSTALL stage (rlevas) AMBARI-9666. Kerberos: Adding a service to a Kerberized cluster requires Kerberos-related tasks occur before INSTALL stage (rlevas) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1646be07 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1646be07 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1646be07 Branch: refs/heads/trunk Commit: 1646be07045abf0812468fa9f127d7af5b8e6538 Parents: ab491ea Author: Robert Levas Authored: Thu Feb 19 16:16:58 2015 -0500 Committer: Robert Levas Committed: Thu Feb 19 16:17:06 2015 -0500 ---------------------------------------------------------------------- .../ambari/server/agent/HeartBeatHandler.java | 37 +- .../AmbariManagementControllerImpl.java | 60 ++- .../server/controller/AuthToLocalBuilder.java | 17 + .../server/controller/KerberosHelper.java | 384 ++++++++++++++----- .../internal/HostComponentResourceProvider.java | 39 +- .../orm/dao/KerberosPrincipalHostDAO.java | 18 +- .../kerberos/KerberosServerAction.java | 6 + .../UpdateKerberosConfigsServerAction.java | 163 ++------ .../ambari/server/state/ConfigHelper.java | 48 +++ .../server/state/cluster/ClustersImpl.java | 8 + .../AbstractKerberosDescriptorContainer.java | 65 ++++ .../state/kerberos/KerberosDescriptorType.java | 3 +- .../HDFS/2.1.0.2.0/kerberos.json | 5 +- .../package/scripts/kerberos_common.py | 11 +- .../OOZIE/4.0.0.2.0/kerberos.json | 5 +- .../server/agent/TestHeartbeatHandler.java | 1 + .../AmbariManagementControllerTest.java | 6 +- .../controller/AuthToLocalBuilderTest.java | 21 + .../server/controller/KerberosHelperTest.java | 111 +++--- .../HostComponentResourceProviderTest.java | 129 +------ .../UpdateKerberosConfigsServerActionTest.java | 25 +- .../KerberosComponentDescriptorTest.java | 30 +- .../state/kerberos/KerberosDescriptorTest.java | 28 ++ .../kerberos/KerberosServiceDescriptorTest.java | 
31 ++ .../resources/stacks/HDP/2.0.8/kerberos.json | 5 +- 25 files changed, 774 insertions(+), 482 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java index eacd025..14e524e 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java @@ -459,21 +459,22 @@ public class HeartBeatHandler { "SET_KEYTAB".equalsIgnoreCase(report.getCustomCommand()) && RequestExecution.Status.COMPLETED.name().equalsIgnoreCase(report.getStatus())) { - Map structuredOutput; + WriteKeytabsStructuredOut writeKeytabsStructuredOut; try { - structuredOutput = gson.fromJson(report.getStructuredOut(), - new TypeToken>() { - }.getType()); + writeKeytabsStructuredOut = gson.fromJson(report.getStructuredOut(), WriteKeytabsStructuredOut.class); } catch (JsonSyntaxException ex) { //Json structure was incorrect do nothing, pass this data further for processing - structuredOutput = null; + writeKeytabsStructuredOut = null; } - if (structuredOutput != null) { - for (Map.Entry entry : structuredOutput.entrySet()) { - String principal = entry.getKey(); - if (!kerberosPrincipalHostDAO.exists(principal, hostname)) { - kerberosPrincipalHostDAO.create(principal, hostname); + if (writeKeytabsStructuredOut != null) { + Map keytabs = writeKeytabsStructuredOut.getKeytabs(); + if (keytabs != null) { + for (Map.Entry entry : keytabs.entrySet()) { + String principal = entry.getKey(); + if (!kerberosPrincipalHostDAO.exists(principal, hostname)) { + kerberosPrincipalHostDAO.create(principal, hostname); + } } } } @@ -1102,4 +1103,20 @@ public class HeartBeatHandler { } } + /** + * This class is used for mapping json of structured output for keytab distribution actions. 
+ */ + private static class WriteKeytabsStructuredOut { + @SerializedName("keytabs") + private Map keytabs; + + public Map getKeytabs() { + return keytabs; + } + + public void setKeytabs(Map keytabs) { + this.keytabs = keytabs; + } + } + } http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java index 7e811e2..3b59fed 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java @@ -65,6 +65,7 @@ import org.apache.ambari.server.security.authorization.Users; import org.apache.ambari.server.security.ldap.AmbariLdapDataPopulator; import org.apache.ambari.server.security.ldap.LdapBatchDto; import org.apache.ambari.server.security.ldap.LdapSyncDto; +import org.apache.ambari.server.serveraction.kerberos.KerberosInvalidConfigurationException; import org.apache.ambari.server.stageplanner.RoleGraph; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; @@ -1834,7 +1835,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle } } - private List doStageCreation(RequestStageContainer requestStages, + private RequestStageContainer doStageCreation(RequestStageContainer requestStages, Cluster cluster, Map> changedServices, Map> changedComps, @@ -1856,7 +1857,8 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle if ((changedServices == null || changedServices.isEmpty()) && (changedComps == null || changedComps.isEmpty()) && (changedScHosts == null || changedScHosts.isEmpty())) { - return null; + LOG.debug("Created 0 stages"); + return requestStages; } // smoke test any service that goes from installed to started @@ -1882,10 +1884,12 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle String HostParamsJson = StageUtils.getGson().toJson( customCommandExecutionHelper.createDefaultHostParams(cluster)); - Stage stage = createNewStage(requestStages.getLastStageId() + 1, cluster, + Stage stage = createNewStage(requestStages.getLastStageId(), cluster, requestStages.getId(), requestProperties.get(REQUEST_CONTEXT_PROPERTY), clusterHostInfoJson, "{}", HostParamsJson); + Collection componentsToEnableKerberos = new ArrayList(); + //HACK String jobtrackerHost = getJobTrackerHost(cluster); for (String compName : changedScHosts.keySet()) { @@ -1923,6 +1927,22 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle scHost.getServiceComponentName(), scHost.getHostName(), nowTimestamp, scHost.getDesiredStackVersion().getStackId()); + + // If the state is transitioning from INIT TO INSTALLED and the cluster has Kerberos + // enabled, mark this ServiceComponentHost to see if anything needs to be done to + // make sure it is properly configured. The Kerberos-related stages needs to be + // between the INSTALLED and STARTED states because some services need to set up + // the host (i,e, create user accounts, etc...) 
before Kerberos-related tasks an + // occur (like distribute keytabs) + if((oldSchState == State.INIT) && kerberosHelper.isClusterKerberosEnabled(cluster)) { + try { + kerberosHelper.configureService(cluster, scHost); + } catch (KerberosInvalidConfigurationException e) { + throw new AmbariException(e.getMessage(), e); + } + + componentsToEnableKerberos.add(scHost); + } } else if (oldSchState == State.STARTED // TODO: oldSchState == State.INSTALLED is always false, looks like a bug //|| oldSchState == State.INSTALLED @@ -2102,14 +2122,38 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle RoleCommandOrder rco = getRoleCommandOrder(cluster); RoleGraph rg = new RoleGraph(rco); + rg.build(stage); - return rg.getStages(); + requestStages.addStages(rg.getStages()); + + if (!componentsToEnableKerberos.isEmpty()) { + Map> serviceFilter = new HashMap>(); + + for (ServiceComponentHost scHost : componentsToEnableKerberos) { + String serviceName = scHost.getServiceName(); + Collection componentFilter = serviceFilter.get(serviceName); + + if (componentFilter == null) { + componentFilter = new HashSet(); + serviceFilter.put(serviceName, componentFilter); + } + + componentFilter.add(scHost.getServiceComponentName()); + } + + kerberosHelper.ensureIdentities(cluster, serviceFilter, null, requestStages); + } + + List stages = requestStages.getStages(); + LOG.debug("Created {} stages", ((stages != null) ? stages.size() : 0)); + + } else { + LOG.debug("Created 0 stages"); } - return null; + return requestStages; } - @Transactional void updateServiceStates( Map> changedServices, @@ -2186,12 +2230,10 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle requestStages = new RequestStageContainer(actionManager.getNextRequestId(), null, requestFactory, actionManager); } - List stages = doStageCreation(requestStages, cluster, changedServices, changedComponents, + requestStages = doStageCreation(requestStages, cluster, changedServices, changedComponents, changedHosts, requestParameters, requestProperties, runSmokeTest, reconfigureClients); - LOG.debug("Created {} stages", ((stages != null) ? stages.size() : 0)); - requestStages.addStages(stages); updateServiceStates(changedServices, changedComponents, changedHosts, ignoredHosts); return requestStages; } http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/main/java/org/apache/ambari/server/controller/AuthToLocalBuilder.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AuthToLocalBuilder.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AuthToLocalBuilder.java index a22c759..c599cc1 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AuthToLocalBuilder.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AuthToLocalBuilder.java @@ -198,6 +198,23 @@ public class AuthToLocalBuilder { return new Rule(rule.startsWith("RULE:") ? rule : String.format("RULE:%s", rule)); } + /** + * Creates and returns a deep copy of this AuthToLocalBuilder. + * + * @return a deep copy of this AuthToLocalBuilder + */ + public AuthToLocalBuilder copy() { + AuthToLocalBuilder copy = new AuthToLocalBuilder(); + + // TODO: This needs to be done in a loop rather than use Set.addAll because there may be an issue + // TODO: with the Rule.compareTo method? 
+ for(Rule rule:setRules) { + copy.setRules.add(rule); + } + + return copy; + } + /** * Rule implementation. http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java index db19611..7a0a374 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelper.java @@ -114,7 +114,7 @@ public class KerberosHelper { private static final Logger LOG = LoggerFactory.getLogger(KerberosHelper.class); /** - * config type which contains the property used to determine if keberos is enabled + * config type which contains the property used to determine if Kerberos is enabled */ private static final String SECURITY_ENABLED_CONFIG_TYPE = "cluster-env"; @@ -297,7 +297,7 @@ public class KerberosHelper { * executed to complete this task; or null if no stages need to be executed. * @throws AmbariException */ - public RequestStageContainer ensureIdentities(Cluster cluster, Map> serviceComponentFilter, + public RequestStageContainer ensureIdentities(Cluster cluster, Map> serviceComponentFilter, Collection identityFilter, RequestStageContainer requestStageContainer) throws AmbariException { try { @@ -309,6 +309,161 @@ public class KerberosHelper { } /** + * Updates the relevant configurations for the given Service. + *

+ * If the relevant service and its components have Kerberos descriptors, configuration values from + * the descriptors are used to update the relevant configuration sets. + * + * @param cluster the relevant Cluster + * @param serviceComponentHost the ServiceComponentHost + * @throws AmbariException + */ + public void configureService(Cluster cluster, ServiceComponentHost serviceComponentHost) + throws AmbariException, KerberosInvalidConfigurationException { + + KerberosDetails kerberosDetails = getKerberosDetails(cluster); + + // Set properties... + String serviceName = serviceComponentHost.getServiceName(); + KerberosDescriptor kerberosDescriptor = getKerberosDescriptor(cluster); + KerberosServiceDescriptor serviceDescriptor = kerberosDescriptor.getService(serviceName); + + if (serviceDescriptor != null) { + Map kerberosDescriptorProperties = kerberosDescriptor.getProperties(); + Map> kerberosConfigurations = new HashMap>(); + Map> configurations = calculateConfigurations(cluster, + serviceComponentHost.getHostName(), kerberosDescriptorProperties); + + Map componentDescriptors = serviceDescriptor.getComponents(); + for (KerberosComponentDescriptor componentDescriptor : componentDescriptors.values()) { + if (componentDescriptor != null) { + Map> identityConfigurations; + List identities; + + identities = serviceDescriptor.getIdentities(true); + identityConfigurations = getConfigurations(identities); + if (identityConfigurations != null) { + for (Map.Entry> entry : identityConfigurations.entrySet()) { + mergeConfigurations(kerberosConfigurations, entry.getKey(), entry.getValue(), configurations); + } + } + + identities = componentDescriptor.getIdentities(true); + identityConfigurations = getConfigurations(identities); + if (identityConfigurations != null) { + for (Map.Entry> entry : identityConfigurations.entrySet()) { + mergeConfigurations(kerberosConfigurations, entry.getKey(), entry.getValue(), configurations); + } + } + + mergeConfigurations(kerberosConfigurations, + componentDescriptor.getConfigurations(true), configurations); + } + } + + setAuthToLocalRules(kerberosDescriptor, cluster, kerberosDetails.getDefaultRealm(), configurations, kerberosConfigurations); + + for (Map.Entry> entry : kerberosConfigurations.entrySet()) { + configHelper.updateConfigType(cluster, ambariManagementController, entry.getKey(), entry.getValue(), + ambariManagementController.getAuthName(), String.format("Enabling Kerberos for %s", serviceName)); + } + } + } + + /** + * Sets the relevant auth-to-local rule configuration properties using the services installed on + * the cluster and their relevant Kerberos descriptors to determine the rules to be created. 
+ * + * @param kerberosDescriptor the current Kerberos descriptor + * @param cluster the cluster + * @param realm the default realm + * @param existingConfigurations a map of the current configurations + * @param kerberosConfigurations a map of the configurations to update, this where the generated + * auth-to-local values will be stored + * @throws AmbariException + */ + private void setAuthToLocalRules(KerberosDescriptor kerberosDescriptor, Cluster cluster, String realm, + Map> existingConfigurations, + Map> kerberosConfigurations) + throws AmbariException { + + if (kerberosDescriptor != null) { + + Set authToLocalProperties; + Set authToLocalPropertiesToSet = new HashSet(); + + // Determine which properties need to be set + AuthToLocalBuilder authToLocalBuilder = new AuthToLocalBuilder(); + + addIdentities(authToLocalBuilder, kerberosDescriptor.getIdentities(), null, existingConfigurations); + + authToLocalProperties = kerberosDescriptor.getAuthToLocalProperties(); + if (authToLocalProperties != null) { + authToLocalPropertiesToSet.addAll(authToLocalProperties); + } + + Map services = kerberosDescriptor.getServices(); + if (services != null) { + Map installedServices = cluster.getServices(); + + for (KerberosServiceDescriptor service : services.values()) { + if (installedServices.containsKey(service.getName())) { + + addIdentities(authToLocalBuilder, service.getIdentities(true), null, existingConfigurations); + + authToLocalProperties = service.getAuthToLocalProperties(); + if (authToLocalProperties != null) { + authToLocalPropertiesToSet.addAll(authToLocalProperties); + } + + Map components = service.getComponents(); + if (components != null) { + for (KerberosComponentDescriptor component : components.values()) { + addIdentities(authToLocalBuilder, component.getIdentities(true), null, existingConfigurations); + + authToLocalProperties = component.getAuthToLocalProperties(); + if (authToLocalProperties != null) { + authToLocalPropertiesToSet.addAll(authToLocalProperties); + } + } + } + } + } + } + + if (!authToLocalPropertiesToSet.isEmpty()) { + for (String authToLocalProperty : authToLocalPropertiesToSet) { + String[] parts = authToLocalProperty.split("/"); + + if (parts.length == 2) { + AuthToLocalBuilder builder = authToLocalBuilder.copy(); + String configType = parts[0]; + String propertyName = parts[1]; + + // Add existing auth_to_local configuration, if set + Map existingConfiguration = existingConfigurations.get(configType); + if (existingConfiguration != null) { + builder.addRules(existingConfiguration.get(propertyName)); + } + + // Add/update descriptor auth_to_local configuration, if set + Map kerberosConfiguration = kerberosConfigurations.get(configType); + if (kerberosConfiguration != null) { + builder.addRules(kerberosConfiguration.get(propertyName)); + } else { + kerberosConfiguration = new HashMap(); + kerberosConfigurations.put(configType, kerberosConfiguration); + } + + kerberosConfiguration.put(propertyName, builder.generate(realm)); + } + } + } + } + } + + + /** * Performs operations needed to process Kerberos related tasks on the relevant cluster. *

* Iterates through the components installed on the relevant cluster to determine if work @@ -334,7 +489,7 @@ public class KerberosHelper { @Transactional private RequestStageContainer handle(Cluster cluster, KerberosDetails kerberosDetails, - Map> serviceComponentFilter, + Map> serviceComponentFilter, Collection identityFilter, RequestStageContainer requestStageContainer, Handler handler) throws AmbariException { @@ -353,7 +508,6 @@ public class KerberosHelper { KerberosActionDataFileBuilder kerberosActionDataFileBuilder = null; Map kerberosDescriptorProperties = kerberosDescriptor.getProperties(); Map> kerberosConfigurations = new HashMap>(); - AuthToLocalBuilder authToLocalBuilder = new AuthToLocalBuilder(); // Create a temporary directory to store metadata needed to complete this task. Information // such as which principals and keytabs files to create as well as what configurations need @@ -384,38 +538,7 @@ public class KerberosHelper { if ((serviceComponentHosts != null) && !serviceComponentHosts.isEmpty()) { // Calculate the current host-specific configurations. These will be used to replace // variables within the Kerberos descriptor data - Map> configurations = calculateConfigurations(cluster, hostname); - // add existing kerberos auth_to_local rules to builder - if (configurations.containsKey("core-site")) { - authToLocalBuilder.addRules( - configurations.get("core-site").get("hadoop.security.auth_to_local")); - } - - // A map to hold un-categorized properties. This may come from the KerberosDescriptor - // and will also contain a value for the current host - Map generalProperties = new HashMap(); - - // Make sure the configurations exist. - if (configurations == null) { - configurations = new HashMap>(); - } - - // If any properties are set in the calculated KerberosDescriptor, add them into the - // Map of configurations as an un-categorized type (using an empty string) - if (kerberosDescriptorProperties != null) { - generalProperties.putAll(kerberosDescriptorProperties); - } - - // Add the current hostname under "host" and "hostname" - generalProperties.put("host", hostname); - generalProperties.put("hostname", hostname); - generalProperties.put("cluster_name", clusterName); - - if (configurations.get("") == null) { - configurations.put("", generalProperties); - } else { - configurations.get("").putAll(generalProperties); - } + Map> configurations = calculateConfigurations(cluster, hostname, kerberosDescriptorProperties); // Iterate over the components installed on the current host to get the service and // component-level Kerberos descriptors in order to determine which principals, @@ -430,45 +553,39 @@ public class KerberosHelper { if (serviceDescriptor != null) { String componentName = sch.getServiceComponentName(); + int identitiesAdded = 0; + List serviceIdentities = serviceDescriptor.getIdentities(true); + + // Lazily create the KerberosActionDataFileBuilder instance... + if (kerberosActionDataFileBuilder == null) { + kerberosActionDataFileBuilder = new KerberosActionDataFileBuilder(indexFile); + } + + // Add service-level principals (and keytabs) + identitiesAdded += addIdentities(kerberosActionDataFileBuilder, serviceIdentities, + identityFilter, hostname, serviceName, componentName, configurations); // If there is no filter or the filter contains the current component name, // test to see if this component should be process by querying the handler... 
if (((componentFilter == null) || componentFilter.contains(componentName)) && handler.shouldProcess(desiredSecurityState, sch)) { KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(componentName); - List serviceIdentities = serviceDescriptor.getIdentities(true); if (componentDescriptor != null) { List componentIdentities = componentDescriptor.getIdentities(true); - int identitiesAdded = 0; // Calculate the set of configurations to update and replace any variables // using the previously calculated Map of configurations for the host. mergeConfigurations(kerberosConfigurations, componentDescriptor.getConfigurations(true), configurations); - // Lazily create the KerberosActionDataFileBuilder instance... - if (kerberosActionDataFileBuilder == null) { - kerberosActionDataFileBuilder = new KerberosActionDataFileBuilder(indexFile); - } - - // Add service-level principals (and keytabs) - identitiesAdded += addIdentities(kerberosActionDataFileBuilder, serviceIdentities, - identityFilter, hostname, serviceName, componentName, configurations); - // Add component-level principals (and keytabs) identitiesAdded += addIdentities(kerberosActionDataFileBuilder, componentIdentities, identityFilter, hostname, serviceName, componentName, configurations); - - if (identitiesAdded > 0) { - serviceComponentHostsToProcess.add(sch); - } - - // Add component-level principals to auth_to_local builder - addIdentities(authToLocalBuilder, componentIdentities, identityFilter, configurations); } + } - // Add service-level principals to auth_to_local builder - addIdentities(authToLocalBuilder, serviceIdentities, identityFilter, configurations); + if (identitiesAdded > 0) { + serviceComponentHostsToProcess.add(sch); } } } @@ -506,20 +623,9 @@ public class KerberosHelper { throw new IllegalArgumentException(e.getMessage(), e); } - // Determine if the any auth_to_local configurations need to be set dynamically - // Lazily create the auth_to_local rules - String authToLocal = null; - for (Map configuration : kerberosConfigurations.values()) { - for (Map.Entry entry : configuration.entrySet()) { - if ("_AUTH_TO_LOCAL_RULES".equals(entry.getValue())) { - if (authToLocal == null) { - authToLocal = authToLocalBuilder.generate(kerberosDetails.getDefaultRealm()); - } - - entry.setValue(authToLocal); - } - } - } + setAuthToLocalRules(kerberosDescriptor, cluster, kerberosDetails.getDefaultRealm(), + calculateConfigurations(cluster, null, kerberosDescriptorProperties), + kerberosConfigurations); } // Ensure the cluster-env/security_enabled flag is set properly @@ -561,6 +667,7 @@ public class KerberosHelper { // Add the cleanup stage... 
Map commandParameters = new HashMap(); + commandParameters.put(KerberosServerAction.AUTHENTICATED_USER_NAME, ambariManagementController.getAuthName()); commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath()); Stage stage = createServerActionStage(requestStageContainer.getLastStageId(), @@ -944,21 +1051,7 @@ public class KerberosHelper { if (configurationDescriptor != null) { Map updatedProperties = configurationDescriptor.getProperties(); - - if (updatedProperties != null) { - Map existingProperties = configurations.get(type); - if (existingProperties == null) { - existingProperties = new HashMap(); - configurations.put(type, existingProperties); - } - - for (Map.Entry property : updatedProperties.entrySet()) { - existingProperties.put( - KerberosDescriptor.replaceVariables(property.getKey(), replacements), - KerberosDescriptor.replaceVariables(property.getValue(), replacements) - ); - } - } + mergeConfigurations(configurations, type, updatedProperties, replacements); } } } @@ -966,6 +1059,25 @@ public class KerberosHelper { return configurations; } + private void mergeConfigurations(Map> configurations, String type, + Map updates, + Map> replacements) throws AmbariException { + if (updates != null) { + Map existingProperties = configurations.get(type); + if (existingProperties == null) { + existingProperties = new HashMap(); + configurations.put(type, existingProperties); + } + + for (Map.Entry property : updates.entrySet()) { + existingProperties.put( + KerberosDescriptor.replaceVariables(property.getKey(), replacements), + KerberosDescriptor.replaceVariables(property.getValue(), replacements) + ); + } + } + } + /** * Adds identities to the KerberosActionDataFileBuilder. * @@ -1081,22 +1193,23 @@ public class KerberosHelper { /** * Calculates the map of configurations relative to the cluster and host. *

- * This was borrowed from {@link org.apache.ambari.server.actionmanager.ExecutionCommandWrapper#getExecutionCommand()} + * Most of this was borrowed from {@link org.apache.ambari.server.actionmanager.ExecutionCommandWrapper#getExecutionCommand()} * - * @param cluster the relevant Cluster - * @param hostname the relevant hostname + * @param cluster the relevant Cluster + * @param hostname the relevant hostname + * @param kerberosDescriptorProperties a map of general Kerberos descriptor properties * @return a Map of calculated configuration types * @throws AmbariException */ - private Map> calculateConfigurations(Cluster cluster, String hostname) throws AmbariException { + private Map> calculateConfigurations(Cluster cluster, String hostname, + Map kerberosDescriptorProperties) + throws AmbariException { // For a configuration type, both tag and an actual configuration can be stored // Configurations from the tag is always expanded and then over-written by the actual // global:version1:{a1:A1,b1:B1,d1:D1} + global:{a1:A2,c1:C1,DELETED_d1:x} ==> // global:{a1:A2,b1:B1,c1:C1} Map> configurations = new HashMap>(); - Map> configurationTags = ambariManagementController.findConfigurationTagsWithOverrides(cluster, hostname); - Map>> configurationAttributes = new TreeMap>>(); if (configurationTags.get(Configuration.GLOBAL_CONFIG_TAG) != null) { configHelper.applyCustomConfig( @@ -1124,26 +1237,34 @@ public class KerberosHelper { configurations.put(type, configuration); } - Map>> configAttributes = - configHelper.getEffectiveConfigAttributes(cluster, configurationTags); - - for (Map.Entry>> attributesOccurrence : configAttributes.entrySet()) { - String type = attributesOccurrence.getKey(); - Map> attributes = attributesOccurrence.getValue(); + // A map to hold un-categorized properties. 
This may come from the KerberosDescriptor + // and will also contain a value for the current host + Map generalProperties = configurations.get(""); + if (generalProperties == null) { + generalProperties = new HashMap(); + configurations.put("", generalProperties); + } - if (!configurationAttributes.containsKey(type)) { - configurationAttributes.put(type, new TreeMap>()); - } - configHelper.cloneAttributesMap(attributes, configurationAttributes.get(type)); + // If any properties are set in the calculated KerberosDescriptor, add them into the + // Map of configurations as an un-categorized type (using an empty string) + if (kerberosDescriptorProperties != null) { + generalProperties.putAll(kerberosDescriptorProperties); } + // Add the current hostname under "host" and "hostname" + generalProperties.put("host", hostname); + generalProperties.put("hostname", hostname); + + // Add the current cluster's name + generalProperties.put("cluster_name", cluster.getClusterName()); + // add clusterHostInfo config Map componentHosts = new HashMap(); for (Map.Entry service : cluster.getServices().entrySet()) { for (Map.Entry serviceComponent : service.getValue().getServiceComponents().entrySet()) { if (StageUtils.getComponentToClusterInfoKeyMap().keySet().contains(serviceComponent.getValue().getName())) { componentHosts.put(StageUtils.getComponentToClusterInfoKeyMap().get(serviceComponent.getValue().getName()), - StringUtils.join(serviceComponent.getValue().getServiceComponentHosts().keySet(), ",")); + StringUtils.join(serviceComponent.getValue().getServiceComponentHosts().keySet(), ",")); } } } @@ -1303,6 +1424,62 @@ public class KerberosHelper { } /** + * Given a list of KerberosIdentityDescriptors, returns a Map fo configuration types to property + * names and values. + *

+ * The property names and values are not expected to have any variable replacements done. + * + * @param identityDescriptors a List of KerberosIdentityDescriptor from which to retrieve configurations + * @return a Map of configuration types to property name/value pairs (as a Map) + */ + private Map> getConfigurations(List identityDescriptors) { + Map> map = new HashMap>(); + + if (identityDescriptors != null) { + for (KerberosIdentityDescriptor identityDescriptor : identityDescriptors) { + KerberosPrincipalDescriptor principalDescriptor = identityDescriptor.getPrincipalDescriptor(); + if (principalDescriptor != null) { + putConfiguration(map, principalDescriptor.getConfiguration(), principalDescriptor.getValue()); + } + + KerberosKeytabDescriptor keytabDescriptor = identityDescriptor.getKeytabDescriptor(); + if (keytabDescriptor != null) { + putConfiguration(map, keytabDescriptor.getConfiguration(), keytabDescriptor.getFile()); + } + } + } + + return map; + } + + /** + * Inserts a configuration property and value into a map of configuration types to property + * name/value pair maps. + * + * @param map the Map to insert into + * @param configuration a configuration property in the form of config-type/property_name + * @param value the value of the configuration property + */ + private void putConfiguration(Map> map, String configuration, String value) { + if (configuration != null) { + String[] principalTokens = configuration.split("/"); + + if (principalTokens.length == 2) { + String type = principalTokens[0]; + String propertyName = principalTokens[1]; + + Map properties = map.get(type); + if (properties == null) { + properties = new HashMap(); + map.put(type, properties); + } + + properties.put(propertyName, value); + } + } + } + + /** * A enumeration of the supported custom operations */ public static enum SupportedCustomOperation { @@ -1612,6 +1789,7 @@ public class KerberosHelper { } Map commandParameters = new HashMap(); + commandParameters.put(KerberosServerAction.AUTHENTICATED_USER_NAME, ambariManagementController.getAuthName()); commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath()); commandParameters.put(KerberosServerAction.DEFAULT_REALM, kerberosDetails.getDefaultRealm()); commandParameters.put(KerberosServerAction.KDC_TYPE, kerberosDetails.getKdcType().name()); @@ -1701,6 +1879,7 @@ public class KerberosHelper { } Map commandParameters = new HashMap(); + commandParameters.put(KerberosServerAction.AUTHENTICATED_USER_NAME, ambariManagementController.getAuthName()); commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath()); commandParameters.put(KerberosServerAction.DEFAULT_REALM, kerberosDetails.getDefaultRealm()); commandParameters.put(KerberosServerAction.KDC_TYPE, kerberosDetails.getKdcType().name()); @@ -1863,6 +2042,7 @@ public class KerberosHelper { } Map commandParameters = new HashMap(); + commandParameters.put(KerberosServerAction.AUTHENTICATED_USER_NAME, ambariManagementController.getAuthName()); commandParameters.put(KerberosServerAction.DATA_DIRECTORY, dataDirectory.getAbsolutePath()); commandParameters.put(KerberosServerAction.DEFAULT_REALM, kerberosDetails.getDefaultRealm()); commandParameters.put(KerberosServerAction.KDC_TYPE, kerberosDetails.getKdcType().name()); http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostComponentResourceProvider.java 
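An aside on the config-type/property_name convention used by putConfiguration and setAuthToLocalRules above: a minimal, self-contained sketch of how such an identifier is split and applied. The identifier value comes from the HDFS kerberos.json later in this commit; the realm and the generated rule are hypothetical stand-ins for what AuthToLocalBuilder.generate(realm) would produce.

import java.util.HashMap;
import java.util.Map;

public class AuthToLocalPropertySketch {
  public static void main(String[] args) {
    // Stand-in for the kerberosConfigurations map that KerberosHelper builds up.
    Map<String, Map<String, String>> kerberosConfigurations = new HashMap<String, Map<String, String>>();

    // Identifier format: config-type/property_name (see auth_to_local_properties in the HDFS kerberos.json below).
    String authToLocalProperty = "core-site/hadoop.security.auth_to_local";
    String[] parts = authToLocalProperty.split("/");

    if (parts.length == 2) {
      Map<String, String> typeProperties = kerberosConfigurations.get(parts[0]);
      if (typeProperties == null) {
        typeProperties = new HashMap<String, String>();
        kerberosConfigurations.put(parts[0], typeProperties);
      }
      // KerberosHelper would store AuthToLocalBuilder.generate(realm) here; a literal rule stands in.
      typeProperties.put(parts[1], "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\nDEFAULT");
    }

    System.out.println(kerberosConfigurations);
  }
}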
---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostComponentResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostComponentResourceProvider.java index 196ae21..639e170 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostComponentResourceProvider.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostComponentResourceProvider.java @@ -34,7 +34,6 @@ import com.google.inject.Injector; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.configuration.ComponentSSLConfiguration; import org.apache.ambari.server.controller.AmbariManagementController; -import org.apache.ambari.server.controller.KerberosHelper; import org.apache.ambari.server.controller.MaintenanceStateHelper; import org.apache.ambari.server.controller.RequestStatusResponse; import org.apache.ambari.server.controller.ServiceComponentHostRequest; @@ -60,7 +59,6 @@ import com.google.inject.assistedinject.AssistedInject; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.MaintenanceState; -import org.apache.ambari.server.state.SecurityType; import org.apache.ambari.server.state.ServiceComponent; import org.apache.ambari.server.state.ServiceComponentHost; import org.apache.ambari.server.state.ServiceComponentHostEvent; @@ -124,13 +122,6 @@ public class HostComponentResourceProvider extends AbstractControllerResourcePro @Inject private MaintenanceStateHelper maintenanceStateHelper; - /** - * kerberos helper - */ - @Inject - private KerberosHelper kerberosHelper; - - // ----- Constructors ---------------------------------------------------- /** @@ -415,8 +406,6 @@ public class HostComponentResourceProvider extends AbstractControllerResourcePro clusterNames.add(clusterName); } - boolean addKerberosStages = false; - for (ServiceComponentHostRequest request : requests) { validateServiceComponentHostRequest(request); @@ -475,10 +464,6 @@ public class HostComponentResourceProvider extends AbstractControllerResourcePro // set desired state on host component newState = State.valueOf(request.getDesiredState()); - // determine if this state transition will require that kerberos stages are added to request. 
- // once set to true will stay true - addKerberosStages = addKerberosStages || requiresKerberosStageAddition(oldState, newState, cluster); - // throw exception if desired state isn't a valid desired state (static check) if (!newState.isValidDesiredState()) { throw new IllegalArgumentException("Invalid arguments, invalid" @@ -563,16 +548,9 @@ public class HostComponentResourceProvider extends AbstractControllerResourcePro // just getting the first cluster Cluster cluster = clusters.getCluster(clusterNames.iterator().next()); - RequestStageContainer requestStages = getManagementController().addStages( + return getManagementController().addStages( stages, cluster, requestProperties, null, null, null, changedScHosts, ignoredScHosts, runSmokeTest, false); - - if (addKerberosStages) { - // adds the necessary kerberos related stages to the request - kerberosHelper.toggleKerberos(cluster, SecurityType.KERBEROS, requestStages); - } - - return requestStages; } @Override @@ -847,21 +825,6 @@ public class HostComponentResourceProvider extends AbstractControllerResourcePro } } - /** - * Determine if kerberos stages need to be added to the request as a result of a - * host component state change. - * - * @param current current host component state - * @param target target host component state - * @param cluster associated cluster - * @return whether kerberos stages should be added to the request - */ - public boolean requiresKerberosStageAddition(State current, State target, Cluster cluster) { - return current == State.INIT && - target == State.INSTALLED && - kerberosHelper.isClusterKerberosEnabled(cluster); - } - // ----- inner classes --------------------------------------------------- http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/KerberosPrincipalHostDAO.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/KerberosPrincipalHostDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/KerberosPrincipalHostDAO.java index 9086e93..fef0597 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/KerberosPrincipalHostDAO.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/KerberosPrincipalHostDAO.java @@ -165,7 +165,7 @@ public class KerberosPrincipalHostDAO { */ @Transactional public void removeByPrincipal(String principalName) { - entityManagerProvider.get().remove(findByPrincipal(principalName)); + remove(findByPrincipal(principalName)); } /** @@ -175,7 +175,7 @@ public class KerberosPrincipalHostDAO { */ @Transactional public void removeByHost(String hostName) { - entityManagerProvider.get().remove(findByHost(hostName)); + remove(findByHost(hostName)); } /** @@ -201,4 +201,18 @@ public class KerberosPrincipalHostDAO { public boolean exists(String principalName, String hostName) { return find(principalName, hostName) != null; } + + /** + * Removes multiple KerberosPrincipalHostEntity items + * + * @param entities a collection of KerberosPrincipalHostEntity items to remove + */ + private void remove(List entities) { + if (entities != null) { + for (KerberosPrincipalHostEntity entity : entities) { + entityManagerProvider.get().remove(entity); + } + } + } + } http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java 
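Before the next two files: the AUTHENTICATED_USER_NAME command parameter defined in KerberosServerAction below is written into each stage's command parameters by KerberosHelper and read back by UpdateKerberosConfigsServerAction further down. A minimal, self-contained sketch of that round trip; "admin" is a hypothetical stand-in for the controller's getAuthName() value.

import java.util.HashMap;
import java.util.Map;

public class AuthenticatedUserParameterSketch {
  // Mirrors the KerberosServerAction.AUTHENTICATED_USER_NAME constant introduced in this commit.
  static final String AUTHENTICATED_USER_NAME = "authenticated_user_name";

  public static void main(String[] args) {
    // Producer side (KerberosHelper): record who triggered the request in the stage's command parameters.
    Map<String, String> commandParameters = new HashMap<String, String>();
    commandParameters.put(AUTHENTICATED_USER_NAME, "admin"); // stand-in for ambariManagementController.getAuthName()

    // Consumer side (UpdateKerberosConfigsServerAction): read it back when attributing config changes.
    String authenticatedUserName = commandParameters.get(AUTHENTICATED_USER_NAME);
    System.out.println("Kerberos config changes will be attributed to: " + authenticatedUserName);
  }
}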
---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java index 98cc4e2..fc1729b 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/KerberosServerAction.java @@ -46,6 +46,12 @@ import java.util.Map; */ public abstract class KerberosServerAction extends AbstractServerAction { /** + * A (command parameter) property name used to hold the authenticated user's name for use in + * operations that record the acting user. + */ + public static final String AUTHENTICATED_USER_NAME = "authenticated_user_name"; + + /** * A (command parameter) property name used to hold the absolute path to the directory that is to * be used to store transient data while the request is being processed. This is expected to be * a temporary directory. http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/UpdateKerberosConfigsServerAction.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/UpdateKerberosConfigsServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/UpdateKerberosConfigsServerAction.java index 728865f..eca9b79 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/UpdateKerberosConfigsServerAction.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/UpdateKerberosConfigsServerAction.java @@ -18,24 +18,21 @@ package org.apache.ambari.server.serveraction.kerberos; -import com.google.common.collect.Maps; import com.google.inject.Inject; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.actionmanager.HostRoleStatus; import org.apache.ambari.server.agent.CommandReport; import org.apache.ambari.server.controller.AmbariManagementController; -import org.apache.ambari.server.controller.ConfigurationRequest; import org.apache.ambari.server.serveraction.AbstractServerAction; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; -import org.apache.ambari.server.state.Config; +import org.apache.ambari.server.state.ConfigHelper; import org.apache.ambari.server.state.SecurityType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.ConcurrentMap; @@ -46,14 +43,14 @@ import java.util.concurrent.ConcurrentMap; */ public class UpdateKerberosConfigsServerAction extends AbstractServerAction { - private final static Logger LOG = - LoggerFactory.getLogger(UpdateKerberosConfigsServerAction.class); - - private HashMap> configurations = new HashMap>(); + private final static Logger LOG = LoggerFactory.getLogger(UpdateKerberosConfigsServerAction.class); @Inject private AmbariManagementController controller; + @Inject + private ConfigHelper configHelper; + /** * Executes this ServerAction *

@@ -65,12 +62,11 @@ public class UpdateKerberosConfigsServerAction extends AbstractServerAction { * to a given request * @return a CommandReport declaring the status of the task * @throws org.apache.ambari.server.AmbariException - * * @throws InterruptedException */ @Override public CommandReport execute(ConcurrentMap requestSharedDataContext) - throws AmbariException, InterruptedException { + throws AmbariException, InterruptedException { CommandReport commandReport = null; @@ -78,7 +74,9 @@ public class UpdateKerberosConfigsServerAction extends AbstractServerAction { Clusters clusters = controller.getClusters(); Cluster cluster = clusters.getCluster(clusterName); + String authenticatedUserName = getCommandParameterValue(getCommandParameters(), KerberosServerAction.AUTHENTICATED_USER_NAME); String dataDirectoryPath = getCommandParameterValue(getCommandParameters(), KerberosServerAction.DATA_DIRECTORY); + HashMap> configurations = new HashMap>(); // If the data directory path is set, attempt to process further, else assume there is no work to do if (dataDirectoryPath != null) { @@ -103,7 +101,7 @@ public class UpdateKerberosConfigsServerAction extends AbstractServerAction { if (principalTokens.length == 2) { String principalConfigType = principalTokens[0]; String principalConfigProp = principalTokens[1]; - addConfigTypePropVal(principalConfigType, principalConfigProp, principal); + addConfigTypePropVal(configurations, principalConfigType, principalConfigProp, principal); } String keytabPath = record.get(KerberosActionDataFile.KEYTAB_FILE_PATH); @@ -112,7 +110,7 @@ public class UpdateKerberosConfigsServerAction extends AbstractServerAction { if (keytabTokens.length == 2) { String keytabConfigType = keytabTokens[0]; String keytabConfigProp = keytabTokens[1]; - addConfigTypePropVal(keytabConfigType, keytabConfigProp, keytabPath); + addConfigTypePropVal(configurations, keytabConfigType, keytabConfigProp, keytabPath); } } } @@ -126,18 +124,19 @@ public class UpdateKerberosConfigsServerAction extends AbstractServerAction { String configType = record.get(KerberosConfigDataFile.CONFIGURATION_TYPE); String configKey = record.get(KerberosConfigDataFile.KEY); String configVal = record.get(KerberosConfigDataFile.VALUE); - addConfigTypePropVal(configType, configKey, configVal); + addConfigTypePropVal(configurations, configType, configKey, configVal); } } - for (Map.Entry> entry : configurations.entrySet()) { - updateConfigurationPropertiesForCluster( - cluster, - entry.getKey(), // configType - entry.getValue(), // properties - true, // updateIfExists - true, // createNew - "update services configs to enable kerberos"); + if (!configurations.isEmpty()) { + String configNote = cluster.getSecurityType() == SecurityType.KERBEROS + ? 
"Enabling Kerberos" + : "Disabling Kerberos"; + + for (Map.Entry> entry : configurations.entrySet()) { + configHelper.updateConfigType(cluster, controller, entry.getKey(), entry.getValue(), + authenticatedUserName, configNote); + } } } catch (IOException e) { String message = "Could not update services configs to enable kerberos"; @@ -171,115 +170,11 @@ public class UpdateKerberosConfigsServerAction extends AbstractServerAction { /** - * - * Updates service config properties of a cluster - * @param cluster the cluster for which to update service configs - * @param configType service config type to be updated - * @param properties map of service config properties - * @param updateIfExists flag indicating whether to update if a property already exists - * @param createNewConfigType flag indicating whether to create new service config - * if the config type does not exist - * @param note a short note on change - * @throws AmbariException if the operation fails - */ - private void updateConfigurationPropertiesForCluster( - Cluster cluster, - String configType, - Map properties, - boolean updateIfExists, - boolean createNewConfigType, - String note) - throws AmbariException { - - String newTag = "version" + System.currentTimeMillis(); - String message; - if ((properties != null) && (properties.size() > 0)) { - Map all = cluster.getConfigsByType(configType); - if (all == null || !all.containsKey(newTag)) { - Map oldConfigProperties; - Config oldConfig = cluster.getDesiredConfigByType(configType); - - if (oldConfig == null && !createNewConfigType) { - message = String.format("Config %s not found. Assuming service not installed. " + - "Skipping configuration properties update", configType); - actionLog.writeStdOut(message); - LOG.info(message); - return; - } else if (oldConfig == null) { - oldConfigProperties = new HashMap(); - newTag = "version1"; - } else { - oldConfigProperties = oldConfig.getProperties(); - } - - Map mergedProperties = - mergeProperties(oldConfigProperties, properties, updateIfExists); - - if (!Maps.difference(oldConfigProperties, mergedProperties).areEqual()) { - message = String.format("Applying configuration with tag '%s' to " + - "cluster '%s'", newTag, cluster.getClusterName()); - actionLog.writeStdOut(message); - LOG.info(message); - - ConfigurationRequest cr = new ConfigurationRequest(); - cr.setClusterName(cluster.getClusterName()); - cr.setVersionTag(newTag); - cr.setType(configType); - cr.setProperties(mergedProperties); - cr.setServiceConfigVersionNote(note); - controller.createConfiguration(cr); - - Config baseConfig = cluster.getConfig(cr.getType(), cr.getVersionTag()); - if (baseConfig != null) { - String authName = controller.getAuthName(); - String configNote = null; - configNote = cluster.getSecurityType() == SecurityType.KERBEROS ? - "Enabling Kerberos on Cluster" : "Disabling Kerberos on Cluster"; - if (cluster.addDesiredConfig(authName, Collections.singleton(baseConfig), configNote) != null) { - String oldConfigString = (oldConfig != null) ? " from='" + oldConfig.getTag() + "'" : ""; - message = "cluster '" + cluster.getClusterName() + "' " - + "changed by: '" + authName + "'; " - + "type='" + baseConfig.getType() + "' " - + "tag='" + baseConfig.getTag() + "'" - + oldConfigString; - LOG.info(message); - actionLog.writeStdOut(message); - } - } - } else { - message = "No changes detected to config " + configType + ". 
Skipping configuration properties update"; - LOG.info(message); - actionLog.writeStdOut(message); - } - } - } - } - - /** - * Merges current properties and new properties - * @param originalProperties current properties - * @param newProperties new properties - * @param updateIfExists flag indicating whether to update if a property already exists - * @return merged properties - */ - private static Map mergeProperties(Map originalProperties, - Map newProperties, - boolean updateIfExists) { - - Map properties = new HashMap(originalProperties); - for (Map.Entry entry : newProperties.entrySet()) { - if (!properties.containsKey(entry.getKey()) || updateIfExists) { - properties.put(entry.getKey(), entry.getValue()); - } - } - return properties; - } - - /** * Gets a property from the given commandParameters - * @param commandParameters map of command parameters - * @param propertyName property name to find value for - * @return value of given proeprty name, would return null + * + * @param commandParameters map of command parameters + * @param propertyName property name to find value for + * @return value of given proeprty name, would return null * if the provided commandParameters is null or if the requested property is not found * in commandParams */ @@ -289,11 +184,13 @@ public class UpdateKerberosConfigsServerAction extends AbstractServerAction { /** * Adds a property to properties of a given service config type - * @param configtype service config type - * @param prop property to be added - * @param val value for the proeprty + * + * @param configurations + * @param configtype service config type + * @param prop property to be added + * @param val value for the proeprty */ - private void addConfigTypePropVal(String configtype, String prop, String val) { + private void addConfigTypePropVal(HashMap> configurations, String configtype, String prop, String val) { Map configtypePropsVal = configurations.get(configtype); if (configtypePropsVal == null) { configtypePropsVal = new HashMap(); http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java index fdc307b..83fca25 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java @@ -31,6 +31,7 @@ import java.util.Set; import java.util.TreeMap; import java.util.concurrent.TimeUnit; +import com.google.common.collect.Maps; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.api.services.AmbariMetaInfo; import org.apache.ambari.server.configuration.Configuration; @@ -640,6 +641,53 @@ public class ConfigHelper { /** * A helper method to create a new {@link Config} for a given configuration + * type and updates to the current values, if any. This method will perform the following tasks: + *

  • Merge the specified updates with the properties of the current version of the configuration
  • Create a {@link Config} in the cluster for the specified type. This will have the proper versions and tags set automatically.
  • Set the cluster's {@link DesiredConfig} to the new configuration
  • Create an entry in the configuration history with a note and username.
      + * + * @param cluster + * @param controller + * @param configType + * @param updates + * @param authenticatedUserName + * @param serviceVersionNote + * @throws AmbariException + */ + public void updateConfigType(Cluster cluster, + AmbariManagementController controller, String configType, + Map updates, String authenticatedUserName, + String serviceVersionNote) throws AmbariException { + + if((configType != null) && (updates != null) && !updates.isEmpty()) { + Config oldConfig = cluster.getDesiredConfigByType(configType); + Map oldConfigProperties; + Map properties = new HashMap(); + + if (oldConfig == null) { + oldConfigProperties = null; + } else { + oldConfigProperties = oldConfig.getProperties(); + if (oldConfigProperties != null) { + properties.putAll(oldConfig.getProperties()); + } + } + + properties.putAll(updates); + + if ((oldConfigProperties == null) || !Maps.difference(oldConfigProperties, properties).areEqual()) { + createConfigType(cluster, controller, configType, properties, authenticatedUserName, serviceVersionNote); + } + } + } + + /** + * A helper method to create a new {@link Config} for a given configuration * type. This method will perform the following tasks: *
        *
      • Create a {@link Config} in the cluster for the specified type. This http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java index 01148a8..97ac2be 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java @@ -47,6 +47,7 @@ import org.apache.ambari.server.orm.dao.ClusterDAO; import org.apache.ambari.server.orm.dao.ClusterVersionDAO; import org.apache.ambari.server.orm.dao.HostDAO; import org.apache.ambari.server.orm.dao.HostVersionDAO; +import org.apache.ambari.server.orm.dao.KerberosPrincipalHostDAO; import org.apache.ambari.server.orm.dao.ResourceTypeDAO; import org.apache.ambari.server.orm.entities.ClusterEntity; import org.apache.ambari.server.orm.entities.ClusterVersionEntity; @@ -109,6 +110,8 @@ public class ClustersImpl implements Clusters { @Inject ResourceTypeDAO resourceTypeDAO; @Inject + KerberosPrincipalHostDAO kerberosPrincipalHostDAO; + @Inject ClusterFactory clusterFactory; @Inject HostFactory hostFactory; @@ -692,6 +695,8 @@ public class ClustersImpl implements Clusters { deleteConfigGroupHostMapping(hostname); + // Remove mapping of principals to the unmapped host + kerberosPrincipalHostDAO.removeByHost(hostname); } finally { w.unlock(); } @@ -743,6 +748,9 @@ public class ClustersImpl implements Clusters { hostDAO.remove(entity); hosts.remove(hostname); + // Remove mapping of principals to deleted host + kerberosPrincipalHostDAO.removeByHost(hostname); + // publish the event HostRemovedEvent event = new HostRemovedEvent(hostname); eventPublisher.publish(event); http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java index c6389cb..2ec2cb5 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java @@ -24,9 +24,11 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; /** * AbstractKerberosDescriptorContainer is an abstract class implementing AbstractKerberosDescriptor @@ -64,6 +66,14 @@ import java.util.Map; * "title": "KerberosConfigurationDescriptor" * "type": "{@link org.apache.ambari.server.state.kerberos.KerberosConfigurationDescriptor}" * } + * }, + * "auth_to_local": { + * "description": "A list of configuration properties declaring which properties are auth-to-local values + * "type": "array", + * "items": { + * "title": "String" + * "type": "{@link java.lang.String}" + * } * } * } * } @@ -86,6 +96,12 @@ public abstract class 
http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
index c6389cb..2ec2cb5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
@@ -24,9 +24,11 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 /**
  * AbstractKerberosDescriptorContainer is an abstract class implementing AbstractKerberosDescriptor
@@ -64,6 +66,14 @@ import java.util.Map;
 *           "title": "KerberosConfigurationDescriptor"
 *           "type": "{@link org.apache.ambari.server.state.kerberos.KerberosConfigurationDescriptor}"
 *         }
+*       },
+*       "auth_to_local": {
+*         "description": "A list of configuration properties declaring which properties are auth-to-local values
+*         "type": "array",
+*         "items": {
+*           "title": "String"
+*           "type": "{@link java.lang.String}"
+*         }
 *       }
 *     }
 *   }
@@ -86,6 +96,12 @@ public abstract class AbstractKerberosDescriptorContainer extends AbstractKerber
   private Map configurations = null;
 
   /**
+   * A Set of configuration identifiers (config-type/property_name) that indicate which properties
+   * contain auth_to_local values.
+   */
+  private Set authToLocalProperties = null;
+
+  /**
    * Constructs a new AbstractKerberosDescriptorContainer
    *
    * This constructor must be called from the constructor(s) of the implementing classes
@@ -115,6 +131,16 @@ public abstract class AbstractKerberosDescriptorContainer extends AbstractKerber
           }
         }
       }
+
+      // (Safely) Get the set of KerberosConfigurationDescriptors
+      list = data.get(KerberosDescriptorType.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName());
+      if (list instanceof Collection) {
+        for (Object item : (Collection) list) {
+          if (item instanceof String) {
+            putAuthToLocalProperty((String) item);
+          }
+        }
+      }
     }
   }
 
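For reference, the constructor hunk above reads the new "auth_to_local_properties" list out of the same Map-based payload that the rest of the descriptor is parsed from. A minimal sketch of that data shape follows (the literal map and the stand-alone example class are illustrative only, not part of this diff):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    public class AuthToLocalPropertyParsingSketch {
      public static void main(String[] args) {
        // Hypothetical payload, mirroring a parsed kerberos.json fragment:
        //   "auth_to_local_properties" : [ "core-site/hadoop.security.auth_to_local" ]
        Map<String, Object> data = new HashMap<String, Object>();
        data.put("auth_to_local_properties",
            Arrays.asList("core-site/hadoop.security.auth_to_local"));

        // A concrete AbstractKerberosDescriptorContainer built from this map is then
        // expected to report the same value from getAuthToLocalProperties().
      }
    }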

@@ -398,6 +424,33 @@ public abstract class AbstractKerberosDescriptorContainer extends AbstractKerber
   }
 
   /**
+   * Adds the specified property name to the set of auth_to_local property names.
+   *
+   * Each auth_to_local property name is expected to be in the following format:
+   * config-type/property_name`
+   *
+   * @param authToLocalProperty the auth_to_local property to add
+   */
+  public void putAuthToLocalProperty(String authToLocalProperty) {
+    if (authToLocalProperty != null) {
+      if (authToLocalProperties == null) {
+        authToLocalProperties = new HashSet();
+      }
+
+      authToLocalProperties.add(authToLocalProperty);
+    }
+  }
+
+  /**
+   * Gets the set of auth_to_local property names.
+   *
+   * @return a Set of String values; or null if not set
+   */
+  public Set getAuthToLocalProperties() {
+    return authToLocalProperties;
+  }
+
+  /**
    * Test this AbstractKerberosDescriptor to see if it is a container.
    *
    * This implementation always returns true since it implements a descriptor container.
@@ -459,6 +512,13 @@ public abstract class AbstractKerberosDescriptorContainer extends AbstractKerber
         }
       }
     }
+
+    Set updatedAuthToLocalProperties = updates.getAuthToLocalProperties();
+    if (updatedAuthToLocalProperties != null) {
+      for (String updatedAuthToLocalProperty : updatedAuthToLocalProperties) {
+        putAuthToLocalProperty(updatedAuthToLocalProperty);
+      }
+    }
   }
 }
 
@@ -586,6 +646,11 @@ public abstract class AbstractKerberosDescriptorContainer extends AbstractKerber
       map.put(KerberosDescriptorType.CONFIGURATION.getDescriptorPluralName(), list);
     }
 
+    if (authToLocalProperties != null) {
+      List list = new ArrayList(authToLocalProperties);
+      map.put(KerberosDescriptorType.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), list);
+    }
+
     return map;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorType.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorType.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorType.java
index 94c5046..0677de6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorType.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorType.java
@@ -24,7 +24,8 @@ public enum KerberosDescriptorType {
   IDENTITY("identity", "identities"),
   PRINCIPAL("principal", "principals"),
   KEYTAB("keytab", "keytabs"),
-  CONFIGURATION("configuration", "configurations");
+  CONFIGURATION("configuration", "configurations"),
+  AUTH_TO_LOCAL_PROPERTY("auth_to_local_property", "auth_to_local_properties");
 
   private final String descriptorName;
   private final String descriptorPluralName;

http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/kerberos.json b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/kerberos.json
index 5eb2ac5..ed49871 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/kerberos.json
@@ -19,13 +19,16 @@
           "name": "/hdfs"
         }
       ],
+      "auth_to_local_properties" : [
+        "core-site/hadoop.security.auth_to_local"
+      ],
       "configurations": [
         {
           "core-site": {
             "hadoop.security.authentication": "kerberos",
             "hadoop.rpc.protection": "authentication",
             "hadoop.security.authorization": "true",
-            "hadoop.security.auth_to_local": "_AUTH_TO_LOCAL_RULES",
+            "hadoop.security.auth_to_local": "",
             "hadoop.http.authentication.kerberos.name.rules": "",
             "hadoop.http.filter.initializers": "",
             "hadoop.http.authentication.type": "simple",
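Taken together, the descriptor changes above replace the old _AUTH_TO_LOCAL_RULES placeholder convention: a kerberos.json file now leaves the property value empty and instead lists it under "auth_to_local_properties", and the server fills it in with generated rules. As a rough sketch only of that server-side flow (the helper class and method below are illustrative assumptions, not the project's actual implementation):

    import java.util.Map;
    import java.util.Set;

    import org.apache.ambari.server.controller.AuthToLocalBuilder;
    import org.apache.ambari.server.state.kerberos.AbstractKerberosDescriptorContainer;

    public class AuthToLocalRuleApplierSketch {

      // Pushes generated auth-to-local rules into every property a descriptor flags via
      // "auth_to_local_properties".  Each entry uses the config-type/property_name format
      // described in the javadoc above; the nested-map shape of "configurations" is an
      // assumption for this example.
      public static void applyAuthToLocalRules(AbstractKerberosDescriptorContainer descriptor,
                                               AuthToLocalBuilder builder,
                                               String realm,
                                               Map<String, Map<String, String>> configurations) {
        Set<String> authToLocalProperties = descriptor.getAuthToLocalProperties();
        if (authToLocalProperties != null) {
          String rules = builder.generate(realm);

          for (String property : authToLocalProperties) {
            // e.g. "core-site/hadoop.security.auth_to_local" ->
            //      type "core-site", name "hadoop.security.auth_to_local"
            String[] parts = property.split("/", 2);
            if (parts.length == 2) {
              Map<String, String> typeProperties = configurations.get(parts[0]);
              if (typeProperties != null) {
                typeProperties.put(parts[1], rules);
              }
            }
          }
        }
      }
    }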
http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/kerberos_common.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/kerberos_common.py b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/kerberos_common.py
index 703be52..d723628 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/kerberos_common.py
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/kerberos_common.py
@@ -395,6 +395,11 @@ class KerberosScript(Script):
 
           principal = get_property_value(item, 'principal')
           if principal is not None:
-            self.put_structured_out({
-              principal.replace("_HOST", params.hostname): keytab_file_path
-            })
+            curr_content = Script.structuredOut
+
+            if "keytabs" not in curr_content:
+              curr_content['keytabs'] = {}
+
+            curr_content['keytabs'][principal.replace("_HOST", params.hostname)] = keytab_file_path
+
+            self.put_structured_out(curr_content)

http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/kerberos.json b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/kerberos.json
index a2a3706..8998b98 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/kerberos.json
@@ -13,6 +13,9 @@
           "name": "/hdfs"
         }
       ],
+      "auth_to_local_properties" : [
+        "oozie-site/oozie.authentication.kerberos.name.rules"
+      ],
       "configurations": [
         {
           "oozie-site": {
@@ -20,7 +23,7 @@
             "oozie.service.AuthorizationService.authorization.enabled": "true",
             "oozie.service.HadoopAccessorService.kerberos.enabled": "true",
             "local.realm": "${realm}",
-            "oozie.authentication.kerberos.name.rules": "_AUTH_TO_LOCAL_RULES"
+            "oozie.authentication.kerberos.name.rules": ""
           }
         }
       ],

http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
index abee322..95833ea 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/TestHeartbeatHandler.java
@@ -2521,6 +2521,7 @@ public class TestHeartbeatHandler {
     executionCommand.setHostLevelParams(hlp);
 
     Map commandparams = new HashMap();
+    commandparams.put(KerberosServerAction.AUTHENTICATED_USER_NAME, "admin");
     commandparams.put(KerberosServerAction.DATA_DIRECTORY, createTestKeytabData().getAbsolutePath());
     executionCommand.setCommandParams(commandparams);
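The kerberos_common.py change above stops overwriting the structured output for every keytab and instead accumulates all principal-to-keytab mappings under a single "keytabs" key. Purely to illustrate the resulting payload shape (the JSON literal and the Gson-based parsing are an example, not code from this commit):

    import java.util.Map;

    import com.google.gson.Gson;
    import com.google.gson.reflect.TypeToken;

    public class KeytabStructuredOutSketch {
      public static void main(String[] args) {
        // Example of the aggregated structured output produced by the script change above
        // (host names and keytab paths are made up for the example):
        String structuredOut =
            "{ \"keytabs\": {"
          + "    \"nn/host1.example.com@EXAMPLE.COM\": \"/etc/security/keytabs/nn.service.keytab\","
          + "    \"dn/host1.example.com@EXAMPLE.COM\": \"/etc/security/keytabs/dn.service.keytab\""
          + "} }";

        Map<String, Map<String, String>> parsed = new Gson().fromJson(
            structuredOut, new TypeToken<Map<String, Map<String, String>>>() { }.getType());

        // principal -> keytab file path, one entry per keytab the agent-side script processed
        for (Map.Entry<String, String> entry : parsed.get("keytabs").entrySet()) {
          System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
      }
    }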
http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 18af955..1d13717 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -3127,9 +3127,9 @@ public class AmbariManagementControllerTest {
     // sch5 to start
     Stage stage1 = null, stage2 = null, stage3 = null;
     for (Stage s : stages) {
-      if (s.getStageId() == 1) { stage1 = s; }
-      if (s.getStageId() == 2) { stage2 = s; }
-      if (s.getStageId() == 3) { stage3 = s; }
+      if (s.getStageId() == 0) { stage1 = s; }
+      if (s.getStageId() == 1) { stage2 = s; }
+      if (s.getStageId() == 2) { stage3 = s; }
     }
 
     Assert.assertEquals(2, stage1.getExecutionCommands(host1).size());

http://git-wip-us.apache.org/repos/asf/ambari/blob/1646be07/ambari-server/src/test/java/org/apache/ambari/server/controller/AuthToLocalBuilderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AuthToLocalBuilderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AuthToLocalBuilderTest.java
index 8b98ea0..d1a2bd1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AuthToLocalBuilderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AuthToLocalBuilderTest.java
@@ -236,4 +236,25 @@ public class AuthToLocalBuilderTest {
         "DEFAULT",
         builder.generate("EXAMPLE.COM"));
   }
+
+  @Test
+  public void testCopy() {
+    AuthToLocalBuilder builder = new AuthToLocalBuilder();
+
+    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
+    builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
+    builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
+    builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
+    builder.addRule("jhs/_HOST@EXAMPLE.COM", "mapred");
+    builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
+    builder.addRule("rs/_HOST@EXAMPLE.COM", "hbase");
+
+    builder.addRule("foobar@EXAMPLE.COM", "hdfs");
+
+    AuthToLocalBuilder copy = builder.copy();
+
+    assertNotSame(builder, copy);
+    assertEquals(copy.generate("EXAMPLE.COM"), builder.generate("EXAMPLE.COM"));
+
+  }
 }
\ No newline at end of file
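The new testCopy() above exercises an AuthToLocalBuilder.copy() method added by this commit; presumably it lets a populated builder be duplicated without sharing state with the original. An illustrative usage sketch (not taken from the commit; the exact rule text returned by generate() is not reproduced here):

    import org.apache.ambari.server.controller.AuthToLocalBuilder;

    public class AuthToLocalBuilderUsageSketch {
      public static void main(String[] args) {
        // AuthToLocalBuilder collects principal -> local-user mappings and renders them as
        // auth-to-local rule text ending with the DEFAULT rule, as the existing tests assert.
        AuthToLocalBuilder builder = new AuthToLocalBuilder();
        builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
        builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");

        // generate(realm) produces the rule text that is injected into every property a
        // descriptor lists under "auth_to_local_properties"
        // (e.g. core-site/hadoop.security.auth_to_local).
        String rules = builder.generate("EXAMPLE.COM");

        // copy() yields an independent builder holding the same accumulated rules, so both
        // render identical text but can be extended separately afterwards.
        AuthToLocalBuilder copy = builder.copy();
        System.out.println(rules.equals(copy.generate("EXAMPLE.COM")));  // expected: true
      }
    }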