ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jonathanhur...@apache.org
Subject [2/3] ambari git commit: AMBARI-19617 - Restarting Some Components During a Suspended Upgrade Fails Due To Missing Upgrade Parameters (jonathanhurley)
Date Thu, 19 Jan 2017 21:17:26 GMT
http://git-wip-us.apache.org/repos/asf/ambari/blob/49185423/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
index 1d51b0d..71fb5d9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContext.java
@@ -17,7 +17,10 @@
  */
 package org.apache.ambari.server.state;
 
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.VERSION;
+
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -27,7 +30,11 @@ import java.util.Set;
 import org.apache.ambari.annotations.Experimental;
 import org.apache.ambari.annotations.ExperimentalFeature;
 import org.apache.ambari.server.actionmanager.HostRoleCommandFactory;
+import org.apache.ambari.server.agent.ExecutionCommand.KeyNames;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.orm.entities.UpgradeEntity;
 import org.apache.ambari.server.stack.MasterHostResolver;
 import org.apache.ambari.server.stageplanner.RoleGraphFactory;
 import org.apache.ambari.server.state.stack.UpgradePack;
@@ -36,14 +43,42 @@ import org.apache.ambari.server.state.stack.upgrade.Grouping;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeScope;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
 import com.google.inject.Inject;
 import com.google.inject.assistedinject.Assisted;
+import com.google.inject.assistedinject.AssistedInject;
 
 /**
  * Used to hold various helper objects required to process an upgrade pack.
  */
 public class UpgradeContext {
 
+  public static final String COMMAND_PARAM_VERSION = VERSION;
+  public static final String COMMAND_PARAM_CLUSTER_NAME = "clusterName";
+  public static final String COMMAND_PARAM_DIRECTION = "upgrade_direction";
+  public static final String COMMAND_PARAM_UPGRADE_PACK = "upgrade_pack";
+  public static final String COMMAND_PARAM_REQUEST_ID = "request_id";
+
+  public static final String COMMAND_PARAM_UPGRADE_TYPE = "upgrade_type";
+  public static final String COMMAND_PARAM_TASKS = "tasks";
+  public static final String COMMAND_PARAM_STRUCT_OUT = "structured_out";
+  public static final String COMMAND_DOWNGRADE_FROM_VERSION = "downgrade_from_version";
+
+  /**
+   * The original "current" stack of the cluster before the upgrade started.
+   * This is the same regardless of whether the current direction is
+   * {@link Direction#UPGRADE} or {@link Direction#DOWNGRADE}.
+   */
+  public static final String COMMAND_PARAM_ORIGINAL_STACK = "original_stack";
+
+  /**
+   * The target upgrade stack before the upgrade started. This is the same
+   * regardless of whether the current direction is {@link Direction#UPGRADE} or
+   * {@link Direction#DOWNGRADE}.
+   */
+  public static final String COMMAND_PARAM_TARGET_STACK = "target_stack";
+
   /**
    * The cluster that the upgrade is for.
    */
@@ -72,7 +107,7 @@ public class UpgradeContext {
   /**
    * The version being upgrade to or downgraded to.
    */
-  private String m_version;
+  private final String m_version;
 
   /**
    * The original "current" stack of the cluster before the upgrade started.
@@ -98,9 +133,9 @@ public class UpgradeContext {
 
   private MasterHostResolver m_resolver;
   private AmbariMetaInfo m_metaInfo;
-  private List<ServiceComponentHost> m_unhealthy = new ArrayList<ServiceComponentHost>();
-  private Map<String, String> m_serviceNames = new HashMap<String, String>();
-  private Map<String, String> m_componentNames = new HashMap<String, String>();
+  private List<ServiceComponentHost> m_unhealthy = new ArrayList<>();
+  private Map<String, String> m_serviceNames = new HashMap<>();
+  private Map<String, String> m_componentNames = new HashMap<>();
   private String m_downgradeFromVersion = null;
 
   /**
@@ -141,6 +176,17 @@ public class UpgradeContext {
   private RoleGraphFactory m_roleGraphFactory;
 
   /**
+   * Used to lookup the repository version given a stack name and version.
+   */
+  final private RepositoryVersionDAO m_repoVersionDAO;
+
+  /**
+   * Used for serializing the upgrade type.
+   */
+  @Inject
+  private Gson m_gson;
+
+  /**
    * Constructor.
    *
    * @param cluster
@@ -151,14 +197,57 @@ public class UpgradeContext {
    *          the direction for the upgrade
    * @param upgradeRequestMap
    *          the original map of paramters used to create the upgrade
+   *
+   * @param repoVersionDAO
+   *          the repository version DAO.
    */
-  @Inject
+  @AssistedInject
   public UpgradeContext(@Assisted Cluster cluster, @Assisted UpgradeType type,
-      @Assisted Direction direction, @Assisted Map<String, Object> upgradeRequestMap) {
+      @Assisted Direction direction, @Assisted String version,
+      @Assisted Map<String, Object> upgradeRequestMap,
+      RepositoryVersionDAO repoVersionDAO) {
+    m_repoVersionDAO = repoVersionDAO;
     m_cluster = cluster;
     m_type = type;
     m_direction = direction;
+    m_version = version;
     m_upgradeRequestMap = upgradeRequestMap;
+
+    // sets the original/target stacks - requires direction and cluster
+    setSourceAndTargetStacks();
+  }
+
+  /**
+   * Constructor.
+   *
+   * @param cluster
+   *          the cluster that the upgrade is for
+   * @param upgradeEntity
+   *          the upgrade entity
+   * @param repoVersionDAO
+   *          the repository version DAO.
+   */
+  @AssistedInject
+  public UpgradeContext(@Assisted Cluster cluster, @Assisted UpgradeEntity upgradeEntity,
+      RepositoryVersionDAO repoVersionDAO) {
+    m_repoVersionDAO = repoVersionDAO;
+
+    m_cluster = cluster;
+    m_type = upgradeEntity.getUpgradeType();
+    m_direction = upgradeEntity.getDirection();
+
+    m_version = upgradeEntity.getToVersion();
+
+    // sets the original/target stacks - requires direction and cluster
+    setSourceAndTargetStacks();
+
+    if (m_direction == Direction.DOWNGRADE) {
+      m_downgradeFromVersion = upgradeEntity.getFromVersion();
+    }
+
+    // since this constructor is initialized from an entity, then this map is
+    // not present
+    m_upgradeRequestMap = Collections.emptyMap();
   }
 
   /**
@@ -166,24 +255,38 @@ public class UpgradeContext {
    * stack ID based on the already-set {@link UpgradeType} and
    * {@link Direction}.
    *
-   * @param sourceStackId
-   *          the original "current" stack of the cluster before the upgrade
-   *          started. This is the same regardless of whether the current
-   *          direction is {@link Direction#UPGRADE} or
-   *          {@link Direction#DOWNGRADE} (not {@code null}).
-   * @param targetStackId
-   *          The target upgrade stack before the upgrade started. This is the
-   *          same regardless of whether the current direction is
-   *          {@link Direction#UPGRADE} or {@link Direction#DOWNGRADE} (not
-   *          {@code null}).
-   *
    * @see #getEffectiveStackId()
    */
-  public void setSourceAndTargetStacks(StackId sourceStackId, StackId targetStackId) {
+  private void setSourceAndTargetStacks() {
+    StackId sourceStackId = null;
+
+    // target stack will not always be what it is today - tagging as experimental
+    @Experimental(feature = ExperimentalFeature.PATCH_UPGRADES)
+    StackId targetStackId = null;
+
+    switch (m_direction) {
+      case UPGRADE:
+        sourceStackId = m_cluster.getCurrentStackVersion();
+
+        RepositoryVersionEntity targetRepositoryVersion = m_repoVersionDAO.findByStackNameAndVersion(
+            sourceStackId.getStackName(), m_version);
+
+        // !!! TODO check the repo_version for patch-ness and restrict the
+        // context to those services that require it. Consult the version
+        // definition and add the service names to supportedServices
+        targetStackId = targetRepositoryVersion.getStackId();
+        break;
+      case DOWNGRADE:
+        sourceStackId = m_cluster.getCurrentStackVersion();
+        targetStackId = m_cluster.getDesiredStackVersion();
+        break;
+    }
+
     m_originalStackId = sourceStackId;
 
     switch (m_type) {
       case ROLLING:
+      case HOST_ORDERED:
         m_effectiveStackId = targetStackId;
         break;
       case NON_ROLLING:
@@ -244,14 +347,6 @@ public class UpgradeContext {
   }
 
   /**
-   * @param version
-   *          the target version to upgrade to
-   */
-  public void setVersion(String version) {
-    m_version = version;
-  }
-
-  /**
    * @return the direction of the upgrade
    */
   public Direction getDirection() {
@@ -521,4 +616,48 @@ public class UpgradeContext {
   public HostRoleCommandFactory getHostRoleCommandFactory() {
     return m_hrcFactory;
   }
+
+  /**
+   * Gets a map initialized with parameters required for upgrades to work. The
+   * following properties are already set:
+   * <ul>
+   * <li>{@link #COMMAND_PARAM_CLUSTER_NAME}
+   * <li>{@link #COMMAND_PARAM_VERSION}
+   * <li>{@link #COMMAND_PARAM_DIRECTION}
+   * <li>{@link #COMMAND_PARAM_ORIGINAL_STACK}
+   * <li>{@link #COMMAND_PARAM_TARGET_STACK}
+   * <li>{@link #COMMAND_DOWNGRADE_FROM_VERSION}
+   * <li>{@link #COMMAND_PARAM_UPGRADE_TYPE}
+   * <li>{@link KeyNames#REFRESH_CONFIG_TAGS_BEFORE_EXECUTION} - necessary in
+   * order to have the commands contain the correct configurations. Otherwise,
+   * they will contain the configurations that were available at the time the
+   * command was created. For upgrades, this is problematic since the commands
+   * are all created ahead of time, but the upgrade may change configs as part
+   * of the upgrade pack.</li>
+   * </ul>
+   *
+   * @return the initialized parameter map.
+   */
+  public Map<String, String> getInitializedCommandParameters() {
+    Map<String, String> parameters = new HashMap<>();
+
+    parameters.put(COMMAND_PARAM_CLUSTER_NAME, m_cluster.getClusterName());
+    parameters.put(COMMAND_PARAM_VERSION, getVersion());
+    parameters.put(COMMAND_PARAM_DIRECTION, getDirection().name().toLowerCase());
+    parameters.put(COMMAND_PARAM_ORIGINAL_STACK, getOriginalStackId().getStackId());
+    parameters.put(COMMAND_PARAM_TARGET_STACK, getTargetStackId().getStackId());
+    parameters.put(COMMAND_DOWNGRADE_FROM_VERSION, getDowngradeFromVersion());
+
+    if (null != getType()) {
+      // use the serialized attributes of the enum to convert it to a string,
+      // but first we must convert it into an element so that we don't get a
+      // quoted string - using toString() actually returns a quoted stirng which
+      // is bad
+      JsonElement json = m_gson.toJsonTree(getType());
+      parameters.put(COMMAND_PARAM_UPGRADE_TYPE, json.getAsString());
+    }
+
+    parameters.put(KeyNames.REFRESH_CONFIG_TAGS_BEFORE_EXECUTION, "true");
+    return parameters;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/49185423/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContextFactory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContextFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContextFactory.java
index 4b988e8..4f15ee2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContextFactory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/UpgradeContextFactory.java
@@ -19,6 +19,7 @@ package org.apache.ambari.server.state;
 
 import java.util.Map;
 
+import org.apache.ambari.server.orm.entities.UpgradeEntity;
 import org.apache.ambari.server.state.stack.upgrade.Direction;
 import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
 
@@ -32,16 +33,32 @@ public interface UpgradeContextFactory {
    * Creates an {@link UpgradeContext} which is injected with dependencies.
    *
    * @param cluster
-   *          the cluster that the upgrade is for
+   *          the cluster that the upgrade is for (not {@code null}).
    * @param type
-   *          the type of upgrade, either rolling or non_rolling
+   *          the type of upgrade, either rolling or non_rolling (not
+   *          {@code null}).
    * @param direction
    *          the direction for the upgrade
+   * @param version
+   *          the version being upgraded to or downgraded from (not
+   *          {@code null}).
    * @param upgradeRequestMap
-   *          the original map of paramters used to create the upgrade
+   *          the original map of parameters used to create the upgrade (not
+   *          {@code null}).
    *
    * @return an initialized {@link UpgradeContext}.
    */
   UpgradeContext create(Cluster cluster, UpgradeType type, Direction direction,
-      Map<String, Object> upgradeRequestMap);
+      String version, Map<String, Object> upgradeRequestMap);
+
+  /**
+   * Creates an {@link UpgradeContext} which is injected with dependencies.
+   *
+   * @param cluster
+   *          the cluster that the upgrade is for (not {@code null}).
+   * @param upgradeEntity
+   *          the upgrade entity (not {@code null}).
+   * @return an initialized {@link UpgradeContext}.
+   */
+  UpgradeContext create(Cluster cluster, UpgradeEntity upgradeEntity);
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/49185423/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index f99ddf7..5dfa8ec 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -52,6 +52,7 @@ import org.apache.ambari.server.ServiceComponentHostNotFoundException;
 import org.apache.ambari.server.ServiceComponentNotFoundException;
 import org.apache.ambari.server.ServiceNotFoundException;
 import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.agent.ExecutionCommand.KeyNames;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariSessionManager;
@@ -136,6 +137,8 @@ import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.UpgradeContext;
+import org.apache.ambari.server.state.UpgradeContextFactory;
 import org.apache.ambari.server.state.configgroup.ConfigGroup;
 import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
 import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
@@ -316,6 +319,15 @@ public class ClusterImpl implements Cluster {
   private RoleCommandOrderProvider roleCommandOrderProvider;
 
   /**
+   * Used to create instances of {@link UpgradeContext} with injected
+   * dependencies. The {@link UpgradeContext} is used to populate the command
+   * with upgrade information on the command/role maps if the upgrade is
+   * suspended.
+   */
+  @Inject
+  private UpgradeContextFactory upgradeContextFactory;
+
+  /**
    * A simple cache for looking up {@code cluster-env} properties for a cluster.
    * This map is changed whenever {{cluster-env}} is changed and we receive a
    * {@link ClusterConfigChangedEvent}.
@@ -511,7 +523,7 @@ public class ClusterImpl implements Cluster {
   @Override
   public Map<Long, ConfigGroup> getConfigGroupsByHostname(String hostname)
     throws AmbariException {
-    Map<Long, ConfigGroup> configGroups = new HashMap<Long, ConfigGroup>();
+    Map<Long, ConfigGroup> configGroups = new HashMap<>();
 
     for (Entry<Long, ConfigGroup> groupEntry : clusterConfigGroups.entrySet()) {
       Long id = groupEntry.getKey();
@@ -788,15 +800,15 @@ public class ClusterImpl implements Cluster {
   public List<ServiceComponentHost> getServiceComponentHosts(String hostname) {
     List<ServiceComponentHost> serviceComponentHosts = serviceComponentHostsByHost.get(hostname);
     if (null != serviceComponentHosts) {
-      return new CopyOnWriteArrayList<ServiceComponentHost>(serviceComponentHosts);
+      return new CopyOnWriteArrayList<>(serviceComponentHosts);
     }
 
-    return new ArrayList<ServiceComponentHost>();
+    return new ArrayList<>();
   }
 
   @Override
   public Map<String, Set<String>> getServiceComponentHostMap(Set<String> hostNames, Set<String> serviceNames) {
-    Map<String, Set<String>> componentHostMap = new HashMap<String, Set<String>>();
+    Map<String, Set<String>> componentHostMap = new HashMap<>();
 
     Collection<Host> hosts = getHosts();
 
@@ -816,7 +828,7 @@ public class ClusterImpl implements Cluster {
                 Set<String> componentHosts = componentHostMap.get(component);
 
                 if (componentHosts == null) {
-                  componentHosts = new HashSet<String>();
+                  componentHosts = new HashSet<>();
                   componentHostMap.put(component, componentHosts);
                 }
 
@@ -833,7 +845,7 @@ public class ClusterImpl implements Cluster {
 
   @Override
   public List<ServiceComponentHost> getServiceComponentHosts(String serviceName, String componentName) {
-    ArrayList<ServiceComponentHost> foundItems = new ArrayList<ServiceComponentHost>();
+    ArrayList<ServiceComponentHost> foundItems = new ArrayList<>();
 
     ConcurrentMap<String, ConcurrentMap<String, ServiceComponentHost>> foundByService = serviceComponentHosts.get(
         serviceName);
@@ -884,7 +896,7 @@ public class ClusterImpl implements Cluster {
 
   @Override
   public Map<String, Service> getServices() {
-    return new HashMap<String, Service>(services);
+    return new HashMap<>(services);
   }
 
   @Override
@@ -1138,7 +1150,7 @@ public class ClusterImpl implements Cluster {
       StackEntity repoVersionStackEntity = currentClusterVersion.getRepositoryVersion().getStack();
       StackId repoVersionStackId = new StackId(repoVersionStackEntity);
 
-      Map<String, HostVersionEntity> existingHostToHostVersionEntity = new HashMap<String, HostVersionEntity>();
+      Map<String, HostVersionEntity> existingHostToHostVersionEntity = new HashMap<>();
       List<HostVersionEntity> existingHostVersionEntities = hostVersionDAO.findByClusterStackAndVersion(
         getClusterName(), repoVersionStackId,
         currentClusterVersion.getRepositoryVersion().getVersion());
@@ -1206,8 +1218,8 @@ public class ClusterImpl implements Cluster {
     }
 
     Map<String, Host> hosts = clusters.getHostsForCluster(getClusterName());
-    Set<String> existingHostsWithClusterStackAndVersion = new HashSet<String>();
-    HashMap<String, HostVersionEntity> existingHostStackVersions = new HashMap<String, HostVersionEntity>();
+    Set<String> existingHostsWithClusterStackAndVersion = new HashSet<>();
+    HashMap<String, HostVersionEntity> existingHostStackVersions = new HashMap<>();
 
     clusterGlobalLock.writeLock().lock();
     try {
@@ -1417,15 +1429,15 @@ public class ClusterImpl implements Cluster {
       }
 
       // Part 2, check for transitions.
-      Set<String> hostsWithoutHostVersion = new HashSet<String>();
-      Map<RepositoryVersionState, Set<String>> stateToHosts = new HashMap<RepositoryVersionState, Set<String>>();
+      Set<String> hostsWithoutHostVersion = new HashSet<>();
+      Map<RepositoryVersionState, Set<String>> stateToHosts = new HashMap<>();
 
       //hack until better hostversion integration into in-memory cluster structure
 
       List<HostVersionEntity> hostVersionEntities =
               hostVersionDAO.findByClusterStackAndVersion(getClusterName(), stackId, version);
 
-      Set<String> hostsWithState = new HashSet<String>();
+      Set<String> hostsWithState = new HashSet<>();
       Set<String> hostsInMaintenanceState = new HashSet<>();
       for (HostVersionEntity hostVersionEntity : hostVersionEntities) {
         String hostname = hostVersionEntity.getHostEntity().getHostName();
@@ -1440,7 +1452,7 @@ public class ClusterImpl implements Cluster {
         if (stateToHosts.containsKey(hostState)) {
           stateToHosts.get(hostState).add(hostname);
         } else {
-          Set<String> hostsInState = new HashSet<String>();
+          Set<String> hostsInState = new HashSet<>();
           hostsInState.add(hostname);
           stateToHosts.put(hostState, hostsInState);
         }
@@ -1655,7 +1667,7 @@ public class ClusterImpl implements Cluster {
   @Transactional
   public void transitionClusterVersion(StackId stackId, String version,
       RepositoryVersionState state) throws AmbariException {
-    Set<RepositoryVersionState> allowedStates = new HashSet<RepositoryVersionState>();
+    Set<RepositoryVersionState> allowedStates = new HashSet<>();
     clusterGlobalLock.writeLock().lock();
     try {
       ClusterEntity clusterEntity = getClusterEntity();
@@ -1924,7 +1936,7 @@ public class ClusterImpl implements Cluster {
   public Collection<Config> getAllConfigs() {
     clusterGlobalLock.readLock().lock();
     try {
-      List<Config> list = new ArrayList<Config>();
+      List<Config> list = new ArrayList<>();
       for (Entry<String, ConcurrentMap<String, Config>> entry : allConfigs.entrySet()) {
         for (Config config : entry.getValue().values()) {
           list.add(config);
@@ -2211,14 +2223,14 @@ public class ClusterImpl implements Cluster {
       }
 
       // TODO AMBARI-10679, need efficient caching from hostId to hostName...
-      Map<Long, String> hostIdToName = new HashMap<Long, String>();
+      Map<Long, String> hostIdToName = new HashMap<>();
 
       if (!map.isEmpty()) {
         Map<String, List<HostConfigMapping>> hostMappingsByType =
           hostConfigMappingDAO.findSelectedHostsByTypes(clusterId, types);
 
         for (Entry<String, Set<DesiredConfig>> entry : map.entrySet()) {
-          List<DesiredConfig.HostOverride> hostOverrides = new ArrayList<DesiredConfig.HostOverride>();
+          List<DesiredConfig.HostOverride> hostOverrides = new ArrayList<>();
           for (HostConfigMapping mappingEntity : hostMappingsByType.get(entry.getKey())) {
 
             if (!hostIdToName.containsKey(mappingEntity.getHostId())) {
@@ -2258,7 +2270,7 @@ public class ClusterImpl implements Cluster {
       if (configGroup != null) {
         serviceConfigEntity.setGroupId(configGroup.getId());
         Collection<Config> configs = configGroup.getConfigurations().values();
-        List<ClusterConfigEntity> configEntities = new ArrayList<ClusterConfigEntity>(
+        List<ClusterConfigEntity> configEntities = new ArrayList<>(
             configs.size());
         for (Config config : configs) {
           configEntities.add(
@@ -2284,7 +2296,7 @@ public class ClusterImpl implements Cluster {
 
       serviceConfigDAO.create(serviceConfigEntity);
       if (configGroup != null) {
-        serviceConfigEntity.setHostIds(new ArrayList<Long>(configGroup.getHosts().keySet()));
+        serviceConfigEntity.setHostIds(new ArrayList<>(configGroup.getHosts().keySet()));
         serviceConfigEntity = serviceConfigDAO.merge(serviceConfigEntity);
       }
     } finally {
@@ -2360,7 +2372,7 @@ public class ClusterImpl implements Cluster {
   public Map<String, Collection<ServiceConfigVersionResponse>> getActiveServiceConfigVersions() {
     clusterGlobalLock.readLock().lock();
     try {
-      Map<String, Collection<ServiceConfigVersionResponse>> map = new HashMap<String, Collection<ServiceConfigVersionResponse>>();
+      Map<String, Collection<ServiceConfigVersionResponse>> map = new HashMap<>();
 
       Set<ServiceConfigVersionResponse> responses = getActiveServiceConfigVersionSet();
       for (ServiceConfigVersionResponse response : responses) {
@@ -2380,7 +2392,7 @@ public class ClusterImpl implements Cluster {
   public List<ServiceConfigVersionResponse> getServiceConfigVersions() {
     clusterGlobalLock.readLock().lock();
     try {
-      List<ServiceConfigVersionResponse> serviceConfigVersionResponses = new ArrayList<ServiceConfigVersionResponse>();
+      List<ServiceConfigVersionResponse> serviceConfigVersionResponses = new ArrayList<>();
 
       List<ServiceConfigEntity> serviceConfigs = serviceConfigDAO.getServiceConfigs(getClusterId());
 
@@ -2436,7 +2448,7 @@ public class ClusterImpl implements Cluster {
   }
 
   private Set<ServiceConfigVersionResponse> getActiveServiceConfigVersionSet() {
-    Set<ServiceConfigVersionResponse> responses = new HashSet<ServiceConfigVersionResponse>();
+    Set<ServiceConfigVersionResponse> responses = new HashSet<>();
     List<ServiceConfigEntity> activeServiceConfigVersions = getActiveServiceConfigVersionEntities();
 
     for (ServiceConfigEntity lastServiceConfig : activeServiceConfigVersions) {
@@ -2449,7 +2461,7 @@ public class ClusterImpl implements Cluster {
 
   private List<ServiceConfigEntity> getActiveServiceConfigVersionEntities() {
 
-    List<ServiceConfigEntity> activeServiceConfigVersions = new ArrayList<ServiceConfigEntity>();
+    List<ServiceConfigEntity> activeServiceConfigVersions = new ArrayList<>();
     //for services
     activeServiceConfigVersions.addAll(serviceConfigDAO.getLastServiceConfigs(getClusterId()));
     //for config groups
@@ -2465,8 +2477,8 @@ public class ClusterImpl implements Cluster {
   public List<ServiceConfigVersionResponse> getActiveServiceConfigVersionResponse(String serviceName) {
     clusterGlobalLock.readLock().lock();
     try {
-      List<ServiceConfigEntity> activeServiceConfigVersionEntities = new ArrayList<ServiceConfigEntity>();
-      List<ServiceConfigVersionResponse> activeServiceConfigVersionResponses = new ArrayList<ServiceConfigVersionResponse>();
+      List<ServiceConfigEntity> activeServiceConfigVersionEntities = new ArrayList<>();
+      List<ServiceConfigVersionResponse> activeServiceConfigVersionResponses = new ArrayList<>();
       activeServiceConfigVersionEntities.addAll(serviceConfigDAO.getLastServiceConfigsForService(getClusterId(), serviceName));
       for (ServiceConfigEntity serviceConfigEntity : activeServiceConfigVersionEntities) {
         ServiceConfigVersionResponse serviceConfigVersionResponse = getServiceConfigVersionResponseWithConfig(convertToServiceConfigVersionResponse(serviceConfigEntity), serviceConfigEntity);
@@ -2560,14 +2572,14 @@ public class ClusterImpl implements Cluster {
       Long configGroupId = serviceConfigEntity.getGroupId();
       ConfigGroup configGroup = clusterConfigGroups.get(configGroupId);
       if (configGroup != null) {
-        Map<String, Config> groupDesiredConfigs = new HashMap<String, Config>();
+        Map<String, Config> groupDesiredConfigs = new HashMap<>();
         for (ClusterConfigEntity entity : serviceConfigEntity.getClusterConfigEntities()) {
           Config config = allConfigs.get(entity.getType()).get(entity.getTag());
           groupDesiredConfigs.put(config.getType(), config);
         }
         configGroup.setConfigurations(groupDesiredConfigs);
 
-        Map<Long, Host> groupDesiredHosts = new HashMap<Long, Host>();
+        Map<Long, Host> groupDesiredHosts = new HashMap<>();
         if (serviceConfigEntity.getHostIds() != null) {
           for (Long hostId : serviceConfigEntity.getHostIds()) {
             Host host = clusters.getHostById(hostId);
@@ -2716,7 +2728,7 @@ public class ClusterImpl implements Cluster {
     Set<HostConfigMapping> mappingEntities =
         hostConfigMappingDAO.findSelectedByHosts(hostIds);
 
-    Map<Long, Map<String, DesiredConfig>> desiredConfigsByHost = new HashMap<Long, Map<String, DesiredConfig>>();
+    Map<Long, Map<String, DesiredConfig>> desiredConfigsByHost = new HashMap<>();
 
     for (Long hostId : hostIds) {
       desiredConfigsByHost.put(hostId, new HashMap<String, DesiredConfig>());
@@ -2782,7 +2794,7 @@ public class ClusterImpl implements Cluster {
   @Transactional
   protected Map<ServiceComponentHostEvent, String> processServiceComponentHostEventsInSingleTransaction(
       ListMultimap<String, ServiceComponentHostEvent> eventMap) {
-    Map<ServiceComponentHostEvent, String> failedEvents = new HashMap<ServiceComponentHostEvent, String>();
+    Map<ServiceComponentHostEvent, String> failedEvents = new HashMap<>();
 
     for (Entry<String, ServiceComponentHostEvent> entry : eventMap.entries()) {
       String serviceName = entry.getKey();
@@ -3016,7 +3028,7 @@ public class ClusterImpl implements Cluster {
   @Override
   public void addSessionAttributes(Map<String, Object> attributes) {
     if (attributes != null && !attributes.isEmpty()) {
-      Map<String, Object>  sessionAttributes = new HashMap<String, Object>(getSessionAttributes());
+      Map<String, Object>  sessionAttributes = new HashMap<>(getSessionAttributes());
       sessionAttributes.putAll(attributes);
       setSessionAttributes(attributes);
     }
@@ -3025,7 +3037,7 @@ public class ClusterImpl implements Cluster {
   @Override
   public void setSessionAttribute(String key, Object value){
     if (key != null && !key.isEmpty()) {
-      Map<String, Object> sessionAttributes = new HashMap<String, Object>(getSessionAttributes());
+      Map<String, Object> sessionAttributes = new HashMap<>(getSessionAttributes());
       sessionAttributes.put(key, value);
       setSessionAttributes(sessionAttributes);
     }
@@ -3034,7 +3046,7 @@ public class ClusterImpl implements Cluster {
   @Override
   public void removeSessionAttribute(String key) {
     if (key != null && !key.isEmpty()) {
-      Map<String, Object> sessionAttributes = new HashMap<String, Object>(getSessionAttributes());
+      Map<String, Object> sessionAttributes = new HashMap<>(getSessionAttributes());
       sessionAttributes.remove(key);
       setSessionAttributes(sessionAttributes);
     }
@@ -3174,7 +3186,7 @@ public class ClusterImpl implements Cluster {
     List<ClusterConfigMappingEntity> clusterConfigMappingsForStack = clusterDAO.getClusterConfigMappingsByStack(
         clusterId, stackId);
 
-    Map<String, ClusterConfigMappingEntity> latestMappingsByType = new HashMap<String, ClusterConfigMappingEntity>();
+    Map<String, ClusterConfigMappingEntity> latestMappingsByType = new HashMap<>();
     for (ClusterConfigMappingEntity mapping : clusterConfigMappingsForStack) {
       String type = mapping.getType();
 
@@ -3228,7 +3240,7 @@ public class ClusterImpl implements Cluster {
     // this will keep track of cluster config mappings that need removal
     // since there is no relationship between configs and their mappings, we
     // have to do it manually
-    List<ClusterConfigEntity> removedClusterConfigs = new ArrayList<ClusterConfigEntity>(50);
+    List<ClusterConfigEntity> removedClusterConfigs = new ArrayList<>(50);
     Collection<ClusterConfigEntity> clusterConfigEntities = clusterEntity.getClusterConfigEntities();
 
     List<ServiceConfigEntity> serviceConfigs = serviceConfigDAO.getAllServiceConfigsForClusterAndStack(
@@ -3490,4 +3502,28 @@ public class ClusterImpl implements Cluster {
   public RoleCommandOrder getRoleCommandOrder() {
     return roleCommandOrderProvider.getRoleCommandOrder(this);
   }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public void addSuspendedUpgradeParameters(Map<String, String> commandParams,
+      Map<String, String> roleParams) {
+
+    // build some command params from the upgrade, including direction,
+    // type, version, etc
+    UpgradeEntity suspendedUpgrade = getUpgradeInProgress();
+    if( null == suspendedUpgrade ){
+      LOG.warn(
+          "An upgrade is not currently suspended. The command and role parameters will not be modified.");
+
+      return;
+    }
+
+    UpgradeContext upgradeContext = upgradeContextFactory.create(this, suspendedUpgrade);
+    commandParams.putAll(upgradeContext.getInitializedCommandParameters());
+
+    // suspended goes in role params
+    roleParams.put(KeyNames.UPGRADE_SUSPENDED, Boolean.TRUE.toString().toLowerCase());
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/49185423/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
index 17b1e27..8ae192b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
@@ -44,6 +44,7 @@ import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
 import org.apache.ambari.server.security.SecurityHelper;
 import org.apache.ambari.server.security.SecurityHelperImpl;
 import org.apache.ambari.server.stack.StackManagerFactory;
+import org.apache.ambari.server.stageplanner.RoleGraphFactory;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
@@ -58,6 +59,7 @@ import org.apache.ambari.server.state.ServiceComponentHostFactory;
 import org.apache.ambari.server.state.ServiceComponentImpl;
 import org.apache.ambari.server.state.ServiceFactory;
 import org.apache.ambari.server.state.ServiceImpl;
+import org.apache.ambari.server.state.UpgradeContextFactory;
 import org.apache.ambari.server.state.cluster.ClusterFactory;
 import org.apache.ambari.server.state.cluster.ClusterImpl;
 import org.apache.ambari.server.state.configgroup.ConfigGroup;
@@ -322,6 +324,8 @@ public class AgentResourceTest extends RandomPortJerseyTest {
     }
 
     private void installDependencies() {
+      install(new FactoryModuleBuilder().build(UpgradeContextFactory.class));
+      install(new FactoryModuleBuilder().build(RoleGraphFactory.class));
       install(new FactoryModuleBuilder().implement(
           Cluster.class, ClusterImpl.class).build(ClusterFactory.class));
       install(new FactoryModuleBuilder().implement(

http://git-wip-us.apache.org/repos/asf/ambari/blob/49185423/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
index 29bb35e..d123401 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/KerberosHelperTest.java
@@ -92,6 +92,7 @@ import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentHost;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.UpgradeContextFactory;
 import org.apache.ambari.server.state.cluster.ClusterFactory;
 import org.apache.ambari.server.state.host.HostFactory;
 import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
@@ -220,6 +221,9 @@ public class KerberosHelperTest extends EasyMockSupport {
 
       @Override
       protected void configure() {
+        install(new FactoryModuleBuilder().build(UpgradeContextFactory.class));
+        install(new FactoryModuleBuilder().build(RoleGraphFactory.class));
+
         bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
         bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
         bind(ClusterFactory.class).toInstance(createNiceMock(ClusterFactory.class));
@@ -233,7 +237,6 @@ public class KerberosHelperTest extends EasyMockSupport {
         bind(RequestFactory.class).toInstance(createNiceMock(RequestFactory.class));
         bind(StageFactory.class).toInstance(createNiceMock(StageFactory.class));
         bind(Clusters.class).toInstance(createNiceMock(Clusters.class));
-        install(new FactoryModuleBuilder().build(RoleGraphFactory.class));
         bind(ConfigHelper.class).toInstance(createNiceMock(ConfigHelper.class));
         bind(KerberosOperationHandlerFactory.class).toInstance(kerberosOperationHandlerFactory);
         bind(ClusterController.class).toInstance(clusterController);
@@ -1582,7 +1585,7 @@ public class KerberosHelperTest extends EasyMockSupport {
       hostInvalid = null;
     }
 
-    Map<String, ServiceComponentHost> map = new HashMap<String, ServiceComponentHost>();
+    Map<String, ServiceComponentHost> map = new HashMap<>();
     final ServiceComponent serviceComponentKerberosClient = createNiceMock(ServiceComponent.class);
     map.put("host1", schKerberosClient);
     expect(serviceComponentKerberosClient.getName()).andReturn(Role.KERBEROS_CLIENT.name()).anyTimes();
@@ -1921,7 +1924,7 @@ public class KerberosHelperTest extends EasyMockSupport {
         identityDescriptor2
     )).times(1);
     expect(serviceDescriptor1.getComponent("COMPONENT1")).andReturn(componentDescriptor1).times(1);
-    expect(serviceDescriptor1.getAuthToLocalProperties()).andReturn(new HashSet<String>(Arrays.asList(
+    expect(serviceDescriptor1.getAuthToLocalProperties()).andReturn(new HashSet<>(Arrays.asList(
         "default",
         "explicit_multiple_lines|new_lines",
         "explicit_multiple_lines_escaped|new_lines_escaped",
@@ -1940,7 +1943,7 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     Map<String, Set<String>> installedServices = Collections.singletonMap("SERVICE1", Collections.singleton("COMPONENT1"));
 
-    Map<String, Map<String, String>> kerberosConfigurations = new HashMap<String, Map<String, String>>();
+    Map<String, Map<String, String>> kerberosConfigurations = new HashMap<>();
 
     replayAll();
 
@@ -2190,7 +2193,7 @@ public class KerberosHelperTest extends EasyMockSupport {
         Collections.<KerberosIdentityDescriptor>emptyList());
     expect(serviceDescriptor3.getComponent("COMPONENT3A")).andReturn(componentDescriptor3a).times(4);
 
-    final Map<String, String> kerberosDescriptorProperties = new HashMap<String, String>();
+    final Map<String, String> kerberosDescriptorProperties = new HashMap<>();
     kerberosDescriptorProperties.put("realm", "${kerberos-env/realm}");
 
     final KerberosDescriptor kerberosDescriptor = createMock(KerberosDescriptor.class);
@@ -2217,7 +2220,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     RecommendationResponse.BlueprintConfigurations service1SiteRecommendation = createNiceMock(RecommendationResponse.BlueprintConfigurations.class);
     expect(service1SiteRecommendation.getProperties()).andReturn(Collections.singletonMap("component1b.property", "replaced value"));
 
-    Map<String, RecommendationResponse.BlueprintConfigurations> configurations = new HashMap<String, RecommendationResponse.BlueprintConfigurations>();
+    Map<String, RecommendationResponse.BlueprintConfigurations> configurations = new HashMap<>();
     configurations.put("core-site", coreSiteRecommendation);
     configurations.put("new-type", newTypeRecommendation);
     configurations.put("type1", type1Recommendation);
@@ -2284,16 +2287,16 @@ public class KerberosHelperTest extends EasyMockSupport {
           }
         });
 
-    Map<String, Service> services = new HashMap<String, Service>();
+    Map<String, Service> services = new HashMap<>();
     services.put("SERVICE1", service1);
     services.put("SERVICE2", service2);
     services.put("SERVICE3", service3);
 
-    Map<String, Set<String>> serviceComponentHostMap = new HashMap<String, Set<String>>();
+    Map<String, Set<String>> serviceComponentHostMap = new HashMap<>();
     serviceComponentHostMap.put("COMPONENT1A", Collections.singleton("hostA"));
-    serviceComponentHostMap.put("COMPONENT1B", new HashSet<String>(Arrays.asList("hostB", "hostC")));
+    serviceComponentHostMap.put("COMPONENT1B", new HashSet<>(Arrays.asList("hostB", "hostC")));
     serviceComponentHostMap.put("COMPONENT2A", Collections.singleton("hostA"));
-    serviceComponentHostMap.put("COMPONENT2B", new HashSet<String>(Arrays.asList("hostB", "hostC")));
+    serviceComponentHostMap.put("COMPONENT2B", new HashSet<>(Arrays.asList("hostB", "hostC")));
     serviceComponentHostMap.put("COMPONEN3A", Collections.singleton("hostA"));
 
     final Cluster cluster = createMockCluster("c1", hosts, SecurityType.KERBEROS, krb5ConfConfig, kerberosEnvConfig);
@@ -2334,20 +2337,20 @@ public class KerberosHelperTest extends EasyMockSupport {
     // Needed by infrastructure
     injector.getInstance(AmbariMetaInfo.class).init();
 
-    HashMap<String, Set<String>> installedServices1 = new HashMap<String, Set<String>>();
-    installedServices1.put("SERVICE1", new HashSet<String>(Arrays.asList("COMPONENT1A", "COMPONENT1B")));
-    installedServices1.put("SERVICE2", new HashSet<String>(Arrays.asList("COMPONENT2A", "COMPONENT2B")));
+    HashMap<String, Set<String>> installedServices1 = new HashMap<>();
+    installedServices1.put("SERVICE1", new HashSet<>(Arrays.asList("COMPONENT1A", "COMPONENT1B")));
+    installedServices1.put("SERVICE2", new HashSet<>(Arrays.asList("COMPONENT2A", "COMPONENT2B")));
     installedServices1.put("SERVICE3", Collections.singleton("COMPONENT3A"));
 
     Map<String, Map<String, String>> updates1 = kerberosHelper.getServiceConfigurationUpdates(
         cluster, existingConfigurations, installedServices1, null, null, true, true);
 
-    HashMap<String, Set<String>> installedServices2 = new HashMap<String, Set<String>>();
-    installedServices2.put("SERVICE1", new HashSet<String>(Arrays.asList("COMPONENT1A", "COMPONENT1B")));
+    HashMap<String, Set<String>> installedServices2 = new HashMap<>();
+    installedServices2.put("SERVICE1", new HashSet<>(Arrays.asList("COMPONENT1A", "COMPONENT1B")));
     installedServices2.put("SERVICE3", Collections.singleton("COMPONENT3A"));
 
-    Map<String, Collection<String>> serviceFilter2 = new HashMap<String, Collection<String>>();
-    serviceFilter2.put("SERVICE1", new HashSet<String>(Arrays.asList("COMPONENT1A", "COMPONENT1B")));
+    Map<String, Collection<String>> serviceFilter2 = new HashMap<>();
+    serviceFilter2.put("SERVICE1", new HashSet<>(Arrays.asList("COMPONENT1A", "COMPONENT1B")));
     serviceFilter2.put("SERVICE3", Collections.singleton("COMPONENT3A"));
 
     Map<String, Map<String, String>> updates2 = kerberosHelper.getServiceConfigurationUpdates(
@@ -2494,9 +2497,9 @@ public class KerberosHelperTest extends EasyMockSupport {
       ambariServerPrincipalNameExpected = String.format("ambari-server-%s@%s", clusterName, realm);
     }
 
-    Map<String, String> propertiesKrb5Conf = new HashMap<String, String>();
+    Map<String, String> propertiesKrb5Conf = new HashMap<>();
 
-    Map<String, String> propertiesKerberosEnv = new HashMap<String, String>();
+    Map<String, String> propertiesKerberosEnv = new HashMap<>();
     propertiesKerberosEnv.put("realm", realm);
     propertiesKerberosEnv.put("kdc_type", "mit-kdc");
     propertiesKerberosEnv.put("password_length", "20");
@@ -2517,35 +2520,35 @@ public class KerberosHelperTest extends EasyMockSupport {
     Host host2 = createMockHost("host3");
     Host host3 = createMockHost("host2");
 
-    Map<String, ServiceComponentHost> service1Component1HostMap = new HashMap<String, ServiceComponentHost>();
+    Map<String, ServiceComponentHost> service1Component1HostMap = new HashMap<>();
     service1Component1HostMap.put("host1", createMockServiceComponentHost(State.INSTALLED));
 
-    Map<String, ServiceComponentHost> service2Component1HostMap = new HashMap<String, ServiceComponentHost>();
+    Map<String, ServiceComponentHost> service2Component1HostMap = new HashMap<>();
     service2Component1HostMap.put("host2", createMockServiceComponentHost(State.INSTALLED));
 
-    Map<String, ServiceComponent> service1ComponentMap = new HashMap<String, ServiceComponent>();
+    Map<String, ServiceComponent> service1ComponentMap = new HashMap<>();
     service1ComponentMap.put("COMPONENT11", createMockComponent("COMPONENT11", true, service1Component1HostMap));
 
-    Map<String, ServiceComponent> service2ComponentMap = new HashMap<String, ServiceComponent>();
+    Map<String, ServiceComponent> service2ComponentMap = new HashMap<>();
     service2ComponentMap.put("COMPONENT21", createMockComponent("COMPONENT21", true, service2Component1HostMap));
 
     Service service1 = createMockService("SERVICE1", service1ComponentMap);
     Service service2 = createMockService("SERVICE2", service2ComponentMap);
 
-    Map<String, Service> servicesMap = new HashMap<String, Service>();
+    Map<String, Service> servicesMap = new HashMap<>();
     servicesMap.put("SERVICE1", service1);
     servicesMap.put("SERVICE2", service2);
 
     Cluster cluster = createMockCluster(clusterName, Arrays.asList(host1, host2, host3), SecurityType.KERBEROS, configKrb5Conf, configKerberosEnv);
     expect(cluster.getServices()).andReturn(servicesMap).times(1);
 
-    Map<String, String> kerberosDescriptorProperties = new HashMap<String, String>();
+    Map<String, String> kerberosDescriptorProperties = new HashMap<>();
     kerberosDescriptorProperties.put("additional_realms", "");
     kerberosDescriptorProperties.put("keytab_dir", "/etc/security/keytabs");
     kerberosDescriptorProperties.put("realm", "${kerberos-env/realm}");
     kerberosDescriptorProperties.put("principal_suffix", "-${cluster_name|toLower()}");
 
-    ArrayList<KerberosIdentityDescriptor> service1Component1Identities = new ArrayList<KerberosIdentityDescriptor>();
+    ArrayList<KerberosIdentityDescriptor> service1Component1Identities = new ArrayList<>();
     service1Component1Identities.add(createMockIdentityDescriptor(
         "s1c1_1.user",
         createMockPrincipalDescriptor("s1c1_1@${realm}", KerberosPrincipalType.USER, "s1c1", null),
@@ -2557,10 +2560,10 @@ public class KerberosHelperTest extends EasyMockSupport {
         createMockKeytabDescriptor("s1c1_1.service.keytab", null)
     ));
 
-    HashMap<String, KerberosComponentDescriptor> service1ComponentDescriptorMap = new HashMap<String, KerberosComponentDescriptor>();
+    HashMap<String, KerberosComponentDescriptor> service1ComponentDescriptorMap = new HashMap<>();
     service1ComponentDescriptorMap.put("COMPONENT11", createMockComponentDescriptor("COMPONENT11", service1Component1Identities, null));
 
-    List<KerberosIdentityDescriptor> service1Identities = new ArrayList<KerberosIdentityDescriptor>();
+    List<KerberosIdentityDescriptor> service1Identities = new ArrayList<>();
     service1Identities.add(createMockIdentityDescriptor(
         "s1_1.user",
         createMockPrincipalDescriptor("s1_1@${realm}", KerberosPrincipalType.USER, "s1", null),
@@ -2574,7 +2577,7 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     KerberosServiceDescriptor service1KerberosDescriptor = createMockServiceDescriptor("SERVICE1", service1ComponentDescriptorMap, service1Identities);
 
-    ArrayList<KerberosIdentityDescriptor> service2Component1Identities = new ArrayList<KerberosIdentityDescriptor>();
+    ArrayList<KerberosIdentityDescriptor> service2Component1Identities = new ArrayList<>();
     service2Component1Identities.add(createMockIdentityDescriptor(
         "s2_1.user",
         createMockPrincipalDescriptor("s2_1@${realm}", KerberosPrincipalType.USER, "s2", null),
@@ -2586,7 +2589,7 @@ public class KerberosHelperTest extends EasyMockSupport {
         createMockKeytabDescriptor("s2c1_1.service.keytab", null)
     ));
 
-    HashMap<String, KerberosComponentDescriptor> service2ComponentDescriptorMap = new HashMap<String, KerberosComponentDescriptor>();
+    HashMap<String, KerberosComponentDescriptor> service2ComponentDescriptorMap = new HashMap<>();
     service2ComponentDescriptorMap.put("COMPONENT21", createMockComponentDescriptor("COMPONENT21", service2Component1Identities, null));
 
     KerberosServiceDescriptor service2KerberosDescriptor = createMockServiceDescriptor("SERVICE2", service2ComponentDescriptorMap, null);
@@ -2599,7 +2602,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     if (createAmbariIdentities) {
       String spnegoPrincipalNameExpected = String.format("HTTP/%s@%s", ambariServerHostname, realm);
 
-      ArrayList<KerberosIdentityDescriptor> ambarServerComponent1Identities = new ArrayList<KerberosIdentityDescriptor>();
+      ArrayList<KerberosIdentityDescriptor> ambarServerComponent1Identities = new ArrayList<>();
       ambarServerComponent1Identities.add(createMockIdentityDescriptor(
           KerberosHelper.AMBARI_SERVER_KERBEROS_IDENTITY_NAME,
           createMockPrincipalDescriptor(ambariServerPrincipalName, ambariServerPrincipalType, "ambari", null),
@@ -2612,7 +2615,7 @@ public class KerberosHelperTest extends EasyMockSupport {
 
       KerberosComponentDescriptor ambariServerComponentKerberosDescriptor = createMockComponentDescriptor("AMBARI_SERVER", ambarServerComponent1Identities, null);
 
-      HashMap<String, KerberosComponentDescriptor> ambariServerComponentDescriptorMap = new HashMap<String, KerberosComponentDescriptor>();
+      HashMap<String, KerberosComponentDescriptor> ambariServerComponentDescriptorMap = new HashMap<>();
       ambariServerComponentDescriptorMap.put("AMBARI_SERVER", ambariServerComponentKerberosDescriptor);
 
       KerberosServiceDescriptor ambariServiceKerberosDescriptor = createMockServiceDescriptor("AMBARI", ambariServerComponentDescriptorMap, null);
@@ -2636,7 +2639,7 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     setupKerberosDescriptor(kerberosDescriptor, 1);
 
-    Map<String, Map<String, String>> existingConfigurations = new HashMap<String, Map<String, String>>();
+    Map<String, Map<String, String>> existingConfigurations = new HashMap<>();
     existingConfigurations.put("kerberos-env", propertiesKerberosEnv);
 
     Set<String> services = new HashSet<String>() {
@@ -2716,9 +2719,9 @@ public class KerberosHelperTest extends EasyMockSupport {
    */
   @Test
   public void testServiceWithoutComponents() throws Exception {
-    Map<String, String> propertiesKrb5Conf = new HashMap<String, String>();
+    Map<String, String> propertiesKrb5Conf = new HashMap<>();
 
-    Map<String, String> propertiesKerberosEnv = new HashMap<String, String>();
+    Map<String, String> propertiesKerberosEnv = new HashMap<>();
     propertiesKerberosEnv.put("realm", "EXAMPLE.COM");
     propertiesKerberosEnv.put("kdc_type", "mit-kdc");
     propertiesKerberosEnv.put("create_ambari_principal", "false");
@@ -2732,29 +2735,29 @@ public class KerberosHelperTest extends EasyMockSupport {
     // Create a Service (SERVICE1) with one Component (COMPONENT11)
     Host host1 = createMockHost("host1");
 
-    Map<String, ServiceComponentHost> service1Component1HostMap = new HashMap<String, ServiceComponentHost>();
+    Map<String, ServiceComponentHost> service1Component1HostMap = new HashMap<>();
     service1Component1HostMap.put("host1", createMockServiceComponentHost(State.INSTALLED));
 
-    Map<String, ServiceComponent> service1ComponentMap = new HashMap<String, ServiceComponent>();
+    Map<String, ServiceComponent> service1ComponentMap = new HashMap<>();
     service1ComponentMap.put("COMPONENT11", createMockComponent("COMPONENT11", true, service1Component1HostMap));
 
     Service service1 = createMockService("SERVICE1", service1ComponentMap);
 
-    Map<String, Service> servicesMap = new HashMap<String, Service>();
+    Map<String, Service> servicesMap = new HashMap<>();
     servicesMap.put("SERVICE1", service1);
 
     Cluster cluster = createMockCluster("c1", Arrays.asList(host1), SecurityType.KERBEROS, configKrb5Conf, configKerberosEnv);
     expect(cluster.getServices()).andReturn(servicesMap).times(1);
 
-    Map<String, String> kerberosDescriptorProperties = new HashMap<String, String>();
+    Map<String, String> kerberosDescriptorProperties = new HashMap<>();
     kerberosDescriptorProperties.put("additional_realms", "");
     kerberosDescriptorProperties.put("keytab_dir", "/etc/security/keytabs");
     kerberosDescriptorProperties.put("realm", "${kerberos-env/realm}");
 
     // Notice that this map is empty, hence it has 0 Components in the kerberosDescriptor.
-    HashMap<String, KerberosComponentDescriptor> service1ComponentDescriptorMap = new HashMap<String, KerberosComponentDescriptor>();
+    HashMap<String, KerberosComponentDescriptor> service1ComponentDescriptorMap = new HashMap<>();
 
-    List<KerberosIdentityDescriptor> service1Identities = new ArrayList<KerberosIdentityDescriptor>();
+    List<KerberosIdentityDescriptor> service1Identities = new ArrayList<>();
     KerberosServiceDescriptor service1KerberosDescriptor = createMockServiceDescriptor("SERVICE1", service1ComponentDescriptorMap, service1Identities);
 
     KerberosDescriptor kerberosDescriptor = createMock(KerberosDescriptor.class);
@@ -2763,7 +2766,7 @@ public class KerberosHelperTest extends EasyMockSupport {
 
     setupKerberosDescriptor(kerberosDescriptor, 1);
 
-    Map<String, Map<String, String>> existingConfigurations = new HashMap<String, Map<String, String>>();
+    Map<String, Map<String, String>> existingConfigurations = new HashMap<>();
     existingConfigurations.put("kerberos-env", propertiesKerberosEnv);
 
     Set<String> services = new HashSet<String>() {
@@ -3085,7 +3088,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     // Needed by infrastructure
     injector.getInstance(AmbariMetaInfo.class).init();
 
-    Map<String, Collection<String>> serviceComponentFilter = new HashMap<String, Collection<String>>();
+    Map<String, Collection<String>> serviceComponentFilter = new HashMap<>();
     Collection<String> identityFilter = Arrays.asList("identity1a", "identity3");
 
     serviceComponentFilter.put("SERVICE3", Collections.singleton("COMPONENT3"));
@@ -3272,7 +3275,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     // Needed by infrastructure
     injector.getInstance(AmbariMetaInfo.class).init();
 
-    Map<String, Collection<String>> serviceComponentFilter = new HashMap<String, Collection<String>>();
+    Map<String, Collection<String>> serviceComponentFilter = new HashMap<>();
     Collection<String> identityFilter = Arrays.asList("identity1a", "identity3");
 
     serviceComponentFilter.put("SERVICE3", Collections.singleton("COMPONENT3"));
@@ -3291,7 +3294,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     KerberosHelper kerberosHelper = injector.getInstance(KerberosHelper.class);
     boolean managingIdentities = !Boolean.FALSE.equals(manageIdentities);
 
-    final Map<String, String> kerberosEnvProperties = new HashMap<String, String>();
+    final Map<String, String> kerberosEnvProperties = new HashMap<>();
     kerberosEnvProperties.put("kdc_type", "mit-kdc");
     kerberosEnvProperties.put("realm", "FOOBAR.COM");
     kerberosEnvProperties.put("manage_identities", "FOOBAR.COM");
@@ -3303,12 +3306,12 @@ public class KerberosHelperTest extends EasyMockSupport {
     final Config kerberosEnvConfig = createMock(Config.class);
     expect(kerberosEnvConfig.getProperties()).andReturn(kerberosEnvProperties).anyTimes();
 
-    final Map<String, String> krb5ConfProperties = new HashMap<String, String>();
+    final Map<String, String> krb5ConfProperties = new HashMap<>();
 
     final Config krb5ConfConfig = createMock(Config.class);
     expect(krb5ConfConfig.getProperties()).andReturn(krb5ConfProperties).anyTimes();
 
-    final Map<String, Object> attributeMap = new HashMap<String, Object>();
+    final Map<String, Object> attributeMap = new HashMap<>();
 
     final Cluster cluster = createNiceMock(Cluster.class);
     expect(cluster.getDesiredConfigByType("krb5-conf")).andReturn(krb5ConfConfig).anyTimes();
@@ -3445,7 +3448,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     // Needed by infrastructure
     injector.getInstance(AmbariMetaInfo.class).init();
 
-    Map<String, String> commandParamsStage = new HashMap<String, String>();
+    Map<String, String> commandParamsStage = new HashMap<>();
     CredentialStoreService credentialStoreService = injector.getInstance(CredentialStoreService.class);
     credentialStoreService.setCredential(cluster.getClusterName(), KerberosHelper.KDC_ADMINISTRATOR_CREDENTIAL_ALIAS,
         PrincipalKeyCredential, CredentialStoreType.TEMPORARY);
@@ -3601,7 +3604,7 @@ public class KerberosHelperTest extends EasyMockSupport {
     // Needed by infrastructure
     injector.getInstance(AmbariMetaInfo.class).init();
 
-    Map<String, String> commandParamsStage = new HashMap<String, String>();
+    Map<String, String> commandParamsStage = new HashMap<>();
     commandParamsStage.put("principal_name", "${cluster-env/smokeuser}@${realm}");
     commandParamsStage.put("keytab_file", "${keytab_dir}/kerberos.service_check.keytab");
 
@@ -3906,7 +3909,7 @@ public class KerberosHelperTest extends EasyMockSupport {
           createMockPrincipalDescriptor(ambariServerPrincipalName, ambariServerPrincipalType, "ambari", null),
           createMockKeytabDescriptor(ambariServerKeytabFilePath, null));
 
-      ArrayList<KerberosIdentityDescriptor> ambarServerComponent1Identities = new ArrayList<KerberosIdentityDescriptor>();
+      ArrayList<KerberosIdentityDescriptor> ambarServerComponent1Identities = new ArrayList<>();
       ambarServerComponent1Identities.add(ambariKerberosIdentity);
 
       ambarServerComponent1Identities.add(createMockIdentityDescriptor(
@@ -3914,7 +3917,7 @@ public class KerberosHelperTest extends EasyMockSupport {
           createMockPrincipalDescriptor("HTTP/_HOST@${realm}", KerberosPrincipalType.SERVICE, null, null),
           createMockKeytabDescriptor("spnego.service.keytab", null)));
 
-      HashMap<String, KerberosComponentDescriptor> ambariServerComponentDescriptorMap = new HashMap<String, KerberosComponentDescriptor>();
+      HashMap<String, KerberosComponentDescriptor> ambariServerComponentDescriptorMap = new HashMap<>();
       KerberosComponentDescriptor componentDescrptor = createMockComponentDescriptor("AMBARI_SERVER", ambarServerComponent1Identities, null);
       ambariServerComponentDescriptorMap.put("AMBARI_SERVER", componentDescrptor);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/49185423/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ActiveWidgetLayoutResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ActiveWidgetLayoutResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ActiveWidgetLayoutResourceProviderTest.java
index 641c642..e18b3b1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ActiveWidgetLayoutResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ActiveWidgetLayoutResourceProviderTest.java
@@ -67,6 +67,7 @@ import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.ServiceComponentFactory;
 import org.apache.ambari.server.state.ServiceComponentHostFactory;
 import org.apache.ambari.server.state.ServiceFactory;
+import org.apache.ambari.server.state.UpgradeContextFactory;
 import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
 import org.apache.ambari.server.state.scheduler.RequestExecutionFactory;
 import org.apache.ambari.server.state.stack.OsFamily;
@@ -232,9 +233,9 @@ public class ActiveWidgetLayoutResourceProviderTest extends EasyMockSupport {
     ResourceProvider provider = getResourceProvider(injector, managementController);
 
     // add the property map to a set for the request.  add more maps for multiple creates
-    Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
+    Set<Map<String, Object>> propertySet = new LinkedHashSet<>();
 
-    Map<String, Object> properties = new LinkedHashMap<String, Object>();
+    Map<String, Object> properties = new LinkedHashMap<>();
 
     // add properties to the request map
     properties.put(ActiveWidgetLayoutResourceProvider.WIDGETLAYOUT_USERNAME_PROPERTY_ID, requestedUsername);
@@ -275,18 +276,18 @@ public class ActiveWidgetLayoutResourceProviderTest extends EasyMockSupport {
 
     AmbariManagementController managementController = injector.getInstance(AmbariManagementController.class);
 
-    Set<Map<String, String>> widgetLayouts = new HashSet<Map<String, String>>();
+    Set<Map<String, String>> widgetLayouts = new HashSet<>();
     HashMap<String, String> layout;
 
-    layout = new HashMap<String, String>();
+    layout = new HashMap<>();
     layout.put("id", "1");
     widgetLayouts.add(layout);
 
-    layout = new HashMap<String, String>();
+    layout = new HashMap<>();
     layout.put("id", "2");
     widgetLayouts.add(layout);
 
-    HashMap<String, Object> requestProps = new HashMap<String, Object>();
+    HashMap<String, Object> requestProps = new HashMap<>();
     requestProps.put(ActiveWidgetLayoutResourceProvider.WIDGETLAYOUT, widgetLayouts);
     requestProps.put(ActiveWidgetLayoutResourceProvider.WIDGETLAYOUT_USERNAME_PROPERTY_ID, requestedUsername);
 
@@ -376,6 +377,9 @@ public class ActiveWidgetLayoutResourceProviderTest extends EasyMockSupport {
     return Guice.createInjector(new AbstractModule() {
       @Override
       protected void configure() {
+        install(new FactoryModuleBuilder().build(UpgradeContextFactory.class));
+        install(new FactoryModuleBuilder().build(RoleGraphFactory.class));
+
         bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
         bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
         bind(ActionDBAccessor.class).toInstance(createNiceMock(ActionDBAccessor.class));
@@ -386,7 +390,6 @@ public class ActiveWidgetLayoutResourceProviderTest extends EasyMockSupport {
         bind(org.apache.ambari.server.actionmanager.RequestFactory.class).toInstance(createNiceMock(org.apache.ambari.server.actionmanager.RequestFactory.class));
         bind(RequestExecutionFactory.class).toInstance(createNiceMock(RequestExecutionFactory.class));
         bind(StageFactory.class).toInstance(createNiceMock(StageFactory.class));
-        install(new FactoryModuleBuilder().build(RoleGraphFactory.class));
         bind(Clusters.class).toInstance(createNiceMock(Clusters.class));
         bind(AbstractRootServiceResponseFactory.class).toInstance(createNiceMock(AbstractRootServiceResponseFactory.class));
         bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));

http://git-wip-us.apache.org/repos/asf/ambari/blob/49185423/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UserAuthorizationResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UserAuthorizationResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UserAuthorizationResourceProviderTest.java
index d8f560f..3a8b65a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UserAuthorizationResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UserAuthorizationResourceProviderTest.java
@@ -70,6 +70,7 @@ import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.ServiceComponentFactory;
 import org.apache.ambari.server.state.ServiceComponentHostFactory;
 import org.apache.ambari.server.state.ServiceFactory;
+import org.apache.ambari.server.state.UpgradeContextFactory;
 import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
 import org.apache.ambari.server.state.scheduler.RequestExecutionFactory;
 import org.apache.ambari.server.state.stack.OsFamily;
@@ -224,7 +225,7 @@ public class UserAuthorizationResourceProviderTest extends EasyMockSupport {
         .andReturn(null)
         .anyTimes();
 
-    Set<Resource> userPrivilegeResources = new HashSet<Resource>();
+    Set<Resource> userPrivilegeResources = new HashSet<>();
     userPrivilegeResources.add(clusterResource);
     userPrivilegeResources.add(viewResource);
     userPrivilegeResources.add(adminResource);
@@ -334,7 +335,7 @@ public class UserAuthorizationResourceProviderTest extends EasyMockSupport {
 
     Assert.assertEquals(3, resources.size());
 
-    LinkedList<String> expectedIds = new LinkedList<String>();
+    LinkedList<String> expectedIds = new LinkedList<>();
     expectedIds.add("CLUSTER.DO_SOMETHING");
     expectedIds.add("VIEW.DO_SOMETHING");
     expectedIds.add("ADMIN.DO_SOMETHING");
@@ -388,6 +389,9 @@ public class UserAuthorizationResourceProviderTest extends EasyMockSupport {
     return Guice.createInjector(new AbstractModule() {
       @Override
       protected void configure() {
+        install(new FactoryModuleBuilder().build(UpgradeContextFactory.class));
+        install(new FactoryModuleBuilder().build(RoleGraphFactory.class));
+
         bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
         bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
         bind(ActionDBAccessor.class).toInstance(createNiceMock(ActionDBAccessor.class));
@@ -398,7 +402,6 @@ public class UserAuthorizationResourceProviderTest extends EasyMockSupport {
         bind(org.apache.ambari.server.actionmanager.RequestFactory.class).toInstance(createNiceMock(org.apache.ambari.server.actionmanager.RequestFactory.class));
         bind(RequestExecutionFactory.class).toInstance(createNiceMock(RequestExecutionFactory.class));
         bind(StageFactory.class).toInstance(createNiceMock(StageFactory.class));
-        install(new FactoryModuleBuilder().build(RoleGraphFactory.class));
         bind(Clusters.class).toInstance(createNiceMock(Clusters.class));
         bind(AbstractRootServiceResponseFactory.class).toInstance(createNiceMock(AbstractRootServiceResponseFactory.class));
         bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));

http://git-wip-us.apache.org/repos/asf/ambari/blob/49185423/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UserResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UserResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UserResourceProviderTest.java
index ef6a0cc..73cec56 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UserResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UserResourceProviderTest.java
@@ -61,6 +61,7 @@ import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.ServiceComponentFactory;
 import org.apache.ambari.server.state.ServiceComponentHostFactory;
 import org.apache.ambari.server.state.ServiceFactory;
+import org.apache.ambari.server.state.UpgradeContextFactory;
 import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
 import org.apache.ambari.server.state.scheduler.RequestExecutionFactory;
 import org.apache.ambari.server.state.stack.OsFamily;
@@ -224,6 +225,9 @@ public class UserResourceProviderTest extends EasyMockSupport {
     return Guice.createInjector(new AbstractModule() {
       @Override
       protected void configure() {
+        install(new FactoryModuleBuilder().build(UpgradeContextFactory.class));
+        install(new FactoryModuleBuilder().build(RoleGraphFactory.class));
+
         bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
         bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
         bind(ActionDBAccessor.class).toInstance(createNiceMock(ActionDBAccessor.class));
@@ -234,7 +238,6 @@ public class UserResourceProviderTest extends EasyMockSupport {
         bind(RequestFactory.class).toInstance(createNiceMock(RequestFactory.class));
         bind(RequestExecutionFactory.class).toInstance(createNiceMock(RequestExecutionFactory.class));
         bind(StageFactory.class).toInstance(createNiceMock(StageFactory.class));
-        install(new FactoryModuleBuilder().build(RoleGraphFactory.class));
         bind(Clusters.class).toInstance(createNiceMock(Clusters.class));
         bind(AbstractRootServiceResponseFactory.class).toInstance(createNiceMock(AbstractRootServiceResponseFactory.class));
         bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
@@ -278,9 +281,9 @@ public class UserResourceProviderTest extends EasyMockSupport {
     ResourceProvider provider = getResourceProvider(managementController);
 
     // add the property map to a set for the request.  add more maps for multiple creates
-    Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
+    Set<Map<String, Object>> propertySet = new LinkedHashSet<>();
 
-    Map<String, Object> properties = new LinkedHashMap<String, Object>();
+    Map<String, Object> properties = new LinkedHashMap<>();
 
     // add properties to the request map
     properties.put(UserResourceProvider.USER_USERNAME_PROPERTY_ID, "User100");
@@ -325,7 +328,7 @@ public class UserResourceProviderTest extends EasyMockSupport {
 
     ResourceProvider provider = getResourceProvider(managementController);
 
-    Set<String> propertyIds = new HashSet<String>();
+    Set<String> propertyIds = new HashSet<>();
     propertyIds.add(UserResourceProvider.USER_USERNAME_PROPERTY_ID);
     propertyIds.add(UserResourceProvider.USER_PASSWORD_PROPERTY_ID);
 
@@ -367,7 +370,7 @@ public class UserResourceProviderTest extends EasyMockSupport {
 
     ResourceProvider provider = getResourceProvider(managementController);
 
-    Set<String> propertyIds = new HashSet<String>();
+    Set<String> propertyIds = new HashSet<>();
     propertyIds.add(UserResourceProvider.USER_USERNAME_PROPERTY_ID);
     propertyIds.add(UserResourceProvider.USER_PASSWORD_PROPERTY_ID);
 
@@ -407,7 +410,7 @@ public class UserResourceProviderTest extends EasyMockSupport {
     ResourceProvider provider = getResourceProvider(managementController);
 
     // add the property map to a set for the request.
-    Map<String, Object> properties = new LinkedHashMap<String, Object>();
+    Map<String, Object> properties = new LinkedHashMap<>();
     properties.put(UserResourceProvider.USER_ADMIN_PROPERTY_ID, "true");
 
     // create the request
@@ -441,7 +444,7 @@ public class UserResourceProviderTest extends EasyMockSupport {
     ResourceProvider provider = getResourceProvider(managementController);
 
     // add the property map to a set for the request.
-    Map<String, Object> properties = new LinkedHashMap<String, Object>();
+    Map<String, Object> properties = new LinkedHashMap<>();
     properties.put(UserResourceProvider.USER_ACTIVE_PROPERTY_ID, "true");
 
     Request request = PropertyHelper.getUpdateRequest(properties, null);
@@ -471,7 +474,7 @@ public class UserResourceProviderTest extends EasyMockSupport {
     ResourceProvider provider = getResourceProvider(managementController);
 
     // add the property map to a set for the request.
-    Map<String, Object> properties = new LinkedHashMap<String, Object>();
+    Map<String, Object> properties = new LinkedHashMap<>();
     properties.put(UserResourceProvider.USER_OLD_PASSWORD_PROPERTY_ID, "old_password");
     properties.put(UserResourceProvider.USER_PASSWORD_PROPERTY_ID, "new_password");
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/49185423/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
index 526e462..1e65fc2 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
@@ -125,7 +125,7 @@ public class ConfigHelperTest {
         put("fs.trash.interval", "30");
       }});
       cr.setPropertiesAttributes(new HashMap<String, Map<String, String>>() {{
-        Map<String, String> attrs = new HashMap<String, String>();
+        Map<String, String> attrs = new HashMap<>();
         attrs.put("ipc.client.connect.max.retries", "1");
         attrs.put("fs.trash.interval", "2");
         put("attribute1", attrs);
@@ -165,7 +165,7 @@ public class ConfigHelperTest {
         put("namenode_heapsize", "1024");
       }});
       cr.setPropertiesAttributes(new HashMap<String, Map<String, String>>() {{
-        Map<String, String> attrs = new HashMap<String, String>();
+        Map<String, String> attrs = new HashMap<>();
         attrs.put("dfs_namenode_name_dir", "3");
         attrs.put("namenode_heapsize", "4");
         put("attribute2", attrs);
@@ -233,8 +233,8 @@ public class ConfigHelperTest {
     Long addConfigGroup(String name, String tag, List<String> hosts,
                         List<Config> configs) throws AmbariException {
 
-      Map<Long, Host> hostMap = new HashMap<Long, Host>();
-      Map<String, Config> configMap = new HashMap<String, Config>();
+      Map<Long, Host> hostMap = new HashMap<>();
+      Map<String, Config> configMap = new HashMap<>();
 
       Long hostId = 1L;
       for (String hostname : hosts) {
@@ -277,7 +277,7 @@ public class ConfigHelperTest {
     @Test
     public void testProcessHiddenAttribute() throws Exception {
       StackInfo stackInfo = metaInfo.getStack("HDP", "2.0.5");
-      Map<String, Map<String, Map<String, String>>> configAttributes = new HashMap<String, Map<String, Map<String, String>>>();
+      Map<String, Map<String, Map<String, String>>> configAttributes = new HashMap<>();
       configAttributes.put("hive-site", stackInfo.getDefaultConfigAttributesForConfigType("hive-site"));
 
       Map<String, Map<String, String>> originalConfig_hiveClient = createHiveConfig();
@@ -339,7 +339,7 @@ public class ConfigHelperTest {
         add(clusterRequest6);
       }}, null);
 
-      Map<String, String> properties = new HashMap<String, String>();
+      Map<String, String> properties = new HashMap<>();
       properties.put("a", "b");
       properties.put("c", "d");
 
@@ -378,7 +378,7 @@ public class ConfigHelperTest {
         put("fs.trash.interval", "30");
       }});
       cr.setPropertiesAttributes(new HashMap<String, Map<String, String>>() {{
-        Map<String, String> attrs = new HashMap<String, String>();
+        Map<String, String> attrs = new HashMap<>();
         attrs.put("ipc.client.connect.max.retries", "1");
         attrs.put("fs.trash.interval", "2");
         put("attribute1", attrs);
@@ -401,7 +401,7 @@ public class ConfigHelperTest {
         put("namenode_heapsize", "1024");
       }});
       cr.setPropertiesAttributes(new HashMap<String, Map<String, String>>() {{
-        Map<String, String> attrs = new HashMap<String, String>();
+        Map<String, String> attrs = new HashMap<>();
         attrs.put("dfs_namenode_name_dir", "3");
         attrs.put("namenode_heapsize", "4");
         put("attribute2", attrs);
@@ -416,12 +416,12 @@ public class ConfigHelperTest {
         add(clusterRequest3);
       }}, null);
 
-      Map<String, String> properties = new HashMap<String, String>();
+      Map<String, String> properties = new HashMap<>();
       properties.put("a", "b");
       properties.put("c", "d");
       final Config config1 = configFactory.createNew(cluster, "core-site2", "version122", properties, null);
 
-      Map<String, String> properties2 = new HashMap<String, String>();
+      Map<String, String> properties2 = new HashMap<>();
       properties2.put("namenode_heapsize", "1111");
       final Config config2 = configFactory.createNew(cluster, "global2", "version122", properties2, null);
 
@@ -464,7 +464,7 @@ public class ConfigHelperTest {
         put("fs.trash.interval", "30");
       }});
       crr.setPropertiesAttributes(new HashMap<String, Map<String, String>>() {{
-        Map<String, String> attrs = new HashMap<String, String>();
+        Map<String, String> attrs = new HashMap<>();
         attrs.put("ipc.client.connect.max.retries", "1");
         attrs.put("fs.trash.interval", "2");
         put("attribute1", attrs);
@@ -487,7 +487,7 @@ public class ConfigHelperTest {
         put("namenode_heapsize", "1024");
       }});
       crr.setPropertiesAttributes(new HashMap<String, Map<String, String>>() {{
-        Map<String, String> attrs = new HashMap<String, String>();
+        Map<String, String> attrs = new HashMap<>();
         attrs.put("dfs_namenode_name_dir", "3");
         attrs.put("namenode_heapsize", "4");
         put("attribute2", attrs);
@@ -503,19 +503,19 @@ public class ConfigHelperTest {
       }}, null);
 
 
-      Map<String, String> attributes = new HashMap<String, String>();
+      Map<String, String> attributes = new HashMap<>();
       attributes.put("fs.trash.interval", "11");
       attributes.put("b", "y");
-      Map<String, Map<String, String>> config1Attributes = new HashMap<String, Map<String, String>>();
+      Map<String, Map<String, String>> config1Attributes = new HashMap<>();
       config1Attributes.put("attribute1", attributes);
 
       final Config config1 = configFactory.createNew(cluster, "core-site3", "version122",
           new HashMap<String, String>(), config1Attributes);
 
-      attributes = new HashMap<String, String>();
+      attributes = new HashMap<>();
       attributes.put("namenode_heapsize", "z");
       attributes.put("c", "q");
-      Map<String, Map<String, String>> config2Attributes = new HashMap<String, Map<String, String>>();
+      Map<String, Map<String, String>> config2Attributes = new HashMap<>();
       config2Attributes.put("attribute2", attributes);
 
       final Config config2 = configFactory.createNew(cluster, "global3", "version122",
@@ -567,19 +567,19 @@ public class ConfigHelperTest {
     @Test
     public void testCloneAttributesMap() throws Exception {
       // init
-      Map<String, Map<String, String>> targetAttributesMap = new HashMap<String, Map<String, String>>();
-      Map<String, String> attributesValues = new HashMap<String, String>();
+      Map<String, Map<String, String>> targetAttributesMap = new HashMap<>();
+      Map<String, String> attributesValues = new HashMap<>();
       attributesValues.put("a", "1");
       attributesValues.put("b", "2");
       attributesValues.put("f", "3");
       attributesValues.put("q", "4");
       targetAttributesMap.put("attr", attributesValues);
-      Map<String, Map<String, String>> sourceAttributesMap = new HashMap<String, Map<String, String>>();
-      attributesValues = new HashMap<String, String>();
+      Map<String, Map<String, String>> sourceAttributesMap = new HashMap<>();
+      attributesValues = new HashMap<>();
       attributesValues.put("a", "5");
       attributesValues.put("f", "6");
       sourceAttributesMap.put("attr", attributesValues);
-      attributesValues = new HashMap<String, String>();
+      attributesValues = new HashMap<>();
       attributesValues.put("f", "7");
       attributesValues.put("q", "8");
       sourceAttributesMap.put("attr1", attributesValues);
@@ -606,8 +606,8 @@ public class ConfigHelperTest {
     @Test
     public void testCloneAttributesMapSourceIsNull() throws Exception {
       // init
-      Map<String, Map<String, String>> targetAttributesMap = new HashMap<String, Map<String, String>>();
-      Map<String, String> attributesValues = new HashMap<String, String>();
+      Map<String, Map<String, String>> targetAttributesMap = new HashMap<>();
+      Map<String, String> attributesValues = new HashMap<>();
       attributesValues.put("a", "1");
       attributesValues.put("b", "2");
       attributesValues.put("f", "3");
@@ -635,12 +635,12 @@ public class ConfigHelperTest {
     public void testCloneAttributesMapTargetIsNull() throws Exception {
       // init
       Map<String, Map<String, String>> targetAttributesMap = null;
-      Map<String, Map<String, String>> sourceAttributesMap = new HashMap<String, Map<String, String>>();
-      Map<String, String> attributesValues = new HashMap<String, String>();
+      Map<String, Map<String, String>> sourceAttributesMap = new HashMap<>();
+      Map<String, String> attributesValues = new HashMap<>();
       attributesValues.put("a", "5");
       attributesValues.put("f", "6");
       sourceAttributesMap.put("attr", attributesValues);
-      attributesValues = new HashMap<String, String>();
+      attributesValues = new HashMap<>();
       attributesValues.put("f", "7");
       attributesValues.put("q", "8");
       sourceAttributesMap.put("attr1", attributesValues);
@@ -666,17 +666,17 @@ public class ConfigHelperTest {
 
     @Test
     public void testMergeAttributes() throws Exception {
-      Map<String, Map<String, String>> persistedAttributes = new HashMap<String, Map<String, String>>();
-      Map<String, String> persistedFinalAttrs = new HashMap<String, String>();
+      Map<String, Map<String, String>> persistedAttributes = new HashMap<>();
+      Map<String, String> persistedFinalAttrs = new HashMap<>();
       persistedFinalAttrs.put("a", "true");
       persistedFinalAttrs.put("c", "true");
       persistedFinalAttrs.put("d", "true");
       persistedAttributes.put("final", persistedFinalAttrs);
-      Map<String, Map<String, String>> confGroupAttributes = new HashMap<String, Map<String, String>>();
-      Map<String, String> confGroupFinalAttrs = new HashMap<String, String>();
+      Map<String, Map<String, String>> confGroupAttributes = new HashMap<>();
+      Map<String, String> confGroupFinalAttrs = new HashMap<>();
       confGroupFinalAttrs.put("b", "true");
       confGroupAttributes.put("final", confGroupFinalAttrs);
-      Map<String, String> confGroupProperties = new HashMap<String, String>();
+      Map<String, String> confGroupProperties = new HashMap<>();
       confGroupProperties.put("a", "any");
       confGroupProperties.put("b", "any");
       confGroupProperties.put("c", "any");
@@ -698,14 +698,14 @@ public class ConfigHelperTest {
 
     @Test
     public void testMergeAttributesWithNoAttributeOverrides() throws Exception {
-      Map<String, Map<String, String>> persistedAttributes = new HashMap<String, Map<String, String>>();
-      Map<String, String> persistedFinalAttrs = new HashMap<String, String>();
+      Map<String, Map<String, String>> persistedAttributes = new HashMap<>();
+      Map<String, String> persistedFinalAttrs = new HashMap<>();
       persistedFinalAttrs.put("a", "true");
       persistedFinalAttrs.put("c", "true");
       persistedFinalAttrs.put("d", "true");
       persistedAttributes.put("final", persistedFinalAttrs);
-      Map<String, Map<String, String>> confGroupAttributes = new HashMap<String, Map<String, String>>();
-      Map<String, String> confGroupProperties = new HashMap<String, String>();
+      Map<String, Map<String, String>> confGroupAttributes = new HashMap<>();
+      Map<String, String> confGroupProperties = new HashMap<>();
       confGroupProperties.put("a", "any");
       confGroupProperties.put("b", "any");
       confGroupProperties.put("c", "any");
@@ -726,13 +726,13 @@ public class ConfigHelperTest {
 
     @Test
     public void testMergeAttributesWithNullAttributes() throws Exception {
-      Map<String, Map<String, String>> persistedAttributes = new HashMap<String, Map<String, String>>();
-      Map<String, String> persistedFinalAttrs = new HashMap<String, String>();
+      Map<String, Map<String, String>> persistedAttributes = new HashMap<>();
+      Map<String, String> persistedFinalAttrs = new HashMap<>();
       persistedFinalAttrs.put("a", "true");
       persistedFinalAttrs.put("c", "true");
       persistedFinalAttrs.put("d", "true");
       persistedAttributes.put("final", persistedFinalAttrs);
-      Map<String, String> confGroupProperties = new HashMap<String, String>();
+      Map<String, String> confGroupProperties = new HashMap<>();
       confGroupProperties.put("a", "any");
       confGroupProperties.put("b", "any");
       confGroupProperties.put("c", "any");
@@ -755,14 +755,14 @@ public class ConfigHelperTest {
 
     @Test
     public void testMergeAttributesWithNullProperties() throws Exception {
-      Map<String, Map<String, String>> persistedAttributes = new HashMap<String, Map<String, String>>();
-      Map<String, String> persistedFinalAttrs = new HashMap<String, String>();
+      Map<String, Map<String, String>> persistedAttributes = new HashMap<>();
+      Map<String, String> persistedFinalAttrs = new HashMap<>();
       persistedFinalAttrs.put("a", "true");
       persistedFinalAttrs.put("c", "true");
       persistedFinalAttrs.put("d", "true");
       persistedAttributes.put("final", persistedFinalAttrs);
-      Map<String, Map<String, String>> confGroupAttributes = new HashMap<String, Map<String, String>>();
-      Map<String, String> confGroupFinalAttrs = new HashMap<String, String>();
+      Map<String, Map<String, String>> confGroupAttributes = new HashMap<>();
+      Map<String, String> confGroupFinalAttrs = new HashMap<>();
       confGroupFinalAttrs.put("b", "true");
       confGroupAttributes.put("final", confGroupFinalAttrs);
 
@@ -801,7 +801,7 @@ public class ConfigHelperTest {
       Assert.assertTrue(propertiesAttributes.get("attribute1").containsKey("ipc.client.connect.max.retries"));
 
 
-      Map<String, String> updates = new HashMap<String, String>();
+      Map<String, String> updates = new HashMap<>();
       updates.put("new-property", "new-value");
       updates.put("fs.trash.interval", "updated-value");
       Collection<String> removals = Collections.singletonList("ipc.client.connect.max.retries");
@@ -839,7 +839,7 @@ public class ConfigHelperTest {
       Assert.assertEquals("simple", properties.get("oozie.authentication.type"));
       Assert.assertEquals("false", properties.get("oozie.service.HadoopAccessorService.kerberos.enabled"));
 
-      Map<String, String> updates = new HashMap<String, String>();
+      Map<String, String> updates = new HashMap<>();
       updates.put("oozie.authentication.type", "kerberos");
       updates.put("oozie.service.HadoopAccessorService.kerberos.enabled", "true");
 
@@ -868,7 +868,7 @@ public class ConfigHelperTest {
       Assert.assertEquals("embedded", properties.get("timeline.service.operating.mode"));
       Assert.assertEquals("false", properties.get("timeline.service.fifo.enabled"));
 
-      List<String> removals = new ArrayList<String>();
+      List<String> removals = new ArrayList<>();
       removals.add("timeline.service.operating.mode");
 
       configHelper.updateConfigType(cluster, managementController, "ams-site", null, removals, "admin", "Test note");
@@ -887,7 +887,7 @@ public class ConfigHelperTest {
     @Test
     public void testCalculateIsStaleConfigs() throws Exception {
 
-      Map<String, HostConfig> schReturn = new HashMap<String, HostConfig>();
+      Map<String, HostConfig> schReturn = new HashMap<>();
       HostConfig hc = new HostConfig();
       // Put a different version to check for change
       hc.setDefaultVersionTag("version2");
@@ -913,9 +913,9 @@ public class ConfigHelperTest {
       Assert.assertFalse(configHelper.isStaleConfigs(sch, null));
 
       // Cluster level same configs but group specific configs for host have been updated
-      List<String> hosts = new ArrayList<String>();
+      List<String> hosts = new ArrayList<>();
       hosts.add("h1");
-      List<Config> configs = new ArrayList<Config>();
+      List<Config> configs = new ArrayList<>();
 
       Config configImpl = configFactory.createNew(cluster, "flume-conf", "FLUME1",
           new HashMap<String,String>(), null);
@@ -966,6 +966,8 @@ public class ConfigHelperTest {
           final AmbariMetaInfo mockMetaInfo = createNiceMock(AmbariMetaInfo.class);
           final ClusterController clusterController = createStrictMock(ClusterController.class);
 
+          bind(UpgradeContextFactory.class).toInstance(createNiceMock(UpgradeContextFactory.class));
+
           bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
           bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
           bind(ClusterFactory.class).toInstance(createNiceMock(ClusterFactory.class));


Mime
View raw message