Return-Path:
X-Original-To: apmail-ambari-commits-archive@www.apache.org
Delivered-To: apmail-ambari-commits-archive@www.apache.org
Received: from mail.apache.org (hermes.apache.org [140.211.11.3])
by minotaur.apache.org (Postfix) with SMTP id DCACF178F3
for <apmail-ambari-commits-archive@www.apache.org>;
Sat, 25 Apr 2015 12:08:16 +0000 (UTC)
Received: (qmail 47128 invoked by uid 500); 25 Apr 2015 12:08:16 -0000
Delivered-To: apmail-ambari-commits-archive@ambari.apache.org
Received: (qmail 47094 invoked by uid 500); 25 Apr 2015 12:08:16 -0000
Mailing-List: contact commits-help@ambari.apache.org; run by ezmlm
Precedence: bulk
List-Help: <mailto:commits-help@ambari.apache.org>
List-Unsubscribe: <mailto:commits-unsubscribe@ambari.apache.org>
List-Post: <mailto:commits@ambari.apache.org>
List-Id: <commits.ambari.apache.org>
Reply-To: ambari-dev@ambari.apache.org
Delivered-To: mailing list commits@ambari.apache.org
Received: (qmail 47082 invoked by uid 99); 25 Apr 2015 12:08:16 -0000
Received: from git1-us-west.apache.org (HELO git1-us-west.apache.org)
(140.211.11.23)
by apache.org (qpsmtpd/0.29) with ESMTP; Sat, 25 Apr 2015 12:08:16 +0000
Received: by git1-us-west.apache.org (ASF Mail Server at
git1-us-west.apache.org, from userid 33)
id 25956DFC14; Sat, 25 Apr 2015 12:08:16 +0000 (UTC)
Content-Type: text/plain; charset="us-ascii"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
From: ncole@apache.org
To: commits@ambari.apache.org
Message-Id: <0cfce9b4b3de4cc887633f6311e4118d@git.apache.org>
X-Mailer: ASF-Git Admin Mailer
Subject: ambari git commit: AMBARI-10739. ConfigureAction Must Take Into
Account Stack Versions (ncole)
Date: Sat, 25 Apr 2015 12:08:16 +0000 (UTC)
Repository: ambari
Updated Branches:
refs/heads/trunk 27eae30dd -> 682d82eef
AMBARI-10739. ConfigureAction Must Take Into Account Stack Versions (ncole)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/682d82ee
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/682d82ee
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/682d82ee
Branch: refs/heads/trunk
Commit: 682d82eef2e1229f4aaf9cd217c27c4c857d37fe
Parents: 27eae30
Author: Nate Cole
Authored: Fri Apr 24 16:40:55 2015 -0400
Committer: Nate Cole
Committed: Fri Apr 24 21:17:14 2015 -0400
----------------------------------------------------------------------
.../serveraction/upgrades/ConfigureAction.java | 70 ++++++
.../org/apache/ambari/server/state/Config.java | 6 +
.../apache/ambari/server/state/ConfigImpl.java | 55 +++--
.../state/stack/upgrade/ConfigureTask.java | 5 +-
.../upgrades/ConfigureActionTest.java | 214 +++++++++++++++++++
5 files changed, 333 insertions(+), 17 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/682d82ee/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
index 6154f5b..df9d7be 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
@@ -35,6 +35,7 @@ import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.ConfigHelper;
import org.apache.ambari.server.state.DesiredConfig;
+import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.stack.upgrade.ConfigureTask;
import com.google.inject.Inject;
@@ -72,6 +73,45 @@ public class ConfigureAction extends AbstractServerAction {
private Configuration configuration;
/**
+ * Aside from the normal execution, this method performs the following logic, with
+ * the stack values set in the table below:
+ *
+ * <table>
+ *   <tr>
+ *     <th>Upgrade Path</th>
+ *     <th>direction</th>
+ *     <th>Stack Actual</th>
+ *     <th>Stack Desired</th>
+ *     <th>Config Stack</th>
+ *     <th>Action</th>
+ *   </tr>
+ *   <tr>
+ *     <td>2.2.x -> 2.2.y</td>
+ *     <td>upgrade or downgrade</td>
+ *     <td>2.2</td>
+ *     <td>2.2</td>
+ *     <td>2.2</td>
+ *     <td>if value has changed, create a new config object with new value</td>
+ *   </tr>
+ *   <tr>
+ *     <td>2.2 -> 2.3</td>
+ *     <td>upgrade</td>
+ *     <td>2.2</td>
+ *     <td>2.3: set before action is executed</td>
+ *     <td>2.3: set before action is executed</td>
+ *     <td>new configs are already created; just update with new properties</td>
+ *   </tr>
+ *   <tr>
+ *     <td>2.3 -> 2.2</td>
+ *     <td>downgrade</td>
+ *     <td>2.2</td>
+ *     <td>2.2: set before action is executed</td>
+ *     <td>2.2</td>
+ *     <td>configs are already managed, results are the same as 2.2.x -> 2.2.y</td>
+ *   </tr>
+ * </table>
+ *
+ *
* {@inheritDoc}
*/
@Override
@@ -108,14 +148,44 @@ public class ConfigureAction extends AbstractServerAction {
}
Cluster cluster = m_clusters.getCluster(clusterName);
+
Map<String, DesiredConfig> desiredConfigs = cluster.getDesiredConfigs();
DesiredConfig desiredConfig = desiredConfigs.get(configType);
Config config = cluster.getConfig(configType, desiredConfig.getTag());
+ StackId currentStack = cluster.getCurrentStackVersion();
+ StackId targetStack = cluster.getDesiredStackVersion();
+ StackId configStack = config.getStackId();
+
+ String oldValue = config.getProperties().get(key);
+ // !!! values are not changing, so make this a no-op
+ if (null != oldValue && value.equals(oldValue)) {
+ if (currentStack.equals(targetStack)) {
+ return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
+ MessageFormat.format("{0}/{1} for cluster {2} would not change, skipping setting",
+ configType, key, clusterName),
+ "");
+ }
+ }
+
Map<String, String> propertiesToChange = new HashMap<String, String>();
propertiesToChange.put(key, value);
config.updateProperties(propertiesToChange);
+ // !!! check to see if we're going to a new stack and double check the
+ // configs are for the target. Then simply update the new properties instead
+ // of creating a whole new history record since it was already done
+ if (!targetStack.equals(currentStack) && targetStack.equals(configStack)) {
+ config.persist(false);
+
+ return createCommandReport(0, HostRoleStatus.COMPLETED, "{}",
+ MessageFormat.format("Updated ''{0}'' with ''{1}={2}''",
+ configType, key, value),
+ "");
+ }
+
+ // !!! values are different and within the same stack. create a new
+ // config and service config version
String serviceVersionNote = "Stack Upgrade";
String auditName = getExecutionCommand().getRoleParams().get(ServerAction.ACTION_USER_NAME);
http://git-wip-us.apache.org/repos/asf/ambari/blob/682d82ee/ambari-server/src/main/java/org/apache/ambari/server/state/Config.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Config.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Config.java
index 502e1bf..e18505a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/Config.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Config.java
@@ -107,4 +107,10 @@ public interface Config {
* Persist the configuration.
*/
public void persist();
+
+ /**
+ * Persist the configuration, optionally creating a new config entity.
+ */
+ public void persist(boolean newConfig);
+
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/682d82ee/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java
index 6866b0c..e755f76 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java
@@ -185,27 +185,50 @@ public class ConfigImpl implements Config {
}
@Override
+ public void persist() {
+ persist(true);
+ }
+
+ @Override
@Transactional
- public synchronized void persist() {
+ public synchronized void persist(boolean newConfig) {
ClusterEntity clusterEntity = clusterDAO.findById(cluster.getClusterId());
- ClusterConfigEntity entity = new ClusterConfigEntity();
- entity.setClusterEntity(clusterEntity);
- entity.setClusterId(cluster.getClusterId());
- entity.setType(getType());
- entity.setVersion(getVersion());
- entity.setTag(getTag());
- entity.setTimestamp(new Date().getTime());
- entity.setStack(clusterEntity.getDesiredStack());
-
- entity.setData(gson.toJson(getProperties()));
- if (null != getPropertiesAttributes()) {
- entity.setAttributes(gson.toJson(getPropertiesAttributes()));
+ if (newConfig) {
+ ClusterConfigEntity entity = new ClusterConfigEntity();
+ entity.setClusterEntity(clusterEntity);
+ entity.setClusterId(cluster.getClusterId());
+ entity.setType(getType());
+ entity.setVersion(getVersion());
+ entity.setTag(getTag());
+ entity.setTimestamp(new Date().getTime());
+ entity.setStack(clusterEntity.getDesiredStack());
+ entity.setData(gson.toJson(getProperties()));
+ if (null != getPropertiesAttributes()) {
+ entity.setAttributes(gson.toJson(getPropertiesAttributes()));
+ }
+
+ clusterDAO.createConfig(entity);
+
+ clusterEntity.getClusterConfigEntities().add(entity);
+ } else {
+ // only supporting changes to the properties
+ ClusterConfigEntity entity = null;
+ for (ClusterConfigEntity cfe : clusterEntity.getClusterConfigEntities()) {
+ if (getTag().equals(cfe.getTag()) &&
+ getType().equals(cfe.getType()) &&
+ getVersion().equals(cfe.getVersion())) {
+ entity = cfe;
+ break;
+ }
+
+ }
+
+ if (null != entity) {
+ entity.setData(gson.toJson(getProperties()));
+ }
}
- clusterDAO.createConfig(entity);
-
- clusterEntity.getClusterConfigEntities().add(entity);
clusterDAO.merge(clusterEntity);
cluster.refresh();
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/682d82ee/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
index 40a1db2..d3cb366 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ConfigureTask.java
@@ -218,8 +218,11 @@ public class ConfigureTask extends ServerSideActionTask {
Map<String, DesiredConfig> desiredConfigs = cluster.getDesiredConfigs();
DesiredConfig desiredConfig = desiredConfigs.get(configType);
- Config config = cluster.getConfig(configType, desiredConfig.getTag());
+ if (null == desiredConfig) {
+ return null;
+ }
+ Config config = cluster.getConfig(configType, desiredConfig.getTag());
if (null == config) {
return null;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/682d82ee/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
new file mode 100644
index 0000000..391db55
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
@@ -0,0 +1,214 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.serveraction.upgrades;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.actionmanager.HostRoleCommandFactory;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.agent.ExecutionCommand;
+import org.apache.ambari.server.orm.GuiceJpaInitializer;
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.OrmTestHelper;
+import org.apache.ambari.server.orm.dao.HostDAO;
+import org.apache.ambari.server.orm.dao.HostVersionDAO;
+import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
+import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.orm.entities.HostVersionEntity;
+import org.apache.ambari.server.orm.entities.StackEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigFactory;
+import org.apache.ambari.server.state.Host;
+import org.apache.ambari.server.state.RepositoryVersionState;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.stack.upgrade.ConfigureTask;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.inject.Guice;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.persist.PersistService;
+
+/**
+ * Tests upgrade-related server side actions
+ */
+public class ConfigureActionTest {
+ private static final String HDP_2_2_1_0 = "2.2.1.0-2270";
+ private static final String HDP_2_2_0_0 = "2.2.0.0-2041";
+ private static final StackId HDP_21_STACK = new StackId("HDP-2.1.1");
+ private static final StackId HDP_22_STACK = new StackId("HDP-2.2.0");
+
+ private Injector m_injector;
+
+ @Inject
+ private OrmTestHelper m_helper;
+
+ @Inject
+ private RepositoryVersionDAO repoVersionDAO;
+
+ @Inject
+ private HostVersionDAO hostVersionDAO;
+
+ @Inject
+ private HostRoleCommandFactory hostRoleCommandFactory;
+
+ @Before
+ public void setup() throws Exception {
+ m_injector = Guice.createInjector(new InMemoryDefaultTestModule());
+ m_injector.getInstance(GuiceJpaInitializer.class);
+ m_injector.injectMembers(this);
+ }
+
+ @After
+ public void teardown() throws Exception {
+ m_injector.getInstance(PersistService.class).stop();
+ }
+
+ private void makeUpgradeCluster() throws Exception {
+ String clusterName = "c1";
+ String hostName = "h1";
+
+ Clusters clusters = m_injector.getInstance(Clusters.class);
+ clusters.addCluster(clusterName, HDP_21_STACK);
+
+ StackDAO stackDAO = m_injector.getInstance(StackDAO.class);
+ StackEntity stackEntity = stackDAO.find(HDP_21_STACK.getStackName(),
+ HDP_21_STACK.getStackVersion());
+
+ assertNotNull(stackEntity);
+
+ Cluster c = clusters.getCluster(clusterName);
+ c.setDesiredStackVersion(HDP_21_STACK);
+
+ ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
+ Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+ put("initLimit", "10");
+ }}, new HashMap<String, Map<String, String>>());
+ config.setTag("version1");
+ config.persist();
+
+ c.addConfig(config);
+ c.addDesiredConfig("user", Collections.singleton(config));
+
+ // add a host component
+ clusters.addHost(hostName);
+
+ Host host = clusters.getHost(hostName);
+
+ Map<String, String> hostAttributes = new HashMap<String, String>();
+ hostAttributes.put("os_family", "redhat");
+ hostAttributes.put("os_release_version", "6");
+ host.setHostAttributes(hostAttributes);
+ host.persist();
+
+ String urlInfo = "[{'repositories':["
+ + "{'Repositories/base_url':'http://foo1','Repositories/repo_name':'HDP','Repositories/repo_id':'HDP-2.1.1'}"
+ + "], 'OperatingSystems/os_type':'redhat6'}]";
+
+ m_helper.getOrCreateRepositoryVersion(HDP_21_STACK, HDP_2_2_0_0);
+ repoVersionDAO.create(stackEntity, HDP_2_2_1_0, String.valueOf(System.currentTimeMillis()),
+ "pack", urlInfo);
+
+ c.createClusterVersion(HDP_21_STACK, HDP_2_2_0_0, "admin", RepositoryVersionState.UPGRADING);
+ c.createClusterVersion(HDP_21_STACK, HDP_2_2_1_0, "admin", RepositoryVersionState.INSTALLING);
+
+ c.transitionClusterVersion(HDP_21_STACK, HDP_2_2_0_0, RepositoryVersionState.CURRENT);
+ c.transitionClusterVersion(HDP_21_STACK, HDP_2_2_1_0, RepositoryVersionState.INSTALLED);
+ c.transitionClusterVersion(HDP_21_STACK, HDP_2_2_1_0, RepositoryVersionState.UPGRADING);
+ c.transitionClusterVersion(HDP_21_STACK, HDP_2_2_1_0, RepositoryVersionState.UPGRADED);
+ c.setCurrentStackVersion(HDP_21_STACK);
+
+ c.mapHostVersions(Collections.singleton(hostName), c.getCurrentClusterVersion(),
+ RepositoryVersionState.CURRENT);
+
+ HostDAO hostDAO = m_injector.getInstance(HostDAO.class);
+
+ HostVersionEntity entity = new HostVersionEntity();
+ entity.setHostEntity(hostDAO.findByName(hostName));
+ entity.setRepositoryVersion(repoVersionDAO.findByStackAndVersion(HDP_21_STACK, HDP_2_2_1_0));
+ entity.setState(RepositoryVersionState.UPGRADED);
+ hostVersionDAO.create(entity);
+ }
+
+ @Test
+ public void testConfigActionUpgradeAcrossStack() throws Exception {
+ makeUpgradeCluster();
+
+ Cluster c = m_injector.getInstance(Clusters.class).getCluster("c1");
+ assertEquals(1, c.getConfigsByType("zoo.cfg").size());
+
+ c.setDesiredStackVersion(HDP_22_STACK);
+ ConfigFactory cf = m_injector.getInstance(ConfigFactory.class);
+ Config config = cf.createNew(c, "zoo.cfg", new HashMap<String, String>() {{
+ put("initLimit", "10");
+ }}, new HashMap<String, Map<String, String>>());
+ config.setTag("version2");
+ config.persist();
+
+ c.addConfig(config);
+ c.addDesiredConfig("user", Collections.singleton(config));
+ assertEquals(2, c.getConfigsByType("zoo.cfg").size());
+
+
+ Map<String, String> commandParams = new HashMap<String, String>();
+ commandParams.put("upgrade_direction", "upgrade");
+ commandParams.put("version", HDP_2_2_1_0);
+ commandParams.put("clusterName", "c1");
+ commandParams.put(ConfigureTask.PARAMETER_CONFIG_TYPE, "zoo.cfg");
+ commandParams.put(ConfigureTask.PARAMETER_KEY, "initLimit");
+ commandParams.put(ConfigureTask.PARAMETER_VALUE, "11");
+
+ ExecutionCommand executionCommand = new ExecutionCommand();
+ executionCommand.setCommandParams(commandParams);
+ executionCommand.setClusterName("c1");
+
+ HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null,
+ null, null);
+
+ hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(
+ executionCommand));
+
+ ConfigureAction action = m_injector.getInstance(ConfigureAction.class);
+ action.setExecutionCommand(executionCommand);
+ action.setHostRoleCommand(hostRoleCommand);
+
+ CommandReport report = action.execute(null);
+ assertNotNull(report);
+
+ assertEquals(2, c.getConfigsByType("zoo.cfg").size());
+
+ config = c.getDesiredConfigByType("zoo.cfg");
+ assertNotNull(config);
+ assertEquals("version2", config.getTag());
+ assertEquals("11", config.getProperties().get("initLimit"));
+
+ }
+
+
+}