ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From nc...@apache.org
Subject [38/51] [abbrv] ambari git commit: AMBARI-14424. Hive Metastore alert timeout.(vbrodetskyi)
Date Wed, 23 Dec 2015 15:07:17 GMT
AMBARI-14424. Hive Metastore alert timeout.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f0c1e2ea
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f0c1e2ea
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f0c1e2ea

Branch: refs/heads/branch-dev-patch-upgrade
Commit: f0c1e2eabaf2f44592c0a9aa4618dfcaf51e8202
Parents: e7b1758
Author: Vitaly Brodetskyi <vbrodetskyi@hortonworks.com>
Authored: Tue Dec 22 17:56:30 2015 +0200
Committer: Vitaly Brodetskyi <vbrodetskyi@hortonworks.com>
Committed: Tue Dec 22 17:58:34 2015 +0200

----------------------------------------------------------------------
 .../libraries/functions/hive_check.py           |   5 +-
 .../server/upgrade/SchemaUpgradeHelper.java     |  28 +--
 .../server/upgrade/UpgradeCatalog221.java       | 179 +++++++++++++++++++
 .../common-services/HIVE/0.12.0.2.0/alerts.json |  20 ++-
 .../package/alerts/alert_hive_metastore.py      |  11 +-
 .../package/alerts/alert_hive_thrift_port.py    |  10 +-
 .../server/upgrade/UpgradeCatalog221Test.java   | 136 ++++++++++++++
 7 files changed, 367 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f0c1e2ea/ambari-common/src/main/python/resource_management/libraries/functions/hive_check.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/hive_check.py
b/ambari-common/src/main/python/resource_management/libraries/functions/hive_check.py
index 55fd6bd..aacb176 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/hive_check.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/hive_check.py
@@ -27,7 +27,7 @@ from resource_management.libraries.functions import format
 
 def check_thrift_port_sasl(address, port, hive_auth="NOSASL", key=None, kinitcmd=None, smokeuser='ambari-qa',
                           transport_mode="binary", http_endpoint="cliservice", ssl=False, ssl_keystore=None,
-                           ssl_password=None):
+                           ssl_password=None, check_command_timeout=30):
   """
   Hive thrift SASL port check
   """
@@ -41,7 +41,6 @@ def check_thrift_port_sasl(address, port, hive_auth="NOSASL", key=None, kinitcmd
 
   # to pass as beeline argument
   ssl_str = str(ssl).lower()
-  beeline_check_timeout = 30
   beeline_url = ['jdbc:hive2://{address}:{port}/', "transportMode={transport_mode}"]
 
   # append url according to used transport
@@ -66,5 +65,5 @@ def check_thrift_port_sasl(address, port, hive_auth="NOSASL", key=None, kinitcmd
   Execute(cmd,
           user=smokeuser,
           path=["/bin/", "/usr/bin/", "/usr/lib/hive/bin/", "/usr/sbin/"],
-          timeout=beeline_check_timeout
+          timeout=check_command_timeout
   )

http://git-wip-us.apache.org/repos/asf/ambari/blob/f0c1e2ea/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
index 871cb67..82aa6ca 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
@@ -17,15 +17,11 @@
  */
 package org.apache.ambari.server.upgrade;
 
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Properties;
-import java.util.Set;
-
+import com.google.inject.Guice;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.multibindings.Multibinder;
+import com.google.inject.persist.PersistService;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.ControllerModule;
@@ -35,11 +31,14 @@ import org.apache.ambari.server.utils.VersionUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.inject.Guice;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.multibindings.Multibinder;
-import com.google.inject.persist.PersistService;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Properties;
+import java.util.Set;
 
 public class SchemaUpgradeHelper {
   private static final Logger LOG = LoggerFactory.getLogger
@@ -182,6 +181,7 @@ public class SchemaUpgradeHelper {
       catalogBinder.addBinding().to(UpgradeCatalog212.class);
       catalogBinder.addBinding().to(UpgradeCatalog2121.class);
       catalogBinder.addBinding().to(UpgradeCatalog220.class);
+      catalogBinder.addBinding().to(UpgradeCatalog221.class);
       catalogBinder.addBinding().to(UpgradeCatalog230.class);
       catalogBinder.addBinding().to(FinalUpgradeCatalog.class);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f0c1e2ea/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
new file mode 100644
index 0000000..a27a2b2
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
@@ -0,0 +1,179 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.upgrade;
+
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+import com.google.gson.JsonPrimitive;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
+import org.apache.ambari.server.orm.dao.DaoUtils;
+import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+/**
+ * Upgrade catalog for version 2.2.1.
+ */
+public class UpgradeCatalog221 extends AbstractUpgradeCatalog {
+
+  @Inject
+  DaoUtils daoUtils;
+
+  /**
+   * Logger.
+   */
+  private static final Logger LOG = LoggerFactory.getLogger(UpgradeCatalog221.class);
+
+
+  // ----- Constructors ------------------------------------------------------
+
+  /**
+   * Don't forget to register new UpgradeCatalogs in {@link org.apache.ambari.server.upgrade.SchemaUpgradeHelper.UpgradeHelperModule#configure()}
+   *
+   * @param injector Guice injector to track dependencies and uses bindings to inject them.
+   */
+  @Inject
+  public UpgradeCatalog221(Injector injector) {
+    super(injector);
+    this.injector = injector;
+  }
+
+  // ----- UpgradeCatalog ----------------------------------------------------
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public String getTargetVersion() {
+    return "2.2.1";
+  }
+
+  // ----- AbstractUpgradeCatalog --------------------------------------------
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public String getSourceVersion() {
+    return "2.2.0";
+  }
+
+
+  @Override
+  protected void executeDDLUpdates() throws AmbariException, SQLException {
+    //To change body of implemented methods use File | Settings | File Templates.
+  }
+
+  @Override
+  protected void executePreDMLUpdates() throws AmbariException, SQLException {
+    //To change body of implemented methods use File | Settings | File Templates.
+  }
+
+  @Override
+  protected void executeDMLUpdates() throws AmbariException, SQLException {
+    addNewConfigurationsFromXml();
+    updateAlerts();
+  }
+
+  protected void updateAlerts() {
+    LOG.info("Updating alert definitions.");
+    AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+    AlertDefinitionDAO alertDefinitionDAO = injector.getInstance(AlertDefinitionDAO.class);
+    Clusters clusters = ambariManagementController.getClusters();
+
+    Map<String, Cluster> clusterMap = getCheckedClusterMap(clusters);
+    for (final Cluster cluster : clusterMap.values()) {
+      long clusterID = cluster.getClusterId();
+      final AlertDefinitionEntity hiveMetastoreProcessAlertDefinitionEntity = alertDefinitionDAO.findByName(
+              clusterID, "hive_metastore_process");
+      final AlertDefinitionEntity hiveServerProcessAlertDefinitionEntity = alertDefinitionDAO.findByName(
+              clusterID, "hive_server_process");
+
+      List<AlertDefinitionEntity> hiveAlertDefinitions = new ArrayList();
+      hiveAlertDefinitions.add(hiveMetastoreProcessAlertDefinitionEntity);
+      hiveAlertDefinitions.add(hiveServerProcessAlertDefinitionEntity);
+
+      for(AlertDefinitionEntity alertDefinition : hiveAlertDefinitions){
+        String source = alertDefinition.getSource();
+
+        alertDefinition.setScheduleInterval(3);
+        alertDefinition.setSource(addCheckCommandTimeoutParam(source));
+        alertDefinition.setHash(UUID.randomUUID().toString());
+
+        alertDefinitionDAO.merge(alertDefinition);
+      }
+
+    }
+  }
+
+  protected String addCheckCommandTimeoutParam(String source) {
+    JsonObject sourceJson = new JsonParser().parse(source).getAsJsonObject();
+    JsonArray parametersJson = sourceJson.getAsJsonArray("parameters");
+
+    boolean parameterExists = parametersJson != null && !parametersJson.isJsonNull();
+
+    if (parameterExists) {
+      Iterator<JsonElement> jsonElementIterator = parametersJson.iterator();
+      while(jsonElementIterator.hasNext()) {
+        JsonElement element = jsonElementIterator.next();
+        JsonElement name = element.getAsJsonObject().get("name");
+        if (name != null && !name.isJsonNull() && name.getAsString().equals("check.command.timeout")) {
+          return sourceJson.toString();
+        }
+      }
+    }
+
+    JsonObject checkCommandTimeoutParamJson = new JsonObject();
+    checkCommandTimeoutParamJson.add("name", new JsonPrimitive("check.command.timeout"));
+    checkCommandTimeoutParamJson.add("display_name", new JsonPrimitive("Check command timeout"));
+    checkCommandTimeoutParamJson.add("value", new JsonPrimitive(60.0));
+    checkCommandTimeoutParamJson.add("type", new JsonPrimitive("NUMERIC"));
+    checkCommandTimeoutParamJson.add("description", new JsonPrimitive("The maximum time before check command will be killed by timeout"));
+    checkCommandTimeoutParamJson.add("units", new JsonPrimitive("seconds"));
+
+    if (!parameterExists) {
+      parametersJson = new JsonArray();
+      parametersJson.add(checkCommandTimeoutParamJson);
+      sourceJson.add("parameters", parametersJson);
+    } else {
+      parametersJson.add(checkCommandTimeoutParamJson);
+      sourceJson.remove("parameters");
+      sourceJson.add("parameters", parametersJson);
+    }
+
+    return sourceJson.toString();
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f0c1e2ea/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/alerts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/alerts.json
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/alerts.json
index 55e3f78..cf99435 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/alerts.json
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/alerts.json
@@ -6,7 +6,7 @@
         "name": "hive_metastore_process",
         "label": "Hive Metastore Process",
        "description": "This host-level alert is triggered if the Hive Metastore process cannot be determined to be up and listening on the network.",
-        "interval": 1,
+        "interval": 3,
         "scope": "ANY",
         "enabled": true,
         "source": {
@@ -14,6 +14,14 @@
           "path": "HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py",
           "parameters": [
             {
+              "name": "check.command.timeout",
+              "display_name": "Check command timeout",
+              "value": 60.0,
+              "type": "NUMERIC",
+              "description": "The maximum time before check command will be killed by timeout",
+              "units": "seconds"
+            },
+            {
               "name": "default.smoke.user",
               "display_name": "Default Smoke User",
               "value": "ambari-qa",
@@ -43,7 +51,7 @@
         "name": "hive_server_process",
         "label": "HiveServer2 Process",
        "description": "This host-level alert is triggered if the HiveServer cannot be determined to be up and responding to client requests.",
-        "interval": 1,
+        "interval": 3,
         "scope": "ANY",
         "enabled": true,
         "source": {
@@ -51,6 +59,14 @@
           "path": "HIVE/0.12.0.2.0/package/alerts/alert_hive_thrift_port.py",
           "parameters": [
             {
+              "name": "check.command.timeout",
+              "display_name": "Check command timeout",
+              "value": 60.0,
+              "type": "NUMERIC",
+              "description": "The maximum time before check command will be killed by timeout",
+              "units": "seconds"
+            },
+            {
               "name": "default.smoke.user",
               "display_name": "Default Smoke User",
               "value": "ambari-qa",

http://git-wip-us.apache.org/repos/asf/ambari/blob/f0c1e2ea/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py
index 861c48e..dbf0600 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_metastore.py
@@ -59,6 +59,9 @@ HIVE_CONF_DIR_LEGACY = '/etc/hive/conf.server'
 HIVE_BIN_DIR = '/usr/hdp/current/hive-metastore/bin'
 HIVE_BIN_DIR_LEGACY = '/usr/lib/hive/bin'
 
+CHECK_COMMAND_TIMEOUT_KEY = 'check.command.timeout'
+CHECK_COMMAND_TIMEOUT_DEFAULT = 60.0
+
 HADOOPUSER_KEY = '{{cluster-env/hadoop.user.name}}'
 HADOOPUSER_DEFAULT = 'hadoop'
 logger = logging.getLogger('ambari_alerts')
@@ -103,6 +106,10 @@ def execute(configurations={}, parameters={}, host_name=None):
   if SECURITY_ENABLED_KEY in configurations:
     security_enabled = str(configurations[SECURITY_ENABLED_KEY]).upper() == 'TRUE'
 
+  check_command_timeout = CHECK_COMMAND_TIMEOUT_DEFAULT
+  if CHECK_COMMAND_TIMEOUT_KEY in parameters:
+    check_command_timeout = float(parameters[CHECK_COMMAND_TIMEOUT_KEY])
+
   # defaults
   smokeuser_keytab = SMOKEUSER_KEYTAB_DEFAULT
   smokeuser_principal = SMOKEUSER_PRINCIPAL_DEFAULT
@@ -173,7 +180,7 @@ def execute(configurations={}, parameters={}, host_name=None):
     try:
       Execute(cmd, user=smokeuser,
         path=["/bin/", "/usr/bin/", "/usr/sbin/", bin_dir],
-        timeout=30 )
+        timeout=int(check_command_timeout) )
 
       total_time = time.time() - start_time
 
@@ -245,4 +252,4 @@ def execute(configurations={}, parameters={}, host_name=None):
     label = traceback.format_exc()
     result_code = 'UNKNOWN'
 
-  return ((result_code, [label]))
\ No newline at end of file
+  return ((result_code, [label]))

http://git-wip-us.apache.org/repos/asf/ambari/blob/f0c1e2ea/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_thrift_port.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_thrift_port.py
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_thrift_port.py
index a04c2a6..32da1cc 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_thrift_port.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_thrift_port.py
@@ -68,6 +68,9 @@ SMOKEUSER_DEFAULT = 'ambari-qa'
 HADOOPUSER_KEY = '{{cluster-env/hadoop.user.name}}'
 HADOOPUSER_DEFAULT = 'hadoop'
 
+CHECK_COMMAND_TIMEOUT_KEY = 'check.command.timeout'
+CHECK_COMMAND_TIMEOUT_DEFAULT = 60.0
+
 logger = logging.getLogger('ambari_alerts')
 
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
@@ -119,6 +122,10 @@ def execute(configurations={}, parameters={}, host_name=None):
   if SECURITY_ENABLED_KEY in configurations:
     security_enabled = str(configurations[SECURITY_ENABLED_KEY]).upper() == 'TRUE'
 
+  check_command_timeout = CHECK_COMMAND_TIMEOUT_DEFAULT
+  if CHECK_COMMAND_TIMEOUT_KEY in parameters:
+    check_command_timeout = float(parameters[CHECK_COMMAND_TIMEOUT_KEY])
+
   hive_server2_authentication = HIVE_SERVER2_AUTHENTICATION_DEFAULT
   if HIVE_SERVER2_AUTHENTICATION_KEY in configurations:
     hive_server2_authentication = configurations[HIVE_SERVER2_AUTHENTICATION_KEY]
@@ -189,7 +196,8 @@ def execute(configurations={}, parameters={}, host_name=None):
     try:
       hive_check.check_thrift_port_sasl(host_name, port, hive_server2_authentication, hive_server_principal,
                                        kinitcmd, smokeuser, transport_mode=transport_mode, ssl=hive_ssl,
-                                        ssl_keystore=hive_ssl_keystore_path, ssl_password=hive_ssl_keystore_password)
+                                        ssl_keystore=hive_ssl_keystore_path, ssl_password=hive_ssl_keystore_password,
+                                        check_command_timeout=int(check_command_timeout))
       result_code = 'OK'
       total_time = time.time() - start_time
       label = OK_MESSAGE.format(total_time, port)

http://git-wip-us.apache.org/repos/asf/ambari/blob/f0c1e2ea/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
new file mode 100644
index 0000000..7cf386e
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.upgrade;
+
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Provider;
+import com.google.inject.persist.PersistService;
+import junit.framework.Assert;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.orm.GuiceJpaInitializer;
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.orm.entities.StackEntity;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import javax.persistence.EntityManager;
+import java.lang.reflect.Method;
+
+import static org.easymock.EasyMock.createMockBuilder;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
+import static org.easymock.EasyMock.verify;
+
+public class UpgradeCatalog221Test {
+  private Injector injector;
+  private Provider<EntityManager> entityManagerProvider = createStrictMock(Provider.class);
+  private EntityManager entityManager = createNiceMock(EntityManager.class);
+  private UpgradeCatalogHelper upgradeCatalogHelper;
+  private StackEntity desiredStackEntity;
+
+
+
+  @Before
+  public void init() {
+    reset(entityManagerProvider);
+    expect(entityManagerProvider.get()).andReturn(entityManager).anyTimes();
+    replay(entityManagerProvider);
+    injector = Guice.createInjector(new InMemoryDefaultTestModule());
+    injector.getInstance(GuiceJpaInitializer.class);
+
+    upgradeCatalogHelper = injector.getInstance(UpgradeCatalogHelper.class);
+    // inject AmbariMetaInfo to ensure that stacks get populated in the DB
+    injector.getInstance(AmbariMetaInfo.class);
+    // load the stack entity
+    StackDAO stackDAO = injector.getInstance(StackDAO.class);
+    desiredStackEntity = stackDAO.find("HDP", "2.2.0");
+  }
+
+  @After
+  public void tearDown() {
+    injector.getInstance(PersistService.class).stop();
+  }
+
+  @Test
+  public void testExecuteDMLUpdates() throws Exception {
+    Method addNewConfigurationsFromXml = AbstractUpgradeCatalog.class.getDeclaredMethod("addNewConfigurationsFromXml");
+    Method updateAlerts = UpgradeCatalog221.class.getDeclaredMethod("updateAlerts");
+
+
+
+    UpgradeCatalog221 upgradeCatalog221 = createMockBuilder(UpgradeCatalog221.class)
+            .addMockedMethod(addNewConfigurationsFromXml)
+            .addMockedMethod(updateAlerts)
+            .createMock();
+
+    upgradeCatalog221.addNewConfigurationsFromXml();
+    expectLastCall().once();
+    upgradeCatalog221.updateAlerts();
+    expectLastCall().once();
+
+
+    replay(upgradeCatalog221);
+
+    upgradeCatalog221.executeDMLUpdates();
+
+    verify(upgradeCatalog221);
+  }
+
+  @Test
+  public void test_AddCheckCommandTimeoutParam_ParamsNotAvailable() {
+
+    UpgradeCatalog221 upgradeCatalog221 = new UpgradeCatalog221(injector);
+    String inputSource = "{ \"path\" : \"test_path\", \"type\" : \"SCRIPT\"}";
+    String expectedSource = "{\"path\":\"test_path\",\"type\":\"SCRIPT\",\"parameters\":[{\"name\":\"check.command.timeout\",\"display_name\":\"Check command timeout\",\"value\":60.0,\"type\":\"NUMERIC\",\"description\":\"The maximum time before check command will be killed by timeout\",\"units\":\"seconds\"}]}";
+
+    String result = upgradeCatalog221.addCheckCommandTimeoutParam(inputSource);
+    Assert.assertEquals(result, expectedSource);
+  }
+
+  @Test
+  public void test_AddCheckCommandTimeoutParam_ParamsAvailable() {
+
+    UpgradeCatalog221 upgradeCatalog221 = new UpgradeCatalog221(injector);
+    String inputSource = "{\"path\":\"test_path\",\"type\":\"SCRIPT\",\"parameters\":[{\"name\":\"test\",\"display_name\":\"Test\",\"value\":10.0,\"type\":\"test\",\"description\":\"test\",\"units\":\"test\"}]}";
+    String expectedSource = "{\"path\":\"test_path\",\"type\":\"SCRIPT\",\"parameters\":[{\"name\":\"test\",\"display_name\":\"Test\",\"value\":10.0,\"type\":\"test\",\"description\":\"test\",\"units\":\"test\"},{\"name\":\"check.command.timeout\",\"display_name\":\"Check command timeout\",\"value\":60.0,\"type\":\"NUMERIC\",\"description\":\"The maximum time before check command will be killed by timeout\",\"units\":\"seconds\"}]}";
+
+    String result = upgradeCatalog221.addCheckCommandTimeoutParam(inputSource);
+    Assert.assertEquals(result, expectedSource);
+  }
+
+  @Test
+  public void test_AddCheckCommandTimeoutParam_NeededParamAlreadyAdded() {
+
+    UpgradeCatalog221 upgradeCatalog221 = new UpgradeCatalog221(injector);
+    String inputSource = "{\"path\":\"test_path\",\"type\":\"SCRIPT\",\"parameters\":[{\"display_name\":\"Test\",\"value\":10.0,\"type\":\"test\",\"description\":\"test\",\"units\":\"test\"},{\"name\":\"check.command.timeout\",\"display_name\":\"Check command timeout\",\"value\":60.0,\"type\":\"NUMERIC\",\"description\":\"The maximum time before check command will be killed by timeout\",\"units\":\"seconds\"}]}";
+    String expectedSource = "{\"path\":\"test_path\",\"type\":\"SCRIPT\",\"parameters\":[{\"display_name\":\"Test\",\"value\":10.0,\"type\":\"test\",\"description\":\"test\",\"units\":\"test\"},{\"name\":\"check.command.timeout\",\"display_name\":\"Check command timeout\",\"value\":60.0,\"type\":\"NUMERIC\",\"description\":\"The maximum time before check command will be killed by timeout\",\"units\":\"seconds\"}]}";
+
+    String result = upgradeCatalog221.addCheckCommandTimeoutParam(inputSource);
+    Assert.assertEquals(result, expectedSource);
+  }
+
+}


Mime
View raw message