From: magyari_sandor@apache.org
To: commits@ambari.apache.org
Reply-To: ambari-dev@ambari.apache.org
Message-Id: <43b604de504c4c8a9ce4a0ab3e648b94@git.apache.org>
Subject: ambari git commit: AMBARI-18254. DB consistency check should tolerate services with no configuration (Balazs Bence Sari via magyari_sandor)
Date: Tue, 20 Sep 2016 12:54:38 +0000 (UTC)

Repository: ambari
Updated Branches:
  refs/heads/branch-2.4 1f43692b1 -> a73554f24


AMBARI-18254. DB consistency check should tolerate services with no configuration (Balazs Bence Sari via magyari_sandor)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a73554f2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a73554f2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a73554f2

Branch: refs/heads/branch-2.4
Commit: a73554f243347dc8b3bbb820c7e563c652e60109
Parents: 1f43692
Author: Balazs Bence Sari
Authored: Tue Sep 20 14:39:55 2016 +0200
Committer: Sandor Magyari
Committed: Tue Sep 20 14:54:07 2016 +0200

----------------------------------------------------------------------
 .../checks/DatabaseConsistencyCheckHelper.java  | 10 ++-
 .../DatabaseConsistencyCheckHelperTest.java     | 91 ++++++++++++++++++++
 2 files changed, 99 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a73554f2/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
index fa42e8a..f302b8b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelper.java
@@ -86,6 +86,12 @@ public class DatabaseConsistencyCheckHelper {
 
   protected static void setInjector(Injector injector) {
     DatabaseConsistencyCheckHelper.injector = injector;
+    // Clean up: a new injector means the static fields should be reinitialized, though in real life this only occurs during testing
+    closeConnection();
+    connection = null;
+    metainfoDAO = null;
+    ambariMetaInfo = null;
+    dbAccessor = null;
   }
 
   public static void setConnection(Connection connection) {
@@ -448,8 +454,8 @@ public class DatabaseConsistencyCheckHelper {
       }
 
       for (String clusterName : clusterServiceMap.keySet()) {
-        LOG.error("Service(s): {}, from cluster {} has no config(s) in serviceconfig table!", StringUtils.join(clusterServiceMap.get(clusterName), ","), clusterName);
-        errorAvailable = true;
+        LOG.warn("Service(s): {}, from cluster {} has no config(s) in serviceconfig table!", StringUtils.join(clusterServiceMap.get(clusterName), ","), clusterName);
+        warningAvailable = true;
       }
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a73554f2/ambari-server/src/test/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelperTest.java
index 86a57c5..4663310 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/DatabaseConsistencyCheckHelperTest.java
@@ -27,6 +27,7 @@ import java.sql.Statement;
 import java.util.HashMap;
 import java.util.Map;
 
+import junit.framework.Assert;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.stack.StackManagerFactory;
@@ -34,6 +35,7 @@ import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.EasyMockSupport;
+import org.junit.Ignore;
 import org.junit.Test;
 
 import com.google.inject.AbstractModule;
@@ -293,6 +295,95 @@ public class DatabaseConsistencyCheckHelperTest {
     easyMockSupport.verifyAll();
   }
 
+  @Test
+  public void testCheckServiceConfigs_missingServiceConfigGeneratesWarning() throws Exception {
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+    final AmbariMetaInfo mockAmbariMetainfo = easyMockSupport.createNiceMock(AmbariMetaInfo.class);
+    final DBAccessor mockDBDbAccessor = easyMockSupport.createNiceMock(DBAccessor.class);
+    final Connection mockConnection = easyMockSupport.createNiceMock(Connection.class);
+    final ResultSet mockResultSet = easyMockSupport.createNiceMock(ResultSet.class);
+    final ResultSet clusterServicesResultSet = easyMockSupport.createNiceMock(ResultSet.class);
+    final ResultSet stackResultSet = easyMockSupport.createNiceMock(ResultSet.class);
+    final ResultSet serviceConfigResultSet = easyMockSupport.createNiceMock(ResultSet.class);
+    final Statement mockStatement = easyMockSupport.createNiceMock(Statement.class);
+    final ServiceInfo mockHDFSServiceInfo = easyMockSupport.createNiceMock(ServiceInfo.class);
+
+    final StackManagerFactory mockStackManagerFactory = easyMockSupport.createNiceMock(StackManagerFactory.class);
+    final EntityManager mockEntityManager = easyMockSupport.createNiceMock(EntityManager.class);
+    final Clusters mockClusters = easyMockSupport.createNiceMock(Clusters.class);
+    final OsFamily mockOSFamily = easyMockSupport.createNiceMock(OsFamily.class);
+    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(AmbariMetaInfo.class).toInstance(mockAmbariMetainfo);
+        bind(StackManagerFactory.class).toInstance(mockStackManagerFactory);
+        bind(EntityManager.class).toInstance(mockEntityManager);
+        bind(DBAccessor.class).toInstance(mockDBDbAccessor);
+        bind(Clusters.class).toInstance(mockClusters);
+        bind(OsFamily.class).toInstance(mockOSFamily);
+      }
+    });
+
+    Map<String, ServiceInfo> services = new HashMap<>();
+    services.put("HDFS", mockHDFSServiceInfo);
+
+    Map<String, Map<String, Map<String, String>>> configAttributes = new HashMap<>();
+    configAttributes.put("core-site", new HashMap<String, Map<String, String>>());
+
+    expect(mockHDFSServiceInfo.getConfigTypeAttributes()).andReturn(configAttributes);
+    expect(mockAmbariMetainfo.getServices("HDP", "2.2")).andReturn(services);
+    expect(clusterServicesResultSet.next()).andReturn(true);
+    expect(clusterServicesResultSet.getString("service_name")).andReturn("OPENSOFT R");
+    expect(clusterServicesResultSet.getString("cluster_name")).andReturn("My Cluster");
+    expect(serviceConfigResultSet.next()).andReturn(true);
+    expect(serviceConfigResultSet.getString("service_name")).andReturn("HDFS");
+    expect(serviceConfigResultSet.getString("type_name")).andReturn("core-site");
+    expect(stackResultSet.next()).andReturn(true);
+    expect(stackResultSet.getString("stack_name")).andReturn("HDP");
+    expect(stackResultSet.getString("stack_version")).andReturn("2.2");
+    expect(mockConnection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE)).andReturn(mockStatement);
+    expect(mockStatement.executeQuery("select c.cluster_name, service_name from clusterservices cs " +
+        "join clusters c on cs.cluster_id=c.cluster_id " +
+        "where service_name not in (select service_name from serviceconfig sc where sc.cluster_id=cs.cluster_id and sc.service_name=cs.service_name and sc.group_id is null)")).andReturn(clusterServicesResultSet);
+    expect(mockStatement.executeQuery("select c.cluster_name, sc.service_name, sc.version from serviceconfig sc " +
+        "join clusters c on sc.cluster_id=c.cluster_id " +
+        "where service_config_id not in (select service_config_id from serviceconfigmapping) and group_id is null")).andReturn(mockResultSet);
+    expect(mockStatement.executeQuery("select c.cluster_name, s.stack_name, s.stack_version from clusters c " +
+        "join stack s on c.desired_stack_id = s.stack_id")).andReturn(stackResultSet);
+    expect(mockStatement.executeQuery("select c.cluster_name, cs.service_name, cc.type_name, sc.version from clusterservices cs " +
+        "join serviceconfig sc on cs.service_name=sc.service_name and cs.cluster_id=sc.cluster_id " +
+        "join serviceconfigmapping scm on sc.service_config_id=scm.service_config_id " +
+        "join clusterconfig cc on scm.config_id=cc.config_id and sc.cluster_id=cc.cluster_id " +
+        "join clusters c on cc.cluster_id=c.cluster_id and sc.stack_id=c.desired_stack_id " +
+        "where sc.group_id is null and sc.service_config_id=(select max(service_config_id) from serviceconfig sc2 where sc2.service_name=sc.service_name and sc2.cluster_id=sc.cluster_id) " +
+        "group by c.cluster_name, cs.service_name, cc.type_name, sc.version")).andReturn(serviceConfigResultSet);
+    expect(mockStatement.executeQuery("select c.cluster_name, cs.service_name, cc.type_name from clusterservices cs " +
+        "join serviceconfig sc on cs.service_name=sc.service_name and cs.cluster_id=sc.cluster_id " +
+        "join serviceconfigmapping scm on sc.service_config_id=scm.service_config_id " +
+        "join clusterconfig cc on scm.config_id=cc.config_id and cc.cluster_id=sc.cluster_id " +
+        "join clusterconfigmapping ccm on cc.type_name=ccm.type_name and cc.version_tag=ccm.version_tag and cc.cluster_id=ccm.cluster_id " +
+        "join clusters c on ccm.cluster_id=c.cluster_id " +
+        "where sc.group_id is null and sc.service_config_id = (select max(service_config_id) from serviceconfig sc2 where sc2.service_name=sc.service_name and sc2.cluster_id=sc.cluster_id) " +
+        "group by c.cluster_name, cs.service_name, cc.type_name " +
+        "having sum(ccm.selected) < 1")).andReturn(mockResultSet);
+
+    DatabaseConsistencyCheckHelper.setInjector(mockInjector);
+    DatabaseConsistencyCheckHelper.setConnection(mockConnection);
+
+    easyMockSupport.replayAll();
+
+    mockAmbariMetainfo.init();
+
+    DatabaseConsistencyCheckHelper.resetErrorWarningFlags();
+    DatabaseConsistencyCheckHelper.checkServiceConfigs();
+
+    easyMockSupport.verifyAll();
+
+    Assert.assertTrue("Missing service config for OPENSOFT R should have triggered a warning.",
+        DatabaseConsistencyCheckHelper.isWarningAvailable());
+    Assert.assertFalse("No errors should have been triggered.", DatabaseConsistencyCheckHelper.isErrorAvailable());
+  }
+
 }
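
A minimal sketch (not part of this commit; the ConsistencyCheckDriver class below is hypothetical) of how a caller could act on the two flags the new test asserts, assuming the static injector and JDBC connection have already been supplied via setInjector()/setConnection() exactly as the test does:

package org.apache.ambari.server.checks;

// Hypothetical driver illustrating the error-vs-warning distinction introduced by AMBARI-18254;
// assumes DatabaseConsistencyCheckHelper.setInjector()/setConnection() were called beforehand,
// as in the test above.
public class ConsistencyCheckDriver {

  // Returns false only for genuine inconsistencies; services with no config now merely warn.
  public static boolean runServiceConfigCheck() {
    DatabaseConsistencyCheckHelper.resetErrorWarningFlags();
    DatabaseConsistencyCheckHelper.checkServiceConfigs();

    if (DatabaseConsistencyCheckHelper.isErrorAvailable()) {
      return false;  // real inconsistencies still fail the check
    }
    if (DatabaseConsistencyCheckHelper.isWarningAvailable()) {
      System.out.println("DB consistency check finished with warnings; see the ambari-server log.");
    }
    return true;
  }
}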