From: dsen@apache.org
To: commits@ambari.apache.org
Reply-To: ambari-dev@ambari.apache.org
Message-Id: <4ac51839ce174e4aa93c23b6e4ae8527@git.apache.org>
Subject: ambari git commit: AMBARI-14156 Analyze configs changes done to 500 node cluster for AMS config updates (dsen)
Date: Wed, 2 Dec 2015 17:15:10 +0000 (UTC)

Repository: ambari
Updated Branches:
  refs/heads/trunk 9f0538e02 -> 4fe479b0f


AMBARI-14156 Analyze configs changes done to 500 node cluster for AMS config updates (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4fe479b0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4fe479b0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4fe479b0

Branch: refs/heads/trunk
Commit: 4fe479b0f4ad7bdf97e8609b3a1182ad3c0f57fd
Parents: 9f0538e
Author: Dmytro Sen
Authored: Wed Dec 2 19:14:10 2015 +0200
Committer: Dmytro Sen
Committed: Wed Dec 2 19:14:54 2015 +0200

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog213.java       | 22 +++++++-
 .../0.1.0/configuration/ams-hbase-site.xml      |  8 +++
 .../server/upgrade/UpgradeCatalog213Test.java   | 58 +++++++++++++++++++-
 3 files changed, 84 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4fe479b0/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
index 07c56de..0770d86 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
@@ -86,10 +86,14 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
   private static final String TOPOLOGY_CONFIG = "topology";
   private static final String KAFKA_BROKER = "kafka-broker";
   private static final String KAFKA_ENV_CONFIG = "kafka-env";
-  private static final String KAFKA_ENV_CONTENT_KERBEROS_PARAMS = "export KAFKA_KERBEROS_PARAMS={{kafka_kerberos_params}}";
+  private static final String KAFKA_ENV_CONTENT_KERBEROS_PARAMS =
+    "export KAFKA_KERBEROS_PARAMS={{kafka_kerberos_params}}";
   private static final String AMS_ENV = "ams-env";
   private static final String AMS_HBASE_ENV = "ams-hbase-env";
   private static final String AMS_SITE = "ams-site";
+  private static final String AMS_HBASE_SITE = "ams-hbase-site";
+  private static final String AMS_HBASE_SITE_ZK_TIMEOUT_PROPERTY =
+    "zookeeper.session.timeout.localHBaseCluster";
   private static final String HBASE_ENV_CONFIG = "hbase-env";
   private static final String FLUME_ENV_CONFIG = "flume-env";
   private static final String HIVE_SITE_CONFIG = "hive-site";
@@ -214,9 +218,9 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
   private void executeBlueprintDDLUpdates() throws AmbariException, SQLException {
     dbAccessor.addColumn(BLUEPRINT_TABLE, new DBAccessor.DBColumnInfo(SECURITY_TYPE_COLUMN,
-      String.class, 32, "NONE", false));
+        String.class, 32, "NONE", false));
     dbAccessor.addColumn(BLUEPRINT_TABLE, new DBAccessor.DBColumnInfo(SECURITY_DESCRIPTOR_REF_COLUMN,
-      String.class, null, null, true));
+        String.class, null, null, true));
   }
@@ -1055,6 +1059,18 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
           updateConfigurationPropertiesForCluster(cluster, AMS_SITE, newProperties, true, true);
         }
+
+        Config amsHbaseSite = cluster.getDesiredConfigByType(AMS_HBASE_SITE);
+        if (amsHbaseSite != null) {
+          Map<String, String> amsHbaseSiteProperties = amsHbaseSite.getProperties();
+          String zkTimeout = amsHbaseSiteProperties.get(AMS_HBASE_SITE_ZK_TIMEOUT_PROPERTY);
+          // if old default, set new default
+          if ("20000".equals(zkTimeout)) {
+            Map<String, String> newProperties = new HashMap<>();
+            newProperties.put(AMS_HBASE_SITE_ZK_TIMEOUT_PROPERTY, "120000");
+            updateConfigurationPropertiesForCluster(cluster, AMS_HBASE_SITE, newProperties, true, true);
+          }
+        }
       }
     }
   }
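
Not part of the commit: the hunk in updateAMSConfigs() above only rewrites zookeeper.session.timeout.localHBaseCluster when the cluster still carries the old stock default (20000 ms), so operator-tuned values survive the upgrade. Below is a minimal standalone sketch of that rule, assuming a plain property map instead of Ambari's Config and updateConfigurationPropertiesForCluster() machinery; class and method names are illustrative only.

import java.util.HashMap;
import java.util.Map;

public class AmsHbaseSiteTimeoutMigration {

  static final String ZK_TIMEOUT_PROPERTY = "zookeeper.session.timeout.localHBaseCluster";

  /**
   * Returns the property updates to apply: bumps the ZooKeeper session timeout
   * from the old stock default (20000 ms) to the new one (120000 ms), but
   * leaves any operator-set value untouched.
   */
  static Map<String, String> migratedProperties(Map<String, String> amsHbaseSite) {
    Map<String, String> updates = new HashMap<>();
    if ("20000".equals(amsHbaseSite.get(ZK_TIMEOUT_PROPERTY))) {
      updates.put(ZK_TIMEOUT_PROPERTY, "120000");
    }
    return updates;
  }

  public static void main(String[] args) {
    Map<String, String> stockCluster = new HashMap<>();
    stockCluster.put(ZK_TIMEOUT_PROPERTY, "20000");
    // Old default gets migrated to the new default.
    System.out.println(migratedProperties(stockCluster)); // {zookeeper.session.timeout.localHBaseCluster=120000}

    Map<String, String> tunedCluster = new HashMap<>();
    tunedCluster.put(ZK_TIMEOUT_PROPERTY, "60000");
    // Operator-tuned value is left alone.
    System.out.println(migratedProperties(tunedCluster)); // {}
  }
}

Comparing against the old default instead of overwriting unconditionally is what keeps the upgrade non-destructive on clusters that were already tuned, which is the point of the 500-node analysis this commit came out of.
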
http://git-wip-us.apache.org/repos/asf/ambari/blob/4fe479b0/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-site.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-site.xml
index 33ac1b7..3e90617 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-site.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-site.xml
@@ -190,6 +190,14 @@
     </description>
   </property>
   <property>
+    <name>hbase.hregion.max.filesize</name>
+    <value>4294967296</value>
+    <description>
+      Maximum HFile size. If the sum of the sizes of a region's HFiles has grown
+      to exceed this value, the region is split in two. Default is 10Gb.
+    </description>
+  </property>
+  <property>
     <name>hbase.hregion.memstore.block.multiplier</name>
     <value>4</value>
     <description>
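
For context (again, not part of the commit): the description text quotes the upstream HBase default of 10 GB, while the value shipped here for the AMS-embedded HBase instance is 4294967296 bytes, i.e. 4 GiB. A quick arithmetic check:

public class RegionSizeCheck {
  public static void main(String[] args) {
    // 4 * 1024^3 bytes is the hbase.hregion.max.filesize value set above.
    long fourGiB = 4L * 1024 * 1024 * 1024;
    System.out.println(fourGiB);                  // 4294967296
    System.out.println(fourGiB == 4294967296L);   // true
  }
}
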
http://git-wip-us.apache.org/repos/asf/ambari/blob/4fe479b0/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
index 068ffe6..5164f8f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
@@ -693,6 +693,62 @@ public class UpgradeCatalog213Test {
   }

   @Test
+  public void testAmsHbaseSiteUpdateConfigs() throws Exception {
+
+    Map<String, String> oldPropertiesAmsHbaseSite = new HashMap<String, String>() {
+      {
+        // Including only those properties that might be present in an older version.
+        put("zookeeper.session.timeout.localHBaseCluster", String.valueOf(20000));
+      }
+    };
+    Map<String, String> newPropertiesAmsSite = new HashMap<String, String>() {
+      {
+        put("zookeeper.session.timeout.localHBaseCluster", String.valueOf(120000));
+      }
+    };
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+
+    Clusters clusters = easyMockSupport.createNiceMock(Clusters.class);
+    final Cluster cluster = easyMockSupport.createNiceMock(Cluster.class);
+    Config mockAmsHbaseSite = easyMockSupport.createNiceMock(Config.class);
+
+    expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", cluster);
+    }}).once();
+    expect(cluster.getDesiredConfigByType("ams-hbase-site")).andReturn(mockAmsHbaseSite).atLeastOnce();
+    expect(mockAmsHbaseSite.getProperties()).andReturn(oldPropertiesAmsHbaseSite).atLeastOnce();
+
+    Injector injector = easyMockSupport.createNiceMock(Injector.class);
+    expect(injector.getInstance(Gson.class)).andReturn(null).anyTimes();
+    expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(null).anyTimes();
+    expect(injector.getInstance(KerberosHelper.class)).andReturn(createNiceMock(KerberosHelper.class)).anyTimes();
+
+    replay(injector, clusters, mockAmsHbaseSite, cluster);
+
+    AmbariManagementControllerImpl controller = createMockBuilder(AmbariManagementControllerImpl.class)
+      .addMockedMethod("createConfiguration")
+      .addMockedMethod("getClusters", new Class[] { })
+      .withConstructor(createNiceMock(ActionManager.class), clusters, injector)
+      .createNiceMock();
+
+    Injector injector2 = easyMockSupport.createNiceMock(Injector.class);
+    Capture<ConfigurationRequest> configurationRequestCapture = EasyMock.newCapture();
+    ConfigurationResponse configurationResponseMock = easyMockSupport.createMock(ConfigurationResponse.class);
+
+    expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
+    expect(controller.getClusters()).andReturn(clusters).anyTimes();
+    expect(controller.createConfiguration(capture(configurationRequestCapture))).andReturn(configurationResponseMock).once();
+
+    replay(controller, injector2, configurationResponseMock);
+    new UpgradeCatalog213(injector2).updateAMSConfigs();
+    easyMockSupport.verifyAll();
+
+    ConfigurationRequest configurationRequest = configurationRequestCapture.getValue();
+    Map<String, String> updatedProperties = configurationRequest.getProperties();
+    assertTrue(Maps.difference(newPropertiesAmsSite, updatedProperties).areEqual());
+  }
+
+  @Test
   public void testUpdateAlertDefinitions() {
     EasyMockSupport easyMockSupport = new EasyMockSupport();
     UpgradeCatalog213 upgradeCatalog213 = new UpgradeCatalog213(injector);
@@ -835,7 +891,7 @@ public class UpgradeCatalog213Test {
         Map.class, boolean.class, boolean.class)
       .createMock();
     upgradeCatalog213.updateConfigurationPropertiesForCluster(mockClusterExpected,
-      "kafka-env", updates, true, false);
+        "kafka-env", updates, true, false);
     expectLastCall().once();

     expect(mockAmbariManagementController.createConfiguration(EasyMock.anyObject())).andReturn(mockConfigurationResponse);
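
The new testAmsHbaseSiteUpdateConfigs() drives updateAMSConfigs() against mocks and asserts on the ConfigurationRequest captured from createConfiguration(). The following is a distilled, self-contained illustration of that EasyMock capture-and-verify pattern using plain EasyMock only; the ConfigService interface is hypothetical and none of the Ambari types are involved.

import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.newCapture;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;

import java.util.Collections;
import java.util.Map;

import org.easymock.Capture;

public class CapturePatternSketch {

  /** Hypothetical stand-in for the controller whose argument we want to inspect. */
  interface ConfigService {
    boolean createConfiguration(Map<String, String> properties);
  }

  public static void main(String[] args) {
    ConfigService service = createMock(ConfigService.class);

    // Capture whatever map the code under test passes to createConfiguration().
    Capture<Map<String, String>> captured = newCapture();
    expect(service.createConfiguration(capture(captured))).andReturn(true).once();
    replay(service);

    // "Code under test": pushes the migrated property to the service.
    service.createConfiguration(
        Collections.singletonMap("zookeeper.session.timeout.localHBaseCluster", "120000"));

    verify(service);
    // The captured argument can now be asserted on, as the test does with Maps.difference().
    System.out.println(captured.getValue());
  }
}
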