ambari-commits mailing list archives

From abaranc...@apache.org
Subject ambari git commit: AMBARI-12259 - Configs: making strange recommendations + changes on upgrade
Date Thu, 02 Jul 2015 17:00:36 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.1 80204f8a8 -> 54df4ff58


AMBARI-12259 - Configs: making strange recommendations + changes on upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/54df4ff5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/54df4ff5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/54df4ff5

Branch: refs/heads/branch-2.1
Commit: 54df4ff580c242bad2afc6fa2418388cfaf9a471
Parents: 80204f8
Author: Artem Baranchuk <abaranchuk@hortonworks.com>
Authored: Thu Jul 2 20:00:13 2015 +0300
Committer: Artem Baranchuk <abaranchuk@hortonworks.com>
Committed: Thu Jul 2 20:00:13 2015 +0300

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog210.java       | 90 +++++++++++++++++++-
 .../server/upgrade/UpgradeCatalog210Test.java   | 15 +++-
 2 files changed, 99 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
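
In outline, the change rewrites updateHBaseConfigs() so that new values and obsolete keys are staged in separate add/remove collections and then applied in two passes through updateConfigurationPropertiesForCluster(): one pass that writes properties and one that deletes them. A condensed, runnable sketch of the staging logic for hbase-site (the hbaseSiteProps/hbaseSiteRemoveProps names follow the diff; the standalone class and the hard-coded inputs are illustrative):

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    public class HBaseUpgradeStagingSketch {
        public static void main(String[] args) {
            // Stand-ins for values read from the cluster's desired configs.
            boolean phoenixSqlEnabled = true;       // hbase-env: phoenix_sql_enabled
            String securityAuthorization = "true";  // hbase-site: hbase.security.authorization (may be absent)

            Map<String, String> hbaseSiteProps = new HashMap<String, String>();
            Set<String> hbaseSiteRemoveProps = new HashSet<String>();

            // Phoenix on: switch to the indexed WAL codec and allow UDFs;
            // Phoenix off: plain codec, and drop the Phoenix-specific keys.
            if (phoenixSqlEnabled) {
                hbaseSiteProps.put("hbase.regionserver.wal.codec",
                    "org.apache.hadoop.hbase.regionserver.wal.IndexedWALEditCodec");
                hbaseSiteProps.put("phoenix.functions.allowUserDefinedFunctions", "true");
            } else {
                hbaseSiteProps.put("hbase.regionserver.wal.codec",
                    "org.apache.hadoop.hbase.regionserver.wal.WALCellCodec");
                hbaseSiteRemoveProps.add("hbase.rpc.controllerfactory.class");
                hbaseSiteRemoveProps.add("phoenix.functions.allowUserDefinedFunctions");
            }

            // Coprocessors follow hbase.security.authorization; an absent key
            // and an explicit "false" are handled differently, as in the diff.
            if (securityAuthorization != null) {
                if ("true".equalsIgnoreCase(securityAuthorization)) {
                    hbaseSiteProps.put("hbase.coprocessor.master.classes",
                        "org.apache.hadoop.hbase.security.access.AccessController");
                    hbaseSiteProps.put("hbase.coprocessor.regionserver.classes",
                        "org.apache.hadoop.hbase.security.access.AccessController");
                } else {
                    hbaseSiteProps.put("hbase.coprocessor.master.classes", "");
                    hbaseSiteRemoveProps.add("hbase.coprocessor.regionserver.classes");
                }
            } else {
                hbaseSiteRemoveProps.add("hbase.coprocessor.regionserver.classes");
            }

            System.out.println("set:    " + hbaseSiteProps);
            System.out.println("remove: " + hbaseSiteRemoveProps);
        }
    }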


http://git-wip-us.apache.org/repos/asf/ambari/blob/54df4ff5/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
index e59687d..b0480fa 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
@@ -42,6 +42,7 @@ import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.alert.AlertDefinitionFactory;
@@ -1314,6 +1315,21 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
     }
   }
 
+  private int getHbaseRamRecomendations(int totalMem) {
+    if (totalMem <= 4) return 1;
+    if (4 < totalMem && totalMem <= 8) return 1;
+    if (8 < totalMem && totalMem <= 16) return 2;
+    if (16 < totalMem && totalMem <= 24) return 4;
+    if (24 < totalMem && totalMem <= 48) return 8;
+    if (48 < totalMem && totalMem <= 64) return 8;
+    if (64 < totalMem && totalMem <= 72) return 8;
+    if (72 < totalMem && totalMem <= 96) return 16;
+    if (96 < totalMem && totalMem <= 128) return 24;
+    if (128 < totalMem && totalMem <= 256) return 32;
+    if (256 < totalMem) return 64;
+    return -1;
+  }
+
   protected void updateHiveConfigs() throws AmbariException {
     AmbariManagementController ambariManagementController = injector.getInstance(
             AmbariManagementController.class);
@@ -1359,14 +1375,80 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
         for (final Cluster cluster : clusterMap.values()) {
           if (cluster.getDesiredConfigByType("hbase-site") != null) {
             Map<String, String> hbaseEnvProps = new HashMap<String, String>();
-            Map<String, String> hbaseSiteProps = cluster.getDesiredConfigByType("hbase-site").getProperties();
+            Map<String, String> hbaseSiteProps = new HashMap<String, String>();
+            Set<String> hbaseEnvRemoveProps = new HashSet<String>();
+            Set<String> hbaseSiteRemoveProps = new HashSet<String>();
 
-            if (hbaseSiteProps.containsKey("hbase.region.server.rpc.scheduler.factory.class") &&
-                "org.apache.phoenix.hbase.index.ipc.PhoenixIndexRpcSchedulerFactory".equals(hbaseSiteProps.get(
-                    "hbase.region.server.rpc.scheduler.factory.class"))) {
+            if (cluster.getDesiredConfigByType("hbase-site").getProperties().containsKey("hbase.region.server.rpc.scheduler.factory.class") &&
+                "org.apache.phoenix.hbase.index.ipc.PhoenixIndexRpcSchedulerFactory".equals(cluster.getDesiredConfigByType("hbase-site").getProperties().get(
+                        "hbase.region.server.rpc.scheduler.factory.class"))) {
               hbaseEnvProps.put("phoenix_sql_enabled", "true");
             }
+
+            if (cluster.getDesiredConfigByType("hbase-env").getProperties().containsKey("phoenix_sql_enabled") &&
+            "true".equalsIgnoreCase(cluster.getDesiredConfigByType("hbase-env").getProperties().get("phoenix_sql_enabled"))) {
+              hbaseSiteProps.put("hbase.regionserver.wal.codec", "org.apache.hadoop.hbase.regionserver.wal.IndexedWALEditCodec");
+              hbaseSiteProps.put("phoenix.functions.allowUserDefinedFunctions", "true");
+            }
+            else {
+              hbaseSiteProps.put("hbase.regionserver.wal.codec", "org.apache.hadoop.hbase.regionserver.wal.WALCellCodec");
+              hbaseSiteRemoveProps.add("hbase.rpc.controllerfactory.class");
+              hbaseSiteRemoveProps.add("phoenix.functions.allowUserDefinedFunctions");
+            }
+
+            if (cluster.getDesiredConfigByType("hbase-site").getProperties().containsKey("hbase.security.authorization")) {
+              if("true".equalsIgnoreCase(cluster.getDesiredConfigByType("hbase-site").getProperties().get("hbase.security.authorization"))) {
+                hbaseSiteProps.put("hbase.coprocessor.master.classes", "org.apache.hadoop.hbase.security.access.AccessController");
+                hbaseSiteProps.put("hbase.coprocessor.regionserver.classes", "org.apache.hadoop.hbase.security.access.AccessController");
+              }
+              else {
+                hbaseSiteProps.put("hbase.coprocessor.master.classes", "");
+                hbaseSiteRemoveProps.add("hbase.coprocessor.regionserver.classes");
+              }
+            }
+            else {
+              hbaseSiteRemoveProps.add("hbase.coprocessor.regionserver.classes");
+            }
+
+            int threshold = 23;
+            int totalMem = 0;
+            String hostName = cluster.getHosts("HBASE", "HBASE_MASTER").iterator().next();
+            for (Host host : cluster.getHosts()) {
+              if(host.getHostName().equalsIgnoreCase(hostName)) {
+                totalMem = (int)(host.getTotalMemBytes() / (1024 * 1024));
+                break;
+              }
+            }
+
+            if (totalMem == 0) {
+              LOG.error("UpgradeCatalog210 could not retrieve total memory size from the hosts.");
+            }
+            else {
+              if (getHbaseRamRecomendations(totalMem) > threshold) {
+                final int mb = 1024;
+                final int block_cache_heap = 8192;
+                final int regionserver_heap_size = 20480;
+                final int reserved_offheap_memory = 2048;
+                final int regionserver_total_ram = getHbaseRamRecomendations(totalMem) * mb;
+                final int regionserver_max_direct_memory_size = regionserver_total_ram - regionserver_heap_size;
+                final int bucketcache_offheap_memory = regionserver_max_direct_memory_size - reserved_offheap_memory;
+
+                hbaseSiteProps.put("hbase.bucketcache.size", block_cache_heap + bucketcache_offheap_memory + "m");
+                hbaseSiteProps.put("hbase.bucketcache.ioengine", "offheap");
+                hbaseEnvProps.put("hbase_max_direct_memory_size", String.valueOf(regionserver_max_direct_memory_size));
+              } else {
+                hbaseSiteRemoveProps.add("hbase.bucketcache.ioengine");
+                hbaseSiteRemoveProps.add("hbase.bucketcache.size");
+                hbaseSiteRemoveProps.add("hbase.bucketcache.percentage.in.combinedcache");
+                hbaseSiteRemoveProps.add("hbase_max_direct_memory_size");
+                hbaseEnvRemoveProps.add("hbase_max_direct_memory_size");
+              }
+            }
+
+            updateConfigurationPropertiesForCluster(cluster, "hbase-site", hbaseSiteProps, true, false);
             updateConfigurationPropertiesForCluster(cluster, "hbase-env", hbaseEnvProps, true, false);
+            updateConfigurationPropertiesForCluster(cluster, "hbase-site", new HashMap<String, String>(), hbaseSiteRemoveProps, false, true);
+            updateConfigurationPropertiesForCluster(cluster, "hbase-env", new HashMap<String, String>(), hbaseEnvRemoveProps, false, true);
           }
         }
       }
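
The sizing block above only takes effect on large hosts: the RAM table has to recommend more than the 23 GB threshold, which first happens for hosts above 96 GB, before the off-heap bucket cache is configured; smaller hosts get the bucket-cache keys removed instead. A self-contained sketch of the arithmetic, worked for a hypothetical 128 GB HBASE_MASTER host (the table below condenses the duplicate ranges of getHbaseRamRecomendations(); class and method names are illustrative):

    public class HBaseSizingSketch {

        // total RAM (GB) -> recommended HBase heap (GB); behaviorally the same
        // table as getHbaseRamRecomendations(), with duplicate ranges merged.
        static int recommendedHeapGb(int totalMemGb) {
            if (totalMemGb <= 8)   return 1;
            if (totalMemGb <= 16)  return 2;
            if (totalMemGb <= 24)  return 4;
            if (totalMemGb <= 72)  return 8;
            if (totalMemGb <= 96)  return 16;
            if (totalMemGb <= 128) return 24;
            if (totalMemGb <= 256) return 32;
            return 64;
        }

        public static void main(String[] args) {
            final int threshold = 23;                 // GB, as in the patch
            final int mb = 1024;
            final int blockCacheHeap = 8192;          // MB
            final int regionserverHeapSize = 20480;   // MB
            final int reservedOffheapMemory = 2048;   // MB

            int totalMemGb = 128;                     // hypothetical master host
            int heapGb = recommendedHeapGb(totalMemGb);                           // 24
            if (heapGb > threshold) {
                int totalRam = heapGb * mb;                                       // 24576 MB
                int maxDirectMemorySize = totalRam - regionserverHeapSize;        // 4096 MB
                int bucketCacheOffheap = maxDirectMemorySize - reservedOffheapMemory; // 2048 MB
                System.out.println("hbase.bucketcache.size = "
                    + (blockCacheHeap + bucketCacheOffheap) + "m");               // 10240m
                System.out.println("hbase_max_direct_memory_size = " + maxDirectMemorySize);
            } else {
                System.out.println("below threshold: bucket-cache keys removed");
            }
        }
    }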

http://git-wip-us.apache.org/repos/asf/ambari/blob/54df4ff5/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
index eb240f8..4f96ce9 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
@@ -72,6 +72,7 @@ import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.HostComponentAdminState;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.stack.OsFamily;
@@ -450,6 +451,7 @@ public class UpgradeCatalog210Test {
 
     final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
     final Cluster mockClusterExpected = easyMockSupport.createNiceMock(Cluster.class);
+    final Host mockHost = easyMockSupport.createNiceMock(Host.class);
 
     final Config mockHBaseSite = easyMockSupport.createNiceMock(Config.class);
     final Config mockHBaseEnv = easyMockSupport.createNiceMock(Config.class);
@@ -457,8 +459,11 @@ public class UpgradeCatalog210Test {
     final Map<String, String> propertiesExpectedHBaseSite = new HashMap<String, String>();
     propertiesExpectedHBaseSite.put("hbase.region.server.rpc.scheduler.factory.class",
                                     "org.apache.phoenix.hbase.index.ipc.PhoenixIndexRpcSchedulerFactory");
+    propertiesExpectedHBaseSite.put("hbase.security.authorization", "true");
+
     final Map<String, String> propertiesExpectedHBaseEnv = new HashMap<String, String>();
     propertiesExpectedHBaseEnv.put("phoenix_sql_enabled", "false");
+
     final Injector mockInjector = Guice.createInjector(new AbstractModule() {
       @Override
       protected void configure() {
@@ -481,15 +486,21 @@ public class UpgradeCatalog210Test {
     expect(mockHBaseSite.getProperties()).andReturn(propertiesExpectedHBaseSite).anyTimes();
     expect(mockHBaseEnv.getProperties()).andReturn(propertiesExpectedHBaseEnv).anyTimes();
 
+    expect(mockClusterExpected.getHosts(anyObject(String.class), anyObject(String.class))).andReturn(new HashSet<String>() {{
+      add("host_1");
+    }}).atLeastOnce();
+    expect(mockClusterExpected.getHosts()).andReturn(new HashSet<Host>(){{add(mockHost);}}).atLeastOnce();
+    expect(mockHost.getHostName()).andReturn("host_1");
+    expect(mockHost.getTotalMemBytes()).andReturn(16777216L);
+
     Capture<String> configType = new Capture<String>();
     Capture<String> configTag = new Capture<String>();
     expect(mockClusterExpected.getConfig(capture(configType), capture(configTag))).
-        andReturn(mockHBaseEnv).times(1);
+            andReturn(mockHBaseSite).atLeastOnce();
 
     easyMockSupport.replayAll();
     mockInjector.getInstance(UpgradeCatalog210.class).updateHBaseConfigs();
     easyMockSupport.verifyAll();
-    assertEquals("hbase-env", configType.getValue());
   }
 
   @Test
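
One detail worth calling out in the test above: assuming Host.getTotalMemBytes() reports plain bytes here (which is what the division in updateHBaseConfigs() implies), the stubbed 16777216L converts to 16, the RAM table maps 16 to a 2 GB recommendation, and 2 is below the 23 threshold, so the test drives the branch that removes the bucket-cache properties rather than setting them. A quick check of that arithmetic (class name illustrative):

    public class TestMemoryPathCheck {
        public static void main(String[] args) {
            long totalMemBytes = 16777216L;                        // stubbed on mockHost
            int totalMem = (int) (totalMemBytes / (1024 * 1024));  // -> 16
            // getHbaseRamRecomendations(16) returns 2; 2 > 23 is false, so the
            // upgrade removes hbase.bucketcache.* instead of configuring it.
            System.out.println("totalMem = " + totalMem);
        }
    }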

