ambari-commits mailing list archives

From smoha...@apache.org
Subject ambari git commit: AMBARI-12182. Ambari did not change Phoenix configuration during RU from HDP 2.2 to 2.3 - II (smohanty)
Date Wed, 01 Jul 2015 00:11:24 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk cde963572 -> 7f4b94d99


AMBARI-12182. Ambari did not change Phoenix configuration during RU from HDP 2.2 to 2.3 - II (smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7f4b94d9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7f4b94d9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7f4b94d9

Branch: refs/heads/trunk
Commit: 7f4b94d995e35d582961cdb106c3624420197099
Parents: cde9635
Author: Sumit Mohanty <smohanty@hortonworks.com>
Authored: Tue Jun 30 17:11:14 2015 -0700
Committer: Sumit Mohanty <smohanty@hortonworks.com>
Committed: Tue Jun 30 17:11:14 2015 -0700

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog210.java       | 28 +++++++++-
 .../server/upgrade/UpgradeCatalog210Test.java   | 56 +++++++++++++++++++-
 2 files changed, 81 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
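
The substance of the change is a single rule in UpgradeCatalog210.updateHBaseConfigs(): if a cluster's hbase-site already points hbase.region.server.rpc.scheduler.factory.class at Phoenix's PhoenixIndexRpcSchedulerFactory, the upgrade merges phoenix_sql_enabled=true into hbase-env. The following is a minimal, self-contained sketch of that rule only; the class and method names below are illustrative and are not part of the Ambari codebase.

----------------------------------------------------------------------
import java.util.HashMap;
import java.util.Map;

/** Illustrative only: distills the hbase-env override that updateHBaseConfigs() applies. */
public class PhoenixUpgradeRuleSketch {

  static final String SCHEDULER_KEY = "hbase.region.server.rpc.scheduler.factory.class";
  static final String PHOENIX_FACTORY =
      "org.apache.phoenix.hbase.index.ipc.PhoenixIndexRpcSchedulerFactory";

  /** Returns the properties the upgrade would merge into hbase-env for a given hbase-site. */
  static Map<String, String> hbaseEnvOverrides(Map<String, String> hbaseSiteProps) {
    Map<String, String> overrides = new HashMap<String, String>();
    if (PHOENIX_FACTORY.equals(hbaseSiteProps.get(SCHEDULER_KEY))) {
      overrides.put("phoenix_sql_enabled", "true");
    }
    return overrides;
  }

  public static void main(String[] args) {
    Map<String, String> hbaseSite = new HashMap<String, String>();
    hbaseSite.put(SCHEDULER_KEY, PHOENIX_FACTORY);
    // Prints {phoenix_sql_enabled=true}
    System.out.println(hbaseEnvOverrides(hbaseSite));
  }
}
----------------------------------------------------------------------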


http://git-wip-us.apache.org/repos/asf/ambari/blob/7f4b94d9/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
index cb05c83..e8e0f2d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
@@ -1197,6 +1197,7 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
     updateHdfsConfigs();
     updateStormConfigs();
     updateRangerHiveConfigs();
+    updateHBaseConfigs();
   }
 
   protected void updateRangerHiveConfigs() throws AmbariException{
@@ -1319,7 +1320,6 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
 
     if (clusters != null) {
       Map<String, Cluster> clusterMap = clusters.getClusters();
-      Map<String, String> prop = new HashMap<String, String>();
 
       if (clusterMap != null && !clusterMap.isEmpty()) {
         for (final Cluster cluster : clusterMap.values()) {
@@ -1339,6 +1339,32 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
     }
   }
 
+  protected void updateHBaseConfigs() throws AmbariException {
+    AmbariManagementController ambariManagementController = injector.getInstance(
+        AmbariManagementController.class);
+    Clusters clusters = ambariManagementController.getClusters();
+
+    if (clusters != null) {
+      Map<String, Cluster> clusterMap = clusters.getClusters();
+
+      if (clusterMap != null && !clusterMap.isEmpty()) {
+        for (final Cluster cluster : clusterMap.values()) {
+          if (cluster.getDesiredConfigByType("hbase-site") != null) {
+            Map<String, String> hbaseEnvProps = new HashMap<String, String>();
+            Map<String, String> hbaseSiteProps = cluster.getDesiredConfigByType("hbase-site").getProperties();
+
+            if (hbaseSiteProps.containsKey("hbase.region.server.rpc.scheduler.factory.class") &&
+                "org.apache.phoenix.hbase.index.ipc.PhoenixIndexRpcSchedulerFactory".equals(hbaseSiteProps.get(
+                    "hbase.region.server.rpc.scheduler.factory.class"))) {
+              hbaseEnvProps.put("phoenix_sql_enabled", "true");
+            }
+            updateConfigurationPropertiesForCluster(cluster, "hbase-env", hbaseEnvProps, true, false);
+          }
+        }
+      }
+    }
+  }
+
   protected  void updateStormConfigs() throws  AmbariException {
     AmbariManagementController ambariManagementController = injector.getInstance(
             AmbariManagementController.class);

http://git-wip-us.apache.org/repos/asf/ambari/blob/7f4b94d9/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
index ea812ab..0b1d111 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
@@ -230,7 +230,8 @@ public class UpgradeCatalog210Test {
   @Test
   public void testUpdateRangerHiveConfigs() throws Exception{
     EasyMockSupport easyMockSupport = new EasyMockSupport();
-    final AmbariManagementController  mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
+    final AmbariManagementController  mockAmbariManagementController = easyMockSupport.createNiceMock(
+        AmbariManagementController.class);
     final ConfigHelper mockConfigHelper = easyMockSupport.createMock(ConfigHelper.class);
 
     final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
@@ -319,7 +320,8 @@ public class UpgradeCatalog210Test {
   @Test
   public void testUpdateStormConfiguration() throws Exception {
     EasyMockSupport easyMockSupport = new EasyMockSupport();
-    final AmbariManagementController  mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
+    final AmbariManagementController  mockAmbariManagementController = easyMockSupport.createNiceMock(
+        AmbariManagementController.class);
     final ConfigHelper mockConfigHelper = easyMockSupport.createMock(ConfigHelper.class);
 
     final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
@@ -359,6 +361,56 @@ public class UpgradeCatalog210Test {
   }
 
   @Test
+  public void testUpdateHBaseConfiguration() throws Exception {
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+    final AmbariManagementController  mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
+    final ConfigHelper mockConfigHelper = easyMockSupport.createMock(ConfigHelper.class);
+
+    final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
+    final Cluster mockClusterExpected = easyMockSupport.createNiceMock(Cluster.class);
+
+    final Config mockHBaseSite = easyMockSupport.createNiceMock(Config.class);
+    final Config mockHBaseEnv = easyMockSupport.createNiceMock(Config.class);
+
+    final Map<String, String> propertiesExpectedHBaseSite = new HashMap<String, String>();
+    propertiesExpectedHBaseSite.put("hbase.region.server.rpc.scheduler.factory.class",
+                                    "org.apache.phoenix.hbase.index.ipc.PhoenixIndexRpcSchedulerFactory");
+    final Map<String, String> propertiesExpectedHBaseEnv = new HashMap<String, String>();
+    propertiesExpectedHBaseEnv.put("phoenix_sql_enabled", "false");
+    final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(AmbariManagementController.class).toInstance(mockAmbariManagementController);
+        bind(ConfigHelper.class).toInstance(mockConfigHelper);
+        bind(Clusters.class).toInstance(mockClusters);
+
+        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+      }
+    });
+
+    expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).once();
+    expect(mockClusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", mockClusterExpected);
+    }}).once();
+
+    expect(mockClusterExpected.getDesiredConfigByType("hbase-site")).andReturn(mockHBaseSite).atLeastOnce();
+    expect(mockClusterExpected.getDesiredConfigByType("hbase-env")).andReturn(mockHBaseEnv).atLeastOnce();
+    expect(mockHBaseSite.getProperties()).andReturn(propertiesExpectedHBaseSite).anyTimes();
+    expect(mockHBaseEnv.getProperties()).andReturn(propertiesExpectedHBaseEnv).anyTimes();
+
+    Capture<String> configType = new Capture<String>();
+    Capture<String> configTag = new Capture<String>();
+    expect(mockClusterExpected.getConfig(capture(configType), capture(configTag))).
+        andReturn(mockHBaseEnv).times(1);
+
+    easyMockSupport.replayAll();
+    mockInjector.getInstance(UpgradeCatalog210.class).updateHBaseConfigs();
+    easyMockSupport.verifyAll();
+    assertEquals("hbase-env", configType.getValue());
+  }
+
+  @Test
   public void testDeleteStormRestApiServiceComponent() throws Exception {
     ClusterEntity clusterEntity = upgradeCatalogHelper.createCluster(injector,
       "c1", desiredStackEntity);

