ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From d...@apache.org
Subject ambari git commit: AMBARI-16796 Required empty config is present after Upgrade from 2.1.1 to 2.4.0.0 [HDFS: keyserver_port] (dsen)
Date Fri, 20 May 2016 17:22:33 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 776c63b69 -> f1acbe381


AMBARI-16796 Required empty config is present after Upgrade from 2.1.1 to 2.4.0.0 [HDFS: keyserver_port]
(dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f1acbe38
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f1acbe38
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f1acbe38

Branch: refs/heads/trunk
Commit: f1acbe38110dd0d236db14b729830a06d4fd2738
Parents: 776c63b
Author: Dmytro Sen <dsen@apache.org>
Authored: Fri May 20 20:22:23 2016 +0300
Committer: Dmytro Sen <dsen@apache.org>
Committed: Fri May 20 20:22:23 2016 +0300

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog240.java       |  7 +++++
 .../server/upgrade/UpgradeCatalog240Test.java   | 29 ++++++++++++++++----
 2 files changed, 31 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f1acbe38/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
index 729b577..fc72950 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
@@ -1573,6 +1573,13 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
                updateConfigurationProperties("hdfs-site", Collections.singletonMap("dfs.internal.nameservices", nameservices), true, false);
               }
             }
+            Config hadoopEnv = cluster.getDesiredConfigByType("hadoop-env");
+            if (hadoopEnv != null) {
+              String keyServerPort = hadoopEnv.getProperties().get("keyserver_port");
+              if (null != keyServerPort && " ".equals(keyServerPort)) {
+                updateConfigurationProperties("hadoop-env", Collections.singletonMap("keyserver_port", ""), true, false);
+              }
+            }
           }
         }
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f1acbe38/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
index 20fa50f..479a24c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
@@ -693,6 +693,16 @@ public class UpgradeCatalog240Test {
         put("dfs.internal.nameservices", "nnha");
       }
     };
+    Map<String, String> oldPropertiesHadoopEnv = new HashMap<String, String>() {
+      {
+        put("keyserver_port", " ");
+      }
+    };
+    Map<String, String> newPropertiesHadoopEnv = new HashMap<String, String>() {
+      {
+        put("keyserver_port", "");
+      }
+    };
     EasyMockSupport easyMockSupport = new EasyMockSupport();
 
     Clusters clusters = easyMockSupport.createNiceMock(Clusters.class);
@@ -700,12 +710,15 @@ public class UpgradeCatalog240Test {
     final Service service = createStrictMock(Service.class);
     final Map<String, Service> services = Collections.singletonMap("HDFS", service);
     Config mockHdfsSite = easyMockSupport.createNiceMock(Config.class);
+    Config mockHadoopEnv = easyMockSupport.createNiceMock(Config.class);
 
     expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
       put("normal", cluster);
     }}).anyTimes();
     expect(cluster.getDesiredConfigByType("hdfs-site")).andReturn(mockHdfsSite).atLeastOnce();
+    expect(cluster.getDesiredConfigByType("hadoop-env")).andReturn(mockHadoopEnv).atLeastOnce();
     expect(mockHdfsSite.getProperties()).andReturn(oldPropertiesHdfsSite).anyTimes();
+    expect(mockHadoopEnv.getProperties()).andReturn(oldPropertiesHadoopEnv).anyTimes();
     expect(cluster.getServices()).andReturn(services).once();
 
     Injector injector = easyMockSupport.createNiceMock(Injector.class);
@@ -713,7 +726,7 @@ public class UpgradeCatalog240Test {
     expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(null).anyTimes();
     expect(injector.getInstance(KerberosHelper.class)).andReturn(createNiceMock(KerberosHelper.class)).anyTimes();
 
-    replay(injector, clusters, mockHdfsSite, cluster);
+    replay(injector, clusters, mockHdfsSite, mockHadoopEnv, cluster);
 
     AmbariManagementControllerImpl controller = createMockBuilder(AmbariManagementControllerImpl.class)
         .addMockedMethod("createConfiguration")
@@ -723,19 +736,25 @@ public class UpgradeCatalog240Test {
         .createNiceMock();
 
     Injector injector2 = easyMockSupport.createNiceMock(Injector.class);
-    Capture<Map> propertiesCapture = EasyMock.newCapture();
+    Capture<Map> propertiesCaptureHdfsSite = EasyMock.newCapture();
+    Capture<Map> propertiesCaptureHadoopEnv = EasyMock.newCapture();
 
     expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
     expect(controller.getClusters()).andReturn(clusters).anyTimes();
-    expect(controller.createConfig(anyObject(Cluster.class), anyString(), capture(propertiesCapture), anyString(),
+    expect(controller.createConfig(anyObject(Cluster.class), eq("hdfs-site"), capture(propertiesCaptureHdfsSite), anyString(),
                                    anyObject(Map.class))).andReturn(createNiceMock(Config.class)).once();
+    expect(controller.createConfig(anyObject(Cluster.class), eq("hadoop-env"), capture(propertiesCaptureHadoopEnv), anyString(),
+        anyObject(Map.class))).andReturn(createNiceMock(Config.class)).once();
 
     replay(controller, injector2);
     new UpgradeCatalog240(injector2).updateHDFSConfigs();
     easyMockSupport.verifyAll();
 
-    Map<String, String> updatedProperties = propertiesCapture.getValue();
-    assertTrue(Maps.difference(newPropertiesHdfsSite, updatedProperties).areEqual());
+    Map<String, String> updatedPropertiesHdfsSite = propertiesCaptureHdfsSite.getValue();
+    assertTrue(Maps.difference(newPropertiesHdfsSite, updatedPropertiesHdfsSite).areEqual());
+
+    Map<String, String> updatedPropertiesHadoopEnv = propertiesCaptureHadoopEnv.getValue();
+    assertTrue(Maps.difference(newPropertiesHadoopEnv, updatedPropertiesHadoopEnv).areEqual());
   }
 
   @Test


Mime
View raw message