ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From d...@apache.org
Subject ambari git commit: AMBARI-13813 After kerberization, Kafka brokers fail to start (dsen)
Date Wed, 11 Nov 2015 12:55:30 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.1 65793f66a -> b83506ed3


AMBARI-13813 After kerberization, Kafka brokers fail to start (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b83506ed
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b83506ed
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b83506ed

Branch: refs/heads/branch-2.1
Commit: b83506ed33a8cb9726dd56ed345fffd90c3986ad
Parents: 65793f6
Author: Dmytro Sen <dsen@apache.org>
Authored: Wed Nov 11 14:44:36 2015 +0200
Committer: Dmytro Sen <dsen@apache.org>
Committed: Wed Nov 11 14:55:21 2015 +0200

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog213.java       | 18 +++++++++++++-
 .../server/upgrade/UpgradeCatalog213Test.java   | 26 ++++++++++++++++++++
 2 files changed, 43 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b83506ed/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
index d8ddb3d..d05a02c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
@@ -84,6 +84,8 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
   private static final String STORM_SITE = "storm-site";
   private static final String HDFS_SITE_CONFIG = "hdfs-site";
   private static final String KAFKA_BROKER = "kafka-broker";
+  private static final String KAFKA_ENV_CONFIG = "kafka-env";
+  private static final String KAFKA_ENV_CONTENT_KERBEROS_PARAMS = "export KAFKA_KERBEROS_PARAMS={{kafka_kerberos_params}}";
   private static final String AMS_ENV = "ams-env";
   private static final String AMS_HBASE_ENV = "ams-hbase-env";
   private static final String AMS_SITE = "ams-site";
@@ -919,7 +921,7 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
       Map<String, Cluster> clusterMap = clusters.getClusters();
       if (clusterMap != null && !clusterMap.isEmpty()) {
         for (final Cluster cluster : clusterMap.values()) {
-          Set<String> installedServices =cluster.getServices().keySet();
+          Set<String> installedServices = cluster.getServices().keySet();
           Config kafkaBroker = cluster.getDesiredConfigByType(KAFKA_BROKER);
           if (kafkaBroker != null) {
             Map<String, String> newProperties = new HashMap<>();
@@ -941,6 +943,20 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
              updateConfigurationPropertiesForCluster(cluster, KAFKA_BROKER, newProperties, true, true);
             }
           }
+
+          StackId stackId = cluster.getCurrentStackVersion();
+          if (stackId != null && stackId.getStackName().equals("HDP") &&
+              VersionUtils.compareVersions(stackId.getStackVersion(), "2.3") >= 0) {
+            Config kafkaEnv = cluster.getDesiredConfigByType(KAFKA_ENV_CONFIG);
+            if (kafkaEnv != null) {
+              String kafkaEnvContent = kafkaEnv.getProperties().get(CONTENT_PROPERTY);
+              if (kafkaEnvContent != null && !kafkaEnvContent.contains(KAFKA_ENV_CONTENT_KERBEROS_PARAMS)) {
+                kafkaEnvContent += "\n\nexport KAFKA_KERBEROS_PARAMS=\"$KAFKA_KERBEROS_PARAMS {{kafka_kerberos_params}}\"";
+                Map<String, String> updates = Collections.singletonMap(CONTENT_PROPERTY, kafkaEnvContent);
+                updateConfigurationPropertiesForCluster(cluster, KAFKA_ENV_CONFIG, updates, true, false);
+              }
+            }
+          }
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b83506ed/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
index 16d38eb..748d7c7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
@@ -874,10 +874,19 @@ public class UpgradeCatalog213Test {
   public void testUpdateKafkaConfigs() throws Exception {
     EasyMockSupport easyMockSupport = new EasyMockSupport();
     final AmbariManagementController mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);
+    final ConfigurationResponse mockConfigurationResponse = easyMockSupport.createMock(ConfigurationResponse.class);
     final ConfigHelper mockConfigHelper = easyMockSupport.createMock(ConfigHelper.class);
 
     final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
     final Cluster mockClusterExpected = easyMockSupport.createNiceMock(Cluster.class);
+
+    final Map<String, String> propertiesKafkaEnv = new HashMap<String, String>() {
+      {
+        put("content", "test");
+      }
+    };
+    Map<String, String> updates = Collections.singletonMap("content", "test\n\nexport KAFKA_KERBEROS_PARAMS=\"$KAFKA_KERBEROS_PARAMS {{kafka_kerberos_params}}");
+
     final Map<String, String> propertiesAmsEnv = new HashMap<String, String>() {
       {
         put("kafka.metrics.reporters", "{{kafka_metrics_reporters}}");
@@ -891,6 +900,7 @@ public class UpgradeCatalog213Test {
     };
 
     final Config mockAmsEnv = easyMockSupport.createNiceMock(Config.class);
+    final Config mockKafkaEnv = easyMockSupport.createNiceMock(Config.class);
 
     final Injector mockInjector = Guice.createInjector(new AbstractModule() {
       @Override
@@ -914,6 +924,22 @@ public class UpgradeCatalog213Test {
     expect(mockClusterExpected.getDesiredConfigByType("kafka-broker")).andReturn(mockAmsEnv).atLeastOnce();
     expect(mockAmsEnv.getProperties()).andReturn(propertiesAmsEnv).atLeastOnce();
 
+    expect(mockClusterExpected.getCurrentStackVersion()).andReturn(new StackId("HDP", "2.3"));
+    expect(mockClusterExpected.getDesiredConfigByType("kafka-env")).andReturn(mockKafkaEnv).atLeastOnce();
+    expect(mockKafkaEnv.getProperties()).andReturn(propertiesKafkaEnv).atLeastOnce();
+
+    UpgradeCatalog213 upgradeCatalog213 = createMockBuilder(UpgradeCatalog213.class)
+        .withConstructor(Injector.class)
+        .withArgs(mockInjector)
+        .addMockedMethod("updateConfigurationPropertiesForCluster", Cluster.class, String.class,
+            Map.class, boolean.class, boolean.class)
+        .createMock();
+    upgradeCatalog213.updateConfigurationPropertiesForCluster(mockClusterExpected,
+        "kafka-env", updates, true, false);
+    expectLastCall().once();
+
+    expect(mockAmbariManagementController.createConfiguration(EasyMock.<ConfigurationRequest>anyObject())).andReturn(mockConfigurationResponse);
+
     easyMockSupport.replayAll();
     mockInjector.getInstance(UpgradeCatalog213.class).updateKafkaConfigs();
     easyMockSupport.verifyAll();


Mime
View raw message