ambari-commits mailing list archives

From dmitriu...@apache.org
Subject ambari git commit: AMBARI-15968. HBase masters go offline after kerberization on 2.2.9.0 (dgrinenko via dlysnichenko)
Date Tue, 19 Apr 2016 12:33:20 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk c8a2da207 -> 3d2ba98fd


AMBARI-15968. HBase masters go offline after kerberization on 2.2.9.0 (dgrinenko via dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3d2ba98f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3d2ba98f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3d2ba98f

Branch: refs/heads/trunk
Commit: 3d2ba98fd3cdff011333dafbc8c460552bc20651
Parents: c8a2da2
Author: Lisnichenko Dmitro <dlysnichenko@hortonworks.com>
Authored: Tue Apr 19 15:33:27 2016 +0300
Committer: Lisnichenko Dmitro <dlysnichenko@hortonworks.com>
Committed: Tue Apr 19 15:33:27 2016 +0300

----------------------------------------------------------------------
 .../AbstractPrepareKerberosServerAction.java    |   4 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |  16 --
 ...AbstractPrepareKerberosServerActionTest.java | 181 +++++++++++++++++++
 3 files changed, 182 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3d2ba98f/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
index 793ff6b..0dbd357 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java
@@ -101,7 +101,7 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
       }
 
       try {
-        Set<String> services = new HashSet<String>();
+        Set<String> services = cluster.getServices().keySet();
         Map<String, Set<String>> propertiesToIgnore = null;
 
         try {
@@ -141,8 +141,6 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer
                 propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore);
               }
             }
-
-            services.add(serviceName);
           }
 
           // Add ambari-server principal (and keytab) only if 'kerberos-env.create_ambari_principal = true'

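The net effect of the hunk above is that the set of service names handed to KerberosHelper.applyStackAdvisorUpdates now comes from cluster.getServices().keySet() instead of being accumulated only for services that contribute Kerberos identities. A minimal, self-contained sketch of the two behaviors follows; ClusterLike and SchLike are hypothetical stand-ins for the Ambari types, not the real API:

import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Sketch only (not Ambari code): contrasts the old and new way of building the
// service set that AbstractPrepareKerberosServerAction passes to the stack advisor.
public class ServicesForStackAdvisorSketch {

  interface ClusterLike {
    Map<String, Object> getServices();   // keyed by service name, like Cluster.getServices()
  }

  interface SchLike {
    String getServiceName();
    boolean hasKerberosDescriptor();     // stand-in for kerberosDescriptor.getService(name) != null
  }

  // Old behavior: only services whose components produced Kerberos identities were added,
  // so services without a Kerberos descriptor never reached applyStackAdvisorUpdates.
  static Set<String> servicesBefore(Iterable<SchLike> serviceComponentHosts) {
    Set<String> services = new HashSet<String>();
    for (SchLike sch : serviceComponentHosts) {
      if (sch.hasKerberosDescriptor()) {
        services.add(sch.getServiceName());
      }
    }
    return services;
  }

  // New behavior: every installed service is included, so stack advisor recommendations
  // (for example the hbase-site coprocessor properties) are applied cluster-wide.
  static Set<String> servicesAfter(ClusterLike cluster) {
    return cluster.getServices().keySet();
  }
}

The unit test added below exercises exactly this difference: ZOOKEEPER has no Kerberos descriptor in the test setup, yet the captured set passed to applyStackAdvisorUpdates is expected to equal the full cluster.getServices() key set.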
http://git-wip-us.apache.org/repos/asf/ambari/blob/3d2ba98f/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
index ff9b638..621feb6 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
@@ -347,19 +347,3 @@ if has_ranger_admin:
   #For SQLA explicitly disable audit to DB for Ranger
   if xa_audit_db_flavor == 'sqla':
     xa_audit_db_is_enabled = False
-
-# Used to dynamically set the hbase-site props that are referenced during Kerberization
-if security_enabled:
-  if not enable_ranger_hbase: # Default props, no ranger plugin
-    hbase_coprocessor_master_classes = "org.apache.hadoop.hbase.security.access.AccessController"
-    hbase_coprocessor_regionserver_classes = "org.apache.hadoop.hbase.security.access.AccessController"
-    hbase_coprocessor_region_classes = "org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,org.apache.hadoop.hbase.security.access.AccessController"
-  elif xml_configurations_supported:  # stack version  ranger plugin enabled
-    hbase_coprocessor_master_classes = "org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor
"
-    hbase_coprocessor_regionserver_classes = "org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor"
-    hbase_coprocessor_region_classes = "org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor"
-  else:
-    hbase_coprocessor_master_classes = "com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor"
-    hbase_coprocessor_regionserver_classes = "com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor"
-    hbase_coprocessor_region_classes = "org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor"
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/3d2ba98f/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
new file mode 100644
index 0000000..99076bf
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java
@@ -0,0 +1,181 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.serveraction.kerberos;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentMap;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.audit.AuditLogger;
+import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.easymock.Capture;
+import org.easymock.CaptureType;
+import org.easymock.EasyMock;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+
+import static org.easymock.EasyMock.anyBoolean;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.anyString;
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+
+public class AbstractPrepareKerberosServerActionTest {
+  private class PrepareKerberosServerAction extends AbstractPrepareKerberosServerAction {
+
+    @Override
+    public CommandReport execute(ConcurrentMap<String, Object> requestSharedDataContext) throws AmbariException, InterruptedException {
+      return null;
+    }
+  }
+
+  private Injector injector;
+  private final PrepareKerberosServerAction prepareKerberosServerAction = new PrepareKerberosServerAction();
+
+  private final AuditLogger auditLogger = EasyMock.createNiceMock(AuditLogger.class);
+  private final Clusters clusters = EasyMock.createNiceMock(Clusters.class);
+  private final KerberosHelper kerberosHelper = EasyMock.createNiceMock(KerberosHelper.class);
+  private final KerberosIdentityDataFileWriterFactory kerberosIdentityDataFileWriterFactory = EasyMock.createNiceMock(KerberosIdentityDataFileWriterFactory.class);
+
+  @Before
+  public void setUp() throws Exception {
+    injector = Guice.createInjector(new AbstractModule() {
+      @Override
+      protected void configure() {
+        bind(KerberosHelper.class).toInstance(kerberosHelper);
+        bind(KerberosIdentityDataFileWriterFactory.class).toInstance(kerberosIdentityDataFileWriterFactory);
+        bind(Clusters.class).toInstance(clusters);
+        bind(AuditLogger.class).toInstance(auditLogger);
+      }
+    });
+
+    injector.injectMembers(prepareKerberosServerAction);
+  }
+
+  /**
+   * Checks that {@code KerberosHelper.applyStackAdvisorUpdates} is called with the
+   * full list of cluster services, not only the services that have a KerberosDescriptor.
+   * In this test the HDFS service has a KerberosDescriptor, while ZooKeeper does not.
+   * @throws Exception
+   */
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testProcessServiceComponentHosts() throws Exception {
+    final Cluster cluster =  EasyMock.createNiceMock(Cluster.class);
+    final KerberosIdentityDataFileWriter kerberosIdentityDataFileWriter = EasyMock.createNiceMock(KerberosIdentityDataFileWriter.class);
+    final KerberosDescriptor kerberosDescriptor = EasyMock.createNiceMock(KerberosDescriptor.class);
+    final ServiceComponentHost serviceComponentHostHDFS = EasyMock.createNiceMock(ServiceComponentHost.class);
+    final ServiceComponentHost serviceComponentHostZK = EasyMock.createNiceMock(ServiceComponentHost.class);
+    final KerberosServiceDescriptor serviceDescriptor = EasyMock.createNiceMock(KerberosServiceDescriptor.class);
+    final KerberosComponentDescriptor componentDescriptor = EasyMock.createNiceMock(KerberosComponentDescriptor.class);
+
+    final String hdfsService = "HDFS";
+    final String zookeeperService = "ZOOKEEPER";
+    final String hostName = "host1";
+    final String hdfsComponent = "DATANODE";
+    final String zkComponent = "ZK";
+
+    Collection<String> identityFilter = new ArrayList<>();
+    Map<String, Map<String, String>> kerberosConfigurations = new HashMap<>();
+    Map<String, Map<String, String>> propertiesToInsert = new HashMap<>();
+    Map<String, Set<String>> propertiesToRemove = new HashMap<>();
+    Map<String, String> descriptorProperties = new HashMap<>();
+    Map<String, Map<String, String>> configurations = new HashMap<>();
+
+    List<ServiceComponentHost> serviceComponentHosts = new ArrayList<ServiceComponentHost>(){{
+      add(serviceComponentHostHDFS);
+      add(serviceComponentHostZK);
+    }};
+    Map<String, Service> clusterServices = new HashMap<String, Service>(){{
+      put(hdfsService, null);
+      put(zookeeperService, null);
+    }};
+
+    Capture<Set<String>> serviceCapture = Capture.newInstance(CaptureType.LAST);
+
+    expect(kerberosDescriptor.getProperties()).andReturn(descriptorProperties).atLeastOnce();
+    expect(kerberosHelper.calculateConfigurations((Cluster)anyObject(), anyString(), (Map<String,String>)anyObject())).andReturn(configurations).atLeastOnce();
+    expect(kerberosIdentityDataFileWriterFactory.createKerberosIdentityDataFileWriter((File)anyObject())).andReturn(kerberosIdentityDataFileWriter);
+    // it's important to pass a copy of clusterServices
+    expect(cluster.getServices()).andReturn(new HashMap<>(clusterServices)).atLeastOnce();
+
+    expect(serviceComponentHostHDFS.getHostName()).andReturn(hostName).atLeastOnce();
+    expect(serviceComponentHostHDFS.getServiceName()).andReturn(hdfsService).atLeastOnce();
+    expect(serviceComponentHostHDFS.getServiceComponentName()).andReturn(hdfsComponent).atLeastOnce();
+
+    expect(serviceComponentHostZK.getHostName()).andReturn(hostName).atLeastOnce();
+    expect(serviceComponentHostZK.getServiceName()).andReturn(zookeeperService).atLeastOnce();
+    expect(serviceComponentHostZK.getServiceComponentName()).andReturn(zkComponent).atLeastOnce();
+
+    expect(kerberosDescriptor.getService(hdfsService)).andReturn(serviceDescriptor).once();
+
+    expect(serviceDescriptor.getComponent(hdfsComponent)).andReturn(componentDescriptor).once();
+    expect(componentDescriptor.getConfigurations(anyBoolean())).andReturn(null);
+
+    expect(kerberosHelper.applyStackAdvisorUpdates(
+      (Cluster)anyObject(),
+      capture(serviceCapture),
+      (Map<String, Map<String, String>>)anyObject(),
+      (Map<String, Map<String, String>>)anyObject(),
+      (Map<String, Set<String>>)anyObject(),
+      (Map<String, Map<String, String>>)anyObject(),
+      (Map<String, Set<String>>)anyObject(),
+      anyBoolean())).andReturn(null).atLeastOnce();
+
+    replay(kerberosDescriptor, kerberosHelper, kerberosIdentityDataFileWriterFactory,
+      cluster, serviceComponentHostHDFS, serviceComponentHostZK, serviceDescriptor, componentDescriptor);
+
+    prepareKerberosServerAction.processServiceComponentHosts(cluster,
+      kerberosDescriptor,
+      serviceComponentHosts,
+      identityFilter,
+      "",
+      kerberosConfigurations,
+      propertiesToInsert,
+      propertiesToRemove,
+      false, false);
+
+    verify(kerberosHelper);
+
+    Set<String> resultServices = serviceCapture.getValue();
+    Assert.assertEquals(clusterServices.keySet(), resultServices);
+  }
+
+}

