Return-Path: X-Original-To: apmail-ambari-commits-archive@www.apache.org Delivered-To: apmail-ambari-commits-archive@www.apache.org Received: from mail.apache.org (hermes.apache.org [140.211.11.3]) by minotaur.apache.org (Postfix) with SMTP id C6E5818520 for ; Tue, 19 Apr 2016 12:33:20 +0000 (UTC) Received: (qmail 29873 invoked by uid 500); 19 Apr 2016 12:33:20 -0000 Delivered-To: apmail-ambari-commits-archive@ambari.apache.org Received: (qmail 29841 invoked by uid 500); 19 Apr 2016 12:33:20 -0000 Mailing-List: contact commits-help@ambari.apache.org; run by ezmlm Precedence: bulk List-Help: List-Unsubscribe: List-Post: List-Id: Reply-To: ambari-dev@ambari.apache.org Delivered-To: mailing list commits@ambari.apache.org Received: (qmail 29832 invoked by uid 99); 19 Apr 2016 12:33:20 -0000 Received: from git1-us-west.apache.org (HELO git1-us-west.apache.org) (140.211.11.23) by apache.org (qpsmtpd/0.29) with ESMTP; Tue, 19 Apr 2016 12:33:20 +0000 Received: by git1-us-west.apache.org (ASF Mail Server at git1-us-west.apache.org, from userid 33) id 5A8FEDFE04; Tue, 19 Apr 2016 12:33:20 +0000 (UTC) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit From: dmitriusan@apache.org To: commits@ambari.apache.org Message-Id: X-Mailer: ASF-Git Admin Mailer Subject: ambari git commit: AMBARI-15968. HBase masters go offline after kerberization on 2.2.9.0 (dgrinenko via dlysnichenko) Date: Tue, 19 Apr 2016 12:33:20 +0000 (UTC) Repository: ambari Updated Branches: refs/heads/trunk c8a2da207 -> 3d2ba98fd AMBARI-15968. 
HBase masters go offline after kerberization on 2.2.9.0 (dgrinenko via dlysnichenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3d2ba98f Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3d2ba98f Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3d2ba98f Branch: refs/heads/trunk Commit: 3d2ba98fd3cdff011333dafbc8c460552bc20651 Parents: c8a2da2 Author: Lisnichenko Dmitro Authored: Tue Apr 19 15:33:27 2016 +0300 Committer: Lisnichenko Dmitro Committed: Tue Apr 19 15:33:27 2016 +0300 ---------------------------------------------------------------------- .../AbstractPrepareKerberosServerAction.java | 4 +- .../0.96.0.2.0/package/scripts/params_linux.py | 16 -- ...AbstractPrepareKerberosServerActionTest.java | 181 +++++++++++++++++++ 3 files changed, 182 insertions(+), 19 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/ambari/blob/3d2ba98f/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java index 793ff6b..0dbd357 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerAction.java @@ -101,7 +101,7 @@ public abstract class AbstractPrepareKerberosServerAction extends KerberosServer } try { - Set services = new HashSet(); + Set services = cluster.getServices().keySet(); Map> propertiesToIgnore = null; try { @@ -141,8 +141,6 @@ public abstract class 
AbstractPrepareKerberosServerAction extends KerberosServer propertiesToIgnore = gatherPropertiesToIgnore(componentIdentities, propertiesToIgnore); } } - - services.add(serviceName); } // Add ambari-server principal (and keytab) only if 'kerberos-env.create_ambari_principal = true' http://git-wip-us.apache.org/repos/asf/ambari/blob/3d2ba98f/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py index ff9b638..621feb6 100644 --- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py +++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py @@ -347,19 +347,3 @@ if has_ranger_admin: #For SQLA explicitly disable audit to DB for Ranger if xa_audit_db_flavor == 'sqla': xa_audit_db_is_enabled = False - -# Used to dynamically set the hbase-site props that are referenced during Kerberization -if security_enabled: - if not enable_ranger_hbase: # Default props, no ranger plugin - hbase_coprocessor_master_classes = "org.apache.hadoop.hbase.security.access.AccessController" - hbase_coprocessor_regionserver_classes = "org.apache.hadoop.hbase.security.access.AccessController" - hbase_coprocessor_region_classes = "org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,org.apache.hadoop.hbase.security.access.AccessController" - elif xml_configurations_supported: # stack version ranger plugin enabled - hbase_coprocessor_master_classes = "org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor " - hbase_coprocessor_regionserver_classes = 
"org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor" - hbase_coprocessor_region_classes = "org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor" - else: - hbase_coprocessor_master_classes = "com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor" - hbase_coprocessor_regionserver_classes = "com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor" - hbase_coprocessor_region_classes = "org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor" - http://git-wip-us.apache.org/repos/asf/ambari/blob/3d2ba98f/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java new file mode 100644 index 0000000..99076bf --- /dev/null +++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/AbstractPrepareKerberosServerActionTest.java @@ -0,0 +1,181 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ambari.server.serveraction.kerberos; + +import java.io.File; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentMap; + +import org.apache.ambari.server.AmbariException; +import org.apache.ambari.server.agent.CommandReport; +import org.apache.ambari.server.audit.AuditLogger; +import org.apache.ambari.server.controller.KerberosHelper; +import org.apache.ambari.server.state.Cluster; +import org.apache.ambari.server.state.Clusters; +import org.apache.ambari.server.state.Service; +import org.apache.ambari.server.state.ServiceComponentHost; +import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor; +import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor; +import org.apache.ambari.server.state.kerberos.KerberosDescriptor; +import org.easymock.Capture; +import org.easymock.CaptureType; +import org.easymock.EasyMock; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import com.google.inject.AbstractModule; +import com.google.inject.Guice; +import com.google.inject.Injector; + +import static org.easymock.EasyMock.anyBoolean; +import static org.easymock.EasyMock.anyObject; +import static org.easymock.EasyMock.anyString; +import static org.easymock.EasyMock.capture; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; + +public class 
AbstractPrepareKerberosServerActionTest { private class PrepareKerberosServerAction extends AbstractPrepareKerberosServerAction{ + + @Override + public CommandReport execute(ConcurrentMap requestSharedDataContext) throws AmbariException, InterruptedException { + return null; + } + } + + private Injector injector; + private final PrepareKerberosServerAction prepareKerberosServerAction = new PrepareKerberosServerAction(); + + private final AuditLogger auditLogger = EasyMock.createNiceMock(AuditLogger.class); + private final Clusters clusters = EasyMock.createNiceMock(Clusters.class); + private final KerberosHelper kerberosHelper = EasyMock.createNiceMock(KerberosHelper.class); + private final KerberosIdentityDataFileWriterFactory kerberosIdentityDataFileWriterFactory = EasyMock.createNiceMock(KerberosIdentityDataFileWriterFactory.class); + + @Before + public void setUp() throws Exception { + injector = Guice.createInjector(new AbstractModule() { + @Override + protected void configure() { + bind(KerberosHelper.class).toInstance(kerberosHelper); + bind(KerberosIdentityDataFileWriterFactory.class).toInstance(kerberosIdentityDataFileWriterFactory); + bind(Clusters.class).toInstance(clusters); + bind(AuditLogger.class).toInstance(auditLogger); + } + }); + + injector.injectMembers(prepareKerberosServerAction); + } + + /** + * Test checks that {@code KerberosHelper.applyStackAdvisorUpdates} would be called with + * full list of the services and not only list of services with KerberosDescriptor. + * In this test HDFS service will have KerberosDescriptor, while Zookeeper will not.
+ * @throws Exception + */ + @Test + @SuppressWarnings("unchecked") + public void testProcessServiceComponentHosts() throws Exception { + final Cluster cluster = EasyMock.createNiceMock(Cluster.class); + final KerberosIdentityDataFileWriter kerberosIdentityDataFileWriter = EasyMock.createNiceMock(KerberosIdentityDataFileWriter.class); + final KerberosDescriptor kerberosDescriptor = EasyMock.createNiceMock(KerberosDescriptor.class); + final ServiceComponentHost serviceComponentHostHDFS = EasyMock.createNiceMock(ServiceComponentHost.class); + final ServiceComponentHost serviceComponentHostZK = EasyMock.createNiceMock(ServiceComponentHost.class); + final KerberosServiceDescriptor serviceDescriptor = EasyMock.createNiceMock(KerberosServiceDescriptor.class); + final KerberosComponentDescriptor componentDescriptor = EasyMock.createNiceMock(KerberosComponentDescriptor.class); + + final String hdfsService = "HDFS"; + final String zookeeperService = "ZOOKEEPER"; + final String hostName = "host1"; + final String hdfsComponent = "DATANODE"; + final String zkComponent = "ZK"; + + Collection identityFilter = new ArrayList<>(); + Map> kerberosConfigurations = new HashMap<>(); + Map> propertiesToInsert = new HashMap<>(); + Map> propertiesToRemove = new HashMap<>(); + Map descriptorProperties = new HashMap<>(); + Map> configurations = new HashMap<>(); + + List serviceComponentHosts = new ArrayList() {{ + add(serviceComponentHostHDFS); + add(serviceComponentHostZK); + }}; + Map clusterServices = new HashMap(){{ + put(hdfsService, null); + put(zookeeperService, null); + }}; + + Capture> serviceCapture = Capture.newInstance(CaptureType.LAST); + + expect(kerberosDescriptor.getProperties()).andReturn(descriptorProperties).atLeastOnce(); + expect(kerberosHelper.calculateConfigurations((Cluster)anyObject(), anyString(), (Map)anyObject())).andReturn(configurations).atLeastOnce(); + 
expect(kerberosIdentityDataFileWriterFactory.createKerberosIdentityDataFileWriter((File)anyObject())).andReturn(kerberosIdentityDataFileWriter); + // it's important to pass a copy of clusterServices + expect(cluster.getServices()).andReturn(new HashMap<>(clusterServices)).atLeastOnce(); + + expect(serviceComponentHostHDFS.getHostName()).andReturn(hostName).atLeastOnce(); + expect(serviceComponentHostHDFS.getServiceName()).andReturn(hdfsService).atLeastOnce(); + expect(serviceComponentHostHDFS.getServiceComponentName()).andReturn(hdfsComponent).atLeastOnce(); + + expect(serviceComponentHostZK.getHostName()).andReturn(hostName).atLeastOnce(); + expect(serviceComponentHostZK.getServiceName()).andReturn(zookeeperService).atLeastOnce(); + expect(serviceComponentHostZK.getServiceComponentName()).andReturn(zkComponent).atLeastOnce(); + + expect(kerberosDescriptor.getService(hdfsService)).andReturn(serviceDescriptor).once(); + + expect(serviceDescriptor.getComponent(hdfsComponent)).andReturn(componentDescriptor).once(); + expect(componentDescriptor.getConfigurations(anyBoolean())).andReturn(null); + + expect(kerberosHelper.applyStackAdvisorUpdates( + (Cluster)anyObject(), + capture(serviceCapture), + (Map>)anyObject(), + (Map>)anyObject(), + (Map>)anyObject(), + (Map>)anyObject(), + (Map>)anyObject(), + anyBoolean())).andReturn(null).atLeastOnce(); + + replay(kerberosDescriptor, kerberosHelper, kerberosIdentityDataFileWriterFactory, + cluster, serviceComponentHostHDFS, serviceComponentHostZK, serviceDescriptor, componentDescriptor); + + prepareKerberosServerAction.processServiceComponentHosts(cluster, + kerberosDescriptor, + serviceComponentHosts, + identityFilter, + "", + kerberosConfigurations, + propertiesToInsert, + propertiesToRemove, + false, false); + + verify(kerberosHelper); + + Set resultServices = serviceCapture.getValue(); + Assert.assertEquals(clusterServices.keySet(), resultServices); + } + +}