ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From d...@apache.org
Subject ambari git commit: AMBARI-17905 HBase coprocessor classes related properties in hbase-site are empty after Ambari upgrade from 2.1.2 to 2.4.0 (dsen)
Date Wed, 27 Jul 2016 12:04:30 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.4 97cc175d4 -> 7defbd69f


AMBARI-17905 HBase coprocessor classes related properties in hbase-site are empty after Ambari
upgrade from 2.1.2 to 2.4.0 (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7defbd69
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7defbd69
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7defbd69

Branch: refs/heads/branch-2.4
Commit: 7defbd69fdc7721f5f6931662ce318dd97e0bda1
Parents: 97cc175
Author: Dmytro Sen <dsen@apache.org>
Authored: Wed Jul 27 15:03:40 2016 +0300
Committer: Dmytro Sen <dsen@apache.org>
Committed: Wed Jul 27 15:04:23 2016 +0300

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog240.java       | 110 ++++++++++++++-----
 .../server/upgrade/UpgradeCatalog240Test.java   |  70 +++++++++++-
 2 files changed, 148 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7defbd69/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
index 2339095..84b8817 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
@@ -2897,45 +2897,99 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
           StackId stackId = cluster.getCurrentStackVersion();
 
           // HBase is installed and Kerberos is enabled
-          if (installedServices.contains("HBASE") && SecurityType.KERBEROS == cluster.getSecurityType() && isAtLeastHdp25(stackId)) {
+          if (installedServices.contains("HBASE") && SecurityType.KERBEROS == cluster.getSecurityType()) {
             Config hbaseSite = cluster.getDesiredConfigByType(HBASE_SITE_CONFIG);
+
             if (null != hbaseSite) {
               Map<String, String> hbaseSiteProperties = hbaseSite.getProperties();
-              // Get any existing config properties (they probably don't exist)
-              String principal = hbaseSiteProperties.get(HBASE_SPNEGO_PRINCIPAL_KEY);
-              String keytab = hbaseSiteProperties.get(HBASE_SPNEGO_KEYTAB_KEY);
 
-              final Map<String, String> updatedKerberosProperties = new HashMap<>();
+              // update classes based on krb/ranger availability
+              boolean enableRangerHbase = false;
+              boolean xmlConfigurationsSupported = false;
 
-              // Set the principal for SPNEGO if it's not already set
-              if (null == principal) {
-                final KerberosDescriptor defaultDescriptor = getKerberosDescriptor(cluster);
-                final KerberosIdentityDescriptor spnegoDescriptor = defaultDescriptor.getIdentity("spnego");
-                if (null != spnegoDescriptor) {
-                  // Add the SPNEGO config for the principal
-                  KerberosPrincipalDescriptor principalDescriptor = spnegoDescriptor.getPrincipalDescriptor();
-                  if (null != principalDescriptor) {
-                    updatedKerberosProperties.put(HBASE_SPNEGO_PRINCIPAL_KEY, principalDescriptor.getValue());
-                  }
+              Config rangerHbasePluginProperties = cluster.getDesiredConfigByType("ranger-hbase-plugin-properties");
+              if (rangerHbasePluginProperties != null && rangerHbasePluginProperties.getProperties().containsKey("ranger-hbase-plugin-enabled")) {
+                enableRangerHbase = rangerHbasePluginProperties.getProperties().get("ranger-hbase-plugin-enabled").toLowerCase() == "yes";
+              }
+              Config rangerEnv = cluster.getDesiredConfigByType("ranger-env");
+              if (rangerEnv != null && rangerEnv.getProperties().containsKey("xml_configurations_supported")) {
+                xmlConfigurationsSupported = Boolean.parseBoolean(rangerEnv.getProperties().get("xml_configurations_supported"));
+              }
+
+              final Map<String, String> updatedHbaseProperties = new HashMap<>();
+
+              if (hbaseSiteProperties.containsKey("hbase.coprocessor.master.classes") &&
+                  hbaseSiteProperties.get("hbase.coprocessor.master.classes").equals("{{hbase_coprocessor_master_classes}}")) {
+                if (!enableRangerHbase) {
+                  updatedHbaseProperties.put("hbase.coprocessor.master.classes", "org.apache.hadoop.hbase.security.access.AccessController");
+                } else if (xmlConfigurationsSupported) {
+                  updatedHbaseProperties.put("hbase.coprocessor.master.classes", "org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor");
+                } else {
+                  updatedHbaseProperties.put("hbase.coprocessor.master.classes", "com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor");
                 }
               }
 
-              // Set the keytab for SPNEGO if it's not already set
-              if (null == keytab) {
-                final KerberosDescriptor defaultDescriptor = getKerberosDescriptor(cluster);
-                final KerberosIdentityDescriptor spnegoDescriptor = defaultDescriptor.getIdentity("spnego");
-                if (null != spnegoDescriptor) {
-                  // Add the SPNEGO config for the keytab
-                  KerberosKeytabDescriptor keytabDescriptor = spnegoDescriptor.getKeytabDescriptor();
-                  if (null != keytabDescriptor) {
-                    updatedKerberosProperties.put(HBASE_SPNEGO_KEYTAB_KEY, keytabDescriptor.getFile());
-                  }
+              if (hbaseSiteProperties.containsKey("hbase.coprocessor.regionserver.classes") &&
+                  hbaseSiteProperties.get("hbase.coprocessor.regionserver.classes").equals("{{hbase_coprocessor_regionserver_classes}}")) {
+                if (!enableRangerHbase) {
+                  updatedHbaseProperties.put("hbase.coprocessor.regionserver.classes", "org.apache.hadoop.hbase.security.access.AccessController");
+                } else if (xmlConfigurationsSupported) {
+                  updatedHbaseProperties.put("hbase.coprocessor.regionserver.classes", "org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor");
+                } else {
+                  updatedHbaseProperties.put("hbase.coprocessor.regionserver.classes", "com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor");
                 }
               }
 
-              // Update the configuration if we changed anything
-              if (!updatedKerberosProperties.isEmpty()) {
-                updateConfigurationProperties(HBASE_SITE_CONFIG, updatedKerberosProperties, true, false);
+              if (hbaseSiteProperties.containsKey("hbase.coprocessor.region.classes") &&
+                  hbaseSiteProperties.get("hbase.coprocessor.region.classes").equals("{{hbase_coprocessor_region_classes}}")) {
+                if (!enableRangerHbase) {
+                  updatedHbaseProperties.put("hbase.coprocessor.region.classes", "org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,org.apache.hadoop.hbase.security.access.AccessController");
+                } else if (xmlConfigurationsSupported) {
+                  updatedHbaseProperties.put("hbase.coprocessor.region.classes", "org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor");
+                } else {
+                  updatedHbaseProperties.put("hbase.coprocessor.region.classes", "org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,com.xasecure.authorization.hbase.XaSecureAuthorizationCoprocessor");
+                }
+              }
+              updateConfigurationProperties(HBASE_SITE_CONFIG, updatedHbaseProperties, true, false);
+
+
+              if (isAtLeastHdp25(stackId)) {
+                // Get any existing config properties (they probably don't exist)
+                String principal = hbaseSiteProperties.get(HBASE_SPNEGO_PRINCIPAL_KEY);
+                String keytab = hbaseSiteProperties.get(HBASE_SPNEGO_KEYTAB_KEY);
+
+                final Map<String, String> updatedKerberosProperties = new HashMap<>();
+
+                // Set the principal for SPNEGO if it's not already set
+                if (null == principal) {
+                  final KerberosDescriptor defaultDescriptor = getKerberosDescriptor(cluster);
+                  final KerberosIdentityDescriptor spnegoDescriptor = defaultDescriptor.getIdentity("spnego");
+                  if (null != spnegoDescriptor) {
+                    // Add the SPNEGO config for the principal
+                    KerberosPrincipalDescriptor principalDescriptor = spnegoDescriptor.getPrincipalDescriptor();
+                    if (null != principalDescriptor) {
+                      updatedKerberosProperties.put(HBASE_SPNEGO_PRINCIPAL_KEY, principalDescriptor.getValue());
+                    }
+                  }
+                }
+
+                // Set the keytab for SPNEGO if it's not already set
+                if (null == keytab) {
+                  final KerberosDescriptor defaultDescriptor = getKerberosDescriptor(cluster);
+                  final KerberosIdentityDescriptor spnegoDescriptor = defaultDescriptor.getIdentity("spnego");
+                  if (null != spnegoDescriptor) {
+                    // Add the SPNEGO config for the keytab
+                    KerberosKeytabDescriptor keytabDescriptor = spnegoDescriptor.getKeytabDescriptor();
+                    if (null != keytabDescriptor) {
+                      updatedKerberosProperties.put(HBASE_SPNEGO_KEYTAB_KEY, keytabDescriptor.getFile());
+                    }
+                  }
+                }
+
+                // Update the configuration if we changed anything
+                if (!updatedKerberosProperties.isEmpty()) {
+                  updateConfigurationProperties(HBASE_SITE_CONFIG, updatedKerberosProperties, true, false);
+                }
               }
             }
           }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7defbd69/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
index d14f6a0..d653907 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
@@ -119,10 +119,7 @@ import org.easymock.Capture;
 import org.easymock.CaptureType;
 import org.easymock.EasyMock;
 import org.easymock.EasyMockSupport;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.*;
 import org.junit.rules.TemporaryFolder;
 
 import com.google.common.collect.Maps;
@@ -818,6 +815,71 @@ public class UpgradeCatalog240Test {
   }
 
   @Test
+  public void testUpdateHbaseConfigs() throws Exception{
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+    final AmbariManagementController mockAmbariManagementController = easyMockSupport.createNiceMock(
+        AmbariManagementController.class);
+    final ConfigHelper mockConfigHelper = easyMockSupport.createMock(ConfigHelper.class);
+
+    final Clusters mockClusters = easyMockSupport.createStrictMock(Clusters.class);
+    final Cluster mockCluster = easyMockSupport.createNiceMock(Cluster.class);
+
+    final Injector mockInjector = Guice.createInjector(new Module() {
+      @Override
+      public void configure(Binder binder) {
+        binder.bind(AmbariManagementController.class).toInstance(mockAmbariManagementController);
+        binder.bind(ConfigHelper.class).toInstance(mockConfigHelper);
+        binder.bind(Clusters.class).toInstance(mockClusters);
+        binder.bind(EntityManager.class).toInstance(entityManager);
+        binder.bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+        binder.bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+        binder.bind(PasswordEncoder.class).toInstance(createNiceMock(PasswordEncoder.class));
+      }
+    });
+
+    expect(mockAmbariManagementController.getClusters()).andReturn(mockClusters).anyTimes();
+    expect(mockClusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", mockCluster);
+    }}).anyTimes();
+
+    expect(mockCluster.getServices()).andReturn(new HashMap<String, Service>(){{put("HBASE",null);}}).anyTimes();
+    expect(mockCluster.getSecurityType()).andReturn(SecurityType.KERBEROS).anyTimes();
+
+    final Config mockHbaseSiteConfigs = easyMockSupport.createNiceMock(Config.class);
+    expect(mockCluster.getDesiredConfigByType("hbase-site")).andReturn(mockHbaseSiteConfigs).atLeastOnce();
+    expect(mockHbaseSiteConfigs.getProperties()).andReturn(new HashMap<String, String>(){{
+      put("hbase.coprocessor.regionserver.classes","{{hbase_coprocessor_regionserver_classes}}");
+      put("hbase.coprocessor.region.classes","{{hbase_coprocessor_region_classes}}");
+    }}).anyTimes();
+
+    final Config mockRangerHbaseConfigs = easyMockSupport.createNiceMock(Config.class);
+    expect(mockCluster.getDesiredConfigByType("ranger-hbase-plugin-properties")).andReturn(mockRangerHbaseConfigs).atLeastOnce();
+    expect(mockRangerHbaseConfigs.getProperties()).andReturn(new HashMap<String, String>(){{
+      put("ranger-hbase-plugin-enabled", "yes");
+    }}).anyTimes();
+
+    final Config mockRangerEnvConfigs = easyMockSupport.createNiceMock(Config.class);
+    expect(mockCluster.getDesiredConfigByType("ranger-env")).andReturn(mockRangerEnvConfigs).atLeastOnce();
+    expect(mockRangerEnvConfigs.getProperties()).andReturn(new HashMap<String, String>(){{
+      put("xml_configurations_supported", "true");
+    }}).anyTimes();
+
+
+
+
+    Capture<Map<String, String>> hbaseCapture =  newCapture();
+    expect(mockAmbariManagementController.createConfig(eq(mockCluster), eq("hbase-site"),
+        capture(hbaseCapture), anyString(), (Map<String, Map<String, String>>)anyObject())).andReturn(null).once();
+
+    easyMockSupport.replayAll();
+    mockInjector.getInstance(UpgradeCatalog240.class).updateHBaseConfigs();
+    easyMockSupport.verifyAll();
+
+    assertEquals("org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor", hbaseCapture.getValue().get("hbase.coprocessor.regionserver.classes"));
+    assertEquals("org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint,org.apache.ranger.authorization.hbase.RangerAuthorizationCoprocessor", hbaseCapture.getValue().get("hbase.coprocessor.region.classes"));
+  }
+
+  @Test
   public void test_addParam_ParamsNotAvailable() {
 
     UpgradeCatalog240 upgradeCatalog240 = new UpgradeCatalog240(injector);


Mime
View raw message