ambari-commits mailing list archives

From rle...@apache.org
Subject ambari git commit: AMBARI-12356. kinit of hdfs Kerberos identity fails when starting added service(s) after upgrade to Ambari 2.1.0 (rlevas)
Date Mon, 13 Jul 2015 13:35:37 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk a663dff6f -> 8d00616b1


AMBARI-12356. kinit of hdfs Kerberos identity fails when starting added service(s) after upgrade to Ambari 2.1.0 (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8d00616b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8d00616b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8d00616b

Branch: refs/heads/trunk
Commit: 8d00616b16b19506a5bd10ba9ed2b5787aae978c
Parents: a663dff
Author: Robert Levas <rlevas@hortonworks.com>
Authored: Mon Jul 13 09:33:16 2015 -0400
Committer: Robert Levas <rlevas@hortonworks.com>
Committed: Mon Jul 13 09:34:57 2015 -0400

----------------------------------------------------------------------
 .../internal/ArtifactResourceProvider.java      |  22 +-
 .../ambari/server/orm/dao/ArtifactDAO.java      |  15 +
 .../server/orm/entities/ArtifactEntity.java     |  15 +-
 .../server/upgrade/UpgradeCatalog210.java       | 133 ++++++++
 .../internal/ArtifactResourceProviderTest.java  |   4 +-
 .../server/upgrade/UpgradeCatalog210Test.java   | 140 +++++++-
 .../test_kerberos_descriptor_no_hdfs.json       | 136 ++++++++
 .../test_kerberos_descriptor_simple.json        | 335 +++++++++++++++++++
 8 files changed, 775 insertions(+), 25 deletions(-)
----------------------------------------------------------------------
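
For orientation, a minimal sketch (not part of the commit) of the descriptor restructuring that UpgradeCatalog210 performs below. It uses only APIs that appear in this diff; the reference-merging details and error handling are omitted, and the "data" map variable is an assumption standing in for the artifact's stored data:

    // Move the global "hdfs" identity under the HDFS service, creating a sparse
    // HDFS service descriptor when the stored descriptor does not define one.
    KerberosDescriptor descriptor = new KerberosDescriptorFactory().createInstance(data);
    KerberosIdentityDescriptor hdfsIdentity = descriptor.getIdentity("hdfs");
    if (hdfsIdentity != null) {
      KerberosServiceDescriptor hdfsService = descriptor.getService("HDFS");
      if (hdfsService == null) {
        hdfsService = new KerberosServiceDescriptorFactory().createInstance("HDFS", (Map) null);
        hdfsService.putIdentity(hdfsIdentity);
        descriptor.putService(hdfsService);
      } else {
        hdfsService.putIdentity(hdfsIdentity);
      }
      descriptor.removeIdentity("hdfs");
    }
    // Identity references named "/hdfs" elsewhere are then renamed to "/HDFS/hdfs".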


http://git-wip-us.apache.org/repos/asf/ambari/blob/8d00616b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ArtifactResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ArtifactResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ArtifactResourceProvider.java
index 680f9b8..34952f2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ArtifactResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ArtifactResourceProvider.java
@@ -365,22 +365,26 @@ public class ArtifactResourceProvider extends AbstractResourceProvider {
     return new Command<Void>() {
       @Override
       public Void invoke() throws AmbariException {
-        Map<String, Object> keyProperties = new HashMap<String, Object>();
-
         // flatten out key properties as is expected by createForeignKeyMap()
+        Map<String, Object> keyProperties = new HashMap<String, Object>();
         for (Map.Entry<String, Object> entry : resource.getPropertiesMap().get("Artifacts").entrySet()) {
           keyProperties.put(String.format("Artifacts/%s", entry.getKey()), entry.getValue());
         }
 
-        // create entity and set properties
-        final ArtifactEntity entity = new ArtifactEntity();
-        entity.setArtifactName(String.valueOf(resource.getPropertyValue(ARTIFACT_NAME_PROPERTY)));
-        entity.setForeignKeys(createForeignKeyMap(keyProperties));
+        // find entity to remove
+        String artifactName = String.valueOf(resource.getPropertyValue(ARTIFACT_NAME_PROPERTY));
+        TreeMap<String, String> artifactForeignKeys = createForeignKeyMap(keyProperties);
+        ArtifactEntity entity = artifactDAO.findByNameAndForeignKeys(artifactName, artifactForeignKeys);
 
-        LOG.info("Deleting Artifact, name = {}, foreign keys = {}",
-            entity.getArtifactName(), entity.getForeignKeys());
+        if (entity != null) {
+          LOG.info("Deleting Artifact: name = {}, foreign keys = {}",
+              entity.getArtifactName(), entity.getForeignKeys());
+          artifactDAO.remove(entity);
+        } else {
+          LOG.info("Cannot find Artifact to delete, ignoring: name = {}, foreign keys = {}",
+              artifactName, artifactForeignKeys);
+        }
 
-        artifactDAO.remove(entity);
         return null;
       }
     };
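
The change above makes the delete path look up the managed entity before removal instead of removing a freshly constructed ArtifactEntity. Condensed, with names taken from the hunk:

    // Remove only an entity that actually exists; otherwise log and ignore.
    ArtifactEntity entity = artifactDAO.findByNameAndForeignKeys(artifactName, artifactForeignKeys);
    if (entity != null) {
      artifactDAO.remove(entity);
    }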

http://git-wip-us.apache.org/repos/asf/ambari/blob/8d00616b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ArtifactDAO.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ArtifactDAO.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ArtifactDAO.java
index 27346dd..7d642c8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ArtifactDAO.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/dao/ArtifactDAO.java
@@ -86,6 +86,21 @@ public class ArtifactDAO {
   }
 
   /**
+   * Find all artifacts for the specified artifact name.
+   *
+   * @param artifactName name of artifact to find
+   * @return all artifacts with the specified artifact name or an empty List
+   */
+  @RequiresSession
+  public List<ArtifactEntity> findByName(String artifactName) {
+    TypedQuery<ArtifactEntity> query = entityManagerProvider.get().
+        createNamedQuery("artifactByName", ArtifactEntity.class);
+    query.setParameter("artifactName", artifactName);
+
+    return query.getResultList();
+  }
+
+  /**
    * Refresh the state of the instance from the database,
    * overwriting changes made to the entity, if any.
    *
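
The new findByName(String) finder resolves the "artifactByName" named query added to ArtifactEntity in the next hunk. For illustration only, an equivalent ad-hoc query; the JPQL string is copied from that hunk, and the entityManager variable is an assumption:

    TypedQuery<ArtifactEntity> query = entityManager.createQuery(
        "SELECT artifact FROM ArtifactEntity artifact WHERE artifact.artifactName=:artifactName",
        ArtifactEntity.class);
    query.setParameter("artifactName", "kerberos_descriptor");
    List<ArtifactEntity> artifacts = query.getResultList();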

http://git-wip-us.apache.org/repos/asf/ambari/blob/8d00616b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ArtifactEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ArtifactEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ArtifactEntity.java
index 849a938..8972e6d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ArtifactEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ArtifactEntity.java
@@ -39,11 +39,14 @@ import java.util.TreeMap;
 @IdClass(ArtifactEntityPK.class)
 @Table(name = "artifact")
 @NamedQueries({
-  @NamedQuery(name = "artifactByNameAndForeignKeys",
-    query = "SELECT artifact FROM ArtifactEntity artifact " +
-              "WHERE artifact.artifactName=:artifactName AND artifact.foreignKeys=:foreignKeys"),
-  @NamedQuery(name = "artifactByForeignKeys",
-    query = "SELECT artifact FROM ArtifactEntity artifact " +
+    @NamedQuery(name = "artifactByNameAndForeignKeys",
+        query = "SELECT artifact FROM ArtifactEntity artifact " +
+            "WHERE artifact.artifactName=:artifactName AND artifact.foreignKeys=:foreignKeys"),
+    @NamedQuery(name = "artifactByName",
+        query = "SELECT artifact FROM ArtifactEntity artifact " +
+            "WHERE artifact.artifactName=:artifactName"),
+    @NamedQuery(name = "artifactByForeignKeys",
+        query = "SELECT artifact FROM ArtifactEntity artifact " +
             "WHERE artifact.foreignKeys=:foreignKeys")
 })
 
@@ -58,7 +61,7 @@ public class ArtifactEntity {
   @Basic
   private String foreignKeys;
 
-  @Column(name = "artifact_data", nullable = false, insertable = true, updatable = false)
+  @Column(name = "artifact_data", nullable = false, insertable = true, updatable = true)
   @Basic
   private String artifactData;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/8d00616b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
index 3d4d701..7967f7f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
@@ -46,10 +46,12 @@ import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
+import org.apache.ambari.server.orm.dao.ArtifactDAO;
 import org.apache.ambari.server.orm.dao.DaoUtils;
 import org.apache.ambari.server.orm.dao.ServiceComponentDesiredStateDAO;
 import org.apache.ambari.server.orm.dao.StackDAO;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
+import org.apache.ambari.server.orm.entities.ArtifactEntity;
 import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity;
 import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
@@ -61,6 +63,12 @@ import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.kerberos.AbstractKerberosDescriptorContainer;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
+import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptorFactory;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.apache.ambari.server.utils.VersionUtils;
 import org.apache.commons.lang.StringUtils;
@@ -991,6 +999,131 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
     addMissingConfigs();
     updateAlertDefinitions();
     removeStormRestApiServiceComponent();
+    updateKerberosDescriptorArtifacts();
+  }
+
+
+  /**
+   * Update the stored Kerberos Descriptor artifacts to conform to the new structure.
+   * <p/>
+   * Finds the relevant artifact entities and iterates through them to process each independently.
+   */
+  protected void updateKerberosDescriptorArtifacts() throws AmbariException {
+    ArtifactDAO artifactDAO = injector.getInstance(ArtifactDAO.class);
+    List<ArtifactEntity> artifactEntities = artifactDAO.findByName("kerberos_descriptor");
+
+    if (artifactEntities != null) {
+      for (ArtifactEntity artifactEntity : artifactEntities) {
+        updateKerberosDescriptorArtifact(artifactDAO, artifactEntity);
+      }
+    }
+  }
+
+  /**
+   * Update the specified Kerberos Descriptor artifact to conform to the new structure.
+   * <p/>
+   * To conform to the new Kerberos Descriptor structure, the global "hdfs" identity (if it exists)
+   * must be moved to the set of identities under the HDFS service.  If no HDFS service exists, one
+   * is created to hold only the "hdfs" identity descriptor. Then, any identity descriptor references
+   * to "/hdfs" must be changed to "/HDFS/hdfs" to point to the moved "hdfs" identity descriptor.
+   * <p/>
+   * The supplied ArtifactEntity is updated in place and merged back into the database.
+   *
+   * @param artifactDAO    the ArtifactDAO to use to store the updated ArtifactEntity
+   * @param artifactEntity the ArtifactEntity to update
+   */
+  protected void updateKerberosDescriptorArtifact(ArtifactDAO artifactDAO, ArtifactEntity artifactEntity) throws AmbariException {
+    if (artifactEntity != null) {
+      Map<String, Object> data = artifactEntity.getArtifactData();
+
+      if (data != null) {
+        final KerberosDescriptor kerberosDescriptor = new KerberosDescriptorFactory().createInstance(data);
+
+        if (kerberosDescriptor != null) {
+          // Get the global "hdfs" identity (if it exists)
+          KerberosIdentityDescriptor hdfsIdentity = kerberosDescriptor.getIdentity("hdfs");
+
+          if (hdfsIdentity != null) {
+            // Move the "hdfs" global identity under the HDFS service by removing it from the
+            // collection of global identities and _merging_ it into the identities for the HDFS
+            // service - creating a sparse HDFS service structure if necessary.
+            KerberosServiceDescriptor hdfsService = kerberosDescriptor.getService("HDFS");
+
+            if (hdfsService == null) {
+              hdfsService = new KerberosServiceDescriptorFactory().createInstance("HDFS", (Map) null);
+              hdfsService.putIdentity(hdfsIdentity);
+              kerberosDescriptor.putService(hdfsService);
+            } else {
+              KerberosIdentityDescriptor hdfsReferenceIdentity = hdfsService.getIdentity("/hdfs");
+
+              if (hdfsReferenceIdentity != null) {
+                // Merge the changes from the reference identity into the global identity...
+                hdfsIdentity.update(hdfsReferenceIdentity);
+                // Make sure the identity's name didn't change.
+                hdfsIdentity.setName("hdfs");
+
+                hdfsService.removeIdentity("/hdfs");
+              }
+
+              hdfsService.putIdentity(hdfsIdentity);
+            }
+
+            kerberosDescriptor.removeIdentity("hdfs");
+          }
+
+          // Find all identities named "/hdfs" and update the name to "/HDFS/hdfs"
+          updateKerberosDescriptorIdentityReferences(kerberosDescriptor, "/hdfs", "/HDFS/hdfs");
+          updateKerberosDescriptorIdentityReferences(kerberosDescriptor.getServices(), "/hdfs", "/HDFS/hdfs");
+
+          artifactEntity.setArtifactData(kerberosDescriptor.toMap());
+          artifactDAO.merge(artifactEntity);
+        }
+      }
+    }
+  }
+
+  /**
+   * Iterates through a collection of AbstractKerberosDescriptorContainers to find and update
+   * identity descriptor references.
+   *
+   * @param descriptorMap    a String to AbstractKerberosDescriptorContainer map to iterate through
+   * @param referenceName    the reference name to change
+   * @param newReferenceName the new reference name
+   */
+  private void updateKerberosDescriptorIdentityReferences(Map<String, ? extends AbstractKerberosDescriptorContainer> descriptorMap,
+                                                          String referenceName,
+                                                          String newReferenceName) {
+    if (descriptorMap != null) {
+      for (AbstractKerberosDescriptorContainer kerberosServiceDescriptor : descriptorMap.values()) {
+        updateKerberosDescriptorIdentityReferences(kerberosServiceDescriptor, referenceName, newReferenceName);
+
+        if (kerberosServiceDescriptor instanceof KerberosServiceDescriptor) {
+          updateKerberosDescriptorIdentityReferences(((KerberosServiceDescriptor) kerberosServiceDescriptor).getComponents(),
+              referenceName, newReferenceName);
+        }
+      }
+    }
+  }
+
+  /**
+   * Given an AbstractKerberosDescriptorContainer, iterates through its contained identity descriptors
+   * to find ones matching the reference name to change.
+   * <p/>
+   * If found, the reference name is updated to the new name.
+   *
+   * @param descriptorContainer the AbstractKerberosDescriptorContainer to update
+   * @param referenceName       the reference name to change
+   * @param newReferenceName    the new reference name
+   */
+  private void updateKerberosDescriptorIdentityReferences(AbstractKerberosDescriptorContainer descriptorContainer,
+                                                          String referenceName,
+                                                          String newReferenceName) {
+    if (descriptorContainer != null) {
+      KerberosIdentityDescriptor identity = descriptorContainer.getIdentity(referenceName);
+      if (identity != null) {
+        identity.setName(newReferenceName);
+      }
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/8d00616b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ArtifactResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ArtifactResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ArtifactResourceProviderTest.java
index 789fb54..45c32c8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ArtifactResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ArtifactResourceProviderTest.java
@@ -413,7 +413,6 @@ public class ArtifactResourceProviderTest {
     expect(cluster.getClusterName()).andReturn("test-cluster").anyTimes();
 
 
-    expect(dao.findByForeignKeys(eq(foreignKeys))).andReturn(entities).anyTimes();
     expect(entity.getArtifactName()).andReturn("test-artifact").anyTimes();
     expect(entity.getForeignKeys()).andReturn(responseForeignKeys).anyTimes();
     expect(entity.getArtifactData()).andReturn(artifact_data).anyTimes();
@@ -421,7 +420,10 @@ public class ArtifactResourceProviderTest {
     expect(entity2.getForeignKeys()).andReturn(responseForeignKeys).anyTimes();
     expect(entity2.getArtifactData()).andReturn(artifact_data2).anyTimes();
 
+    expect(dao.findByForeignKeys(eq(foreignKeys))).andReturn(entities).once();
+    expect(dao.findByNameAndForeignKeys(eq("test-artifact"), eq(foreignKeys))).andReturn(entity).once();
     dao.remove(capture(deleteEntityCapture));
+    expect(dao.findByNameAndForeignKeys(eq("test-artifact2"), eq(foreignKeys))).andReturn(entity2).once();
     dao.remove(capture(deleteEntityCapture2));
 
     // end of expectation setting

http://git-wip-us.apache.org/repos/asf/ambari/blob/8d00616b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
index 8708047..51855c9 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
@@ -19,6 +19,8 @@
 package org.apache.ambari.server.upgrade;
 
 import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertNotNull;
+import static junit.framework.Assert.assertNull;
 import static org.easymock.EasyMock.anyObject;
 import static org.easymock.EasyMock.capture;
 import static org.easymock.EasyMock.createMockBuilder;
@@ -31,13 +33,14 @@ import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.reset;
 import static org.easymock.EasyMock.verify;
 
+import java.io.File;
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
+import java.net.URL;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -55,11 +58,13 @@ import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.ArtifactDAO;
 import org.apache.ambari.server.orm.dao.ClusterDAO;
 import org.apache.ambari.server.orm.dao.ClusterStateDAO;
 import org.apache.ambari.server.orm.dao.HostComponentDesiredStateDAO;
 import org.apache.ambari.server.orm.dao.ServiceComponentDesiredStateDAO;
 import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.orm.entities.ArtifactEntity;
 import org.apache.ambari.server.orm.entities.ClusterEntity;
 import org.apache.ambari.server.orm.entities.ClusterServiceEntity;
 import org.apache.ambari.server.orm.entities.ClusterStateEntity;
@@ -75,6 +80,9 @@ import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.HostComponentAdminState;
 import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.Capture;
 import org.easymock.EasyMockSupport;
@@ -210,12 +218,17 @@ public class UpgradeCatalog210Test {
     Method removeStormRestApiServiceComponent =
       UpgradeCatalog210.class.getDeclaredMethod("removeStormRestApiServiceComponent");
 
+    Method updateKerberosDescriptorArtifacts =
+      UpgradeCatalog210.class.getDeclaredMethod("updateKerberosDescriptorArtifacts");
+
     UpgradeCatalog210 upgradeCatalog210 = createMockBuilder(UpgradeCatalog210.class)
-      .addMockedMethod(addNewConfigurationsFromXml)
-      .addMockedMethod(initializeClusterAndServiceWidgets)
-      .addMockedMethod(addMissingConfigs)
-      .addMockedMethod(updateAlertDefinitions)
-      .addMockedMethod(removeStormRestApiServiceComponent).createMock();
+        .addMockedMethod(addNewConfigurationsFromXml)
+        .addMockedMethod(initializeClusterAndServiceWidgets)
+        .addMockedMethod(addMissingConfigs)
+        .addMockedMethod(updateAlertDefinitions)
+        .addMockedMethod(removeStormRestApiServiceComponent)
+        .addMockedMethod(updateKerberosDescriptorArtifacts)
+        .createMock();
 
     upgradeCatalog210.addNewConfigurationsFromXml();
     expectLastCall().once();
@@ -232,6 +245,9 @@ public class UpgradeCatalog210Test {
     upgradeCatalog210.removeStormRestApiServiceComponent();
     expectLastCall().once();
 
+    upgradeCatalog210.updateKerberosDescriptorArtifacts();
+    expectLastCall().once();
+
     replay(upgradeCatalog210);
 
     upgradeCatalog210.executeDMLUpdates();
@@ -469,7 +485,7 @@ public class UpgradeCatalog210Test {
 
     final Map<String, String> propertiesExpectedHBaseSite = new HashMap<String, String>();
     propertiesExpectedHBaseSite.put("hbase.region.server.rpc.scheduler.factory.class",
-                                    "org.apache.phoenix.hbase.index.ipc.PhoenixIndexRpcSchedulerFactory");
+        "org.apache.phoenix.hbase.index.ipc.PhoenixIndexRpcSchedulerFactory");
     propertiesExpectedHBaseSite.put("hbase.security.authorization", "true");
 
     final Map<String, String> propertiesExpectedHBaseEnv = new HashMap<String, String>();
@@ -519,9 +535,9 @@ public class UpgradeCatalog210Test {
     ClusterEntity clusterEntity = upgradeCatalogHelper.createCluster(injector,
       "c1", desiredStackEntity);
     ClusterServiceEntity clusterServiceEntity = upgradeCatalogHelper.createService(
-      injector, clusterEntity, "STORM");
+        injector, clusterEntity, "STORM");
     HostEntity hostEntity = upgradeCatalogHelper.createHost(injector,
-      clusterEntity, "h1");
+        clusterEntity, "h1");
 
     // Set current stack version
     ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
@@ -657,6 +673,112 @@ public class UpgradeCatalog210Test {
     Assert.assertEquals("2.1.0", upgradeCatalog.getTargetVersion());
   }
 
+  @Test
+  public void testUpdateKerberosDescriptorArtifact_Simple() throws Exception {
+    final KerberosDescriptorFactory kerberosDescriptorFactory = new KerberosDescriptorFactory();
+
+    KerberosServiceDescriptor serviceDescriptor;
+
+    URL systemResourceURL = ClassLoader.getSystemResource("kerberos/test_kerberos_descriptor_simple.json");
+    assertNotNull(systemResourceURL);
+
+    final KerberosDescriptor kerberosDescriptorOrig = kerberosDescriptorFactory.createInstance(new File(systemResourceURL.getFile()));
+    assertNotNull(kerberosDescriptorOrig);
+    assertNotNull(kerberosDescriptorOrig.getIdentity("hdfs"));
+
+    serviceDescriptor = kerberosDescriptorOrig.getService("HDFS");
+    assertNotNull(serviceDescriptor);
+    assertNotNull(serviceDescriptor.getIdentity("/hdfs"));
+    assertNull(serviceDescriptor.getIdentity("hdfs"));
+
+    serviceDescriptor = kerberosDescriptorOrig.getService("OOZIE");
+    assertNotNull(serviceDescriptor);
+    assertNotNull(serviceDescriptor.getIdentity("/hdfs"));
+    assertNull(serviceDescriptor.getIdentity("/HDFS/hdfs"));
+
+    UpgradeCatalog210 upgradeMock = createMockBuilder(UpgradeCatalog210.class).createMock();
+
+    Capture<Map<String, Object>> updatedData = new Capture<Map<String, Object>>();
+
+    ArtifactEntity artifactEntity = createNiceMock(ArtifactEntity.class);
+    expect(artifactEntity.getArtifactData())
+        .andReturn(kerberosDescriptorOrig.toMap())
+        .once();
+
+    artifactEntity.setArtifactData(capture(updatedData));
+    expectLastCall().once();
+
+    replay(artifactEntity, upgradeMock);
+    upgradeMock.updateKerberosDescriptorArtifact(createNiceMock(ArtifactDAO.class), artifactEntity);
+    verify(artifactEntity, upgradeMock);
+
+    KerberosDescriptor kerberosDescriptorUpdated = new KerberosDescriptorFactory().createInstance(updatedData.getValue());
+    assertNotNull(kerberosDescriptorUpdated);
+    assertNull(kerberosDescriptorUpdated.getIdentity("/hdfs"));
+
+    serviceDescriptor = kerberosDescriptorUpdated.getService("HDFS");
+    assertNotNull(serviceDescriptor);
+    assertNull(serviceDescriptor.getIdentity("/hdfs"));
+    assertNotNull(serviceDescriptor.getIdentity("hdfs"));
+
+    serviceDescriptor = kerberosDescriptorUpdated.getService("OOZIE");
+    assertNotNull(serviceDescriptor);
+    assertNull(serviceDescriptor.getIdentity("/hdfs"));
+    assertNotNull(serviceDescriptor.getIdentity("/HDFS/hdfs"));
+  }
+
+  @Test
+  public void testUpdateKerberosDescriptorArtifact_NoHDFSService() throws Exception {
+    final KerberosDescriptorFactory kerberosDescriptorFactory = new KerberosDescriptorFactory();
+
+    KerberosServiceDescriptor serviceDescriptor;
+
+    URL systemResourceURL = ClassLoader.getSystemResource("kerberos/test_kerberos_descriptor_no_hdfs.json");
+    assertNotNull(systemResourceURL);
+
+    final KerberosDescriptor kerberosDescriptorOrig = kerberosDescriptorFactory.createInstance(new File(systemResourceURL.getFile()));
+    assertNotNull(kerberosDescriptorOrig);
+    assertNotNull(kerberosDescriptorOrig.getIdentity("hdfs"));
+
+    serviceDescriptor = kerberosDescriptorOrig.getService("HDFS");
+    assertNull(serviceDescriptor);
+
+    serviceDescriptor = kerberosDescriptorOrig.getService("OOZIE");
+    assertNotNull(serviceDescriptor);
+    assertNotNull(serviceDescriptor.getIdentity("/hdfs"));
+    assertNull(serviceDescriptor.getIdentity("/HDFS/hdfs"));
+
+    UpgradeCatalog210 upgradeMock = createMockBuilder(UpgradeCatalog210.class).createMock();
+
+    Capture<Map<String, Object>> updatedData = new Capture<Map<String, Object>>();
+
+    ArtifactEntity artifactEntity = createNiceMock(ArtifactEntity.class);
+    expect(artifactEntity.getArtifactData())
+        .andReturn(kerberosDescriptorOrig.toMap())
+        .once();
+
+    artifactEntity.setArtifactData(capture(updatedData));
+    expectLastCall().once();
+
+    replay(artifactEntity, upgradeMock);
+    upgradeMock.updateKerberosDescriptorArtifact(createNiceMock(ArtifactDAO.class), artifactEntity);
+    verify(artifactEntity, upgradeMock);
+
+    KerberosDescriptor kerberosDescriptorUpdated = new KerberosDescriptorFactory().createInstance(updatedData.getValue());
+    assertNotNull(kerberosDescriptorUpdated);
+    assertNull(kerberosDescriptorUpdated.getIdentity("/hdfs"));
+
+    serviceDescriptor = kerberosDescriptorUpdated.getService("HDFS");
+    assertNotNull(serviceDescriptor);
+    assertNull(serviceDescriptor.getIdentity("/hdfs"));
+    assertNotNull(serviceDescriptor.getIdentity("hdfs"));
+
+    serviceDescriptor = kerberosDescriptorUpdated.getService("OOZIE");
+    assertNotNull(serviceDescriptor);
+    assertNull(serviceDescriptor.getIdentity("/hdfs"));
+    assertNotNull(serviceDescriptor.getIdentity("/HDFS/hdfs"));
+  }
+
   // *********** Inner Classes that represent sections of the DDL ***********
   // ************************************************************************
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/8d00616b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_no_hdfs.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_no_hdfs.json b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_no_hdfs.json
new file mode 100644
index 0000000..8f1d075
--- /dev/null
+++ b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_no_hdfs.json
@@ -0,0 +1,136 @@
+{
+  "properties": {
+    "realm": "${kerberos-env/realm}",
+    "keytab_dir": "/etc/security/keytabs"
+  },
+  "identities": [
+    {
+      "name": "spnego",
+      "principal": {
+        "value": "HTTP/_HOST@${realm}",
+        "type": "service"
+      },
+      "keytab": {
+        "file": "${keytab_dir}/spnego.service.keytab",
+        "owner": {
+          "name": "root",
+          "access": "r"
+        },
+        "group": {
+          "name": "${cluster-env/user_group}",
+          "access": "r"
+        }
+      }
+    },
+    {
+      "name": "hdfs",
+      "principal": {
+        "value": "${hadoop-env/hdfs_user}-${cluster_name}@${realm}",
+        "type": "user",
+        "configuration": "hadoop-env/hdfs_principal_name",
+        "local_username": "${hadoop-env/hdfs_user}"
+      },
+      "keytab": {
+        "file": "${keytab_dir}/hdfs.headless.keytab",
+        "owner": {
+          "name": "${hadoop-env/hdfs_user}",
+          "access": "r"
+        },
+        "group": {
+          "name": "${cluster-env/user_group}",
+          "access": "r"
+        },
+        "configuration": "hadoop-env/hdfs_user_keytab"
+      }
+    },
+    {
+      "name": "smokeuser",
+      "principal": {
+        "value": "${cluster-env/smokeuser}-${cluster_name}@${realm}",
+        "type": "user",
+        "configuration": "cluster-env/smokeuser_principal_name",
+        "local_username": "${cluster-env/smokeuser}"
+      },
+      "keytab": {
+        "file": "${keytab_dir}/smokeuser.headless.keytab",
+        "owner": {
+          "name": "${cluster-env/smokeuser}",
+          "access": "r"
+        },
+        "group": {
+          "name": "${cluster-env/user_group}",
+          "access": "r"
+        },
+        "configuration": "cluster-env/smokeuser_keytab"
+      }
+    }
+  ],
+  "services": [
+    {
+      "name": "OOZIE",
+      "identities": [
+        {
+          "name": "/spnego"
+        },
+        {
+          "name": "/smokeuser"
+        },
+        {
+          "name": "/hdfs"
+        }
+      ],
+      "auth_to_local_properties" : [
+        "oozie-site/oozie.authentication.kerberos.name.rules"
+      ],
+      "configurations": [
+        {
+          "oozie-site": {
+            "oozie.authentication.type": "kerberos",
+            "oozie.service.AuthorizationService.authorization.enabled": "true",
+            "oozie.service.HadoopAccessorService.kerberos.enabled": "true",
+            "local.realm": "${realm}",
+            "oozie.authentication.kerberos.name.rules": "",
+            "oozie.credentials.credentialclasses": "hcat=org.apache.oozie.action.hadoop.HCatCredentials,hive2=org.apache.oozie.action.hadoop.Hive2Credentials"
+          }
+        }
+      ],
+      "components": [
+        {
+          "name": "OOZIE_SERVER",
+          "identities": [
+            {
+              "name": "oozie_server",
+              "principal": {
+                "value": "oozie/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "oozie-site/oozie.service.HadoopAccessorService.kerberos.principal",
+                "local_username" : "${oozie-env/oozie_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/oozie.service.keytab",
+                "owner": {
+                  "name": "${oozie-env/oozie_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "oozie-site/oozie.service.HadoopAccessorService.keytab.file"
+              }
+            },
+            {
+              "name": "/spnego",
+              "principal": {
+                "configuration": "oozie-site/oozie.authentication.kerberos.principal"
+              },
+              "keytab": {
+                "configuration": "oozie-site/oozie.authentication.kerberos.keytab"
+              }
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8d00616b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_simple.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_simple.json b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_simple.json
new file mode 100644
index 0000000..292ad25
--- /dev/null
+++ b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_simple.json
@@ -0,0 +1,335 @@
+{
+  "properties": {
+    "realm": "${kerberos-env/realm}",
+    "keytab_dir": "/etc/security/keytabs"
+  },
+  "identities": [
+    {
+      "name": "spnego",
+      "principal": {
+        "value": "HTTP/_HOST@${realm}",
+        "type": "service"
+      },
+      "keytab": {
+        "file": "${keytab_dir}/spnego.service.keytab",
+        "owner": {
+          "name": "root",
+          "access": "r"
+        },
+        "group": {
+          "name": "${cluster-env/user_group}",
+          "access": "r"
+        }
+      }
+    },
+    {
+      "name": "hdfs",
+      "principal": {
+        "value": "${hadoop-env/hdfs_user}-${cluster_name}@${realm}",
+        "type": "user",
+        "configuration": "hadoop-env/hdfs_principal_name",
+        "local_username": "${hadoop-env/hdfs_user}"
+      },
+      "keytab": {
+        "file": "${keytab_dir}/hdfs.headless.keytab",
+        "owner": {
+          "name": "${hadoop-env/hdfs_user}",
+          "access": "r"
+        },
+        "group": {
+          "name": "${cluster-env/user_group}",
+          "access": "r"
+        },
+        "configuration": "hadoop-env/hdfs_user_keytab"
+      }
+    },
+    {
+      "name": "smokeuser",
+      "principal": {
+        "value": "${cluster-env/smokeuser}-${cluster_name}@${realm}",
+        "type": "user",
+        "configuration": "cluster-env/smokeuser_principal_name",
+        "local_username": "${cluster-env/smokeuser}"
+      },
+      "keytab": {
+        "file": "${keytab_dir}/smokeuser.headless.keytab",
+        "owner": {
+          "name": "${cluster-env/smokeuser}",
+          "access": "r"
+        },
+        "group": {
+          "name": "${cluster-env/user_group}",
+          "access": "r"
+        },
+        "configuration": "cluster-env/smokeuser_keytab"
+      }
+    }
+  ],
+  "services": [
+    {
+      "name": "HDFS",
+      "identities": [
+        {
+          "name": "/spnego",
+          "principal": {
+            "configuration": "hdfs-site/dfs.web.authentication.kerberos.principal"
+          },
+          "keytab": {
+            "configuration": "hdfs-site/dfs.web.authentication.kerberos.keytab"
+          }
+        },
+        {
+          "name": "/smokeuser"
+        },
+        {
+          "name": "/hdfs"
+        }
+      ],
+      "auth_to_local_properties": [
+        "core-site/hadoop.security.auth_to_local"
+      ],
+      "configurations": [
+        {
+          "core-site": {
+            "hadoop.security.authentication": "kerberos",
+            "hadoop.rpc.protection": "authentication",
+            "hadoop.security.authorization": "true",
+            "hadoop.security.auth_to_local": "",
+            "hadoop.proxyuser.HTTP.groups": "${hadoop-env/proxyuser_group}"
+          }
+        }
+      ],
+      "components": [
+        {
+          "name": "NAMENODE",
+          "identities": [
+            {
+              "name": "namenode_nn",
+              "principal": {
+                "value": "nn/_HOST@${realm}",
+                "type": "service",
+                "configuration": "hdfs-site/dfs.namenode.kerberos.principal",
+                "local_username": "${hadoop-env/hdfs_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/nn.service.keytab",
+                "owner": {
+                  "name": "${hadoop-env/hdfs_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "hdfs-site/dfs.namenode.keytab.file"
+              }
+            },
+            {
+              "name": "/spnego",
+              "principal": {
+                "configuration": "hdfs-site/dfs.namenode.kerberos.internal.spnego.principal"
+              }
+            }
+          ],
+          "configurations": [
+            {
+              "hdfs-site": {
+                "dfs.block.access.token.enable": "true"
+              }
+            }
+          ]
+        },
+        {
+          "name": "DATANODE",
+          "identities": [
+            {
+              "name": "datanode_dn",
+              "principal": {
+                "value": "dn/_HOST@${realm}",
+                "type": "service",
+                "configuration": "hdfs-site/dfs.datanode.kerberos.principal",
+                "local_username": "${hadoop-env/hdfs_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/dn.service.keytab",
+                "owner": {
+                  "name": "${hadoop-env/hdfs_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "hdfs-site/dfs.datanode.keytab.file"
+              }
+            }
+          ],
+          "configurations": [
+            {
+              "hdfs-site": {
+                "dfs.datanode.address": "0.0.0.0:1019",
+                "dfs.datanode.http.address": "0.0.0.0:1022"
+              }
+            }
+          ]
+        },
+        {
+          "name": "SECONDARY_NAMENODE",
+          "identities": [
+            {
+              "name": "secondary_namenode_nn",
+              "principal": {
+                "value": "nn/_HOST@${realm}",
+                "type": "service",
+                "configuration": "hdfs-site/dfs.secondary.namenode.kerberos.principal",
+                "local_username": "${hadoop-env/hdfs_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/nn.service.keytab",
+                "owner": {
+                  "name": "${hadoop-env/hdfs_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "hdfs-site/dfs.secondary.namenode.keytab.file"
+              }
+            },
+            {
+              "name": "/spnego",
+              "principal": {
+                "configuration": "hdfs-site/dfs.secondary.namenode.kerberos.internal.spnego.principal"
+              }
+            }
+          ]
+        },
+        {
+          "name": "NFS_GATEWAY",
+          "identities": [
+            {
+              "name": "nfsgateway",
+              "principal": {
+                "value": "nfs/_HOST@${realm}",
+                "type": "service",
+                "configuration": "hdfs-site/nfs.kerberos.principal",
+                "local_username": "${hadoop-env/hdfs_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/nfs.service.keytab",
+                "owner": {
+                  "name": "${hadoop-env/hdfs_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "hdfs-site/nfs.keytab.file"
+              }
+            }
+          ]
+        },
+        {
+          "name": "JOURNALNODE",
+          "identities": [
+            {
+              "name": "journalnode_jn",
+              "principal": {
+                "value": "jn/_HOST@${realm}",
+                "type": "service",
+                "configuration": "hdfs-site/dfs.journalnode.kerberos.principal",
+                "local_username": "${hadoop-env/hdfs_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/jn.service.keytab",
+                "owner": {
+                  "name": "${hadoop-env/hdfs_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "hdfs-site/dfs.journalnode.keytab.file"
+              }
+            },
+            {
+              "name": "/spnego",
+              "principal": {
+                "configuration": "hdfs-site/dfs.journalnode.kerberos.internal.spnego.principal"
+              }
+            }
+          ]
+        }
+      ]
+    },
+    {
+      "name": "OOZIE",
+      "identities": [
+        {
+          "name": "/spnego"
+        },
+        {
+          "name": "/smokeuser"
+        },
+        {
+          "name": "/hdfs"
+        }
+      ],
+      "auth_to_local_properties" : [
+        "oozie-site/oozie.authentication.kerberos.name.rules"
+      ],
+      "configurations": [
+        {
+          "oozie-site": {
+            "oozie.authentication.type": "kerberos",
+            "oozie.service.AuthorizationService.authorization.enabled": "true",
+            "oozie.service.HadoopAccessorService.kerberos.enabled": "true",
+            "local.realm": "${realm}",
+            "oozie.authentication.kerberos.name.rules": "",
+            "oozie.credentials.credentialclasses": "hcat=org.apache.oozie.action.hadoop.HCatCredentials,hive2=org.apache.oozie.action.hadoop.Hive2Credentials"
+          }
+        }
+      ],
+      "components": [
+        {
+          "name": "OOZIE_SERVER",
+          "identities": [
+            {
+              "name": "oozie_server",
+              "principal": {
+                "value": "oozie/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "oozie-site/oozie.service.HadoopAccessorService.kerberos.principal",
+                "local_username" : "${oozie-env/oozie_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/oozie.service.keytab",
+                "owner": {
+                  "name": "${oozie-env/oozie_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "oozie-site/oozie.service.HadoopAccessorService.keytab.file"
+              }
+            },
+            {
+              "name": "/spnego",
+              "principal": {
+                "configuration": "oozie-site/oozie.authentication.kerberos.principal"
+              },
+              "keytab": {
+                "configuration": "oozie-site/oozie.authentication.kerberos.keytab"
+              }
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}

