ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jlun...@apache.org
Subject ambari git commit: AMBARI-9186 : Blueprint contains password fields in cluster-env [hadoop.user.password, sink.dbpassword] (jluniya)
Date Mon, 19 Jan 2015 19:32:29 GMT
Repository: ambari
Updated Branches:
  refs/heads/trunk 38429224c -> 7c5fb7b53


AMBARI-9186 : Blueprint contains password fields in cluster-env [hadoop.user.password, sink.dbpassword]
(jluniya)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7c5fb7b5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7c5fb7b5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7c5fb7b5

Branch: refs/heads/trunk
Commit: 7c5fb7b534eaec38c67dfed510ba97341bd2f0ce
Parents: 3842922
Author: Jayush Luniya <jluniya@hortonworks.com>
Authored: Mon Jan 19 11:32:02 2015 -0800
Committer: Jayush Luniya <jluniya@hortonworks.com>
Committed: Mon Jan 19 11:32:02 2015 -0800

----------------------------------------------------------------------
 .../query/render/ClusterBlueprintRenderer.java  | 54 +++++++++----
 ...StackLevelConfigurationResourceProvider.java |  2 +-
 .../apache/ambari/server/state/StackInfo.java   | 22 ++++++
 .../src/main/resources/properties.json          |  2 +-
 .../HDPWIN/2.1/configuration/cluster-env.xml    | 80 +++++++++++---------
 .../render/ClusterBlueprintRendererTest.java    | 19 +++--
 ambari-web/app/data/HDP2/site_properties.js     | 76 ++++++++++---------
 ambari-web/app/utils/config.js                  |  2 +-
 .../test/mock_data_setup/configs_mock_data.js   | 12 +--
 9 files changed, 164 insertions(+), 105 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7c5fb7b5/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
b/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
index 9967fc6..a584490 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
@@ -41,6 +41,7 @@ import org.apache.ambari.server.state.PropertyInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.StackInfo;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
@@ -175,12 +176,25 @@ public class ClusterBlueprintRenderer extends BaseRenderer implements
Renderer {
    * @param stackVersion  stack version
    */
   private void determinePropertiesToStrip(TreeNode<Resource> servicesNode, String stackName,
String stackVersion) {
-    AmbariMetaInfo stackInfo = getController().getAmbariMetaInfo();
+    AmbariMetaInfo ambariMetaInfo = getController().getAmbariMetaInfo();
+    StackInfo stack;
+    try {
+      stack = ambariMetaInfo.getStack(stackName, stackVersion);
+    } catch (AmbariException e) {
+      // shouldn't ever happen.
+      // Exception indicates that stack is not defined
+      // but we are getting the stack name from a running cluster.
+      throw new RuntimeException("Unexpected exception occurred while generating a blueprint. "  +
+          "The stack '" + stackName + ":" + stackVersion + "' does not exist");
+    }
+    Map<String, PropertyInfo> requiredStackProperties = stack.getRequiredProperties();
+    updatePropertiesToStrip(requiredStackProperties);
+
     for (TreeNode<Resource> serviceNode : servicesNode.getChildren()) {
       String name = (String) serviceNode.getObject().getPropertyValue("ServiceInfo/service_name");
       ServiceInfo service;
       try {
-        service = stackInfo.getService(stackName, stackVersion, name);
+        service = ambariMetaInfo.getService(stackName, stackVersion, name);
       } catch (AmbariException e) {
         // shouldn't ever happen.
         // Exception indicates that service is not in the stack
@@ -190,20 +204,30 @@ public class ClusterBlueprintRenderer extends BaseRenderer implements
Renderer {
       }
 
       Map<String, PropertyInfo> requiredProperties = service.getRequiredProperties();
-      for (Map.Entry<String, PropertyInfo> entry : requiredProperties.entrySet()) {
-        String propertyName = entry.getKey();
-        PropertyInfo propertyInfo = entry.getValue();
-        String configCategory = propertyInfo.getFilename();
-        if (configCategory.endsWith(".xml")) {
-          configCategory = configCategory.substring(0, configCategory.indexOf(".xml"));
-        }
-        Collection<String> categoryProperties = propertiesToStrip.get(configCategory);
-        if (categoryProperties == null) {
-          categoryProperties = new ArrayList<String>();
-          propertiesToStrip.put(configCategory, categoryProperties);
-        }
-        categoryProperties.add(propertyName);
+      updatePropertiesToStrip(requiredProperties);
+    }
+  }
+
+  /**
+   * Helper method to update propertiesToStrip with properties that are marked as required
+   *
+   * @param requiredProperties  Properties marked as required
+   */
+  private void updatePropertiesToStrip(Map<String, PropertyInfo> requiredProperties) {
+
+    for (Map.Entry<String, PropertyInfo> entry : requiredProperties.entrySet()) {
+      String propertyName = entry.getKey();
+      PropertyInfo propertyInfo = entry.getValue();
+      String configCategory = propertyInfo.getFilename();
+      if (configCategory.endsWith(".xml")) {
+        configCategory = configCategory.substring(0, configCategory.indexOf(".xml"));
+      }
+      Collection<String> categoryProperties = propertiesToStrip.get(configCategory);
+      if (categoryProperties == null) {
+        categoryProperties = new ArrayList<String>();
+        propertiesToStrip.put(configCategory, categoryProperties);
       }
+      categoryProperties.add(propertyName);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/7c5fb7b5/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackLevelConfigurationResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackLevelConfigurationResourceProvider.java
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackLevelConfigurationResourceProvider.java
index 1055626..1091bc7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackLevelConfigurationResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackLevelConfigurationResourceProvider.java
@@ -58,7 +58,7 @@ public class StackLevelConfigurationResourceProvider extends
       .getPropertyId("StackLevelConfigurations", "property_description");
   
   public static final String PROPERTY_PROPERTY_TYPE_PROPERTY_ID = PropertyHelper
-      .getPropertyId("StackConfigurations", "property_type");
+      .getPropertyId("StackLevelConfigurations", "property_type");
 
   public static final String PROPERTY_TYPE_PROPERTY_ID = PropertyHelper
       .getPropertyId("StackLevelConfigurations", "type");

http://git-wip-us.apache.org/repos/asf/ambari/blob/7c5fb7b5/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
index e4e2af1..c1c483f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
@@ -55,6 +55,8 @@ public class StackInfo implements Comparable<StackInfo>{
 
   private String upgradesFolder = null;
 
+  private volatile Map<String, PropertyInfo> requiredProperties;
+
   public String getName() {
     return name;
   }
@@ -326,4 +328,24 @@ public class StackInfo implements Comparable<StackInfo>{
     String oId = o.name + "-" + o.version;
     return myId.compareTo(oId);
   }
+
+  //todo: ensure that required properties are never modified...
+  public Map<String, PropertyInfo> getRequiredProperties() {
+    Map<String, PropertyInfo> result = requiredProperties;
+    if (result == null) {
+      synchronized(this) {
+        result = requiredProperties;
+        if (result == null) {
+          requiredProperties = result = new HashMap<String, PropertyInfo>();
+          List<PropertyInfo> properties = getProperties();
+          for (PropertyInfo propertyInfo : properties) {
+            if (propertyInfo.isRequireInput()) {
+              result.put(propertyInfo.getName(), propertyInfo);
+            }
+          }
+        }
+      }
+    }
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7c5fb7b5/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index 31626af..45c7e06 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -420,7 +420,7 @@
         "StackLevelConfigurations/property_description",
         "StackLevelConfigurations/type",
         "StackLevelConfigurations/final",
-        "StackConfigurations/property_type",
+        "StackLevelConfigurations/property_type",
         "_"
     ]
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7c5fb7b5/ambari-server/src/main/resources/stacks/HDPWIN/2.1/configuration/cluster-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/configuration/cluster-env.xml
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/configuration/cluster-env.xml
index b8f052b..a0c8673 100644
--- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/configuration/cluster-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/configuration/cluster-env.xml
@@ -21,40 +21,48 @@
 -->
 
 <configuration>
-    <property>
-      <name>hadoop.user.name</name>
-      <value>hadoop</value>
-    </property>
-    <property>
-        <name>security_enabled</name>
-        <value>false</value>
-        <description>Hadoop Security</description>
-    </property>
-    <property>
-        <name>kerberos_domain</name>
-        <value>EXAMPLE.COM</value>
-        <description>Kerberos realm.</description>
-    </property>
-    <property>
-        <name>ignore_groupsusers_create</name>
-        <value>false</value>
-        <description>Whether to ignore failures on users and group creation</description>
-    </property>
-    <property>
-        <name>smokeuser</name>
-        <value>ambari-qa</value>
-        <property-type>USER</property-type>
-        <description>User executing service checks</description>
-    </property>
-    <property>
-        <name>smokeuser_keytab</name>
-        <value>/etc/security/keytabs/smokeuser.headless.keytab</value>
-        <description>Path to smoke test user keytab file</description>
-    </property>
-    <property>
-        <name>user_group</name>
-        <value>hadoop</value>
-        <property-type>GROUP</property-type>
-        <description>Hadoop user group.</description>
-    </property>
+  <property>
+    <name>hadoop.user.name</name>
+    <value>hadoop</value>
+    <property-type>USER</property-type>
+    <description>User to run Hadoop services under</description>
+  </property>
+  <property require-input="true">
+    <name>hadoop.user.password</name>
+    <value> </value>
+    <property-type>PASSWORD</property-type>
+    <description>Password for hadoop user</description>
+  </property>
+  <property>
+    <name>security_enabled</name>
+    <value>false</value>
+    <description>Hadoop Security</description>
+  </property>
+  <property>
+    <name>kerberos_domain</name>
+    <value>EXAMPLE.COM</value>
+    <description>Kerberos realm.</description>
+  </property>
+  <property>
+    <name>ignore_groupsusers_create</name>
+    <value>false</value>
+    <description>Whether to ignore failures on users and group creation</description>
+  </property>
+  <property>
+    <name>smokeuser</name>
+    <value>ambari-qa</value>
+    <property-type>USER</property-type>
+    <description>User executing service checks</description>
+  </property>
+  <property>
+    <name>smokeuser_keytab</name>
+    <value>/etc/security/keytabs/smokeuser.headless.keytab</value>
+    <description>Path to smoke test user keytab file</description>
+  </property>
+  <property>
+    <name>user_group</name>
+    <value>hadoop</value>
+    <property-type>GROUP</property-type>
+    <description>Hadoop user group.</description>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/7c5fb7b5/ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java
b/ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java
index e7db9d3..61ea1ad 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRendererTest.java
@@ -31,8 +31,10 @@ import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.internal.ResourceImpl;
 import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.StackInfo;
 import org.junit.Test;
 
+import java.lang.reflect.Field;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.Arrays;
@@ -45,6 +47,7 @@ import java.util.Set;
 import static org.easymock.EasyMock.createMock;
 import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
@@ -102,14 +105,14 @@ public class ClusterBlueprintRendererTest {
   public void testFinalizeResult() throws Exception{
 
     AmbariManagementController controller = createMock(AmbariManagementController.class);
-    AmbariMetaInfo stackInfo = createNiceMock(AmbariMetaInfo.class);
-    ServiceInfo hdfsService = new ServiceInfo();
-    hdfsService.setName("HDFS");
-    ServiceInfo mrService = new ServiceInfo();
-    mrService.setName("MAPREDUCE");
-
-    expect(stackInfo.getService("HDP", "1.3.3", "HDFS")).andReturn(hdfsService);
-    expect(stackInfo.getService("HDP", "1.3.3", "MAPREDUCE")).andReturn(mrService);
+    AmbariMetaInfo ambariMetaInfo = createNiceMock(AmbariMetaInfo.class);
+    StackInfo stack = new StackInfo();
+    stack.setName("HDP");
+    stack.setVersion("1.3.3");
+
+    expect(controller.getAmbariMetaInfo()).andReturn(ambariMetaInfo).anyTimes();
+    expect(ambariMetaInfo.getStack("HDP", "1.3.3")).andReturn(stack).anyTimes();
+    replay(controller, ambariMetaInfo);
 
     Result result = new ResultImpl(true);
     createClusterResultTree(result.getResultTree());

http://git-wip-us.apache.org/repos/asf/ambari/blob/7c5fb7b5/ambari-web/app/data/HDP2/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/site_properties.js b/ambari-web/app/data/HDP2/site_properties.js
index fa1490a..74912c3 100644
--- a/ambari-web/app/data/HDP2/site_properties.js
+++ b/ambari-web/app/data/HDP2/site_properties.js
@@ -18,9 +18,7 @@
 
 var App = require('app');
 
-module.exports =
-{
-  "configProperties": [
+var hdp2properties = [
   //***************************************** HDP stack **************************************
   /**********************************************HDFS***************************************/
     {
@@ -47,37 +45,6 @@ module.exports =
       "index": 3
     },
     {
-      "id": "puppet var",
-      "name": "hadoop.user.name",
-      "displayName": "Hadoop User Name",
-      "description": "User to run Hadoop services under",
-      "defaultValue": "hadoop",
-      "isReconfigurable": false,
-      "displayType": "user",
-      "isOverridable": false,
-      "isVisible": App.get('isHadoopWindowsStack'),
-      "serviceName": "MISC",
-      "filename": "cluster-env.xml",
-      "category": "Users and Groups",
-      "belongsToService": ["HDFS"],
-      "index": 0
-    },
-    {
-      "id": "puppet var",
-      "name": "hadoop.user.password",
-      "displayName": "Hadoop User Password",
-      "description": "Password for hadoop user",
-      "isReconfigurable": false,
-      "displayType": "password",
-      "isOverridable": false,
-      "isVisible": App.get('isHadoopWindowsStack'),
-      "serviceName": "MISC",
-      "filename": "cluster-env.xml",
-      "category": "Users and Groups",
-      "belongsToService": ["HDFS"],
-      "index": 1
-    },
-    {
       "id": "site property",
       "name": "dfs.namenode.name.dir",
       "displayName": "NameNode directories",
@@ -4540,5 +4507,44 @@ module.exports =
       "category": "Ambari Principals",
       "index" : 6
     }
-  ]
+  ];
+if (App.get('isHadoopWindowsStack')) {
+  hdp2properties.push(
+    {
+      "id": "puppet var",
+      "name": "hadoop.user.name",
+      "displayName": "Hadoop User Name",
+      "description": "User to run Hadoop services under",
+      "defaultValue": "hadoop",
+      "isReconfigurable": false,
+      "displayType": "user",
+      "isOverridable": false,
+      "isVisible": App.get('isHadoopWindowsStack'),
+      "serviceName": "MISC",
+      "filename": "cluster-env.xml",
+      "category": "Users and Groups",
+      "belongsToService": ["HDFS"],
+      "index": 0
+    },
+    {
+      "id": "puppet var",
+      "name": "hadoop.user.password",
+      "displayName": "Hadoop User Password",
+      "description": "Password for hadoop user",
+      "isReconfigurable": false,
+      "displayType": "password",
+      "isOverridable": false,
+      "isVisible": App.get('isHadoopWindowsStack'),
+      "serviceName": "MISC",
+      "filename": "cluster-env.xml",
+      "category": "Users and Groups",
+      "belongsToService": ["HDFS"],
+      "index": 1
+    }
+  );
+}
+
+module.exports =
+{
+  "configProperties": hdp2properties
 };

http://git-wip-us.apache.org/repos/asf/ambari/blob/7c5fb7b5/ambari-web/app/utils/config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/config.js b/ambari-web/app/utils/config.js
index f3a2fc6..1a4a50d 100644
--- a/ambari-web/app/utils/config.js
+++ b/ambari-web/app/utils/config.js
@@ -830,7 +830,7 @@ App.config = Em.Object.create({
     var properties = [];
     if (data.items.length) {
       data.items.forEach(function (item) {
-        item.StackLevelConfigurations.property_type = item.StackConfigurations.property_type || [];
+        item.StackLevelConfigurations.property_type = item.StackLevelConfigurations.property_type || [];
         item.StackLevelConfigurations.service_name = 'MISC';
         var property = this.createAdvancedPropertyObject(item.StackLevelConfigurations);
         if (property) properties.push(property);

http://git-wip-us.apache.org/repos/asf/ambari/blob/7c5fb7b5/ambari-web/test/mock_data_setup/configs_mock_data.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mock_data_setup/configs_mock_data.js b/ambari-web/test/mock_data_setup/configs_mock_data.js
index cdcb4aa..7028920 100644
--- a/ambari-web/test/mock_data_setup/configs_mock_data.js
+++ b/ambari-web/test/mock_data_setup/configs_mock_data.js
@@ -471,13 +471,11 @@ module.exports = {
   advancedClusterConfigs: {
     items: [
       {
-        "StackConfigurations" : {
-          "property_type" : [ ]
-        },
         "StackLevelConfigurations" : {
           "final" : "false",
           "property_description" : "Whether to ignore failures on users and group creation",
           "property_name" : "ignore_groupsusers_create",
+          "property_type" : [ ],
           "property_value" : "false",
           "stack_name" : "HDP",
           "stack_version" : "2.2",
@@ -485,15 +483,13 @@ module.exports = {
         }
       },
       {
-        "StackConfigurations" : {
-          "property_type" : [
-            "GROUP"
-          ]
-        },
         "StackLevelConfigurations" : {
           "final" : "false",
           "property_description" : "Hadoop user group.",
           "property_name" : "user_group",
+          "property_type" : [
+            "GROUP"
+          ],
           "property_value" : "hadoop",
           "stack_name" : "HDP",
           "stack_version" : "2.2",


Mime
View raw message