ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From nc...@apache.org
Subject [42/51] [abbrv] ambari git commit: AMBARI-14445 : Switch to cluster Zookeeper for AMS in distributed mode deployment
Date Wed, 23 Dec 2015 15:07:21 GMT
AMBARI-14445 : Switch to cluster Zookeeper for AMS in distributed mode deployment


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b613c336
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b613c336
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b613c336

Branch: refs/heads/branch-dev-patch-upgrade
Commit: b613c336d611b3995f7eb0cf8ebe482500e02d39
Parents: 6b4aaa0
Author: Aravindan Vijayan <avijayan@hortonworks.com>
Authored: Tue Dec 22 12:59:41 2015 -0800
Committer: Aravindan Vijayan <avijayan@hortonworks.com>
Committed: Tue Dec 22 16:50:26 2015 -0800

----------------------------------------------------------------------
 .../query/DefaultPhoenixDataSource.java         |   2 +-
 .../TestApplicationHistoryServer.java           |   2 +-
 .../server/upgrade/UpgradeCatalog220.java       |   2 +-
 .../server/upgrade/UpgradeCatalog221.java       | 100 +++++++++
 .../AMBARI_METRICS/0.1.0/alerts.json            |  25 ---
 .../0.1.0/configuration/ams-env.xml             |   2 +-
 .../0.1.0/configuration/ams-hbase-env.xml       |   8 +-
 .../configuration/ams-hbase-security-site.xml   |   7 -
 .../0.1.0/configuration/ams-hbase-site.xml      |   4 +
 .../AMBARI_METRICS/0.1.0/kerberos.json          |   6 +-
 .../0.1.0/package/scripts/ams_service.py        |   9 +-
 .../0.1.0/package/scripts/hbase.py              |   9 +
 .../0.1.0/package/scripts/params.py             |   4 +-
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |  55 +++--
 .../server/upgrade/UpgradeCatalog220Test.java   |   2 +-
 .../server/upgrade/UpgradeCatalog221Test.java   | 215 +++++++++++++++++++
 .../AMBARI_METRICS/test_metrics_collector.py    |  35 +--
 .../stacks/2.2/common/test_stack_advisor.py     |   2 +
 .../2.2/configs/ranger-admin-upgrade.json       |   6 +-
 .../2.2/configs/ranger-usersync-upgrade.json    |   6 +-
 .../test_kerberos_descriptor_2_1_3.json         |   9 +-
 .../data/stacks/HDP-2.1/service_components.json |   6 +-
 22 files changed, 436 insertions(+), 80 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/DefaultPhoenixDataSource.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/DefaultPhoenixDataSource.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/DefaultPhoenixDataSource.java
index 562049b..8283f7d 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/DefaultPhoenixDataSource.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/DefaultPhoenixDataSource.java
@@ -41,7 +41,7 @@ public class DefaultPhoenixDataSource implements ConnectionProvider {
     String zookeeperClientPort = hbaseConf.getTrimmed(ZOOKEEPER_CLIENT_PORT,
       "2181");
     String zookeeperQuorum = hbaseConf.getTrimmed(ZOOKEEPER_QUORUM);
-    String znodeParent = hbaseConf.getTrimmed(ZNODE_PARENT, "/hbase");
+    String znodeParent = hbaseConf.getTrimmed(ZNODE_PARENT, "/ams-hbase-unsecure");
     if (zookeeperQuorum == null || zookeeperQuorum.isEmpty()) {
       throw new IllegalStateException("Unable to find Zookeeper quorum to " +
         "access HBase store using Phoenix.");

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
index e1d256d..a8bbc73 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
@@ -157,7 +157,7 @@ public class TestApplicationHistoryServer {
     Connection connection = createNiceMock(Connection.class);
     Statement stmt = createNiceMock(Statement.class);
     mockStatic(DriverManager.class);
-    expect(DriverManager.getConnection("jdbc:phoenix:localhost:2181:/hbase"))
+    expect(DriverManager.getConnection("jdbc:phoenix:localhost:2181:/ams-hbase-unsecure"))
       .andReturn(connection).anyTimes();
     expect(connection.createStatement()).andReturn(stmt).anyTimes();
     suppress(method(Statement.class, "executeUpdate", String.class));

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog220.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog220.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog220.java
index 1e39143..a434429 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog220.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog220.java
@@ -1079,7 +1079,7 @@ public class UpgradeCatalog220 extends AbstractUpgradeCatalog {
           }
 
           Config amsEnv = cluster.getDesiredConfigByType(AMS_ENV);
-          if (amsHbaseEnv != null) {
+          if (amsEnv != null) {
             Map<String, String> amsEnvProperties = amsEnv.getProperties();
             String content = amsEnvProperties.get("content");
             Map<String, String> newProperties = new HashMap<>();

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
index 21f601e..1257f70 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog221.java
@@ -33,11 +33,13 @@ import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
+import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
@@ -49,6 +51,14 @@ import java.util.UUID;
  */
 public class UpgradeCatalog221 extends AbstractUpgradeCatalog {
 
+  private static final String AMS_HBASE_SITE = "ams-hbase-site";
+  private static final String AMS_HBASE_SECURITY_SITE = "ams-hbase-security-site";
+  private static final String AMS_ENV = "ams-env";
+  private static final String AMS_HBASE_ENV = "ams-hbase-env";
+  private static final String ZK_ZNODE_PARENT = "zookeeper.znode.parent";
+  private static final String CLUSTER_ENV = "cluster-env";
+  private static final String SECURITY_ENABLED = "security_enabled";
+
   @Inject
   DaoUtils daoUtils;
 
@@ -182,6 +192,96 @@ public class UpgradeCatalog221 extends AbstractUpgradeCatalog {
 
     return sourceJson.toString();
   }
+  protected void updateAMSConfigs() throws AmbariException {
+    AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+    Clusters clusters = ambariManagementController.getClusters();
+
+    if (clusters != null) {
+      Map<String, Cluster> clusterMap = clusters.getClusters();
+
+      if (clusterMap != null && !clusterMap.isEmpty()) {
+        for (final Cluster cluster : clusterMap.values()) {
+
+          Config amsEnv = cluster.getDesiredConfigByType(AMS_ENV);
+          if (amsEnv != null) {
+            Map<String, String> amsEnvProperties = amsEnv.getProperties();
+            String content = amsEnvProperties.get("content");
+            Map<String, String> newProperties = new HashMap<>();
+            newProperties.put("content", updateAmsEnvContent(content));
+            updateConfigurationPropertiesForCluster(cluster, AMS_ENV, newProperties, true, true);
+          }
+
+          String znodeParent = null;
+          Config amsHbaseSecuritySite = cluster.getDesiredConfigByType(AMS_HBASE_SECURITY_SITE);
+          if (amsHbaseSecuritySite != null) {
+            Map<String, String> amsHbaseSecuritySiteProperties = amsHbaseSecuritySite.getProperties();
+            znodeParent = amsHbaseSecuritySiteProperties.get(ZK_ZNODE_PARENT);
+            LOG.info("Removing config zookeeper.znode.parent from ams-hbase-security-site");
+            removeConfigurationPropertiesFromCluster(cluster, AMS_HBASE_SECURITY_SITE, Collections.singleton(ZK_ZNODE_PARENT));
+          }
+
+          Config amsHbaseSite = cluster.getDesiredConfigByType(AMS_HBASE_SITE);
+          if (amsHbaseSite != null) {
+            Map<String, String> amsHbaseSiteProperties = amsHbaseSite.getProperties();
+            Map<String, String> newProperties = new HashMap<>();
+
+            if (!amsHbaseSiteProperties.containsKey(ZK_ZNODE_PARENT)) {
+
+              if (StringUtils.isEmpty(znodeParent) || "/hbase".equals(znodeParent)) {
+
+                boolean isSecurityEnabled = false;
+                Config clusterEnv = cluster.getDesiredConfigByType(CLUSTER_ENV);
+                if (clusterEnv != null) {
+                  Map<String,String> clusterEnvProperties = clusterEnv.getProperties();
+                  if (clusterEnvProperties.containsKey(SECURITY_ENABLED)) {
+                    isSecurityEnabled = Boolean.valueOf(clusterEnvProperties.get(SECURITY_ENABLED));
+                  }
+                }
+                znodeParent = "/ams-hbase-" + (isSecurityEnabled ? "secure" : "unsecure");
+              }
+
+              LOG.info("Adding config zookeeper.znode.parent=" + znodeParent + " to ams-hbase-site");
+              newProperties.put(ZK_ZNODE_PARENT, znodeParent);
+
+            }
+            updateConfigurationPropertiesForCluster(cluster, AMS_HBASE_SITE, newProperties, true, true);
+          }
+
+          Config amsHbaseEnv = cluster.getDesiredConfigByType(AMS_HBASE_ENV);
+          if (amsHbaseEnv != null) {
+            Map<String, String> amsHbaseEnvProperties = amsHbaseEnv.getProperties();
+            String content = amsHbaseEnvProperties.get("content");
+            Map<String, String> newProperties = new HashMap<>();
+            newProperties.put("content", updateAmsHbaseEnvContent(content));
+            updateConfigurationPropertiesForCluster(cluster, AMS_HBASE_ENV, newProperties, true, true);
+          }
+        }
+      }
+    }
+  }
+
+  protected String updateAmsHbaseEnvContent(String content) {
+    if (content == null) {
+      return null;
+    }
+    String regSearch = "_jaas_config_file\\}\\} -Dzookeeper.sasl.client.username=\\{\\{zk_servicename\\}\\}";
+    String replacement = "_jaas_config_file}}";
+    content = content.replaceAll(regSearch, replacement);
+    return content;
+  }
+
+  protected String updateAmsEnvContent(String content) {
+
+    if (content == null) {
+      return null;
+    }
+    String regSearch = "-Djava.security.auth.login.config=\\{\\{ams_collector_jaas_config_file\\}\\} " +
+      "-Dzookeeper.sasl.client.username=\\{\\{zk_servicename\\}\\}";
+    String replacement = "-Djava.security.auth.login.config={{ams_collector_jaas_config_file}}";
+    content = content.replaceAll(regSearch, replacement);
+
+    return content;
+  }
 
   protected void updateOozieConfigs() throws AmbariException {
     AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/alerts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/alerts.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/alerts.json
index 4015590..2f5246b 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/alerts.json
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/alerts.json
@@ -139,31 +139,6 @@
             "value": "{0} * 100"
           }
         }
-      },
-      {
-        "name": "ams_metrics_collector_zookeeper_server_process",
-        "label": "Metrics Collector - ZooKeeper Server Process",
-        "description": "This host-level alert is triggered if the Metrics Collector's ZooKeeper server process cannot be determined to be up and listening on the network.",
-        "interval": 1,
-        "scope": "ANY",
-        "source": {
-          "type": "PORT",
-          "uri": "{{ams-hbase-site/hbase.zookeeper.property.clientPort}}",
-          "default_port": 61181,
-          "reporting": {
-            "ok": {
-              "text": "TCP OK - {0:.3f}s response on port {1}"
-            },
-            "warning": {
-              "text": "TCP OK - {0:.3f}s response on port {1}",
-              "value": 1.5
-            },
-            "critical": {
-              "text": "Connection failed: {0} to {1}:{2}",
-              "value": 5.0
-            }
-          }
-        }
       }
     ],
     "METRICS_MONITOR": [

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-env.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-env.xml
index 96e2bb3..77cd219 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-env.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-env.xml
@@ -109,7 +109,7 @@ export AMS_HBASE_FIFO_COMPACTION_ENABLED={{ams_hbase_fifo_compaction_enabled}}
 # AMS Collector options
 export AMS_COLLECTOR_OPTS="-Djava.library.path=/usr/lib/ams-hbase/lib/hadoop-native"
 {% if security_enabled %}
-export AMS_COLLECTOR_OPTS="$AMS_COLLECTOR_OPTS -Djava.security.auth.login.config={{ams_collector_jaas_config_file}} -Dzookeeper.sasl.client.username={{zk_servicename}}"
+export AMS_COLLECTOR_OPTS="$AMS_COLLECTOR_OPTS -Djava.security.auth.login.config={{ams_collector_jaas_config_file}}"
 {% endif %}
 
 # AMS Collector GC options

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
index a061006..191e8b2 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
@@ -222,10 +222,10 @@ export HBASE_PID_DIR={{hbase_pid_dir}}
 export HBASE_MANAGES_ZK=false
 
 {% if security_enabled %}
-export HBASE_OPTS="$HBASE_OPTS -Djava.security.auth.login.config={{client_jaas_config_file}} -Dzookeeper.sasl.client.username={{zk_servicename}}"
-export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Djava.security.auth.login.config={{master_jaas_config_file}} -Dzookeeper.sasl.client.username={{zk_servicename}}"
-export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Djava.security.auth.login.config={{regionserver_jaas_config_file}} -Dzookeeper.sasl.client.username={{zk_servicename}}"
-export HBASE_ZOOKEEPER_OPTS="$HBASE_ZOOKEEPER_OPTS -Djava.security.auth.login.config={{ams_zookeeper_jaas_config_file}} -Dzookeeper.sasl.client.username={{zk_servicename}}"
+export HBASE_OPTS="$HBASE_OPTS -Djava.security.auth.login.config={{client_jaas_config_file}}"
+export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Djava.security.auth.login.config={{master_jaas_config_file}}"
+export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Djava.security.auth.login.config={{regionserver_jaas_config_file}}"
+export HBASE_ZOOKEEPER_OPTS="$HBASE_ZOOKEEPER_OPTS -Djava.security.auth.login.config={{ams_zookeeper_jaas_config_file}}"
 {% endif %}
 
 # use embedded native libs

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-security-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-security-site.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-security-site.xml
index 5e7bc518..b1f702f 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-security-site.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-security-site.xml
@@ -139,11 +139,4 @@
       <empty-value-valid>true</empty-value-valid>
     </value-attributes>
   </property>
-  <property>
-    <name>zookeeper.znode.parent</name>
-    <value></value>
-    <value-attributes>
-      <empty-value-valid>true</empty-value-valid>
-    </value-attributes>
-  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-site.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-site.xml
index e97cfee..ccb7252 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-site.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-site.xml
@@ -383,5 +383,9 @@
       </property>
     </depends-on>
   </property>
+  <property>
+    <name>zookeeper.znode.parent</name>
+    <value>/ams-hbase-unsecure</value>
+  </property>
 
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/kerberos.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/kerberos.json
index dac60f3..34de6a8 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/kerberos.json
@@ -104,12 +104,16 @@
                 "hadoop.security.authentication": "kerberos",
                 "hbase.coprocessor.master.classes": "org.apache.hadoop.hbase.security.access.AccessController",
                 "hbase.coprocessor.region.classes": "org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.AccessController",
-                "zookeeper.znode.parent": "/ams-hbase-secure",
                 "hbase.zookeeper.property.kerberos.removeHostFromPrincipal": "true",
                 "hbase.zookeeper.property.kerberos.removeRealmFromPrincipal": "true",
                 "hbase.zookeeper.property.authProvider.1": "org.apache.zookeeper.server.auth.SASLAuthenticationProvider",
                 "hbase.zookeeper.property.jaasLoginRenew": "3600000"
               }
+            },
+            {
+              "ams-hbase-site": {
+                "zookeeper.znode.parent": "/ams-hbase-secure"
+              }
             }
           ]
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams_service.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams_service.py
index 0726802..3d1ffda 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams_service.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams_service.py
@@ -45,7 +45,6 @@ def ams_service(name, action):
     #no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1")
 
     if params.is_hbase_distributed:
-      hbase_service('zookeeper', action=action)
       hbase_service('master', action=action)
       hbase_service('regionserver', action=action)
       cmd = format("{cmd} --distributed")
@@ -59,6 +58,14 @@ def ams_service(name, action):
                   action='delete'
         )
 
+      if not params.is_hbase_distributed:
+        File(format("{ams_collector_conf_dir}/core-site.xml"),
+             action='delete',
+             owner=params.ams_user)
+
+        File(format("{ams_collector_conf_dir}/hdfs-site.xml"),
+             action='delete',
+             owner=params.ams_user)
 
       if params.security_enabled:
         kinit_cmd = format("{kinit_path_local} -kt {ams_collector_keytab_path} {ams_collector_jaas_princ};")

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
index 556380e..7fb9715 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
@@ -118,6 +118,15 @@ def hbase(name=None # 'master' or 'regionserver' or 'client'
   if params.security_enabled:
     merged_ams_hbase_site.update(params.config['configurations']['ams-hbase-security-site'])
 
+  if not params.is_hbase_distributed:
+    File(format("{hbase_conf_dir}/core-site.xml"),
+         action='delete',
+         owner=params.hbase_user)
+
+    File(format("{hbase_conf_dir}/hdfs-site.xml"),
+         action='delete',
+         owner=params.hbase_user)
+
   XmlConfig("hbase-site.xml",
             conf_dir = params.hbase_conf_dir,
             configurations = merged_ams_hbase_site,

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
index 87fd476..16c885b 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
@@ -147,7 +147,7 @@ max_open_files_limit = default("/configurations/ams-hbase-env/max_open_files_lim
 if not is_hbase_distributed:
   zookeeper_quorum_hosts = 'localhost'
 else:
-  zookeeper_quorum_hosts = default("/hostname", 'localhost')
+  zookeeper_quorum_hosts = ",".join(config['clusterHostInfo']['zookeeper_hosts'])
 
 ams_checkpoint_dir = config['configurations']['ams-site']['timeline.metrics.aggregator.checkpoint.dir']
 hbase_pid_dir = status_params.hbase_pid_dir
@@ -202,8 +202,6 @@ if security_enabled:
   regionserver_keytab_path = config['configurations']['ams-hbase-security-site']['hbase.regionserver.keytab.file']
   regionserver_jaas_princ = config['configurations']['ams-hbase-security-site']['hbase.regionserver.kerberos.principal'].replace('_HOST',_hostname_lowercase)
 
-  zk_servicename = ams_zookeeper_principal_name.rpartition('/')[0]
-
 #log4j.properties
 if (('ams-hbase-log4j' in config['configurations']) and ('content' in config['configurations']['ams-hbase-log4j'])):
   hbase_log4j_props = config['configurations']['ams-hbase-log4j']['content']

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index a723f75..97daa79 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -454,7 +454,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
   def recommendAmsConfigurations(self, configurations, clusterData, services, hosts):
     putAmsEnvProperty = self.putProperty(configurations, "ams-env", services)
     putAmsHbaseSiteProperty = self.putProperty(configurations, "ams-hbase-site", services)
-    putTimelineServiceProperty = self.putProperty(configurations, "ams-site", services)
+    putAmsSiteProperty = self.putProperty(configurations, "ams-site", services)
     putHbaseEnvProperty = self.putProperty(configurations, "ams-hbase-env", services)
 
     amsCollectorHosts = self.getComponentHostNames(services, "AMBARI_METRICS", "METRICS_COLLECTOR")
@@ -470,6 +470,12 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
       if "hbase.cluster.distributed" in services["configurations"]["ams-hbase-site"]["properties"]:
         hbaseClusterDistributed = services["configurations"]["ams-hbase-site"]["properties"]["hbase.cluster.distributed"].lower() == 'true'
 
+    if hbaseClusterDistributed:
+      zkPort = self.getZKPort(services)
+      putAmsHbaseSiteProperty("hbase.zookeeper.property.clientPort", zkPort)
+    else:
+      putAmsHbaseSiteProperty("hbase.zookeeper.property.clientPort", "61181")
+
     mountpoints = ["/"]
     for collectorHostName in amsCollectorHosts:
       for host in hosts["items"]:
@@ -497,7 +503,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
     putAmsHbaseSiteProperty("hbase.hregion.memstore.flush.size", 134217728)
     putAmsHbaseSiteProperty("hbase.regionserver.global.memstore.upperLimit", 0.35)
     putAmsHbaseSiteProperty("hbase.regionserver.global.memstore.lowerLimit", 0.3)
-    putTimelineServiceProperty("timeline.metrics.host.aggregator.ttl", 86400)
+    putAmsSiteProperty("timeline.metrics.host.aggregator.ttl", 86400)
 
     if len(amsCollectorHosts) > 1:
       pass
@@ -511,7 +517,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
         putAmsHbaseSiteProperty("hbase.regionserver.global.memstore.upperLimit", 0.3)
         putAmsHbaseSiteProperty("hbase.regionserver.global.memstore.lowerLimit", 0.25)
         putAmsHbaseSiteProperty("phoenix.query.maxGlobalMemoryPercentage", 20)
-        putTimelineServiceProperty("phoenix.query.maxGlobalMemoryPercentage", 30)
+        putAmsSiteProperty("phoenix.query.maxGlobalMemoryPercentage", 30)
         putAmsHbaseSiteProperty("phoenix.coprocessor.maxMetaDataCacheSize", 81920000)
       elif total_sinks_count >= 500:
         putAmsHbaseSiteProperty("hbase.regionserver.handler.count", 60)
@@ -579,8 +585,8 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
       precision_splits = result.precision
     if result.aggregate:
       aggregate_splits = result.aggregate
-    putTimelineServiceProperty("timeline.metrics.host.aggregate.splitpoints", ','.join(precision_splits))
-    putTimelineServiceProperty("timeline.metrics.cluster.aggregate.splitpoints", ','.join(aggregate_splits))
+    putAmsSiteProperty("timeline.metrics.host.aggregate.splitpoints", ','.join(precision_splits))
+    putAmsSiteProperty("timeline.metrics.cluster.aggregate.splitpoints", ','.join(aggregate_splits))
 
     pass
 
@@ -621,10 +627,11 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
                               and hostname in componentEntry["StackServiceComponents"]["hostnames"]])
     return components
 
-  def getZKHostPortString(self, services):
+  def getZKHostPortString(self, services, include_port=True):
     """
     Returns the comma delimited string of zookeeper server host with the configure port installed in a cluster
     Example: zk.host1.org:2181,zk.host2.org:2181,zk.host3.org:2181
+    include_port boolean param -> If port is also needed.
     """
     servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
     include_zookeeper = "ZOOKEEPER" in servicesList
@@ -632,16 +639,25 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
 
     if include_zookeeper:
       zookeeper_hosts = self.getHostNamesWithComponent("ZOOKEEPER", "ZOOKEEPER_SERVER", services)
-      zookeeper_port = '2181'     #default port
-      if 'zoo.cfg' in services['configurations'] and ('clientPort' in services['configurations']['zoo.cfg']['properties']):
-        zookeeper_port = services['configurations']['zoo.cfg']['properties']['clientPort']
-
       zookeeper_host_port_arr = []
-      for i in range(len(zookeeper_hosts)):
-        zookeeper_host_port_arr.append(zookeeper_hosts[i] + ':' + zookeeper_port)
+
+      if include_port:
+        zookeeper_port = self.getZKPort(services)
+        for i in range(len(zookeeper_hosts)):
+          zookeeper_host_port_arr.append(zookeeper_hosts[i] + ':' + zookeeper_port)
+      else:
+        for i in range(len(zookeeper_hosts)):
+          zookeeper_host_port_arr.append(zookeeper_hosts[i])
+
       zookeeper_host_port = ",".join(zookeeper_host_port_arr)
     return zookeeper_host_port
 
+  def getZKPort(self, services):
+    zookeeper_port = '2181'     #default port
+    if 'zoo.cfg' in services['configurations'] and ('clientPort' in services['configurations']['zoo.cfg']['properties']):
+      zookeeper_port = services['configurations']['zoo.cfg']['properties']['clientPort']
+    return zookeeper_port
+
   def getConfigurationClusterSummary(self, servicesList, hosts, components, services):
 
     hBaseInstalled = False
@@ -850,8 +866,21 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
     if hbase_rootdir and hbase_rootdir.startswith("hdfs://") and not distributed.lower() == "true":
       distributed_item = self.getErrorItem("Distributed property should be set to true if hbase.rootdir points to HDFS.")
 
+    hbase_zk_client_port = properties.get("hbase.zookeeper.property.clientPort")
+    zkPort = self.getZKPort(services)
+    hbase_zk_client_port_item = None
+    if distributed.lower() == "true" and op_mode == "distributed" and hbase_zk_client_port != zkPort:
+      hbase_zk_client_port_item = self.getErrorItem("In AMS distributed mode, hbase.zookeeper.property.clientPort "
+                                                    "should be the cluster zookeeper server port : {0}".format(zkPort))
+
+    if distributed.lower() == "false" and op_mode == "embedded" and hbase_zk_client_port == zkPort:
+      hbase_zk_client_port_item = self.getErrorItem("In AMS embedded mode, hbase.zookeeper.property.clientPort "
+                                                    "should be a different port than cluster zookeeper port. "
+                                                    "(default:61181)")
+
     validationItems.extend([{"config-name":'hbase.rootdir', "item": rootdir_item },
-                            {"config-name":'hbase.cluster.distributed', "item": distributed_item }])
+                            {"config-name":'hbase.cluster.distributed', "item": distributed_item },
+                            {"config-name":'hbase.zookeeper.property.clientPort', "item": hbase_zk_client_port_item }])
 
     for collectorHostName in amsCollectorHosts:
       for host in hosts["items"]:

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog220Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog220Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog220Test.java
index f0b4501..8263001 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog220Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog220Test.java
@@ -845,7 +845,7 @@ public class UpgradeCatalog220Test {
     String result = (String) updateAmsEnvContent.invoke(upgradeCatalog220, oldContent);
     Assert.assertEquals(expectedContent, result);
   }
-  
+
   public void testUpdateKafkaConfigs() throws Exception {
     EasyMockSupport easyMockSupport = new EasyMockSupport();
     final AmbariManagementController mockAmbariManagementController = easyMockSupport.createNiceMock(AmbariManagementController.class);

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
index 5eb3c14..bbae7a7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog221Test.java
@@ -20,14 +20,21 @@ package org.apache.ambari.server.upgrade;
 
 
 import com.google.inject.AbstractModule;
+import com.google.common.collect.Maps;
+import com.google.gson.Gson;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
 import com.google.inject.Provider;
 import com.google.inject.persist.PersistService;
 import junit.framework.Assert;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.controller.AmbariManagementControllerImpl;
+import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.controller.MaintenanceStateHelper;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.dao.StackDAO;
@@ -36,12 +43,15 @@ import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.stack.OsFamily;
+import org.easymock.Capture;
+import org.easymock.EasyMock;
 import org.easymock.EasyMockSupport;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
 import javax.persistence.EntityManager;
+import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.util.Collections;
 import java.util.HashMap;
@@ -55,6 +65,10 @@ import static org.easymock.EasyMock.expectLastCall;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.reset;
 import static org.easymock.EasyMock.verify;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.anyString;
+import static org.easymock.EasyMock.capture;
+import static org.junit.Assert.assertTrue;
 
 public class UpgradeCatalog221Test {
   private Injector injector;
@@ -194,4 +208,205 @@ public class UpgradeCatalog221Test {
     easyMockSupport.verifyAll();
   }
 
+  @Test
+  public void testUpdateAmsHbaseSiteConfigs() throws Exception {
+
+    Map<String, String> clusterEnvProperties = new HashMap<String, String>();
+    Map<String, String> amsHbaseSecuritySite = new HashMap<String, String>();
+    Map<String, String> newPropertiesAmsHbaseSite = new HashMap<String, String>();
+
+    //Unsecure
+    amsHbaseSecuritySite.put("zookeeper.znode.parent", "/ams-hbase-unsecure");
+    newPropertiesAmsHbaseSite.put("zookeeper.znode.parent", "/ams-hbase-unsecure");
+    testAmsHbaseSiteUpdates(new HashMap<String, String>(),
+      newPropertiesAmsHbaseSite,
+      amsHbaseSecuritySite,
+      clusterEnvProperties);
+
+    //Secure
+    amsHbaseSecuritySite.put("zookeeper.znode.parent", "/ams-hbase-secure");
+    newPropertiesAmsHbaseSite.put("zookeeper.znode.parent", "/ams-hbase-secure");
+    testAmsHbaseSiteUpdates(new HashMap<String, String>(),
+      newPropertiesAmsHbaseSite,
+      amsHbaseSecuritySite,
+      clusterEnvProperties);
+
+    //Unsecure with empty value
+    clusterEnvProperties.put("security_enabled","false");
+    amsHbaseSecuritySite.put("zookeeper.znode.parent", "");
+    newPropertiesAmsHbaseSite.put("zookeeper.znode.parent", "/ams-hbase-unsecure");
+    testAmsHbaseSiteUpdates(new HashMap<String, String>(),
+      newPropertiesAmsHbaseSite,
+      amsHbaseSecuritySite,
+      clusterEnvProperties);
+
+    //Secure with /hbase value
+    clusterEnvProperties.put("security_enabled","true");
+    amsHbaseSecuritySite.put("zookeeper.znode.parent", "/hbase");
+    newPropertiesAmsHbaseSite.put("zookeeper.znode.parent", "/ams-hbase-secure");
+    testAmsHbaseSiteUpdates(new HashMap<String, String>(),
+      newPropertiesAmsHbaseSite,
+      amsHbaseSecuritySite,
+      clusterEnvProperties);
+
+  }
+
+  private void testAmsHbaseSiteUpdates(Map<String, String> oldPropertiesAmsHbaseSite,
+                                       Map<String, String> newPropertiesAmsHbaseSite,
+                                       Map<String, String> amsHbaseSecuritySiteProperties,
+                                       Map<String, String> clusterEnvProperties ) throws AmbariException {
+
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+    Clusters clusters = easyMockSupport.createNiceMock(Clusters.class);
+    final Cluster cluster = easyMockSupport.createNiceMock(Cluster.class);
+    expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", cluster);
+    }}).once();
+
+    Config mockAmsHbaseSite = easyMockSupport.createNiceMock(Config.class);
+    expect(cluster.getDesiredConfigByType("ams-hbase-site")).andReturn(mockAmsHbaseSite).atLeastOnce();
+    expect(mockAmsHbaseSite.getProperties()).andReturn(oldPropertiesAmsHbaseSite).times(2);
+
+    Config mockAmsHbaseSecuritySite = easyMockSupport.createNiceMock(Config.class);
+    expect(cluster.getDesiredConfigByType("ams-hbase-security-site")).andReturn(mockAmsHbaseSecuritySite).anyTimes();
+    expect(mockAmsHbaseSecuritySite.getProperties()).andReturn(amsHbaseSecuritySiteProperties).anyTimes();
+
+    Config clusterEnv = easyMockSupport.createNiceMock(Config.class);
+    expect(cluster.getDesiredConfigByType("cluster-env")).andReturn(clusterEnv).anyTimes();
+    expect(clusterEnv.getProperties()).andReturn(clusterEnvProperties).anyTimes();
+
+    Injector injector = easyMockSupport.createNiceMock(Injector.class);
+    expect(injector.getInstance(Gson.class)).andReturn(null).anyTimes();
+    expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(null).anyTimes();
+    expect(injector.getInstance(KerberosHelper.class)).andReturn(createNiceMock(KerberosHelper.class)).anyTimes();
+
+    replay(injector, clusters, mockAmsHbaseSite, mockAmsHbaseSecuritySite, clusterEnv, cluster);
+
+    AmbariManagementControllerImpl controller = createMockBuilder(AmbariManagementControllerImpl.class)
+      .addMockedMethod("createConfiguration")
+      .addMockedMethod("getClusters", new Class[] { })
+      .addMockedMethod("createConfig")
+      .withConstructor(createNiceMock(ActionManager.class), clusters, injector)
+      .createNiceMock();
+
+    Injector injector2 = easyMockSupport.createNiceMock(Injector.class);
+    Capture<Map> propertiesCapture = EasyMock.newCapture();
+
+    expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
+    expect(controller.getClusters()).andReturn(clusters).anyTimes();
+    expect(controller.createConfig(anyObject(Cluster.class), anyString(), capture(propertiesCapture), anyString(),
+      anyObject(Map.class))).andReturn(createNiceMock(Config.class)).anyTimes();
+
+    replay(controller, injector2);
+    new UpgradeCatalog221(injector2).updateAMSConfigs();
+    easyMockSupport.verifyAll();
+
+    Map<String, String> updatedProperties = propertiesCapture.getValue();
+    assertTrue(Maps.difference(newPropertiesAmsHbaseSite, updatedProperties).areEqual());
+  }
+
+  @Test
+  public void testUpdateAmsHbaseSecuritySiteConfigs() throws Exception{
+
+    Map<String, String> oldPropertiesAmsHbaseSecuritySite = new HashMap<String, String>() {
+      {
+        put("zookeeper.znode.parent", "/ams-hbase-secure");
+      }
+    };
+
+    Map<String, String> newPropertiesAmsHbaseSecuritySite = new HashMap<String, String>() {
+      {
+      }
+    };
+
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+    Clusters clusters = easyMockSupport.createNiceMock(Clusters.class);
+    final Cluster cluster = easyMockSupport.createNiceMock(Cluster.class);
+    Config mockAmsHbaseSecuritySite = easyMockSupport.createNiceMock(Config.class);
+
+    expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+      put("normal", cluster);
+    }}).once();
+
+    expect(cluster.getDesiredConfigByType("ams-hbase-security-site")).andReturn(mockAmsHbaseSecuritySite).atLeastOnce();
+    expect(mockAmsHbaseSecuritySite.getProperties()).andReturn(oldPropertiesAmsHbaseSecuritySite).times(2);
+
+    Injector injector = easyMockSupport.createNiceMock(Injector.class);
+    expect(injector.getInstance(Gson.class)).andReturn(null).anyTimes();
+    expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(null).anyTimes();
+    expect(injector.getInstance(KerberosHelper.class)).andReturn(createNiceMock(KerberosHelper.class)).anyTimes();
+
+    replay(injector, clusters, mockAmsHbaseSecuritySite, cluster);
+
+    AmbariManagementControllerImpl controller = createMockBuilder(AmbariManagementControllerImpl.class)
+      .addMockedMethod("createConfiguration")
+      .addMockedMethod("getClusters", new Class[] { })
+      .addMockedMethod("createConfig")
+      .withConstructor(createNiceMock(ActionManager.class), clusters, injector)
+      .createNiceMock();
+
+    Injector injector2 = easyMockSupport.createNiceMock(Injector.class);
+    Capture<Map> propertiesCapture = EasyMock.newCapture();
+
+    expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
+    expect(controller.getClusters()).andReturn(clusters).anyTimes();
+    expect(controller.createConfig(anyObject(Cluster.class), anyString(), capture(propertiesCapture), anyString(),
+      anyObject(Map.class))).andReturn(createNiceMock(Config.class)).once();
+
+    replay(controller, injector2);
+    new UpgradeCatalog221(injector2).updateAMSConfigs();
+    easyMockSupport.verifyAll();
+
+    Map<String, String> updatedProperties = propertiesCapture.getValue();
+    assertTrue(Maps.difference(newPropertiesAmsHbaseSecuritySite, updatedProperties).areEqual());
+
+  }
+
+  @Test
+  public void testUpdateAmsHbaseEnvContent() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
+    Method updateAmsHbaseEnvContent = UpgradeCatalog221.class.getDeclaredMethod("updateAmsHbaseEnvContent", String.class);
+    UpgradeCatalog221 upgradeCatalog221 = new UpgradeCatalog221(injector);
+    String oldContent = "some_content\n" +
+      "{% if security_enabled %}\n" +
+      "export HBASE_OPTS=\"$HBASE_OPTS -Djava.security.auth.login.config={{client_jaas_config_file}} -Dzookeeper.sasl.client.username={{zk_servicename}}\"\n" +
+      "export HBASE_MASTER_OPTS=\"$HBASE_MASTER_OPTS -Djava.security.auth.login.config={{master_jaas_config_file}} -Dzookeeper.sasl.client.username={{zk_servicename}}\"\n" +
+      "export HBASE_REGIONSERVER_OPTS=\"$HBASE_REGIONSERVER_OPTS -Djava.security.auth.login.config={{regionserver_jaas_config_file}} -Dzookeeper.sasl.client.username={{zk_servicename}}\"\n" +
+      "export HBASE_ZOOKEEPER_OPTS=\"$HBASE_ZOOKEEPER_OPTS -Djava.security.auth.login.config={{ams_zookeeper_jaas_config_file}} -Dzookeeper.sasl.client.username={{zk_servicename}}\"\n" +
+      "{% endif %}";
+
+    String expectedContent = "some_content\n" +
+      "{% if security_enabled %}\n" +
+      "export HBASE_OPTS=\"$HBASE_OPTS -Djava.security.auth.login.config={{client_jaas_config_file}}\"\n" +
+      "export HBASE_MASTER_OPTS=\"$HBASE_MASTER_OPTS -Djava.security.auth.login.config={{master_jaas_config_file}}\"\n" +
+      "export HBASE_REGIONSERVER_OPTS=\"$HBASE_REGIONSERVER_OPTS -Djava.security.auth.login.config={{regionserver_jaas_config_file}}\"\n" +
+      "export HBASE_ZOOKEEPER_OPTS=\"$HBASE_ZOOKEEPER_OPTS -Djava.security.auth.login.config={{ams_zookeeper_jaas_config_file}}\"\n" +
+      "{% endif %}";
+
+    String result = (String) updateAmsHbaseEnvContent.invoke(upgradeCatalog221, oldContent);
+    Assert.assertEquals(expectedContent, result);
+  }
+
+  @Test
+  public void testUpdateAmsEnvContent() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException
+  {
+    Method updateAmsEnvContent = UpgradeCatalog221.class.getDeclaredMethod("updateAmsEnvContent", String.class);
+    UpgradeCatalog221 upgradeCatalog221 = new UpgradeCatalog221(injector);
+    String oldContent = "some_content\n" +
+      "# AMS Collector options\n" +
+      "export AMS_COLLECTOR_OPTS=\"-Djava.library.path=/usr/lib/ams-hbase/lib/hadoop-native\"\n" +
+      "{% if security_enabled %}\n" +
+      "export AMS_COLLECTOR_OPTS=\"$AMS_COLLECTOR_OPTS -Djava.security.auth.login.config={{ams_collector_jaas_config_file}} " +
+      "-Dzookeeper.sasl.client.username={{zk_servicename}}\"\n" +
+      "{% endif %}";
+
+    String expectedContent = "some_content\n" +
+      "# AMS Collector options\n" +
+      "export AMS_COLLECTOR_OPTS=\"-Djava.library.path=/usr/lib/ams-hbase/lib/hadoop-native\"\n" +
+      "{% if security_enabled %}\n" +
+      "export AMS_COLLECTOR_OPTS=\"$AMS_COLLECTOR_OPTS -Djava.security.auth.login.config={{ams_collector_jaas_config_file}}\"\n" +
+      "{% endif %}";
+
+    String result = (String) updateAmsEnvContent.invoke(upgradeCatalog221, oldContent);
+    Assert.assertEquals(expectedContent, result);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
index ab4d006..307274f 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
@@ -39,14 +39,6 @@ class TestMetricsCollector(RMFTestCase):
     self.assert_hbase_configure('master', distributed=True)
     self.assert_hbase_configure('regionserver', distributed=True)
     self.assert_ams('collector', distributed=True)
-    self.assertResourceCalled('Execute', '/usr/lib/ams-hbase/bin/hbase-daemon.sh --config /etc/ams-hbase/conf stop zookeeper',
-                              on_timeout = 'ls /var/run/ambari-metrics-collector//hbase-ams-zookeeper.pid >/dev/null 2>&1 && ps `cat /var/run/ambari-metrics-collector//hbase-ams-zookeeper.pid` >/dev/null 2>&1 && kill -9 `cat /var/run/ambari-metrics-collector//hbase-ams-zookeeper.pid`',
-                              timeout = 30,
-                              user = 'ams'
-    )
-    self.assertResourceCalled('File', '/var/run/ambari-metrics-collector//hbase-ams-zookeeper.pid',
-                              action = ['delete']
-    )
     self.assertResourceCalled('Execute', '/usr/lib/ams-hbase/bin/hbase-daemon.sh --config /etc/ams-hbase/conf stop master',
                               on_timeout = 'ls /var/run/ambari-metrics-collector//hbase-ams-master.pid >/dev/null 2>&1 && ps `cat /var/run/ambari-metrics-collector//hbase-ams-master.pid` >/dev/null 2>&1 && kill -9 `cat /var/run/ambari-metrics-collector//hbase-ams-master.pid`',
                               timeout = 30,
@@ -66,10 +58,6 @@ class TestMetricsCollector(RMFTestCase):
     self.assertResourceCalled('Execute', '/usr/sbin/ambari-metrics-collector --config /etc/ambari-metrics-collector/conf --distributed stop',
                               user = 'ams'
     )
-    self.assertResourceCalled('Execute', '/usr/lib/ams-hbase/bin/hbase-daemon.sh --config /etc/ams-hbase/conf start zookeeper',
-                              not_if = 'ls /var/run/ambari-metrics-collector//hbase-ams-zookeeper.pid >/dev/null 2>&1 && ps `cat /var/run/ambari-metrics-collector//hbase-ams-zookeeper.pid` >/dev/null 2>&1',
-                              user = 'ams'
-    )
     self.assertResourceCalled('Execute', '/usr/lib/ams-hbase/bin/hbase-daemon.sh --config /etc/ams-hbase/conf start master',
                               not_if = 'ls /var/run/ambari-metrics-collector//hbase-ams-master.pid >/dev/null 2>&1 && ps `cat /var/run/ambari-metrics-collector//hbase-ams-master.pid` >/dev/null 2>&1',
                               user = 'ams'
@@ -105,6 +93,17 @@ class TestMetricsCollector(RMFTestCase):
     self.assertResourceCalled('Directory', '/var/lib/ambari-metrics-collector/hbase-tmp/zookeeper',
                               action = ['delete']
     )
+
+    self.assertResourceCalled('File', '/etc/ambari-metrics-collector/conf/core-site.xml',
+                                owner = 'ams',
+                                action = ['delete']
+                                )
+
+    self.assertResourceCalled('File', '/etc/ambari-metrics-collector/conf/hdfs-site.xml',
+                              owner = 'ams',
+                              action = ['delete']
+                              )
+
     self.assertResourceCalled('Execute', '/usr/sbin/ambari-metrics-collector --config /etc/ambari-metrics-collector/conf start',
                               user = 'ams'
     )
@@ -239,6 +238,18 @@ class TestMetricsCollector(RMFTestCase):
                               mode = 0775,
                               create_parents = True
     )
+
+    if not distributed:
+      self.assertResourceCalled('File', '/etc/ams-hbase/conf/core-site.xml',
+                                owner = 'ams',
+                                action = ['delete']
+                                )
+
+      self.assertResourceCalled('File', '/etc/ams-hbase/conf/hdfs-site.xml',
+                                owner = 'ams',
+                                action = ['delete']
+                                )
+
     self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
                               owner = 'ams',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
index 1ee3e7a..4e5458e 100644
--- a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
@@ -2024,6 +2024,7 @@ class TestHDP22StackAdvisor(TestCase):
           "hbase.regionserver.global.memstore.lowerLimit": "0.3",
           "hbase.regionserver.global.memstore.upperLimit": "0.35",
           "hbase.hregion.memstore.flush.size": "134217728",
+          "hbase.zookeeper.property.clientPort": "61181",
           "hfile.block.cache.size": "0.3",
           "hbase.rootdir": "file:///var/lib/ambari-metrics-collector/hbase",
           "hbase.tmp.dir": "/var/lib/ambari-metrics-collector/hbase-tmp",
@@ -2197,6 +2198,7 @@ class TestHDP22StackAdvisor(TestCase):
     services["configurations"]['ams-hbase-site']['properties']['hbase.rootdir'] = 'hdfs://host1/amshbase'
     services["configurations"]['ams-hbase-site']['properties']['hbase.cluster.distributed'] = 'true'
     expected['ams-hbase-site']['properties']['hbase.rootdir'] = 'hdfs://host1/amshbase'
+    expected['ams-hbase-site']['properties']['hbase.zookeeper.property.clientPort'] = '2181'
     expected['ams-hbase-env']['properties']['hbase_master_heapsize'] = '512'
     # services["configurations"]['ams-hbase-site']['properties']['dfs.client.read.shortcircuit'] = 'true'
     expected['ams-hbase-site']['properties']['dfs.client.read.shortcircuit'] = 'true'

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json b/ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json
index 61a7cdc..9fd16d3 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/ranger-admin-upgrade.json
@@ -206,8 +206,7 @@
             "hbase.security.authorization": "true", 
             "hbase.master.kerberos.principal": "amshbase/_HOST@EXAMPLE.COM", 
             "hbase.regionserver.keytab.file": "/etc/security/keytabs/ams-hbase.regionserver.keytab", 
-            "zookeeper.znode.parent": "/ams-hbase-secure", 
-            "hbase.regionserver.kerberos.principal": "amshbase/_HOST@EXAMPLE.COM", 
+            "hbase.regionserver.kerberos.principal": "amshbase/_HOST@EXAMPLE.COM",
             "hbase.myclient.keytab": "/etc/security/keytabs/ams.collector.keytab", 
             "ams.zookeeper.keytab": "/etc/security/keytabs/zk.service.ams.keytab", 
             "hbase.zookeeper.property.kerberos.removeRealmFromPrincipal": "true", 
@@ -586,7 +585,8 @@
             "hbase.client.scanner.caching": "10000", 
             "phoenix.sequence.saltBuckets": "2", 
             "hbase.hstore.flusher.count": "2", 
-            "hbase.zookeeper.peerport": "61288"
+            "hbase.zookeeper.peerport": "61288",
+            "zookeeper.znode.parent": "/ams-hbase-secure"
         }, 
         "yarn-env": {
             "yarn_pid_dir_prefix": "/var/run/hadoop-yarn", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json b/ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json
index bba7542..a354d61 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/ranger-usersync-upgrade.json
@@ -205,8 +205,7 @@
             "hbase.security.authorization": "true", 
             "hbase.master.kerberos.principal": "amshbase/_HOST@EXAMPLE.COM", 
             "hbase.regionserver.keytab.file": "/etc/security/keytabs/ams-hbase.regionserver.keytab", 
-            "zookeeper.znode.parent": "/ams-hbase-secure", 
-            "hbase.regionserver.kerberos.principal": "amshbase/_HOST@EXAMPLE.COM", 
+            "hbase.regionserver.kerberos.principal": "amshbase/_HOST@EXAMPLE.COM",
             "hbase.myclient.keytab": "/etc/security/keytabs/ams.collector.keytab", 
             "ams.zookeeper.keytab": "/etc/security/keytabs/zk.service.ams.keytab", 
             "hbase.zookeeper.property.kerberos.removeRealmFromPrincipal": "true", 
@@ -585,7 +584,8 @@
             "hbase.client.scanner.caching": "10000", 
             "phoenix.sequence.saltBuckets": "2", 
             "hbase.hstore.flusher.count": "2", 
-            "hbase.zookeeper.peerport": "61288"
+            "hbase.zookeeper.peerport": "61288",
+            "zookeeper.znode.parent": "/ams-hbase-secure"
         }, 
         "yarn-env": {
             "yarn_pid_dir_prefix": "/var/run/hadoop-yarn", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_2_1_3.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_2_1_3.json b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_2_1_3.json
index 3b4dff4..09d1d0c 100644
--- a/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_2_1_3.json
+++ b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_2_1_3.json
@@ -1203,13 +1203,18 @@
           "hbase.security.authentication": "kerberos",
           "hbase.coprocessor.region.classes": "org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.AccessController",
           "hbase.security.authorization": "true",
-          "zookeeper.znode.parent": "/ams-hbase-secure",
           "hbase.zookeeper.property.kerberos.removeRealmFromPrincipal": "true",
           "hbase.zookeeper.property.jaasLoginRenew": "3600000",
           "hbase.zookeeper.property.authProvider.1": "org.apache.zookeeper.server.auth.SASLAuthenticationProvider",
           "hbase.zookeeper.property.kerberos.removeHostFromPrincipal": "true"
         }
-      }],
+      },
+        {
+          "ams-hbase-site": {
+            "zookeeper.znode.parent": "/ams-hbase-secure"
+          }
+        }
+      ],
       "name": "METRICS_COLLECTOR"
     }],
     "identities": [{

http://git-wip-us.apache.org/repos/asf/ambari/blob/b613c336/ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json b/ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json
index ca9ac3c..d853f24 100644
--- a/ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json
+++ b/ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json
@@ -172,12 +172,16 @@
                     "hbase.security.authentication" : "kerberos",
                     "hbase.coprocessor.region.classes" : "org.apache.hadoop.hbase.security.token.TokenProvider,org.apache.hadoop.hbase.security.access.AccessController",
                     "hbase.security.authorization" : "true",
-                    "zookeeper.znode.parent" : "/ams-hbase-secure",
                     "hbase.zookeeper.property.kerberos.removeRealmFromPrincipal" : "true",
                     "hbase.zookeeper.property.jaasLoginRenew" : "3600000",
                     "hbase.zookeeper.property.authProvider.1" : "org.apache.zookeeper.server.auth.SASLAuthenticationProvider",
                     "hbase.zookeeper.property.kerberos.removeHostFromPrincipal" : "true"
                   }
+                },
+                {
+                  "ams-hbase-site": {
+                    "zookeeper.znode.parent": "/ams-hbase-secure"
+                  }
                 }
               ],
               "name" : "METRICS_COLLECTOR"


Mime
View raw message