ambari-commits mailing list archives

From: swa...@apache.org
Subject: ambari git commit: AMBARI-14278 : On a cluster with both AMS and Storm, stack advisor throws error in validateStormConfigurations after upgrade if the metrics.reporter.register property is not present. (Aravindan Vijayan via swagle)
Date: Thu, 10 Dec 2015 19:26:36 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.2 864adc7c4 -> 1f631152d


AMBARI-14278 : On a cluster with both AMS and Storm, stack advisor throws error in validateStormConfigurations
after upgrade if the metrics.reporter.register property is not present. (Aravindan Vijayan
via swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1f631152
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1f631152
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1f631152

Branch: refs/heads/branch-2.2
Commit: 1f631152dc82d8abfaed704e6731fb655a45456c
Parents: 864adc7
Author: Siddharth Wagle <swagle@hortonworks.com>
Authored: Thu Dec 10 11:26:16 2015 -0800
Committer: Siddharth Wagle <swagle@hortonworks.com>
Committed: Thu Dec 10 11:26:16 2015 -0800

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog213.java       | 23 +++++++++++++++++---
 .../0.1.0/package/scripts/service_check.py      | 12 +++++-----
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |  2 +-
 .../server/upgrade/UpgradeCatalog213Test.java   |  3 ++-
 4 files changed, 29 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1f631152/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
index f80f7b9..2ee63ed 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog213.java
@@ -36,6 +36,7 @@ import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.RepositoryVersionState;
 import org.apache.ambari.server.state.SecurityType;
+import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.alert.SourceType;
@@ -97,6 +98,7 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
   private static final String AMS_HBASE_SITE_NORMALIZER_ENABLED_PROPERTY = "hbase.normalizer.enabled";
   private static final String AMS_HBASE_SITE_NORMALIZER_PERIOD_PROPERTY = "hbase.normalizer.period";
   private static final String AMS_HBASE_SITE_NORMALIZER_CLASS_PROPERTY = "hbase.master.normalizer.class";
+  private static final String TIMELINE_METRICS_HBASE_FIFO_COMPACTION_ENABLED = "timeline.metrics.hbase.fifo.compaction.enabled";
   private static final String HBASE_ENV_CONFIG = "hbase-env";
   private static final String FLUME_ENV_CONFIG = "flume-env";
   private static final String HIVE_SITE_CONFIG = "hive-site";
@@ -106,6 +108,7 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
   private static final String ZOOKEEPER_LOG4J_CONFIG = "zookeeper-log4j";
   private static final String HADOOP_ENV_CONFIG = "hadoop-env";
   private static final String NIMBS_MONITOR_FREQ_SECS_PROPERTY = "nimbus.monitor.freq.secs";
+  private static final String STORM_METRICS_REPORTER = "metrics.reporter.register";
   private static final String HIVE_SERVER2_OPERATION_LOG_LOCATION_PROPERTY = "hive.server2.logging.operation.log.location";
   private static final String CONTENT_PROPERTY = "content";
   private static final String HADOOP_ENV_CONTENT_TO_APPEND = "\n{% if is_datanode_max_locked_memory_set %}\n" +
@@ -914,11 +917,25 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
     for (final Cluster cluster : getCheckedClusterMap(ambariManagementController.getClusters()).values()) {
       Config stormSiteProps = cluster.getDesiredConfigByType(STORM_SITE);
       if (stormSiteProps != null) {
+        Map<String, String> updates = new HashMap<>();
+
         String nimbusMonitorFreqSecs = stormSiteProps.getProperties().get(NIMBS_MONITOR_FREQ_SECS_PROPERTY);
         if (nimbusMonitorFreqSecs != null && nimbusMonitorFreqSecs.equals("10")) {
-          Map<String, String> updates = Collections.singletonMap(NIMBS_MONITOR_FREQ_SECS_PROPERTY, "120");
-          updateConfigurationPropertiesForCluster(cluster, STORM_SITE, updates, true, false);
+          updates.put(NIMBS_MONITOR_FREQ_SECS_PROPERTY, "120");
+        }
+
+        Service amsService = null;
+        try {
+          amsService = cluster.getService("AMBARI_METRICS");
+        } catch(AmbariException ambariException) {
+          LOG.info("AMBARI_METRICS service not found in cluster while updating storm-site properties");
+        }
+        String metricsReporter = stormSiteProps.getProperties().get(STORM_METRICS_REPORTER);
+        if (amsService != null && StringUtils.isEmpty(metricsReporter)) {
+          updates.put(STORM_METRICS_REPORTER, "org.apache.hadoop.metrics2.sink.storm.StormTimelineMetricsReporter");
         }
+
+        updateConfigurationPropertiesForCluster(cluster, STORM_SITE, updates, true, false);
       }
     }
   }
@@ -1067,7 +1084,7 @@ public class UpgradeCatalog213 extends AbstractUpgradeCatalog {
             newProperties.put("timeline.metrics.cluster.aggregator.second.disabled", String.valueOf(false));
 
             //Add compaction policy property
-            newProperties.put("hbase.fifo.compaction.policy.enabled", String.valueOf(true));
+            newProperties.put(TIMELINE_METRICS_HBASE_FIFO_COMPACTION_ENABLED, String.valueOf(true));
 
             updateConfigurationPropertiesForCluster(cluster, AMS_SITE, newProperties, true, true);
           }

http://git-wip-us.apache.org/repos/asf/ambari/blob/1f631152/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py
b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py
index 60eb286..f19c823 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py
@@ -38,7 +38,7 @@ import socket
 class AMSServiceCheck(Script):
   AMS_METRICS_POST_URL = "/ws/v1/timeline/metrics/"
   AMS_METRICS_GET_URL = "/ws/v1/timeline/metrics?%s"
-  AMS_CONNECT_TRIES = 40
+  AMS_CONNECT_TRIES = 30
   AMS_CONNECT_TIMEOUT = 15
 
   @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
@@ -67,15 +67,15 @@ class AMSServiceCheck(Script):
     env.set_params(params)
 
     random_value1 = random.random()
-    current_time = int(time.time()) * 1000
-    metric_json = Template('smoketest_metrics.json.j2', hostname=params.hostname, random1=random_value1,
-                           current_time=current_time).get_content()
-    Logger.info("Generated metrics:\n%s" % metric_json)
-
     headers = {"Content-type": "application/json"}
 
     for i in xrange(0, self.AMS_CONNECT_TRIES):
       try:
+        current_time = int(time.time()) * 1000
+        metric_json = Template('smoketest_metrics.json.j2', hostname=params.hostname, random1=random_value1,
+                           current_time=current_time).get_content()
+        Logger.info("Generated metrics:\n%s" % metric_json)
+
         Logger.info("Connecting (POST) to %s:%s%s" % (params.metric_collector_host,
                                                       params.metric_collector_port,
                                                       self.AMS_METRICS_POST_URL))
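
The service_check.py change above is about payload freshness: the smoke-test JSON used to be rendered once, before the retry loop, so every retry re-posted the same, increasingly stale, timestamp. Rebuilding the payload inside the loop gives each attempt a current timestamp (the retry count also drops from 40 to 30). A rough standalone sketch of that pattern, with the collector URL, payload shape and function name as placeholders rather than the real smoketest_metrics.json.j2 rendering:

import json
import time
import urllib2  # the AMS scripts here run under Python 2 (note the xrange in the diff)

AMS_CONNECT_TRIES = 30
AMS_CONNECT_TIMEOUT = 15

def post_smoke_metric(collector_url):
  for attempt in xrange(AMS_CONNECT_TRIES):
    # Rebuilt on every attempt, so a late retry never posts a stale timestamp.
    payload = json.dumps({"starttime": int(time.time()) * 1000})
    try:
      request = urllib2.Request(collector_url, payload,
                                {"Content-type": "application/json"})
      urllib2.urlopen(request, timeout=AMS_CONNECT_TIMEOUT)
      return True
    except Exception:
      time.sleep(AMS_CONNECT_TIMEOUT)
  return False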

http://git-wip-us.apache.org/repos/asf/ambari/blob/1f631152/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index 946deed..a723f75 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -908,7 +908,7 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
     validationItems = []
     servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
     # Storm AMS integration
-    if 'AMBARI_METRICS' in servicesList and \
+    if 'AMBARI_METRICS' in servicesList and "metrics.reporter.register" in properties and \
       "org.apache.hadoop.metrics2.sink.storm.StormTimelineMetricsReporter" not in properties.get("metrics.reporter.register"):
 
       validationItems.append({"config-name": 'metrics.reporter.register',
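
For context on the one-line stack_advisor.py change: when storm-site carries no metrics.reporter.register at all (the post-upgrade state described in the JIRA), properties.get("metrics.reporter.register") returns None, and testing substring membership against None is what made validateStormConfigurations throw; the added "metrics.reporter.register" in properties guard short-circuits that case. A minimal sketch of the behaviour, assuming a plain dict in place of the storm-site properties the advisor is given (the helper function and its boolean result are illustrative, not Ambari's validator API):

STORM_AMS_REPORTER = "org.apache.hadoop.metrics2.sink.storm.StormTimelineMetricsReporter"

def needs_reporter_warning(properties):
  # Without the membership check, a missing key makes properties.get(...) return
  # None, and "x not in None" raises:
  #   TypeError: argument of type 'NoneType' is not iterable
  return ("metrics.reporter.register" in properties and
          STORM_AMS_REPORTER not in properties.get("metrics.reporter.register"))

print(needs_reporter_warning({}))                                  # False, no crash
print(needs_reporter_warning({"metrics.reporter.register": ""}))   # True, recommend the AMS reporter
print(needs_reporter_warning({"metrics.reporter.register": STORM_AMS_REPORTER}))  # False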

http://git-wip-us.apache.org/repos/asf/ambari/blob/1f631152/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
index d83f0a8..425eb1d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog213Test.java
@@ -590,6 +590,7 @@ public class UpgradeCatalog213Test {
     final Map<String, String> propertiesStormSite = new HashMap<String, String>() {
       {
         put("nimbus.monitor.freq.secs", "10");
+        put("metrics.reporter.register", "org.apache.hadoop.metrics2.sink.storm.StormTimelineMetricsReporter");
       }
     };
 
@@ -748,7 +749,7 @@ public class UpgradeCatalog213Test {
         put("timeline.metrics.cluster.aggregator.minute.ttl", String.valueOf(7776000));
         put("timeline.metrics.cluster.aggregator.second.checkpointCutOffMultiplier", String.valueOf(2));
         put("timeline.metrics.cluster.aggregator.second.disabled", String.valueOf(false));
-        put("hbase.fifo.compaction.policy.enabled", String.valueOf(true));
+        put("timeline.metrics.hbase.fifo.compaction.enabled", String.valueOf(true));
       }
     };
     EasyMockSupport easyMockSupport = new EasyMockSupport();

