ambari-commits mailing list archives

From: avija...@apache.org
Subject: [5/5] ambari git commit: Revert "AMBARI-15902. Refactor Metadata manager for supporting distributed collector. (swagle)"
Date: Thu, 19 May 2016 20:59:44 GMT
Revert "AMBARI-15902. Refactor Metadata manager for supporting distributed collector. (swagle)"

This reverts commit e3c9816403ebef2f777d48405fb96a43ec46462c.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bc994b73
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bc994b73
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bc994b73

Branch: refs/heads/branch-2.4
Commit: bc994b73b050cb45f27019149cb825c880926fef
Parents: 9987b97
Author: Aravindan Vijayan <avijayan@hortonworks.com>
Authored: Wed May 18 14:02:27 2016 -0700
Committer: Aravindan Vijayan <avijayan@hortonworks.com>
Committed: Thu May 19 13:44:25 2016 -0700

----------------------------------------------------------------------
 .../timeline/TimelineMetricConfiguration.java   |   3 +
 .../TimelineMetricMetadataManager.java          |  64 +++-------
 .../discovery/TimelineMetricMetadataSync.java   |  72 +-----------
 .../timeline/discovery/TestMetadataManager.java |  19 +--
 .../timeline/discovery/TestMetadataSync.java    | 116 -------------------
 .../server/upgrade/UpgradeCatalog240.java       |  21 ++--
 .../0.1.0/configuration/ams-site.xml            |   8 --
 .../templates/hadoop-metrics2.properties.j2     |   4 +
 8 files changed, 34 insertions(+), 273 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/bc994b73/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
index 683e5d4..5a04ad2 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
@@ -228,6 +228,7 @@ public class TimelineMetricConfiguration {
   public static final String TIMELINE_METRICS_TABLES_DURABILITY =
     "timeline.metrics.tables.durability";
 
+<<<<<<< HEAD
   public static final String TIMELINE_METRIC_METADATA_FILTERS =
     "timeline.metrics.service.metadata.filters";
 
@@ -237,6 +238,8 @@ public class TimelineMetricConfiguration {
   public static final String DEFAULT_TOPN_HOSTS_LIMIT =
     "timeline.metrics.default.topn.hosts.limit";
 
+=======
+>>>>>>> parent of e3c9816... AMBARI-15902. Refactor Metadata manager for supporting distributed collector. (swagle)
   public static final String HOST_APP_ID = "HOST";
 
   public static final String DEFAULT_INSTANCE_PORT = "12001";

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc994b73/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
index 3d28c27..7790501 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataManager.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -26,11 +25,8 @@ import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
 import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
@@ -38,10 +34,13 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
+<<<<<<< HEAD
+=======
+
+>>>>>>> parent of e3c9816... AMBARI-15902. Refactor Metadata manager for supporting distributed collector. (swagle)
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.DISABLE_METRIC_METADATA_MGMT;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.METRICS_METADATA_SYNC_INIT_DELAY;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.METRICS_METADATA_SYNC_SCHEDULE_DELAY;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.TIMELINE_METRIC_METADATA_FILTERS;
 
 public class TimelineMetricMetadataManager {
   private static final Log LOG = LogFactory.getLog(TimelineMetricMetadataManager.class);
@@ -59,19 +58,10 @@ public class TimelineMetricMetadataManager {
   private PhoenixHBaseAccessor hBaseAccessor;
   private Configuration metricsConf;
 
-  TimelineMetricMetadataSync metricMetadataSync;
-  // Filter metrics names matching given patterns, from metadata
-  final List<String> metricNameFilters = new ArrayList<>();
-
   public TimelineMetricMetadataManager(PhoenixHBaseAccessor hBaseAccessor,
                                        Configuration metricsConf) {
     this.hBaseAccessor = hBaseAccessor;
     this.metricsConf = metricsConf;
-
-    String patternStrings = metricsConf.get(TIMELINE_METRIC_METADATA_FILTERS);
-    if (!StringUtils.isEmpty(patternStrings)) {
-      metricNameFilters.addAll(Arrays.asList(patternStrings.split(",")));
-    }
   }
 
   /**
@@ -81,21 +71,21 @@ public class TimelineMetricMetadataManager {
     if (metricsConf.getBoolean(DISABLE_METRIC_METADATA_MGMT, false)) {
       isDisabled = true;
     } else {
-      metricMetadataSync = new TimelineMetricMetadataSync(this);
       // Schedule the executor to sync to store
-      executorService.scheduleWithFixedDelay(metricMetadataSync,
+      executorService.scheduleWithFixedDelay(new TimelineMetricMetadataSync(this),
         metricsConf.getInt(METRICS_METADATA_SYNC_INIT_DELAY, 120), // 2 minutes
         metricsConf.getInt(METRICS_METADATA_SYNC_SCHEDULE_DELAY, 300), // 5 minutes
         TimeUnit.SECONDS);
       // Read from store and initialize map
       try {
-        Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadata = getMetadataFromStore();
+        Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadata =
+          hBaseAccessor.getTimelineMetricMetadata();
 
         LOG.info("Retrieved " + metadata.size() + ", metadata objects from store.");
         // Store in the cache
         METADATA_CACHE.putAll(metadata);
 
-        Map<String, Set<String>> hostedAppData = getHostedAppsFromStore();
+        Map<String, Set<String>> hostedAppData = hBaseAccessor.getHostedAppsMetadata();
 
         LOG.info("Retrieved " + hostedAppData.size() + " host objects from store.");
         HOSTED_APPS_MAP.putAll(hostedAppData);
@@ -123,26 +113,10 @@ public class TimelineMetricMetadataManager {
   }
 
   /**
-   * Test metric name for valid patterns and return true/false
-   */
-  boolean skipMetadataCache(String metricName) {
-    for (String pattern : metricNameFilters) {
-      if (metricName.contains(pattern)) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-  /**
    * Update value in metadata cache
    * @param metadata @TimelineMetricMetadata
    */
   public void putIfModifiedTimelineMetricMetadata(TimelineMetricMetadata metadata) {
-    if (skipMetadataCache(metadata.getMetricName())) {
-      return;
-    }
-
     TimelineMetricMetadataKey key = new TimelineMetricMetadataKey(
       metadata.getMetricName(), metadata.getAppId());
 
@@ -200,27 +174,15 @@ public class TimelineMetricMetadataManager {
     );
   }
 
-  public boolean isDisabled() {
-    return isDisabled;
-  }
-
-  boolean isDistributedModeEnabled() {
-    return metricsConf.get("timeline.metrics.service.operation.mode", "").equals("distributed");
-  }
-
-  /**
-   * Fetch metrics metadata from store
-   * @throws SQLException
-   */
-  Map<TimelineMetricMetadataKey, TimelineMetricMetadata> getMetadataFromStore() throws SQLException {
-    return hBaseAccessor.getTimelineMetricMetadata();
-  }
-
   /**
    * Fetch hosted apps from store
    * @throws SQLException
    */
-  Map<String, Set<String>> getHostedAppsFromStore() throws SQLException {
+  Map<String, Set<String>> getPersistedHostedAppsData() throws SQLException {
     return hBaseAccessor.getHostedAppsMetadata();
   }
+
+  public boolean isDisabled() {
+    return isDisabled;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc994b73/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataSync.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataSync.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataSync.java
index 25b525a..54ea200 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataSync.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TimelineMetricMetadataSync.java
@@ -41,22 +41,6 @@ public class TimelineMetricMetadataSync implements Runnable {
 
   @Override
   public void run() {
-    LOG.debug("Persisting metric metadata...");
-    persistMetricMetadata();
-    LOG.debug("Persisting hosted apps metadata...");
-    persistHostAppsMetadata();
-    if (cacheManager.isDistributedModeEnabled()) {
-      LOG.debug("Refreshing metric metadata...");
-      refreshMetricMetadata();
-      LOG.debug("Refreshing hosted apps metadata...");
-      refreshHostAppsMetadata();
-    }
-  }
-
-  /**
-   * Find metrics not persisted to store and persist them
-   */
-  private void persistMetricMetadata() {
     List<TimelineMetricMetadata> metadataToPersist = new ArrayList<>();
     // Find all entries to persist
     for (TimelineMetricMetadata metadata : cacheManager.getMetadataCache().values()) {
@@ -86,38 +70,11 @@ public class TimelineMetricMetadataSync implements Runnable {
         cacheManager.getMetadataCache().put(key, metadata);
       }
     }
-  }
-
-  /**
-   * Read all metric metadata and update cached values - HA mode
-   */
-  private void refreshMetricMetadata() {
-    Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadataFromStore = null;
-    try {
-      metadataFromStore = cacheManager.getMetadataFromStore();
-    } catch (SQLException e) {
-      LOG.warn("Error refreshing metadata from store.", e);
-    }
-    if (metadataFromStore != null) {
-      Map<TimelineMetricMetadataKey, TimelineMetricMetadata> cachedMetadata =
-        cacheManager.getMetadataCache();
-
-      for (Map.Entry<TimelineMetricMetadataKey, TimelineMetricMetadata> metadataEntry : metadataFromStore.entrySet()) {
-        if (!cachedMetadata.containsKey(metadataEntry.getKey())) {
-          cachedMetadata.put(metadataEntry.getKey(), metadataEntry.getValue());
-        }
-      }
-    }
-  }
-
-  /**
-   * Sync hosted apps data if needed
-   */
-  private void persistHostAppsMetadata() {
+    // Sync hosted apps data is needed
     if (cacheManager.syncHostedAppsMetadata()) {
       Map<String, Set<String>> persistedData = null;
       try {
-        persistedData = cacheManager.getHostedAppsFromStore();
+        persistedData = cacheManager.getPersistedHostedAppsData();
       } catch (SQLException e) {
         LOG.warn("Failed on fetching hosted apps data from store.", e);
         return; // Something wrong with store
@@ -129,8 +86,8 @@ public class TimelineMetricMetadataSync implements Runnable {
         for (Map.Entry<String, Set<String>> cacheEntry : cachedData.entrySet()) {
           // No persistence / stale data in store
           if (persistedData == null || persistedData.isEmpty() ||
-            !persistedData.containsKey(cacheEntry.getKey()) ||
-            !persistedData.get(cacheEntry.getKey()).containsAll(cacheEntry.getValue())) {
+              !persistedData.containsKey(cacheEntry.getKey()) ||
+              !persistedData.get(cacheEntry.getKey()).containsAll(cacheEntry.getValue())) {
             dataToSync.put(cacheEntry.getKey(), cacheEntry.getValue());
           }
         }
@@ -145,25 +102,4 @@ public class TimelineMetricMetadataSync implements Runnable {
 
     }
   }
-
-  /**
-   * Read all hosted apps metadata and update cached values - HA
-   */
-  private void refreshHostAppsMetadata() {
-    Map<String, Set<String>> hostedAppsDataFromStore = null;
-    try {
-      hostedAppsDataFromStore = cacheManager.getHostedAppsFromStore();
-    } catch (SQLException e) {
-      LOG.warn("Error refreshing metadata from store.", e);
-    }
-    if (hostedAppsDataFromStore != null) {
-      Map<String, Set<String>> cachedData = cacheManager.getHostedAppsCache();
-
-      for (Map.Entry<String, Set<String>> storeEntry : hostedAppsDataFromStore.entrySet()) {
-        if (!cachedData.containsKey(storeEntry.getKey())) {
-          cachedData.put(storeEntry.getKey(), storeEntry.getValue());
-        }
-      }
-    }
-  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc994b73/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
index 06c71c5..92e4dfc 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
@@ -23,30 +23,15 @@ import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.AbstractMiniHBaseClusterTest;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
-import org.easymock.EasyMock;
 import org.junit.Before;
-import org.junit.Rule;
 import org.junit.Test;
 
 import java.io.IOException;
 import java.sql.SQLException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
 
-import static org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata.MetricType.GAUGE;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.METRICS_METADATA_SYNC_INIT_DELAY;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.METRICS_METADATA_SYNC_SCHEDULE_DELAY;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.verify;
-
 public class TestMetadataManager extends AbstractMiniHBaseClusterTest {
   TimelineMetricMetadataManager metadataManager;
 
@@ -115,11 +100,13 @@ public class TestMetadataManager extends AbstractMiniHBaseClusterTest {
     Assert.assertEquals(value2, savedData.get(key2));
 
     Map<String, Set<String>> cachedHostData = metadataManager.getHostedAppsCache();
-    Map<String, Set<String>> savedHostData = metadataManager.getHostedAppsFromStore();
+    Map<String, Set<String>> savedHostData = metadataManager.getPersistedHostedAppsData();
     Assert.assertEquals(cachedData.size(), savedData.size());
     Assert.assertEquals("dummy_app1", cachedHostData.get("dummy_host1").iterator().next());
     Assert.assertEquals("dummy_app2", cachedHostData.get("dummy_host2").iterator().next());
     Assert.assertEquals("dummy_app1", savedHostData.get("dummy_host1").iterator().next());
     Assert.assertEquals("dummy_app2", savedHostData.get("dummy_host2").iterator().next());
   }
+
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc994b73/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataSync.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataSync.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataSync.java
deleted file mode 100644
index 78f2bfe..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataSync.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery;
-
-import junit.framework.Assert;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
-import org.junit.Test;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import static org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata.MetricType.GAUGE;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.TIMELINE_METRIC_METADATA_FILTERS;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.verify;
-
-public class TestMetadataSync {
-  @Test
-  public void testRefreshMetadataOnWrite() throws Exception {
-    Configuration configuration = createNiceMock(Configuration.class);
-    PhoenixHBaseAccessor hBaseAccessor = createNiceMock(PhoenixHBaseAccessor.class);
-
-    final TimelineMetricMetadata testMetadata1 = new TimelineMetricMetadata(
-      "m1", "a1", "", GAUGE.name(), System.currentTimeMillis(), true);
-    final TimelineMetricMetadata testMetadata2 = new TimelineMetricMetadata(
-      "m2", "a2", "", GAUGE.name(), System.currentTimeMillis(), true);
-
-    Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadata =
-      new HashMap<TimelineMetricMetadataKey, TimelineMetricMetadata>() {{
-        put(new TimelineMetricMetadataKey("m1", "a1"), testMetadata1);
-        put(new TimelineMetricMetadataKey("m2", "a2"), testMetadata2);
-      }};
-
-    Map<String, Set<String>> hostedApps = new HashMap<String, Set<String>>() {{
-      put("h1", new HashSet<>(Arrays.asList("a1")));
-      put("h2", new HashSet<>(Arrays.asList("a1", "a2")));
-    }};
-
-    expect(configuration.get("timeline.metrics.service.operation.mode", "")).andReturn("distributed");
-    expect(hBaseAccessor.getTimelineMetricMetadata()).andReturn(metadata);
-    expect(hBaseAccessor.getHostedAppsMetadata()).andReturn(hostedApps);
-
-    replay(configuration, hBaseAccessor);
-
-    TimelineMetricMetadataManager metadataManager = new
-      TimelineMetricMetadataManager(hBaseAccessor, configuration);
-
-    metadataManager.metricMetadataSync = new TimelineMetricMetadataSync(metadataManager);
-
-    metadataManager.metricMetadataSync.run();
-
-    verify(configuration, hBaseAccessor);
-
-    metadata = metadataManager.getMetadataCache();
-    Assert.assertEquals(2, metadata.size());
-    Assert.assertTrue(metadata.containsKey(new TimelineMetricMetadataKey("m1", "a1")));
-    Assert.assertTrue(metadata.containsKey(new TimelineMetricMetadataKey("m2", "a2")));
-
-    hostedApps = metadataManager.getHostedAppsCache();
-    Assert.assertEquals(2, hostedApps.size());
-    Assert.assertEquals(1, hostedApps.get("h1").size());
-    Assert.assertEquals(2, hostedApps.get("h2").size());
-  }
-
-  @Test
-  public void testFilterByRegexOnMetricName() throws Exception {
-    Configuration configuration = createNiceMock(Configuration.class);
-    PhoenixHBaseAccessor hBaseAccessor = createNiceMock(PhoenixHBaseAccessor.class);
-
-    TimelineMetricMetadata metadata1 = new TimelineMetricMetadata(
-      "xxx.abc.yyy", "a1", "", GAUGE.name(), System.currentTimeMillis(), true);
-    TimelineMetricMetadata metadata2 = new TimelineMetricMetadata(
-      "xxx.cdef.yyy", "a2", "", GAUGE.name(), System.currentTimeMillis(), true);
-    TimelineMetricMetadata metadata3 = new TimelineMetricMetadata(
-      "xxx.pqr.zzz", "a3", "", GAUGE.name(), System.currentTimeMillis(), true);
-
-    expect(configuration.get(TIMELINE_METRIC_METADATA_FILTERS)).andReturn("abc,cde");
-
-    replay(configuration, hBaseAccessor);
-
-    TimelineMetricMetadataManager metadataManager = new
-      TimelineMetricMetadataManager(hBaseAccessor, configuration);
-
-    metadataManager.putIfModifiedTimelineMetricMetadata(metadata1);
-    metadataManager.putIfModifiedTimelineMetricMetadata(metadata2);
-    metadataManager.putIfModifiedTimelineMetricMetadata(metadata3);
-
-    verify(configuration, hBaseAccessor);
-
-    Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadata =
-      metadataManager.getMetadataCache();
-
-    Assert.assertEquals(1, metadata.size());
-    Assert.assertEquals("xxx.pqr.zzz", metadata.keySet().iterator().next().getMetricName());
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc994b73/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
index 41f538e..3f21771 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
@@ -1160,10 +1160,10 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
    */
   protected void updateAlertCurrentTable() throws SQLException {
     dbAccessor.addColumn(ALERT_CURRENT_TABLE,
-      new DBColumnInfo(ALERT_CURRENT_OCCURRENCES_COLUMN, Long.class, null, 1, false));
+            new DBColumnInfo(ALERT_CURRENT_OCCURRENCES_COLUMN, Long.class, null, 1, false));
 
     dbAccessor.addColumn(ALERT_CURRENT_TABLE, new DBColumnInfo(ALERT_CURRENT_FIRMNESS_COLUMN,
-      String.class, 255, AlertFirmness.HARD.name(), false));
+            String.class, 255, AlertFirmness.HARD.name(), false));
   }
 
   /**
@@ -1188,15 +1188,15 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
     dbAccessor.executeUpdate(String.format(updateStatement,
         2, PermissionEntity.CLUSTER_ADMINISTRATOR_PERMISSION_NAME));
     dbAccessor.executeUpdate(String.format(updateStatement,
-      3, PermissionEntity.CLUSTER_OPERATOR_PERMISSION_NAME));
+            3, PermissionEntity.CLUSTER_OPERATOR_PERMISSION_NAME));
     dbAccessor.executeUpdate(String.format(updateStatement,
-      4, PermissionEntity.SERVICE_ADMINISTRATOR_PERMISSION_NAME));
+            4, PermissionEntity.SERVICE_ADMINISTRATOR_PERMISSION_NAME));
     dbAccessor.executeUpdate(String.format(updateStatement,
-      5, PermissionEntity.SERVICE_OPERATOR_PERMISSION_NAME));
+            5, PermissionEntity.SERVICE_OPERATOR_PERMISSION_NAME));
     dbAccessor.executeUpdate(String.format(updateStatement,
-      6, PermissionEntity.CLUSTER_USER_PERMISSION_NAME));
+            6, PermissionEntity.CLUSTER_USER_PERMISSION_NAME));
     dbAccessor.executeUpdate(String.format(updateStatement,
-      7, PermissionEntity.VIEW_USER_PERMISSION_NAME));
+            7, PermissionEntity.VIEW_USER_PERMISSION_NAME));
   }
 
   /**
@@ -1584,13 +1584,6 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
 
           Config amsSite = cluster.getDesiredConfigByType(AMS_SITE);
           if (amsSite != null) {
-            String metadataFilters = amsSite.getProperties().get("timeline.metrics.service.metadata.filters");
-            if (StringUtils.isEmpty(metadataFilters) ||
-                !metadataFilters.contains("ContainerResource")) {
-              updateConfigurationProperties("ams-site",
-                Collections.singletonMap("timeline.metrics.service.metadata.filters", "ContainerResource"), true, false);
-            }
-
             Map<String, String> amsSiteProperties = amsSite.getProperties();
             Map<String, String> newProperties = new HashMap<>();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc994b73/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
index a7fd9c8..e5758bf 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-site.xml
@@ -577,13 +577,5 @@
       Enable Linear interpolation for missing slices of data, while aggregating.
     </description>
   </property>
-  <property>
-    <name>timeline.metrics.service.metadata.filters</name>
-    <value>ContainerResource</value>
-    <description>
-      Commas separated list of regular expressions that match metric names
-      which prevents certain metrics from ending up in metadata cache.
-    </description>
-  </property>
 
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc994b73/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hadoop-metrics2.properties.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hadoop-metrics2.properties.j2 b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hadoop-metrics2.properties.j2
index fcd9b23..6f32000 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hadoop-metrics2.properties.j2
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hadoop-metrics2.properties.j2
@@ -101,4 +101,8 @@ namenode.sink.timeline.metric.rpc.datanode.port={{nn_rpc_dn_port}}
 namenode.sink.timeline.metric.rpc.healthcheck.port={{nn_rpc_healthcheck_port}}
 {% endif %}
 
+# Switch off container metrics
+*.source.filter.class=org.apache.hadoop.metrics2.filter.GlobFilter
+nodemanager.*.source.filter.exclude=*ContainerResource*
+
 {% endif %}

