ambari-commits mailing list archives

From: avija...@apache.org
Subject: [2/5] ambari git commit: AMBARI-21214 : Use a uuid vs long row key for metrics in AMS schema. (avijayan)
Date: Mon, 19 Jun 2017 18:25:18 GMT
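
The patch below moves the AMS (Ambari Metrics Service) Phoenix schema from long-based row keys to fixed-length uuid row keys. As orientation only (this helper class is not part of the patch), the new tests assert a 20-byte uuid when a hostname is present, a 16-byte uuid without one, and 4-byte host uuids, which is consistent with a layout of a 16-byte metric/app/instance id followed by a 4-byte host id:

import java.nio.ByteBuffer;

// Illustration only: concatenating a 16-byte metric uuid with a 4-byte host uuid
// yields the 20-byte key length asserted in testGenerateUuidFromMetric() below.
public class UuidLayoutSketch {
  public static byte[] hostMetricRowKey(byte[] metricUuid, byte[] hostUuid) {
    // metricUuid is expected to be 16 bytes, hostUuid 4 bytes.
    return ByteBuffer.allocate(metricUuid.length + hostUuid.length)
        .put(metricUuid)
        .put(hostUuid)
        .array();
  }
}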
http://git-wip-us.apache.org/repos/asf/ambari/blob/041e4e9a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
index 3adf770..ca1fc20 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataManager.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery;
 
+import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
 
@@ -27,7 +28,7 @@ import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.AbstractMiniHBaseClusterTest;
 import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricsFilter;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineClusterMetric;
 import org.easymock.EasyMock;
 import org.junit.Before;
 import org.junit.Test;
@@ -35,6 +36,10 @@ import org.junit.Test;
 import java.io.IOException;
 import java.net.URISyntaxException;
 import java.sql.SQLException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
@@ -44,8 +49,7 @@ public class TestMetadataManager extends AbstractMiniHBaseClusterTest {
 
   @Before
   public void insertDummyRecords() throws IOException, SQLException, URISyntaxException {
-    // Initialize new manager
-    metadataManager = new TimelineMetricMetadataManager(new Configuration(), hdb);
+
     final long now = System.currentTimeMillis();
 
     TimelineMetrics timelineMetrics = new TimelineMetrics();
@@ -77,29 +81,13 @@ public class TestMetadataManager extends AbstractMiniHBaseClusterTest {
     }});
     timelineMetrics.getMetrics().add(metric2);
 
+    Configuration metricsConf = createNiceMock(Configuration.class);
+    expect(metricsConf.get("timeline.metrics.service.operation.mode")).andReturn("distributed").anyTimes();
+    replay(metricsConf);
 
-    //Test whitelisting
-    TimelineMetric metric3 = new TimelineMetric();
-    metric3.setMetricName("dummy_metric3");
-    metric3.setHostName("dummy_host3");
-    metric3.setTimestamp(now);
-    metric3.setStartTime(now - 1000);
-    metric3.setAppId("dummy_app3");
-    metric3.setType("Integer");
-    metric3.setMetricValues(new TreeMap<Long, Double>() {{
-      put(now - 100, 1.0);
-      put(now - 200, 2.0);
-      put(now - 300, 3.0);
-    }});
-    timelineMetrics.getMetrics().add(metric3);
-
-    Configuration metricsConf = new Configuration();
-    TimelineMetricConfiguration configuration = EasyMock.createNiceMock(TimelineMetricConfiguration.class);
-    expect(configuration.getMetricsConf()).andReturn(metricsConf).once();
-    replay(configuration);
-    TimelineMetricsFilter.initializeMetricFilter(configuration);
-    TimelineMetricsFilter.addToWhitelist("dummy_metric1");
-    TimelineMetricsFilter.addToWhitelist("dummy_metric2");
+    // Initialize new manager
+    metadataManager = new TimelineMetricMetadataManager(metricsConf, hdb);
+    hdb.setMetadataInstance(metadataManager);
 
     hdb.insertMetricRecordsWithMetadata(metadataManager, timelineMetrics, true);
   }
@@ -109,20 +97,16 @@ public class TestMetadataManager extends AbstractMiniHBaseClusterTest {
     Map<TimelineMetricMetadataKey, TimelineMetricMetadata> cachedData = metadataManager.getMetadataCache();
 
     Assert.assertNotNull(cachedData);
-    Assert.assertEquals(3, cachedData.size());
-    TimelineMetricMetadataKey key1 = new TimelineMetricMetadataKey("dummy_metric1", "dummy_app1");
-    TimelineMetricMetadataKey key2 = new TimelineMetricMetadataKey("dummy_metric2", "dummy_app2");
-    TimelineMetricMetadataKey key3 = new TimelineMetricMetadataKey("dummy_metric3", "dummy_app3");
+    Assert.assertEquals(2, cachedData.size());
+    TimelineMetricMetadataKey key1 = new TimelineMetricMetadataKey("dummy_metric1", "dummy_app1", null);
+    TimelineMetricMetadataKey key2 = new TimelineMetricMetadataKey("dummy_metric2", "dummy_app2", "instance2");
     TimelineMetricMetadata value1 = new TimelineMetricMetadata("dummy_metric1",
-      "dummy_app1", "Integer", null, 1L, true, false);
+      "dummy_app1", null, null, "Integer", 1L, true, true);
     TimelineMetricMetadata value2 = new TimelineMetricMetadata("dummy_metric2",
-      "dummy_app2", "Integer", null, 1L, true, false);
-    TimelineMetricMetadata value3 = new TimelineMetricMetadata("dummy_metric3",
-      "dummy_app3", "Integer", null, 1L, true, true);
+      "dummy_app2", "instance2", null, "Integer", 1L, true, true);
 
     Assert.assertEquals(value1, cachedData.get(key1));
     Assert.assertEquals(value2, cachedData.get(key2));
-    Assert.assertEquals(value3, cachedData.get(key3));
 
     TimelineMetricMetadataSync syncRunnable = new TimelineMetricMetadataSync(metadataManager);
     syncRunnable.run();
@@ -131,26 +115,125 @@ public class TestMetadataManager extends AbstractMiniHBaseClusterTest {
       hdb.getTimelineMetricMetadata();
 
     Assert.assertNotNull(savedData);
-    Assert.assertEquals(3, savedData.size());
+    Assert.assertEquals(2, savedData.size());
     Assert.assertEquals(value1, savedData.get(key1));
     Assert.assertEquals(value2, savedData.get(key2));
-    Assert.assertEquals(value3, savedData.get(key3));
 
-    Map<String, Set<String>> cachedHostData = metadataManager.getHostedAppsCache();
-    Map<String, Set<String>> savedHostData = metadataManager.getHostedAppsFromStore();
+    Map<String, TimelineMetricHostMetadata> cachedHostData = metadataManager.getHostedAppsCache();
+    Map<String, TimelineMetricHostMetadata> savedHostData = metadataManager.getHostedAppsFromStore();
     Assert.assertEquals(cachedData.size(), savedData.size());
-    Assert.assertEquals("dummy_app1", cachedHostData.get("dummy_host1").iterator().next());
-    Assert.assertEquals("dummy_app2", cachedHostData.get("dummy_host2").iterator().next());
-    Assert.assertEquals("dummy_app3", cachedHostData.get("dummy_host3").iterator().next());
-    Assert.assertEquals("dummy_app1", savedHostData.get("dummy_host1").iterator().next());
-    Assert.assertEquals("dummy_app2", savedHostData.get("dummy_host2").iterator().next());
-    Assert.assertEquals("dummy_app3", cachedHostData.get("dummy_host3").iterator().next());
-
+    Assert.assertEquals("dummy_app1", cachedHostData.get("dummy_host1").getHostedApps().iterator().next());
+    Assert.assertEquals("dummy_app2", cachedHostData.get("dummy_host2").getHostedApps().iterator().next());
+    Assert.assertEquals("dummy_app1", savedHostData.get("dummy_host1").getHostedApps().iterator().next());
+    Assert.assertEquals("dummy_app2", savedHostData.get("dummy_host2").getHostedApps().iterator().next());
 
     Map<String, Set<String>> cachedHostInstanceData = metadataManager.getHostedInstanceCache();
     Map<String, Set<String>> savedHostInstanceData = metadataManager.getHostedInstancesFromStore();
     Assert.assertEquals(cachedHostInstanceData.size(), savedHostInstanceData.size());
     Assert.assertEquals("dummy_host2", cachedHostInstanceData.get("instance2").iterator().next());
+  }
 
+  @Test
+  public void testGenerateUuidFromMetric() throws SQLException {
+
+    TimelineMetric timelineMetric = new TimelineMetric();
+    timelineMetric.setMetricName("regionserver.Server.blockCacheExpressHitPercent");
+    timelineMetric.setAppId("hbase");
+    timelineMetric.setHostName("avijayan-ams-2.openstacklocal");
+    timelineMetric.setInstanceId("test1");
+
+    byte[] uuid = metadataManager.getUuid(timelineMetric);
+    Assert.assertNotNull(uuid);
+    Assert.assertEquals(uuid.length, 20);
+
+    byte[] uuidWithoutHost = metadataManager.getUuid(new TimelineClusterMetric(timelineMetric.getMetricName(), timelineMetric.getAppId(), timelineMetric.getInstanceId(), -1));
+    Assert.assertNotNull(uuidWithoutHost);
+    Assert.assertEquals(uuidWithoutHost.length, 16);
+
+    TimelineMetric metric2 = metadataManager.getMetricFromUuid(uuid);
+    Assert.assertEquals(metric2, timelineMetric);
+    TimelineMetric metric3 = metadataManager.getMetricFromUuid(uuidWithoutHost);
+    Assert.assertEquals(metric3.getMetricName(), timelineMetric.getMetricName());
+    Assert.assertEquals(metric3.getAppId(), timelineMetric.getAppId());
+    Assert.assertEquals(metric3.getInstanceId(), timelineMetric.getInstanceId());
+    Assert.assertEquals(metric3.getHostName(), null);
+
+    String metricName1 = metadataManager.getMetricNameFromUuid(uuid);
+    Assert.assertEquals(metricName1, "regionserver.Server.blockCacheExpressHitPercent");
+    String metricName2 = metadataManager.getMetricNameFromUuid(uuidWithoutHost);
+    Assert.assertEquals(metricName2, "regionserver.Server.blockCacheExpressHitPercent");
   }
+
+  @Test
+  public void testWildcardSanitization() throws IOException, SQLException, URISyntaxException {
+    // Initialize new manager
+    metadataManager = new TimelineMetricMetadataManager(new Configuration(), hdb);
+    final long now = System.currentTimeMillis();
+
+    TimelineMetrics timelineMetrics = new TimelineMetrics();
+
+    TimelineMetric metric1 = new TimelineMetric();
+    metric1.setMetricName("dummy_m1");
+    metric1.setHostName("dummy_host1");
+    metric1.setTimestamp(now);
+    metric1.setStartTime(now - 1000);
+    metric1.setAppId("dummy_app1");
+    metric1.setType("Integer");
+    metric1.setMetricValues(new TreeMap<Long, Double>() {{
+      put(now - 100, 1.0);
+      put(now - 200, 2.0);
+      put(now - 300, 3.0);
+    }});
+    timelineMetrics.getMetrics().add(metric1);
+
+    TimelineMetric metric2 = new TimelineMetric();
+    metric2.setMetricName("dummy_m2");
+    metric2.setHostName("dummy_host2");
+    metric2.setTimestamp(now);
+    metric2.setStartTime(now - 1000);
+    metric2.setAppId("dummy_app2");
+    metric2.setType("Integer");
+    metric2.setMetricValues(new TreeMap<Long, Double>() {{
+      put(now - 100, 1.0);
+      put(now - 200, 2.0);
+      put(now - 300, 3.0);
+    }});
+    timelineMetrics.getMetrics().add(metric2);
+
+    TimelineMetric metric3 = new TimelineMetric();
+    metric3.setMetricName("gummy_3");
+    metric3.setHostName("dummy_3h");
+    metric3.setTimestamp(now);
+    metric3.setStartTime(now - 1000);
+    metric3.setAppId("dummy_app3");
+    metric3.setType("Integer");
+    metric3.setMetricValues(new TreeMap<Long, Double>() {{
+      put(now - 100, 1.0);
+      put(now - 200, 2.0);
+      put(now - 300, 3.0);
+    }});
+    timelineMetrics.getMetrics().add(metric3);
+
+    Configuration metricsConf = new Configuration();
+    TimelineMetricConfiguration configuration = EasyMock.createNiceMock(TimelineMetricConfiguration.class);
+    expect(configuration.getMetricsConf()).andReturn(metricsConf).once();
+    replay(configuration);
+
+    hdb.insertMetricRecordsWithMetadata(metadataManager, timelineMetrics, true);
+
+    List<byte[]> uuids = metadataManager.getUuids(Collections.singletonList("dummy_m%"),
+      Collections.singletonList("dummy_host2"), "dummy_app1", null);
+    Assert.assertTrue(uuids.size() == 2);
+
+    uuids = metadataManager.getUuids(Collections.singletonList("dummy_m%"),
+      Collections.singletonList("dummy_host%"), "dummy_app2", null);
+    Assert.assertTrue(uuids.size() == 4);
+
+    Collection<String> metrics = Arrays.asList("dummy_m%", "dummy_3", "dummy_m2");
+    List<String> hosts = Arrays.asList("dummy_host%", "dummy_3h");
+    uuids = metadataManager.getUuids(metrics, hosts, "dummy_app2", null);
+    Assert.assertTrue(uuids.size() == 9);
+  }
+
+
 }
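
For readers skimming the test changes above: the reworked insertDummyRecords() no longer builds a real Configuration plus whitelist filter; it drives TimelineMetricMetadataManager with an EasyMock nice mock that only stubs the operation-mode property. A minimal standalone version of that pattern, with an illustrative class name that is not part of the patch:

import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;

import org.apache.hadoop.conf.Configuration;

public class NiceMockConfigSketch {
  // Returns a Configuration that reports "distributed" operation mode and falls back
  // to EasyMock's nice-mock defaults (null/0/false) for every other property asked for.
  public static Configuration distributedModeConf() {
    Configuration conf = createNiceMock(Configuration.class);
    expect(conf.get("timeline.metrics.service.operation.mode"))
        .andReturn("distributed").anyTimes();
    replay(conf);
    return conf;
  }
}

A nice mock answers unstubbed calls with type defaults, which keeps the test focused on the distributed-mode code path rather than on configuration plumbing.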

http://git-wip-us.apache.org/repos/asf/ambari/blob/041e4e9a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataSync.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataSync.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataSync.java
index a524b13..8d486e1 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataSync.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/discovery/TestMetadataSync.java
@@ -41,19 +41,19 @@ public class TestMetadataSync {
     PhoenixHBaseAccessor hBaseAccessor = createNiceMock(PhoenixHBaseAccessor.class);
 
     final TimelineMetricMetadata testMetadata1 = new TimelineMetricMetadata(
-      "m1", "a1", "", GAUGE.name(), System.currentTimeMillis(), true, false);
+      "m1", "a1", null, "", GAUGE.name(), System.currentTimeMillis(), true, false);
     final TimelineMetricMetadata testMetadata2 = new TimelineMetricMetadata(
-      "m2", "a2", "", GAUGE.name(), System.currentTimeMillis(), true, false);
+      "m2", "a2", null, "", GAUGE.name(), System.currentTimeMillis(), true, false);
 
     Map<TimelineMetricMetadataKey, TimelineMetricMetadata> metadata =
       new HashMap<TimelineMetricMetadataKey, TimelineMetricMetadata>() {{
-        put(new TimelineMetricMetadataKey("m1", "a1"), testMetadata1);
-        put(new TimelineMetricMetadataKey("m2", "a2"), testMetadata2);
+        put(new TimelineMetricMetadataKey("m1", "a1", null), testMetadata1);
+        put(new TimelineMetricMetadataKey("m2", "a2", null), testMetadata2);
       }};
 
-    Map<String, Set<String>> hostedApps = new HashMap<String, Set<String>>() {{
-      put("h1", new HashSet<>(Arrays.asList("a1")));
-      put("h2", new HashSet<>(Arrays.asList("a1", "a2")));
+    Map<String, TimelineMetricHostMetadata> hostedApps = new HashMap<String, TimelineMetricHostMetadata>() {{
+      put("h1", new TimelineMetricHostMetadata(new HashSet<>(Arrays.asList("a1"))));
+      put("h2", new TimelineMetricHostMetadata((new HashSet<>(Arrays.asList("a1", "a2")))));
     }};
 
     Map<String, Set<String>> hostedInstances = new HashMap<String, Set<String>>() {{
@@ -61,14 +61,14 @@ public class TestMetadataSync {
       put("i2", new HashSet<>(Arrays.asList("h1", "h2")));
     }};
 
-    expect(configuration.get("timeline.metrics.service.operation.mode", "")).andReturn("distributed");
+    expect(configuration.get("timeline.metrics.service.operation.mode")).andReturn("distributed");
     expect(hBaseAccessor.getTimelineMetricMetadata()).andReturn(metadata);
     expect(hBaseAccessor.getHostedAppsMetadata()).andReturn(hostedApps);
     expect(hBaseAccessor.getInstanceHostsMetdata()).andReturn(hostedInstances);
 
     replay(configuration, hBaseAccessor);
 
-    TimelineMetricMetadataManager metadataManager = new TimelineMetricMetadataManager(new Configuration(), hBaseAccessor);
+    TimelineMetricMetadataManager metadataManager = new TimelineMetricMetadataManager(configuration, hBaseAccessor);
 
     metadataManager.metricMetadataSync = new TimelineMetricMetadataSync(metadataManager);
 
@@ -78,13 +78,13 @@ public class TestMetadataSync {
 
     metadata = metadataManager.getMetadataCache();
     Assert.assertEquals(2, metadata.size());
-    Assert.assertTrue(metadata.containsKey(new TimelineMetricMetadataKey("m1", "a1")));
-    Assert.assertTrue(metadata.containsKey(new TimelineMetricMetadataKey("m2", "a2")));
+    Assert.assertTrue(metadata.containsKey(new TimelineMetricMetadataKey("m1", "a1", null)));
+    Assert.assertTrue(metadata.containsKey(new TimelineMetricMetadataKey("m2", "a2", null)));
 
     hostedApps = metadataManager.getHostedAppsCache();
     Assert.assertEquals(2, hostedApps.size());
-    Assert.assertEquals(1, hostedApps.get("h1").size());
-    Assert.assertEquals(2, hostedApps.get("h2").size());
+    Assert.assertEquals(1, hostedApps.get("h1").getHostedApps().size());
+    Assert.assertEquals(2, hostedApps.get("h2").getHostedApps().size());
 
     hostedInstances = metadataManager.getHostedInstanceCache();
     Assert.assertEquals(2, hostedInstances.size());
@@ -99,11 +99,11 @@ public class TestMetadataSync {
     PhoenixHBaseAccessor hBaseAccessor = createNiceMock(PhoenixHBaseAccessor.class);
 
     TimelineMetricMetadata metadata1 = new TimelineMetricMetadata(
-      "xxx.abc.yyy", "a1", "", GAUGE.name(), System.currentTimeMillis(), true, false);
+      "xxx.abc.yyy", "a1", null, "", GAUGE.name(), System.currentTimeMillis(), true, false);
     TimelineMetricMetadata metadata2 = new TimelineMetricMetadata(
-      "xxx.cdef.yyy", "a2", "", GAUGE.name(), System.currentTimeMillis(), true, false);
+      "xxx.cdef.yyy", "a2", null, "", GAUGE.name(), System.currentTimeMillis(), true, false);
     TimelineMetricMetadata metadata3 = new TimelineMetricMetadata(
-      "xxx.pqr.zzz", "a3", "", GAUGE.name(), System.currentTimeMillis(), true, false);
+      "xxx.pqr.zzz", "a3", null, "", GAUGE.name(), System.currentTimeMillis(), true, false);
 
     expect(configuration.get(TIMELINE_METRIC_METADATA_FILTERS)).andReturn("abc,cde");
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/041e4e9a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/uuid/TimelineMetricUuidManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/uuid/TimelineMetricUuidManagerTest.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/uuid/TimelineMetricUuidManagerTest.java
new file mode 100644
index 0000000..d1b3f01
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/uuid/TimelineMetricUuidManagerTest.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.uuid;
+
+import static org.easymock.EasyMock.anyString;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.TimelineClusterMetric;
+import org.easymock.EasyMock;
+import org.junit.Assert;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import java.io.BufferedReader;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.URL;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+public class TimelineMetricUuidManagerTest {
+
+
+  private List<String> apps = Arrays.asList("namenode",
+    "datanode", "master_hbase", "slave_hbase", "kafka_broker", "nimbus", "ams-hbase",
+    "accumulo", "nodemanager", "resourcemanager", "ambari_server", "HOST", "timeline_metric_store_watcher",
+    "jobhistoryserver", "hiveserver2", "hivemetastore", "applicationhistoryserver", "amssmoketestfake");
+
+  private Map<String, Set<String>> metricSet  = new HashMap<>(populateMetricWhitelistFromFile());
+
+  @Test
+  public void testHashBasedUuidForMetricName() throws SQLException {
+
+    MetricUuidGenStrategy strategy = new HashBasedUuidGenStrategy();
+    Map<String, TimelineClusterMetric> uuids = new HashMap<>();
+    for (String app : metricSet.keySet()) {
+      Set<String> metrics = metricSet.get(app);
+      for (String metric : metrics) {
+        TimelineClusterMetric timelineClusterMetric = new TimelineClusterMetric(metric, app, null, -1l);
+        byte[] uuid = strategy.computeUuid(timelineClusterMetric, 16);
+        Assert.assertNotNull(uuid);
+        Assert.assertTrue(uuid.length == 16);
+        String uuidStr = new String(uuid);
+        Assert.assertFalse(uuids.containsKey(uuidStr) && !uuids.containsValue(timelineClusterMetric));
+        if (uuids.containsKey(uuidStr) ) {
+          if (!uuids.containsValue(timelineClusterMetric)) {
+            System.out.println("COLLISION : " + timelineClusterMetric.toString() + " = "
+ uuids.get(uuidStr));
+          }
+        }
+        uuids.put(uuidStr, timelineClusterMetric);
+      }
+    }
+  }
+
+  @Test
+  public void testHaseBasedUuidForAppIds() throws SQLException {
+
+    MetricUuidGenStrategy strategy = new HashBasedUuidGenStrategy();
+    Map<String, TimelineClusterMetric> uuids = new HashMap<>();
+    for (String app : metricSet.keySet()) {
+      TimelineClusterMetric timelineClusterMetric = new TimelineClusterMetric("TestMetric", app, null, -1l);
+      byte[] uuid = strategy.computeUuid(timelineClusterMetric, 16);
+      String uuidStr = new String(uuid);
+      if (uuids.containsKey(uuidStr) ) {
+        if (!uuids.containsValue(timelineClusterMetric)) {
+          System.out.println("COLLISION : " + timelineClusterMetric.toString() + " = " +
uuids.get(uuidStr));
+        }
+      }
+      uuids.put(uuidStr, timelineClusterMetric);
+    }
+  }
+
+  @Test
+  public void testHashBasedUuidForHostnames() throws SQLException {
+
+    MetricUuidGenStrategy strategy = new HashBasedUuidGenStrategy();
+    Map<String, String> uuids = new HashMap<>();
+
+    List<String> hosts = new ArrayList<>();
+    String hostPrefix = "TestHost.";
+    String hostSuffix = ".ambari.apache.org";
+
+    for (int i=0; i<=2000; i++) {
+      hosts.add(hostPrefix + i + hostSuffix);
+    }
+
+    for (String host : hosts) {
+      byte[] uuid = strategy.computeUuid(host, 4);
+      Assert.assertNotNull(uuid);
+      Assert.assertTrue(uuid.length == 4);
+      String uuidStr = new String(uuid);
+      Assert.assertFalse(uuids.containsKey(uuidStr));
+      uuids.put(uuidStr, host);
+    }
+  }
+
+
+  @Test
+  public void testRandomUuidForWhitelistedMetrics() throws SQLException {
+
+    MetricUuidGenStrategy strategy = new RandomUuidGenStrategy();
+    Map<String, String> uuids = new HashMap<>();
+    for (String app : metricSet.keySet()) {
+      Set<String> metrics = metricSet.get(app);
+      for (String metric : metrics) {
+        byte[] uuid = strategy.computeUuid(new TimelineClusterMetric(metric, app, null, -1l), 16);
+        Assert.assertNotNull(uuid);
+        Assert.assertTrue(uuid.length == 16);
+        String uuidStr = new String(uuid);
+        Assert.assertFalse(uuids.containsKey(uuidStr) && !uuids.containsValue(metric));
+        uuids.put(uuidStr, metric);
+      }
+    }
+  }
+
+  public Map<String, Set<String>> populateMetricWhitelistFromFile() {
+
+
+    Map<String, Set<String>> metricSet = new HashMap<String, Set<String>>();
+    FileInputStream fstream = null;
+    BufferedReader br = null;
+    String strLine;
+    for (String appId : apps) {
+      URL fileUrl = ClassLoader.getSystemResource("metrics_def/" + appId.toUpperCase() + ".dat");
+
+      Set<String> metricsForApp = new HashSet<>();
+      try {
+        fstream = new FileInputStream(fileUrl.getPath());
+        br = new BufferedReader(new InputStreamReader(fstream));
+        while ((strLine = br.readLine()) != null)   {
+          strLine = strLine.trim();
+          metricsForApp.add(strLine);
+        }
+      } catch (Exception ioEx) {
+        System.out.println("Metrics for AppId " + appId + " not found.");
+      } finally {
+        if (br != null) {
+          try {
+            br.close();
+          } catch (IOException e) {
+          }
+        }
+
+        if (fstream != null) {
+          try {
+            fstream.close();
+          } catch (IOException e) {
+          }
+        }
+      }
+      metricsForApp.add("live_hosts");
+      metricSet.put(appId.contains("hbase") ? "hbase" : appId, metricsForApp);
+      System.out.println("Found " + metricsForApp.size() + " metrics for appId = " + appId);
+    }
+    return metricSet;
+  }
+}
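
The implementations of HashBasedUuidGenStrategy and RandomUuidGenStrategy exercised above are not included in this part of the patch. As a sketch only, under the assumption that the hash-based strategy derives a deterministic digest from the metric identity and truncates it to the requested length (16 bytes for metrics, 4 for hostnames), it could look like the following; the collision checks the test prints are exactly the risk such truncation accepts:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;

// Illustrative stand-in, not the AMBARI-21214 implementation: a deterministic,
// fixed-length id derived from "metricName|appId|instanceId" via MD5 and truncation.
public class HashUuidSketch {
  public static byte[] computeUuid(String metricName, String appId, String instanceId, int maxLength) {
    try {
      MessageDigest md5 = MessageDigest.getInstance("MD5");
      String key = metricName + "|" + appId + "|" + (instanceId == null ? "" : instanceId);
      byte[] digest = md5.digest(key.getBytes(StandardCharsets.UTF_8)); // 16 bytes for MD5
      // Truncate (or keep) to the requested uuid length, e.g. 16 for metrics, 4 for hosts.
      return Arrays.copyOf(digest, maxLength);
    } catch (NoSuchAlgorithmException e) {
      throw new IllegalStateException(e);
    }
  }
}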

