ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From swa...@apache.org
Subject [16/22] AMBARI-5707. Metrics system prototype implementation. (swagle)
Date Mon, 22 Sep 2014 18:02:26 GMT
http://git-wip-us.apache.org/repos/asf/ambari/blob/865d187e/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
new file mode 100644
index 0000000..7a45405
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
@@ -0,0 +1,204 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice;
+
+import java.io.IOException;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.Priority;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Unit tests for the in-memory application history store. Verifies three
+ * contracts: finish information may not be written before start information,
+ * duplicate start/finish writes are rejected, and bulk container writes stay
+ * within a bounded heap footprint. The {@code store} field and the
+ * {@code write*Data} helpers are inherited from
+ * {@code ApplicationHistoryStoreTestUtils}.
+ */
+public class TestMemoryApplicationHistoryStore extends
+    ApplicationHistoryStoreTestUtils {
+
+  @Before
+  public void setup() {
+    // Fresh, empty store for every test method.
+    store = new MemoryApplicationHistoryStore();
+  }
+
+  @Test
+  public void testReadWriteApplicationHistory() throws Exception {
+    // Out of order: finish data without prior start data must be rejected.
+    ApplicationId appId = ApplicationId.newInstance(0, 1);
+    try {
+      writeApplicationFinishData(appId);
+      Assert.fail();
+    } catch (IOException e) {
+      Assert.assertTrue(e.getMessage().contains(
+        "is stored before the start information"));
+    }
+    // Normal: start followed by finish for several distinct applications.
+    int numApps = 5;
+    for (int i = 1; i <= numApps; ++i) {
+      appId = ApplicationId.newInstance(0, i);
+      writeApplicationStartData(appId);
+      writeApplicationFinishData(appId);
+    }
+    Assert.assertEquals(numApps, store.getAllApplications().size());
+    for (int i = 1; i <= numApps; ++i) {
+      appId = ApplicationId.newInstance(0, i);
+      ApplicationHistoryData data = store.getApplication(appId);
+      Assert.assertNotNull(data);
+      // NOTE(review): assumes the test-utils writers record the id's string
+      // form in name/diagnostics — confirm against the base class.
+      Assert.assertEquals(appId.toString(), data.getApplicationName());
+      Assert.assertEquals(appId.toString(), data.getDiagnosticsInfo());
+    }
+    // Write again: repeating start or finish for the same app must fail.
+    appId = ApplicationId.newInstance(0, 1);
+    try {
+      writeApplicationStartData(appId);
+      Assert.fail();
+    } catch (IOException e) {
+      Assert.assertTrue(e.getMessage().contains("is already stored"));
+    }
+    try {
+      writeApplicationFinishData(appId);
+      Assert.fail();
+    } catch (IOException e) {
+      Assert.assertTrue(e.getMessage().contains("is already stored"));
+    }
+  }
+
+  @Test
+  public void testReadWriteApplicationAttemptHistory() throws Exception {
+    // Out of order: attempt finish data before attempt start data must fail.
+    ApplicationId appId = ApplicationId.newInstance(0, 1);
+    ApplicationAttemptId appAttemptId =
+        ApplicationAttemptId.newInstance(appId, 1);
+    try {
+      writeApplicationAttemptFinishData(appAttemptId);
+      Assert.fail();
+    } catch (IOException e) {
+      Assert.assertTrue(e.getMessage().contains(
+        "is stored before the start information"));
+    }
+    // Normal: several attempts under one started application.
+    int numAppAttempts = 5;
+    writeApplicationStartData(appId);
+    for (int i = 1; i <= numAppAttempts; ++i) {
+      appAttemptId = ApplicationAttemptId.newInstance(appId, i);
+      writeApplicationAttemptStartData(appAttemptId);
+      writeApplicationAttemptFinishData(appAttemptId);
+    }
+    Assert.assertEquals(numAppAttempts, store.getApplicationAttempts(appId)
+      .size());
+    for (int i = 1; i <= numAppAttempts; ++i) {
+      appAttemptId = ApplicationAttemptId.newInstance(appId, i);
+      ApplicationAttemptHistoryData data =
+          store.getApplicationAttempt(appAttemptId);
+      Assert.assertNotNull(data);
+      // NOTE(review): assumes the test-utils writers use the attempt id's
+      // string form for host/diagnostics — confirm against the base class.
+      Assert.assertEquals(appAttemptId.toString(), data.getHost());
+      Assert.assertEquals(appAttemptId.toString(), data.getDiagnosticsInfo());
+    }
+    writeApplicationFinishData(appId);
+    // Write again: duplicate attempt start/finish writes must fail.
+    appAttemptId = ApplicationAttemptId.newInstance(appId, 1);
+    try {
+      writeApplicationAttemptStartData(appAttemptId);
+      Assert.fail();
+    } catch (IOException e) {
+      Assert.assertTrue(e.getMessage().contains("is already stored"));
+    }
+    try {
+      writeApplicationAttemptFinishData(appAttemptId);
+      Assert.fail();
+    } catch (IOException e) {
+      Assert.assertTrue(e.getMessage().contains("is already stored"));
+    }
+  }
+
+  @Test
+  public void testReadWriteContainerHistory() throws Exception {
+    // Out of order: container finish data before container start data fails.
+    ApplicationId appId = ApplicationId.newInstance(0, 1);
+    ApplicationAttemptId appAttemptId =
+        ApplicationAttemptId.newInstance(appId, 1);
+    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+    try {
+      writeContainerFinishData(containerId);
+      Assert.fail();
+    } catch (IOException e) {
+      Assert.assertTrue(e.getMessage().contains(
+        "is stored before the start information"));
+    }
+    // Normal: several containers under one started attempt.
+    writeApplicationAttemptStartData(appAttemptId);
+    int numContainers = 5;
+    for (int i = 1; i <= numContainers; ++i) {
+      containerId = ContainerId.newInstance(appAttemptId, i);
+      writeContainerStartData(containerId);
+      writeContainerFinishData(containerId);
+    }
+    Assert
+      .assertEquals(numContainers, store.getContainers(appAttemptId).size());
+    for (int i = 1; i <= numContainers; ++i) {
+      containerId = ContainerId.newInstance(appAttemptId, i);
+      ContainerHistoryData data = store.getContainer(containerId);
+      Assert.assertNotNull(data);
+      // NOTE(review): assumes the writer derives priority from the container
+      // id — confirm against the base class.
+      Assert.assertEquals(Priority.newInstance(containerId.getId()),
+        data.getPriority());
+      Assert.assertEquals(containerId.toString(), data.getDiagnosticsInfo());
+    }
+    // The store is expected to report container #1 as the AM container.
+    ContainerHistoryData masterContainer = store.getAMContainer(appAttemptId);
+    Assert.assertNotNull(masterContainer);
+    Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1),
+      masterContainer.getContainerId());
+    writeApplicationAttemptFinishData(appAttemptId);
+    // Write again: duplicate container start/finish writes must fail.
+    containerId = ContainerId.newInstance(appAttemptId, 1);
+    try {
+      writeContainerStartData(containerId);
+      Assert.fail();
+    } catch (IOException e) {
+      Assert.assertTrue(e.getMessage().contains("is already stored"));
+    }
+    try {
+      writeContainerFinishData(containerId);
+      Assert.fail();
+    } catch (IOException e) {
+      Assert.assertTrue(e.getMessage().contains("is already stored"));
+    }
+  }
+
+  @Test
+  public void testMassiveWriteContainerHistory() throws IOException {
+    // Writes 100k container records and checks the heap grew by < 200 MB.
+    long mb = 1024 * 1024;
+    Runtime runtime = Runtime.getRuntime();
+    long usedMemoryBefore = (runtime.totalMemory() - runtime.freeMemory()) / mb;
+    int numContainers = 100000;
+    ApplicationId appId = ApplicationId.newInstance(0, 1);
+    ApplicationAttemptId appAttemptId =
+        ApplicationAttemptId.newInstance(appId, 1);
+    for (int i = 1; i <= numContainers; ++i) {
+      ContainerId containerId = ContainerId.newInstance(appAttemptId, i);
+      writeContainerStartData(containerId);
+      writeContainerFinishData(containerId);
+    }
+    long usedMemoryAfter = (runtime.totalMemory() - runtime.freeMemory()) / mb;
+    // NOTE(review): heap-delta assertions depend on GC timing and can be
+    // flaky; consider loosening the bound if this proves unstable in CI.
+    Assert.assertTrue((usedMemoryAfter - usedMemoryBefore) < 200);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/865d187e/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
new file mode 100644
index 0000000..72e43d1
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
+
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.junit.Assert;
+import org.junit.Test;
+import java.util.Arrays;
+import java.util.HashMap;
+
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.AbstractTimelineAggregator.*;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.Condition;
+
+/**
+ * Tests for the Phoenix SQL helpers of the timeline metric store: the
+ * prepared-statement WHERE-clause builder and JSON serialization of an
+ * hourly host aggregate.
+ */
+public class TestPhoenixTransactSQL {
+  @Test
+  public void testConditionClause() throws Exception {
+    // Fully-populated condition: metric names, host, app, instance and a
+    // half-open [start, end) time window.
+    Condition condition = new Condition(
+      Arrays.asList("cpu_user", "mem_free"), "h1", "a1", "i1",
+        1407959718L, 1407959918L, null, false);
+
+    String preparedClause = condition.getConditionClause();
+    // One '?' placeholder per bound value; two for the IN-list of metrics.
+    String expectedClause = "METRIC_NAME IN (?,?) AND HOSTNAME = ? AND APP_ID" +
+      " = ? AND INSTANCE_ID = ? AND START_TIME >= ? AND START_TIME < ?";
+
+    Assert.assertNotNull(preparedClause);
+    Assert.assertEquals(expectedClause, preparedClause);
+  }
+
+  @Test
+  public void testTimelineMetricHourlyAggregateSerialization() throws Exception {
+    final long now = System.currentTimeMillis();
+
+    // Build a metric with two samples 10s apart and fold it into an
+    // hourly host aggregate.
+    MetricHostAggregate hostAggregate = new MetricHostAggregate();
+    TimelineMetric metric = new TimelineMetric();
+    metric.setMetricName("m01");
+    metric.setAppId("app01");
+    metric.setHostName("h1");
+    metric.setInstanceId("i1");
+    metric.setTimestamp(now);
+    metric.setStartTime(now);
+    metric.addMetricValues(new HashMap<Long, Double>() {{
+      put(now, 0.1);
+      put(now + 10000, 0.2);
+    }});
+
+    hostAggregate.updateMinuteAggregates(metric);
+    // NOTE(review): this test only verifies that serialization does not
+    // throw — it makes no assertions on the produced JSON. Consider
+    // asserting on the mapper output instead of printing it.
+    ObjectMapper mapper = new ObjectMapper();
+    System.out.println(mapper.writeValueAsString(hostAggregate));
+    System.out.println(hostAggregate.toJSON());
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/865d187e/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
new file mode 100644
index 0000000..c893314
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
+
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+/**
+ * A stub {@code TimelineMetricStore} that returns a fixed pair of metrics
+ * ("cpu_user" and "mem_free" on host c6401), so higher layers can be tested
+ * without a real metric backend. Query arguments are ignored.
+ */
+public class TestTimelineMetricStore implements TimelineMetricStore {
+  @Override
+  public TimelineMetrics getTimelineMetrics(List<String> metricNames,
+      String hostname, String applicationId, String instanceId, Long startTime,
+      Long endTime, Integer limit, boolean groupedByHost) throws SQLException,
+    IOException {
+    // Always returns the same canned two-metric response.
+    TimelineMetrics timelineMetrics = new TimelineMetrics();
+    List<TimelineMetric> metricList = new ArrayList<TimelineMetric>();
+    timelineMetrics.setMetrics(metricList);
+    TimelineMetric metric1 = new TimelineMetric();
+    TimelineMetric metric2 = new TimelineMetric();
+    metricList.add(metric1);
+    metricList.add(metric2);
+    // Metric 1: cpu_user for app "1", no instance id, three samples.
+    metric1.setMetricName("cpu_user");
+    metric1.setAppId("1");
+    metric1.setInstanceId(null);
+    metric1.setHostName("c6401");
+    metric1.setStartTime(1407949812L);
+    metric1.setMetricValues(new HashMap<Long, Double>() {{
+      put(1407949812L, 1.0d);
+      put(1407949912L, 1.8d);
+      put(1407950002L, 0.7d);
+    }});
+
+    // Metric 2: mem_free for app "2", instance "3", three samples.
+    metric2.setMetricName("mem_free");
+    metric2.setAppId("2");
+    metric2.setInstanceId("3");
+    metric2.setHostName("c6401");
+    metric2.setStartTime(1407949812L);
+    metric2.setMetricValues(new HashMap<Long, Double>() {{
+      put(1407949812L, 2.5d);
+      put(1407949912L, 3.0d);
+      put(1407950002L, 0.9d);
+    }});
+
+    return timelineMetrics;
+  }
+
+  @Override
+  public TimelineMetric getTimelineMetric(String metricName, String hostname,
+      String applicationId, String instanceId, Long startTime, Long endTime,
+      Integer limit) throws SQLException, IOException {
+
+    // Not implemented by this stub; callers receive null.
+    return null;
+  }
+
+  @Override
+  public TimelinePutResponse putMetrics(TimelineMetrics metrics)
+      throws SQLException, IOException {
+
+    // Accept everything: an empty response reports no errors.
+    return new TimelinePutResponse();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/865d187e/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java
new file mode 100644
index 0000000..d684a27
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.io.WritableComparator;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Tests for {@code GenericObjectMapper}: the reverse-ordered long encoding
+ * (byte-wise comparison order is the reverse of numeric order) and
+ * round-tripping of common value types through write/read.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class TestGenericObjectMapper {
+
+  @Test
+  public void testEncoding() {
+    // Extremes, zero, values around single-byte boundaries, and a negative.
+    testEncoding(Long.MAX_VALUE);
+    testEncoding(Long.MIN_VALUE);
+    testEncoding(0l);
+    testEncoding(128l);
+    testEncoding(256l);
+    testEncoding(512l);
+    testEncoding(-256l);
+  }
+
+  /**
+   * Round-trips {@code l} through the reverse-ordered encoding, checks
+   * decoding at a non-zero buffer offset, and verifies that the encoded
+   * bytes of adjacent values compare in reverse numeric order.
+   */
+  private static void testEncoding(long l) {
+    byte[] b = GenericObjectMapper.writeReverseOrderedLong(l);
+    assertEquals("error decoding", l,
+        GenericObjectMapper.readReverseOrderedLong(b, 0));
+    // The 8 encoded bytes must also decode correctly at offset 5.
+    byte[] buf = new byte[16];
+    System.arraycopy(b, 0, buf, 5, 8);
+    assertEquals("error decoding at offset", l,
+        GenericObjectMapper.readReverseOrderedLong(buf, 5));
+    if (l > Long.MIN_VALUE) {
+      // Smaller value must encode to byte-wise GREATER bytes (reverse order).
+      byte[] a = GenericObjectMapper.writeReverseOrderedLong(l-1);
+      assertEquals("error preserving ordering", 1,
+          WritableComparator.compareBytes(a, 0, a.length, b, 0, b.length));
+    }
+    if (l < Long.MAX_VALUE) {
+      // Larger value must encode to byte-wise SMALLER bytes.
+      byte[] c = GenericObjectMapper.writeReverseOrderedLong(l+1);
+      assertEquals("error preserving ordering", 1,
+          WritableComparator.compareBytes(b, 0, b.length, c, 0, c.length));
+    }
+  }
+
+  /** Asserts that {@code o} survives a write/read round trip unchanged. */
+  private static void verify(Object o) throws IOException {
+    assertEquals(o, GenericObjectMapper.read(GenericObjectMapper.write(o)));
+  }
+
+  @Test
+  public void testValueTypes() throws IOException {
+    verify(Integer.MAX_VALUE);
+    verify(Integer.MIN_VALUE);
+    // Longs that fit in an int are read back as Integer.
+    assertEquals(Integer.MAX_VALUE, GenericObjectMapper.read(
+        GenericObjectMapper.write((long) Integer.MAX_VALUE)));
+    assertEquals(Integer.MIN_VALUE, GenericObjectMapper.read(
+        GenericObjectMapper.write((long) Integer.MIN_VALUE)));
+    // Just outside the int range they stay Long.
+    verify((long)Integer.MAX_VALUE + 1l);
+    verify((long)Integer.MIN_VALUE - 1l);
+
+    verify(Long.MAX_VALUE);
+    verify(Long.MIN_VALUE);
+
+    assertEquals(42, GenericObjectMapper.read(GenericObjectMapper.write(42l)));
+    verify(42);
+    verify(1.23);
+    verify("abc");
+    verify(true);
+    List<String> list = new ArrayList<String>();
+    list.add("123");
+    list.add("abc");
+    verify(list);
+    Map<String,String> map = new HashMap<String,String>();
+    map.put("k1","v1");
+    map.put("k2","v2");
+    verify(map);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/865d187e/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java
new file mode 100644
index 0000000..9b27309
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java
@@ -0,0 +1,253 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.iq80.leveldb.DBIterator;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.writeReverseOrderedLong;
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Tests for the LevelDB-backed timeline store. Inherits the shared read
+ * tests from {@code TimelineStoreTestUtils} and adds LevelDB-specific
+ * coverage: start-time cache sizing, entity deletion via raw DB iterators,
+ * age-based discard, and interaction of deletion with from-timestamp
+ * queries. Each test runs against a fresh on-disk store under target/.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class TestLeveldbTimelineStore extends TimelineStoreTestUtils {
+  private FileContext fsContext;
+  private File fsPath;
+
+  @Before
+  public void setup() throws Exception {
+    // Recreate the LevelDB directory from scratch, disable TTL-based
+    // eviction, and prime the store with the shared fixture data.
+    fsContext = FileContext.getLocalFSFileContext();
+    Configuration conf = new Configuration();
+    fsPath = new File("target", this.getClass().getSimpleName() +
+        "-tmpDir").getAbsoluteFile();
+    fsContext.delete(new Path(fsPath.getAbsolutePath()), true);
+    conf.set(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH,
+        fsPath.getAbsolutePath());
+    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_TTL_ENABLE, false);
+    store = new LeveldbTimelineStore();
+    store.init(conf);
+    store.start();
+    loadTestData();
+    loadVerificationData();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    // Stop the store and remove its on-disk state.
+    store.stop();
+    fsContext.delete(new Path(fsPath.getAbsolutePath()), true);
+  }
+
+  @Test
+  public void testGetSingleEntity() throws IOException {
+    // Run once warm, then again with a cleared start-time cache, and
+    // re-load the fixtures afterwards.
+    super.testGetSingleEntity();
+    ((LeveldbTimelineStore)store).clearStartTimeCache();
+    super.testGetSingleEntity();
+    loadTestData();
+  }
+
+  @Test
+  public void testGetEntities() throws IOException {
+    super.testGetEntities();
+  }
+
+  @Test
+  public void testGetEntitiesWithFromId() throws IOException {
+    super.testGetEntitiesWithFromId();
+  }
+
+  @Test
+  public void testGetEntitiesWithFromTs() throws IOException {
+    super.testGetEntitiesWithFromTs();
+  }
+
+  @Test
+  public void testGetEntitiesWithPrimaryFilters() throws IOException {
+    super.testGetEntitiesWithPrimaryFilters();
+  }
+
+  @Test
+  public void testGetEntitiesWithSecondaryFilters() throws IOException {
+    super.testGetEntitiesWithSecondaryFilters();
+  }
+
+  @Test
+  public void testGetEvents() throws IOException {
+    super.testGetEvents();
+  }
+
+  @Test
+  public void testCacheSizes() {
+    // Defaults are 10000; each cache size is independently configurable.
+    Configuration conf = new Configuration();
+    assertEquals(10000, LeveldbTimelineStore.getStartTimeReadCacheSize(conf));
+    assertEquals(10000, LeveldbTimelineStore.getStartTimeWriteCacheSize(conf));
+    conf.setInt(
+        YarnConfiguration.TIMELINE_SERVICE_LEVELDB_START_TIME_READ_CACHE_SIZE,
+        10001);
+    assertEquals(10001, LeveldbTimelineStore.getStartTimeReadCacheSize(conf));
+    conf = new Configuration();
+    conf.setInt(
+        YarnConfiguration.TIMELINE_SERVICE_LEVELDB_START_TIME_WRITE_CACHE_SIZE,
+        10002);
+    assertEquals(10002, LeveldbTimelineStore.getStartTimeWriteCacheSize(conf));
+  }
+
+  /**
+   * Deletes the next entity of {@code entityType} via the store's raw DB
+   * iterators; both iterators are always closed. Returns whatever
+   * {@code LeveldbTimelineStore.deleteNextEntity} reports.
+   */
+  private boolean deleteNextEntity(String entityType, byte[] ts)
+      throws IOException, InterruptedException {
+    DBIterator iterator = null;
+    DBIterator pfIterator = null;
+    try {
+      iterator = ((LeveldbTimelineStore)store).getDbIterator(false);
+      pfIterator = ((LeveldbTimelineStore)store).getDbIterator(false);
+      return ((LeveldbTimelineStore)store).deleteNextEntity(entityType, ts,
+          iterator, pfIterator, false);
+    } finally {
+      IOUtils.cleanup(null, iterator, pfIterator);
+    }
+  }
+
+  @Test
+  public void testGetEntityTypes() throws IOException {
+    List<String> entityTypes = ((LeveldbTimelineStore)store).getEntityTypes();
+    assertEquals(4, entityTypes.size());
+    assertEquals(entityType1, entityTypes.get(0));
+    assertEquals(entityType2, entityTypes.get(1));
+    assertEquals(entityType4, entityTypes.get(2));
+    assertEquals(entityType5, entityTypes.get(3));
+  }
+
+  @Test
+  public void testDeleteEntities() throws IOException, InterruptedException {
+    assertEquals(2, getEntities("type_1").size());
+    assertEquals(1, getEntities("type_2").size());
+
+    // A timestamp older than any fixture entity deletes nothing.
+    assertEquals(false, deleteNextEntity(entityType1,
+        writeReverseOrderedLong(122l)));
+    assertEquals(2, getEntities("type_1").size());
+    assertEquals(1, getEntities("type_2").size());
+
+    // Deleting the first type_1 entity must also drop it from the type_2
+    // entity's related-entities and from the primary-filter index.
+    assertEquals(true, deleteNextEntity(entityType1,
+        writeReverseOrderedLong(123l)));
+    List<TimelineEntity> entities = getEntities("type_2");
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId2, entityType2, events2, Collections.singletonMap(
+        entityType1, Collections.singleton(entityId1b)), EMPTY_PRIMARY_FILTERS,
+        EMPTY_MAP, entities.get(0));
+    entities = getEntitiesWithPrimaryFilter("type_1", userFilter);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    // Age-based discard: first pass removes only older entities ...
+    ((LeveldbTimelineStore)store).discardOldEntities(-123l);
+    assertEquals(1, getEntities("type_1").size());
+    assertEquals(0, getEntities("type_2").size());
+    assertEquals(3, ((LeveldbTimelineStore)store).getEntityTypes().size());
+
+    // ... second pass with a newer cutoff empties the store.
+    ((LeveldbTimelineStore)store).discardOldEntities(123l);
+    assertEquals(0, getEntities("type_1").size());
+    assertEquals(0, getEntities("type_2").size());
+    assertEquals(0, ((LeveldbTimelineStore)store).getEntityTypes().size());
+    assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
+  }
+
+  @Test
+  public void testDeleteEntitiesPrimaryFilters()
+      throws IOException, InterruptedException {
+    // Add an extra entity carrying a different primary filter value.
+    Map<String, Set<Object>> primaryFilter =
+        Collections.singletonMap("user", Collections.singleton(
+            (Object) "otheruser"));
+    TimelineEntities atsEntities = new TimelineEntities();
+    atsEntities.setEntities(Collections.singletonList(createEntity(entityId1b,
+        entityType1, 789l, Collections.singletonList(ev2), null, primaryFilter,
+        null)));
+    TimelinePutResponse response = store.put(atsEntities);
+    assertEquals(0, response.getErrors().size());
+
+    NameValuePair pfPair = new NameValuePair("user", "otheruser");
+    List<TimelineEntity> entities = getEntitiesWithPrimaryFilter("type_1",
+        pfPair);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1b, entityType1, Collections.singletonList(ev2),
+        EMPTY_REL_ENTITIES, primaryFilter, EMPTY_MAP, entities.get(0));
+
+    entities = getEntitiesWithPrimaryFilter("type_1", userFilter);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    // Old cutoff keeps everything visible through both filter values ...
+    ((LeveldbTimelineStore)store).discardOldEntities(-123l);
+    assertEquals(1, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
+    assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
+
+    // ... newer cutoff removes the entities and their filter-index entries.
+    ((LeveldbTimelineStore)store).discardOldEntities(123l);
+    assertEquals(0, getEntities("type_1").size());
+    assertEquals(0, getEntities("type_2").size());
+    assertEquals(0, ((LeveldbTimelineStore)store).getEntityTypes().size());
+
+    assertEquals(0, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
+    assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
+  }
+
+  @Test
+  public void testFromTsWithDeletion()
+      throws IOException, InterruptedException {
+    long l = System.currentTimeMillis();
+    assertEquals(2, getEntitiesFromTs("type_1", l).size());
+    assertEquals(1, getEntitiesFromTs("type_2", l).size());
+    assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        l).size());
+    // After discarding everything, from-ts queries must also be empty.
+    ((LeveldbTimelineStore)store).discardOldEntities(123l);
+    assertEquals(0, getEntitiesFromTs("type_1", l).size());
+    assertEquals(0, getEntitiesFromTs("type_2", l).size());
+    assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        l).size());
+    assertEquals(0, getEntities("type_1").size());
+    assertEquals(0, getEntities("type_2").size());
+    assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        l).size());
+    // Re-inserted fixtures are newer than l: invisible to from-ts queries
+    // with the old timestamp, but visible to unrestricted queries.
+    loadTestData();
+    assertEquals(0, getEntitiesFromTs("type_1", l).size());
+    assertEquals(0, getEntitiesFromTs("type_2", l).size());
+    assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        l).size());
+    assertEquals(2, getEntities("type_1").size());
+    assertEquals(1, getEntities("type_2").size());
+    assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/865d187e/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java
new file mode 100644
index 0000000..415de53
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+
+/**
+ * Runs the shared timeline-store test scenarios from
+ * {@link TimelineStoreTestUtils} against the in-memory
+ * {@link MemoryTimelineStore} implementation.
+ */
+public class TestMemoryTimelineStore extends TimelineStoreTestUtils {
+
+  // Fresh store per test: init + start it, then insert the shared fixture
+  // data and compute the expected values the assertions compare against.
+  @Before
+  public void setup() throws Exception {
+    store = new MemoryTimelineStore();
+    store.init(new YarnConfiguration());
+    store.start();
+    loadTestData();
+    loadVerificationData();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    store.stop();
+  }
+
+  // Accessor for subclasses/other tests that need the store under test.
+  public TimelineStore getTimelineStore() {
+    return store;
+  }
+
+  // Each @Test below simply delegates to the shared scenario defined in
+  // TimelineStoreTestUtils, so every store implementation runs the same checks.
+  @Test
+  public void testGetSingleEntity() throws IOException {
+    super.testGetSingleEntity();
+  }
+
+  @Test
+  public void testGetEntities() throws IOException {
+    super.testGetEntities();
+  }
+
+  @Test
+  public void testGetEntitiesWithFromId() throws IOException {
+    super.testGetEntitiesWithFromId();
+  }
+
+  @Test
+  public void testGetEntitiesWithFromTs() throws IOException {
+    super.testGetEntitiesWithFromTs();
+  }
+
+  @Test
+  public void testGetEntitiesWithPrimaryFilters() throws IOException {
+    super.testGetEntitiesWithPrimaryFilters();
+  }
+
+  @Test
+  public void testGetEntitiesWithSecondaryFilters() throws IOException {
+    super.testGetEntitiesWithSecondaryFilters();
+  }
+
+  @Test
+  public void testGetEvents() throws IOException {
+    super.testGetEvents();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/865d187e/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
new file mode 100644
index 0000000..d760536
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
@@ -0,0 +1,789 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;
+
+/**
+ * Shared fixture and test scenarios for {@link TimelineStore}
+ * implementations. Concrete subclasses provide the store in their setup
+ * and call {@link #loadTestData()} / {@link #loadVerificationData()}.
+ */
+public class TimelineStoreTestUtils {
+
+  // Immutable "empty" expected values shared by the verify* helpers.
+  protected static final List<TimelineEvent> EMPTY_EVENTS =
+      Collections.emptyList();
+  protected static final Map<String, Object> EMPTY_MAP =
+      Collections.emptyMap();
+  protected static final Map<String, Set<Object>> EMPTY_PRIMARY_FILTERS =
+      Collections.emptyMap();
+  protected static final Map<String, Set<String>> EMPTY_REL_ENTITIES =
+      Collections.emptyMap();
+
+  // Store under test; assigned by the concrete subclass's setup method.
+  protected TimelineStore store;
+  // Expected entity ids/types; populated by loadVerificationData().
+  protected String entityId1;
+  protected String entityType1;
+  protected String entityId1b;
+  protected String entityId2;
+  protected String entityType2;
+  protected String entityId4;
+  protected String entityType4;
+  protected String entityId5;
+  protected String entityType5;
+  // Expected filter/info maps; populated by loadVerificationData().
+  protected Map<String, Set<Object>> primaryFilters;
+  protected Map<String, Object> secondaryFilters;
+  protected Map<String, Object> allFilters;
+  protected Map<String, Object> otherInfo;
+  protected Map<String, Set<String>> relEntityMap;
+  protected Map<String, Set<String>> relEntityMap2;
+  // Primary/secondary filters used as query arguments by the tests.
+  protected NameValuePair userFilter;
+  protected NameValuePair numericFilter1;
+  protected NameValuePair numericFilter2;
+  protected NameValuePair numericFilter3;
+  protected Collection<NameValuePair> goodTestingFilters;
+  protected Collection<NameValuePair> badTestingFilters;
+  // Expected events and their info; populated by loadVerificationData().
+  protected TimelineEvent ev1;
+  protected TimelineEvent ev2;
+  protected TimelineEvent ev3;
+  protected TimelineEvent ev4;
+  protected Map<String, Object> eventInfo;
+  protected List<TimelineEvent> events1;
+  protected List<TimelineEvent> events2;
+  // Timestamp captured by loadTestData() just before the first insert.
+  protected long beforeTs;
+
+  /**
+   * Load test data into the store under test: two "type_1" entities
+   * (id_1, id_2) built up across several puts, one "type_2" entity, one
+   * "type_4" entity related to a "type_5" entity, and one deliberately
+   * invalid entity that must be rejected. Asserts each put response.
+   */
+  protected void loadTestData() throws IOException {
+    // Timestamp just before any insert; the fromTs tests query against it.
+    beforeTs = System.currentTimeMillis()-1;
+    TimelineEntities entities = new TimelineEntities();
+    Map<String, Set<Object>> primaryFilters =
+        new HashMap<String, Set<Object>>();
+    Set<Object> l1 = new HashSet<Object>();
+    l1.add("username");
+    Set<Object> l2 = new HashSet<Object>();
+    l2.add((long)Integer.MAX_VALUE);
+    Set<Object> l3 = new HashSet<Object>();
+    l3.add("123abc");
+    Set<Object> l4 = new HashSet<Object>();
+    l4.add((long)Integer.MAX_VALUE + 1l);
+    primaryFilters.put("user", l1);
+    primaryFilters.put("appname", l2);
+    primaryFilters.put("other", l3);
+    primaryFilters.put("long", l4);
+    Map<String, Object> secondaryFilters = new HashMap<String, Object>();
+    secondaryFilters.put("startTime", 123456l);
+    secondaryFilters.put("status", "RUNNING");
+    Map<String, Object> otherInfo1 = new HashMap<String, Object>();
+    otherInfo1.put("info1", "val1");
+    otherInfo1.putAll(secondaryFilters);
+
+    // NOTE(review): these locals intentionally shadow the protected fields
+    // of the same names; the fields are assigned by loadVerificationData().
+    // entityId1b and entityId2 share the id "id_2" under different types.
+    String entityId1 = "id_1";
+    String entityType1 = "type_1";
+    String entityId1b = "id_2";
+    String entityId2 = "id_2";
+    String entityType2 = "type_2";
+    String entityId4 = "id_4";
+    String entityType4 = "type_4";
+    String entityId5 = "id_5";
+    String entityType5 = "type_5";
+
+    Map<String, Set<String>> relatedEntities =
+        new HashMap<String, Set<String>>();
+    relatedEntities.put(entityType2, Collections.singleton(entityId2));
+
+    // type_2 entity: two events, no explicit start time (derived from events).
+    TimelineEvent ev3 = createEvent(789l, "launch_event", null);
+    TimelineEvent ev4 = createEvent(-123l, "init_event", null);
+    List<TimelineEvent> events = new ArrayList<TimelineEvent>();
+    events.add(ev3);
+    events.add(ev4);
+    entities.setEntities(Collections.singletonList(createEntity(entityId2,
+        entityType2, null, events, null, null, null)));
+    TimelinePutResponse response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+
+    // First halves of the two type_1 entities: start event, related
+    // entities, primary filters and part of the other info.
+    TimelineEvent ev1 = createEvent(123l, "start_event", null);
+    entities.setEntities(Collections.singletonList(createEntity(entityId1,
+        entityType1, 123l, Collections.singletonList(ev1),
+        relatedEntities, primaryFilters, otherInfo1)));
+    response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+    entities.setEntities(Collections.singletonList(createEntity(entityId1b,
+        entityType1, null, Collections.singletonList(ev1), relatedEntities,
+        primaryFilters, otherInfo1)));
+    response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+
+    // Second halves: end event plus the remaining other info, exercising
+    // the store's merge of repeated puts for the same entity.
+    Map<String, Object> eventInfo = new HashMap<String, Object>();
+    eventInfo.put("event info 1", "val1");
+    TimelineEvent ev2 = createEvent(456l, "end_event", eventInfo);
+    Map<String, Object> otherInfo2 = new HashMap<String, Object>();
+    otherInfo2.put("info2", "val2");
+    entities.setEntities(Collections.singletonList(createEntity(entityId1,
+        entityType1, null, Collections.singletonList(ev2), null,
+        primaryFilters, otherInfo2)));
+    response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+    entities.setEntities(Collections.singletonList(createEntity(entityId1b,
+        entityType1, 789l, Collections.singletonList(ev2), null,
+        primaryFilters, otherInfo2)));
+    response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+
+    // Entity with neither a start time nor any events must be rejected
+    // with a NO_START_TIME error.
+    entities.setEntities(Collections.singletonList(createEntity(
+        "badentityid", "badentity", null, null, null, null, otherInfo1)));
+    response = store.put(entities);
+    assertEquals(1, response.getErrors().size());
+    TimelinePutError error = response.getErrors().get(0);
+    assertEquals("badentityid", error.getEntityId());
+    assertEquals("badentity", error.getEntityType());
+    assertEquals(TimelinePutError.NO_START_TIME, error.getErrorCode());
+
+    // type_4 entity related to a type_5 entity (used by the related-entity
+    // expectations in testGetSingleEntity).
+    relatedEntities.clear();
+    relatedEntities.put(entityType5, Collections.singleton(entityId5));
+    entities.setEntities(Collections.singletonList(createEntity(entityId4,
+        entityType4, 42l, null, relatedEntities, null, null)));
+    response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+  }
+
+  /**
+   * Populate the expected-value fields (ids, filters, events, maps) that
+   * the test scenarios compare against the data inserted by
+   * {@link #loadTestData()}.
+   */
+  protected void loadVerificationData() throws Exception {
+    userFilter = new NameValuePair("user", "username");
+    numericFilter1 = new NameValuePair("appname", Integer.MAX_VALUE);
+    numericFilter2 = new NameValuePair("long", (long)Integer.MAX_VALUE + 1l);
+    numericFilter3 = new NameValuePair("other", "123abc");
+    goodTestingFilters = new ArrayList<NameValuePair>();
+    goodTestingFilters.add(new NameValuePair("appname", Integer.MAX_VALUE));
+    goodTestingFilters.add(new NameValuePair("status", "RUNNING"));
+    badTestingFilters = new ArrayList<NameValuePair>();
+    badTestingFilters.add(new NameValuePair("appname", Integer.MAX_VALUE));
+    badTestingFilters.add(new NameValuePair("status", "FINISHED"));
+
+    primaryFilters = new HashMap<String, Set<Object>>();
+    Set<Object> l1 = new HashSet<Object>();
+    l1.add("username");
+    Set<Object> l2 = new HashSet<Object>();
+    // NOTE(review): loadTestData() stored this value as (long)
+    // Integer.MAX_VALUE but it is expected back here as an Integer —
+    // presumably the store narrows int-range longs on read; confirm
+    // against the store's value serializer.
+    l2.add(Integer.MAX_VALUE);
+    Set<Object> l3 = new HashSet<Object>();
+    l3.add("123abc");
+    Set<Object> l4 = new HashSet<Object>();
+    l4.add((long)Integer.MAX_VALUE + 1l);
+    primaryFilters.put("user", l1);
+    primaryFilters.put("appname", l2);
+    primaryFilters.put("other", l3);
+    primaryFilters.put("long", l4);
+    secondaryFilters = new HashMap<String, Object>();
+    // Same int-vs-long expectation as above: stored as 123456l, read as int.
+    secondaryFilters.put("startTime", 123456);
+    secondaryFilters.put("status", "RUNNING");
+    // allFilters = secondary filters plus every primary-filter value,
+    // flattened to one value per key.
+    allFilters = new HashMap<String, Object>();
+    allFilters.putAll(secondaryFilters);
+    for (Entry<String, Set<Object>> pf : primaryFilters.entrySet()) {
+      for (Object o : pf.getValue()) {
+        allFilters.put(pf.getKey(), o);
+      }
+    }
+    // Merged other-info from both puts of the type_1 entities.
+    otherInfo = new HashMap<String, Object>();
+    otherInfo.put("info1", "val1");
+    otherInfo.put("info2", "val2");
+    otherInfo.putAll(secondaryFilters);
+
+    entityId1 = "id_1";
+    entityType1 = "type_1";
+    entityId1b = "id_2";
+    entityId2 = "id_2";
+    entityType2 = "type_2";
+    entityId4 = "id_4";
+    entityType4 = "type_4";
+    entityId5 = "id_5";
+    entityType5 = "type_5";
+
+    ev1 = createEvent(123l, "start_event", null);
+
+    eventInfo = new HashMap<String, Object>();
+    eventInfo.put("event info 1", "val1");
+    ev2 = createEvent(456l, "end_event", eventInfo);
+    // Expected event order for type_1 entities: newest (ev2) first.
+    events1 = new ArrayList<TimelineEvent>();
+    events1.add(ev2);
+    events1.add(ev1);
+
+    // type_2's expected related entities: both type_1 entities point at it.
+    relEntityMap =
+        new HashMap<String, Set<String>>();
+    Set<String> ids = new HashSet<String>();
+    ids.add(entityId1);
+    ids.add(entityId1b);
+    relEntityMap.put(entityType1, ids);
+
+    // type_5's expected related entities: referenced by the type_4 entity.
+    relEntityMap2 =
+        new HashMap<String, Set<String>>();
+    relEntityMap2.put(entityType4, Collections.singleton(entityId4));
+
+    ev3 = createEvent(789l, "launch_event", null);
+    ev4 = createEvent(-123l, "init_event", null);
+    events2 = new ArrayList<TimelineEvent>();
+    events2.add(ev3);
+    events2.add(ev4);
+  }
+
+  /**
+   * Verifies single-entity retrieval: a miss returns no data, hits return
+   * the merged entity with all fields, and field subsets (EVENTS,
+   * LAST_EVENT_ONLY, PRIMARY_FILTERS, OTHER_INFO, RELATED_ENTITIES)
+   * return only what was requested.
+   */
+  public void testGetSingleEntity() throws IOException {
+    // test getting entity info
+    // "id_1" was never stored under "type_2", so every field is expected null.
+    verifyEntityInfo(null, null, null, null, null, null,
+        store.getEntity("id_1", "type_2", EnumSet.allOf(Field.class)));
+
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, 123l, store.getEntity(entityId1,
+        entityType1, EnumSet.allOf(Field.class)));
+
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, 123l, store.getEntity(entityId1b,
+        entityType1, EnumSet.allOf(Field.class)));
+
+    verifyEntityInfo(entityId2, entityType2, events2, relEntityMap,
+        EMPTY_PRIMARY_FILTERS, EMPTY_MAP, -123l, store.getEntity(entityId2,
+        entityType2, EnumSet.allOf(Field.class)));
+
+    verifyEntityInfo(entityId4, entityType4, EMPTY_EVENTS, EMPTY_REL_ENTITIES,
+        EMPTY_PRIMARY_FILTERS, EMPTY_MAP, 42l, store.getEntity(entityId4,
+        entityType4, EnumSet.allOf(Field.class)));
+
+    // entityId5 was only referenced as a related entity of entityId4,
+    // never put directly, yet is expected to be retrievable.
+    verifyEntityInfo(entityId5, entityType5, EMPTY_EVENTS, relEntityMap2,
+        EMPTY_PRIMARY_FILTERS, EMPTY_MAP, 42l, store.getEntity(entityId5,
+        entityType5, EnumSet.allOf(Field.class)));
+
+    // test getting single fields
+    verifyEntityInfo(entityId1, entityType1, events1, null, null, null,
+        store.getEntity(entityId1, entityType1, EnumSet.of(Field.EVENTS)));
+
+    verifyEntityInfo(entityId1, entityType1, Collections.singletonList(ev2),
+        null, null, null, store.getEntity(entityId1, entityType1,
+        EnumSet.of(Field.LAST_EVENT_ONLY)));
+
+    // A null field set is expected to behave like requesting all fields.
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, store.getEntity(entityId1b, entityType1,
+        null));
+
+    verifyEntityInfo(entityId1, entityType1, null, null, primaryFilters, null,
+        store.getEntity(entityId1, entityType1,
+            EnumSet.of(Field.PRIMARY_FILTERS)));
+
+    verifyEntityInfo(entityId1, entityType1, null, null, null, otherInfo,
+        store.getEntity(entityId1, entityType1, EnumSet.of(Field.OTHER_INFO)));
+
+    verifyEntityInfo(entityId2, entityType2, null, relEntityMap, null, null,
+        store.getEntity(entityId2, entityType2,
+            EnumSet.of(Field.RELATED_ENTITIES)));
+  }
+
+  // ---------------------------------------------------------------------
+  // Query helpers: thin wrappers over store.getEntities(...) that supply
+  // null for every argument the particular scenario does not exercise.
+  // ---------------------------------------------------------------------
+
+  // All entities of a type, no constraints.
+  protected List<TimelineEntity> getEntities(String entityType)
+      throws IOException {
+    return store.getEntities(entityType, null, null, null, null, null,
+        null, null, null).getEntities();
+  }
+
+  // Filtered by a single primary filter.
+  protected List<TimelineEntity> getEntitiesWithPrimaryFilter(
+      String entityType, NameValuePair primaryFilter) throws IOException {
+    return store.getEntities(entityType, null, null, null, null, null,
+        primaryFilter, null, null).getEntities();
+  }
+
+  // Paged: results starting at the given entity id.
+  protected List<TimelineEntity> getEntitiesFromId(String entityType,
+      String fromId) throws IOException {
+    return store.getEntities(entityType, null, null, null, fromId, null,
+        null, null, null).getEntities();
+  }
+
+  // Only entities inserted at or after the given timestamp.
+  protected List<TimelineEntity> getEntitiesFromTs(String entityType,
+      long fromTs) throws IOException {
+    return store.getEntities(entityType, null, null, null, null, fromTs,
+        null, null, null).getEntities();
+  }
+
+  // fromId paging combined with a primary filter.
+  protected List<TimelineEntity> getEntitiesFromIdWithPrimaryFilter(
+      String entityType, NameValuePair primaryFilter, String fromId)
+      throws IOException {
+    return store.getEntities(entityType, null, null, null, fromId, null,
+        primaryFilter, null, null).getEntities();
+  }
+
+  // fromTs constraint combined with a primary filter.
+  protected List<TimelineEntity> getEntitiesFromTsWithPrimaryFilter(
+      String entityType, NameValuePair primaryFilter, long fromTs)
+      throws IOException {
+    return store.getEntities(entityType, null, null, null, null, fromTs,
+        primaryFilter, null, null).getEntities();
+  }
+
+  // fromId paging bounded by a start-time window end.
+  protected List<TimelineEntity> getEntitiesFromIdWithWindow(String entityType,
+      Long windowEnd, String fromId) throws IOException {
+    return store.getEntities(entityType, null, null, windowEnd, fromId, null,
+        null, null, null).getEntities();
+  }
+
+  // fromId paging bounded by a window end plus a primary filter.
+  protected List<TimelineEntity> getEntitiesFromIdWithPrimaryFilterAndWindow(
+      String entityType, Long windowEnd, String fromId,
+      NameValuePair primaryFilter) throws IOException {
+    return store.getEntities(entityType, null, null, windowEnd, fromId, null,
+        primaryFilter, null, null).getEntities();
+  }
+
+  // Primary filter plus a set of secondary filters.
+  protected List<TimelineEntity> getEntitiesWithFilters(String entityType,
+      NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters)
+      throws IOException {
+    return store.getEntities(entityType, null, null, null, null, null,
+        primaryFilter, secondaryFilters, null).getEntities();
+  }
+
+  // Full control over limit, start-time window, primary filter and fields.
+  protected List<TimelineEntity> getEntities(String entityType, Long limit,
+      Long windowStart, Long windowEnd, NameValuePair primaryFilter,
+      EnumSet<Field> fields) throws IOException {
+    return store.getEntities(entityType, limit, windowStart, windowEnd, null,
+        null, primaryFilter, null, fields).getEntities();
+  }
+
+  /**
+   * Verifies multi-entity retrieval: nonexistent types return empty
+   * results, known types return all their entities in order, and the
+   * limit / windowStart / windowEnd parameters constrain the result set.
+   */
+  public void testGetEntities() throws IOException {
+    // test getting entities
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntities("type_0").size());
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntities("type_3").size());
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntities("type_6").size());
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntitiesWithPrimaryFilter("type_0", userFilter).size());
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntitiesWithPrimaryFilter("type_3", userFilter).size());
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntitiesWithPrimaryFilter("type_6", userFilter).size());
+
+    // Expected order: entityId1 before entityId1b.
+    List<TimelineEntity> entities = getEntities("type_1");
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntities("type_2");
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId2, entityType2, events2, relEntityMap,
+        EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0));
+
+    // limit = 1 returns only the first entity.
+    entities = getEntities("type_1", 1l, null, null, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    entities = getEntities("type_1", 1l, 0l, null, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    // windowStart beyond/at both start times (123, 789?) excludes everything.
+    entities = getEntities("type_1", null, 234l, null, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", null, 123l, null, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", null, 234l, 345l, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(0, entities.size());
+
+    // windowEnd at or after the start times includes both entities.
+    entities = getEntities("type_1", null, null, 345l, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntities("type_1", null, null, 123l, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+  }
+
+  /**
+   * Verifies fromId paging: results start at the given entity id
+   * (inclusive), an unknown fromId yields no results, and paging composes
+   * with window-end bounds and primary filters.
+   */
+  public void testGetEntitiesWithFromId() throws IOException {
+    // Paging from the first id returns both entities, in order.
+    List<TimelineEntity> entities = getEntitiesFromId("type_1", entityId1);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    // Paging from the second id returns only the tail of the listing.
+    entities = getEntitiesFromId("type_1", entityId1b);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    entities = getEntitiesFromIdWithWindow("type_1", 0l, entityId1);
+    assertEquals(0, entities.size());
+
+    // Unknown fromId: nothing is returned.
+    entities = getEntitiesFromId("type_2", "a");
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesFromId("type_2", entityId2);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId2, entityType2, events2, relEntityMap,
+        EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0));
+
+    // type_2's start time is -123, so a windowEnd of -456 excludes it
+    // while a windowEnd of 0 includes it.
+    entities = getEntitiesFromIdWithWindow("type_2", -456l, null);
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesFromIdWithWindow("type_2", -456l, "a");
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesFromIdWithWindow("type_2", 0l, null);
+    assertEquals(1, entities.size());
+
+    entities = getEntitiesFromIdWithWindow("type_2", 0l, entityId2);
+    assertEquals(1, entities.size());
+
+    // same tests with primary filters
+    entities = getEntitiesFromIdWithPrimaryFilter("type_1", userFilter,
+        entityId1);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesFromIdWithPrimaryFilter("type_1", userFilter,
+        entityId1b);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    entities = getEntitiesFromIdWithPrimaryFilterAndWindow("type_1", 0l,
+        entityId1, userFilter);
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesFromIdWithPrimaryFilter("type_2", userFilter, "a");
+    assertEquals(0, entities.size());
+  }
+
+  /**
+   * Verifies insert-time filtering: a fromTs taken before any insert
+   * matches nothing, a fromTs taken after all inserts matches everything,
+   * and re-inserting the same data does not overwrite the original
+   * insert timestamps.
+   */
+  public void testGetEntitiesWithFromTs() throws IOException {
+    // beforeTs predates every insert, so all fromTs queries come back empty.
+    assertEquals(0, getEntitiesFromTs("type_1", beforeTs).size());
+    assertEquals(0, getEntitiesFromTs("type_2", beforeTs).size());
+    assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        beforeTs).size());
+    long afterTs = System.currentTimeMillis();
+    assertEquals(2, getEntitiesFromTs("type_1", afterTs).size());
+    assertEquals(1, getEntitiesFromTs("type_2", afterTs).size());
+    assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        afterTs).size());
+    assertEquals(2, getEntities("type_1").size());
+    assertEquals(1, getEntities("type_2").size());
+    assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
+    // check insert time is not overwritten
+    // (loadTestData() resets the beforeTs field, so keep the original)
+    long beforeTs = this.beforeTs;
+    loadTestData();
+    assertEquals(0, getEntitiesFromTs("type_1", beforeTs).size());
+    assertEquals(0, getEntitiesFromTs("type_2", beforeTs).size());
+    assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        beforeTs).size());
+    assertEquals(2, getEntitiesFromTs("type_1", afterTs).size());
+    assertEquals(1, getEntitiesFromTs("type_2", afterTs).size());
+    assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        afterTs).size());
+  }
+
+  /**
+   * Verifies primary-filter queries: an unmatched filter yields nothing,
+   * each stored filter value (string, int, long) matches both type_1
+   * entities, and filters compose with limit and start-time windows.
+   */
+  public void testGetEntitiesWithPrimaryFilters() throws IOException {
+    // test using primary filter
+    assertEquals("nonzero entities size for primary filter", 0,
+        getEntitiesWithPrimaryFilter("type_1",
+            new NameValuePair("none", "none")).size());
+    assertEquals("nonzero entities size for primary filter", 0,
+        getEntitiesWithPrimaryFilter("type_2",
+            new NameValuePair("none", "none")).size());
+    assertEquals("nonzero entities size for primary filter", 0,
+        getEntitiesWithPrimaryFilter("type_3",
+            new NameValuePair("none", "none")).size());
+
+    List<TimelineEntity> entities = getEntitiesWithPrimaryFilter("type_1",
+        userFilter);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    // Numeric filters cover int-valued, long-valued and string values
+    // that merely look numeric ("123abc").
+    entities = getEntitiesWithPrimaryFilter("type_1", numericFilter1);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesWithPrimaryFilter("type_1", numericFilter2);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesWithPrimaryFilter("type_1", numericFilter3);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    // type_2 has no primary filters, so the user filter matches nothing.
+    entities = getEntitiesWithPrimaryFilter("type_2", userFilter);
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", 1l, null, null, userFilter, null);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    entities = getEntities("type_1", 1l, 0l, null, userFilter, null);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    entities = getEntities("type_1", null, 234l, null, userFilter, null);
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", null, 234l, 345l, userFilter, null);
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", null, null, 345l, userFilter, null);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+  }
+
+  /**
+   * Verifies secondary (other-info) filters: filters whose values match
+   * the stored info select both type_1 entities, alone or combined with a
+   * primary filter; any non-matching value excludes everything.
+   */
+  public void testGetEntitiesWithSecondaryFilters() throws IOException {
+    // test using secondary filter
+    List<TimelineEntity> entities = getEntitiesWithFilters("type_1", null,
+        goodTestingFilters);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesWithFilters("type_1", userFilter, goodTestingFilters);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    // "user" exists but with value "username", so "none" matches nothing.
+    entities = getEntitiesWithFilters("type_1", null,
+        Collections.singleton(new NameValuePair("user", "none")));
+    assertEquals(0, entities.size());
+
+    // badTestingFilters contains status=FINISHED (stored value is RUNNING).
+    entities = getEntitiesWithFilters("type_1", null, badTestingFilters);
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesWithFilters("type_1", userFilter, badTestingFilters);
+    assertEquals(0, entities.size());
+  }
+
+  /**
+   * Verifies event timeline retrieval: per-entity event lists, the limit
+   * argument, time-window bounds on event timestamps, event-type
+   * filtering, and that timelines are only returned for ids of the
+   * requested entity type.
+   */
+  public void testGetEvents() throws IOException {
+    // test getting entity timelines
+    SortedSet<String> sortedSet = new TreeSet<String>();
+    sortedSet.add(entityId1);
+    List<EventsOfOneEntity> timelines =
+        store.getEntityTimelines(entityType1, sortedSet, null, null,
+            null, null).getAllEvents();
+    assertEquals(1, timelines.size());
+    // Events are expected newest first: ev2 (ts 456) before ev1 (ts 123).
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2, ev1);
+
+    sortedSet.add(entityId1b);
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        null, null, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2, ev1);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2, ev1);
+
+    // limit = 1 keeps only the most recent event per entity.
+    timelines = store.getEntityTimelines(entityType1, sortedSet, 1l,
+        null, null, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
+
+    // windowStart bounds below: only events at/after the bound remain.
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        345l, null, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
+
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        123l, null, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
+
+    // windowEnd bounds above: only events at/before the bound remain.
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        null, 345l, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev1);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev1);
+
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        null, 123l, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev1);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev1);
+
+    // Filter by event type.
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        null, null, Collections.singleton("end_event")).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
+
+    // Extra ids of other types in the set are ignored for type_2: only
+    // entityId2's timeline comes back.
+    sortedSet.add(entityId2);
+    timelines = store.getEntityTimelines(entityType2, sortedSet, null,
+        null, null, null).getAllEvents();
+    assertEquals(1, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId2, entityType2, ev3, ev4);
+  }
+
+  /**
+   * Verify an entity's fields and, additionally, its start time.
+   */
+  protected static void verifyEntityInfo(String entityId, String entityType,
+      List<TimelineEvent> events, Map<String, Set<String>> relatedEntities,
+      Map<String, Set<Object>> primaryFilters, Map<String, Object> otherInfo,
+      Long startTime, TimelineEntity retrievedEntityInfo) {
+    // Delegate the common field checks first, then compare the start time.
+    verifyEntityInfo(entityId, entityType, events, relatedEntities,
+        primaryFilters, otherInfo, retrievedEntityInfo);
+    Long retrievedStartTime = retrievedEntityInfo.getStartTime();
+    assertEquals(startTime, retrievedStartTime);
+  }
+
+  /**
+   * Verify a single retrieved entity against the expected field values.
+   * A null entityId means the lookup was expected to miss, so the
+   * retrieved entity itself must be null. For each remaining field a
+   * null expectation asserts the field is absent; otherwise equality
+   * is required.
+   */
+  protected static void verifyEntityInfo(String entityId, String entityType,
+      List<TimelineEvent> events, Map<String, Set<String>> relatedEntities,
+      Map<String, Set<Object>> primaryFilters, Map<String, Object> otherInfo,
+      TimelineEntity retrievedEntityInfo) {
+    if (entityId == null) {
+      assertNull(retrievedEntityInfo);
+      return;
+    }
+    assertEquals(entityId, retrievedEntityInfo.getEntityId());
+    assertEquals(entityType, retrievedEntityInfo.getEntityType());
+    if (events == null) {
+      assertNull(retrievedEntityInfo.getEvents());
+    } else {
+      assertEquals(events, retrievedEntityInfo.getEvents());
+    }
+    if (relatedEntities == null) {
+      assertNull(retrievedEntityInfo.getRelatedEntities());
+    } else {
+      assertEquals(relatedEntities, retrievedEntityInfo.getRelatedEntities());
+    }
+    if (primaryFilters == null) {
+      assertNull(retrievedEntityInfo.getPrimaryFilters());
+    } else {
+      // assertEquals instead of assertTrue(expected.equals(actual)):
+      // consistent with the other branches and produces an
+      // expected-vs-actual failure message.
+      assertEquals(primaryFilters, retrievedEntityInfo.getPrimaryFilters());
+    }
+    if (otherInfo == null) {
+      assertNull(retrievedEntityInfo.getOtherInfo());
+    } else {
+      assertEquals(otherInfo, retrievedEntityInfo.getOtherInfo());
+    }
+  }
+
+  /**
+   * Verify a retrieved per-entity event list: owner id, type, and the
+   * exact sequence of expected events.
+   */
+  private static void verifyEntityTimeline(
+      EventsOfOneEntity retrievedEvents, String entityId, String entityType,
+      TimelineEvent... actualEvents) {
+    assertEquals(entityId, retrievedEvents.getEntityId());
+    assertEquals(entityType, retrievedEvents.getEntityType());
+    List<TimelineEvent> returned = retrievedEvents.getEvents();
+    assertEquals(actualEvents.length, returned.size());
+    int idx = 0;
+    for (TimelineEvent expected : actualEvents) {
+      assertEquals(expected, returned.get(idx++));
+    }
+  }
+
+  /**
+   * Build a TimelineEntity populated with the supplied fields.
+   */
+  protected static TimelineEntity createEntity(String entityId, String entityType,
+      Long startTime, List<TimelineEvent> events,
+      Map<String, Set<String>> relatedEntities,
+      Map<String, Set<Object>> primaryFilters,
+      Map<String, Object> otherInfo) {
+    TimelineEntity entity = new TimelineEntity();
+    entity.setEntityId(entityId);
+    entity.setEntityType(entityType);
+    entity.setStartTime(startTime);
+    entity.setEvents(events);
+    if (relatedEntities == null) {
+      entity.setRelatedEntities(null);
+    } else {
+      // Register each related entity individually rather than installing
+      // the caller's map wholesale.
+      for (Entry<String, Set<String>> related : relatedEntities.entrySet()) {
+        for (String relatedId : related.getValue()) {
+          entity.addRelatedEntity(related.getKey(), relatedId);
+        }
+      }
+    }
+    entity.setPrimaryFilters(primaryFilters);
+    entity.setOtherInfo(otherInfo);
+    return entity;
+  }
+
+  /**
+   * Build a TimelineEvent with the given timestamp, type and info map.
+   */
+  private static TimelineEvent createEvent(long timestamp, String type, Map<String,
+      Object> info) {
+    TimelineEvent result = new TimelineEvent();
+    result.setEventType(type);
+    result.setTimestamp(timestamp);
+    result.setEventInfo(info);
+    return result;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/865d187e/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java
new file mode 100644
index 0000000..0e65a50
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java
@@ -0,0 +1,182 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
+
+import static org.apache.hadoop.yarn.webapp.Params.TITLE;
+import static org.mockito.Mockito.mock;
+import junit.framework.Assert;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.api.ApplicationContext;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManager;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManagerImpl;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStore;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStoreTestUtils;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.MemoryApplicationHistoryStore;
+import org.apache.hadoop.yarn.util.StringHelper;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
+import org.apache.hadoop.yarn.webapp.test.WebAppTests;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.inject.Injector;
+
+/**
+ * Tests for the application history server web UI: the controller index
+ * and the application, application-attempt and container pages, each
+ * rendered against a history manager backed by an in-memory store.
+ */
+public class TestAHSWebApp extends ApplicationHistoryStoreTestUtils {
+
+  /** Lets callers substitute a different backing store before a test. */
+  public void setApplicationHistoryStore(ApplicationHistoryStore store) {
+    this.store = store;
+  }
+
+  @Before
+  public void setup() {
+    // Fresh in-memory store per test so no state leaks between tests.
+    store = new MemoryApplicationHistoryStore();
+  }
+
+  @Test
+  public void testAppControllerIndex() throws Exception {
+    ApplicationHistoryManager ahManager = mock(ApplicationHistoryManager.class);
+    Injector injector =
+        WebAppTests.createMockInjector(ApplicationHistoryManager.class,
+          ahManager);
+    AHSController controller = injector.getInstance(AHSController.class);
+    controller.index();
+    // The index action must install the expected page title.
+    Assert
+      .assertEquals("Application History", controller.get(TITLE, "unknown"));
+  }
+
+  @Test
+  public void testView() throws Exception {
+    Injector injector =
+        WebAppTests.createMockInjector(ApplicationContext.class,
+          mockApplicationHistoryManager(5, 1, 1));
+    AHSView ahsViewInstance = injector.getInstance(AHSView.class);
+
+    // Render with no state filter, then a single state, then a
+    // comma-joined pair of states; each render must complete cleanly.
+    ahsViewInstance.render();
+    WebAppTests.flushOutput(injector);
+
+    ahsViewInstance.set(YarnWebParams.APP_STATE,
+      YarnApplicationState.FAILED.toString());
+    ahsViewInstance.render();
+    WebAppTests.flushOutput(injector);
+
+    ahsViewInstance.set(YarnWebParams.APP_STATE, StringHelper.cjoin(
+      YarnApplicationState.FAILED.toString(), YarnApplicationState.KILLED));
+    ahsViewInstance.render();
+    WebAppTests.flushOutput(injector);
+  }
+
+  @Test
+  public void testAppPage() throws Exception {
+    Injector injector =
+        WebAppTests.createMockInjector(ApplicationContext.class,
+          mockApplicationHistoryManager(1, 5, 1));
+    AppPage appPageInstance = injector.getInstance(AppPage.class);
+
+    // Render without, then with, an application id parameter.
+    appPageInstance.render();
+    WebAppTests.flushOutput(injector);
+
+    appPageInstance.set(YarnWebParams.APPLICATION_ID, ApplicationId
+      .newInstance(0, 1).toString());
+    appPageInstance.render();
+    WebAppTests.flushOutput(injector);
+  }
+
+  @Test
+  public void testAppAttemptPage() throws Exception {
+    Injector injector =
+        WebAppTests.createMockInjector(ApplicationContext.class,
+          mockApplicationHistoryManager(1, 1, 5));
+    AppAttemptPage appAttemptPageInstance =
+        injector.getInstance(AppAttemptPage.class);
+
+    // Render without, then with, an application attempt id parameter.
+    appAttemptPageInstance.render();
+    WebAppTests.flushOutput(injector);
+
+    appAttemptPageInstance.set(YarnWebParams.APPLICATION_ATTEMPT_ID,
+      ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1)
+        .toString());
+    appAttemptPageInstance.render();
+    WebAppTests.flushOutput(injector);
+  }
+
+  @Test
+  public void testContainerPage() throws Exception {
+    Injector injector =
+        WebAppTests.createMockInjector(ApplicationContext.class,
+          mockApplicationHistoryManager(1, 1, 1));
+    ContainerPage containerPageInstance =
+        injector.getInstance(ContainerPage.class);
+
+    // Render without, then with, a container id parameter.
+    containerPageInstance.render();
+    WebAppTests.flushOutput(injector);
+
+    containerPageInstance.set(
+      YarnWebParams.CONTAINER_ID,
+      ContainerId
+        .newInstance(
+          ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1),
+          1).toString());
+    containerPageInstance.render();
+    WebAppTests.flushOutput(injector);
+  }
+
+  /**
+   * Build a history manager whose store is pre-populated with the given
+   * number of applications, attempts per application and containers per
+   * attempt; start and finish data are written for every entity.
+   */
+  ApplicationHistoryManager mockApplicationHistoryManager(int numApps,
+      int numAppAttempts, int numContainers) throws Exception {
+    ApplicationHistoryManager ahManager =
+        new MockApplicationHistoryManagerImpl(store);
+    for (int i = 1; i <= numApps; ++i) {
+      ApplicationId appId = ApplicationId.newInstance(0, i);
+      writeApplicationStartData(appId);
+      for (int j = 1; j <= numAppAttempts; ++j) {
+        ApplicationAttemptId appAttemptId =
+            ApplicationAttemptId.newInstance(appId, j);
+        writeApplicationAttemptStartData(appAttemptId);
+        for (int k = 1; k <= numContainers; ++k) {
+          ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
+          writeContainerStartData(containerId);
+          writeContainerFinishData(containerId);
+        }
+        writeApplicationAttemptFinishData(appAttemptId);
+      }
+      writeApplicationFinishData(appId);
+    }
+    return ahManager;
+  }
+
+  /** History manager stub that serves the store given to its constructor. */
+  class MockApplicationHistoryManagerImpl extends ApplicationHistoryManagerImpl {
+
+    // Captured explicitly: the constructor argument was previously ignored
+    // and the outer class's store field was picked up implicitly, which
+    // only worked because the sole call site passed that same field.
+    private final ApplicationHistoryStore mockStore;
+
+    public MockApplicationHistoryManagerImpl(ApplicationHistoryStore store) {
+      super();
+      // Assign before init(): createApplicationHistoryStore() is
+      // presumably invoked during service initialization -- confirm
+      // against ApplicationHistoryManagerImpl.
+      this.mockStore = store;
+      init(new YarnConfiguration());
+      start();
+    }
+
+    @Override
+    protected ApplicationHistoryStore createApplicationHistoryStore(
+        Configuration conf) {
+      return mockStore;
+    }
+  }
+
+}


Mime
View raw message