From: sjlee@apache.org
To: common-commits@hadoop.apache.org
Date: Sun, 10 Jul 2016 15:39:32 -0000
Message-Id: <855f71f0a237419e8435fd23250df355@git.apache.org>
Subject: [2/4] hadoop git commit: Made a number of miscellaneous fixes for javac, javadoc, and checkstyle warnings.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java
index fd5a7f5..a8de759 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java
@@ -444,17 +444,17 @@ public class TestHBaseTimelineStorage {
     te.addEntity(entity2);
     HBaseTimelineWriterImpl hbi = null;
     try {
-        hbi = new HBaseTimelineWriterImpl(util.getConfiguration());
-        hbi.init(util.getConfiguration());
-        hbi.start();
-        String cluster = "cluster1";
-        String user = "user1";
-        String flow = "some_flow_name";
-        String flowVersion = "AB7822C10F1111";
-        long runid = 1002345678919L;
-        String appName = "application_1231111111_1111";
-        hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
-        hbi.stop();
+      hbi = new HBaseTimelineWriterImpl(util.getConfiguration());
+      hbi.init(util.getConfiguration());
+      hbi.start();
+      String cluster = "cluster1";
+      String user = "user1";
+      String flow = "some_flow_name";
+      String flowVersion = "AB7822C10F1111";
+      long runid = 1002345678919L;
+      String appName = "application_1231111111_1111";
+      hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
+      hbi.stop();
     } finally {
       if (hbi != null) {
         hbi.stop();
@@ -531,7 +531,7 @@ public class TestHBaseTimelineStorage {
       int count = 0;
       for (Result rr = resultScanner.next(); rr != null;
           rr = resultScanner.next()) {
-          count++;
+        count++;
       }
       // there should be no rows written
       // no exceptions thrown during write
@@ -1173,7 +1173,7 @@ public class TestHBaseTimelineStorage {
       for (TimelineEvent e : events) {
         assertEquals(eventId, e.getId());
         assertEquals(expTs, Long.valueOf(e.getTimestamp()));
-          Map<String, Object> info = e.getInfo();
+        Map<String, Object> info = e.getInfo();
         assertEquals(1, info.size());
         for (Map.Entry<String, Object> infoEntry : info.entrySet()) {
           assertEquals(expKey, infoEntry.getKey());
@@ -1249,7 +1249,7 @@ public class TestHBaseTimelineStorage {
       // the qualifier is a compound key
       // hence match individual values
       assertEquals(eventId, eventColumnName.getId());
-      assertEquals(expTs,eventColumnName.getTimestamp());
+      assertEquals(expTs, eventColumnName.getTimestamp());
       // key must be empty
       assertNull(eventColumnName.getInfoKey());
       Object value = e.getValue();
@@ -1280,7 +1280,7 @@ public class TestHBaseTimelineStorage {
       for (TimelineEvent e : events) {
         assertEquals(eventId, e.getId());
         assertEquals(expTs, Long.valueOf(e.getTimestamp()));
-          Map<String, Object> info = e.getInfo();
+        Map<String, Object> info = e.getInfo();
         assertTrue(info == null || info.isEmpty());
       }
     } finally {
@@ -1337,7 +1337,7 @@ public class TestHBaseTimelineStorage {
       for (TimelineEvent e : events) {
         assertEquals(eventId, e.getId());
         assertEquals(expTs, e.getTimestamp());
-          Map<String, Object> info = e.getInfo();
+        Map<String, Object> info = e.getInfo();
         assertEquals(1, info.size());
         for (Map.Entry<String, Object> infoEntry : info.entrySet()) {
           assertEquals(expKey, infoEntry.getKey());
@@ -1417,14 +1417,14 @@ public class TestHBaseTimelineStorage {
   public void testReadEntities() throws Exception {
     TimelineEntity entity = reader.getEntity(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", "hello"),
+        1002345678919L, "application_1231111111_1111", "world", "hello"),
         new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
     assertNotNull(entity);
     assertEquals(3, entity.getConfigs().size());
     assertEquals(1, entity.getIsRelatedToEntities().size());
     Set<TimelineEntity> entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world",
+        1002345678919L, "application_1231111111_1111", "world",
         null), new TimelineEntityFilters(),
         new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
     assertEquals(3, entities.size());
@@ -1460,7 +1460,7 @@ public class TestHBaseTimelineStorage {
   public void testFilterEntitiesByCreatedTime() throws Exception {
     Set<TimelineEntity> entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(null, 1425016502000L, 1425016502040L, null,
         null, null, null, null, null),
         new TimelineDataToRetrieve());
     assertEquals(3, entities.size());
@@ -1468,12 +1468,12 @@ public class TestHBaseTimelineStorage {
       if (!entity.getId().equals("hello") &&
           !entity.getId().equals("hello1") &&
           !entity.getId().equals("hello2")) {
         Assert.fail("Entities with ids' hello, hello1 and hello2 should be" +
-          " present");
+            " present");
       }
     }
     entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(null, 1425016502015L, null, null, null, null,
         null, null, null),
         new TimelineDataToRetrieve());
     assertEquals(2, entities.size());
@@ -1485,15 +1485,15 @@ public class TestHBaseTimelineStorage {
     }
     entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(null, null, 1425016502015L, null, null, null,
         null, null, null),
         new TimelineDataToRetrieve());
-      assertEquals(1, entities.size());
-      for (TimelineEntity entity : entities) {
-        if (!entity.getId().equals("hello")) {
-          Assert.fail("Entity with id hello should be present");
-        }
-      }
+    assertEquals(1, entities.size());
+    for (TimelineEntity entity : entities) {
+      if (!entity.getId().equals("hello")) {
+        Assert.fail("Entity with id hello should be present");
+      }
+    }
   }

   @Test
@@ -1518,7 +1518,7 @@ public class TestHBaseTimelineStorage {
         new HashSet<String>(Arrays.asList("relatedto5"))));
     Set<TimelineEntity> entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(null, null, null, relatesTo, isRelatedTo,
         null, null, null, eventFilter),
         new TimelineDataToRetrieve());
     assertEquals(1, entities.size());
@@ -1547,7 +1547,7 @@ public class TestHBaseTimelineStorage {
         TimelineCompareOp.NOT_EQUAL, "end_event"));
     Set<TimelineEntity> entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(null, null, null, null, null, null, null,
         null, ef),
         new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
@@ -1568,7 +1568,7 @@ public class TestHBaseTimelineStorage {
         TimelineCompareOp.NOT_EQUAL, "end_event"));
     entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(null, null, null, null, null, null, null,
         null, ef1),
         new TimelineDataToRetrieve());
@@ -1587,7 +1587,7 @@ public class TestHBaseTimelineStorage {
         TimelineCompareOp.NOT_EQUAL, "end_event"));
     entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(null, null, null, null, null, null, null,
         null, ef2),
         new TimelineDataToRetrieve());
@@ -1609,7 +1609,7 @@ public class TestHBaseTimelineStorage {
         TimelineCompareOp.EQUAL, "dummy_event"));
     entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(null, null, null,
         null, null, null, null, null, ef3),
         new TimelineDataToRetrieve());
@@ -1626,7 +1626,7 @@ public class TestHBaseTimelineStorage {
     TimelineFilterList ef4 = new TimelineFilterList(Operator.OR, list1, list2);
     entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(null, null, null, null, null, null, null,
         null, ef4),
         new TimelineDataToRetrieve());
@@ -1647,7 +1647,7 @@ public class TestHBaseTimelineStorage {
         TimelineCompareOp.NOT_EQUAL, "end_event"));
     entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(null, null, null, null, null, null, null,
         null, ef5),
         new TimelineDataToRetrieve());
@@ -1656,8 +1656,8 @@ public class TestHBaseTimelineStorage {
     for (TimelineEntity timelineEntity : entities) {
       eventCnt += timelineEntity.getEvents().size();
       if (!timelineEntity.getId().equals("hello")) {
-          Assert.fail("Entity id should have been hello");
-        }
+        Assert.fail("Entity id should have been hello");
+      }
     }
     assertEquals(0, eventCnt);
   }
@@ -1673,7 +1673,7 @@ public class TestHBaseTimelineStorage {
         new HashSet<String>(Arrays.asList("relatedto4"))));
     Set<TimelineEntity> entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(null, null, null, null, irt, null, null,
         null, null),
         new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
@@ -1822,7 +1822,7 @@ public class TestHBaseTimelineStorage {
         new HashSet<String>(Arrays.asList("relatesto4"))));
     Set<TimelineEntity> entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(null, null, null, rt, null, null, null,
         null, null),
         new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
@@ -2000,7 +2000,7 @@ public class TestHBaseTimelineStorage {
   public void testReadEntitiesDefaultView() throws Exception {
     TimelineEntity e1 = reader.getEntity(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", "hello"),
+        1002345678919L, "application_1231111111_1111", "world", "hello"),
         new TimelineDataToRetrieve());
     assertNotNull(e1);
     assertTrue(e1.getInfo().isEmpty() && e1.getConfigs().isEmpty() &&
@@ -2008,7 +2008,7 @@ public class TestHBaseTimelineStorage {
         e1.getRelatesToEntities().isEmpty());
     Set<TimelineEntity> es1 = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(),
         new TimelineDataToRetrieve());
     assertEquals(3, es1.size());
@@ -2023,7 +2023,7 @@ public class TestHBaseTimelineStorage {
   public void testReadEntitiesByFields() throws Exception {
     TimelineEntity e1 = reader.getEntity(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", "hello"),
+        1002345678919L, "application_1231111111_1111", "world", "hello"),
         new TimelineDataToRetrieve(
         null, null, EnumSet.of(Field.INFO, Field.CONFIGS), null));
     assertNotNull(e1);
@@ -2031,7 +2031,7 @@ public class TestHBaseTimelineStorage {
     assertEquals(0, e1.getIsRelatedToEntities().size());
     Set<TimelineEntity> es1 = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(),
         new TimelineDataToRetrieve(
         null, null, EnumSet.of(Field.IS_RELATED_TO, Field.METRICS), null));
@@ -2056,13 +2056,13 @@ public class TestHBaseTimelineStorage {
         new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "cfg_"));
     TimelineEntity e1 = reader.getEntity(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", "hello"),
+        1002345678919L, "application_1231111111_1111", "world", "hello"),
         new TimelineDataToRetrieve(list, null, null, null));
     assertNotNull(e1);
     assertEquals(1, e1.getConfigs().size());
     Set<TimelineEntity> es1 = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(),
         new TimelineDataToRetrieve(list, null, null, null));
     int cfgCnt = 0;
@@ -2092,7 +2092,7 @@ public class TestHBaseTimelineStorage {
         new TimelineFilterList(Operator.OR, list1, list2);
     Set<TimelineEntity> entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(null, null, null, null, null, null,
         confFilterList, null, null),
         new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS),
@@ -2106,7 +2106,7 @@ public class TestHBaseTimelineStorage {
     entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(null, null, null, null, null, null,
         confFilterList, null, null),
         new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
@@ -2122,7 +2122,7 @@ public class TestHBaseTimelineStorage {
         TimelineCompareOp.NOT_EQUAL, "cfg_param1", "value1"));
     entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(null, null, null, null, null, null,
         confFilterList1, null, null),
         new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS),
@@ -2141,7 +2141,7 @@ public class TestHBaseTimelineStorage {
         TimelineCompareOp.NOT_EQUAL, "config_param2", "value2"));
     entities = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "some_flow_name",
-        1002345678919L, "application_1231111111_1111","world", null),
+        1002345678919L, "application_1231111111_1111", "world", null),
         new TimelineEntityFilters(null, null, null, null, null, null,
         confFilterList2, null, null),
         new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS),
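For orientation, here is the call shape these hunks keep re-wrapping, reduced to a minimal sketch. The constructors and argument order are exactly as they appear in the hunks above; the package locations of TimelineReaderContext, TimelineEntityFilters, TimelineDataToRetrieve and Field, and the availability of an initialized reader, are assumptions in this sketch rather than something the diff spells out.

    // Sketch only: "reader" is assumed to be a started timeline reader
    // implementation such as the HBase-backed one exercised by this test.
    // The context pins down one row: cluster, user, flow name, flow run id,
    // app id, entity type, entity id (null id means "all of that type").
    TimelineEntity entity = reader.getEntity(
        new TimelineReaderContext("cluster1", "user1", "some_flow_name",
            1002345678919L, "application_1231111111_1111", "world", "hello"),
        new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));

    Set<TimelineEntity> entities = reader.getEntities(
        new TimelineReaderContext("cluster1", "user1", "some_flow_name",
            1002345678919L, "application_1231111111_1111", "world", null),
        new TimelineEntityFilters(),   // no filters: every "world" entity
        new TimelineDataToRetrieve()); // default view: minimal fields only

The no-arg TimelineEntityFilters and TimelineDataToRetrieve give the unfiltered, minimal-field behavior that testReadEntitiesDefaultView checks; passing EnumSet.of(Field.ALL) pulls configs, metrics, events and relationships as well.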
"application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList3, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), @@ -2165,7 +2165,7 @@ public class TestHBaseTimelineStorage { TimelineCompareOp.NOT_EQUAL, "dummy_config", "value1")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList4, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), @@ -2177,7 +2177,7 @@ public class TestHBaseTimelineStorage { TimelineCompareOp.NOT_EQUAL, "dummy_config", "value1", false)); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList5, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.CONFIGS), @@ -2195,7 +2195,7 @@ public class TestHBaseTimelineStorage { new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "cfg_")); Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList, null, null), new TimelineDataToRetrieve(list, null, null, null)); @@ -2226,7 +2226,7 @@ public class TestHBaseTimelineStorage { new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "config_")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, confFilterList1, null, null), new TimelineDataToRetrieve(confsToRetrieve, null, null, null)); @@ -2237,7 +2237,7 @@ public class TestHBaseTimelineStorage { for (String confKey : entity.getConfigs().keySet()) { assertTrue("Config key returned should start with config_", confKey.startsWith("config_")); - } + } } assertEquals(2, cfgCnt); } @@ -2249,13 +2249,13 @@ public class TestHBaseTimelineStorage { new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "MAP1_")); TimelineEntity e1 = reader.getEntity( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", "hello"), + 1002345678919L, "application_1231111111_1111", "world", "hello"), new TimelineDataToRetrieve(null, list, null, null)); assertNotNull(e1); assertEquals(1, e1.getMetrics().size()); Set es1 = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(), new TimelineDataToRetrieve(null, list, null, null)); int metricCnt = 0; @@ -2283,7 +2283,7 @@ public class TestHBaseTimelineStorage { new TimelineFilterList(Operator.OR, list1, list2); Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 
1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), @@ -2297,7 +2297,7 @@ public class TestHBaseTimelineStorage { entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null)); @@ -2315,7 +2315,7 @@ public class TestHBaseTimelineStorage { TimelineCompareOp.NOT_EQUAL, "MAP1_BYTES", 30)); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList1, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), @@ -2334,7 +2334,7 @@ public class TestHBaseTimelineStorage { TimelineCompareOp.NOT_EQUAL, "MAP1_BYTES", 30)); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList2, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), @@ -2346,7 +2346,7 @@ public class TestHBaseTimelineStorage { TimelineCompareOp.EQUAL, "dummy_metric", 5)); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList3, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), @@ -2358,7 +2358,7 @@ public class TestHBaseTimelineStorage { TimelineCompareOp.NOT_EQUAL, "dummy_metric", 5)); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList4, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), @@ -2370,7 +2370,7 @@ public class TestHBaseTimelineStorage { TimelineCompareOp.NOT_EQUAL, "dummy_metric", 5, false)); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList5, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.METRICS), @@ -2388,7 +2388,7 @@ public class TestHBaseTimelineStorage { new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "MAP1_")); Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, 
"application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList, null), new TimelineDataToRetrieve(null, list, null, null)); @@ -2417,7 +2417,7 @@ public class TestHBaseTimelineStorage { new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "MAP1_")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, null, null, metricFilterList1, null), new TimelineDataToRetrieve( @@ -2436,9 +2436,9 @@ public class TestHBaseTimelineStorage { assertEquals(2, metricCnt); entities = reader.getEntities(new TimelineReaderContext("cluster1", "user1", - "some_flow_name", 1002345678919L, "application_1231111111_1111","world", - null), new TimelineEntityFilters(null, null, null, null, null, null, - null, metricFilterList1, null), new TimelineDataToRetrieve(null, + "some_flow_name", 1002345678919L, "application_1231111111_1111", + "world", null), new TimelineEntityFilters(null, null, null, null, null, + null, null, metricFilterList1, null), new TimelineDataToRetrieve(null, metricsToRetrieve, EnumSet.of(Field.METRICS), Integer.MAX_VALUE)); assertEquals(2, entities.size()); metricCnt = 0; @@ -2471,7 +2471,7 @@ public class TestHBaseTimelineStorage { new TimelineFilterList(Operator.OR, list1, list2); Set entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList, null, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); @@ -2487,7 +2487,7 @@ public class TestHBaseTimelineStorage { TimelineCompareOp.NOT_EQUAL, "infoMapKey1", "infoMapValue1")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList1, null, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); @@ -2505,7 +2505,7 @@ public class TestHBaseTimelineStorage { TimelineCompareOp.NOT_EQUAL, "infoMapKey3", 71.4)); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList2, null, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); @@ -2516,7 +2516,7 @@ public class TestHBaseTimelineStorage { TimelineCompareOp.EQUAL, "dummy_info", "some_value")); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList3, null, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); @@ -2527,7 +2527,7 @@ public class TestHBaseTimelineStorage { TimelineCompareOp.NOT_EQUAL, "dummy_info", "some_value")); entities = reader.getEntities( new 
TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList4, null, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); @@ -2538,7 +2538,7 @@ public class TestHBaseTimelineStorage { TimelineCompareOp.NOT_EQUAL, "dummy_info", "some_value", false)); entities = reader.getEntities( new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, "application_1231111111_1111","world", null), + 1002345678919L, "application_1231111111_1111", "world", null), new TimelineEntityFilters(null, null, null, null, null, infoFilterList5, null, null, null), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO), null)); @@ -2593,19 +2593,19 @@ public class TestHBaseTimelineStorage { @Test public void testFilterAppsByCreatedTime() throws Exception { Set entities = reader.getEntities( - new TimelineReaderContext("cluster1", "user1", "some_flow_name", - 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), - null), - new TimelineEntityFilters(null, 1425016502000L, 1425016502040L, null, - null, null, null, null, null), - new TimelineDataToRetrieve()); + new TimelineReaderContext("cluster1", "user1", "some_flow_name", + 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), + null), + new TimelineEntityFilters(null, 1425016502000L, 1425016502040L, null, + null, null, null, null, null), + new TimelineDataToRetrieve()); assertEquals(3, entities.size()); for (TimelineEntity entity : entities) { if (!entity.getId().equals("application_1111111111_2222") && !entity.getId().equals("application_1111111111_3333") && !entity.getId().equals("application_1111111111_4444")) { Assert.fail("Entities with ids' application_1111111111_2222, " + - "application_1111111111_3333 and application_1111111111_4444" + + "application_1111111111_3333 and application_1111111111_4444" + " should be present"); } } @@ -2736,8 +2736,8 @@ public class TestHBaseTimelineStorage { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt1, null, null, null, - null), + new TimelineEntityFilters(null, null, null, null, irt1, null, null, + null, null), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); isRelatedToCnt = 0; @@ -2760,8 +2760,8 @@ public class TestHBaseTimelineStorage { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt2, null, null, null, - null), + new TimelineEntityFilters(null, null, null, null, irt2, null, null, + null, null), new TimelineDataToRetrieve()); assertEquals(2, entities.size()); isRelatedToCnt = 0; @@ -2783,8 +2783,8 @@ public class TestHBaseTimelineStorage { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt3, null, null, null, - null), + new TimelineEntityFilters(null, null, null, null, irt3, null, null, + null, null), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); isRelatedToCnt = 0; @@ -2807,8 +2807,8 @@ public class TestHBaseTimelineStorage { new 
TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt4, null, null, null, - null), + new TimelineEntityFilters(null, null, null, null, irt4, null, null, + null, null), new TimelineDataToRetrieve()); assertEquals(0, entities.size()); @@ -2820,8 +2820,8 @@ public class TestHBaseTimelineStorage { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt5, null, null, null, - null), + new TimelineEntityFilters(null, null, null, null, irt5, null, null, + null, null), new TimelineDataToRetrieve()); assertEquals(0, entities.size()); @@ -2841,8 +2841,8 @@ public class TestHBaseTimelineStorage { new TimelineReaderContext("cluster1", "user1", "some_flow_name", 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(), null), - new TimelineEntityFilters(null, null, null, null, irt6, null, null, null, - null), + new TimelineEntityFilters(null, null, null, null, irt6, null, null, + null, null), new TimelineDataToRetrieve()); assertEquals(1, entities.size()); isRelatedToCnt = 0; @@ -3335,8 +3335,8 @@ public class TestHBaseTimelineStorage { for (TimelineEntity timelineEntity : entities) { eventCnt += timelineEntity.getEvents().size(); if (!timelineEntity.getId().equals("application_1111111111_2222")) { - Assert.fail("Entity id should have been application_1111111111_2222"); - } + Assert.fail("Entity id should have been application_1111111111_2222"); + } } assertEquals(0, eventCnt); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java index 58d5e61..e34ae90 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java @@ -74,8 +74,8 @@ public class TestPhoenixOfflineAggregationWriterImpl extends BaseTest { } private static PhoenixOfflineAggregationWriterImpl - setupPhoenixClusterAndWriterForTest(YarnConfiguration conf) - throws Exception{ + setupPhoenixClusterAndWriterForTest(YarnConfiguration conf) + throws Exception { Map props = new HashMap<>(); // Must update config before starting server props.put(QueryServices.STATS_USE_CURRENT_TIME_ATTRIB, 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java
index 58d5e61..e34ae90 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestPhoenixOfflineAggregationWriterImpl.java
@@ -74,8 +74,8 @@ public class TestPhoenixOfflineAggregationWriterImpl extends BaseTest {
   }

   private static PhoenixOfflineAggregationWriterImpl
-      setupPhoenixClusterAndWriterForTest(YarnConfiguration conf)
-      throws Exception{
+      setupPhoenixClusterAndWriterForTest(YarnConfiguration conf)
+      throws Exception {
     Map<String, String> props = new HashMap<>();
     // Must update config before starting server
     props.put(QueryServices.STATS_USE_CURRENT_TIME_ATTRIB,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java
index 0535a13..b608987 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java
@@ -31,12 +31,14 @@ import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants;
 import org.apache.hadoop.conf.Configuration;

 /**
- * Generates the data/entities for the FlowRun and FlowActivity Tables
+ * Generates the data/entities for the FlowRun and FlowActivity Tables.
  */
-class TestFlowDataGenerator {
+final class TestFlowDataGenerator {
+  private TestFlowDataGenerator() {
+  }

-  private static final String metric1 = "MAP_SLOT_MILLIS";
-  private static final String metric2 = "HDFS_BYTES_READ";
+  private static final String METRIC_1 = "MAP_SLOT_MILLIS";
+  private static final String METRIC_2 = "HDFS_BYTES_READ";
   public static final long END_TS_INCR = 10000L;

   static TimelineEntity getEntityMetricsApp1(long insertTs, Configuration c1) {
@@ -51,7 +53,7 @@
     // add metrics
     Set<TimelineMetric> metrics = new HashSet<>();
     TimelineMetric m1 = new TimelineMetric();
-    m1.setId(metric1);
+    m1.setId(METRIC_1);
     Map<Long, Number> metricValues = new HashMap<Long, Number>();
     long ts = insertTs;
@@ -64,10 +66,10 @@
     metrics.add(m1);

     TimelineMetric m2 = new TimelineMetric();
-    m2.setId(metric2);
+    m2.setId(METRIC_2);
     metricValues = new HashMap<Long, Number>();
     ts = System.currentTimeMillis();
-    for (int k=1; k< 100 ; k++) {
+    for (int k = 1; k < 100; k++) {
       metricValues.put(ts - k*100000L, 31L);
     }
@@ -81,7 +83,8 @@
   }

-  static TimelineEntity getEntityMetricsApp1Complete(long insertTs, Configuration c1) {
+  static TimelineEntity getEntityMetricsApp1Complete(long insertTs,
+      Configuration c1) {
     TimelineEntity entity = new TimelineEntity();
     String id = "flowRunMetrics_test";
     String type = TimelineEntityType.YARN_APPLICATION.toString();
@@ -93,7 +96,7 @@
     // add metrics
     Set<TimelineMetric> metrics = new HashSet<>();
     TimelineMetric m1 = new TimelineMetric();
-    m1.setId(metric1);
+    m1.setId(METRIC_1);
     Map<Long, Number> metricValues = new HashMap<Long, Number>();
     long ts = insertTs;
@@ -103,7 +106,7 @@
     metrics.add(m1);

     TimelineMetric m2 = new TimelineMetric();
-    m2.setId(metric2);
+    m2.setId(METRIC_2);
     metricValues = new HashMap<Long, Number>();
     ts = insertTs;
     metricValues.put(ts - 80000, 57L);
@@ -134,7 +137,7 @@
     // add metrics
     Set<TimelineMetric> metrics = new HashSet<>();
     TimelineMetric m1 = new TimelineMetric();
-    m1.setId(metric1);
+    m1.setId(METRIC_1);
     Map<Long, Number> metricValues = new HashMap<Long, Number>();
     long ts = insertTs;
     metricValues.put(ts - 100000, 2L);
@@ -144,7 +147,7 @@
     metrics.add(m1);

     TimelineMetric m2 = new TimelineMetric();
-    m2.setId(metric2);
+    m2.setId(METRIC_2);
     metricValues = new HashMap<Long, Number>();
     ts = insertTs;
     metricValues.put(ts - 100000, 31L);
@@ -177,7 +180,7 @@
     // add metrics
     Set<TimelineMetric> metrics = new HashSet<>();
     TimelineMetric m1 = new TimelineMetric();
-    m1.setId(metric1);
+    m1.setId(METRIC_1);
     Map<Long, Number> metricValues = new HashMap<Long, Number>();
     long ts = insertTs;
     metricValues.put(ts - 100000, 5L);
@@ -208,7 +211,7 @@
     // add metrics
     Set<TimelineMetric> metrics = new HashSet<>();
     TimelineMetric m1 = new TimelineMetric();
-    m1.setId(metric1);
+    m1.setId(METRIC_1);
     Map<Long, Number> metricValues = new HashMap<Long, Number>();
     long ts = System.currentTimeMillis();
     metricValues.put(ts - 120000, 100000000L);
@@ -232,7 +235,7 @@
     event = new TimelineEvent();
     event.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
-    long expTs = cTime + 21600000;// start time + 6hrs
+    long expTs = cTime + 21600000; // start time + 6hrs
     event.setTimestamp(expTs);
     event.addInfo(expKey, expVal);
     entity.addEvent(event);
@@ -250,7 +253,7 @@
     // add metrics
     Set<TimelineMetric> metrics = new HashSet<>();
     TimelineMetric m1 = new TimelineMetric();
-    m1.setId(metric1);
+    m1.setId(METRIC_1);
     Map<Long, Number> metricValues = new HashMap<Long, Number>();
     metricValues.put(ts - 120000, 100000000L);
     metricValues.put(ts - 100000, 200000000L);
@@ -262,7 +265,7 @@
     m1.setValues(metricValues);
     metrics.add(m1);
     TimelineMetric m2 = new TimelineMetric();
-    m2.setId(metric2);
+    m2.setId(METRIC_2);
     metricValues = new HashMap<Long, Number>();
     metricValues.put(ts - 900000, 31L);
     metricValues.put(ts - 30000, 57L);
@@ -281,7 +284,7 @@
     event = new TimelineEvent();
     event.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
-    long expTs = ts + 21600000;// start time + 6hrs
+    long expTs = ts + 21600000; // start time + 6hrs
     event.setTimestamp(expTs);
     event.addInfo(expKey, expVal);
     entity.addEvent(event);
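The TestFlowDataGenerator hunks apply two checkstyle rules worth seeing in isolation: a static-only helper class should be final with a private constructor, and static final constants use UPPER_SNAKE_CASE. A self-contained illustration with hypothetical names (not the real class) follows.

    // Illustration of the utility-class pattern this diff applies.
    final class MetricNames {
      // final + private constructor: the class cannot be subclassed or
      // instantiated, which is the point of a constants-only holder.
      private MetricNames() {
      }

      // was the equivalent of: private static final String metric1 = ...;
      static final String METRIC_1 = "MAP_SLOT_MILLIS";
      static final String METRIC_2 = "HDFS_BYTES_READ";
    }

The private constructor also documents intent to readers: any call site that tries `new MetricNames()` fails at compile time instead of silently creating a useless instance.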
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java
index 37490ff..1906574 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java
@@ -58,7 +58,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;

 /**
- * Tests the FlowRun and FlowActivity Tables
+ * Tests the FlowRun and FlowActivity Tables.
  */
 public class TestHBaseStorageFlowActivity {
@@ -114,7 +114,7 @@ public class TestHBaseStorageFlowActivity {
     String appName = "application_100000000000_1111";
     long minStartTs = 1424995200300L;
     long greaterStartTs = 1424995200300L + 864000L;
-    long endTs = 1424995200300L + 86000000L;;
+    long endTs = 1424995200300L + 86000000L;
     TimelineEntity entityMinStartTime = TestFlowDataGenerator
         .getEntityMinStartTime(minStartTs);
@@ -209,7 +209,7 @@ public class TestHBaseStorageFlowActivity {
   /**
    * Write 1 application entity and checks the record for today in the flow
-   * activity table
+   * activity table.
    */
   @Test
   public void testWriteFlowActivityOneFlow() throws Exception {
@@ -313,10 +313,10 @@ public class TestHBaseStorageFlowActivity {
   /**
    * Writes 3 applications each with a different run id and version for the same
-   * {cluster, user, flow}
+   * {cluster, user, flow}.
    *
    * They should be getting inserted into one record in the flow activity table
-   * with 3 columns, one per run id
+   * with 3 columns, one per run id.
    */
   @Test
   public void testFlowActivityTableOneFlowMultipleRunIds() throws IOException {
@@ -425,7 +425,8 @@ public class TestHBaseStorageFlowActivity {
     s.setStartRow(startRow);
     String clusterStop = cluster + "1";
     byte[] stopRow =
-        new FlowActivityRowKey(clusterStop, appCreatedTime, user, flow).getRowKey();
+        new FlowActivityRowKey(clusterStop, appCreatedTime, user, flow)
+            .getRowKey();
     s.setStopRow(stopRow);
     Connection conn = ConnectionFactory.createConnection(c1);
     Table table1 = conn.getTable(TableName
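The stop-row construction in the hunk above (`cluster + "1"`) is a small but non-obvious trick. The following is illustrative only, with an invented key layout, not the real FlowActivityRowKey encoding: because the timeline schema joins row-key components with a low-sorting separator byte ('!' here is an assumption), appending '1' to the cluster name yields a key strictly greater than every key that begins with the cluster name plus the separator, so an exclusive stop row bounds the scan to that cluster.

    import org.apache.hadoop.hbase.util.Bytes;

    // Illustrative composite key: cluster!day!user!flow. Real encodings in
    // the timeline service also invert timestamps and use binary converters.
    static byte[] startRowFor(String cluster, long day, String user,
        String flow) {
      return Bytes.toBytes(cluster + "!" + day + "!" + user + "!" + flow);
    }

    // '!' (0x21) sorts before '1' (0x31), so "cluster1" is greater than any
    // "cluster!..." key, making it a valid exclusive stop row for the scan.
    static byte[] stopRowFor(String cluster) {
      return Bytes.toBytes(cluster + "1");
    }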
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
index 6c4c810..74b9e50 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
@@ -69,7 +69,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;

 /**
- * Tests the FlowRun and FlowActivity Tables
+ * Tests the FlowRun and FlowActivity Tables.
  */
 public class TestHBaseStorageFlowRun {
@@ -356,18 +356,20 @@ public class TestHBaseStorageFlowRun {
   /*
    * checks the batch limits on a scan
    */
-  void checkFlowRunTableBatchLimit(String cluster, String user,
+  void checkFlowRunTableBatchLimit(String cluster, String user,
       String flow, long runid, Configuration c1) throws IOException {
     Scan s = new Scan();
     s.addFamily(FlowRunColumnFamily.INFO.getBytes());
-    byte[] startRow = new FlowRunRowKey(cluster, user, flow, runid).getRowKey();
+    byte[] startRow =
+        new FlowRunRowKey(cluster, user, flow, runid).getRowKey();
     s.setStartRow(startRow);
     // set a batch limit
     int batchLimit = 2;
     s.setBatch(batchLimit);
     String clusterStop = cluster + "1";
-    byte[] stopRow = new FlowRunRowKey(clusterStop, user, flow, runid).getRowKey();
+    byte[] stopRow =
+        new FlowRunRowKey(clusterStop, user, flow, runid).getRowKey();
     s.setStopRow(stopRow);
     Connection conn = ConnectionFactory.createConnection(c1);
     Table table1 = conn
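To make the batch-limit behavior under test concrete, here is a sketch of the scan this hunk builds, using the same HBase 1.x Scan calls that appear in the hunk; `c1` and the table name come from the test, and the comment about partial Results reflects the documented effect of setBatch rather than anything asserted in this hunk itself.

    // Sketch: a bounded scan over the flow run table with a column batch
    // limit. setBatch(2) caps the columns per returned Result, so a logical
    // row with four columns comes back as two partial Results.
    Scan s = new Scan();
    s.addFamily(FlowRunColumnFamily.INFO.getBytes());
    s.setStartRow(new FlowRunRowKey(cluster, user, flow, runid).getRowKey());
    // exclusive upper bound: same stop-row trick as the flow activity scan
    s.setStopRow(
        new FlowRunRowKey(cluster + "1", user, flow, runid).getRowKey());
    s.setBatch(2);
    Connection conn = ConnectionFactory.createConnection(c1);
    Table table = conn.getTable(
        TableName.valueOf(FlowRunTable.DEFAULT_TABLE_NAME));
    ResultScanner scanner = table.getScanner(s);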
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java
index 71523b8..3094088 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java
@@ -63,14 +63,14 @@ import org.junit.BeforeClass;
 import org.junit.Test;

 /**
- * Tests the FlowRun and FlowActivity Tables
+ * Tests the FlowRun and FlowActivity Tables.
  */
 public class TestHBaseStorageFlowRunCompaction {

   private static HBaseTestingUtility util;

-  private static final String metric1 = "MAP_SLOT_MILLIS";
-  private static final String metric2 = "HDFS_BYTES_READ";
+  private static final String METRIC_1 = "MAP_SLOT_MILLIS";
+  private static final String METRIC_2 = "HDFS_BYTES_READ";

   private final byte[] aRowKey = Bytes.toBytes("a");
   private final byte[] aFamily = Bytes.toBytes("family");
@@ -89,8 +89,8 @@ public class TestHBaseStorageFlowRunCompaction {
     TimelineSchemaCreator.createAllTables(util.getConfiguration(), false);
   }

-  /** writes non numeric data into flow run table
-   * reads it back
+  /** Writes non numeric data into flow run table
+   * reads it back.
    *
    * @throws Exception
    */
@@ -262,7 +262,7 @@ public class TestHBaseStorageFlowRunCompaction {
         .getFamilyMap(FlowRunColumnFamily.INFO.getBytes());
     // we expect all back in one next call
     assertEquals(4, values.size());
-    System.out.println(" values size " + values.size() + " " + batchLimit );
+    System.out.println(" values size " + values.size() + " " + batchLimit);
     rowCount++;
   }
   // should get back 1 row with each invocation
@@ -325,11 +325,12 @@ public class TestHBaseStorageFlowRunCompaction {
         .valueOf(FlowRunTable.DEFAULT_TABLE_NAME));
     List<Region> regions = server.getOnlineRegions(TableName
         .valueOf(FlowRunTable.DEFAULT_TABLE_NAME));
-    assertTrue("Didn't find any regions for primary table!", regions.size() > 0);
+    assertTrue("Didn't find any regions for primary table!",
+        regions.size() > 0);
     // flush and compact all the regions of the primary table
     for (Region region : regions) {
-       region.flush(true);
-       region.compact(true);
+      region.flush(true);
+      region.compact(true);
     }

     // check flow run for one flow many apps
@@ -363,13 +364,13 @@ public class TestHBaseStorageFlowRunCompaction {
       rowCount++;
       // check metric1
       byte[] q = ColumnHelper.getColumnQualifier(
-          FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(), metric1);
+          FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(), METRIC_1);
       assertTrue(values.containsKey(q));
       assertEquals(141, Bytes.toLong(values.get(q)));

       // check metric2
       q = ColumnHelper.getColumnQualifier(
-          FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(), metric2);
+          FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(), METRIC_2);
       assertTrue(values.containsKey(q));
       assertEquals(57, Bytes.toLong(values.get(q)));
     }
@@ -385,7 +386,7 @@ public class TestHBaseStorageFlowRunCompaction {
     // okay to pass in nulls for the constructor arguments
     // because all we want to do is invoke the process summation
     FlowScanner fs = new FlowScanner(null, null,
-        (request.isMajor() == true ? FlowScannerOperation.MAJOR_COMPACTION
+        (request.isMajor() ? FlowScannerOperation.MAJOR_COMPACTION
         : FlowScannerOperation.MINOR_COMPACTION));
     assertNotNull(fs);
     return fs;
@@ -404,7 +405,7 @@ public class TestHBaseStorageFlowRunCompaction {
     long currentTimestamp = System.currentTimeMillis();
     long cell1Ts = 1200120L;
     long cell2Ts = TimestampGenerator.getSupplementedTimestamp(
-        System.currentTimeMillis(),"application_123746661110_11202");
+        System.currentTimeMillis(), "application_123746661110_11202");
     long cell3Ts = 1277719L;
     long cell4Ts = currentTimestamp - 10;
@@ -571,7 +572,8 @@ public class TestHBaseStorageFlowRunCompaction {
   // of type SUM and SUM_FINAL
   // NOT cells of SUM_FINAL will expire
   @Test
-  public void checkProcessSummationMoreCellsSumFinalVariedTags() throws IOException {
+  public void checkProcessSummationMoreCellsSumFinalVariedTags()
+      throws IOException {
     FlowScanner fs = getFlowScannerForTestingCompaction();
     int countFinal = 20100;
     int countNotFinal = 1000;
@@ -585,7 +587,8 @@ public class TestHBaseStorageFlowRunCompaction {
     long cellTsFinalStart = 10001120L;
     long cellTsFinal = cellTsFinalStart;

-    long cellTsFinalStartNotExpire = TimestampGenerator.getSupplementedTimestamp(
+    long cellTsFinalStartNotExpire =
+        TimestampGenerator.getSupplementedTimestamp(
         System.currentTimeMillis(), "application_10266666661166_118821");
     long cellTsFinalNotExpire = cellTsFinalStartNotExpire;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java
index 2dff937..29ef1f8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorWebService.java
@@ -53,7 +53,6 @@ import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
 import org.apache.hadoop.yarn.api.records.timelineservice.UserEntity;
-import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.hadoop.yarn.webapp.ForbiddenException;
 import org.apache.hadoop.yarn.webapp.NotFoundException;
@@ -180,7 +179,7 @@ public class TimelineCollectorWebService {
   private static ApplicationId parseApplicationId(String appId) {
     try {
       if (appId != null) {
-        return ConverterUtils.toApplicationId(appId.trim());
+        return ApplicationId.fromString(appId.trim());
       } else {
         return null;
       }
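The substantive change in this file swaps the deprecated ConverterUtils.toApplicationId for the ApplicationId.fromString factory. As a sketch of the wrapper pattern, the null handling mirrors parseApplicationId above; the catch clause shown here is an assumption for illustration, since the hunk does not show what the surrounding try block catches.

    import org.apache.hadoop.yarn.api.records.ApplicationId;

    // ApplicationId.fromString parses the canonical
    // "application_<clusterTimestamp>_<sequenceNumber>" form and throws
    // IllegalArgumentException on anything else.
    private static ApplicationId parseApplicationId(String appId) {
      try {
        return appId == null ? null : ApplicationId.fromString(appId.trim());
      } catch (IllegalArgumentException e) {
        return null; // assumption: treat an unparseable id as absent
      }
    }

Moving to the factory method keeps the parsing logic with the record type itself and removes a dependency on the utility class, which is why the same one-line substitution also appears in AppIdKeyConverter and TimestampGenerator below.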
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java
index eda14e6..03f508f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/package-info.java
@@ -26,4 +26,3 @@ package org.apache.hadoop.yarn.server.timelineservice.storage.application;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/AppIdKeyConverter.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/AppIdKeyConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/AppIdKeyConverter.java
index f5f7aa6..4cb46e6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/AppIdKeyConverter.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/AppIdKeyConverter.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.yarn.server.timelineservice.storage.common;

 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.util.ConverterUtils;

 /**
  * Encodes and decodes {@link ApplicationId} for row keys.
@@ -50,7 +49,7 @@ public final class AppIdKeyConverter implements KeyConverter {
    */
   @Override
   public byte[] encode(String appIdStr) {
-    ApplicationId appId = ConverterUtils.toApplicationId(appIdStr);
+    ApplicationId appId = ApplicationId.fromString(appIdStr);
     byte[] appIdBytes = new byte[getKeySize()];
     byte[] clusterTs = Bytes.toBytes(
         LongConverter.invertLong(appId.getClusterTimestamp()));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java
index 288046c..d03b37d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimestampGenerator.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.yarn.server.timelineservice.storage.common;

 import java.util.concurrent.atomic.AtomicLong;

 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.util.ConverterUtils;

 /**
  * Utility class that allows HBase coprocessors to interact with unique
@@ -99,7 +98,7 @@ public class TimestampGenerator {
     if (appIdStr == null) {
       return 0L;
     }
-    ApplicationId appId = ConverterUtils.toApplicationId(appIdStr);
+    ApplicationId appId = ApplicationId.fromString(appIdStr);
     long id = appId.getId() % TS_MULTIPLIER;
     return id;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
index 0e9578a..bb0e331 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/package-info.java
@@ -26,4 +26,3 @@ package org.apache.hadoop.yarn.server.timelineservice.storage.entity;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-
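The AppIdKeyConverter hunk touches the line that inverts the cluster timestamp before writing it into the row key. A short sketch of why that inversion exists (the body of invertLong here is an assumption standing in for LongConverter's actual implementation): HBase orders rows lexicographically by unsigned bytes, so storing the complement of the timestamp makes the most recent value sort first.

    // Sketch of timestamp inversion for descending row-key order.
    static long invertLong(long num) {
      return Long.MAX_VALUE - num;
    }

    // A larger (newer) timestamp produces a smaller inverted value, so
    // Bytes.toBytes(invertLong(ts)) sorts newest-first in an HBase scan:
    //   ts = 1500000000000L  ->  inverted value is smaller, sorts earlier
    //   ts = 1400000000000L  ->  inverted value is larger, sorts later
    byte[] clusterTs = Bytes.toBytes(invertLong(appId.getClusterTimestamp()));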
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestNMTimelineCollectorManager.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestNMTimelineCollectorManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestNMTimelineCollectorManager.java
index 854e046..7bc89c5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestNMTimelineCollectorManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestNMTimelineCollectorManager.java
@@ -88,9 +88,9 @@ public class TestNMTimelineCollectorManager {
 
   @Test(timeout=60000)
   public void testMultithreadedAdd() throws Exception {
-    final int NUM_APPS = 5;
+    final int numApps = 5;
     List<Callable<Boolean>> tasks = new ArrayList<Callable<Boolean>>();
-    for (int i = 0; i < NUM_APPS; i++) {
+    for (int i = 0; i < numApps; i++) {
       final ApplicationId appId = ApplicationId.newInstance(0L, i);
       Callable<Boolean> task = new Callable<Boolean>() {
         public Boolean call() {
@@ -101,7 +101,7 @@ public class TestNMTimelineCollectorManager {
       };
       tasks.add(task);
     }
-    ExecutorService executor = Executors.newFixedThreadPool(NUM_APPS);
+    ExecutorService executor = Executors.newFixedThreadPool(numApps);
     try {
       List<Future<Boolean>> futures = executor.invokeAll(tasks);
       for (Future<Boolean> future: futures) {
@@ -111,7 +111,7 @@ public class TestNMTimelineCollectorManager {
       executor.shutdownNow();
     }
     // check the keys
-    for (int i = 0; i < NUM_APPS; i++) {
+    for (int i = 0; i < numApps; i++) {
       final ApplicationId appId = ApplicationId.newInstance(0L, i);
       assertTrue(collectorManager.containsTimelineCollector(appId));
     }
@@ -119,9 +119,9 @@ public class TestNMTimelineCollectorManager {
 
   @Test
   public void testMultithreadedAddAndRemove() throws Exception {
-    final int NUM_APPS = 5;
+    final int numApps = 5;
     List<Callable<Boolean>> tasks = new ArrayList<Callable<Boolean>>();
-    for (int i = 0; i < NUM_APPS; i++) {
+    for (int i = 0; i < numApps; i++) {
       final ApplicationId appId = ApplicationId.newInstance(0L, i);
       Callable<Boolean> task = new Callable<Boolean>() {
         public Boolean call() {
@@ -134,7 +134,7 @@ public class TestNMTimelineCollectorManager {
       };
       tasks.add(task);
     }
-    ExecutorService executor = Executors.newFixedThreadPool(NUM_APPS);
+    ExecutorService executor = Executors.newFixedThreadPool(numApps);
     try {
       List<Future<Boolean>> futures = executor.invokeAll(tasks);
       for (Future<Boolean> future: futures) {
@@ -144,16 +144,16 @@ public class TestNMTimelineCollectorManager {
       executor.shutdownNow();
     }
     // check the keys
-    for (int i = 0; i < NUM_APPS; i++) {
+    for (int i = 0; i < numApps; i++) {
       final ApplicationId appId = ApplicationId.newInstance(0L, i);
       assertFalse(collectorManager.containsTimelineCollector(appId));
     }
   }
 
   private NodeTimelineCollectorManager createCollectorManager() {
-    final NodeTimelineCollectorManager collectorManager =
+    final NodeTimelineCollectorManager cm =
         spy(new NodeTimelineCollectorManager());
-    doReturn(new Configuration()).when(collectorManager).getConfig();
+    doReturn(new Configuration()).when(cm).getConfig();
     CollectorNodemanagerProtocol nmCollectorService =
         mock(CollectorNodemanagerProtocol.class);
     GetTimelineCollectorContextResponse response =
@@ -164,7 +164,7 @@ public class TestNMTimelineCollectorManager {
     } catch (YarnException | IOException e) {
       fail();
     }
-    doReturn(nmCollectorService).when(collectorManager).getNMCollectorService();
-    return collectorManager;
+    doReturn(nmCollectorService).when(cm).getNMCollectorService();
+    return cm;
   }
 }
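The NUM_APPS to numApps rename in this hunk follows the convention checkstyle enforces: UPPER_SNAKE_CASE is reserved for static final class constants, while locals, even final ones, use lowerCamelCase (the LocalFinalVariableName check, for instance). A toy illustration, not taken from the patched test:

  public class NamingSketch {
    // Class-level constant: UPPER_SNAKE_CASE is expected here.
    private static final int DEFAULT_APP_COUNT = 5;

    public void run() {
      // A final local is still just a local, so lowerCamelCase applies.
      final int numApps = DEFAULT_APP_COUNT;
      for (int i = 0; i < numApps; i++) {
        System.out.println("app " + i);
      }
    }
  }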
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestPerNodeTimelineCollectorsAuxService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestPerNodeTimelineCollectorsAuxService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestPerNodeTimelineCollectorsAuxService.java
index 7c2a471..cb9ced0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestPerNodeTimelineCollectorsAuxService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/collector/TestPerNodeTimelineCollectorsAuxService.java
@@ -111,7 +111,7 @@ public class TestPerNodeTimelineCollectorsAuxService {
     // a configured period
     assertTrue(auxService.hasApplication(appAttemptId.getApplicationId()));
     for (int i = 0; i < 4; i++) {
-      Thread.sleep(500l);
+      Thread.sleep(500L);
       if (!auxService.hasApplication(appAttemptId.getApplicationId())) {
         break;
       }
@@ -154,7 +154,7 @@ public class TestPerNodeTimelineCollectorsAuxService {
 
   private PerNodeTimelineCollectorsAuxService
       createCollectorAndAddApplication() {
-    PerNodeTimelineCollectorsAuxService auxService = createCollector();
+    PerNodeTimelineCollectorsAuxService service = createCollector();
     // create an AM container
     ContainerId containerId = getAMContainerId();
     ContainerInitializationContext context =
@@ -162,17 +162,17 @@ public class TestPerNodeTimelineCollectorsAuxService {
     when(context.getContainerId()).thenReturn(containerId);
     when(context.getContainerType()).thenReturn(
         ContainerType.APPLICATION_MASTER);
-    auxService.initializeContainer(context);
-    return auxService;
+    service.initializeContainer(context);
+    return service;
   }
 
   private PerNodeTimelineCollectorsAuxService createCollector() {
     NodeTimelineCollectorManager collectorManager = createCollectorManager();
-    PerNodeTimelineCollectorsAuxService auxService =
+    PerNodeTimelineCollectorsAuxService service =
         spy(new PerNodeTimelineCollectorsAuxService(collectorManager));
-    auxService.init(conf);
-    auxService.start();
-    return auxService;
+    service.init(conf);
+    service.start();
+    return service;
   }
 
   private NodeTimelineCollectorManager createCollectorManager() {
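Similarly, 500l and 500L denote the same long literal to the compiler, but checkstyle's UpperEll check flags the lowercase suffix because a trailing l is easily misread as the digit 1. In isolation:

  public class UpperEllSketch {
    public static void main(String[] args) throws InterruptedException {
      long sleepMillis = 500L; // preferred; 500l reads like 5001 at a glance
      Thread.sleep(sleepMillis);
      System.out.println(500l == 500L); // true: identical value either way
    }
  }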
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderUtils.java
index 791d6ab..bc5eb9c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderUtils.java
@@ -45,11 +45,11 @@ public class TestTimelineReaderUtils {
   public void testJoinAndEscapeStrings() throws Exception {
     assertEquals("*!cluster!*!b**o***!xer!oozie**",
         TimelineReaderUtils.joinAndEscapeStrings(
-        new String[] { "!cluster", "!b*o*!xer", "oozie*"}, '!', '*'));
+        new String[] {"!cluster", "!b*o*!xer", "oozie*"}, '!', '*'));
     assertEquals("*!cluster!*!b**o***!xer!!",
         TimelineReaderUtils.joinAndEscapeStrings(
-        new String[] { "!cluster", "!b*o*!xer", "", ""}, '!', '*'));
+        new String[] {"!cluster", "!b*o*!xer", "", ""}, '!', '*'));
     assertNull(TimelineReaderUtils.joinAndEscapeStrings(
-        new String[] { "!cluster", "!b*o*!xer", null, ""}, '!', '*'));
+        new String[] {"!cluster", "!b*o*!xer", null, ""}, '!', '*'));
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServices.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServices.java
index 4d65922..4ade024 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServices.java
@@ -110,7 +110,8 @@ public class TestTimelineReaderWebServices {
         client.resource(uri).accept(MediaType.APPLICATION_JSON)
             .type(MediaType.APPLICATION_JSON).get(ClientResponse.class);
     assertNotNull(resp);
-    assertEquals(resp.getClientResponseStatus(), expectedStatus);
+    assertEquals(resp.getStatusInfo().getStatusCode(),
+        expectedStatus.getStatusCode());
   }
 
   private static Client createClient() {
@@ -126,10 +127,11 @@ public class TestTimelineReaderWebServices {
         client.resource(uri).accept(MediaType.APPLICATION_JSON)
             .type(MediaType.APPLICATION_JSON).get(ClientResponse.class);
     if (resp == null ||
-        resp.getClientResponseStatus() != ClientResponse.Status.OK) {
+        resp.getStatusInfo().getStatusCode() !=
+            ClientResponse.Status.OK.getStatusCode()) {
       String msg = new String();
       if (resp != null) {
-        msg = resp.getClientResponseStatus().toString();
+        msg = String.valueOf(resp.getStatusInfo().getStatusCode());
       }
       throw new IOException("Incorrect response from timeline reader. " +
           "Status=" + msg);
@@ -141,7 +143,8 @@ public class TestTimelineReaderWebServices {
       implements HttpURLConnectionFactory {
 
     @Override
-    public HttpURLConnection getHttpURLConnection(final URL url) throws IOException {
+    public HttpURLConnection getHttpURLConnection(final URL url)
+        throws IOException {
       try {
        return (HttpURLConnection)url.openConnection();
      } catch (UndeclaredThrowableException e) {
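getClientResponseStatus() is deprecated in later Jersey 1.x releases, and it can only report statuses that map to an enum constant; going through getStatusInfo() and comparing numeric codes, as the hunks above now do, avoids both problems. A condensed sketch of the same pattern; the class and method names here are illustrative, not part of the patch:

  import java.io.IOException;
  import java.net.URI;
  import javax.ws.rs.core.MediaType;

  import com.sun.jersey.api.client.Client;
  import com.sun.jersey.api.client.ClientResponse;

  public final class StatusCheckSketch {
    private StatusCheckSketch() {
    }

    static void verifyStatus(Client client, URI uri,
        ClientResponse.Status expected) throws IOException {
      ClientResponse resp = client.resource(uri)
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      // Compare numeric codes rather than calling the deprecated
      // getClientResponseStatus().
      if (resp == null
          || resp.getStatusInfo().getStatusCode() != expected.getStatusCode()) {
        throw new IOException("Unexpected response status");
      }
    }
  }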
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesUtils.java
index e991d27..b2837c2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesUtils.java
@@ -779,8 +779,8 @@ public class TestTimelineReaderWebServicesUtils {
         ),
         new TimelineFilterList(
             new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
-                "type4", Sets.newHashSet((Object)"entity43","entity44",
-                "entity47","entity49")),
+                "type4", Sets.newHashSet((Object)"entity43", "entity44",
+                "entity47", "entity49")),
             new TimelineKeyValuesFilter(TimelineCompareOp.NOT_EQUAL,
                 "type7", Sets.newHashSet((Object)"entity71"))
         )

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineUIDConverter.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineUIDConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineUIDConverter.java
index 8d6235d..d5e791b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineUIDConverter.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineUIDConverter.java
@@ -60,7 +60,7 @@ public class TestTimelineUIDConverter {
         + "YARN_CONTAINER!container_1111111111_1111_01_000001", uid);
     assertEquals(
         context, TimelineUIDConverter.GENERIC_ENTITY_UID.decodeUID(uid));
-    context = new TimelineReaderContext("yarn_cluster",null, null, null,
+    context = new TimelineReaderContext("yarn_cluster", null, null, null,
         "application_1111111111_1111", "YARN_CONTAINER",
         "container_1111111111_1111_01_000001");
     uid = TimelineUIDConverter.GENERIC_ENTITY_UID.encodeUID(context);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6791363/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
index 2af7817..b58bbe3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
@@ -58,9 +58,9 @@ import org.junit.Test;
 
 public class TestFileSystemTimelineReaderImpl {
 
-  private static final String rootDir =
+  private static final String ROOT_DIR =
       FileSystemTimelineReaderImpl.DEFAULT_TIMELINE_SERVICE_STORAGE_DIR_ROOT;
-  FileSystemTimelineReaderImpl reader;
+  private FileSystemTimelineReaderImpl reader;
 
   @BeforeClass
   public static void setup() throws Exception {
@@ -68,22 +68,22 @@ public class TestFileSystemTimelineReaderImpl {
     // Create app flow mapping file.
     CSVFormat format =
         CSVFormat.DEFAULT.withHeader("APP", "USER", "FLOW", "FLOWRUN");
-    String appFlowMappingFile = rootDir + "/entities/cluster1/" +
+    String appFlowMappingFile = ROOT_DIR + "/entities/cluster1/" +
         FileSystemTimelineReaderImpl.APP_FLOW_MAPPING_FILE;
     try (PrintWriter out = new PrintWriter(new BufferedWriter(
             new FileWriter(appFlowMappingFile, true)));
         CSVPrinter printer = new CSVPrinter(out, format)){
       printer.printRecord("app1", "user1", "flow1", 1);
-      printer.printRecord("app2","user1","flow1,flow",1);
+      printer.printRecord("app2", "user1", "flow1,flow", 1);
       printer.close();
     }
-    (new File(rootDir)).deleteOnExit();
+    (new File(ROOT_DIR)).deleteOnExit();
   }
 
   @AfterClass
   public static void tearDown() throws Exception {
-    FileUtils.deleteDirectory(new File(rootDir));
+    FileUtils.deleteDirectory(new File(ROOT_DIR));
   }
 
   @Before
@@ -91,7 +91,7 @@ public class TestFileSystemTimelineReaderImpl {
     reader = new FileSystemTimelineReaderImpl();
     Configuration conf = new YarnConfiguration();
     conf.set(FileSystemTimelineReaderImpl.TIMELINE_SERVICE_STORAGE_DIR_ROOT,
-        rootDir);
+        ROOT_DIR);
     reader.init(conf);
   }
 
@@ -112,7 +112,7 @@ public class TestFileSystemTimelineReaderImpl {
   }
 
   private static void loadEntityData() throws Exception {
-    File appDir = new File(rootDir +
+    File appDir = new File(ROOT_DIR +
         "/entities/cluster1/user1/flow1/1/app1/app/");
     TimelineEntity entity11 = new TimelineEntity();
     entity11.setId("id_1");
@@ -138,7 +138,7 @@ public class TestFileSystemTimelineReaderImpl {
     metric2.addValue(1425016502016L, 34);
     metrics.add(metric2);
     entity11.setMetrics(metrics);
-    Map configs = new HashMap();
+    Map configs = new HashMap();
     configs.put("config_1", "127");
     entity11.setConfigs(configs);
     entity11.addRelatesToEntity("flow", "flow1");
@@ -179,7 +179,7 @@ public class TestFileSystemTimelineReaderImpl {
     Map info2 = new HashMap();
     info1.put("info2", 4);
     entity2.addInfo(info2);
-    Map configs2 = new HashMap();
+    Map configs2 = new HashMap();
     configs2.put("config_1", "129");
     configs2.put("config_3", "def");
     entity2.setConfigs(configs2);
@@ -216,7 +216,7 @@ public class TestFileSystemTimelineReaderImpl {
     info3.put("info2", 3.5);
     info3.put("info4", 20);
     entity3.addInfo(info3);
-    Map configs3 = new HashMap();
+    Map configs3 = new HashMap();
     configs3.put("config_1", "123");
     configs3.put("config_3", "abc");
     entity3.setConfigs(configs3);
@@ -254,7 +254,7 @@ public class TestFileSystemTimelineReaderImpl {
     entity4.addEvent(event44);
     writeEntityFile(entity4, appDir);
 
-    File appDir2 = new File(rootDir +
+    File appDir2 = new File(ROOT_DIR +
         "/entities/cluster1/user1/flow1,flow/1/app2/app/");
     TimelineEntity entity5 = new TimelineEntity();
     entity5.setId("id_5");
@@ -298,7 +298,7 @@ public class TestFileSystemTimelineReaderImpl {
     Assert.assertEquals(0, result.getMetrics().size());
   }
 
-  /** This test checks whether we can handle commas in app flow mapping csv */
+  /** This test checks whether we can handle commas in app flow mapping csv. */
   @Test
   public void testAppFlowMappingCsv() throws Exception {
     // Test getting an entity by cluster and app where flow entry
@@ -317,7 +317,7 @@ public class TestFileSystemTimelineReaderImpl {
   public void testGetEntityCustomFields() throws Exception {
     // Specified fields in addition to default view will be returned.
     TimelineEntity result = reader.getEntity(
-        new TimelineReaderContext("cluster1","user1", "flow1", 1L, "app1",
+        new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
         "app", "id_1"),
         new TimelineDataToRetrieve(null, null, EnumSet.of(Field.INFO,
         Field.CONFIGS, Field.METRICS), null));
@@ -336,7 +336,7 @@ public class TestFileSystemTimelineReaderImpl {
   public void testGetEntityAllFields() throws Exception {
     // All fields of TimelineEntity will be returned.
     TimelineEntity result = reader.getEntity(
-        new TimelineReaderContext("cluster1","user1", "flow1", 1L, "app1",
+        new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
         "app", "id_1"),
         new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
     Assert.assertEquals(
@@ -381,9 +381,9 @@ public class TestFileSystemTimelineReaderImpl {
         "app", null), new TimelineEntityFilters(3L, null, null, null, null,
         null, null, null, null), new TimelineDataToRetrieve());
-      // Even though 2 entities out of 4 have same created time, one entity
-      // is left out due to limit
-      Assert.assertEquals(3, result.size());
+    // Even though 2 entities out of 4 have same created time, one entity
+    // is left out due to limit
+    Assert.assertEquals(3, result.size());
   }
 
   @Test
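The one-character javadoc change in the testAppFlowMappingCsv hunk above satisfies checkstyle's JavadocStyle rule that a comment's first sentence end with a period. The shape of the fix, reduced to a toy example:

  public class JavadocPeriodSketch {
    /** Checks whether commas in the app flow mapping csv are handled. */
    public void testAppFlowMappingCsv() {
      // test body elided
    }
  }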
@@ -474,9 +474,9 @@ public class TestFileSystemTimelineReaderImpl {
     // Get entities based on event filters.
     TimelineFilterList eventFilters = new TimelineFilterList();
     eventFilters.addFilter(
-        new TimelineExistsFilter(TimelineCompareOp.EQUAL,"event_2"));
+        new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_2"));
     eventFilters.addFilter(
-        new TimelineExistsFilter(TimelineCompareOp.EQUAL,"event_4"));
+        new TimelineExistsFilter(TimelineCompareOp.EQUAL, "event_4"));
     result = reader.getEntities(
         new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
         "app", null),
@@ -642,7 +642,7 @@ public class TestFileSystemTimelineReaderImpl {
         new TimelineEntityFilters(null, null, null, null, null, null, null,
         metricFilterList2, null), new TimelineDataToRetrieve());
-      Assert.assertEquals(1, result.size());
+    Assert.assertEquals(1, result.size());
     for (TimelineEntity entity : result) {
       if (!entity.getId().equals("id_1")) {
         Assert.fail("Incorrect filtering based on metric filters");
       }
@@ -757,7 +757,7 @@ public class TestFileSystemTimelineReaderImpl {
         Assert.fail("Incorrect filtering based on info filters");
       }
     }
-  } 
+  }
 
   @Test
   public void testGetEntitiesByRelations() throws Exception {

---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org