From: junping_du@apache.org
To: common-commits@hadoop.apache.org
Subject: hadoop git commit: YARN-4928. Some yarn.server.timeline.* tests fail on Windows attempting to use a test root path containing a colon. Contributed by Gergely Novák.
Date: Mon, 11 Apr 2016 15:48:48 +0000 (UTC)

Repository: hadoop
Updated Branches:
  refs/heads/trunk 1ff27f9d1 -> 08ddb3ac6


YARN-4928. Some yarn.server.timeline.* tests fail on Windows attempting to use a test root path containing a colon. Contributed by Gergely Novák.
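Background for this change (not part of the patch itself): on Windows, java.io.tmpdir expands to a location such as C:\Users\...\Temp, so a test root built directly from that system property carries a drive-letter colon, and that colon is rejected when the tests reuse the string inside the paths they create against the MiniDFSCluster. The patch instead resolves test paths through FileContextTestHelper against the test FileContext. A minimal sketch of the two patterns follows; the class name TestRootPathSketch, its main() method, and the /tmp/TimelineTestSketch root are illustrative only, it assumes the hadoop-common test artifact (which provides FileContextTestHelper) is on the classpath, and it resolves against the local FileContext purely so it is self-contained, whereas the patched tests use the MiniDFSCluster's FileContext.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileContextTestHelper;
import org.apache.hadoop.fs.Path;

public class TestRootPathSketch {
  public static void main(String[] args) throws Exception {
    // Old pattern: the test root comes straight from a system property.
    // On Linux this is typically under /tmp, but on Windows it contains
    // a drive-letter colon (e.g. C:\Users\...\Temp).
    Path oldStyleRoot = new Path(
        System.getProperty("test.build.data",
            System.getProperty("java.io.tmpdir")),
        "TimelineTestSketch");

    // Pattern this commit switches to: build test paths with
    // FileContextTestHelper, resolved against a FileContext, so no local
    // drive-letter colon leaks into the paths the tests create.
    FileContextTestHelper helper =
        new FileContextTestHelper("/tmp/TimelineTestSketch");
    FileContext fc = FileContext.getLocalFSFileContext(new Configuration());
    Path doneDir = helper.getTestRootPath(fc, "done");

    System.out.println("old-style root:    " + oldStyleRoot);
    System.out.println("helper-based path: " + doneDir);
  }
}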
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/08ddb3ac
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/08ddb3ac
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/08ddb3ac

Branch: refs/heads/trunk
Commit: 08ddb3ac6da634c293c638d76b53af163ede3057
Parents: 1ff27f9
Author: Junping Du
Authored: Mon Apr 11 08:48:32 2016 -0700
Committer: Junping Du
Committed: Mon Apr 11 08:48:32 2016 -0700

----------------------------------------------------------------------
 .../TestEntityGroupFSTimelineStore.java   | 55 ++++++++++++--------
 .../yarn/server/timeline/TestLogInfo.java | 28 +++++++---
 2 files changed, 52 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/08ddb3ac/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java
index 3e5bc06..4e491fc 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestEntityGroupFSTimelineStore.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.yarn.server.timeline;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.FileContextTestHelper;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -75,16 +76,16 @@ public class TestEntityGroupFSTimelineStore extends TimelineStoreTestUtils {
       = new Path(System.getProperty("test.build.data",
           System.getProperty("java.io.tmpdir")),
       TestEntityGroupFSTimelineStore.class.getSimpleName());
-  private static final Path TEST_APP_DIR_PATH
-      = new Path(TEST_ROOT_DIR, TEST_APP_DIR_NAME);
-  private static final Path TEST_ATTEMPT_DIR_PATH
-      = new Path(TEST_APP_DIR_PATH, TEST_ATTEMPT_DIR_NAME);
-  private static final Path TEST_DONE_DIR_PATH
-      = new Path(TEST_ROOT_DIR, "done");
+  private static Path testAppDirPath;
+  private static Path testAttemptDirPath;
+  private static Path testDoneDirPath;
 
   private static Configuration config = new YarnConfiguration();
   private static MiniDFSCluster hdfsCluster;
   private static FileSystem fs;
+  private static FileContext fc;
+  private static FileContextTestHelper fileContextTestHelper =
+      new FileContextTestHelper("/tmp/TestEntityGroupFSTimelineStore");
 
   private EntityGroupFSTimelineStore store;
   private TimelineEntity entityNew;
@@ -98,13 +99,17 @@ public class TestEntityGroupFSTimelineStore extends TimelineStoreTestUtils {
         YarnConfiguration
             .TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_SUMMARY_ENTITY_TYPES,
         "YARN_APPLICATION,YARN_APPLICATION_ATTEMPT,YARN_CONTAINER");
-    config.set(YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_DONE_DIR,
-        TEST_DONE_DIR_PATH.toString());
     config.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, TEST_ROOT_DIR.toString());
     HdfsConfiguration hdfsConfig = new HdfsConfiguration();
     hdfsCluster
         = new MiniDFSCluster.Builder(hdfsConfig).numDataNodes(1).build();
     fs = hdfsCluster.getFileSystem();
+    fc = FileContext.getFileContext(hdfsCluster.getURI(0), config);
+    testAppDirPath = getTestRootPath(TEST_APPLICATION_ID.toString());
+    testAttemptDirPath = new Path(testAppDirPath, TEST_ATTEMPT_DIR_NAME);
+    testDoneDirPath = getTestRootPath("done");
+    config.set(YarnConfiguration.TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_DONE_DIR, testDoneDirPath.toString());
+
   }
 
   @Before
@@ -123,7 +128,7 @@ public class TestEntityGroupFSTimelineStore extends TimelineStoreTestUtils {
   @After
   public void tearDown() throws Exception {
     store.stop();
-    fs.delete(TEST_APP_DIR_PATH, true);
+    fs.delete(testAppDirPath, true);
   }
 
   @AfterClass
@@ -137,7 +142,7 @@ public class TestEntityGroupFSTimelineStore extends TimelineStoreTestUtils {
   @Test
   public void testAppLogsScanLogs() throws Exception {
     EntityGroupFSTimelineStore.AppLogs appLogs =
-        store.new AppLogs(TEST_APPLICATION_ID, TEST_APP_DIR_PATH,
+        store.new AppLogs(TEST_APPLICATION_ID, testAppDirPath,
         AppState.COMPLETED);
     appLogs.scanForLogs();
     List summaryLogs = appLogs.getSummaryLogs();
@@ -160,20 +165,20 @@ public class TestEntityGroupFSTimelineStore extends TimelineStoreTestUtils {
   @Test
   public void testMoveToDone() throws Exception {
     EntityGroupFSTimelineStore.AppLogs appLogs =
-        store.new AppLogs(TEST_APPLICATION_ID, TEST_APP_DIR_PATH,
+        store.new AppLogs(TEST_APPLICATION_ID, testAppDirPath,
         AppState.COMPLETED);
     Path pathBefore = appLogs.getAppDirPath();
     appLogs.moveToDone();
     Path pathAfter = appLogs.getAppDirPath();
     assertNotEquals(pathBefore, pathAfter);
-    assertTrue(pathAfter.toString().contains(TEST_DONE_DIR_PATH.toString()));
+    assertTrue(pathAfter.toString().contains(testDoneDirPath.toString()));
   }
 
   @Test
   public void testParseSummaryLogs() throws Exception {
     TimelineDataManager tdm = PluginStoreTestUtils.getTdmWithMemStore(config);
     EntityGroupFSTimelineStore.AppLogs appLogs =
-        store.new AppLogs(TEST_APPLICATION_ID, TEST_APP_DIR_PATH,
+        store.new AppLogs(TEST_APPLICATION_ID, testAppDirPath,
         AppState.COMPLETED);
     appLogs.scanForLogs();
     appLogs.parseSummaryLogs(tdm);
@@ -185,14 +190,14 @@
     // Create test dirs and files
     // Irrelevant file, should not be reclaimed
     Path irrelevantFilePath = new Path(
-        TEST_DONE_DIR_PATH, "irrelevant.log");
+        testDoneDirPath, "irrelevant.log");
     FSDataOutputStream stream = fs.create(irrelevantFilePath);
     stream.close();
     // Irrelevant directory, should not be reclaimed
-    Path irrelevantDirPath = new Path(TEST_DONE_DIR_PATH, "irrelevant");
+    Path irrelevantDirPath = new Path(testDoneDirPath, "irrelevant");
     fs.mkdirs(irrelevantDirPath);
 
-    Path doneAppHomeDir = new Path(new Path(TEST_DONE_DIR_PATH, "0000"), "001");
+    Path doneAppHomeDir = new Path(new Path(testDoneDirPath, "0000"), "001");
     // First application, untouched after creation
     Path appDirClean = new Path(doneAppHomeDir, TEST_APP_DIR_NAME);
     Path attemptDirClean = new Path(appDirClean, TEST_ATTEMPT_DIR_NAME);
@@ -222,7 +227,7 @@ public class TestEntityGroupFSTimelineStore extends TimelineStoreTestUtils {
     fs.mkdirs(dirPathEmpty);
 
     // Should retain all logs after this run
-    store.cleanLogs(TEST_DONE_DIR_PATH, fs, 10000);
+    store.cleanLogs(testDoneDirPath, fs, 10000);
     assertTrue(fs.exists(irrelevantDirPath));
     assertTrue(fs.exists(irrelevantFilePath));
     assertTrue(fs.exists(filePath));
@@ -239,7 +244,7 @@ public class TestEntityGroupFSTimelineStore extends TimelineStoreTestUtils {
 
     // Touch the third application by creating a new dir
     fs.mkdirs(new Path(dirPathHold, "holdByMe"));
-    store.cleanLogs(TEST_DONE_DIR_PATH, fs, 1000);
+    store.cleanLogs(testDoneDirPath, fs, 1000);
 
     // Verification after the second cleaner call
     assertTrue(fs.exists(irrelevantDirPath));
@@ -261,7 +266,7 @@ public class TestEntityGroupFSTimelineStore extends TimelineStoreTestUtils {
         YarnConfiguration.TIMELINE_SERVICE_ENTITY_GROUP_PLUGIN_CLASSES));
     // Load data and cache item, prepare timeline store by making a cache item
     EntityGroupFSTimelineStore.AppLogs appLogs =
-        store.new AppLogs(TEST_APPLICATION_ID, TEST_APP_DIR_PATH,
+        store.new AppLogs(TEST_APPLICATION_ID, testAppDirPath,
         AppState.COMPLETED);
     EntityCacheItem cacheItem = new EntityCacheItem(config, fs);
     cacheItem.setAppLogs(appLogs);
@@ -291,7 +296,7 @@ public class TestEntityGroupFSTimelineStore extends TimelineStoreTestUtils {
   public void testSummaryRead() throws Exception {
     // Load data
     EntityGroupFSTimelineStore.AppLogs appLogs =
-        store.new AppLogs(TEST_APPLICATION_ID, TEST_APP_DIR_PATH,
+        store.new AppLogs(TEST_APPLICATION_ID, testAppDirPath,
         AppState.COMPLETED);
     TimelineDataManager tdm = PluginStoreTestUtils.getTdmWithStore(config,
         store);
@@ -314,7 +319,7 @@ public class TestEntityGroupFSTimelineStore extends TimelineStoreTestUtils {
   private void createTestFiles() throws IOException {
     TimelineEntities entities = PluginStoreTestUtils.generateTestEntities();
     PluginStoreTestUtils.writeEntities(entities,
-        new Path(TEST_ATTEMPT_DIR_PATH, TEST_SUMMARY_LOG_FILE_NAME), fs);
+        new Path(testAttemptDirPath, TEST_SUMMARY_LOG_FILE_NAME), fs);
 
     entityNew = PluginStoreTestUtils
         .createEntity("id_3", "type_3", 789l, null, null,
@@ -322,11 +327,15 @@ public class TestEntityGroupFSTimelineStore extends TimelineStoreTestUtils {
     TimelineEntities entityList = new TimelineEntities();
     entityList.addEntity(entityNew);
     PluginStoreTestUtils.writeEntities(entityList,
-        new Path(TEST_ATTEMPT_DIR_PATH, TEST_ENTITY_LOG_FILE_NAME), fs);
+        new Path(testAttemptDirPath, TEST_ENTITY_LOG_FILE_NAME), fs);
 
     FSDataOutputStream out = fs.create(
-        new Path(TEST_ATTEMPT_DIR_PATH, TEST_DOMAIN_LOG_FILE_NAME));
+        new Path(testAttemptDirPath, TEST_DOMAIN_LOG_FILE_NAME));
     out.close();
   }
 
+  private static Path getTestRootPath(String pathString) {
+    return fileContextTestHelper.getTestRootPath(fc, pathString);
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/08ddb3ac/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLogInfo.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLogInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLogInfo.java
index fa6fcc7..2b49e7b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLogInfo.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLogInfo.java
@@ -18,6 +18,8 @@ package org.apache.hadoop.yarn.server.timeline;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.FileContextTestHelper;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -60,6 +62,8 @@ public class TestLogInfo {
   private Configuration config = new YarnConfiguration();
   private MiniDFSCluster hdfsCluster;
   private FileSystem fs;
+  private FileContext fc;
+  private FileContextTestHelper fileContextTestHelper = new FileContextTestHelper("/tmp/TestLogInfo");
   private ObjectMapper objMapper;
   private JsonFactory jsonFactory = new JsonFactory();
 
@@ -77,7 +81,8 @@ public class TestLogInfo {
     HdfsConfiguration hdfsConfig = new HdfsConfiguration();
     hdfsCluster = new MiniDFSCluster.Builder(hdfsConfig).numDataNodes(1).build();
     fs = hdfsCluster.getFileSystem();
-    Path testAppDirPath = new Path(TEST_ROOT_DIR, TEST_ATTEMPT_DIR_NAME);
+    fc = FileContext.getFileContext(hdfsCluster.getURI(0), config);
+    Path testAppDirPath = getTestRootPath(TEST_ATTEMPT_DIR_NAME);
     fs.mkdirs(testAppDirPath, new FsPermission(FILE_LOG_DIR_PERMISSIONS));
 
     objMapper = PluginStoreTestUtils.createObjectMapper();
@@ -146,7 +151,7 @@ public class TestLogInfo {
     EntityLogInfo testLogInfo = new EntityLogInfo(TEST_ATTEMPT_DIR_NAME,
         TEST_ENTITY_FILE_NAME,
         UserGroupInformation.getLoginUser().getUserName());
-    testLogInfo.parseForStore(tdm, TEST_ROOT_DIR, true, jsonFactory, objMapper,
+    testLogInfo.parseForStore(tdm, getTestRootPath(), true, jsonFactory, objMapper,
         fs);
     // Verify for the first batch
     PluginStoreTestUtils.verifyTestEntities(tdm);
@@ -157,9 +162,8 @@ public class TestLogInfo {
     TimelineEntities entityList = new TimelineEntities();
     entityList.addEntity(entityNew);
     writeEntitiesLeaveOpen(entityList,
-        new Path(new Path(TEST_ROOT_DIR, TEST_ATTEMPT_DIR_NAME),
-            TEST_ENTITY_FILE_NAME));
-    testLogInfo.parseForStore(tdm, TEST_ROOT_DIR, true, jsonFactory, objMapper,
+        new Path(getTestRootPath(TEST_ATTEMPT_DIR_NAME), TEST_ENTITY_FILE_NAME));
+    testLogInfo.parseForStore(tdm, getTestRootPath(), true, jsonFactory, objMapper,
         fs);
     // Verify the newly added data
     TimelineEntity entity3 = tdm.getEntity(entityNew.getEntityType(),
@@ -182,9 +186,9 @@ public class TestLogInfo {
         TEST_BROKEN_FILE_NAME,
         UserGroupInformation.getLoginUser().getUserName());
     // Try parse, should not fail
-    testLogInfo.parseForStore(tdm, TEST_ROOT_DIR, true, jsonFactory, objMapper,
+    testLogInfo.parseForStore(tdm, getTestRootPath(), true, jsonFactory, objMapper,
         fs);
-    domainLogInfo.parseForStore(tdm, TEST_ROOT_DIR, true, jsonFactory, objMapper,
+    domainLogInfo.parseForStore(tdm, getTestRootPath(), true, jsonFactory, objMapper,
         fs);
     tdm.close();
   }
@@ -196,7 +200,7 @@ public class TestLogInfo {
     DomainLogInfo domainLogInfo = new DomainLogInfo(TEST_ATTEMPT_DIR_NAME,
         TEST_DOMAIN_FILE_NAME,
         UserGroupInformation.getLoginUser().getUserName());
-    domainLogInfo.parseForStore(tdm, TEST_ROOT_DIR, true, jsonFactory, objMapper,
+    domainLogInfo.parseForStore(tdm, getTestRootPath(), true, jsonFactory, objMapper,
         fs);
     // Verify domain data
     TimelineDomain resultDomain = tdm.getDomain("domain_1",
@@ -250,4 +254,12 @@
     outStreamDomain.hflush();
   }
 
+  private Path getTestRootPath() {
+    return fileContextTestHelper.getTestRootPath(fc);
+  }
+
+  private Path getTestRootPath(String pathString) {
+    return fileContextTestHelper.getTestRootPath(fc, pathString);
+  }
+
 }