From common-commits-return-93672-archive-asf-public=cust-asf.ponee.io@hadoop.apache.org Wed Mar 13 04:33:32 2019
Mailing-List: contact common-commits-help@hadoop.apache.org; run by ezmlm
Delivered-To: mailing list common-commits@hadoop.apache.org
Date: Wed, 13 Mar 2019 04:33:29 +0000
To: "common-commits@hadoop.apache.org"
Subject: [hadoop] branch trunk updated: YARN-9338 Timeline related testcases are failing. Contributed by Abhishek Modi.
MIME-Version: 1.0
Content-Type: text/plain; charset=utf-8
Content-Transfer-Encoding: 8bit
Message-ID: <155245160937.30807.8150419716985140501@gitbox.apache.org>
From: vrushali@apache.org
X-Git-Host: gitbox.apache.org
X-Git-Repo: hadoop
X-Git-Refname: refs/heads/trunk
X-Git-Reftype: branch
X-Git-Oldrev: 67cc24a7a429edd6132af6c4fd763e23fda29947
X-Git-Newrev: 17a3e14d25877af90ef6655750ce2b035c2982b5
X-Git-Rev: 17a3e14d25877af90ef6655750ce2b035c2982b5
X-Git-NotificationType: ref_changed_plus_diff
X-Git-Multimail-Version: 1.5.dev
Auto-Submitted: auto-generated

This is an automated email from the ASF dual-hosted git repository.

vrushali pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git

The following commit(s) were added to refs/heads/trunk by this push:
     new 17a3e14  YARN-9338 Timeline related testcases are failing. Contributed by Abhishek Modi.
17a3e14 is described below

commit 17a3e14d25877af90ef6655750ce2b035c2982b5
Author:     Vrushali C
AuthorDate: Tue Mar 12 21:33:17 2019 -0700

    YARN-9338 Timeline related testcases are failing. Contributed by Abhishek Modi.
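
In brief, the patch pins the test filesystem in TestTimelineAuthFilterForV2 to RawLocalFileSystem, collapses the per-level Path/mkdirs chain in FileSystemTimelineWriterImpl#write into a single relative path with one mkdirs call, and reworks writeInternal so a later write for the same entity carries the existing file contents forward instead of overwriting them. A minimal, self-contained sketch of the relative layout the patched write() builds follows; all values in it are illustrative placeholders, and the escape() handling of flow name and version is omitted.

    import java.io.File;

    // Sketch only: mirrors how the patched write() composes one relative path
    // string instead of nesting seven Path objects. All values are placeholders.
    public class EntityPathSketch {
      public static void main(String[] args) {
        String clusterId = "cluster_id";
        String userId = "user_id";
        String flowName = "flow_name";
        String flowVersion = "flow_version";
        long flowRun = 12345678L;
        String appId = "app_id";
        String entityType = "app";

        String entityTypePathStr = clusterId + File.separator + userId
            + File.separator + flowName + File.separator + flowVersion
            + File.separator + flowRun + File.separator + appId
            + File.separator + entityType;

        // Prints: cluster_id/user_id/flow_name/flow_version/12345678/app_id/app
        System.out.println(entityTypePathStr);
      }
    }

The new tests further down assert against exactly this kind of path, joined under the configured storage root plus the entity id and the writer's file extension.
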
---
 .../security/TestTimelineAuthFilterForV2.java      |   4 +
 .../storage/FileSystemTimelineWriterImpl.java      |  30 +++---
 .../storage/TestFileSystemTimelineWriterImpl.java  | 119 +++++++++++++++
 3 files changed, 139 insertions(+), 14 deletions(-)

diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
index c353cf0..95a008a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
@@ -44,7 +44,9 @@ import java.util.concurrent.Callable;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.RawLocalFileSystem;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.minikdc.MiniKdc;
@@ -144,6 +146,8 @@ public class TestTimelineAuthFilterForV2 {
     // Setup timeline service v2.
     try {
       conf = new Configuration(false);
+      conf.setClass("fs.file.impl", RawLocalFileSystem.class,
+          FileSystem.class);
       conf.setStrings(TimelineAuthenticationFilterInitializer.PREFIX + "type",
           "kerberos");
       conf.set(TimelineAuthenticationFilterInitializer.PREFIX +
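
The setClass line above binds the "file" scheme to RawLocalFileSystem, so the test's local writes bypass the checksumming LocalFileSystem and its .crc side files. A standalone sketch of the same binding (the URI and the print statement are illustrative, not taken from the test):

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.RawLocalFileSystem;

    // Standalone sketch: force file:// URIs to resolve to RawLocalFileSystem
    // so local writes skip client-side checksum (.crc) files.
    public class RawLocalFsSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.setClass("fs.file.impl", RawLocalFileSystem.class, FileSystem.class);
        FileSystem fs = FileSystem.get(URI.create("file:///"), conf);
        System.out.println(fs.getClass().getName());
        // Expected: org.apache.hadoop.fs.RawLocalFileSystem
      }
    }
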
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineWriterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineWriterImpl.java
index c284f8f..023d496 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineWriterImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineWriterImpl.java
@@ -24,6 +24,7 @@ import java.io.IOException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -78,6 +79,7 @@ public class FileSystemTimelineWriterImpl extends AbstractService
   private int fsNumRetries;
   private long fsRetryInterval;
   private Path entitiesPath;
+  private Configuration config;
 
   /** default value for storage location on local disk. */
   private static final String STORAGE_DIR_ROOT = "timeline_service_data";
@@ -122,17 +124,13 @@ public class FileSystemTimelineWriterImpl extends AbstractService
       TimelineEntity entity, TimelineWriteResponse response)
       throws IOException {
-    Path clusterIdPath = new Path(entitiesPath, clusterId);
-    Path userIdPath = new Path(clusterIdPath, userId);
-    Path flowNamePath = new Path(userIdPath, escape(flowName));
-    Path flowVersionPath = new Path(flowNamePath, escape(flowVersion));
-    Path flowRunPath = new Path(flowVersionPath, String.valueOf(flowRun));
-    Path appIdPath = new Path(flowRunPath, appId);
-    Path entityTypePath = new Path(appIdPath, entity.getType());
+    String entityTypePathStr = clusterId + File.separator + userId +
+        File.separator + escape(flowName) + File.separator +
+        escape(flowVersion) + File.separator + flowRun + File.separator + appId +
+        File.separator + entity.getType();
+    Path entityTypePath = new Path(entitiesPath, entityTypePathStr);
     try {
-      mkdirs(rootPath, entitiesPath, clusterIdPath, userIdPath,
-          flowNamePath, flowVersionPath, flowRunPath, appIdPath,
-          entityTypePath);
+      mkdirs(entityTypePath);
       Path filePath = new Path(entityTypePath,
           entity.getId() + TIMELINE_SERVICE_STORAGE_EXTENSION);
@@ -181,7 +179,8 @@ public class FileSystemTimelineWriterImpl extends AbstractService
         DEFAULT_TIMELINE_FS_WRITER_NUM_RETRIES);
     fsRetryInterval = conf.getLong(TIMELINE_FS_WRITER_RETRY_INTERVAL_MS,
         DEFAULT_TIMELINE_FS_WRITER_RETRY_INTERVAL_MS);
-    fs = rootPath.getFileSystem(getConfig());
+    config = conf;
+    fs = rootPath.getFileSystem(config);
   }
 
   @Override
@@ -285,12 +284,15 @@ public class FileSystemTimelineWriterImpl extends AbstractService
     // final status.
     try {
       fsOut = fs.create(tempPath, true);
+      FSDataInputStream fsIn = fs.open(outputPath);
+      IOUtils.copyBytes(fsIn, fsOut, config, false);
+      fsIn.close();
+      fs.delete(outputPath, false);
       fsOut.write(data);
       fsOut.close();
-      fsOut = null;
       fs.rename(tempPath, outputPath);
-    } finally {
-      IOUtils.cleanupWithLogger(LOG, fsOut);
+    } catch (IOException ie) {
+      LOG.error("Got an exception while writing file", ie);
     }
   }
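
The writeInternal change above is what lets consecutive writes for the same entity accumulate in one file: the existing bytes are copied into the temp file first, the new record is written after them, and the temp file is renamed over the original. A compact sketch of that pattern in isolation (class and method names here are illustrative, and an existence check is added because this sketch, unlike the writer, does not pre-create the output file):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IOUtils;

    // Sketch of the copy-then-rename append: old contents go into the temp
    // file, the new record follows, and the temp file replaces the original.
    public final class AppendViaTempFile {
      private AppendViaTempFile() {
      }

      public static void append(FileSystem fs, Path outputPath, Path tempPath,
          byte[] data, Configuration conf) throws IOException {
        FSDataOutputStream out = fs.create(tempPath, true);
        if (fs.exists(outputPath)) {
          try (FSDataInputStream in = fs.open(outputPath)) {
            // false = leave both streams open; we still need to write to out.
            IOUtils.copyBytes(in, out, conf, false);
          }
          fs.delete(outputPath, false);
        }
        out.write(data);
        out.close();
        fs.rename(tempPath, outputPath);
      }
    }

Every append therefore rewrites the whole file through the temp copy.
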
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineWriterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineWriterImpl.java
index 4073b85..b880b9a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineWriterImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineWriterImpl.java
@@ -146,6 +146,125 @@ public class TestFileSystemTimelineWriterImpl {
     }
   }
 
+  @Test
+  public void testWriteMultipleEntities() throws Exception {
+    String id = "appId";
+    String type = "app";
+
+    TimelineEntities te1 = new TimelineEntities();
+    TimelineEntity entity = new TimelineEntity();
+    entity.setId(id);
+    entity.setType(type);
+    entity.setCreatedTime(1425016501000L);
+    te1.addEntity(entity);
+
+    TimelineEntities te2 = new TimelineEntities();
+    TimelineEntity entity2 = new TimelineEntity();
+    entity2.setId(id);
+    entity2.setType(type);
+    entity2.setCreatedTime(1425016503000L);
+    te2.addEntity(entity2);
+
+    FileSystemTimelineWriterImpl fsi = null;
+    try {
+      fsi = new FileSystemTimelineWriterImpl();
+      Configuration conf = new YarnConfiguration();
+      String outputRoot = tmpFolder.newFolder().getAbsolutePath();
+      conf.set(FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_DIR_ROOT,
+          outputRoot);
+      fsi.init(conf);
+      fsi.start();
+      fsi.write(
+          new TimelineCollectorContext("cluster_id", "user_id", "flow_name",
+              "flow_version", 12345678L, "app_id"),
+          te1, UserGroupInformation.createRemoteUser("user_id"));
+      fsi.write(
+          new TimelineCollectorContext("cluster_id", "user_id", "flow_name",
+              "flow_version", 12345678L, "app_id"),
+          te2, UserGroupInformation.createRemoteUser("user_id"));
+
+      String fileName = outputRoot + File.separator + "entities" +
+          File.separator + "cluster_id" + File.separator + "user_id" +
+          File.separator + "flow_name" + File.separator + "flow_version" +
+          File.separator + "12345678" + File.separator + "app_id" +
+          File.separator + type + File.separator + id +
+          FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
+      Path path = new Path(fileName);
+      FileSystem fs = FileSystem.get(conf);
+      assertTrue("Specified path(" + fileName + ") should exist: ",
+          fs.exists(path));
+      FileStatus fileStatus = fs.getFileStatus(path);
+      assertTrue("Specified path should be a file",
+          !fileStatus.isDirectory());
+      List<String> data = readFromFile(fs, path);
+      assertTrue("data size is:" + data.size(), data.size() == 3);
+      String d = data.get(0);
+      // confirm the contents same as what was written
+      assertEquals(d, TimelineUtils.dumpTimelineRecordtoJSON(entity));
+
+
+      String metricToString = data.get(1);
+      // confirm the contents same as what was written
+      assertEquals(metricToString,
+          TimelineUtils.dumpTimelineRecordtoJSON(entity2));
+    } finally {
+      if (fsi != null) {
+        fsi.close();
+      }
+    }
+  }
+
+  @Test
+  public void testWriteEntitiesWithEmptyFlowName() throws Exception {
+    String id = "appId";
+    String type = "app";
+
+    TimelineEntities te = new TimelineEntities();
+    TimelineEntity entity = new TimelineEntity();
+    entity.setId(id);
+    entity.setType(type);
+    entity.setCreatedTime(1425016501000L);
+    te.addEntity(entity);
+
+    FileSystemTimelineWriterImpl fsi = null;
+    try {
+      fsi = new FileSystemTimelineWriterImpl();
+      Configuration conf = new YarnConfiguration();
+      String outputRoot = tmpFolder.newFolder().getAbsolutePath();
+      conf.set(FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_DIR_ROOT,
+          outputRoot);
+      fsi.init(conf);
+      fsi.start();
+      fsi.write(
+          new TimelineCollectorContext("cluster_id", "user_id", "",
+              "flow_version", 12345678L, "app_id"),
+          te, UserGroupInformation.createRemoteUser("user_id"));
+
+      String fileName = outputRoot + File.separator + "entities" +
+          File.separator + "cluster_id" + File.separator + "user_id" +
+          File.separator + "" + File.separator + "flow_version" +
+          File.separator + "12345678" + File.separator + "app_id" +
+          File.separator + type + File.separator + id +
+          FileSystemTimelineWriterImpl.TIMELINE_SERVICE_STORAGE_EXTENSION;
+      Path path = new Path(fileName);
+      FileSystem fs = FileSystem.get(conf);
+      assertTrue("Specified path(" + fileName + ") should exist: ",
+          fs.exists(path));
+      FileStatus fileStatus = fs.getFileStatus(path);
+      assertTrue("Specified path should be a file",
+          !fileStatus.isDirectory());
+      List<String> data = readFromFile(fs, path);
+      assertTrue("data size is:" + data.size(), data.size() == 2);
+      String d = data.get(0);
+      // confirm the contents same as what was written
+      assertEquals(d, TimelineUtils.dumpTimelineRecordtoJSON(entity));
+    } finally {
+      if (fsi != null) {
+        fsi.close();
+      }
+    }
+  }
+
   private List<String> readFromFile(FileSystem fs, Path path)
       throws IOException {
     BufferedReader br = new BufferedReader(

---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org