From: rkanter@apache.org
To: common-commits@hadoop.apache.org
Reply-To: common-dev@hadoop.apache.org
Subject: hadoop git commit: YARN-4701. When task logs are not available, port 8041 is referenced instead of port 8042 (haibochen via rkanter)
Date: Thu, 25 Feb 2016 20:48:14 +0000 (UTC)

Repository: hadoop
Updated Branches:
  refs/heads/trunk 8808779db -> c4d4df8de


YARN-4701. When task logs are not available, port 8041 is referenced instead of port 8042 (haibochen via rkanter)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c4d4df8d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c4d4df8d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c4d4df8d

Branch: refs/heads/trunk
Commit: c4d4df8de09ee0c89ea8176bd8149900becd3c0c
Parents: 8808779
Author: Robert Kanter
Authored: Thu Feb 25 12:48:02 2016 -0800
Committer: Robert Kanter
Committed: Thu Feb 25 12:48:02 2016 -0800

----------------------------------------------------------------------
 .../v2/hs/webapp/dao/AMAttemptInfo.java         |  4 +--
 .../v2/hs/webapp/TestHsWebServicesJobs.java     | 11 +++----
 hadoop-yarn-project/CHANGES.txt                 |  3 +++
 .../yarn/webapp/log/AggregatedLogsBlock.java    | 27 ++++++++++++++++++++
 .../logaggregation/TestAggregatedLogsBlock.java | 20 +++++++++++++++
 5 files changed, 56 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
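Context for the change: the log links touched by this patch were being built from the NodeManager's NodeId, i.e. its RPC address (yarn.nodemanager.address; port 8041 in the setup reported on the JIRA), while the NM web UI that actually serves container logs listens on the HTTP address (yarn.nodemanager.webapp.address, port 8042 by default). A minimal sketch of the distinction, not part of the patch itself; the host name and RPC port below are made up:

    import org.apache.hadoop.yarn.api.records.NodeId;

    public class NodeAddressSketch {
      public static void main(String[] args) {
        // NodeId = host + RPC port; its toString() is what the old code put in log links.
        NodeId nodeId = NodeId.newInstance("nm-host.example.com", 8041);

        // The NM web UI listens on the HTTP port (8042 by default) and is exposed
        // to clients as "nodeHttpAddress".
        String nodeHttpAddress = "nm-host.example.com:8042";

        // Old link target: the RPC port, where no web UI answers.
        System.out.println("http://" + nodeId + "/node/containerlogs/...");
        // New link target: the web UI port, where container logs are served.
        System.out.println("http://" + nodeHttpAddress + "/node/containerlogs/...");
      }
    }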
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c4d4df8d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java
index 8cd0a6f..065f8c7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java
@@ -68,8 +68,8 @@ public class AMAttemptInfo {
     if (containerId != null) {
       this.containerId = containerId.toString();
       this.logsLink = join(host, pathPrefix,
-          ujoin("logs", this.nodeId, this.containerId, jobId, user));
-      this.shortLogsLink = ujoin("logs", this.nodeId, this.containerId,
+          ujoin("logs", this.nodeHttpAddress, this.containerId, jobId, user));
+      this.shortLogsLink = ujoin("logs", this.nodeHttpAddress, this.containerId,
           jobId, user);
     }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c4d4df8d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java
index 2659443..fc67c1f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java
@@ -766,7 +766,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
     for (int i = 0; i < attempts.length(); i++) {
       JSONObject attempt = attempts.getJSONObject(i);
       verifyHsJobAttemptsGeneric(job, attempt.getString("nodeHttpAddress"),
-          attempt.getString("nodeId"), attempt.getInt("id"),
+          attempt.getInt("id"),
           attempt.getLong("startTime"), attempt.getString("containerId"),
           attempt.getString("logsLink"));
     }
@@ -779,7 +779,6 @@ public class TestHsWebServicesJobs extends JerseyTest {
       Element element = (Element) nodes.item(i);
       verifyHsJobAttemptsGeneric(job,
           WebServicesTestUtils.getXmlString(element, "nodeHttpAddress"),
-          WebServicesTestUtils.getXmlString(element, "nodeId"),
           WebServicesTestUtils.getXmlInt(element, "id"),
           WebServicesTestUtils.getXmlLong(element, "startTime"),
           WebServicesTestUtils.getXmlString(element, "containerId"),
@@ -788,7 +787,7 @@
   }

   public void verifyHsJobAttemptsGeneric(Job job, String nodeHttpAddress,
-      String nodeId, int id, long startTime, String containerId, String logsLink) {
+      int id, long startTime, String containerId, String logsLink) {
     boolean attemptFound = false;
     for (AMInfo amInfo : job.getAMInfos()) {
       if (amInfo.getAppAttemptId().getAttemptId() == id) {
@@ -798,16 +797,14 @@
         int nmPort = amInfo.getNodeManagerPort();
         WebServicesTestUtils.checkStringMatch("nodeHttpAddress",
             nmHost + ":" + nmHttpPort, nodeHttpAddress);
-        WebServicesTestUtils.checkStringMatch("nodeId",
-            NodeId.newInstance(nmHost, nmPort).toString(), nodeId);
         assertTrue("startime not greater than 0", startTime > 0);
         WebServicesTestUtils.checkStringMatch("containerId", amInfo
             .getContainerId().toString(), containerId);

         String localLogsLink = join(
             "hsmockwebapp",
-            ujoin("logs", nodeId, containerId, MRApps.toString(job.getID()),
-                job.getUserName()));
+            ujoin("logs", nodeHttpAddress, containerId,
+                MRApps.toString(job.getID()), job.getUserName()));

         assertTrue("logsLink", logsLink.contains(localLogsLink));
       }
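With the AMAttemptInfo change, the history server's per-attempt logsLink is assembled from nodeHttpAddress instead of the NodeId, so the link lands on the NM web UI port. Roughly the resulting link shape (illustrative only; the JobHistoryServer host, ports and IDs below are made up, and the exact path segments come from the join/ujoin helpers):

    public class LogsLinkShapeSketch {
      public static void main(String[] args) {
        // Before: built from the NodeId, so the link carried the NM's RPC port.
        String before = "http://jhs-host:19888/jobhistory/logs/"
            + "nm-host:8041/container_1456432345_0001_01_000001/"
            + "job_1456432345_0001/hadoopuser";
        // After: built from nodeHttpAddress, so it reaches the NM web UI port.
        String after = before.replace("nm-host:8041", "nm-host:8042");
        System.out.println(before);
        System.out.println(after);
      }
    }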
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c4d4df8d/hadoop-yarn-project/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 7f26f8e..14837a8 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -230,6 +230,9 @@ Release 2.9.0 - UNRELEASED
     YARN-4729. SchedulerApplicationAttempt#getTotalRequiredResources can
     throw an NPE. (kasha)

+    YARN-4701. When task logs are not available, port 8041 is referenced
+    instead of port 8042 (haibochen via rkanter)
+
 Release 2.8.0 - UNRELEASED

   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c4d4df8d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java
index 69fc347..2fc8dfc 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java
@@ -80,11 +80,17 @@ public class AggregatedLogsBlock extends HtmlBlock {
       logEntity = containerId.toString();
     }

+    String nmApplicationLogUrl = getApplicationLogURL(applicationId);
     if (!conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED,
         YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED)) {
       html.h1()
           ._("Aggregation is not enabled. Try the nodemanager at " + nodeId)
           ._();
+      if(nmApplicationLogUrl != null) {
+        html.h1()
+            ._("Or see application log at " + nmApplicationLogUrl)
+            ._();
+      }
       return;
     }

@@ -107,6 +113,11 @@
           ._("Logs not available for " + logEntity
               + ". Aggregation may not be complete, "
               + "Check back later or try the nodemanager at " + nodeId)._();
+      if(nmApplicationLogUrl != null) {
+        html.h1()
+            ._("Or see application log at " + nmApplicationLogUrl)
+            ._();
+      }
       return;
     } catch (Exception ex) {
       html.h1()
@@ -353,4 +364,20 @@
     limits.end = end;
     return limits;
   }
+
+  private String getApplicationLogURL(ApplicationId applicationId) {
+    String appId = applicationId.toString();
+    if (appId == null || appId.isEmpty()) {
+      return null;
+    }
+    String nodeId = $(NM_NODENAME);
+    if(nodeId == null || nodeId.isEmpty()) {
+      return null;
+    }
+    StringBuilder sb = new StringBuilder();
+    String scheme = YarnConfiguration.useHttps(this.conf) ? "https://":
+        "http://";
+    sb.append(scheme).append(nodeId).append("/node/application/").append(appId);
+    return sb.toString();
+  }
 }
\ No newline at end of file
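The new getApplicationLogURL() helper gives the aggregated-logs page a fallback link to the NM web UI's per-application page when aggregated logs cannot be shown. A standalone sketch of the same computation; the real method reads the node name from the block's NM_NODENAME parameter and the scheme from YarnConfiguration.useHttps(conf), and the address and application id below are made up:

    public class NmAppLogUrlSketch {
      // Mirrors what the patched AggregatedLogsBlock builds, with plain parameters.
      static String applicationLogUrl(String nodeHttpAddress, String appId, boolean https) {
        if (nodeHttpAddress == null || nodeHttpAddress.isEmpty()
            || appId == null || appId.isEmpty()) {
          return null;
        }
        String scheme = https ? "https://" : "http://";
        return scheme + nodeHttpAddress + "/node/application/" + appId;
      }

      public static void main(String[] args) {
        // -> http://nm-host:8042/node/application/application_1456432345_0001
        System.out.println(
            applicationLogUrl("nm-host:8042", "application_1456432345_0001", false));
      }
    }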
"https://": + "http://"; + sb.append(scheme).append(nodeId).append("/node/application/").append(appId); + return sb.toString(); + } } \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/c4d4df8d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java index 798406d..594f186 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java @@ -88,6 +88,26 @@ public class TestAggregatedLogsBlock { } + @Test + public void testBlockContainsPortNumForUnavailableAppLog() { + FileUtil.fullyDelete(new File("target/logs")); + Configuration configuration = getConfiguration(); + + String nodeName = configuration.get(YarnConfiguration.NM_WEBAPP_ADDRESS, + YarnConfiguration.DEFAULT_NM_WEBAPP_ADDRESS); + AggregatedLogsBlockForTest aggregatedBlock = getAggregatedLogsBlockForTest( + configuration, "admin", "container_0_0001_01_000001", nodeName); + ByteArrayOutputStream data = new ByteArrayOutputStream(); + PrintWriter printWriter = new PrintWriter(data); + HtmlBlock html = new HtmlBlockForTest(); + HtmlBlock.Block block = new BlockForTest(html, printWriter, 10, false); + aggregatedBlock.render(block); + + block.getWriter().flush(); + String out = data.toString(); + assertTrue(out.contains(nodeName)); + } + /** * try to read bad logs *