From: xyao@apache.org
To: common-commits@hadoop.apache.org
Date: Tue, 12 Dec 2017 23:59:24 -0000
Message-Id: <9b36aa6a2bb440ca9f4ec9529625ea48@git.apache.org>
X-Mailer: ASF-Git Admin Mailer
Subject: [49/50] hadoop git commit: YARN-7595. Container launching code suppresses close exceptions after writes. Contributed by Jim Brennan

YARN-7595. Container launching code suppresses close exceptions after writes.
Contributed by Jim Brennan

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2abab1d7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2abab1d7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2abab1d7

Branch: refs/heads/HDFS-7240
Commit: 2abab1d7c53e64c160384fd5a3ac4cd8ffa57af4
Parents: 3ebe6a7
Author: Jason Lowe
Authored: Tue Dec 12 16:04:15 2017 -0600
Committer: Jason Lowe
Committed: Tue Dec 12 16:04:15 2017 -0600

----------------------------------------------------------------------
 .../nodemanager/DefaultContainerExecutor.java   | 25 +++------
 .../launcher/ContainerLaunch.java               | 54 +++++++++-----------
 .../JavaSandboxLinuxContainerRuntime.java       | 17 +++---
 3 files changed, 41 insertions(+), 55 deletions(-)
----------------------------------------------------------------------
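
The problem being fixed: the old launch paths opened a stream, wrote a script or token file, and then closed the stream in a finally block via a helper that logs and swallows any exception from close(). Because the final flush happens during close(), a script that never fully reached disk could still be reported as written successfully. The patch switches each writer to try-with-resources, which closes the stream automatically and lets an IOException from close() propagate. A minimal sketch of the two shapes, using plain java.io streams rather than the NodeManager's FileContext API; the method names and path below are illustrative, not taken from the patch:

    import java.io.DataOutputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;

    public class CloseExceptionSketch {

      // Old shape: close() failures are swallowed, so a script that was never
      // fully flushed to disk can still look like a successful write.
      static void writeSuppressingClose(String dst) throws IOException {
        DataOutputStream out = null;
        try {
          out = new DataOutputStream(new FileOutputStream(dst));
          out.writeBytes("#!/bin/bash\n");
        } finally {
          if (out != null) {
            try {
              out.close();           // the final flush happens here
            } catch (IOException e) {
              // logged and dropped in the old code path
            }
          }
        }
      }

      // New shape: try-with-resources closes the stream automatically and lets
      // an IOException thrown by close() propagate to the caller.
      static void writePropagatingClose(String dst) throws IOException {
        try (DataOutputStream out =
                 new DataOutputStream(new FileOutputStream(dst))) {
          out.writeBytes("#!/bin/bash\n");
        }
      }

      public static void main(String[] args) throws IOException {
        writePropagatingClose("launch_container.sh.tmp");
      }
    }

If the body throws first, the stream is still closed and a failing close() is attached to the primary exception as a suppressed exception rather than replacing it.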

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2abab1d7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java
index 5d78f9d..5772403 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java
@@ -42,12 +42,10 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.UnsupportedFileSystemException;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell.CommandExecutor;
 import org.apache.hadoop.util.Shell.ExitCodeException;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -414,15 +412,11 @@ public class DefaultContainerExecutor extends ContainerExecutor {
    */
   public void writeLocalWrapperScript(Path launchDst, Path pidFile)
       throws IOException {
-    DataOutputStream out = null;
-    PrintStream pout = null;
-
-    try {
-      out = lfs.create(wrapperScriptPath, EnumSet.of(CREATE, OVERWRITE));
-      pout = new PrintStream(out, false, "UTF-8");
+    try (DataOutputStream out =
+        lfs.create(wrapperScriptPath, EnumSet.of(CREATE, OVERWRITE));
+        PrintStream pout =
+            new PrintStream(out, false, "UTF-8")) {
       writeLocalWrapperScript(launchDst, pidFile, pout);
-    } finally {
-      IOUtils.cleanupWithLogger(LOG, pout, out);
     }
   }
 
@@ -489,11 +483,10 @@ public class DefaultContainerExecutor extends ContainerExecutor {
 
   private void writeSessionScript(Path launchDst, Path pidFile)
       throws IOException {
-    DataOutputStream out = null;
-    PrintStream pout = null;
-    try {
-      out = lfs.create(sessionScriptPath, EnumSet.of(CREATE, OVERWRITE));
-      pout = new PrintStream(out, false, "UTF-8");
+    try (DataOutputStream out =
+        lfs.create(sessionScriptPath, EnumSet.of(CREATE, OVERWRITE));
+        PrintStream pout =
+            new PrintStream(out, false, "UTF-8")) {
       // We need to do a move as writing to a file is not atomic
       // Process reading a file being written to may get garbled data
       // hence write pid to tmp file first followed by a mv
@@ -503,8 +496,6 @@ public class DefaultContainerExecutor extends ContainerExecutor {
       pout.println("/bin/mv -f " + pidFile.toString() + ".tmp " + pidFile);
       String exec = Shell.isSetsidAvailable? "exec setsid" : "exec";
       pout.printf("%s /bin/bash \"%s\"", exec, launchDst.toUri().getPath());
-    } finally {
-      IOUtils.cleanupWithLogger(LOG, pout, out);
     }
     lfs.setPermission(sessionScriptPath,
         ContainerExecutor.TASK_LAUNCH_SCRIPT_PERMISSION);
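
Both writeLocalWrapperScript and writeSessionScript now declare the DataOutputStream and the PrintStream in a single try-with-resources header. Resources declared this way are closed in reverse order of declaration, so the PrintStream is flushed and closed before the underlying stream, matching the order of the old cleanupWithLogger(LOG, pout, out) call but without discarding failures. A small self-contained sketch of that ordering, using an in-memory stream in place of the FileContext-created one:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.io.PrintStream;

    public class MultiResourceOrderSketch {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        // Two resources in one header: closed in reverse declaration order,
        // pout first and then out, mirroring the wrapper/session script writers.
        try (DataOutputStream out = new DataOutputStream(buffer);
             PrintStream pout = new PrintStream(out, false, "UTF-8")) {
          pout.println("echo hello");
        }  // pout.close() flushes into out, then out.close() runs
        System.out.println(buffer.toString("UTF-8"));
      }
    }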

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2abab1d7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java
index f1c826e..db90215 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java
@@ -220,15 +220,13 @@ public class ContainerLaunch implements Callable {
           containerIdStr));
       Path nmPrivateClasspathJarDir = dirsHandler.getLocalPathForWrite(
           getContainerPrivateDir(appIdStr, containerIdStr));
-      DataOutputStream containerScriptOutStream = null;
-      DataOutputStream tokensOutStream = null;
 
       // Select the working directory for the container
       Path containerWorkDir = deriveContainerWorkDir();
       recordContainerWorkDir(containerID, containerWorkDir.toString());
 
       String pidFileSubpath = getPidFileSubpath(appIdStr, containerIdStr);
-      // pid file should be in nm private dir so that it is not 
+      // pid file should be in nm private dir so that it is not
       // accessible by users
       pidFilePath = dirsHandler.getLocalPathForWrite(pidFileSubpath);
       List localDirs = dirsHandler.getLocalDirs();
@@ -243,24 +241,24 @@ public class ContainerLaunch implements Callable {
         throw new IOException("Most of the disks failed. "
             + dirsHandler.getDisksHealthReport(false));
       }
-      try {
-        // /////////// Write out the container-script in the nmPrivate space.
-        List appDirs = new ArrayList(localDirs.size());
-        for (String localDir : localDirs) {
-          Path usersdir = new Path(localDir, ContainerLocalizer.USERCACHE);
-          Path userdir = new Path(usersdir, user);
-          Path appsdir = new Path(userdir, ContainerLocalizer.APPCACHE);
-          appDirs.add(new Path(appsdir, appIdStr));
-        }
-        containerScriptOutStream =
-          lfs.create(nmPrivateContainerScriptPath,
-              EnumSet.of(CREATE, OVERWRITE));
-
-        // Set the token location too.
-        environment.put(
-            ApplicationConstants.CONTAINER_TOKEN_FILE_ENV_NAME,
-            new Path(containerWorkDir,
-                FINAL_CONTAINER_TOKENS_FILE).toUri().getPath());
+      List appDirs = new ArrayList(localDirs.size());
+      for (String localDir : localDirs) {
+        Path usersdir = new Path(localDir, ContainerLocalizer.USERCACHE);
+        Path userdir = new Path(usersdir, user);
+        Path appsdir = new Path(userdir, ContainerLocalizer.APPCACHE);
+        appDirs.add(new Path(appsdir, appIdStr));
+      }
+
+      // Set the token location too.
+      environment.put(
+          ApplicationConstants.CONTAINER_TOKEN_FILE_ENV_NAME,
+          new Path(containerWorkDir,
+              FINAL_CONTAINER_TOKENS_FILE).toUri().getPath());
+
+      // /////////// Write out the container-script in the nmPrivate space.
+      try (DataOutputStream containerScriptOutStream =
+          lfs.create(nmPrivateContainerScriptPath,
+              EnumSet.of(CREATE, OVERWRITE))) {
         // Sanitize the container's environment
         sanitizeEnv(environment, containerWorkDir, appDirs, userLocalDirs,
             containerLogDirs, localResources, nmPrivateClasspathJarDir);
@@ -271,18 +269,16 @@ public class ContainerLaunch implements Callable {
         exec.writeLaunchEnv(containerScriptOutStream, environment,
             localResources, launchContext.getCommands(),
             new Path(containerLogDirs.get(0)), user);
-        // /////////// End of writing out container-script
+      }
+      // /////////// End of writing out container-script
 
-        // /////////// Write out the container-tokens in the nmPrivate space.
-        tokensOutStream =
-            lfs.create(nmPrivateTokensPath, EnumSet.of(CREATE, OVERWRITE));
+      // /////////// Write out the container-tokens in the nmPrivate space.
+      try (DataOutputStream tokensOutStream =
+          lfs.create(nmPrivateTokensPath, EnumSet.of(CREATE, OVERWRITE))) {
         Credentials creds = container.getCredentials();
         creds.writeTokenStorageToStream(tokensOutStream);
-        // /////////// End of writing out container-tokens
-      } finally {
-        IOUtils.cleanupWithLogger(LOG, containerScriptOutStream,
-            tokensOutStream);
       }
+      // /////////// End of writing out container-tokens
 
       ret = launchContainer(new ContainerStartContext.Builder()
           .setContainer(container)
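
In ContainerLaunch, the single try/finally that covered both streams and cleaned them up with one cleanupWithLogger call is split into two try-with-resources blocks, each scoped to exactly one write; the setup that needs no stream (building appDirs and exporting the token path into the environment) moves ahead of both blocks. A rough sketch of that shape, with hypothetical writeScript/writeTokens helpers standing in for writeLaunchEnv and writeTokenStorageToStream:

    import java.io.DataOutputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;

    public class ScopedWritesSketch {

      // Hypothetical stand-ins for the script and token payloads.
      static void writeScript(DataOutputStream out) throws IOException {
        out.writeBytes("exec /bin/bash launch_container.sh\n");
      }

      static void writeTokens(DataOutputStream out) throws IOException {
        out.writeInt(0);  // e.g. an empty credentials record
      }

      static void launch(String scriptPath, String tokensPath) throws IOException {
        // Each stream lives only as long as the write it serves; a close()
        // failure on either one now propagates instead of being logged and lost.
        try (DataOutputStream scriptOut =
                 new DataOutputStream(new FileOutputStream(scriptPath))) {
          writeScript(scriptOut);
        }
        try (DataOutputStream tokensOut =
                 new DataOutputStream(new FileOutputStream(tokensPath))) {
          writeTokens(tokensOut);
        }
      }

      public static void main(String[] args) throws IOException {
        launch("launch_container.sh.tmp", "container_tokens.tmp");
      }
    }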

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2abab1d7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/JavaSandboxLinuxContainerRuntime.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/JavaSandboxLinuxContainerRuntime.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/JavaSandboxLinuxContainerRuntime.java
index 245b38f..1ab1fc5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/JavaSandboxLinuxContainerRuntime.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/JavaSandboxLinuxContainerRuntime.java
@@ -231,7 +231,6 @@ public class JavaSandboxLinuxContainerRuntime
       throw new ContainerExecutionException("hadoop.tmp.dir not set!");
     }
 
-    OutputStream policyOutputStream = null;
     try {
       String containerID = ctx.getExecutionAttribute(CONTAINER_ID_STR);
       initializePolicyDir();
@@ -242,19 +241,19 @@ public class JavaSandboxLinuxContainerRuntime
           Paths.get(policyFileDir.toString(),
           containerID + "-" + NMContainerPolicyUtils.POLICY_FILE), POLICY_ATTR);
-      policyOutputStream = Files.newOutputStream(policyFilePath);
-      containerPolicies.put(containerID, policyFilePath);
+      try(OutputStream policyOutputStream =
+          Files.newOutputStream(policyFilePath)) {
 
-      NMContainerPolicyUtils.generatePolicyFile(policyOutputStream,
-          localDirs, groupPolicyFiles, resources, configuration);
-      NMContainerPolicyUtils.appendSecurityFlags(
-          commands, env, policyFilePath, sandboxMode);
+        containerPolicies.put(containerID, policyFilePath);
+        NMContainerPolicyUtils.generatePolicyFile(policyOutputStream,
+            localDirs, groupPolicyFiles, resources, configuration);
+        NMContainerPolicyUtils.appendSecurityFlags(
+            commands, env, policyFilePath, sandboxMode);
+      }
     } catch (IOException e) {
       throw new ContainerExecutionException(e);
-    } finally {
-      IOUtils.cleanupWithLogger(LOG, policyOutputStream);
     }
   }
 }
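
In JavaSandboxLinuxContainerRuntime the policy-file stream is now opened in a try-with-resources nested inside the existing try/catch (IOException). The stream is closed when the inner block exits, before the catch clause is evaluated, so an IOException thrown by close() is wrapped in ContainerExecutionException just like a failed write, instead of being logged by the removed finally block. A minimal sketch of that nesting; SandboxSetupException is a hypothetical stand-in for ContainerExecutionException and the policy content is illustrative:

    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class NestedTryWithResourcesSketch {

      // Hypothetical stand-in for ContainerExecutionException.
      static class SandboxSetupException extends Exception {
        SandboxSetupException(Throwable cause) { super(cause); }
      }

      static void writePolicy(Path policyFilePath) throws SandboxSetupException {
        try {
          try (OutputStream policyOut = Files.newOutputStream(policyFilePath)) {
            policyOut.write("grant {\n  permission java.security.AllPermission;\n};\n"
                .getBytes(StandardCharsets.UTF_8));
          }  // close() runs here; an IOException from it reaches the catch below
        } catch (IOException e) {
          throw new SandboxSetupException(e);
        }
      }

      public static void main(String[] args) throws Exception {
        writePolicy(Paths.get("java.policy.tmp"));
      }
    }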

---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org