From: eclark@apache.org
To: common-commits@hadoop.apache.org
Date: Fri, 02 Oct 2015 15:57:24 -0000
Subject: [45/52] [abbrv] hadoop git commit: MAPREDUCE-6494. Permission issue when running archive-logs tool as different users (rkanter)

MAPREDUCE-6494. Permission issue when running archive-logs tool as
different users (rkanter)

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5db371f5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5db371f5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5db371f5

Branch: refs/heads/HADOOP-11890
Commit: 5db371f52f5c6e894a7e6a5d523084f4b316a7ab
Parents: c7e03c3
Author: Robert Kanter
Authored: Wed Sep 30 17:33:53 2015 -0700
Committer: Robert Kanter
Committed: Wed Sep 30 17:33:53 2015 -0700

----------------------------------------------------------------------
 hadoop-mapreduce-project/CHANGES.txt            |  3 +
 .../apache/hadoop/tools/HadoopArchiveLogs.java  | 91 +++++++++++++-------
 .../hadoop/tools/TestHadoopArchiveLogs.java     | 40 +++++++++
 3 files changed, 103 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5db371f5/hadoop-mapreduce-project/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index da5ee0b..39c3b7e 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -585,6 +585,9 @@ Release 2.8.0 - UNRELEASED
 
     MAPREDUCE-6480. archive-logs tool may miss applications (rkanter)
 
+    MAPREDUCE-6494. Permission issue when running archive-logs tool as
+    different users (rkanter)
+
 Release 2.7.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES
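
For context on the one-line summary above: the tool's working directory on
HDFS was previously created with owner-only permissions, so a second user
running archive-logs against the same directory hit a permission error. A
minimal illustrative sketch, not part of the patch (the class name is
invented for the example), comparing the old and new FsPermission values:

// Illustrative sketch (not from the patch): the working-directory permission
// before and after this change. 700 locks out everyone except the user who
// first ran the tool; 770 lets group members run it as well.
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;

public class WorkingDirPermissionSketch {
  public static void main(String[] args) {
    FsPermission before =
        new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE);
    FsPermission after =
        new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.NONE);
    System.out.println(before); // rwx------ (700)
    System.out.println(after);  // rwxrwx--- (770)
  }
}
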
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5db371f5/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java
index 0879d41..8b8e77b 100644
--- a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java
+++ b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java
@@ -76,6 +76,7 @@ public class HadoopArchiveLogs implements Tool {
   private static final String MAX_TOTAL_LOGS_SIZE_OPTION = "maxTotalLogsSize";
   private static final String MEMORY_OPTION = "memory";
   private static final String VERBOSE_OPTION = "verbose";
+  private static final String FORCE_OPTION = "force";
 
   private static final int DEFAULT_MAX_ELIGIBLE = -1;
   private static final int DEFAULT_MIN_NUM_LOG_FILES = 20;
@@ -91,6 +92,8 @@ public class HadoopArchiveLogs implements Tool {
   @VisibleForTesting
   long memory = DEFAULT_MEMORY;
   private boolean verbose = false;
+  @VisibleForTesting
+  boolean force = false;
 
   @VisibleForTesting
   Set<AppInfo> eligibleApplications;
@@ -126,6 +129,8 @@ public class HadoopArchiveLogs implements Tool {
 
   @Override
   public int run(String[] args) throws Exception {
+    int exitCode = 1;
+
     handleOpts(args);
 
     FileSystem fs = null;
@@ -141,44 +146,41 @@ public class HadoopArchiveLogs implements Tool {
     }
     try {
       fs = FileSystem.get(conf);
-      checkFilesAndSeedApps(fs, remoteRootLogDir, suffix);
+      if (prepareWorkingDir(fs, workingDir)) {
 
-      // Prepare working directory
-      if (fs.exists(workingDir)) {
-        fs.delete(workingDir, true);
-      }
-      fs.mkdirs(workingDir);
-      fs.setPermission(workingDir,
-          new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE));
-    } finally {
-      if (fs != null) {
-        fs.close();
-      }
-    }
-
-    filterAppsByAggregatedStatus();
+        checkFilesAndSeedApps(fs, remoteRootLogDir, suffix);
 
-    checkMaxEligible();
+        filterAppsByAggregatedStatus();
 
-    if (eligibleApplications.isEmpty()) {
-      LOG.info("No eligible applications to process");
-      System.exit(0);
-    }
+        checkMaxEligible();
 
-    StringBuilder sb =
-        new StringBuilder("Will process the following applications:");
-    for (AppInfo app : eligibleApplications) {
-      sb.append("\n\t").append(app.getAppId());
-    }
-    LOG.info(sb.toString());
+        if (eligibleApplications.isEmpty()) {
+          LOG.info("No eligible applications to process");
+          exitCode = 0;
+        } else {
+          StringBuilder sb =
+              new StringBuilder("Will process the following applications:");
+          for (AppInfo app : eligibleApplications) {
+            sb.append("\n\t").append(app.getAppId());
+          }
+          LOG.info(sb.toString());
 
-    File localScript = File.createTempFile("hadoop-archive-logs-", ".sh");
-    generateScript(localScript, workingDir, remoteRootLogDir, suffix);
+          File localScript = File.createTempFile("hadoop-archive-logs-", ".sh");
+          generateScript(localScript, workingDir, remoteRootLogDir, suffix);
 
-    if (runDistributedShell(localScript)) {
-      return 0;
+          exitCode = runDistributedShell(localScript) ? 0 : 1;
+        }
+      }
+    } finally {
+      if (fs != null) {
+        // Cleanup working directory
+        if (fs.exists(workingDir)) {
+          fs.delete(workingDir, true);
+        }
+        fs.close();
+      }
     }
-    return -1;
+    return exitCode;
   }
 
   private void handleOpts(String[] args) throws ParseException {
@@ -202,12 +204,17 @@ public class HadoopArchiveLogs implements Tool {
     memoryOpt.setArgName("megabytes");
     Option verboseOpt = new Option(VERBOSE_OPTION, false,
         "Print more details.");
+    Option forceOpt = new Option(FORCE_OPTION, false,
+        "Force recreating the working directory if an existing one is found. " +
+        "This should only be used if you know that another instance is " +
+        "not currently running");
     opts.addOption(helpOpt);
     opts.addOption(maxEligibleOpt);
     opts.addOption(minNumLogFilesOpt);
     opts.addOption(maxTotalLogsSizeOpt);
     opts.addOption(memoryOpt);
     opts.addOption(verboseOpt);
+    opts.addOption(forceOpt);
 
     try {
       CommandLineParser parser = new GnuParser();
@@ -242,6 +249,9 @@ public class HadoopArchiveLogs implements Tool {
       if (commandLine.hasOption(VERBOSE_OPTION)) {
         verbose = true;
       }
+      if (commandLine.hasOption(FORCE_OPTION)) {
+        force = true;
+      }
     } catch (ParseException pe) {
       HelpFormatter formatter = new HelpFormatter();
       formatter.printHelp("yarn archive-logs", opts);
@@ -250,6 +260,25 @@ public class HadoopArchiveLogs implements Tool {
   }
 
   @VisibleForTesting
+  boolean prepareWorkingDir(FileSystem fs, Path workingDir) throws IOException {
+    if (fs.exists(workingDir)) {
+      if (force) {
+        LOG.info("Existing Working Dir detected: -" + FORCE_OPTION +
+            " specified -> recreating Working Dir");
+        fs.delete(workingDir, true);
+      } else {
+        LOG.info("Existing Working Dir detected: -" + FORCE_OPTION +
+            " not specified -> exiting");
+        return false;
+      }
+    }
+    fs.mkdirs(workingDir);
+    fs.setPermission(workingDir,
+        new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.NONE));
+    return true;
+  }
+
+  @VisibleForTesting
   void filterAppsByAggregatedStatus() throws IOException, YarnException {
     YarnClient client = YarnClient.createYarnClient();
     try {
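
Before the test diff below, a standalone sketch of the new
prepareWorkingDir() contract. Assumptions not in the patch: the sketch lives
in the org.apache.hadoop.tools package (force and prepareWorkingDir are
package-private @VisibleForTesting members), the class name is invented, and
the "target" path is purely illustrative.

package org.apache.hadoop.tools;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class PrepareWorkingDirSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    Path workingDir = new Path("target", "prepare-working-dir-sketch");
    HadoopArchiveLogs hal = new HadoopArchiveLogs(conf);

    hal.force = false;
    // No directory yet: creates a fresh one with 770 permissions.
    System.out.println(hal.prepareWorkingDir(fs, workingDir)); // true
    // Directory exists and -force not given: refuses, leaves it untouched,
    // on the assumption that another instance may still be running.
    System.out.println(hal.prepareWorkingDir(fs, workingDir)); // false

    hal.force = true;
    // -force given: deletes and recreates the directory.
    System.out.println(hal.prepareWorkingDir(fs, workingDir)); // true

    fs.delete(workingDir, true); // clean up after the demo
  }
}
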
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5db371f5/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java
index 7423f79..3b8a40f 100644
--- a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java
+++ b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java
@@ -23,6 +23,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
@@ -309,6 +311,44 @@ public class TestHadoopArchiveLogs {
     Assert.assertArrayEquals(statuses, LogAggregationStatus.values());
   }
 
+  @Test(timeout = 5000)
+  public void testPrepareWorkingDir() throws Exception {
+    Configuration conf = new Configuration();
+    HadoopArchiveLogs hal = new HadoopArchiveLogs(conf);
+    FileSystem fs = FileSystem.getLocal(conf);
+    Path workingDir = new Path("target", "testPrepareWorkingDir");
+    fs.delete(workingDir, true);
+    Assert.assertFalse(fs.exists(workingDir));
+    // -force is false and the dir doesn't exist so it will create one
+    hal.force = false;
+    boolean dirPrepared = hal.prepareWorkingDir(fs, workingDir);
+    Assert.assertTrue(dirPrepared);
+    Assert.assertTrue(fs.exists(workingDir));
+    Assert.assertEquals(
+        new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.NONE),
+        fs.getFileStatus(workingDir).getPermission());
+    // Throw a file in the dir
+    Path dummyFile = new Path(workingDir, "dummy.txt");
+    fs.createNewFile(dummyFile);
+    Assert.assertTrue(fs.exists(dummyFile));
+    // -force is false and the dir exists, so nothing will happen and the dummy
+    // still exists
+    dirPrepared = hal.prepareWorkingDir(fs, workingDir);
+    Assert.assertFalse(dirPrepared);
+    Assert.assertTrue(fs.exists(workingDir));
+    Assert.assertTrue(fs.exists(dummyFile));
+    // -force is true and the dir exists, so it will recreate it and the dummy
+    // won't exist anymore
+    hal.force = true;
+    dirPrepared = hal.prepareWorkingDir(fs, workingDir);
+    Assert.assertTrue(dirPrepared);
+    Assert.assertTrue(fs.exists(workingDir));
+    Assert.assertEquals(
+        new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.NONE),
+        fs.getFileStatus(workingDir).getPermission());
+    Assert.assertFalse(fs.exists(dummyFile));
+  }
+
   private static void createFile(FileSystem fs, Path p, long sizeMultiple)
       throws IOException {
     FSDataOutputStream out = null;
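
As a closing usage note, not part of the commit: the option parsing above
uses Commons CLI, and the help is printed as "yarn archive-logs", so the new
flag is passed as "yarn archive-logs -force" like the other boolean options.
A minimal parsing sketch under that assumption (class name invented,
commons-cli assumed on the classpath):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;

public class ForceFlagParseSketch {
  public static void main(String[] args) throws Exception {
    Options opts = new Options();
    // Same shape as forceOpt in handleOpts() above: a no-argument flag.
    opts.addOption(new Option("force", false,
        "Force recreating the working directory if an existing one is found."));
    CommandLineParser parser = new GnuParser();
    CommandLine commandLine = parser.parse(opts, new String[] {"-force"});
    System.out.println(commandLine.hasOption("force")); // true
  }
}
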