From: liuml07@apache.org
To: common-commits@hadoop.apache.org
Subject: hadoop git commit: HADOOP-13427. Eliminate needless uses of FileSystem#{exists(), isFile(), isDirectory()}. Contributed by Steve Loughran and Mingliang Liu
Date: Tue, 15 Nov 2016 19:42:40 +0000 (UTC)

Repository: hadoop
Updated Branches:
  refs/heads/branch-2 260f3a9dc -> 4e0fcff8a


HADOOP-13427. Eliminate needless uses of FileSystem#{exists(), isFile(), isDirectory()}.

Contributed by Steve Loughran and Mingliang Liu

(cherry picked from commit 5af572b6443715b7a741296c1bd520a1840f9a7c)
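The patch applies one idea throughout: instead of probing with exists(), isFile() or isDirectory() and then acting, act directly and handle FileNotFoundException. Each probe is itself a getFileStatus() round trip to the NameNode (or a remote object store), so the probe-then-act sequence doubles the RPC count and leaves a window in which the path can change between the check and the use. A minimal sketch of the before/after shape, assuming a generic FileSystem and Path (class and method names here are illustrative, not part of the patch; only the FileSystem calls are the real Hadoop API):

import java.io.FileNotFoundException;
import java.io.IOException;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public final class ExistsPatternSketch {

  // Before: two round trips, and the path can be deleted between them.
  static FileStatus statusIfPresentOld(FileSystem fs, Path p)
      throws IOException {
    if (fs.exists(p)) {            // RPC #1
      return fs.getFileStatus(p);  // RPC #2; may still throw FileNotFoundException
    }
    return null;
  }

  // After: one round trip; absence is reported as FileNotFoundException.
  static FileStatus statusIfPresent(FileSystem fs, Path p)
      throws IOException {
    try {
      return fs.getFileStatus(p);
    } catch (FileNotFoundException e) {
      return null;
    }
  }

  // FileSystem#delete returns false rather than throwing when the path is
  // absent, so "if (fs.exists(p)) fs.delete(p, true)" collapses to one call.
  static void deleteIfPresent(FileSystem fs, Path p) throws IOException {
    fs.delete(p, true);
  }
}

The delete() contract is why so many hunks below simply drop the exists() guard around a delete() call.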
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4e0fcff8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4e0fcff8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4e0fcff8

Branch: refs/heads/branch-2
Commit: 4e0fcff8ab8210469248e63710461c49e3849fb5
Parents: 260f3a9
Author: Mingliang Liu
Authored: Wed Nov 9 14:48:56 2016 -0800
Committer: Mingliang Liu
Committed: Tue Nov 15 11:36:08 2016 -0800

----------------------------------------------------------------------
 .../hadoop/crypto/key/JavaKeyStoreProvider.java | 31 +++++++++-----------
 .../java/org/apache/hadoop/fs/FileUtil.java | 19 ++++++++----
 .../apache/hadoop/fs/RawLocalFileSystem.java | 12 +++-----
 .../apache/hadoop/fs/TrashPolicyDefault.java | 5 ++--
 .../hadoop/util/GenericOptionsParser.java | 17 +++++------
 .../hadoop/hdfs/DistributedFileSystem.java | 3 +-
 .../apache/hadoop/hdfs/client/HdfsAdmin.java | 9 ++++--
 .../jobhistory/JobHistoryEventHandler.java | 8 +----
 .../hadoop/mapreduce/JobResourceUploader.java | 14 ++++-----
 .../hadoop/mapreduce/JobSubmissionFiles.java | 5 ++--
 .../lib/output/FileOutputCommitter.java | 12 ++++----
 .../lib/output/PartialFileOutputCommitter.java | 2 +-
 .../mapreduce/lib/partition/InputSampler.java | 6 ++--
 .../TestPreemptableFileOutputCommitter.java | 2 +-
 ...istoryServerFileSystemStateStoreService.java | 2 +-
 .../examples/terasort/TeraOutputFormat.java | 4 ++-
 .../apache/hadoop/tools/HadoopArchiveLogs.java | 4 +--
 .../org/apache/hadoop/tools/HadoopArchives.java | 17 +++--------
 .../org/apache/hadoop/fs/azure/WasbFsck.java | 8 +++--
 .../org/apache/hadoop/tools/DistCpSync.java | 4 +--
 .../apache/hadoop/tools/SimpleCopyListing.java | 16 ++++++----
 .../hadoop/tools/mapred/CopyCommitter.java | 4 +--
 .../apache/hadoop/tools/mapred/CopyMapper.java | 5 ++--
 .../tools/mapred/RetriableFileCopyCommand.java | 2 +-
 .../apache/hadoop/tools/util/DistCpUtils.java | 4 +--
 .../java/org/apache/hadoop/tools/DistTool.java | 10 +++----
 .../hadoop/fs/swift/util/SwiftTestUtils.java | 9 ++++--
 .../hadoop/tools/rumen/state/StatePool.java | 25 +++++++---------
 .../FileSystemBasedConfigurationProvider.java | 4 +--
 .../api/impl/FileSystemTimelineWriter.java | 17 ++++------
 .../nodelabels/FileSystemNodeLabelsStore.java | 26 ++++++++++------
 .../NonAppendableFSNodeLabelStore.java | 8 ++---
 .../TestFileSystemNodeLabelsStore.java | 4 +--
 .../FileSystemApplicationHistoryStore.java | 16 +++++-----
 .../TestFileSystemApplicationHistoryStore.java | 7 +++--
 .../sharedcache/SharedCacheUploader.java | 6 ++--
 .../store/InMemorySCMStore.java | 8 +++--
 37 files changed, 171 insertions(+), 184 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
index 1827c27..5beda0d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
+++
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java @@ -36,6 +36,7 @@ import com.google.common.annotations.VisibleForTesting; import javax.crypto.spec.SecretKeySpec; +import java.io.FileNotFoundException; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; @@ -167,9 +168,9 @@ public class JavaKeyStoreProvider extends KeyProvider { // rewrite the keystore in flush() permissions = perm; } catch (KeyStoreException e) { - throw new IOException("Can't create keystore", e); + throw new IOException("Can't create keystore: " + e, e); } catch (GeneralSecurityException e) { - throw new IOException("Can't load keystore " + path, e); + throw new IOException("Can't load keystore " + path + " : " + e , e); } } @@ -190,9 +191,7 @@ public class JavaKeyStoreProvider extends KeyProvider { try { perm = loadFromPath(path, password); // Remove _OLD if exists - if (fs.exists(backupPath)) { - fs.delete(backupPath, true); - } + fs.delete(backupPath, true); LOG.debug("KeyStore loaded successfully !!"); } catch (IOException ioe) { // If file is corrupted for some reason other than @@ -260,9 +259,7 @@ public class JavaKeyStoreProvider extends KeyProvider { LOG.debug(String.format("KeyStore loaded successfully from '%s'!!", pathToLoad)); } - if (fs.exists(pathToDelete)) { - fs.delete(pathToDelete, true); - } + fs.delete(pathToDelete, true); } catch (IOException e) { // Check for password issue : don't want to trash file due // to wrong password @@ -539,13 +536,15 @@ public class JavaKeyStoreProvider extends KeyProvider { return; } // Might exist if a backup has been restored etc. - if (fs.exists(newPath)) { + try { renameOrFail(newPath, new Path(newPath.toString() + "_ORPHANED_" + System.currentTimeMillis())); + } catch (FileNotFoundException ignored) { } - if (fs.exists(oldPath)) { + try { renameOrFail(oldPath, new Path(oldPath.toString() + "_ORPHANED_" + System.currentTimeMillis())); + } catch (FileNotFoundException ignored) { } // put all of the updates into the keystore for(Map.Entry entry: cache.entrySet()) { @@ -601,9 +600,7 @@ public class JavaKeyStoreProvider extends KeyProvider { // Rename _NEW to CURRENT renameOrFail(newPath, path); // Delete _OLD - if (fs.exists(oldPath)) { - fs.delete(oldPath, true); - } + fs.delete(oldPath, true); } protected void writeToNew(Path newPath) throws IOException { @@ -623,12 +620,12 @@ public class JavaKeyStoreProvider extends KeyProvider { protected boolean backupToOld(Path oldPath) throws IOException { - boolean fileExisted = false; - if (fs.exists(path)) { + try { renameOrFail(path, oldPath); - fileExisted = true; + return true; + } catch (FileNotFoundException e) { + return false; } - return fileExisted; } private void revertFromOld(Path oldPath, boolean fileExisted) http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java index 4051aaf..0e8741d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java @@ -22,6 +22,7 @@ import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import 
java.io.File; import java.io.FileInputStream; +import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; @@ -326,14 +327,15 @@ public class FileUtil { return copy(srcFS, srcs[0], dstFS, dst, deleteSource, overwrite, conf); // Check if dest is directory - if (!dstFS.exists(dst)) { - throw new IOException("`" + dst +"': specified destination directory " + - "does not exist"); - } else { + try { FileStatus sdst = dstFS.getFileStatus(dst); if (!sdst.isDirectory()) throw new IOException("copying multiple files, but last argument `" + dst + "' is not a directory"); + } catch (FileNotFoundException e) { + throw new IOException( + "`" + dst + "': specified destination directory " + + "does not exist", e); } for (Path src : srcs) { @@ -518,8 +520,13 @@ public class FileUtil { private static Path checkDest(String srcName, FileSystem dstFS, Path dst, boolean overwrite) throws IOException { - if (dstFS.exists(dst)) { - FileStatus sdst = dstFS.getFileStatus(dst); + FileStatus sdst; + try { + sdst = dstFS.getFileStatus(dst); + } catch (FileNotFoundException e) { + sdst = null; + } + if (null != sdst) { if (sdst.isDirectory()) { if (null == srcName) { throw new IOException("Target " + dst + " is a directory"); http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java index fc6a91a..827381b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java @@ -208,9 +208,7 @@ public class RawLocalFileSystem extends FileSystem { @Override public FSDataInputStream open(Path f, int bufferSize) throws IOException { - if (!exists(f)) { - throw new FileNotFoundException(f.toString()); - } + getFileStatus(f); return new FSDataInputStream(new BufferedFSInputStream( new LocalFSFileInputStream(f), bufferSize)); } @@ -274,9 +272,6 @@ public class RawLocalFileSystem extends FileSystem { @Override public FSDataOutputStream append(Path f, int bufferSize, Progressable progress) throws IOException { - if (!exists(f)) { - throw new FileNotFoundException("File " + f + " not found"); - } FileStatus status = getFileStatus(f); if (status.isDirectory()) { throw new IOException("Cannot append to a diretory (=" + f + " )"); @@ -383,17 +378,18 @@ public class RawLocalFileSystem extends FileSystem { // platforms (notably Windows) do not provide this behavior, so the Java API // call renameTo(dstFile) fails. Delete destination and attempt rename // again. 
- if (this.exists(dst)) { + try { FileStatus sdst = this.getFileStatus(dst); if (sdst.isDirectory() && dstFile.list().length == 0) { if (LOG.isDebugEnabled()) { LOG.debug("Deleting empty destination and renaming " + src + " to " + - dst); + dst); } if (this.delete(dst, false) && srcFile.renameTo(dstFile)) { return true; } } + } catch (FileNotFoundException ignored) { } return false; }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
index 4f4c937..c65e16a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
@@ -121,9 +121,8 @@ public class TrashPolicyDefault extends TrashPolicy { if (!path.isAbsolute()) // make path absolute path = new Path(fs.getWorkingDirectory(), path); - if (!fs.exists(path)) // check that path exists - throw new FileNotFoundException(path.toString()); - + // check that path exists + fs.getFileStatus(path); String qpath = fs.makeQualified(path).toString(); Path trashRoot = fs.getTrashRoot(path);
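The TrashPolicyDefault hunk above and the GenericOptionsParser hunks below share the simplest variant of the pattern: a getFileStatus() call made purely for the FileNotFoundException it raises, replacing a hand-rolled existence check. A sketch (ExistenceCheck/checkExists are illustrative names; only the FileSystem calls are Hadoop API):

import java.io.FileNotFoundException;
import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

final class ExistenceCheck {
  // getFileStatus() already throws FileNotFoundException for a missing
  // path, so it replaces the hand-rolled probe
  //   if (!fs.exists(p)) throw new FileNotFoundException(p.toString());
  // with a single call whose result is deliberately discarded.
  static void checkExists(FileSystem fs, Path p) throws IOException {
    fs.getFileStatus(p);
  }
}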
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
index fea9728..04e8024 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java
@@ -41,6 +41,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path;
@@ -328,9 +329,7 @@ public class GenericOptionsParser { // check if the local file exists FileSystem localFs = FileSystem.getLocal(conf); Path p = localFs.makeQualified(new Path(fileName)); - if (!localFs.exists(p)) { - throw new FileNotFoundException("File "+fileName+" does not exist."); - } + localFs.getFileStatus(p); if(LOG.isDebugEnabled()) { LOG.debug("setting conf tokensFile: " + fileName); }
@@ -437,9 +436,7 @@ if (pathURI.getScheme() == null) { //default to the local file system //check if the file exists or not first - if (!localFs.exists(path)) { - throw new FileNotFoundException("File " + tmp + " does not exist."); - } + localFs.getFileStatus(path); if (isWildcard) { expandWildcard(finalPaths, path, localFs); } else {
@@ -452,9 +449,8 @@ // these files to the file system ResourceManager is running // on. FileSystem fs = path.getFileSystem(conf); - if (!fs.exists(path)) { - throw new FileNotFoundException("File " + tmp + " does not exist."); - } + // existence check + fs.getFileStatus(path); if (isWildcard) { expandWildcard(finalPaths, path, fs); } else {
@@ -476,7 +472,8 @@ private void expandWildcard(List finalPaths, Path path, FileSystem fs) throws IOException { - if (!fs.isDirectory(path)) { + FileStatus status = fs.getFileStatus(path); + if (!status.isDirectory()) { throw new FileNotFoundException(path + " is not a directory."); } // get all the jars in the directory

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
index 8bda7fa..8080a3f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
@@ -2526,8 +2526,9 @@ public class DistributedFileSystem extends FileSystem { } else { Path userTrash = new Path(ezTrashRoot, System.getProperty( "user.name")); - if (exists(userTrash)) { + try { ret.add(getFileStatus(userTrash)); + } catch (FileNotFoundException ignored) { } } }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsAdmin.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsAdmin.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsAdmin.java
index 3cee861..64f0b86 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsAdmin.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockStoragePolicySpi; import org.apache.hadoop.fs.CacheFlag; +import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FileEncryptionInfo; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem;
@@ -476,10 +477,10 @@ public class HdfsAdmin { Path trashPath = new Path(ez.getPath(), FileSystem.TRASH_PREFIX); - if (dfs.exists(trashPath)) { + try { + FileStatus trashFileStatus = dfs.getFileStatus(trashPath); String errMessage = "Will not provision new trash directory for " + "encryption zone " + ez.getPath() + ".
Path already exists."; - FileStatus trashFileStatus = dfs.getFileStatus(trashPath); if (!trashFileStatus.isDirectory()) { errMessage += "\r\n" + "Warning: " + trashPath.toString() + " is not a directory"; @@ -489,7 +490,9 @@ public class HdfsAdmin { "Warning: the permission of " + trashPath.toString() + " is not " + TRASH_PERMISSION; } - throw new IOException(errMessage); + throw new FileAlreadyExistsException(errMessage); + } catch (FileNotFoundException ignored) { + // no trash path } // Update the permission bits http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java index 1e258ac..345e3d4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java @@ -1374,9 +1374,7 @@ public class JobHistoryEventHandler extends AbstractService if (stagingDirFS.exists(fromPath)) { LOG.info("Copying " + fromPath.toString() + " to " + toPath.toString()); // TODO temporarily removing the existing dst - if (doneDirFS.exists(toPath)) { - doneDirFS.delete(toPath, true); - } + doneDirFS.delete(toPath, true); boolean copied = FileUtil.copy(stagingDirFS, fromPath, doneDirFS, toPath, false, getConfig()); @@ -1389,10 +1387,6 @@ public class JobHistoryEventHandler extends AbstractService } } - boolean pathExists(FileSystem fileSys, Path path) throws IOException { - return fileSys.exists(path); - } - private String getTempFileName(String srcFile) { return srcFile + "_tmp"; } http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java index 15dbc13..4c48ff4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobResourceUploader.java @@ -435,9 +435,11 @@ class JobResourceUploader { LOG.debug("default FileSystem: " + jtFs.getUri()); FsPermission mapredSysPerms = new FsPermission(JobSubmissionFiles.JOB_DIR_PERMISSION); - if (!jtFs.exists(submitJobDir)) { + try { + jtFs.getFileStatus(submitJobDir); + } catch (FileNotFoundException e) { throw new IOException("Cannot find job submission directory! 
" - + "It should just be created, so something wrong here."); + + "It should just be created, so something wrong here.", e); } Path fileDir = JobSubmissionFiles.getJobLog4jFile(submitJobDir); @@ -488,9 +490,7 @@ class JobResourceUploader { if (pathURI.getScheme() == null) { // default to the local file system // check if the file exists or not first - if (!localFs.exists(path)) { - throw new FileNotFoundException("File " + file + " does not exist."); - } + localFs.getFileStatus(path); finalPath = path.makeQualified(localFs.getUri(), localFs.getWorkingDirectory()) .toString(); @@ -500,9 +500,7 @@ class JobResourceUploader { // these files to the file system ResourceManager is running // on. FileSystem fs = path.getFileSystem(conf); - if (!fs.exists(path)) { - throw new FileNotFoundException("File " + file + " does not exist."); - } + fs.getFileStatus(path); finalPath = path.makeQualified(fs.getUri(), fs.getWorkingDirectory()).toString(); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmissionFiles.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmissionFiles.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmissionFiles.java index 9dd45c3..ae914c3 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmissionFiles.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmissionFiles.java @@ -18,6 +18,7 @@ package org.apache.hadoop.mapreduce; +import java.io.FileNotFoundException; import java.io.IOException; import com.google.common.annotations.VisibleForTesting; @@ -130,7 +131,7 @@ public class JobSubmissionFiles { Path stagingArea = cluster.getStagingAreaDir(); FileSystem fs = stagingArea.getFileSystem(conf); UserGroupInformation currentUser = realUser.getCurrentUser(); - if (fs.exists(stagingArea)) { + try { FileStatus fsStatus = fs.getFileStatus(stagingArea); String fileOwner = fsStatus.getOwner(); if (!(fileOwner.equals(currentUser.getShortUserName()) || fileOwner @@ -156,7 +157,7 @@ public class JobSubmissionFiles { "to correct value " + JOB_DIR_PERMISSION); fs.setPermission(stagingArea, JOB_DIR_PERMISSION); } - } else { + } catch (FileNotFoundException e) { fs.mkdirs(stagingArea, new FsPermission(JOB_DIR_PERMISSION)); } return stagingArea; http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java index bc20307..2291197 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputCommitter.java @@ -674,10 +674,9 @@ public class FileOutputCommitter extends OutputCommitter { if (algorithmVersion == 1) { if (fs.exists(previousCommittedTaskPath)) { Path committedTaskPath = getCommittedTaskPath(context); - if (fs.exists(committedTaskPath)) { - if (!fs.delete(committedTaskPath, true)) { - throw new IOException("Could not delete "+committedTaskPath); - } + if (!fs.delete(committedTaskPath, true) && + fs.exists(committedTaskPath)) { + throw new IOException("Could not delete " + committedTaskPath); } //Rename can fail if the parent directory does not yet exist. Path committedParent = committedTaskPath.getParent(); @@ -693,11 +692,12 @@ public class FileOutputCommitter extends OutputCommitter { // essentially a no-op, but for backwards compatibility // after upgrade to the new fileOutputCommitter, // check if there are any output left in committedTaskPath - if (fs.exists(previousCommittedTaskPath)) { + try { + FileStatus from = fs.getFileStatus(previousCommittedTaskPath); LOG.info("Recovering task for upgrading scenario, moving files from " + previousCommittedTaskPath + " to " + outputPath); - FileStatus from = fs.getFileStatus(previousCommittedTaskPath); mergePaths(fs, from, outputPath); + } catch (FileNotFoundException ignored) { } LOG.info("Done recovering task " + attemptId); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/PartialFileOutputCommitter.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/PartialFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/PartialFileOutputCommitter.java index 1d15370..238a2ea 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/PartialFileOutputCommitter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/PartialFileOutputCommitter.java @@ -97,7 +97,7 @@ public class PartialFileOutputCommitter for (int i = 0; i < taid.getId(); ++i) { TaskAttemptID oldId = new TaskAttemptID(tid, i); Path pTask = new Path(pCommit, oldId.toString()); - if (fs.exists(pTask) && !fs.delete(pTask, true)) { + if (!fs.delete(pTask, true) && fs.exists(pTask)) { throw new IOException("Failed to delete " + pTask); } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/InputSampler.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/InputSampler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/InputSampler.java index cce9f37..df4e919 100644 --- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/InputSampler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/InputSampler.java @@ -322,10 +322,8 @@ public class InputSampler extends Configured implements Tool { Arrays.sort(samples, comparator); Path dst = new Path(TotalOrderPartitioner.getPartitionFile(conf)); FileSystem fs = dst.getFileSystem(conf); - if (fs.exists(dst)) { - fs.delete(dst, false); - } - SequenceFile.Writer writer = SequenceFile.createWriter(fs, + fs.delete(dst, false); + SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, dst, job.getMapOutputKeyClass(), NullWritable.class); NullWritable nullValue = NullWritable.get(); float stepSize = samples.length / (float) numPartitions; http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestPreemptableFileOutputCommitter.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestPreemptableFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestPreemptableFileOutputCommitter.java index 09ac286..e989bf4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestPreemptableFileOutputCommitter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestPreemptableFileOutputCommitter.java @@ -80,7 +80,7 @@ public class TestPreemptableFileOutputCommitter { foc.cleanUpPartialOutputForTask(context); verify(fs).delete(eq(p0), eq(true)); verify(fs).delete(eq(p1), eq(true)); - verify(fs, never()).delete(eq(p3), eq(true)); + verify(fs, times(1)).delete(eq(p3), eq(true)); verify(fs, never()).delete(eq(p2), eq(true)); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryServerFileSystemStateStoreService.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryServerFileSystemStateStoreService.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryServerFileSystemStateStoreService.java index 9902f5e..47d6583 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryServerFileSystemStateStoreService.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryServerFileSystemStateStoreService.java @@ -182,7 +182,7 @@ public class HistoryServerFileSystemStateStoreService Path keyPath = new Path(tokenKeysStatePath, TOKEN_MASTER_KEY_FILE_PREFIX + key.getKeyId()); if (fs.exists(keyPath)) { - throw new IOException(keyPath + " already 
exists"); + throw new FileAlreadyExistsException(keyPath + " already exists"); } ByteArrayOutputStream memStream = new ByteArrayOutputStream(); http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraOutputFormat.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraOutputFormat.java index 915acde..9bfd7eb 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraOutputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraOutputFormat.java @@ -18,6 +18,7 @@ package org.apache.hadoop.examples.terasort; +import java.io.FileNotFoundException; import java.io.IOException; import org.apache.hadoop.conf.Configuration; @@ -100,7 +101,7 @@ public class TeraOutputFormat extends FileOutputFormat { final FileSystem fs = outDir.getFileSystem(jobConf); - if (fs.exists(outDir)) { + try { // existing output dir is considered empty iff its only content is the // partition file. // @@ -116,6 +117,7 @@ public class TeraOutputFormat extends FileOutputFormat { throw new FileAlreadyExistsException("Output directory " + outDir + " already exists"); } + } catch (FileNotFoundException ignored) { } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java index 2d3e43b..db0370f 100644 --- a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java +++ b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java @@ -178,9 +178,7 @@ public class HadoopArchiveLogs implements Tool { } finally { if (fs != null) { // Cleanup working directory - if (fs.exists(workingDir)) { - fs.delete(workingDir, true); - } + fs.delete(workingDir, true); fs.close(); } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java b/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java index ee14850..c2097dc 100644 --- a/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java +++ b/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java @@ -20,7 +20,6 @@ package org.apache.hadoop.tools; import java.io.DataInput; import java.io.DataOutput; -import java.io.FileNotFoundException; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; @@ -149,9 +148,7 @@ public class HadoopArchives implements Tool { IOException { for (Path p : paths) { FileSystem fs = p.getFileSystem(conf); - if (!fs.exists(p)) { - throw new 
FileNotFoundException("Source " + p + " does not exist."); - } + fs.getFileStatus(p); } } @@ -619,9 +616,7 @@ public class HadoopArchives implements Tool { try { destFs = tmpOutput.getFileSystem(conf); //this was a stale copy - if (destFs.exists(tmpOutput)) { - destFs.delete(tmpOutput, false); - } + destFs.delete(tmpOutput, false); partStream = destFs.create(tmpOutput, false, conf.getInt("io.file.buffer.size", 4096), destFs.getDefaultReplication(tmpOutput), blockSize); } catch(IOException ie) { @@ -747,12 +742,8 @@ public class HadoopArchives implements Tool { replication = conf.getInt(HAR_REPLICATION_LABEL, 3); try { fs = masterIndex.getFileSystem(conf); - if (fs.exists(masterIndex)) { - fs.delete(masterIndex, false); - } - if (fs.exists(index)) { - fs.delete(index, false); - } + fs.delete(masterIndex, false); + fs.delete(index, false); indexStream = fs.create(index); outStream = fs.create(masterIndex); String version = VERSION + " \n"; http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/WasbFsck.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/WasbFsck.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/WasbFsck.java index d311550..f512489 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/WasbFsck.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/WasbFsck.java @@ -18,6 +18,7 @@ package org.apache.hadoop.fs.azure; +import java.io.FileNotFoundException; import java.io.IOException; import java.util.Arrays; import java.util.List; @@ -139,12 +140,15 @@ public class WasbFsck extends Configured implements Tool { if (p == null) { return true; } - if (!fs.exists(p)) { + FileStatus status; + try { + status = fs.getFileStatus(p); + } catch (FileNotFoundException e) { System.out.println("Path " + p + " does not exist!"); return true; } - if (fs.isFile(p)) { + if (status.isFile()) { if (containsColon(p)) { System.out.println("Warning: file " + p + " has a colon in its name."); return false; http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpSync.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpSync.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpSync.java index f1fae11..bcae96a 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpSync.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpSync.java @@ -328,9 +328,7 @@ class DistCpSync { Arrays.sort(diffs, DiffInfo.targetComparator); for (DiffInfo diff : diffs) { if (diff.getTarget() != null) { - if (!targetFs.exists(diff.getTarget().getParent())) { - targetFs.mkdirs(diff.getTarget().getParent()); - } + targetFs.mkdirs(diff.getTarget().getParent()); targetFs.rename(diff.getTmp(), diff.getTarget()); } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java index 
0002d4f..105e4f2 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
@@ -126,7 +126,13 @@ public class SimpleCopyListing extends CopyListing { Path targetPath = options.getTargetPath(); FileSystem targetFS = targetPath.getFileSystem(getConf()); - boolean targetIsFile = targetFS.isFile(targetPath); + boolean targetExists = false; + boolean targetIsFile = false; + try { + targetIsFile = targetFS.getFileStatus(targetPath).isFile(); + targetExists = true; + } catch (FileNotFoundException ignored) { + } targetPath = targetFS.makeQualified(targetPath); final boolean targetIsReservedRaw = Path.getPathWithoutSchemeAndAuthority(targetPath).toString().
@@ -147,7 +153,7 @@ } } - if (options.shouldAtomicCommit() && targetFS.exists(targetPath)) { + if (options.shouldAtomicCommit() && targetExists) { throw new InvalidInputException("Target path for atomic-commit already exists: " + targetPath + ". Cannot atomic-commit to pre-existing target-path."); }
@@ -448,7 +454,7 @@ && !sourceStatus.isDirectory(); if (solitaryFile) { - if (targetFS.isFile(target) || !targetPathExists) { + if (!targetPathExists || targetFS.isFile(target)) { return sourceStatus.getPath(); } else { return sourceStatus.getPath().getParent();
@@ -495,9 +501,7 @@ private SequenceFile.Writer getWriter(Path pathToListFile) throws IOException { FileSystem fs = pathToListFile.getFileSystem(getConf()); - if (fs.exists(pathToListFile)) { - fs.delete(pathToListFile, false); - } + fs.delete(pathToListFile, false); return SequenceFile.createWriter(getConf(), SequenceFile.Writer.file(pathToListFile), SequenceFile.Writer.keyClass(Text.class),
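A different variant appears in the CopyCommitter hunk just below (and in the FileOutputCommitter and PartialFileOutputCommitter hunks earlier): where the probe's result still matters, the patch reorders the pair so the probe runs only on the failure path. A sketch (DeleteFirst/deleteIfPresent are illustrative names):

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

final class DeleteFirst {
  // Old order:  !fs.exists(p) || fs.delete(p, true)
  //   -- always pays for the exists() probe.
  // New order:  fs.delete(p, true) || !fs.exists(p)
  //   -- deletes first and consults exists() only when delete() reports
  //      failure, to tell "already gone" apart from "could not delete".
  //      Short-circuit evaluation keeps the result the same on a stable
  //      filesystem.
  static boolean deleteIfPresent(FileSystem fs, Path p) throws IOException {
    return fs.delete(p, true) || !fs.exists(p);
  }
}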
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
index dd653b2..75cefb4 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
@@ -279,8 +279,8 @@ public class CopyCommitter extends FileOutputCommitter { if (srcAvailable && trgtRelPath.equals(srcRelPath)) continue; // Target doesn't exist at source. Delete. - boolean result = (!targetFS.exists(trgtFileStatus.getPath()) || - targetFS.delete(trgtFileStatus.getPath(), true)); + boolean result = targetFS.delete(trgtFileStatus.getPath(), true) + || !targetFS.exists(trgtFileStatus.getPath()); if (result) { LOG.info("Deleted " + trgtFileStatus.getPath() + " - Missing at source"); deletedEntries++;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
index a979a29..6d93d81 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
@@ -113,8 +113,9 @@ public class CopyMapper extends Mapper DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH)); targetFS = targetFinalPath.getFileSystem(conf); - if (targetFS.exists(targetFinalPath) && targetFS.isFile(targetFinalPath)) { - overWrite = true; // When target is an existing file, overwrite it. + try { + overWrite = overWrite || targetFS.getFileStatus(targetFinalPath).isFile(); + } catch (FileNotFoundException ignored) { } if (conf.get(DistCpConstants.CONF_LABEL_SSL_CONF) != null) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
index acb30ee..ba2e0af 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
@@ -140,7 +140,7 @@ public class RetriableFileCopyCommand extends RetriableCommand { // note that for append case, it is possible that we append partial data // and then fail.
In that case, for the next retry, we either reuse the // partial appended data if it is good or we overwrite the whole file - if (!toAppend && targetFS.exists(targetPath)) { + if (!toAppend) { targetFS.delete(targetPath, false); } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java index 1784c5d..c308e6f 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java @@ -356,9 +356,7 @@ public class DistCpUtils { CopyListingFileStatus.class, conf); Path output = new Path(sourceListing.toString() + "_sorted"); - if (fs.exists(output)) { - fs.delete(output, false); - } + fs.delete(output, false); sorter.sort(sourceListing, output); return output; http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistTool.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistTool.java b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistTool.java index 2c89cb0..cdd7cac 100644 --- a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistTool.java +++ b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistTool.java @@ -20,7 +20,6 @@ package org.apache.hadoop.tools; import java.io.BufferedReader; import java.io.DataInput; import java.io.DataOutput; -import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.Charset; @@ -68,11 +67,10 @@ abstract class DistTool implements org.apache.hadoop.util.Tool { List ioes = new ArrayList(); for(Path p : srcs) { try { - if (!p.getFileSystem(conf).exists(p)) { - ioes.add(new FileNotFoundException("Source "+p+" does not exist.")); - } + p.getFileSystem(conf).getFileStatus(p); + } catch(IOException e) { + ioes.add(e); } - catch(IOException e) {ioes.add(e);} } if (!ioes.isEmpty()) { throw new InvalidInputException(ioes); @@ -113,4 +111,4 @@ abstract class DistTool implements org.apache.hadoop.util.Tool { public static final int ERROR_CODE = -2; DuplicationException(String message) {super(message);} } -} \ No newline at end of file +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/SwiftTestUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/SwiftTestUtils.java b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/SwiftTestUtils.java index 01ec739..cfc5dee 100644 --- a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/SwiftTestUtils.java +++ b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/SwiftTestUtils.java @@ -489,10 +489,13 @@ public class SwiftTestUtils extends org.junit.Assert { */ public static void assertPathExists(FileSystem fileSystem, String message, Path path) throws IOException { - if (!fileSystem.exists(path)) { + try { + 
fileSystem.getFileStatus(path); + } catch (FileNotFoundException e) { //failure, report it - fail(message + ": not found " + path + " in " + path.getParent()); - ls(fileSystem, path.getParent()); + throw (IOException)new FileNotFoundException(message + ": not found " + + path + " in " + path.getParent() + ": " + e + " -- " + + ls(fileSystem, path.getParent())).initCause(e); } }
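The SwiftTestUtils hunk above shows how the conversion keeps its diagnostics: the replacement FileNotFoundException carries the caller's message and chains the original exception. A sketch of the idiom (Diagnostics/requirePath are illustrative names):

import java.io.FileNotFoundException;
import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

final class Diagnostics {
  // FileNotFoundException has no (String, Throwable) constructor, hence
  // the initCause() cast-and-throw idiom used by the patch to keep the
  // original exception chained while adding caller context.
  static void requirePath(FileSystem fs, Path p, String message)
      throws IOException {
    try {
      fs.getFileStatus(p);
    } catch (FileNotFoundException e) {
      throw (FileNotFoundException) new FileNotFoundException(
          message + ": not found " + p).initCause(e);
    }
  }
}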
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java
index 576a3c0..ab29372 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/state/StatePool.java
@@ -21,6 +21,7 @@ import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutput; import java.io.DataOutputStream; +import java.io.FileNotFoundException; import java.io.IOException; import java.text.DateFormat; import java.text.SimpleDateFormat;
@@ -186,27 +187,23 @@ public class StatePool { if (reload) { // Reload persisted entries Path stateFilename = new Path(persistDirPath, COMMIT_STATE_FILENAME); - FileSystem fs = stateFilename.getFileSystem(conf); - if (fs.exists(stateFilename)) { - reloadState(stateFilename, conf); - } else { - throw new RuntimeException("No latest state persist directory found!" + if (!reloadState(stateFilename, conf)) { + throw new RuntimeException("No latest state persist directory found!" + " Disable persistence and run."); } } } - private void reloadState(Path stateFile, Configuration conf) - throws Exception { - FileSystem fs = stateFile.getFileSystem(conf); - if (fs.exists(stateFile)) { + private boolean reloadState(Path stateFile, Configuration configuration) + throws Exception { + FileSystem fs = stateFile.getFileSystem(configuration); + try (FSDataInputStream in = fs.open(stateFile)) { System.out.println("Reading state from " + stateFile.toString()); - FSDataInputStream in = fs.open(stateFile); read(in); - in.close(); + return true; + } catch (FileNotFoundException e) { System.out.println("No state information found for " + stateFile); + return false; } }
@@ -342,4 +339,4 @@ //TODO Should we do a clone? this.pool = states; } -} \ No newline at end of file +}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/FileSystemBasedConfigurationProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/FileSystemBasedConfigurationProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/FileSystemBasedConfigurationProvider.java
index bf50cad..cef03b9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/FileSystemBasedConfigurationProvider.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/FileSystemBasedConfigurationProvider.java
@@ -74,9 +74,7 @@ public class FileSystemBasedConfigurationProvider new Path(bootstrapConf.get(YarnConfiguration.FS_BASED_RM_CONF_STORE, YarnConfiguration.DEFAULT_FS_BASED_RM_CONF_STORE)); fs = configDir.getFileSystem(bootstrapConf); - if (!fs.exists(configDir)) { - fs.mkdirs(configDir); - } + fs.mkdirs(configDir); } @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
index 55d6bd2..230d98a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.yarn.client.api.impl; import java.io.Closeable; -import java.io.FileNotFoundException; import java.io.Flushable; import java.io.IOException; import java.net.URI;
@@ -121,10 +120,8 @@ public class FileSystemTimelineWriter extends TimelineWriter{ .TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_ACTIVE_DIR_DEFAULT)); fs = FileSystem.newInstance(activePath.toUri(), fsConf); - if (!fs.exists(activePath)) { - throw new FileNotFoundException(activePath + " does not exist"); - } - + // raise FileNotFoundException if the path is not found + fs.getFileStatus(activePath); summaryEntityTypes = new HashSet( conf.getStringCollection(YarnConfiguration .TIMELINE_SERVICE_ENTITYGROUP_FS_STORE_SUMMARY_ENTITY_TYPES));
@@ -992,9 +989,8 @@ Path appDir = createApplicationDir(appAttemptId.getApplicationId()); Path attemptDir = new Path(appDir, appAttemptId.toString()); - if (!fs.exists(attemptDir)) { - FileSystem.mkdirs(fs, attemptDir, new FsPermission( - APP_LOG_DIR_PERMISSIONS)); + if (FileSystem.mkdirs(fs, attemptDir, + new FsPermission(APP_LOG_DIR_PERMISSIONS))) { if (LOG.isDebugEnabled()) { LOG.debug("New attempt directory created - " + attemptDir); }
@@ -1005,9 +1001,8 @@ private Path createApplicationDir(ApplicationId appId) throws IOException { Path appDir = new Path(activePath, appId.toString()); - if
(!fs.exists(appDir)) { - FileSystem.mkdirs(fs, appDir, - new FsPermission(APP_LOG_DIR_PERMISSIONS)); + if (FileSystem.mkdirs(fs, appDir, + new FsPermission(APP_LOG_DIR_PERMISSIONS))) { if (LOG.isDebugEnabled()) { LOG.debug("New app directory created - " + appDir); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java index a65349b..69b2316 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java @@ -19,6 +19,7 @@ package org.apache.hadoop.yarn.nodelabels; import java.io.EOFException; +import java.io.FileNotFoundException; import java.io.IOException; import java.util.Collection; import java.util.List; @@ -83,9 +84,7 @@ public class FileSystemNodeLabelsStore extends NodeLabelsStore { setFileSystem(conf); // mkdir of root dir path - if (!fs.exists(fsWorkingPath)) { - fs.mkdirs(fsWorkingPath); - } + fs.mkdirs(fsWorkingPath); } @Override @@ -160,12 +159,15 @@ public class FileSystemNodeLabelsStore extends NodeLabelsStore { throws IOException { // If mirror.new exists, read from mirror.new, FSDataInputStream is = null; - if (fs.exists(newMirrorPath)) { + try { is = fs.open(newMirrorPath); - } else if (fs.exists(oldMirrorPath)) { - is = fs.open(oldMirrorPath); - } + } catch (FileNotFoundException e) { + try { + is = fs.open(oldMirrorPath); + } catch (FileNotFoundException ignored) { + } + } if (null != is) { List labels = new AddToClusterNodeLabelsRequestPBImpl( AddToClusterNodeLabelsRequestProto.parseDelimitedFrom(is)) @@ -209,8 +211,13 @@ public class FileSystemNodeLabelsStore extends NodeLabelsStore { // Open and process editlog editLogPath = new Path(fsWorkingPath, EDITLOG_FILENAME); - if (fs.exists(editLogPath)) { - FSDataInputStream is = fs.open(editLogPath); + FSDataInputStream is; + try { + is = fs.open(editLogPath); + } catch (FileNotFoundException e) { + is = null; + } + if (null != is) { while (true) { try { @@ -255,6 +262,7 @@ public class FileSystemNodeLabelsStore extends NodeLabelsStore { break; } } + is.close(); } // Serialize current mirror to mirror.writing http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NonAppendableFSNodeLabelStore.java ---------------------------------------------------------------------- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NonAppendableFSNodeLabelStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NonAppendableFSNodeLabelStore.java index 6be5715..989f027 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NonAppendableFSNodeLabelStore.java +++ 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NonAppendableFSNodeLabelStore.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NonAppendableFSNodeLabelStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NonAppendableFSNodeLabelStore.java
index 6be5715..989f027 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NonAppendableFSNodeLabelStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NonAppendableFSNodeLabelStore.java
@@ -116,16 +116,12 @@ public class NonAppendableFSNodeLabelStore extends FileSystemNodeLabelsStore {
 
       // Rename mirror.new.tmp to mirror.new (will remove .new if it's existed)
       Path newPath = new Path(fsWorkingPath, MIRROR_FILENAME + ".new");
-      if (fs.exists(newPath)) {
-        fs.delete(newPath, false);
-      }
+      fs.delete(newPath, false);
       fs.rename(newTmpPath, newPath);
 
       // Remove existing mirror and rename mirror.new to mirror
       Path mirrorPath = new Path(fsWorkingPath, MIRROR_FILENAME);
-      if (fs.exists(mirrorPath)) {
-        fs.delete(mirrorPath, false);
-      }
+      fs.delete(mirrorPath, false);
       fs.rename(newPath, mirrorPath);
     } finally {
       readLock.unlock();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java
index 82e4e11..ed2f4aa 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java
@@ -348,10 +348,10 @@ public class TestFileSystemNodeLabelsStore extends NodeLabelTestBase {
     };
     mockStore.setNodeLabelsManager(mgr);
     mockStore.fs = mockFs;
-    verifyMkdirsCount(mockStore, true, 0);
-    verifyMkdirsCount(mockStore, false, 1);
     verifyMkdirsCount(mockStore, true, 1);
     verifyMkdirsCount(mockStore, false, 2);
+    verifyMkdirsCount(mockStore, true, 3);
+    verifyMkdirsCount(mockStore, false, 4);
   }
 
   private void verifyMkdirsCount(FileSystemNodeLabelsStore store,
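
Note: the NonAppendableFSNodeLabelStore hunk relies on delete() reporting a
missing path through its boolean return value rather than an exception,
which is what makes the unconditional calls safe; the updated test
expectations then grow by one on every (re)initialization, since mkdirs()
is now invoked unconditionally. A small sketch of the mirror-rotation idiom
under that assumption; the file names are illustrative and rename() results
are ignored here for brevity:

    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public final class MirrorRotation {
      private MirrorRotation() {}

      static void rotate(FileSystem fs, Path workDir) throws IOException {
        Path tmp = new Path(workDir, "mirror.writing");
        Path next = new Path(workDir, "mirror.new");
        Path live = new Path(workDir, "mirror");
        fs.delete(next, false);  // returns false if absent; no pre-check
        fs.rename(tmp, next);
        fs.delete(live, false);
        fs.rename(next, live);
      }
    }
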
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java
index bb52b55..be7bc6d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java
@@ -22,6 +22,7 @@ import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.DataOutputStream;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
@@ -123,12 +124,7 @@ public class FileSystemApplicationHistoryStore extends AbstractService
     rootDirPath = new Path(fsWorkingPath, ROOT_DIR_NAME);
     try {
       fs = getFileSystem(fsWorkingPath, conf);
-
-      if (!fs.isDirectory(rootDirPath)) {
-        fs.mkdirs(rootDirPath);
-        fs.setPermission(rootDirPath, ROOT_DIR_UMASK);
-      }
-
+      fs.mkdirs(rootDirPath, ROOT_DIR_UMASK);
     } catch (IOException e) {
       LOG.error("Error when initializing FileSystemHistoryStorage", e);
       throw e;
@@ -659,9 +655,11 @@ public class FileSystemApplicationHistoryStore extends AbstractService
   private HistoryFileReader getHistoryFileReader(ApplicationId appId)
       throws IOException {
     Path applicationHistoryFile = new Path(rootDirPath, appId.toString());
-    if (!fs.exists(applicationHistoryFile)) {
-      throw new IOException("History file for application " + appId
-          + " is not found");
+    try {
+      fs.getFileStatus(applicationHistoryFile);
+    } catch (FileNotFoundException e) {
+      throw (FileNotFoundException) new FileNotFoundException("History file for"
+          + " application " + appId + " is not found: " + e).initCause(e);
     }
     // The history file is still under writing
     if (outstandingWriters.containsKey(appId)) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
index c91d9f5..bd6bea3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
@@ -27,6 +27,7 @@ import org.junit.Assert;
 import static org.mockito.Mockito.any;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
@@ -279,8 +280,8 @@ public class TestFileSystemApplicationHistoryStore extends
     }
 
     // Make sure that directory creation was not attempted
-    verify(fs, times(1)).isDirectory(any(Path.class));
-    verify(fs, times(0)).mkdirs(any(Path.class));
+    verify(fs, never()).isDirectory(any(Path.class));
+    verify(fs, times(1)).mkdirs(any(Path.class));
   }
 
   @Test
@@ -301,7 +302,7 @@ public class TestFileSystemApplicationHistoryStore extends
     }
 
     // Make sure that directory creation was attempted
-    verify(fs, times(1)).isDirectory(any(Path.class));
+    verify(fs, never()).isDirectory(any(Path.class));
     verify(fs, times(1)).mkdirs(any(Path.class));
   }
 }
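
Note: FileSystemApplicationHistoryStore now treats getFileStatus() itself
as the existence check: a single RPC that either returns a status or raises
FileNotFoundException, which the code re-wraps to keep the specific
exception type while adding context. A sketch of that re-wrap idiom; the
class and method names are illustrative, not from the patch:

    import java.io.FileNotFoundException;
    import java.io.IOException;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public final class RequirePath {
      private RequirePath() {}

      static FileStatus require(FileSystem fs, Path file) throws IOException {
        try {
          // Doubles as the existence check; no separate exists() call.
          return fs.getFileStatus(file);
        } catch (FileNotFoundException e) {
          // initCause() returns Throwable, hence the cast back to the
          // more specific exception type.
          throw (FileNotFoundException) new FileNotFoundException(
              "Required file " + file + " is not found").initCause(e);
        }
      }
    }
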
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java
index b034e7a..e077f89 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/sharedcache/SharedCacheUploader.java
@@ -192,10 +192,12 @@ class SharedCacheUploader implements Callable<Boolean> {
 
   private void deleteTempFile(Path tempPath) {
     try {
-      if (tempPath != null && fs.exists(tempPath)) {
+      if (tempPath != null) {
         fs.delete(tempPath, false);
       }
-    } catch (IOException ignore) {}
+    } catch (IOException ioe) {
+      LOG.debug("Exception received while deleting temp files", ioe);
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e0fcff8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-sharedcachemanager/src/main/java/org/apache/hadoop/yarn/server/sharedcachemanager/store/InMemorySCMStore.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-sharedcachemanager/src/main/java/org/apache/hadoop/yarn/server/sharedcachemanager/store/InMemorySCMStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-sharedcachemanager/src/main/java/org/apache/hadoop/yarn/server/sharedcachemanager/store/InMemorySCMStore.java
index 54d736f..7b769a7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-sharedcachemanager/src/main/java/org/apache/hadoop/yarn/server/sharedcachemanager/store/InMemorySCMStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-sharedcachemanager/src/main/java/org/apache/hadoop/yarn/server/sharedcachemanager/store/InMemorySCMStore.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.yarn.server.sharedcachemanager.store;
 
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -189,11 +190,14 @@ public class InMemorySCMStore extends SCMStore {
         conf.get(YarnConfiguration.SHARED_CACHE_ROOT,
             YarnConfiguration.DEFAULT_SHARED_CACHE_ROOT);
     Path root = new Path(location);
-    if (!fs.exists(root)) {
+    try {
+      fs.getFileStatus(root);
+    } catch (FileNotFoundException e) {
       String message = "The shared cache root directory " + location
           + " was not found";
       LOG.error(message);
-      throw new IOException(message);
+      throw (IOException)new FileNotFoundException(message)
+          .initCause(e);
     }
 
     int nestedLevel = SharedCacheUtil.getCacheDepth(conf);
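
Note: across the whole patch the recurring contract is that
FileSystem#delete returns false, rather than throwing, when the path is
already gone, so callers never need an exists() probe first. A quick
local-filesystem demonstration of that contract; the path is illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class DeleteSemantics {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        Path missing = new Path("/tmp/no-such-file-" + System.nanoTime());
        // Prints "false": absence is reported via the return value,
        // so no exists() probe is needed before delete().
        System.out.println(fs.delete(missing, false));
      }
    }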