hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ji...@apache.org
Subject [02/50] [abbrv] hadoop git commit: HDFS-9799. Reimplement getCurrentTrashDir to remove incompatibility. (zhz)
Date Wed, 24 Feb 2016 20:02:44 GMT
HDFS-9799. Reimplement getCurrentTrashDir to remove incompatibility. (zhz)

Change-Id: I7834bcebffed38cb384db5395ddb8b6dd9e79a0b


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0fb14aac
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0fb14aac
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0fb14aac

Branch: refs/heads/HDFS-7240
Commit: 0fb14aaced7afdbbb72e59316c186ebf6ec2d091
Parents: 76fab26
Author: Zhe Zhang <zhz@apache.org>
Authored: Wed Feb 17 13:30:50 2016 -0800
Committer: Zhe Zhang <zhz@apache.org>
Committed: Wed Feb 17 13:30:50 2016 -0800

----------------------------------------------------------------------
 .../java/org/apache/hadoop/fs/FileSystem.java   | 41 ++++++-------
 .../org/apache/hadoop/fs/FilterFileSystem.java  |  5 +-
 .../java/org/apache/hadoop/fs/TrashPolicy.java  |  2 +-
 .../apache/hadoop/fs/TrashPolicyDefault.java    |  9 +--
 .../hadoop/hdfs/DistributedFileSystem.java      | 63 +++++++++++---------
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt     |  2 +
 6 files changed, 64 insertions(+), 58 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0fb14aac/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index f4a2e7d..3e26f68 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -2689,9 +2689,8 @@ public abstract class FileSystem extends Configured implements Closeable {
    *
    * @param path the trash root of the path to be determined.
    * @return the default implementation returns "/user/$USER/.Trash".
-   * @throws IOException
    */
-  public Path getTrashRoot(Path path) throws IOException {
+  public Path getTrashRoot(Path path) {
     return this.makeQualified(new Path(getHomeDirectory().toUri().getPath(),
         TRASH_PREFIX));
   }
@@ -2703,29 +2702,31 @@ public abstract class FileSystem extends Configured implements Closeable {
    * @return all the trash root directories.
    *         Default FileSystem returns .Trash under users' home directories if
    *         /user/$USER/.Trash exists.
-   * @throws IOException
    */
-  public Collection<FileStatus> getTrashRoots(boolean allUsers)
-      throws IOException {
+  public Collection<FileStatus> getTrashRoots(boolean allUsers) {
     Path userHome = new Path(getHomeDirectory().toUri().getPath());
-    List<FileStatus> ret = new ArrayList<FileStatus>();
-    if (!allUsers) {
-      Path userTrash = new Path(userHome, TRASH_PREFIX);
-      if (exists(userTrash)) {
-        ret.add(getFileStatus(userTrash));
-      }
-    } else {
-      Path homeParent = userHome.getParent();
-      if (exists(homeParent)) {
-        FileStatus[] candidates = listStatus(homeParent);
-        for (FileStatus candidate : candidates) {
-          Path userTrash = new Path(candidate.getPath(), TRASH_PREFIX);
-          if (exists(userTrash)) {
-            candidate.setPath(userTrash);
-            ret.add(candidate);
+    List<FileStatus> ret = new ArrayList<>();
+    try {
+      if (!allUsers) {
+        Path userTrash = new Path(userHome, TRASH_PREFIX);
+        if (exists(userTrash)) {
+          ret.add(getFileStatus(userTrash));
+        }
+      } else {
+        Path homeParent = userHome.getParent();
+        if (exists(homeParent)) {
+          FileStatus[] candidates = listStatus(homeParent);
+          for (FileStatus candidate : candidates) {
+            Path userTrash = new Path(candidate.getPath(), TRASH_PREFIX);
+            if (exists(userTrash)) {
+              candidate.setPath(userTrash);
+              ret.add(candidate);
+            }
           }
         }
       }
+    } catch (IOException e) {
+      LOG.warn("Cannot get all trash roots", e);
     }
     return ret;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0fb14aac/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
index 53678e0..4ee7514 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
@@ -645,13 +645,12 @@ public class FilterFileSystem extends FileSystem {
   }
 
   @Override
-  public Path getTrashRoot(Path path) throws IOException {
+  public Path getTrashRoot(Path path) {
     return fs.getTrashRoot(path);
   }
 
   @Override
-  public Collection<FileStatus> getTrashRoots(boolean allUsers)
-      throws IOException {
+  public Collection<FileStatus> getTrashRoots(boolean allUsers) {
     return fs.getTrashRoots(allUsers);
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0fb14aac/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
index 1d901c1..92a4d1f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
@@ -89,7 +89,7 @@ public abstract class TrashPolicy extends Configured {
    * It returns the trash location correctly for the path specified no matter
    * the path is in encryption zone or not.
    */
-  public abstract Path getCurrentTrashDir() throws IOException;
+  public abstract Path getCurrentTrashDir();
 
   /**
    * Get the current trash directory for path specified based on the Trash

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0fb14aac/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
index b5d245b..012ce32 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
@@ -196,7 +196,7 @@ public class TrashPolicyDefault extends TrashPolicy {
   }
 
   @Override
-  public Path getCurrentTrashDir() throws IOException {
+  public Path getCurrentTrashDir() {
     return new Path(fs.getTrashRoot(null), CURRENT);
   }
 
@@ -250,12 +250,7 @@ public class TrashPolicyDefault extends TrashPolicy {
           now = Time.now();
           if (now >= end) {
             Collection<FileStatus> trashRoots;
-            try {
-              trashRoots = fs.getTrashRoots(true);      // list all home dirs
-            } catch (IOException e) {
-              LOG.warn("Trash can't list all trash roots: "+e+" Sleeping.");
-              continue;
-            }
+            trashRoots = fs.getTrashRoots(true);      // list all trash dirs
 
             for (FileStatus trashRoot : trashRoots) {   // dump each trash
               if (!trashRoot.isDirectory())

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0fb14aac/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
index 6de7659..69f3871 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
@@ -2338,27 +2338,31 @@ public class DistributedFileSystem extends FileSystem {
   /**
    * Get the root directory of Trash for a path in HDFS.
    * 1. File in encryption zone returns /ez1/.Trash/username
-   * 2. File not in encryption zone returns /users/username/.Trash
+   * 2. File not in encryption zone, or encountered exception when checking
+   *    the encryption zone of the path, returns /users/username/.Trash
    * Caller appends either Current or checkpoint timestamp for trash destination
    * @param path the trash root of the path to be determined.
    * @return trash root
-   * @throws IOException
    */
   @Override
-  public Path getTrashRoot(Path path) throws IOException {
+  public Path getTrashRoot(Path path) {
     if ((path == null) || path.isRoot() || !dfs.isHDFSEncryptionEnabled()) {
       return super.getTrashRoot(path);
     }
 
     String parentSrc = path.getParent().toUri().getPath();
-    EncryptionZone ez = dfs.getEZForPath(parentSrc);
-    if ((ez != null)) {
-      return this.makeQualified(
-          new Path(ez.getPath() + "/" + FileSystem.TRASH_PREFIX +
-              dfs.ugi.getShortUserName()));
-    } else {
-      return super.getTrashRoot(path);
+    try {
+      EncryptionZone ez = dfs.getEZForPath(parentSrc);
+      if ((ez != null)) {
+        return this.makeQualified(
+            new Path(ez.getPath() + "/" + FileSystem.TRASH_PREFIX +
+                dfs.ugi.getShortUserName()));
+      }
+    } catch (IOException e) {
+      DFSClient.LOG.warn("Exception in checking the encryption zone for the " +
+          "path " + parentSrc + ". " + e.getMessage());
     }
+    return super.getTrashRoot(path);
   }
 
   /**
@@ -2366,32 +2370,37 @@ public class DistributedFileSystem extends FileSystem {
    * 1. File deleted from non-encryption zone /user/username/.Trash
    * 2. File deleted from encryption zones
    *    e.g., ez1 rooted at /ez1 has its trash root at /ez1/.Trash/$USER
-   * @allUsers return trashRoots of all users if true, used by emptier
+   * @param allUsers return trashRoots of all users if true, used by emptier
    * @return trash roots of HDFS
-   * @throws IOException
    */
   @Override
-  public Collection<FileStatus> getTrashRoots(boolean allUsers) throws IOException {
-    List<FileStatus> ret = new ArrayList<FileStatus>();
+  public Collection<FileStatus> getTrashRoots(boolean allUsers) {
+    List<FileStatus> ret = new ArrayList<>();
     // Get normal trash roots
     ret.addAll(super.getTrashRoots(allUsers));
 
-    // Get EZ Trash roots
-    final RemoteIterator<EncryptionZone> it = dfs.listEncryptionZones();
-    while (it.hasNext()) {
-      Path ezTrashRoot = new Path(it.next().getPath(), FileSystem.TRASH_PREFIX);
-      if (allUsers) {
-        for (FileStatus candidate : listStatus(ezTrashRoot)) {
-          if (exists(candidate.getPath())) {
-            ret.add(candidate);
+    try {
+      // Get EZ Trash roots
+      final RemoteIterator<EncryptionZone> it = dfs.listEncryptionZones();
+      while (it.hasNext()) {
+        Path ezTrashRoot = new Path(it.next().getPath(),
+            FileSystem.TRASH_PREFIX);
+        if (allUsers) {
+          for (FileStatus candidate : listStatus(ezTrashRoot)) {
+            if (exists(candidate.getPath())) {
+              ret.add(candidate);
+            }
+          }
+        } else {
+          Path userTrash = new Path(ezTrashRoot, System.getProperty(
+              "user.name"));
+          if (exists(userTrash)) {
+            ret.add(getFileStatus(userTrash));
           }
-        }
-      } else {
-        Path userTrash = new Path(ezTrashRoot, System.getProperty("user.name"));
-        if (exists(userTrash)) {
-          ret.add(getFileStatus(userTrash));
         }
       }
+    } catch (IOException e){
+      DFSClient.LOG.warn("Cannot get all encrypted trash roots", e);
     }
     return ret;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0fb14aac/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 36bb60d..89fc187 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -2780,6 +2780,8 @@ Release 2.8.0 - UNRELEASED
     HDFS-9815. Move o.a.h.fs.Hdfs to hadoop-hdfs-client.
     (Vinayakumar B via wheat9)
 
+    HDFS-9799. Reimplement getCurrentTrashDir to remove incompatibility. (zhz)
+
 Release 2.7.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES


Mime
View raw message