hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cutt...@apache.org
Subject svn commit: r612200 - in /lucene/hadoop/trunk: CHANGES.txt conf/hadoop-default.xml src/java/org/apache/hadoop/fs/Trash.java src/test/org/apache/hadoop/dfs/TestTrash.java
Date Tue, 15 Jan 2008 20:16:19 GMT
Author: cutting
Date: Tue Jan 15 12:16:17 2008
New Revision: 612200

URL: http://svn.apache.org/viewvc?rev=612200&view=rev
Log:
HADOOP-2514.  Change trash feature to keep per-user trash directory.

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/conf/hadoop-default.xml
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Trash.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestTrash.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?rev=612200&r1=612199&r2=612200&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Tue Jan 15 12:16:17 2008
@@ -48,6 +48,11 @@
     to, e.g., pass LocalFileSystem#getWorkingDir().toString() directly
     to java.io methods that accept file names. (cutting)
 
+    HADOOP-2514.  Change trash feature to maintain a per-user trash
+    directory, named ".Trash" in the user's home directory.  The
+    "fs.trash.root" parameter is no longer used.  Full source paths
+    are also no longer reproduced within the trash.
+
   NEW FEATURES
 
     HADOOP-1857.  Ability to run a script when a task fails to capture stack

Modified: lucene/hadoop/trunk/conf/hadoop-default.xml
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/conf/hadoop-default.xml?rev=612200&r1=612199&r2=612200&view=diff
==============================================================================
--- lucene/hadoop/trunk/conf/hadoop-default.xml (original)
+++ lucene/hadoop/trunk/conf/hadoop-default.xml Tue Jan 15 12:16:17 2008
@@ -112,13 +112,6 @@
 </property>
 
 <property>
-  <name>fs.trash.root</name>
-  <value>${hadoop.tmp.dir}/Trash</value>
-  <description>The trash directory, used by FsShell's 'rm' command.
-  </description>
-</property>
-
-<property>
   <name>fs.trash.interval</name>
   <value>0</value>
   <description>Number of minutes between trash checkpoints.

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Trash.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Trash.java?rev=612200&r1=612199&r2=612200&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Trash.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Trash.java Tue Jan 15 12:16:17 2008
@@ -25,25 +25,33 @@
 import org.apache.commons.logging.*;
 
 import org.apache.hadoop.conf.*;
+import org.apache.hadoop.fs.permission.*;
 
-/** Provides a <i>trash</i> feature.  Files may be moved to a trash directory.
- * They're initially stored in a <i>current</i> sub-directory of the trash
- * directory.  Within that sub-directory their original path is preserved.
- * Periodically one may checkpoint the current trash and remove older
- * checkpoints.  (This design permits trash management without enumeration of
- * the full trash content, without date support in the filesystem, and without
- * clock synchronization.)
+/** Provides a <i>trash</i> feature.  Files are moved to a user's trash
+ * directory, a subdirectory of their home directory named ".Trash".  Files are
+ * initially moved to a <i>current</i> sub-directory of the trash directory.
+ * Within that sub-directory files are stored under their final path component
+ * only; full source paths are no longer reproduced.  Periodically
+ * one may checkpoint the current trash and remove older checkpoints.  (This
+ * design permits trash management without enumeration of the full trash
+ * content, without date support in the filesystem, and without clock
+ * synchronization.)
 */
 public class Trash extends Configured {
   private static final Log LOG =
     LogFactory.getLog("org.apache.hadoop.fs.Trash");
 
-  private static final String CURRENT = "Current";
+  private static final Path CURRENT = new Path("Current");
+  private static final Path TRASH = new Path(".Trash/");
+  private static final Path HOMES = new Path("/user/");
+
+  private static final FsPermission PERMISSION =
+    new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE);
+
   private static final DateFormat CHECKPOINT = new SimpleDateFormat("yyMMddHHmm");
   private static final int MSECS_PER_MINUTE = 60*1000;
 
   private FileSystem fs;
-  private Path root;
+  private Path trash;
   private Path current;
   private long interval;
 
@@ -51,17 +59,15 @@
    * @param conf a Configuration
    */
   public Trash(Configuration conf) throws IOException {
-    super(conf);
-
-    Path root = new Path(conf.get("fs.trash.root", "/tmp/Trash"));
-
-    this.fs = root.getFileSystem(conf);
+    this(FileSystem.get(conf).getHomeDirectory(), conf);
+  }
 
-    if (!root.isAbsolute())
-      root = new Path(fs.getWorkingDirectory(), root);
+  private Trash(Path home, Configuration conf) throws IOException {
+    super(conf);
 
-    this.root = root;
-    this.current = new Path(root, CURRENT);
+    this.fs = home.getFileSystem(conf);
+    this.trash = new Path(home, TRASH);
+    this.current = new Path(trash, CURRENT);
     this.interval = conf.getLong("fs.trash.interval", 60) * MSECS_PER_MINUTE;
   }
 
@@ -78,24 +84,24 @@
     if (!fs.exists(path))                         // check that path exists
       throw new FileNotFoundException(path.toString());
 
-    URI rootUri = root.toUri();
-    String dirPath = path.toUri().getPath();
-
-    if (dirPath.startsWith(rootUri.getPath())) {  // already in trash
-      return false;
+    if (path.toString().startsWith(trash.toString())) {
+      return false;                               // already in trash
     }
 
-    Path trashPath =                              // create path in current
-      new Path(rootUri.getScheme(), rootUri.getAuthority(),
-               current.toUri().getPath()+dirPath);
+    Path trashPath = new Path(current, path.getName());
 
     IOException cause = null;
-    
+
     // try twice, in case checkpoint between the mkdirs() & rename()
     for (int i = 0; i < 2; i++) {
-      Path trashDir = trashPath.getParent();
-      if (!fs.mkdirs(trashDir)) {                 // make parent directory
-        throw new IOException("Failed to create trash directory: "+trashDir);
+      try {
+        if (!fs.mkdirs(current, PERMISSION)) {      // create current
+          LOG.warn("Can't create trash directory: "+current);
+          return false;
+        }
+      } catch (IOException e) {
+        LOG.warn("Can't create trash directory: "+current);
+        return false;
       }
       try {
         //
@@ -123,11 +129,11 @@
 
     Path checkpoint;
     synchronized (CHECKPOINT) {
-      checkpoint = new Path(root, CHECKPOINT.format(new Date()));
+      checkpoint = new Path(trash, CHECKPOINT.format(new Date()));
     }
 
     if (fs.rename(current, checkpoint)) {
-      LOG.info("Created trash checkpoint: "+checkpoint);
+      LOG.info("Created trash checkpoint: "+checkpoint.toUri().getPath());
     } else {
       throw new IOException("Failed to checkpoint trash: "+checkpoint);
     }
@@ -135,12 +141,13 @@
 
   /** Delete old checkpoints. */
   public void expunge() throws IOException {
-    Path[] dirs = fs.listPaths(root);             // scan trash sub-directories
+    Path[] dirs = fs.listPaths(trash);            // scan trash sub-directories
     long now = System.currentTimeMillis();
     for (int i = 0; i < dirs.length; i++) {
-      Path dir = dirs[i];
-      String name = dir.getName();
-      if (name.equals(CURRENT))                   // skip current
+      Path path = dirs[i];
+      String dir = path.toUri().getPath();
+      String name = path.getName();
+      if (name.equals(CURRENT.getName()))         // skip current
         continue;
 
       long time;
@@ -154,7 +161,7 @@
       }
 
       if ((now - interval) > time) {
-        if (fs.delete(dir)) {
+        if (fs.delete(path)) {
           LOG.info("Deleted trash checkpoint: "+dir);
         } else {
           LOG.warn("Couldn't delete checkpoint: "+dir+" Ignoring.");
@@ -170,22 +177,34 @@
     return current;
   }
 
-  /** Return a {@link Runnable} that periodically empties the trash.
-   * Only one checkpoint is kept at a time.
+  /** Return a {@link Runnable} that periodically empties the trash of all
+   * users, intended to be run by the superuser.  Only one checkpoint is kept
+   * at a time.
    */
-  public Runnable getEmptier() {
-    return new Emptier();
+  public Runnable getEmptier() throws IOException {
+    return new Emptier(getConf());
   }
 
-  private class Emptier implements Runnable {
+  private static class Emptier implements Runnable {
+
+    private Configuration conf;
+    private FileSystem fs;
+    private long interval;
+
+    public Emptier(Configuration conf) throws IOException {
+      this.conf = conf;
+      this.interval = conf.getLong("fs.trash.interval", 60) * MSECS_PER_MINUTE;
+      this.fs = FileSystem.get(conf);
+    }
 
     public void run() {
       if (interval == 0)
         return;                                   // trash disabled
 
       long now = System.currentTimeMillis();
-      long end = ceiling(now, interval);
+      long end;
       while (true) {
+        end = ceiling(now, interval);
         try {                                     // sleep for interval
           Thread.sleep(end - now);
         } catch (InterruptedException e) {
@@ -195,19 +214,28 @@
         now = System.currentTimeMillis();
         if (now >= end) {
 
+          FileStatus[] homes = null;
           try {
-            expunge();
+            homes = fs.listStatus(HOMES);         // list all home dirs
           } catch (IOException e) {
-            LOG.warn("Trash expunge caught: "+e+". Ignoring.");
+            LOG.warn("Trash can't list homes: "+e+" Sleeping.");
+            continue;
           }
 
-          try {
-            checkpoint();
-          } catch (IOException e) {
-            LOG.warn("Trash checkpoint caught: "+e+". Ignoring.");
-          }
+          if (homes == null)
+            continue;
 
-          end = ceiling(now, interval);
+          for (FileStatus home : homes) {         // dump each trash
+            if (!home.isDir())
+              continue;
+            try {
+              Trash trash = new Trash(home.getPath(), conf);
+              trash.expunge();
+              trash.checkpoint();
+            } catch (IOException e) {
+              LOG.warn("Trash caught: "+e+". Skipping "+home.getPath()+".");
+            }
+          }
         }
       }
     }

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestTrash.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestTrash.java?rev=612200&r1=612199&r2=612200&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestTrash.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestTrash.java Tue Jan 15 12:16:17 2008
@@ -48,14 +48,14 @@
   // check that the specified file is in Trash
   static void checkTrash(FileSystem fs, Path trashRoot, String pathname)
                          throws IOException {
-    Path p = new Path(trashRoot + pathname);
+    Path p = new Path(trashRoot+"/"+new Path(pathname).getName());
     assertTrue(fs.exists(p));
   }
 
   // check that the specified file is not in Trash
   static void checkNotInTrash(FileSystem fs, Path trashRoot, String pathname)
                               throws IOException {
-    Path p = new Path(trashRoot + pathname);
+    Path p = new Path(trashRoot+"/"+ new Path(pathname).getName());
     assertTrue(!fs.exists(p));
   }
 
@@ -69,8 +69,6 @@
   public void testTrash() throws IOException {
     Configuration conf = new Configuration();
     conf.set("fs.trash.interval", 10); // 10 minutes
-    conf.set("fs.trash.root", 
-             conf.get("hadoop.tmp.dir") + "/Trash");
     MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);
     FileSystem fs = cluster.getFileSystem();
     DistributedFileSystem fileSys = (DistributedFileSystem) fs;



Mime
View raw message