hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ma...@apache.org
Subject svn commit: r1150987 - in /hadoop/common/trunk/common: CHANGES.txt src/java/org/apache/hadoop/fs/shell/Delete.java src/java/org/apache/hadoop/fs/shell/PathExceptions.java src/test/core/org/apache/hadoop/cli/testConf.xml
Date Tue, 26 Jul 2011 04:57:10 GMT
Author: mattf
Date: Tue Jul 26 04:57:09 2011
New Revision: 1150987

URL: http://svn.apache.org/viewvc?rev=1150987&view=rev
Log:
HADOOP-6385. dfs should support -rmdir (was HDFS-639). Contributed by Daryn Sharp.

Modified:
    hadoop/common/trunk/common/CHANGES.txt
    hadoop/common/trunk/common/src/java/org/apache/hadoop/fs/shell/Delete.java
    hadoop/common/trunk/common/src/java/org/apache/hadoop/fs/shell/PathExceptions.java
    hadoop/common/trunk/common/src/test/core/org/apache/hadoop/cli/testConf.xml

Modified: hadoop/common/trunk/common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/common/CHANGES.txt?rev=1150987&r1=1150986&r2=1150987&view=diff
==============================================================================
--- hadoop/common/trunk/common/CHANGES.txt (original)
+++ hadoop/common/trunk/common/CHANGES.txt Tue Jul 26 04:57:09 2011
@@ -60,6 +60,9 @@ Trunk (unreleased changes)
 
     HADOOP-7460. Support pluggable trash policies. (Usman Masoon via suresh)
 
+    HADOOP-6385. dfs should support -rmdir (was HDFS-639). (Daryn Sharp
+    via mattf)
+
   IMPROVEMENTS
 
     HADOOP-7042. Updates to test-patch.sh to include failed test names and

Modified: hadoop/common/trunk/common/src/java/org/apache/hadoop/fs/shell/Delete.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/common/src/java/org/apache/hadoop/fs/shell/Delete.java?rev=1150987&r1=1150986&r2=1150987&view=diff
==============================================================================
--- hadoop/common/trunk/common/src/java/org/apache/hadoop/fs/shell/Delete.java (original)
+++ hadoop/common/trunk/common/src/java/org/apache/hadoop/fs/shell/Delete.java Tue Jul 26 04:57:09 2011
@@ -27,6 +27,8 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.fs.shell.PathExceptions.PathIOException;
 import org.apache.hadoop.fs.shell.PathExceptions.PathIsDirectoryException;
+import org.apache.hadoop.fs.shell.PathExceptions.PathIsNotDirectoryException;
+import org.apache.hadoop.fs.shell.PathExceptions.PathIsNotEmptyDirectoryException;
 
 /**
  * Classes that delete paths
@@ -34,9 +36,10 @@ import org.apache.hadoop.fs.shell.PathEx
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 
-class Delete extends FsCommand {
+class Delete {
   public static void registerCommands(CommandFactory factory) {
     factory.addClass(Rm.class, "-rm");
+    factory.addClass(Rmdir.class, "-rmdir");
     factory.addClass(Rmr.class, "-rmr");
     factory.addClass(Expunge.class, "-expunge");
   }
@@ -44,26 +47,35 @@ class Delete extends FsCommand {
   /** remove non-directory paths */
   public static class Rm extends FsCommand {
     public static final String NAME = "rm";
-    public static final String USAGE = "[-r|-R] [-skipTrash] <src> ...";
+    public static final String USAGE = "[-f] [-r|-R] [-skipTrash] <src> ...";
     public static final String DESCRIPTION =
       "Delete all files that match the specified file pattern.\n" +
       "Equivalent to the Unix command \"rm <src>\"\n" +
       "-skipTrash option bypasses trash, if enabled, and immediately\n" +
       "deletes <src>\n" +
+      "  -f     If the file does not exist, do not display a diagnostic\n" +
+      "         message or modify the exit status to reflect an error.\n" +
       "  -[rR]  Recursively deletes directories";
 
     private boolean skipTrash = false;
     private boolean deleteDirs = false;
+    private boolean ignoreFNF = false;
     
     @Override
     protected void processOptions(LinkedList<String> args) throws IOException {
       CommandFormat cf = new CommandFormat(
-          1, Integer.MAX_VALUE, "r", "R", "skipTrash");
+          1, Integer.MAX_VALUE, "f", "r", "R", "skipTrash");
       cf.parse(args);
+      ignoreFNF = cf.getOpt("f");
       deleteDirs = cf.getOpt("r") || cf.getOpt("R");
       skipTrash = cf.getOpt("skipTrash");
     }
-    
+
+    @Override
+    protected void processNonexistentPath(PathData item) throws IOException {
+      if (!ignoreFNF) super.processNonexistentPath(item);
+    }
+
     @Override
     protected void processPath(PathData item) throws IOException {
       if (item.stat.isDirectory() && !deleteDirs) {
@@ -112,7 +124,40 @@ class Delete extends FsCommand {
       return "rm -r";
     }
   }
-  
+
+  /** remove only empty directories */
+  static class Rmdir extends FsCommand {
+    public static final String NAME = "rmdir";
+    public static final String USAGE =
+      "[--ignore-fail-on-non-empty] <dir> ...";
+    public static final String DESCRIPTION =
+      "Removes the directory entry specified by each directory argument,\n" +
+      "provided it is empty.\n"; 
+    
+    private boolean ignoreNonEmpty = false;
+    
+    protected void processOptions(LinkedList<String> args) throws IOException {
+      CommandFormat cf = new CommandFormat(
+          1, Integer.MAX_VALUE, "-ignore-fail-on-non-empty");
+      cf.parse(args);
+      ignoreNonEmpty = cf.getOpt("-ignore-fail-on-non-empty");
+    }
+
+    @Override
+    protected void processPath(PathData item) throws IOException {
+      if (!item.stat.isDirectory()) {
+        throw new PathIsNotDirectoryException(item.toString());
+      }      
+      if (item.fs.listStatus(item.path).length == 0) {
+        if (!item.fs.delete(item.path, false)) {
+          throw new PathIOException(item.toString());
+        }
+      } else if (!ignoreNonEmpty) {
+        throw new PathIsNotEmptyDirectoryException(item.toString());
+      }
+    }
+  }
+
   /** empty the trash */
   static class Expunge extends FsCommand {
     public static final String NAME = "expunge";

Modified: hadoop/common/trunk/common/src/java/org/apache/hadoop/fs/shell/PathExceptions.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/common/src/java/org/apache/hadoop/fs/shell/PathExceptions.java?rev=1150987&r1=1150986&r2=1150987&view=diff
==============================================================================
--- hadoop/common/trunk/common/src/java/org/apache/hadoop/fs/shell/PathExceptions.java (original)
+++ hadoop/common/trunk/common/src/java/org/apache/hadoop/fs/shell/PathExceptions.java Tue Jul 26 04:57:09 2011
@@ -31,6 +31,7 @@ import org.apache.hadoop.fs.Path;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 
+@SuppressWarnings("serial")
 public class PathExceptions {
 
   /** EIO */
@@ -165,6 +166,14 @@ public class PathExceptions {
     }
   }
 
+  /** Generated by rm commands */
+  public static class PathIsNotEmptyDirectoryException extends PathExistsException {
+    /** @param path for the exception */
+    public PathIsNotEmptyDirectoryException(String path) {
+      super(path, "Directory is not empty");
+    }
+  }  
+
   /** EACCES */
   public static class PathAccessDeniedException extends PathIOException {
     static final long serialVersionUID = 0L;

Modified: hadoop/common/trunk/common/src/test/core/org/apache/hadoop/cli/testConf.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/common/src/test/core/org/apache/hadoop/cli/testConf.xml?rev=1150987&r1=1150986&r2=1150987&view=diff
==============================================================================
--- hadoop/common/trunk/common/src/test/core/org/apache/hadoop/cli/testConf.xml (original)
+++ hadoop/common/trunk/common/src/test/core/org/apache/hadoop/cli/testConf.xml Tue Jul 26 04:57:09 2011
@@ -283,7 +283,7 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^-rm \[-r\|-R\] \[-skipTrash\] &lt;src&gt; \.\.\.:( |\t)*Delete all files that match the specified file pattern.( )*</expected-output>
+          <expected-output>^-rm \[-f\] \[-r\|-R\] \[-skipTrash\] &lt;src&gt; \.\.\.:( |\t)*Delete all files that match the specified file pattern.( )*</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
@@ -297,6 +297,37 @@
           <type>RegexpComparator</type>
           <expected-output>^( |\t)*deletes &lt;src&gt;( )*</expected-output>
         </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^\s+-f\s+If the file does not exist, do not display a diagnostic</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^\s+message or modify the exit status to reflect an error\.</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^\s+-\[rR\]\s+Recursively deletes directories</expected-output>
+        </comparator>
+      </comparators>
+    </test>
+
+    <test> <!-- TESTED -->
+      <description>help: help for rmdir</description>
+      <test-commands>
+        <command>-help rmdir</command>
+      </test-commands>
+      <cleanup-commands>
+      </cleanup-commands>
+      <comparators>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^-rmdir \[--ignore-fail-on-non-empty\] &lt;dir&gt; \.\.\.:\s+Removes the directory entry specified by each directory argument,</expected-output>
+        </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>\s+provided it is empty.</expected-output>
+        </comparator>
       </comparators>
     </test>
 



Mime
View raw message