hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From szets...@apache.org
Subject svn commit: r712288 - in /hadoop/core/branches/branch-0.19: CHANGES.txt src/core/org/apache/hadoop/fs/FsShell.java src/test/org/apache/hadoop/hdfs/TestDFSShell.java
Date Fri, 07 Nov 2008 22:02:30 GMT
Author: szetszwo
Date: Fri Nov  7 14:02:29 2008
New Revision: 712288

URL: http://svn.apache.org/viewvc?rev=712288&view=rev
Log:
HADOOP-3121. lsr should keep listing the remaining items but not terminate if there is any
IOException. (szetszwo)

Modified:
    hadoop/core/branches/branch-0.19/CHANGES.txt
    hadoop/core/branches/branch-0.19/src/core/org/apache/hadoop/fs/FsShell.java
    hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/hdfs/TestDFSShell.java

Modified: hadoop/core/branches/branch-0.19/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/CHANGES.txt?rev=712288&r1=712287&r2=712288&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/CHANGES.txt (original)
+++ hadoop/core/branches/branch-0.19/CHANGES.txt Fri Nov  7 14:02:29 2008
@@ -976,6 +976,9 @@
     HADOOP-4597. Calculate mis-replicated blocks when safe-mode is turned
     off manually. (shv)
 
+    HADOOP-3121. lsr should keep listing the remaining items but not
+    terminate if there is any IOException. (szetszwo)
+
 Release 0.18.2 - 2008-11-03
 
   BUG FIXES

Modified: hadoop/core/branches/branch-0.19/src/core/org/apache/hadoop/fs/FsShell.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/core/org/apache/hadoop/fs/FsShell.java?rev=712288&r1=712287&r2=712288&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/core/org/apache/hadoop/fs/FsShell.java (original)
+++ hadoop/core/branches/branch-0.19/src/core/org/apache/hadoop/fs/FsShell.java Fri Nov  7 14:02:29 2008
@@ -569,7 +569,7 @@
    * @throws IOException  
    * @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
    */
-  void ls(String srcf, boolean recursive) throws IOException {
+  private int ls(String srcf, boolean recursive) throws IOException {
     Path srcPath = new Path(srcf);
     FileSystem srcFs = srcPath.getFileSystem(this.getConf());
     FileStatus[] srcs = srcFs.globStatus(srcPath);
@@ -579,20 +579,24 @@
     }
  
     boolean printHeader = (srcs.length == 1) ? true: false;
+    int numOfErrors = 0;
     for(int i=0; i<srcs.length; i++) {
-      ls(srcs[i].getPath(), srcFs, recursive, printHeader);
+      numOfErrors += ls(srcs[i].getPath(), srcFs, recursive, printHeader);
     }
+    return numOfErrors == 0 ? 0 : -1;
   }
 
   /* list all files under the directory <i>src</i>
    * ideally we should provide "-l" option, that lists like "ls -l".
    */
-  private void ls(Path src, FileSystem srcFs, boolean recursive, boolean printHeader) throws IOException {
-    FileStatus items[] = srcFs.listStatus(src);
-    if ((items == null) || ((items.length == 0) 
-        && (!srcFs.exists(src)))) {
-      throw new FileNotFoundException(src + ": No such file or directory.");
+  private int ls(Path src, FileSystem srcFs, boolean recursive,
+      boolean printHeader) throws IOException {
+    final String cmd = recursive? "lsr": "ls";
+    final FileStatus[] items = shellListStatus(cmd, srcFs, src);
+    if (items == null) {
+      return 1;
     } else {
+      int numOfErrors = 0;
       if (!recursive && printHeader) {
         if (items.length != 0) {
           System.out.println("Found " + items.length + " items");
@@ -631,9 +635,10 @@
         System.out.print(mdate + " ");
         System.out.println(cur.toUri().getPath());
         if (recursive && stat.isDir()) {
-          ls(cur,srcFs, recursive, printHeader);
+          numOfErrors += ls(cur,srcFs, recursive, printHeader);
         }
       }
+      return numOfErrors;
     }
   }
 
@@ -1134,19 +1139,19 @@
     public abstract void run(FileStatus file, FileSystem fs) throws IOException;
   }
   
-  ///helper for runCmdHandler*() returns listStatus()
-  private static FileStatus[] cmdHandlerListStatus(CmdHandler handler, 
+  /** helper returns listStatus() */
+  private static FileStatus[] shellListStatus(String cmd, 
                                                    FileSystem srcFs,
                                                    Path path) {
     try {
       FileStatus[] files = srcFs.listStatus(path);
       if ( files == null ) {
-        System.err.println(handler.getName() + 
+        System.err.println(cmd + 
                            ": could not get listing for '" + path + "'");
       }
       return files;
     } catch (IOException e) {
-      System.err.println(handler.getName() + 
+      System.err.println(cmd + 
                          ": could not get get listing for '" + path + "' : " +
                          e.getMessage().split("\n")[0]);
     }
@@ -1164,7 +1169,7 @@
     int errors = 0;
     handler.run(stat, srcFs);
     if (recursive && stat.isDir() && handler.okToContinue()) {
-      FileStatus[] files = cmdHandlerListStatus(handler, srcFs, 
+      FileStatus[] files = shellListStatus(handler.getName(), srcFs, 
                                                 stat.getPath());
       if (files == null) {
         return 1;
@@ -1525,9 +1530,9 @@
         } else if (Count.matches(cmd)) {
           new Count(argv, i, getConf()).runAll();
         } else if ("-ls".equals(cmd)) {
-          ls(argv[i], false);
+          exitCode = ls(argv[i], false);
         } else if ("-lsr".equals(cmd)) {
-          ls(argv[i], true);
+          exitCode = ls(argv[i], true);
         } else if ("-touchz".equals(cmd)) {
           touchz(argv[i]);
         } else if ("-text".equals(cmd)) {
@@ -1742,13 +1747,13 @@
         if (i < argv.length) {
           exitCode = doall(cmd, argv, i);
         } else {
-          ls(Path.CUR_DIR, false);
+          exitCode = ls(Path.CUR_DIR, false);
         } 
       } else if ("-lsr".equals(cmd)) {
         if (i < argv.length) {
           exitCode = doall(cmd, argv, i);
         } else {
-          ls(Path.CUR_DIR, true);
+          exitCode = ls(Path.CUR_DIR, true);
         } 
       } else if ("-mv".equals(cmd)) {
         exitCode = rename(argv, getConf());

Modified: hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/hdfs/TestDFSShell.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/hdfs/TestDFSShell.java?rev=712288&r1=712287&r2=712288&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/hdfs/TestDFSShell.java (original)
+++ hadoop/core/branches/branch-0.19/src/test/org/apache/hadoop/hdfs/TestDFSShell.java Fri Nov  7 14:02:29 2008
@@ -17,21 +17,36 @@
  */
 package org.apache.hadoop.hdfs;
 
-import junit.framework.TestCase;
-import java.io.*;
-import java.security.*;
-import java.util.*;
+import java.io.ByteArrayOutputStream;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.io.PrintWriter;
+import java.security.Permission;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Random;
+import java.util.Scanner;
 import java.util.zip.GZIPOutputStream;
 
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FSInputChecker;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.fs.shell.*;
+import org.apache.hadoop.fs.shell.Count;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.FSDataset;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -1221,4 +1236,52 @@
       cluster.shutdown();
     }
   }
+
+  public void testLsr() throws Exception {
+    Configuration conf = new Configuration();
+    MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);
+    DistributedFileSystem dfs = (DistributedFileSystem)cluster.getFileSystem();
+
+    try {
+      final String root = createTree(dfs, "lsr");
+      dfs.mkdirs(new Path(root, "zzz"));
+      
+      runLsr(new FsShell(conf), root, 0);
+      
+      final Path sub = new Path(root, "sub");
+      dfs.setPermission(sub, new FsPermission((short)0));
+
+      final UserGroupInformation ugi = UserGroupInformation.getCurrentUGI();
+      final String tmpusername = ugi.getUserName() + "1";
+      UnixUserGroupInformation tmpUGI = new UnixUserGroupInformation(
+          tmpusername, new String[] {tmpusername});
+      UnixUserGroupInformation.saveToConf(conf,
+            UnixUserGroupInformation.UGI_PROPERTY_NAME, tmpUGI);
+      String results = runLsr(new FsShell(conf), root, -1);
+      assertTrue(results.contains("zzz"));
+    } finally {
+      cluster.shutdown();
+    }
+  }
+  private static String runLsr(final FsShell shell, String root, int returnvalue
+      ) throws Exception {
+    System.out.println("root=" + root + ", returnvalue=" + returnvalue);
+    final ByteArrayOutputStream bytes = new ByteArrayOutputStream(); 
+    final PrintStream out = new PrintStream(bytes);
+    final PrintStream oldOut = System.out;
+    final PrintStream oldErr = System.err;
+    System.setOut(out);
+    System.setErr(out);
+    final String results;
+    try {
+      assertEquals(returnvalue, shell.run(new String[]{"-lsr", root}));
+      results = bytes.toString();
+    } finally {
+      IOUtils.closeStream(out);
+      System.setOut(oldOut);
+      System.setErr(oldErr);
+    }
+    System.out.println("results:\n" + results);
+    return results;
+  }
 }



Mime
View raw message