hadoop-common-commits mailing list archives

From: cmcc...@apache.org
Subject: svn commit: r1619012 [6/14] - in /hadoop/common/branches/HADOOP-10388/hadoop-common-project: ./ hadoop-auth/ hadoop-auth/dev-support/ hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/ hadoop-auth/src/main/java/org/apache/hadoo...
Date: Tue, 19 Aug 2014 23:50:11 GMT
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java Tue Aug 19 23:49:39 2014
@@ -23,6 +23,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.LinkedList;
 
+import org.apache.commons.lang.WordUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -31,6 +32,7 @@ import org.apache.hadoop.conf.Configured
 import org.apache.hadoop.fs.shell.Command;
 import org.apache.hadoop.fs.shell.CommandFactory;
 import org.apache.hadoop.fs.shell.FsCommand;
+import org.apache.hadoop.tools.TableListing;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -40,6 +42,8 @@ public class FsShell extends Configured 
   
   static final Log LOG = LogFactory.getLog(FsShell.class);
 
+  private static final int MAX_LINE_WIDTH = 80;
+
   private FileSystem fs;
   private Trash trash;
   protected CommandFactory commandFactory;
@@ -117,7 +121,7 @@ public class FsShell extends Configured 
     public static final String NAME = "usage";
     public static final String USAGE = "[cmd ...]";
     public static final String DESCRIPTION =
-      "Displays the usage for given command or all commands if none\n" +
+      "Displays the usage for given command or all commands if none " +
       "is specified.";
     
     @Override
@@ -137,7 +141,7 @@ public class FsShell extends Configured 
     public static final String NAME = "help";
     public static final String USAGE = "[cmd ...]";
     public static final String DESCRIPTION =
-      "Displays help for given command or all commands if none\n" +
+      "Displays help for given command or all commands if none " +
       "is specified.";
     
     @Override
@@ -197,7 +201,7 @@ public class FsShell extends Configured 
       for (String name : commandFactory.getNames()) {
         Command instance = commandFactory.getInstance(name);
         if (!instance.isDeprecated()) {
-          System.out.println("\t[" + instance.getUsage() + "]");
+          out.println("\t[" + instance.getUsage() + "]");
           instances.add(instance);
         }
       }
@@ -217,20 +221,48 @@ public class FsShell extends Configured 
     out.println(usagePrefix + " " + instance.getUsage());
   }
 
-  // TODO: will eventually auto-wrap the text, but this matches the expected
-  // output for the hdfs tests...
   private void printInstanceHelp(PrintStream out, Command instance) {
-    boolean firstLine = true;
+    out.println(instance.getUsage() + " :");
+    TableListing listing = null;
+    final String prefix = "  ";
     for (String line : instance.getDescription().split("\n")) {
-      String prefix;
-      if (firstLine) {
-        prefix = instance.getUsage() + ":\t";
-        firstLine = false;
-      } else {
-        prefix = "\t\t";
+      if (line.matches("^[ \t]*[-<].*$")) {
+        String[] segments = line.split(":");
+        if (segments.length == 2) {
+          if (listing == null) {
+            listing = createOptionTableListing();
+          }
+          listing.addRow(segments[0].trim(), segments[1].trim());
+          continue;
+        }
+      }
+
+      // Normal literal description.
+      if (listing != null) {
+        for (String listingLine : listing.toString().split("\n")) {
+          out.println(prefix + listingLine);
+        }
+        listing = null;
+      }
+
+      for (String descLine : WordUtils.wrap(
+          line, MAX_LINE_WIDTH, "\n", true).split("\n")) {
+        out.println(prefix + descLine);
+      }
+    }
+
+    if (listing != null) {
+      for (String listingLine : listing.toString().split("\n")) {
+        out.println(prefix + listingLine);
       }
-      System.out.println(prefix + line);
-    }    
+    }
+  }
+
+  // Creates a two-column table: the first column holds the command line
+  // option, the second column holds the option description.
+  private TableListing createOptionTableListing() {
+    return new TableListing.Builder().addField("").addField("", true)
+        .wrapWidth(MAX_LINE_WIDTH).build();
   }
 
   /**

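For context, the rewritten printInstanceHelp() above leans on commons-lang's WordUtils.wrap() for the plain-prose lines. A minimal standalone sketch of that wrapping behavior, assuming commons-lang 2.x on the classpath (the sample description string is hypothetical):

import org.apache.commons.lang.WordUtils;

public class WrapSketch {
  private static final int MAX_LINE_WIDTH = 80;

  public static void main(String[] args) {
    // One long line, as produced by joining DESCRIPTION fragments with
    // spaces instead of the old embedded '\n' characters.
    String desc = "Displays the usage for given command or all commands "
        + "if none is specified.";
    // wrap(str, wrapLength, newLineStr, wrapLongWords) breaks at word
    // boundaries so no emitted line exceeds MAX_LINE_WIDTH columns.
    for (String line : WordUtils.wrap(desc, MAX_LINE_WIDTH, "\n", true)
        .split("\n")) {
      System.out.println("  " + line);  // two-space prefix, as in the patch
    }
  }
}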
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java Tue Aug 19 23:49:39 2014
@@ -63,18 +63,18 @@ public class FsShellPermissions extends 
     public static final String NAME = "chmod";
     public static final String USAGE = "[-R] <MODE[,MODE]... | OCTALMODE> PATH...";
     public static final String DESCRIPTION =
-      "Changes permissions of a file.\n" +
-      "\tThis works similar to shell's chmod with a few exceptions.\n\n" +
-      "-R\tmodifies the files recursively. This is the only option\n" +
-      "\tcurrently supported.\n\n" +
-      "MODE\tMode is same as mode used for chmod shell command.\n" +
-      "\tOnly letters recognized are 'rwxXt'. E.g. +t,a+r,g-w,+rwx,o=r\n\n" +
-      "OCTALMODE Mode specifed in 3 or 4 digits. If 4 digits, the first may\n" +
-      "be 1 or 0 to turn the sticky bit on or off, respectively.  Unlike " +
-      "shell command, it is not possible to specify only part of the mode\n" +
-      "\tE.g. 754 is same as u=rwx,g=rx,o=r\n\n" +
-      "\tIf none of 'augo' is specified, 'a' is assumed and unlike\n" +
-      "\tshell command, no umask is applied.";
+      "Changes permissions of a file. " +
+      "This works similar to the shell's chmod command with a few exceptions.\n" +
+      "-R: modifies the files recursively. This is the only option" +
+      " currently supported.\n" +
+      "<MODE>: Mode is the same as mode used for the shell's command. " +
+      "The only letters recognized are 'rwxXt', e.g. +t,a+r,g-w,+rwx,o=r.\n" +
+      "<OCTALMODE>: Mode specifed in 3 or 4 digits. If 4 digits, the first " +
+      "may be 1 or 0 to turn the sticky bit on or off, respectively.  Unlike " +
+      "the shell command, it is not possible to specify only part of the " +
+      "mode, e.g. 754 is same as u=rwx,g=rx,o=r.\n\n" +
+      "If none of 'augo' is specified, 'a' is assumed and unlike the " +
+      "shell command, no umask is applied.";
 
     protected ChmodParser pp;
 
@@ -121,18 +121,18 @@ public class FsShellPermissions extends 
     public static final String NAME = "chown";
     public static final String USAGE = "[-R] [OWNER][:[GROUP]] PATH...";
     public static final String DESCRIPTION =
-      "Changes owner and group of a file.\n" +
-      "\tThis is similar to shell's chown with a few exceptions.\n\n" +
-      "\t-R\tmodifies the files recursively. This is the only option\n" +
-      "\tcurrently supported.\n\n" +
-      "\tIf only owner or group is specified then only owner or\n" +
-      "\tgroup is modified.\n\n" +
-      "\tThe owner and group names may only consist of digits, alphabet,\n"+
-      "\tand any of " + allowedChars + ". The names are case sensitive.\n\n" +
-      "\tWARNING: Avoid using '.' to separate user name and group though\n" +
-      "\tLinux allows it. If user names have dots in them and you are\n" +
-      "\tusing local file system, you might see surprising results since\n" +
-      "\tshell command 'chown' is used for local files.";
+      "Changes owner and group of a file. " +
+      "This is similar to the shell's chown command with a few exceptions.\n" +
+      "-R: modifies the files recursively. This is the only option " +
+      "currently supported.\n\n" +
+      "If only the owner or group is specified, then only the owner or " +
+      "group is modified. " +
+      "The owner and group names may only consist of digits, alphabet, "+
+      "and any of " + allowedChars + ". The names are case sensitive.\n\n" +
+      "WARNING: Avoid using '.' to separate user name and group though " +
+      "Linux allows it. If user names have dots in them and you are " +
+      "using local file system, you might see surprising results since " +
+      "the shell command 'chown' is used for local files.";
 
     ///allows only "allowedChars" above in names for owner and group
     static private final Pattern chownPattern = Pattern.compile(

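These reflowed descriptions follow the convention that the new option-table branch in FsShell.printInstanceHelp() parses: an option line starts with '-' or '<' and contains a single ':' separating the name from its description. A small sketch of that classification, with hypothetical sample lines:

public class OptionLineSketch {
  public static void main(String[] args) {
    String[] lines = {
        "-R: modifies the files recursively. This is the only option currently supported.",
        "If only the owner or group is specified, then only the owner or group is modified."
    };
    for (String line : lines) {
      // Same tests as printInstanceHelp(): leading '-' or '<', exactly one ':'
      boolean optionRow = line.matches("^[ \t]*[-<].*$")
          && line.split(":").length == 2;
      System.out.println((optionRow ? "table row: " : "prose:     ") + line);
    }
  }
}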
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java Tue Aug 19 23:49:39 2014
@@ -687,7 +687,7 @@ public class HarFileSystem extends FileS
    * @return null since no checksum algorithm is implemented.
    */
   @Override
-  public FileChecksum getFileChecksum(Path f) {
+  public FileChecksum getFileChecksum(Path f, long length) {
     return null;
   }
 

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java Tue Aug 19 23:49:39 2014
@@ -22,10 +22,13 @@ import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.io.InputStreamReader;
+import java.io.StringReader;
 import java.util.Arrays;
 
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.Shell.ExitCodeException;
+import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 
 /**
  * Class for creating hardlinks.
@@ -89,7 +92,6 @@ public class HardLink { 
      *            to the source directory
      * @param linkDir - target directory where the hardlinks will be put
      * @return - an array of Strings suitable for use as a single shell command
-     *            with {@link Runtime.exec()}
      * @throws IOException - if any of the file or path names misbehave
      */
     abstract String[] linkMult(String[] fileBaseNames, File linkDir) 
@@ -230,17 +232,17 @@ public class HardLink { 
     //package-private ("default") access instead of "private" to assist 
     //unit testing (sort of) on non-Win servers
 
+    static String CMD_EXE = "cmd.exe";
     static String[] hardLinkCommand = {
                         Shell.WINUTILS,"hardlink","create", null, null};
     static String[] hardLinkMultPrefix = {
-                        "cmd","/q","/c","for", "%f", "in", "("};
+        CMD_EXE, "/q", "/c", "for", "%f", "in", "("};
     static String   hardLinkMultDir = "\\%f";
     static String[] hardLinkMultSuffix = {
-                        ")", "do", Shell.WINUTILS, "hardlink", "create", null,
-                        "%f", "1>NUL"};
+        ")", "do", Shell.WINUTILS, "hardlink", "create", null,
+        "%f"};
     static String[] getLinkCountCommand = {
-                        Shell.WINUTILS, "hardlink",
-                        "stat", null};
+        Shell.WINUTILS, "hardlink", "stat", null};
     //Windows guarantees only 8K - 1 bytes cmd length.
     //Subtract another 64b to allow for Java 'exec' overhead
     static final int maxAllowedCmdArgLength = 8*1024 - 65;
@@ -278,7 +280,7 @@ public class HardLink { 
       System.arraycopy(hardLinkMultSuffix, 0, buf, mark, 
                        hardLinkMultSuffix.length);
       mark += hardLinkMultSuffix.length;
-      buf[mark - 3] = td;
+      buf[mark - 2] = td;
       return buf;
     }
     
@@ -310,8 +312,8 @@ public class HardLink { 
                linkDir.getCanonicalPath().length();
       //add the fixed overhead of the hardLinkMult command 
       //(prefix, suffix, and Dir suffix)
-      sum += ("cmd.exe /q /c for %f in ( ) do "
-              + Shell.WINUTILS + " hardlink create \\%f %f 1>NUL ").length();
+      sum += (CMD_EXE + " /q /c for %f in ( ) do "
+              + Shell.WINUTILS + " hardlink create \\%f %f").length();
       return sum;
     }
     
@@ -379,21 +381,14 @@ public class HardLink { 
     }
 	  // construct and execute shell command
     String[] hardLinkCommand = getHardLinkCommand.linkOne(file, linkName);
-    Process process = Runtime.getRuntime().exec(hardLinkCommand);
+    ShellCommandExecutor shexec = new ShellCommandExecutor(hardLinkCommand);
     try {
-      if (process.waitFor() != 0) {
-        String errMsg = new BufferedReader(new InputStreamReader(
-            process.getInputStream())).readLine();
-        if (errMsg == null)  errMsg = "";
-        String inpMsg = new BufferedReader(new InputStreamReader(
-            process.getErrorStream())).readLine();
-        if (inpMsg == null)  inpMsg = "";
-        throw new IOException(errMsg + inpMsg);
-      }
-    } catch (InterruptedException e) {
-      throw new IOException(e);
-    } finally {
-      process.destroy();
+      shexec.execute();
+    } catch (ExitCodeException e) {
+      throw new IOException("Failed to execute command " +
+          Arrays.toString(hardLinkCommand) +
+          "; command output: \"" + shexec.getOutput() + "\"" +
+          "; WrappedException: \"" + e.getMessage() + "\"");
     }
   }
 
@@ -466,22 +461,12 @@ public class HardLink { 
     // construct and execute shell command
     String[] hardLinkCommand = getHardLinkCommand.linkMult(fileBaseNames, 
         linkDir);
-    Process process = Runtime.getRuntime().exec(hardLinkCommand, null, 
-        parentDir);
+    ShellCommandExecutor shexec = new ShellCommandExecutor(hardLinkCommand,
+      parentDir, null, 0L);
     try {
-      if (process.waitFor() != 0) {
-        String errMsg = new BufferedReader(new InputStreamReader(
-            process.getInputStream())).readLine();
-        if (errMsg == null)  errMsg = "";
-        String inpMsg = new BufferedReader(new InputStreamReader(
-            process.getErrorStream())).readLine();
-        if (inpMsg == null)  inpMsg = "";
-        throw new IOException(errMsg + inpMsg);
-      }
-    } catch (InterruptedException e) {
-      throw new IOException(e);
-    } finally {
-      process.destroy();
+      shexec.execute();
+    } catch (ExitCodeException e) {
+      throw new IOException(shexec.getOutput() + e.getMessage());
     }
     return callCount;
   }
@@ -504,17 +489,13 @@ public class HardLink { 
     String errMsg = null;
     int exitValue = -1;
     BufferedReader in = null;
-    BufferedReader err = null;
 
-    Process process = Runtime.getRuntime().exec(cmd);
+    ShellCommandExecutor shexec = new ShellCommandExecutor(cmd);
     try {
-      exitValue = process.waitFor();
-      in = new BufferedReader(new InputStreamReader(
-                                  process.getInputStream()));
+      shexec.execute();
+      in = new BufferedReader(new StringReader(shexec.getOutput()));
       inpMsg = in.readLine();
-      err = new BufferedReader(new InputStreamReader(
-                                   process.getErrorStream()));
-      errMsg = err.readLine();
+      exitValue = shexec.getExitCode();
       if (inpMsg == null || exitValue != 0) {
         throw createIOException(fileName, inpMsg, errMsg, exitValue, null);
       }
@@ -524,14 +505,15 @@ public class HardLink { 
       } else {
         return Integer.parseInt(inpMsg);
       }
-    } catch (NumberFormatException e) {
+    } catch (ExitCodeException e) {
+      inpMsg = shexec.getOutput();
+      errMsg = e.getMessage();
+      exitValue = e.getExitCode();
       throw createIOException(fileName, inpMsg, errMsg, exitValue, e);
-    } catch (InterruptedException e) {
+    } catch (NumberFormatException e) {
       throw createIOException(fileName, inpMsg, errMsg, exitValue, e);
     } finally {
-      process.destroy();
-      if (in != null) in.close();
-      if (err != null) err.close();
+      IOUtils.closeStream(in);
     }
   }
   

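As a hedged restatement of the pattern that replaces Runtime.exec() throughout this file: ShellCommandExecutor runs the command, buffers stdout, and signals a non-zero exit by throwing Shell.ExitCodeException, so each call site reduces to one execute() call plus one translation catch. A self-contained sketch (the command itself is hypothetical):

import java.io.IOException;
import java.util.Arrays;

import org.apache.hadoop.util.Shell.ExitCodeException;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;

public class RunAndTranslate {
  static void run(String[] cmd) throws IOException {
    ShellCommandExecutor shexec = new ShellCommandExecutor(cmd);
    try {
      shexec.execute();  // runs the command and waits for completion
    } catch (ExitCodeException e) {
      // exit code and stderr text travel in the exception; stdout is buffered
      throw new IOException("Failed to execute command "
          + Arrays.toString(cmd)
          + "; exit code: " + e.getExitCode()
          + "; command output: \"" + shexec.getOutput() + "\""
          + "; wrapped: \"" + e.getMessage() + "\"");
    }
  }

  public static void main(String[] args) throws IOException {
    run(new String[] {"ls", "/tmp"});  // hypothetical command
  }
}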
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIOException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIOException.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIOException.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PathIOException.java Tue Aug 19 23:49:39 2014
@@ -40,7 +40,7 @@ public class PathIOException extends IOE
    *  @param path for the exception
    */
   public PathIOException(String path) {
-    this(path, EIO, null);
+    this(path, EIO);
   }
 
   /**
@@ -59,7 +59,8 @@ public class PathIOException extends IOE
    * @param error custom string to use an the error text
    */
   public PathIOException(String path, String error) {
-    this(path, error, null);
+    super(error);
+    this.path = path;
   }
 
   protected PathIOException(String path, String error, Throwable cause) {

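A plausible motivation for routing the two-argument constructor through super(error) rather than the three-argument form: Throwable treats a cause passed at construction time, even a null one, as final, so a later initCause() call fails. A minimal JDK-only sketch of that behavior:

import java.io.IOException;

public class CauseSketch {
  public static void main(String[] args) {
    IOException open = new IOException("err");
    open.initCause(new RuntimeException("root"));  // allowed: cause not yet set

    IOException preset = new IOException("err", null);
    try {
      preset.initCause(new RuntimeException("root"));
    } catch (IllegalStateException expected) {
      // the constructor already set the cause, even though it was null
    }
  }
}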
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java Tue Aug 19 23:49:39 2014
@@ -23,6 +23,7 @@ import com.google.common.annotations.Vis
 
 import java.io.BufferedOutputStream;
 import java.io.DataOutput;
+import java.io.EOFException;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
@@ -105,6 +106,10 @@ public class RawLocalFileSystem extends 
     
     @Override
     public void seek(long pos) throws IOException {
+      if (pos < 0) {
+        throw new EOFException(
+          FSExceptionMessages.NEGATIVE_SEEK);
+      }
       fis.getChannel().position(pos);
       this.position = pos;
     }
@@ -256,7 +261,7 @@ public class RawLocalFileSystem extends 
       boolean createParent, int bufferSize, short replication, long blockSize,
       Progressable progress) throws IOException {
     if (exists(f) && !overwrite) {
-      throw new IOException("File already exists: "+f);
+      throw new FileAlreadyExistsException("File already exists: " + f);
     }
     Path parent = f.getParent();
     if (parent != null && !mkdirs(parent)) {
@@ -272,7 +277,7 @@ public class RawLocalFileSystem extends 
       EnumSet<CreateFlag> flags, int bufferSize, short replication, long blockSize,
       Progressable progress) throws IOException {
     if (exists(f) && !flags.contains(CreateFlag.OVERWRITE)) {
-      throw new IOException("File already exists: "+f);
+      throw new FileAlreadyExistsException("File already exists: " + f);
     }
     return new FSDataOutputStream(new BufferedOutputStream(
         new LocalFSFileOutputStream(f, false), bufferSize), statistics);
@@ -344,6 +349,10 @@ public class RawLocalFileSystem extends 
   @Override
   public boolean delete(Path p, boolean recursive) throws IOException {
     File f = pathToFile(p);
+    if (!f.exists()) {
+      //no path, return false "nothing to delete"
+      return false;
+    }
     if (f.isFile()) {
       return f.delete();
     } else if (!recursive && f.isDirectory() && 
@@ -412,10 +421,14 @@ public class RawLocalFileSystem extends 
     if(parent != null) {
       File parent2f = pathToFile(parent);
       if(parent2f != null && parent2f.exists() && !parent2f.isDirectory()) {
-        throw new FileAlreadyExistsException("Parent path is not a directory: " 
+        throw new ParentNotDirectoryException("Parent path is not a directory: "
             + parent);
       }
     }
+    if (p2f.exists() && !p2f.isDirectory()) {
+      throw new FileNotFoundException("Destination exists" +
+              " and is not a directory: " + p2f.getCanonicalPath());
+    }
     return (parent == null || mkdirs(parent)) &&
       (p2f.mkdir() || p2f.isDirectory());
   }

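The added guards bring the local implementation closer to the documented FileSystem contract: negative seeks now raise EOFException, and deleting a missing path reports false instead of failing later. A hedged usage sketch (the file paths are hypothetical):

import java.io.EOFException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;

public class LocalFsContractSketch {
  public static void main(String[] args) throws Exception {
    RawLocalFileSystem fs = new RawLocalFileSystem();
    fs.initialize(URI.create("file:///"), new Configuration());

    // delete() on a missing path: false ("nothing to delete"), no exception
    boolean deleted = fs.delete(new Path("/tmp/no-such-file"), false);
    System.out.println("deleted = " + deleted);

    // seek() to a negative offset: EOFException with the NEGATIVE_SEEK text
    try (FSDataInputStream in = fs.open(new Path("/tmp/existing-file"))) {
      in.seek(-1);
    } catch (EOFException expected) {
      System.out.println("negative seek rejected: " + expected.getMessage());
    }
  }
}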
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java Tue Aug 19 23:49:39 2014
@@ -128,6 +128,8 @@ public class Stat extends Shell {
           " link " + original);
     }
     // 6,symbolic link,6,1373584236,1373584236,lrwxrwxrwx,andrew,andrew,`link' -> `target'
+    // OR
+    // 6,symbolic link,6,1373584236,1373584236,lrwxrwxrwx,andrew,andrew,'link' -> 'target'
     StringTokenizer tokens = new StringTokenizer(line, ",");
     try {
       long length = Long.parseLong(tokens.nextToken());
@@ -147,18 +149,17 @@ public class Stat extends Shell {
       String group = tokens.nextToken();
       String symStr = tokens.nextToken();
       // 'notalink'
-      // 'link' -> `target'
+      // `link' -> `target' OR 'link' -> 'target'
       // '' -> ''
       Path symlink = null;
-      StringTokenizer symTokens = new StringTokenizer(symStr, "`");
-      symTokens.nextToken();
+      String parts[] = symStr.split(" -> ");      
       try {
-        String target = symTokens.nextToken();
-        target = target.substring(0, target.length()-1);
+        String target = parts[1];
+        target = target.substring(1, target.length()-1);
         if (!target.isEmpty()) {
           symlink = new Path(target);
         }
-      } catch (NoSuchElementException e) {
+      } catch (ArrayIndexOutOfBoundsException e) {
         // null if not a symlink
       }
       // Set stat

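The reworked parser handles both quoting styles GNU stat emits for the trailing field, as the updated comments note. A minimal sketch of just that extraction, with a hypothetical input value:

public class SymlinkFieldSketch {
  public static void main(String[] args) {
    // Last comma-separated stat field; newer coreutils quote with ' instead of `
    String symStr = "`link' -> `target'";
    String target = null;
    String[] parts = symStr.split(" -> ");
    try {
      String quoted = parts[1];
      // drop one quote character from each end, whichever style is in use
      quoted = quoted.substring(1, quoted.length() - 1);
      if (!quoted.isEmpty()) {
        target = quoted;  // the real code wraps this in new Path(target)
      }
    } catch (ArrayIndexOutOfBoundsException e) {
      // not a symlink: no " -> " separator, e.g. "'notalink'"
    }
    System.out.println("target = " + target);
  }
}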
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java Tue Aug 19 23:49:39 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.ftp;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
+import java.net.ConnectException;
 import java.net.URI;
 
 import org.apache.commons.logging.Log;
@@ -33,11 +34,14 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.Progressable;
 
 /**
@@ -56,6 +60,12 @@ public class FTPFileSystem extends FileS
   public static final int DEFAULT_BUFFER_SIZE = 1024 * 1024;
 
   public static final int DEFAULT_BLOCK_SIZE = 4 * 1024;
+  public static final String FS_FTP_USER_PREFIX = "fs.ftp.user.";
+  public static final String FS_FTP_HOST = "fs.ftp.host";
+  public static final String FS_FTP_HOST_PORT = "fs.ftp.host.port";
+  public static final String FS_FTP_PASSWORD_PREFIX = "fs.ftp.password.";
+  public static final String E_SAME_DIRECTORY_ONLY =
+      "only same directory renames are supported";
 
   private URI uri;
 
@@ -75,11 +85,11 @@ public class FTPFileSystem extends FileS
     super.initialize(uri, conf);
     // get host information from uri (overrides info in conf)
     String host = uri.getHost();
-    host = (host == null) ? conf.get("fs.ftp.host", null) : host;
+    host = (host == null) ? conf.get(FS_FTP_HOST, null) : host;
     if (host == null) {
       throw new IOException("Invalid host specified");
     }
-    conf.set("fs.ftp.host", host);
+    conf.set(FS_FTP_HOST, host);
 
     // get port information from uri, (overrides info in conf)
     int port = uri.getPort();
@@ -96,11 +106,11 @@ public class FTPFileSystem extends FileS
       }
     }
     String[] userPasswdInfo = userAndPassword.split(":");
-    conf.set("fs.ftp.user." + host, userPasswdInfo[0]);
+    conf.set(FS_FTP_USER_PREFIX + host, userPasswdInfo[0]);
     if (userPasswdInfo.length > 1) {
-      conf.set("fs.ftp.password." + host, userPasswdInfo[1]);
+      conf.set(FS_FTP_PASSWORD_PREFIX + host, userPasswdInfo[1]);
     } else {
-      conf.set("fs.ftp.password." + host, null);
+      conf.set(FS_FTP_PASSWORD_PREFIX + host, null);
     }
     setConf(conf);
     this.uri = uri;
@@ -115,23 +125,24 @@ public class FTPFileSystem extends FileS
   private FTPClient connect() throws IOException {
     FTPClient client = null;
     Configuration conf = getConf();
-    String host = conf.get("fs.ftp.host");
-    int port = conf.getInt("fs.ftp.host.port", FTP.DEFAULT_PORT);
-    String user = conf.get("fs.ftp.user." + host);
-    String password = conf.get("fs.ftp.password." + host);
+    String host = conf.get(FS_FTP_HOST);
+    int port = conf.getInt(FS_FTP_HOST_PORT, FTP.DEFAULT_PORT);
+    String user = conf.get(FS_FTP_USER_PREFIX + host);
+    String password = conf.get(FS_FTP_PASSWORD_PREFIX + host);
     client = new FTPClient();
     client.connect(host, port);
     int reply = client.getReplyCode();
     if (!FTPReply.isPositiveCompletion(reply)) {
-      throw new IOException("Server - " + host
-          + " refused connection on port - " + port);
+      throw NetUtils.wrapException(host, port,
+                   NetUtils.UNKNOWN_HOST, 0,
+                   new ConnectException("Server response " + reply));
     } else if (client.login(user, password)) {
       client.setFileTransferMode(FTP.BLOCK_TRANSFER_MODE);
       client.setFileType(FTP.BINARY_FILE_TYPE);
       client.setBufferSize(DEFAULT_BUFFER_SIZE);
     } else {
       throw new IOException("Login failed on server - " + host + ", port - "
-          + port);
+          + port + " as user '" + user + "'");
     }
 
     return client;
@@ -179,7 +190,7 @@ public class FTPFileSystem extends FileS
     FileStatus fileStat = getFileStatus(client, absolute);
     if (fileStat.isDirectory()) {
       disconnect(client);
-      throw new IOException("Path " + file + " is a directory.");
+      throw new FileNotFoundException("Path " + file + " is a directory.");
     }
     client.allocate(bufferSize);
     Path parent = absolute.getParent();
@@ -214,12 +225,18 @@ public class FTPFileSystem extends FileS
     final FTPClient client = connect();
     Path workDir = new Path(client.printWorkingDirectory());
     Path absolute = makeAbsolute(workDir, file);
-    if (exists(client, file)) {
-      if (overwrite) {
-        delete(client, file);
+    FileStatus status;
+    try {
+      status = getFileStatus(client, file);
+    } catch (FileNotFoundException fnfe) {
+      status = null;
+    }
+    if (status != null) {
+      if (overwrite && !status.isDirectory()) {
+        delete(client, file, false);
       } else {
         disconnect(client);
-        throw new IOException("File already exists: " + file);
+        throw new FileAlreadyExistsException("File already exists: " + file);
       }
     }
     
@@ -272,14 +289,13 @@ public class FTPFileSystem extends FileS
    * Convenience method, so that we don't open a new connection when using this
    * method from within another method. Otherwise every API invocation incurs
    * the overhead of opening/closing a TCP connection.
+   * @throws IOException on IO problems other than FileNotFoundException
    */
-  private boolean exists(FTPClient client, Path file) {
+  private boolean exists(FTPClient client, Path file) throws IOException {
     try {
       return getFileStatus(client, file) != null;
     } catch (FileNotFoundException fnfe) {
       return false;
-    } catch (IOException ioe) {
-      throw new FTPException("Failed to get file status", ioe);
     }
   }
 
@@ -294,12 +310,6 @@ public class FTPFileSystem extends FileS
     }
   }
 
-  /** @deprecated Use delete(Path, boolean) instead */
-  @Deprecated
-  private boolean delete(FTPClient client, Path file) throws IOException {
-    return delete(client, file, false);
-  }
-
   /**
    * Convenience method, so that we don't open a new connection when using this
    * method from within another method. Otherwise every API invocation incurs
@@ -310,9 +320,14 @@ public class FTPFileSystem extends FileS
     Path workDir = new Path(client.printWorkingDirectory());
     Path absolute = makeAbsolute(workDir, file);
     String pathName = absolute.toUri().getPath();
-    FileStatus fileStat = getFileStatus(client, absolute);
-    if (fileStat.isFile()) {
-      return client.deleteFile(pathName);
+    try {
+      FileStatus fileStat = getFileStatus(client, absolute);
+      if (fileStat.isFile()) {
+        return client.deleteFile(pathName);
+      }
+    } catch (FileNotFoundException e) {
+      //the file is not there
+      return false;
     }
     FileStatus[] dirEntries = listStatus(client, absolute);
     if (dirEntries != null && dirEntries.length > 0 && !(recursive)) {
@@ -491,7 +506,7 @@ public class FTPFileSystem extends FileS
         created = created && client.makeDirectory(pathName);
       }
     } else if (isFile(client, absolute)) {
-      throw new IOException(String.format(
+      throw new ParentNotDirectoryException(String.format(
           "Can't make directory for path %s since it is a file.", absolute));
     }
     return created;
@@ -528,6 +543,23 @@ public class FTPFileSystem extends FileS
   }
 
   /**
+   * Probe for a path being a parent of another
+   * @param parent parent path
+   * @param child possible child path
+   * @return true if the parent's path matches the start of the child's
+   */
+  private boolean isParentOf(Path parent, Path child) {
+    URI parentURI = parent.toUri();
+    String parentPath = parentURI.getPath();
+    if (!parentPath.endsWith("/")) {
+      parentPath += "/";
+    }
+    URI childURI = child.toUri();
+    String childPath = childURI.getPath();
+    return childPath.startsWith(parentPath);
+  }
+
+  /**
    * Convenience method, so that we don't open a new connection when using this
    * method from within another method. Otherwise every API invocation incurs
    * the overhead of opening/closing a TCP connection.
@@ -544,20 +576,31 @@ public class FTPFileSystem extends FileS
     Path absoluteSrc = makeAbsolute(workDir, src);
     Path absoluteDst = makeAbsolute(workDir, dst);
     if (!exists(client, absoluteSrc)) {
-      throw new IOException("Source path " + src + " does not exist");
+      throw new FileNotFoundException("Source path " + src + " does not exist");
+    }
+    if (isDirectory(absoluteDst)) {
+      // destination is a directory: rename goes underneath it with the
+      // source name
+      absoluteDst = new Path(absoluteDst, absoluteSrc.getName());
     }
     if (exists(client, absoluteDst)) {
-      throw new IOException("Destination path " + dst
-          + " already exist, cannot rename!");
+      throw new FileAlreadyExistsException("Destination path " + dst
+          + " already exists");
     }
     String parentSrc = absoluteSrc.getParent().toUri().toString();
     String parentDst = absoluteDst.getParent().toUri().toString();
-    String from = src.getName();
-    String to = dst.getName();
+    if (isParentOf(absoluteSrc, absoluteDst)) {
+      throw new IOException("Cannot rename " + absoluteSrc + " under itself"
+      + " : "+ absoluteDst);
+    }
+
     if (!parentSrc.equals(parentDst)) {
-      throw new IOException("Cannot rename parent(source): " + parentSrc
-          + ", parent(destination):  " + parentDst);
+      throw new IOException("Cannot rename source: " + absoluteSrc
+          + " to " + absoluteDst
+          + " -"+ E_SAME_DIRECTORY_ONLY);
     }
+    String from = absoluteSrc.getName();
+    String to = absoluteDst.getName();
     client.changeWorkingDirectory(parentSrc);
     boolean renamed = client.rename(from, to);
     return renamed;

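The new isParentOf() probe guards rename() against moving a directory under itself; the appended '/' keeps sibling names that share a prefix from false-matching. A standalone sketch of the same logic:

import org.apache.hadoop.fs.Path;

public class ParentProbeSketch {
  // Mirrors the private isParentOf() added above.
  static boolean isParentOf(Path parent, Path child) {
    String parentPath = parent.toUri().getPath();
    if (!parentPath.endsWith("/")) {
      parentPath += "/";
    }
    return child.toUri().getPath().startsWith(parentPath);
  }

  public static void main(String[] args) {
    System.out.println(isParentOf(new Path("/a"), new Path("/a/b"))); // true
    System.out.println(isParentOf(new Path("/a"), new Path("/ab")));  // false
  }
}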
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPInputStream.java Tue Aug 19 23:49:39 2014
@@ -103,7 +103,7 @@ public class FTPInputStream extends FSIn
   @Override
   public synchronized void close() throws IOException {
     if (closed) {
-      throw new IOException("Stream closed");
+      return;
     }
     super.close();
     closed = true;

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java Tue Aug 19 23:49:39 2014
@@ -278,7 +278,7 @@ public class AclEntry {
     }
 
     if (includePermission) {
-      if (split.length < index) {
+      if (split.length <= index) {
         throw new HadoopIllegalArgumentException("Invalid <aclSpec> : "
             + aclStr);
       }
@@ -298,4 +298,18 @@ public class AclEntry {
     AclEntry aclEntry = builder.build();
     return aclEntry;
   }
+
+  /**
+   * Convert a List of AclEntries into a string - the reverse of parseAclSpec.
+   * @param aclSpec List of AclEntries to convert
+   * @return String representation of aclSpec
+   */
+  public static String aclSpecToString(List<AclEntry> aclSpec) {
+    StringBuilder buf = new StringBuilder();
+    for ( AclEntry e : aclSpec ) {
+      buf.append(e.toString());
+      buf.append(",");
+    }
+    return buf.substring(0, buf.length()-1);  // remove last ,
+  }
 }

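A hedged round-trip sketch for the new helper, pairing it with the existing parseAclSpec() that the javadoc names as its inverse (the spec string is hypothetical):

import java.util.List;

import org.apache.hadoop.fs.permission.AclEntry;

public class AclRoundTripSketch {
  public static void main(String[] args) {
    String spec = "user:foo:rw-,group::r--";
    List<AclEntry> entries = AclEntry.parseAclSpec(spec, true);
    // Expected to reproduce the input, modulo canonical formatting
    System.out.println(AclEntry.aclSpecToString(entries));
  }
}

Note that aclSpecToString() trims the trailing comma via substring(), so it presumes a non-empty list.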
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java Tue Aug 19 23:49:39 2014
@@ -158,6 +158,17 @@ public class FsPermission implements Wri
     return (short)s;
   }
 
+  /**
+   * Encodes the object to a short.  Unlike {@link #toShort()}, this method may
+   * return values outside the fixed range 00000 - 01777 if extended features
+   * are encoded into this permission, such as the ACL bit.
+   *
+   * @return short extended short representation of this permission
+   */
+  public short toExtendedShort() {
+    return toShort();
+  }
+
   @Override
   public boolean equals(Object obj) {
     if (obj instanceof FsPermission) {
@@ -273,6 +284,16 @@ public class FsPermission implements Wri
     return stickyBit;
   }
 
+  /**
+   * Returns true if there is also an ACL (access control list).
+   *
+   * @return boolean true if there is also an ACL (access control list).
+   */
+  public boolean getAclBit() {
+    // File system subclasses that support the ACL bit would override this.
+    return false;
+  }
+
   /** Set the user file creation mask (umask) */
   public static void setUMask(Configuration conf, FsPermission umask) {
     conf.set(UMASK_LABEL, String.format("%1$03o", umask.toShort()));

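Both additions are extension points rather than behavior changes: the base class reports no ACL, and toExtendedShort() simply delegates to toShort(). A sketch of the override pattern the getAclBit() comment anticipates (the subclass name is hypothetical):

import org.apache.hadoop.fs.permission.FsPermission;

// Hypothetical subclass for file systems that track an ACL flag.
class AclFlaggedPermission extends FsPermission {
  AclFlaggedPermission(FsPermission perm) {
    super(perm.toShort());
  }

  @Override
  public boolean getAclBit() {
    return true;  // this permission is backed by an access control list
  }
}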
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/S3FileSystem.java Tue Aug 19 23:49:39 2014
@@ -32,6 +32,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -226,7 +227,7 @@ public class S3FileSystem extends FileSy
       if (overwrite) {
         delete(file, true);
       } else {
-        throw new IOException("File already exists: " + file);
+        throw new FileAlreadyExistsException("File already exists: " + file);
       }
     } else {
       Path parent = file.getParent();

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java Tue Aug 19 23:49:39 2014
@@ -22,6 +22,7 @@ import static org.apache.hadoop.fs.s3nat
 
 import java.io.BufferedInputStream;
 import java.io.ByteArrayInputStream;
+import java.io.EOFException;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
@@ -32,17 +33,19 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSExceptionMessages;
 import org.apache.hadoop.fs.s3.S3Credentials;
 import org.apache.hadoop.fs.s3.S3Exception;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.security.AccessControlException;
 import org.jets3t.service.S3Service;
 import org.jets3t.service.S3ServiceException;
 import org.jets3t.service.ServiceException;
 import org.jets3t.service.StorageObjectsChunk;
+import org.jets3t.service.impl.rest.HttpException;
 import org.jets3t.service.impl.rest.httpclient.RestS3Service;
 import org.jets3t.service.model.MultipartPart;
 import org.jets3t.service.model.MultipartUpload;
@@ -51,6 +54,8 @@ import org.jets3t.service.model.S3Object
 import org.jets3t.service.model.StorageObject;
 import org.jets3t.service.security.AWSCredentials;
 import org.jets3t.service.utils.MultipartUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
@@ -63,9 +68,11 @@ class Jets3tNativeFileSystemStore implem
   private boolean multipartEnabled;
   private long multipartCopyBlockSize;
   static final long MAX_PART_SIZE = (long)5 * 1024 * 1024 * 1024;
+
+  private String serverSideEncryptionAlgorithm;
   
-  public static final Log LOG =
-      LogFactory.getLog(Jets3tNativeFileSystemStore.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(Jets3tNativeFileSystemStore.class);
 
   @Override
   public void initialize(URI uri, Configuration conf) throws IOException {
@@ -77,7 +84,7 @@ class Jets3tNativeFileSystemStore implem
             s3Credentials.getSecretAccessKey());
       this.s3Service = new RestS3Service(awsCredentials);
     } catch (S3ServiceException e) {
-      handleS3ServiceException(e);
+      handleException(e);
     }
     multipartEnabled =
         conf.getBoolean("fs.s3n.multipart.uploads.enabled", false);
@@ -87,6 +94,7 @@ class Jets3tNativeFileSystemStore implem
     multipartCopyBlockSize = Math.min(
         conf.getLong("fs.s3n.multipart.copy.block.size", MAX_PART_SIZE),
         MAX_PART_SIZE);
+    serverSideEncryptionAlgorithm = conf.get("fs.s3n.server-side-encryption-algorithm");
 
     bucket = new S3Bucket(uri.getHost());
   }
@@ -107,20 +115,15 @@ class Jets3tNativeFileSystemStore implem
       object.setDataInputStream(in);
       object.setContentType("binary/octet-stream");
       object.setContentLength(file.length());
+      object.setServerSideEncryptionAlgorithm(serverSideEncryptionAlgorithm);
       if (md5Hash != null) {
         object.setMd5Hash(md5Hash);
       }
       s3Service.putObject(bucket, object);
-    } catch (S3ServiceException e) {
-      handleS3ServiceException(e);
+    } catch (ServiceException e) {
+      handleException(e, key);
     } finally {
-      if (in != null) {
-        try {
-          in.close();
-        } catch (IOException e) {
-          // ignore
-        }
-      }
+      IOUtils.closeStream(in);
     }
   }
 
@@ -130,6 +133,7 @@ class Jets3tNativeFileSystemStore implem
     object.setDataInputFile(file);
     object.setContentType("binary/octet-stream");
     object.setContentLength(file.length());
+    object.setServerSideEncryptionAlgorithm(serverSideEncryptionAlgorithm);
     if (md5Hash != null) {
       object.setMd5Hash(md5Hash);
     }
@@ -142,10 +146,8 @@ class Jets3tNativeFileSystemStore implem
     try {
       mpUtils.uploadObjects(bucket.getName(), s3Service,
                             objectsToUploadAsMultipart, null);
-    } catch (ServiceException e) {
-      handleServiceException(e);
     } catch (Exception e) {
-      throw new S3Exception(e);
+      handleException(e, key);
     }
   }
   
@@ -156,9 +158,10 @@ class Jets3tNativeFileSystemStore implem
       object.setDataInputStream(new ByteArrayInputStream(new byte[0]));
       object.setContentType("binary/octet-stream");
       object.setContentLength(0);
+      object.setServerSideEncryptionAlgorithm(serverSideEncryptionAlgorithm);
       s3Service.putObject(bucket, object);
-    } catch (S3ServiceException e) {
-      handleS3ServiceException(e);
+    } catch (ServiceException e) {
+      handleException(e, key);
     }
   }
 
@@ -166,20 +169,21 @@ class Jets3tNativeFileSystemStore implem
   public FileMetadata retrieveMetadata(String key) throws IOException {
     StorageObject object = null;
     try {
-      if(LOG.isDebugEnabled()) {
-        LOG.debug("Getting metadata for key: " + key + " from bucket:" + bucket.getName());
-      }
+      LOG.debug("Getting metadata for key: {} from bucket: {}",
+          key, bucket.getName());
       object = s3Service.getObjectDetails(bucket.getName(), key);
       return new FileMetadata(key, object.getContentLength(),
           object.getLastModifiedDate().getTime());
 
     } catch (ServiceException e) {
-      // Following is brittle. Is there a better way?
-      if ("NoSuchKey".equals(e.getErrorCode())) {
-        return null; //return null if key not found
+      try {
+        // process
+        handleException(e, key);
+        return null;
+      } catch (FileNotFoundException fnfe) {
+        // and downgrade missing files
+        return null;
       }
-      handleServiceException(e);
-      return null; //never returned - keep compiler happy
     } finally {
       if (object != null) {
         object.closeDataInputStream();
@@ -198,13 +202,12 @@ class Jets3tNativeFileSystemStore implem
   @Override
   public InputStream retrieve(String key) throws IOException {
     try {
-      if(LOG.isDebugEnabled()) {
-        LOG.debug("Getting key: " + key + " from bucket:" + bucket.getName());
-      }
+      LOG.debug("Getting key: {} from bucket: {}",
+          key, bucket.getName());
       S3Object object = s3Service.getObject(bucket.getName(), key);
       return object.getDataInputStream();
     } catch (ServiceException e) {
-      handleServiceException(key, e);
+      handleException(e, key);
       return null; //return null if key not found
     }
   }
@@ -222,15 +225,14 @@ class Jets3tNativeFileSystemStore implem
   public InputStream retrieve(String key, long byteRangeStart)
           throws IOException {
     try {
-      if(LOG.isDebugEnabled()) {
-        LOG.debug("Getting key: " + key + " from bucket:" + bucket.getName() + " with byteRangeStart: " + byteRangeStart);
-      }
+      LOG.debug("Getting key: {} from bucket: {} with byteRangeStart: {}",
+          key, bucket.getName(), byteRangeStart);
       S3Object object = s3Service.getObject(bucket, key, null, null, null,
                                             null, byteRangeStart, null);
       return object.getDataInputStream();
     } catch (ServiceException e) {
-      handleServiceException(key, e);
-      return null; //return null if key not found
+      handleException(e, key);
+      return null;
     }
   }
 
@@ -248,17 +250,19 @@ class Jets3tNativeFileSystemStore implem
   }
 
   /**
-   *
-   * @return
-   * This method returns null if the list could not be populated
-   * due to S3 giving ServiceException
-   * @throws IOException
+   * list objects
+   * @param prefix prefix
+   * @param delimiter delimiter
+   * @param maxListingLength max no. of entries
+   * @param priorLastKey last key in any previous search
+   * @return a list of matches
+   * @throws IOException on any reported failure
    */
 
   private PartialListing list(String prefix, String delimiter,
       int maxListingLength, String priorLastKey) throws IOException {
     try {
-      if (prefix.length() > 0 && !prefix.endsWith(PATH_DELIMITER)) {
+      if (!prefix.isEmpty() && !prefix.endsWith(PATH_DELIMITER)) {
         prefix += PATH_DELIMITER;
       }
       StorageObjectsChunk chunk = s3Service.listObjectsChunked(bucket.getName(),
@@ -273,24 +277,20 @@ class Jets3tNativeFileSystemStore implem
       }
       return new PartialListing(chunk.getPriorLastKey(), fileMetadata,
           chunk.getCommonPrefixes());
-    } catch (S3ServiceException e) {
-      handleS3ServiceException(e);
-      return null; //never returned - keep compiler happy
     } catch (ServiceException e) {
-      handleServiceException(e);
-      return null; //return null if list could not be populated
+      handleException(e, prefix);
+      return null; // never returned - keep compiler happy
     }
   }
 
   @Override
   public void delete(String key) throws IOException {
     try {
-      if(LOG.isDebugEnabled()) {
-        LOG.debug("Deleting key:" + key + "from bucket" + bucket.getName());
-      }
+      LOG.debug("Deleting key: {} from bucket: {}",
+          key, bucket.getName());
       s3Service.deleteObject(bucket, key);
     } catch (ServiceException e) {
-      handleServiceException(key, e);
+      handleException(e, key);
     }
   }
 
@@ -298,7 +298,7 @@ class Jets3tNativeFileSystemStore implem
     try {
       s3Service.renameObject(bucket.getName(), srcKey, new S3Object(dstKey));
     } catch (ServiceException e) {
-      handleServiceException(e);
+      handleException(e, srcKey);
     }
   }
   
@@ -317,10 +317,13 @@ class Jets3tNativeFileSystemStore implem
           return;
         }
       }
+
+      S3Object dstObject = new S3Object(dstKey);
+      dstObject.setServerSideEncryptionAlgorithm(serverSideEncryptionAlgorithm);
       s3Service.copyObject(bucket.getName(), srcKey, bucket.getName(),
-          new S3Object(dstKey), false);
+          dstObject, false);
     } catch (ServiceException e) {
-      handleServiceException(srcKey, e);
+      handleException(e, srcKey);
     }
   }
 
@@ -355,19 +358,22 @@ class Jets3tNativeFileSystemStore implem
       Collections.reverse(listedParts);
       s3Service.multipartCompleteUpload(multipartUpload, listedParts);
     } catch (ServiceException e) {
-      handleServiceException(e);
+      handleException(e, srcObject.getKey());
     }
   }
 
   @Override
   public void purge(String prefix) throws IOException {
+    String key = "";
     try {
-      S3Object[] objects = s3Service.listObjects(bucket.getName(), prefix, null);
+      S3Object[] objects =
+          s3Service.listObjects(bucket.getName(), prefix, null);
       for (S3Object object : objects) {
-        s3Service.deleteObject(bucket, object.getKey());
+        key = object.getKey();
+        s3Service.deleteObject(bucket, key);
       }
     } catch (S3ServiceException e) {
-      handleS3ServiceException(e);
+      handleException(e, key);
     }
   }
 
@@ -381,39 +387,97 @@ class Jets3tNativeFileSystemStore implem
         sb.append(object.getKey()).append("\n");
       }
     } catch (S3ServiceException e) {
-      handleS3ServiceException(e);
+      handleException(e);
     }
     System.out.println(sb);
   }
 
-  private void handleServiceException(String key, ServiceException e) throws IOException {
-    if ("NoSuchKey".equals(e.getErrorCode())) {
-      throw new FileNotFoundException("Key '" + key + "' does not exist in S3");
-    } else {
-      handleServiceException(e);
-    }
+  /**
+   * Handle any service exception by translating it into an IOException
+   * @param e exception
+   * @throws IOException exception -always
+   */
+  private void handleException(Exception e) throws IOException {
+    throw processException(e, e, "");
   }
+  /**
+   * Handle any service exception by translating it into an IOException
+   * @param e exception
+   * @param key key sought from object store
 
-  private void handleS3ServiceException(S3ServiceException e) throws IOException {
-    if (e.getCause() instanceof IOException) {
-      throw (IOException) e.getCause();
-    }
-    else {
-      if(LOG.isDebugEnabled()) {
-        LOG.debug("S3 Error code: " + e.getS3ErrorCode() + "; S3 Error message: " + e.getS3ErrorMessage());
-      }
-      throw new S3Exception(e);
-    }
+   * @throws IOException exception -always
+   */
+  private void handleException(Exception e, String key) throws IOException {
+    throw processException(e, e, key);
   }
 
-  private void handleServiceException(ServiceException e) throws IOException {
-    if (e.getCause() instanceof IOException) {
-      throw (IOException) e.getCause();
-    }
-    else {
-      if(LOG.isDebugEnabled()) {
-        LOG.debug("Got ServiceException with Error code: " + e.getErrorCode() + ";and Error message: " + e.getErrorMessage());
-      }
+  /**
+   * Handle any service exception by translating it into an IOException
+   * @param thrown exception
+   * @param original original exception -thrown if no other translation could
+   * be made
+   * @param key key sought from object store or "" for undefined
+   * @return an exception to throw. If isProcessingCause==true this may be null.
+   */
+  private IOException processException(Throwable thrown, Throwable original,
+      String key) {
+    IOException result;
+    if (thrown.getCause() != null) {
+      // recurse down
+      result = processException(thrown.getCause(), original, key);
+    } else if (thrown instanceof HttpException) {
+      // nested HttpException - examine error code and react
+      HttpException httpException = (HttpException) thrown;
+      String responseMessage = httpException.getResponseMessage();
+      int responseCode = httpException.getResponseCode();
+      String bucketName = "s3n://" + bucket.getName();
+      String text = String.format("%s : %03d : %s",
+          bucketName,
+          responseCode,
+          responseMessage);
+      String filename = !key.isEmpty() ? (bucketName + "/" + key) : text;
+      IOException ioe;
+      switch (responseCode) {
+        case 404:
+          result = new FileNotFoundException(filename);
+          break;
+        case 416: // invalid range
+          result = new EOFException(FSExceptionMessages.CANNOT_SEEK_PAST_EOF
+                                    +": " + filename);
+          break;
+        case 403: //forbidden
+          result = new AccessControlException("Permission denied"
+                                    +": " + filename);
+          break;
+        default:
+          result = new IOException(text);
+      }
+      result.initCause(thrown);
+    } else if (thrown instanceof S3ServiceException) {
+      S3ServiceException se = (S3ServiceException) thrown;
+      LOG.debug(
+          "S3ServiceException: {}: {} : {}",
+          se.getS3ErrorCode(), se.getS3ErrorMessage(), se, se);
+      if ("InvalidRange".equals(se.getS3ErrorCode())) {
+        result = new EOFException(FSExceptionMessages.CANNOT_SEEK_PAST_EOF);
+      } else {
+        result = new S3Exception(se);
+      }
+    } else if (thrown instanceof ServiceException) {
+      ServiceException se = (ServiceException) thrown;
+      LOG.debug("S3ServiceException: {}: {} : {}",
+          se.getErrorCode(), se.toString(), se, se);
+      result = new S3Exception(se);
+    } else if (thrown instanceof IOException) {
+      result = (IOException) thrown;
+    } else {
+      // no translation was possible: there is no inner cause and the
+      // exception type is not recognized. Convert the original exception
+      // to an IOException, rather than the one at the base of the
+      // cause chain.
+      result = new S3Exception(original);
     }
+
+    return result;
   }
 }
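
For reference, the 404/416/403 handling in processException() boils down to
the following minimal, self-contained sketch. The class and method names are
illustrative only, and a plain IOException stands in for Hadoop's
AccessControlException to keep the snippet dependency-free:

    import java.io.EOFException;
    import java.io.FileNotFoundException;
    import java.io.IOException;

    public class S3nErrorMappingSketch {
      // Map an HTTP status code from the object store to the IOException
      // subclass that FileSystem callers expect.
      static IOException toIOException(int responseCode, String filename) {
        switch (responseCode) {
          case 404:  // object or bucket not found
            return new FileNotFoundException(filename);
          case 416:  // requested range not satisfiable, i.e. past EOF
            return new EOFException("Attempted to seek or read past EOF: "
                + filename);
          case 403:  // forbidden
            return new IOException("Permission denied: " + filename);
          default:   // anything else surfaces as a generic IOException
            return new IOException(filename + " : " + responseCode);
        }
      }

      public static void main(String[] args) {
        System.out.println(toIOException(404, "s3n://bucket/key"));
      }
    }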

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java Tue Aug 19 23:49:39 2014
@@ -19,6 +19,7 @@
 package org.apache.hadoop.fs.s3native;
 
 import java.io.BufferedOutputStream;
+import java.io.EOFException;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
@@ -37,17 +38,19 @@ import java.util.Set;
 import java.util.TreeSet;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import com.google.common.base.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BufferedFSInputStream;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FSExceptionMessages;
 import org.apache.hadoop.fs.FSInputStream;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalDirAllocator;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.s3.S3Exception;
@@ -55,6 +58,8 @@ import org.apache.hadoop.io.retry.RetryP
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryProxy;
 import org.apache.hadoop.util.Progressable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * <p>
@@ -81,8 +86,8 @@ import org.apache.hadoop.util.Progressab
 @InterfaceStability.Stable
 public class NativeS3FileSystem extends FileSystem {
   
-  public static final Log LOG = 
-    LogFactory.getLog(NativeS3FileSystem.class);
+  public static final Logger LOG =
+      LoggerFactory.getLogger(NativeS3FileSystem.class);
   
   private static final String FOLDER_SUFFIX = "_$folder$";
   static final String PATH_DELIMITER = Path.SEPARATOR;
@@ -97,6 +102,7 @@ public class NativeS3FileSystem extends 
     private long pos = 0;
     
     public NativeS3FsInputStream(NativeFileSystemStore store, Statistics statistics, InputStream in, String key) {
+      Preconditions.checkNotNull(in, "Null input stream");
       this.store = store;
       this.statistics = statistics;
       this.in = in;
@@ -105,13 +111,20 @@ public class NativeS3FileSystem extends 
     
     @Override
     public synchronized int read() throws IOException {
-      int result = -1;
+      int result;
       try {
         result = in.read();
       } catch (IOException e) {
-        LOG.info("Received IOException while reading '" + key + "', attempting to reopen.");
-        seek(pos);
-        result = in.read();
+        LOG.info("Received IOException while reading '{}', attempting to reopen",
+            key);
+        LOG.debug("{}", e, e);
+        try {
+          seek(pos);
+          result = in.read();
+        } catch (EOFException eof) {
+          LOG.debug("EOF on input stream read: {}", eof, eof);
+          result = -1;
+        }
       } 
       if (result != -1) {
         pos++;
@@ -124,12 +137,17 @@ public class NativeS3FileSystem extends 
     @Override
     public synchronized int read(byte[] b, int off, int len)
       throws IOException {
-      
+      if (in == null) {
+        throw new EOFException("Cannot read closed stream");
+      }
       int result = -1;
       try {
         result = in.read(b, off, len);
+      } catch (EOFException eof) {
+        throw eof;
       } catch (IOException e) {
-        LOG.info("Received IOException while reading '" + key + "', attempting to reopen.");
+        LOG.info( "Received IOException while reading '{}'," +
+                  " attempting to reopen.", key);
         seek(pos);
         result = in.read(b, off, len);
       }
@@ -143,17 +161,53 @@ public class NativeS3FileSystem extends 
     }
 
     @Override
-    public void close() throws IOException {
-      in.close();
+    public synchronized void close() throws IOException {
+      closeInnerStream();
+    }
+
+    /**
+     * Close the inner stream if not null. Even if an exception
+     * is raised during the close, the field is set to null
+     * @throws IOException if raised by the close() operation.
+     */
+    private void closeInnerStream() throws IOException {
+      if (in != null) {
+        try {
+          in.close();
+        } finally {
+          in = null;
+        }
+      }
+    }
+
+    /**
+     * Update inner stream with a new stream and position
+     * @param newStream new stream - must not be null
+     * @param newpos new position
+     * @throws IOException IO exception on a failure to close the existing
+     * stream.
+     */
+    private synchronized void updateInnerStream(InputStream newStream,
+        long newpos) throws IOException {
+      Preconditions.checkNotNull(newStream, "Null newstream argument");
+      closeInnerStream();
+      in = newStream;
+      this.pos = newpos;
     }
 
     @Override
-    public synchronized void seek(long pos) throws IOException {
-      in.close();
-      LOG.info("Opening key '" + key + "' for reading at position '" + pos + "'");
-      in = store.retrieve(key, pos);
-      this.pos = pos;
+    public synchronized void seek(long newpos) throws IOException {
+      if (newpos < 0) {
+        throw new EOFException(
+            FSExceptionMessages.NEGATIVE_SEEK);
+      }
+      if (pos != newpos) {
+        // the seek is attempting to move the current position
+        LOG.debug("Opening key '{}' for reading at position '{}", key, newpos);
+        InputStream newStream = store.retrieve(key, newpos);
+        updateInnerStream(newStream, newpos);
+      }
     }
+
     @Override
     public synchronized long getPos() throws IOException {
       return pos;
@@ -172,6 +226,7 @@ public class NativeS3FileSystem extends 
     private OutputStream backupStream;
     private MessageDigest digest;
     private boolean closed;
+    private LocalDirAllocator lDirAlloc;
     
     public NativeS3FsOutputStream(Configuration conf,
         NativeFileSystemStore store, String key, Progressable progress,
@@ -193,11 +248,10 @@ public class NativeS3FileSystem extends 
     }
 
     private File newBackupFile() throws IOException {
-      File dir = new File(conf.get("fs.s3.buffer.dir"));
-      if (!dir.mkdirs() && !dir.exists()) {
-        throw new IOException("Cannot create S3 buffer directory: " + dir);
+      if (lDirAlloc == null) {
+        lDirAlloc = new LocalDirAllocator("fs.s3.buffer.dir");
       }
-      File result = File.createTempFile("output-", ".tmp", dir);
+      File result = lDirAlloc.createTmpFileForWrite("output-",
+          LocalDirAllocator.SIZE_UNKNOWN, conf);
       result.deleteOnExit();
       return result;
     }
@@ -214,7 +268,7 @@ public class NativeS3FileSystem extends 
       }
 
       backupStream.close();
-      LOG.info("OutputStream for key '" + key + "' closed. Now beginning upload");
+      LOG.info("OutputStream for key '{}' closed. Now beginning upload", key);
       
       try {
         byte[] md5Hash = digest == null ? null : digest.digest();
@@ -226,7 +280,7 @@ public class NativeS3FileSystem extends 
         super.close();
         closed = true;
       } 
-      LOG.info("OutputStream for key '" + key + "' upload complete");
+      LOG.info("OutputStream for key '{}' upload complete", key);
     }
 
     @Override
@@ -339,7 +393,7 @@ public class NativeS3FileSystem extends 
       Progressable progress) throws IOException {
 
     if (exists(f) && !overwrite) {
-      throw new IOException("File already exists:"+f);
+      throw new FileAlreadyExistsException("File already exists: " + f);
     }
     
     if(LOG.isDebugEnabled()) {
@@ -367,7 +421,7 @@ public class NativeS3FileSystem extends 
     String key = pathToKey(absolutePath);
     if (status.isDirectory()) {
       if (!recurse && listStatus(f).length > 0) {
-        throw new IOException("Can not delete " + f + " at is a not empty directory and recurse option is false");
+        throw new IOException("Can not delete " + f + " as is a not empty directory and recurse option is false");
       }
 
       createParent(f);
@@ -538,7 +592,7 @@ public class NativeS3FileSystem extends 
     try {
       FileStatus fileStatus = getFileStatus(f);
       if (fileStatus.isFile()) {
-        throw new IOException(String.format(
+        throw new FileAlreadyExistsException(String.format(
             "Can't make directory for path '%s' since it is a file.", f));
 
       }
@@ -556,7 +610,7 @@ public class NativeS3FileSystem extends 
   public FSDataInputStream open(Path f, int bufferSize) throws IOException {
     FileStatus fs = getFileStatus(f); // will throw if the file doesn't exist
     if (fs.isDirectory()) {
-      throw new IOException("'" + f + "' is a directory");
+      throw new FileNotFoundException("'" + f + "' is a directory");
     }
     LOG.info("Opening '" + f + "' for reading");
     Path absolutePath = makeAbsolute(f);
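
The input-stream changes above follow a reopen-on-seek pattern: a seek is a
no-op unless the position actually moves, in which case the old stream is
closed and a new one is opened at the target offset. A minimal sketch of
that pattern (not the committed class; reopen() stands in for
store.retrieve()):

    import java.io.EOFException;
    import java.io.IOException;
    import java.io.InputStream;

    abstract class ReopenOnSeekStream extends InputStream {
      private InputStream in;
      private long pos;

      // Open the underlying source at the given offset; the real class
      // delegates to the S3 store here.
      protected abstract InputStream reopen(long offset) throws IOException;

      public synchronized void seek(long newpos) throws IOException {
        if (newpos < 0) {
          throw new EOFException("Cannot seek to a negative offset");
        }
        if (pos != newpos) {      // no-op seeks never touch the store
          InputStream newStream = reopen(newpos);
          if (in != null) {
            in.close();           // drop the old stream before swapping
          }
          in = newStream;
          pos = newpos;
        }
      }

      @Override
      public synchronized int read() throws IOException {
        if (in == null) {
          in = reopen(pos);       // lazily open on first read
        }
        int b = in.read();
        if (b != -1) {
          pos++;                  // track the position for later reopens
        }
        return b;
      }
    }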

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java Tue Aug 19 23:49:39 2014
@@ -18,7 +18,7 @@
 package org.apache.hadoop.fs.shell;
 
 import java.io.IOException;
-import java.util.Iterator;
+import java.util.Collections;
 import java.util.LinkedList;
 import java.util.List;
 
@@ -31,8 +31,10 @@ import org.apache.hadoop.fs.permission.A
 import org.apache.hadoop.fs.permission.AclEntryScope;
 import org.apache.hadoop.fs.permission.AclEntryType;
 import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.AclUtil;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.fs.permission.ScopedAclEntries;
 
 /**
  * Acl related operations
@@ -57,8 +59,8 @@ class AclCommands extends FsCommand {
     public static String DESCRIPTION = "Displays the Access Control Lists"
         + " (ACLs) of files and directories. If a directory has a default ACL,"
         + " then getfacl also displays the default ACL.\n"
-        + "-R: List the ACLs of all files and directories recursively.\n"
-        + "<path>: File or directory to list.\n";
+        + "  -R: List the ACLs of all files and directories recursively.\n"
+        + "  <path>: File or directory to list.\n";
 
     @Override
     protected void processOptions(LinkedList<String> args) throws IOException {
@@ -75,84 +77,43 @@ class AclCommands extends FsCommand {
 
     @Override
     protected void processPath(PathData item) throws IOException {
-      AclStatus aclStatus = item.fs.getAclStatus(item.path);
       out.println("# file: " + item);
-      out.println("# owner: " + aclStatus.getOwner());
-      out.println("# group: " + aclStatus.getGroup());
-      List<AclEntry> entries = aclStatus.getEntries();
-      if (aclStatus.isStickyBit()) {
-        String stickyFlag = "T";
-        for (AclEntry aclEntry : entries) {
-          if (aclEntry.getType() == AclEntryType.OTHER
-              && aclEntry.getScope() == AclEntryScope.ACCESS
-              && aclEntry.getPermission().implies(FsAction.EXECUTE)) {
-            stickyFlag = "t";
-            break;
-          }
-        }
-        out.println("# flags: --" + stickyFlag);
-      }
-
+      out.println("# owner: " + item.stat.getOwner());
+      out.println("# group: " + item.stat.getGroup());
       FsPermission perm = item.stat.getPermission();
-      if (entries.isEmpty()) {
-        printMinimalAcl(perm);
-      } else {
-        printExtendedAcl(perm, entries);
+      if (perm.getStickyBit()) {
+        out.println("# flags: --" +
+          (perm.getOtherAction().implies(FsAction.EXECUTE) ? "t" : "T"));
       }
 
+      List<AclEntry> entries = perm.getAclBit() ?
+        item.fs.getAclStatus(item.path).getEntries() :
+        Collections.<AclEntry>emptyList();
+      ScopedAclEntries scopedEntries = new ScopedAclEntries(
+        AclUtil.getAclFromPermAndEntries(perm, entries));
+      printAclEntriesForSingleScope(scopedEntries.getAccessEntries());
+      printAclEntriesForSingleScope(scopedEntries.getDefaultEntries());
       out.println();
     }
 
     /**
-     * Prints an extended ACL, including all extended ACL entries and also the
-     * base entries implied by the permission bits.
+     * Prints all the ACL entries in a single scope.
      *
-     * @param perm FsPermission of file
      * @param entries List<AclEntry> containing ACL entries of file
      */
-    private void printExtendedAcl(FsPermission perm, List<AclEntry> entries) {
-      // Print owner entry implied by owner permission bits.
-      out.println(new AclEntry.Builder()
-        .setScope(AclEntryScope.ACCESS)
-        .setType(AclEntryType.USER)
-        .setPermission(perm.getUserAction())
-        .build());
-
-      // Print all extended access ACL entries.
-      boolean hasAccessAcl = false;
-      Iterator<AclEntry> entryIter = entries.iterator();
-      AclEntry curEntry = null;
-      while (entryIter.hasNext()) {
-        curEntry = entryIter.next();
-        if (curEntry.getScope() == AclEntryScope.DEFAULT) {
-          break;
-        }
-        hasAccessAcl = true;
-        printExtendedAclEntry(curEntry, perm.getGroupAction());
+    private void printAclEntriesForSingleScope(List<AclEntry> entries) {
+      if (entries.isEmpty()) {
+        return;
       }
-
-      // Print mask entry implied by group permission bits, or print group entry
-      // if there is no access ACL (only default ACL).
-      out.println(new AclEntry.Builder()
-        .setScope(AclEntryScope.ACCESS)
-        .setType(hasAccessAcl ? AclEntryType.MASK : AclEntryType.GROUP)
-        .setPermission(perm.getGroupAction())
-        .build());
-
-      // Print other entry implied by other bits.
-      out.println(new AclEntry.Builder()
-        .setScope(AclEntryScope.ACCESS)
-        .setType(AclEntryType.OTHER)
-        .setPermission(perm.getOtherAction())
-        .build());
-
-      // Print default ACL entries.
-      if (curEntry != null && curEntry.getScope() == AclEntryScope.DEFAULT) {
-        out.println(curEntry);
-        // ACL sort order guarantees default mask is the second-to-last entry.
+      if (AclUtil.isMinimalAcl(entries)) {
+        for (AclEntry entry: entries) {
+          out.println(entry);
+        }
+      } else {
+        // ACL sort order guarantees mask is the second-to-last entry.
         FsAction maskPerm = entries.get(entries.size() - 2).getPermission();
-        while (entryIter.hasNext()) {
-          printExtendedAclEntry(entryIter.next(), maskPerm);
+        for (AclEntry entry: entries) {
+          printExtendedAclEntry(entry, maskPerm);
         }
       }
     }
@@ -180,30 +141,6 @@ class AclCommands extends FsCommand {
         out.println(entry);
       }
     }
-
-    /**
-     * Prints a minimal ACL, consisting of exactly 3 ACL entries implied by the
-     * permission bits.
-     *
-     * @param perm FsPermission of file
-     */
-    private void printMinimalAcl(FsPermission perm) {
-      out.println(new AclEntry.Builder()
-        .setScope(AclEntryScope.ACCESS)
-        .setType(AclEntryType.USER)
-        .setPermission(perm.getUserAction())
-        .build());
-      out.println(new AclEntry.Builder()
-        .setScope(AclEntryScope.ACCESS)
-        .setType(AclEntryType.GROUP)
-        .setPermission(perm.getGroupAction())
-        .build());
-      out.println(new AclEntry.Builder()
-        .setScope(AclEntryScope.ACCESS)
-        .setType(AclEntryType.OTHER)
-        .setPermission(perm.getOtherAction())
-        .build());
-    }
   }
 
   /**
@@ -216,19 +153,19 @@ class AclCommands extends FsCommand {
     public static String DESCRIPTION = "Sets Access Control Lists (ACLs)"
         + " of files and directories.\n" 
         + "Options:\n"
-        + "-b :Remove all but the base ACL entries. The entries for user,"
+        + "  -b :Remove all but the base ACL entries. The entries for user,"
         + " group and others are retained for compatibility with permission "
         + "bits.\n" 
-        + "-k :Remove the default ACL.\n"
-        + "-R :Apply operations to all files and directories recursively.\n"
-        + "-m :Modify ACL. New entries are added to the ACL, and existing"
+        + "  -k :Remove the default ACL.\n"
+        + "  -R :Apply operations to all files and directories recursively.\n"
+        + "  -m :Modify ACL. New entries are added to the ACL, and existing"
         + " entries are retained.\n"
-        + "-x :Remove specified ACL entries. Other ACL entries are retained.\n"
-        + "--set :Fully replace the ACL, discarding all existing entries."
+        + "  -x :Remove specified ACL entries. Other ACL entries are retained.\n"
+        + "  --set :Fully replace the ACL, discarding all existing entries."
         + " The <acl_spec> must include entries for user, group, and others"
         + " for compatibility with permission bits.\n"
-        + "<acl_spec>: Comma separated list of ACL entries.\n"
-        + "<path>: File or directory to modify.\n";
+        + "  <acl_spec>: Comma separated list of ACL entries.\n"
+        + "  <path>: File or directory to modify.\n";
 
     CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE, "b", "k", "R",
         "m", "x", "-set");

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java Tue Aug 19 23:49:39 2014
@@ -22,7 +22,13 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.util.EnumSet;
+import java.util.Iterator;
 import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.NoSuchElementException;
 
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -34,6 +40,9 @@ import org.apache.hadoop.fs.PathIsDirect
 import org.apache.hadoop.fs.PathIsNotDirectoryException;
 import org.apache.hadoop.fs.PathNotFoundException;
 import org.apache.hadoop.fs.PathOperationException;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclUtil;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.IOUtils;
 
 /**
@@ -45,7 +54,6 @@ import org.apache.hadoop.io.IOUtils;
 abstract class CommandWithDestination extends FsCommand {  
   protected PathData dst;
   private boolean overwrite = false;
-  private boolean preserve = false;
   private boolean verifyChecksum = true;
   private boolean writeChecksum = true;
   
@@ -74,7 +82,54 @@ abstract class CommandWithDestination ex
    * implementation allows.
    */
   protected void setPreserve(boolean preserve) {
-    this.preserve = preserve;
+    if (preserve) {
+      preserve(FileAttribute.TIMESTAMPS);
+      preserve(FileAttribute.OWNERSHIP);
+      preserve(FileAttribute.PERMISSION);
+    } else {
+      preserveStatus.clear();
+    }
+  }
+  
+  protected static enum FileAttribute {
+    TIMESTAMPS, OWNERSHIP, PERMISSION, ACL, XATTR;
+
+    public static FileAttribute getAttribute(char symbol) {
+      for (FileAttribute attribute : values()) {
+        if (attribute.name().charAt(0) == Character.toUpperCase(symbol)) {
+          return attribute;
+        }
+      }
+      throw new NoSuchElementException("No attribute for " + symbol);
+    }
+  }
+  
+  private EnumSet<FileAttribute> preserveStatus = 
+      EnumSet.noneOf(FileAttribute.class);
+  
+  /**
+   * Checks whether the given attribute should be preserved.
+   *
+   * @param attribute - Attribute to check
+   * @return true if the attribute should be preserved, false otherwise
+   */
+  private boolean shouldPreserve(FileAttribute attribute) {
+    return preserveStatus.contains(attribute);
+  }
+  
+  /**
+   * Add file attributes that need to be preserved. This method may be
+   * called multiple times to add attributes.
+   *
+   * @param fileAttribute - Attribute to add, one at a time
+   */
+  protected void preserve(FileAttribute fileAttribute) {
+    // EnumSet ignores duplicate additions, so no containment check is needed
+    preserveStatus.add(fileAttribute);
   }
 
   /**
@@ -212,6 +267,9 @@ abstract class CommandWithDestination ex
         dst.refreshStatus(); // need to update stat to know it exists now
       }      
       super.recursePath(src);
+      if (dst.stat.isDirectory()) {
+        preserveAttributes(src, dst);
+      }
     } finally {
       dst = savedDst;
     }
@@ -243,19 +301,7 @@ abstract class CommandWithDestination ex
     try {
       in = src.fs.open(src.path);
       copyStreamToTarget(in, target);
-      if(preserve) {
-        target.fs.setTimes(
-          target.path,
-          src.stat.getModificationTime(),
-          src.stat.getAccessTime());
-        target.fs.setOwner(
-          target.path,
-          src.stat.getOwner(),
-          src.stat.getGroup());
-        target.fs.setPermission(
-          target.path,
-          src.stat.getPermission());
-      }
+      preserveAttributes(src, target);
     } finally {
       IOUtils.closeStream(in);
     }
@@ -285,6 +331,56 @@ abstract class CommandWithDestination ex
     }
   }
 
+  /**
+   * Preserve the attributes of the source on the target.
+   * The method calls {@link #shouldPreserve(FileAttribute)} to check which
+   * attributes should be preserved.
+   * @param src source whose attributes are to be preserved
+   * @param target where to preserve the attributes
+   * @throws IOException if the attributes cannot be preserved
+   */
+  protected void preserveAttributes(PathData src, PathData target)
+      throws IOException {
+    if (shouldPreserve(FileAttribute.TIMESTAMPS)) {
+      target.fs.setTimes(
+          target.path,
+          src.stat.getModificationTime(),
+          src.stat.getAccessTime());
+    }
+    if (shouldPreserve(FileAttribute.OWNERSHIP)) {
+      target.fs.setOwner(
+          target.path,
+          src.stat.getOwner(),
+          src.stat.getGroup());
+    }
+    if (shouldPreserve(FileAttribute.PERMISSION) ||
+        shouldPreserve(FileAttribute.ACL)) {
+      target.fs.setPermission(
+          target.path,
+          src.stat.getPermission());
+    }
+    if (shouldPreserve(FileAttribute.ACL)) {
+      FsPermission perm = src.stat.getPermission();
+      if (perm.getAclBit()) {
+        List<AclEntry> srcEntries =
+            src.fs.getAclStatus(src.path).getEntries();
+        List<AclEntry> srcFullEntries =
+            AclUtil.getAclFromPermAndEntries(perm, srcEntries);
+        target.fs.setAcl(target.path, srcFullEntries);
+      }
+    }
+    if (shouldPreserve(FileAttribute.XATTR)) {
+      Map<String, byte[]> srcXAttrs = src.fs.getXAttrs(src.path);
+      if (srcXAttrs != null) {
+        Iterator<Entry<String, byte[]>> iter = srcXAttrs.entrySet().iterator();
+        while (iter.hasNext()) {
+          Entry<String, byte[]> entry = iter.next();
+          target.fs.setXAttr(target.path, entry.getKey(), entry.getValue());
+        }
+      }
+    }
+  }
+
   // Helper filter filesystem that registers created files as temp files to
   // be deleted on exit unless successfully renamed
   private static class TargetFileSystem extends FilterFileSystem {
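
The single boolean 'preserve' flag is replaced by an EnumSet keyed off the
new FileAttribute enum. A self-contained sketch of that bookkeeping (the
member names mirror the patch, but the class itself is illustrative):

    import java.util.EnumSet;
    import java.util.NoSuchElementException;

    public class PreserveFlagsSketch {
      enum FileAttribute {
        TIMESTAMPS, OWNERSHIP, PERMISSION, ACL, XATTR;

        // 't', 'o', 'p', 'a', 'x' -> matching attribute, by first letter
        static FileAttribute of(char symbol) {
          for (FileAttribute attribute : values()) {
            if (attribute.name().charAt(0) == Character.toUpperCase(symbol)) {
              return attribute;
            }
          }
          throw new NoSuchElementException("No attribute for " + symbol);
        }
      }

      private final EnumSet<FileAttribute> preserved =
          EnumSet.noneOf(FileAttribute.class);

      void preserve(FileAttribute attribute) {
        preserved.add(attribute);               // duplicates are a no-op
      }

      boolean shouldPreserve(FileAttribute attribute) {
        return preserved.contains(attribute);
      }

      public static void main(String[] args) {
        PreserveFlagsSketch flags = new PreserveFlagsSketch();
        for (char c : "topax".toCharArray()) {
          flags.preserve(FileAttribute.of(c));
        }
        System.out.println(flags.preserved);    // all five attributes
      }
    }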

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java Tue Aug 19 23:49:39 2014
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 
@@ -54,10 +55,10 @@ class CopyCommands {  
     public static final String NAME = "getmerge";    
     public static final String USAGE = "[-nl] <src> <localdst>";
     public static final String DESCRIPTION =
-      "Get all the files in the directories that\n" +
-      "match the source file pattern and merge and sort them to only\n" +
+      "Get all the files in the directories that " +
+      "match the source file pattern and merge and sort them to only " +
       "one file on local fs. <src> is kept.\n" +
-      "  -nl   Add a newline character at the end of each file.";
+      "-nl: Add a newline character at the end of each file.";
 
     protected PathData dst = null;
     protected String delimiter = null;
@@ -132,24 +133,49 @@ class CopyCommands {  
 
   static class Cp extends CommandWithDestination {
     public static final String NAME = "cp";
-    public static final String USAGE = "[-f] [-p] <src> ... <dst>";
+    public static final String USAGE = "[-f] [-p | -p[topax]] <src> ... <dst>";
     public static final String DESCRIPTION =
-      "Copy files that match the file pattern <src> to a\n" +
-      "destination.  When copying multiple files, the destination\n" +
-      "must be a directory. Passing -p preserves access and\n" +
-      "modification times, ownership and the mode. Passing -f\n" +
-      "overwrites the destination if it already exists.\n";
-    
+      "Copy files that match the file pattern <src> to a " +
+      "destination.  When copying multiple files, the destination " +
+      "must be a directory. Passing -p preserves status " +
+      "[topax] (timestamps, ownership, permission, ACLs, XAttr). " +
+      "If -p is specified with no <arg>, then preserves " +
+      "timestamps, ownership, permission. If -pa is specified, " +
+      "then preserves permission also because ACL is a super-set of " +
+      "permission. Passing -f overwrites the destination if it " +
+      "already exists.\n";
+
     @Override
     protected void processOptions(LinkedList<String> args) throws IOException {
-      CommandFormat cf = new CommandFormat(2, Integer.MAX_VALUE, "f", "p");
+      popPreserveOption(args);
+      CommandFormat cf = new CommandFormat(2, Integer.MAX_VALUE, "f");
       cf.parse(args);
       setOverwrite(cf.getOpt("f"));
-      setPreserve(cf.getOpt("p"));
       // should have a -r option
       setRecursive(true);
       getRemoteDestination(args);
     }
+    
+    private void popPreserveOption(List<String> args) {
+      for (Iterator<String> iter = args.iterator(); iter.hasNext(); ) {
+        String cur = iter.next();
+        if (cur.equals("--")) {
+          // stop parsing arguments when you see --
+          break;
+        } else if (cur.startsWith("-p")) {
+          iter.remove();
+          if (cur.length() == 2) {
+            setPreserve(true);
+          } else {
+            String attributes = cur.substring(2);
+            for (int index = 0; index < attributes.length(); index++) {
+              preserve(FileAttribute.getAttribute(attributes.charAt(index)));
+            }
+          }
+          return;
+        }
+      }
+    }
   }
   
   /** 
@@ -160,10 +186,10 @@ class CopyCommands {  
     public static final String USAGE =
       "[-p] [-ignoreCrc] [-crc] <src> ... <localdst>";
     public static final String DESCRIPTION =
-      "Copy files that match the file pattern <src>\n" +
-      "to the local name.  <src> is kept.  When copying multiple,\n" +
-      "files, the destination must be a directory. Passing\n" +
-      "-p preserves access and modification times,\n" +
+      "Copy files that match the file pattern <src> " +
+      "to the local name.  <src> is kept.  When copying multiple " +
+      "files, the destination must be a directory. Passing " +
+      "-p preserves access and modification times, " +
       "ownership and the mode.\n";
 
     @Override
@@ -187,11 +213,11 @@ class CopyCommands {  
     public static final String NAME = "put";
     public static final String USAGE = "[-f] [-p] <localsrc> ... <dst>";
     public static final String DESCRIPTION =
-      "Copy files from the local file system\n" +
-      "into fs. Copying fails if the file already\n" +
-      "exists, unless the -f flag is given. Passing\n" +
-      "-p preserves access and modification times,\n" +
-      "ownership and the mode. Passing -f overwrites\n" +
+      "Copy files from the local file system " +
+      "into fs. Copying fails if the file already " +
+      "exists, unless the -f flag is given. Passing " +
+      "-p preserves access and modification times, " +
+      "ownership and the mode. Passing -f overwrites " +
       "the destination if it already exists.\n";
 
     @Override
@@ -254,9 +280,9 @@ class CopyCommands {  
     public static final String NAME = "appendToFile";
     public static final String USAGE = "<localsrc> ... <dst>";
     public static final String DESCRIPTION =
-        "Appends the contents of all the given local files to the\n" +
-            "given dst file. The dst file will be created if it does\n" +
-            "not exist. If <localSrc> is -, then the input is read\n" +
+        "Appends the contents of all the given local files to the " +
+            "given dst file. The dst file will be created if it does " +
+            "not exist. If <localSrc> is -, then the input is read " +
             "from stdin.";
 
     private static final int DEFAULT_IO_LENGTH = 1024 * 1024;
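
Because -p takes an optional, attached attribute list, Cp scans the argument
list for it before CommandFormat parses the remaining flags, stopping at the
conventional "--" end-of-options marker. A minimal sketch of that scan
(returning the attribute letters rather than mutating command state; the
class name is illustrative):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;

    public class PreserveOptionScanSketch {
      static String popPreserveOption(List<String> args) {
        for (Iterator<String> iter = args.iterator(); iter.hasNext(); ) {
          String cur = iter.next();
          if (cur.equals("--")) {
            break;                  // "--" ends option processing
          } else if (cur.startsWith("-p")) {
            iter.remove();          // consume the flag before normal parsing
            // bare -p implies timestamps, ownership and permission
            return cur.length() == 2 ? "top" : cur.substring(2);
          }
        }
        return null;                // no -p flag present
      }

      public static void main(String[] args) {
        List<String> argv = new ArrayList<String>(
            Arrays.asList("-f", "-ptax", "src", "dst"));
        System.out.println(popPreserveOption(argv));  // tax
        System.out.println(argv);                     // [-f, src, dst]
      }
    }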


