hadoop-common-commits mailing list archives

From: cnaur...@apache.org
Subject: svn commit: r1595290 - in /hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src: main/java/org/apache/hadoop/tools/ main/java/org/apache/hadoop/tools/mapred/ main/java/org/apache/hadoop/tools/mapred/lib/ main/java/org/apache/hadoop/tools/util...
Date: Fri, 16 May 2014 18:32:16 GMT
Author: cnauroth
Date: Fri May 16 18:32:15 2014
New Revision: 1595290

URL: http://svn.apache.org/r1595290
Log:
MAPREDUCE-5809. Merging change r1595283 from trunk to branch-2.

Added:
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java
      - copied unchanged from r1595283, hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpWithAcls.java
      - copied unchanged from r1595283, hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpWithAcls.java
Modified:
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/UniformSizeInputFormat.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputChunk.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputFormat.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/StubContext.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestFileBasedCopyListing.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestGlobbedCopyListing.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/lib/TestDynamicInputFormat.java
    hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java Fri May 16 18:32:15 2014
@@ -22,7 +22,6 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.IOUtils;
@@ -31,11 +30,15 @@ import org.apache.hadoop.security.Creden
 
 import java.io.IOException;
 import java.lang.reflect.Constructor;
+import java.net.URI;
+import java.util.Set;
+
+import com.google.common.collect.Sets;
 
 /**
  * The CopyListing abstraction is responsible for how the list of
  * sources and targets is constructed, for DistCp's copy function.
- * The copy-listing should be a SequenceFile<Text, FileStatus>,
+ * The copy-listing should be a SequenceFile<Text, CopyListingFileStatus>,
  * located at the path specified to buildListing(),
  * each entry being a pair of (Source relative path, source file status),
  * all the paths being fully qualified.
@@ -85,7 +88,7 @@ public abstract class CopyListing extend
     config.setLong(DistCpConstants.CONF_LABEL_TOTAL_BYTES_TO_BE_COPIED, getBytesToCopy());
     config.setLong(DistCpConstants.CONF_LABEL_TOTAL_NUMBER_OF_RECORDS, getNumberOfPaths());
 
-    checkForDuplicates(pathToListFile);
+    validateFinalListing(pathToListFile, options);
   }
 
   /**
@@ -124,13 +127,15 @@ public abstract class CopyListing extend
   protected abstract long getNumberOfPaths();
 
   /**
-   * Validate the final resulting path listing to see if there are any duplicate entries
+   * Validate the final resulting path listing.  Checks if there are duplicate
+   * entries.  If preserving ACLs, checks that file system can support ACLs.
    *
    * @param pathToListFile - path listing build by doBuildListing
+   * @param options - Input options to distcp
    * @throws IOException - Any issues while checking for duplicates and throws
    * @throws DuplicateFileException - if there are duplicates
    */
-  private void checkForDuplicates(Path pathToListFile)
+  private void validateFinalListing(Path pathToListFile, DistCpOptions options)
       throws DuplicateFileException, IOException {
 
     Configuration config = getConf();
@@ -142,17 +147,26 @@ public abstract class CopyListing extend
                           config, SequenceFile.Reader.file(sortedList));
     try {
       Text lastKey = new Text("*"); //source relative path can never hold *
-      FileStatus lastFileStatus = new FileStatus();
+      CopyListingFileStatus lastFileStatus = new CopyListingFileStatus();
 
       Text currentKey = new Text();
+      Set<URI> aclSupportCheckFsSet = Sets.newHashSet();
       while (reader.next(currentKey)) {
         if (currentKey.equals(lastKey)) {
-          FileStatus currentFileStatus = new FileStatus();
+          CopyListingFileStatus currentFileStatus = new CopyListingFileStatus();
           reader.getCurrentValue(currentFileStatus);
           throw new DuplicateFileException("File " + lastFileStatus.getPath() + " and " +
               currentFileStatus.getPath() + " would cause duplicates. Aborting");
         }
         reader.getCurrentValue(lastFileStatus);
+        if (options.shouldPreserve(DistCpOptions.FileAttribute.ACL)) {
+          FileSystem lastFs = lastFileStatus.getPath().getFileSystem(config);
+          URI lastFsUri = lastFs.getUri();
+          if (!aclSupportCheckFsSet.contains(lastFsUri)) {
+            DistCpUtils.checkFileSystemAclSupport(lastFs);
+            aclSupportCheckFsSet.add(lastFsUri);
+          }
+        }
         lastKey.set(currentKey);
       }
     } finally {
@@ -236,4 +250,10 @@ public abstract class CopyListing extend
       super(message);
     }
   }
+
+  public static class AclsNotSupportedException extends RuntimeException {
+    public AclsNotSupportedException(String message) {
+      super(message);
+    }
+  }
 }
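
For illustration only (this block is not part of the patch): a minimal caller-side sketch of the new failure mode. If ACL preservation is requested and a source file system cannot answer getAclStatus(), buildListing() now surfaces CopyListing.AclsNotSupportedException. The class name is hypothetical and the setup of conf, credentials, options, and the listing path is assumed; the catch mirrors what DistCp.run() does in the next hunk.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.tools.CopyListing;
    import org.apache.hadoop.tools.DistCpOptions;

    public class BuildListingSketch {
      // Builds the copy listing and reports the new ACL-support failure, if any.
      public static void buildListing(Configuration conf, Credentials credentials,
          DistCpOptions options, Path listingPath) throws IOException {
        CopyListing listing = CopyListing.getCopyListing(conf, credentials, options);
        try {
          listing.buildListing(listingPath, options);
        } catch (CopyListing.AclsNotSupportedException e) {
          // Thrown by validateFinalListing() when ACL preservation is requested
          // but a source file system rejects getAclStatus().
          System.err.println(e.getMessage());
        }
      }
    }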

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java Fri May 16 18:32:15 2014
@@ -125,6 +125,9 @@ public class DistCp extends Configured i
     } catch (DuplicateFileException e) {
       LOG.error("Duplicate files in input path: ", e);
       return DistCpConstants.DUPLICATE_INPUT;
+    } catch (AclsNotSupportedException e) {
+      LOG.error("ACLs not supported on at least one file system: ", e);
+      return DistCpConstants.ACLS_NOT_SUPPORTED;
     } catch (Exception e) {
       LOG.error("Exception encountered ", e);
       return DistCpConstants.UNKNOWN_ERROR;
@@ -298,7 +301,9 @@ public class DistCp extends Configured i
     FileSystem targetFS = targetPath.getFileSystem(configuration);
     targetPath = targetPath.makeQualified(targetFS.getUri(),
                                           targetFS.getWorkingDirectory());
-
+    if (inputOptions.shouldPreserve(DistCpOptions.FileAttribute.ACL)) {
+      DistCpUtils.checkFileSystemAclSupport(targetFS);
+    }
     if (inputOptions.shouldAtomicCommit()) {
       Path workDir = inputOptions.getAtomicWorkPath();
       if (workDir == null) {

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java Fri May 16 18:32:15 2014
@@ -115,6 +115,7 @@ public class DistCpConstants {
   public static final int SUCCESS = 0;
   public static final int INVALID_ARGUMENT = -1;
   public static final int DUPLICATE_INPUT = -2;
+  public static final int ACLS_NOT_SUPPORTED = -3;
   public static final int UNKNOWN_ERROR = -999;
   
   /**
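
For orientation (not part of the patch), a small sketch that maps the driver's return codes, including the new ACLS_NOT_SUPPORTED value, to human-readable messages; the class and method names are hypothetical.

    import org.apache.hadoop.tools.DistCpConstants;

    public class ExitCodeSketch {
      // Hypothetical helper: translate a DistCp driver return code into a message.
      public static String describe(int exitCode) {
        if (exitCode == DistCpConstants.SUCCESS) {
          return "copy succeeded";
        } else if (exitCode == DistCpConstants.INVALID_ARGUMENT) {
          return "invalid command-line arguments";
        } else if (exitCode == DistCpConstants.DUPLICATE_INPUT) {
          return "duplicate entries in the copy listing";
        } else if (exitCode == DistCpConstants.ACLS_NOT_SUPPORTED) {
          return "ACL preservation requested, but a file system does not support ACLs";
        } else {
          return "unknown error";
        }
      }
    }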

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java Fri May 16 18:32:15 2014
@@ -45,8 +45,10 @@ public enum DistCpOptionSwitch {
    *
    */
   PRESERVE_STATUS(DistCpConstants.CONF_LABEL_PRESERVE_STATUS,
-      new Option("p", true, "preserve status (rbugpc)" +
-          "(replication, block-size, user, group, permission, checksum-type)")),
+      new Option("p", true, "preserve status (rbugpca)(replication, " +
+          "block-size, user, group, permission, checksum-type, ACL).  If " +
+          "-p is specified with no <arg>, then preserves replication, block " +
+          "size, user, group, permission and checksum type.")),
 
   /**
    * Update target location by copying only files that are missing
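
A quick sketch (not part of the patch) of the new 'a' flag in action, mirroring the parser tests further below: a bare -p keeps the pre-existing default attribute set, while -pa additionally enables ACL preservation. The source and target URIs are the same placeholders the tests use.

    import org.apache.hadoop.tools.DistCpOptions;
    import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
    import org.apache.hadoop.tools.OptionsParser;

    public class PreserveAclFlagSketch {
      public static void main(String[] args) {
        DistCpOptions options = OptionsParser.parse(new String[] {
            "-pa",
            "hdfs://localhost:8020/source/first",
            "hdfs://localhost:8020/target/"});
        // Expected: true with -pa, false with a bare -p.
        System.out.println(options.shouldPreserve(FileAttribute.ACL));
      }
    }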

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java Fri May 16 18:32:15 2014
@@ -65,7 +65,7 @@ public class DistCpOptions {
   private boolean targetPathExists = true;
   
   public static enum FileAttribute{
-    REPLICATION, BLOCKSIZE, USER, GROUP, PERMISSION, CHECKSUMTYPE;
+    REPLICATION, BLOCKSIZE, USER, GROUP, PERMISSION, CHECKSUMTYPE, ACL;
 
     public static FileAttribute getAttribute(char symbol) {
       for (FileAttribute attribute : values()) {

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java Fri May 16 18:32:15 2014
@@ -23,11 +23,12 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.DataInputBuffer;
+import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
 import org.apache.hadoop.tools.util.DistCpUtils;
 import org.apache.hadoop.mapreduce.security.TokenCache;
 import org.apache.hadoop.security.Credentials;
@@ -35,6 +36,7 @@ import org.apache.hadoop.security.Creden
 import com.google.common.annotations.VisibleForTesting;
 
 import java.io.*;
+import java.util.List;
 import java.util.Stack;
 
 /**
@@ -139,28 +141,34 @@ public class SimpleCopyListing extends C
 
         FileStatus rootStatus = sourceFS.getFileStatus(path);
         Path sourcePathRoot = computeSourceRootPath(rootStatus, options);
-        boolean localFile = (rootStatus.getClass() != FileStatus.class);
 
         FileStatus[] sourceFiles = sourceFS.listStatus(path);
         boolean explore = (sourceFiles != null && sourceFiles.length > 0);
         if (!explore || rootStatus.isDirectory()) {
-          writeToFileListingRoot(fileListWriter, rootStatus, sourcePathRoot,
-              localFile, options);
+          CopyListingFileStatus rootCopyListingStatus =
+            DistCpUtils.toCopyListingFileStatus(sourceFS, rootStatus,
+              options.shouldPreserve(FileAttribute.ACL));
+          writeToFileListingRoot(fileListWriter, rootCopyListingStatus,
+              sourcePathRoot, options);
         }
         if (explore) {
           for (FileStatus sourceStatus: sourceFiles) {
             if (LOG.isDebugEnabled()) {
               LOG.debug("Recording source-path: " + sourceStatus.getPath() + " for copy.");
             }
-            writeToFileListing(fileListWriter, sourceStatus, sourcePathRoot,
-                localFile, options);
+            CopyListingFileStatus sourceCopyListingStatus =
+              DistCpUtils.toCopyListingFileStatus(sourceFS, sourceStatus,
+                options.shouldPreserve(FileAttribute.ACL) &&
+                sourceStatus.isDirectory());
+            writeToFileListing(fileListWriter, sourceCopyListingStatus,
+                sourcePathRoot, options);
 
             if (isDirectoryAndNotEmpty(sourceFS, sourceStatus)) {
               if (LOG.isDebugEnabled()) {
                 LOG.debug("Traversing non-empty source dir: " + sourceStatus.getPath());
               }
               traverseNonEmptyDirectory(fileListWriter, sourceStatus, sourcePathRoot,
-                  localFile, options);
+                  options);
             }
           }
         }
@@ -233,7 +241,7 @@ public class SimpleCopyListing extends C
     return SequenceFile.createWriter(getConf(),
             SequenceFile.Writer.file(pathToListFile),
             SequenceFile.Writer.keyClass(Text.class),
-            SequenceFile.Writer.valueClass(FileStatus.class),
+            SequenceFile.Writer.valueClass(CopyListingFileStatus.class),
             SequenceFile.Writer.compression(SequenceFile.CompressionType.NONE));
   }
 
@@ -250,7 +258,6 @@ public class SimpleCopyListing extends C
   private void traverseNonEmptyDirectory(SequenceFile.Writer fileListWriter,
                                          FileStatus sourceStatus,
                                          Path sourcePathRoot,
-                                         boolean localFile,
                                          DistCpOptions options)
                                          throws IOException {
     FileSystem sourceFS = sourcePathRoot.getFileSystem(getConf());
@@ -262,8 +269,11 @@ public class SimpleCopyListing extends C
         if (LOG.isDebugEnabled())
           LOG.debug("Recording source-path: "
                     + sourceStatus.getPath() + " for copy.");
-        writeToFileListing(fileListWriter, child, sourcePathRoot,
-             localFile, options);
+        CopyListingFileStatus childCopyListingStatus =
+          DistCpUtils.toCopyListingFileStatus(sourceFS, child,
+            options.shouldPreserve(FileAttribute.ACL) && child.isDirectory());
+        writeToFileListing(fileListWriter, childCopyListingStatus,
+             sourcePathRoot, options);
         if (isDirectoryAndNotEmpty(sourceFS, child)) {
           if (LOG.isDebugEnabled())
             LOG.debug("Traversing non-empty source dir: "
@@ -275,8 +285,7 @@ public class SimpleCopyListing extends C
   }
   
   private void writeToFileListingRoot(SequenceFile.Writer fileListWriter,
-      FileStatus fileStatus, Path sourcePathRoot,
-      boolean localFile,
+      CopyListingFileStatus fileStatus, Path sourcePathRoot,
       DistCpOptions options) throws IOException {
     boolean syncOrOverwrite = options.shouldSyncFolder() ||
         options.shouldOverwrite();
@@ -288,14 +297,12 @@ public class SimpleCopyListing extends C
       }      
       return;
     }
-    writeToFileListing(fileListWriter, fileStatus, sourcePathRoot, localFile,
-        options);
+    writeToFileListing(fileListWriter, fileStatus, sourcePathRoot, options);
   }
 
   private void writeToFileListing(SequenceFile.Writer fileListWriter,
-                                  FileStatus fileStatus,
+                                  CopyListingFileStatus fileStatus,
                                   Path sourcePathRoot,
-                                  boolean localFile,
                                   DistCpOptions options) throws IOException {
     if (LOG.isDebugEnabled()) {
       LOG.debug("REL PATH: " + DistCpUtils.getRelativePath(sourcePathRoot,
@@ -303,9 +310,6 @@ public class SimpleCopyListing extends C
     }
 
     FileStatus status = fileStatus;
-    if (localFile) {
-      status = getFileStatus(fileStatus);
-    }
 
     if (!shouldCopy(fileStatus.getPath(), options)) {
       return;
@@ -320,19 +324,4 @@ public class SimpleCopyListing extends C
     }
     totalPaths++;
   }
-
-  private static final ByteArrayOutputStream buffer = new ByteArrayOutputStream(64);
-  private DataInputBuffer in = new DataInputBuffer();
-  
-  private FileStatus getFileStatus(FileStatus fileStatus) throws IOException {
-    FileStatus status = new FileStatus();
-
-    buffer.reset();
-    DataOutputStream out = new DataOutputStream(buffer);
-    fileStatus.write(out);
-
-    in.reset(buffer.toByteArray(), 0, buffer.size());
-    status.readFields(in);
-    return status;
-  }
 }
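
Because the listing writer's value class changed above, here is a consumer-side sketch (not part of the patch) that reads the listing back as a SequenceFile<Text, CopyListingFileStatus>, following the same reader pattern as the updated tests; conf and the listing path are assumed to come from the caller.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.tools.CopyListingFileStatus;

    public class ReadListingSketch {
      // Iterates the copy listing: key is the source-relative path, value is the
      // CopyListingFileStatus (which may carry ACL entries when ACLs are preserved).
      public static void dumpListing(Configuration conf, Path listingPath)
          throws IOException {
        SequenceFile.Reader reader = new SequenceFile.Reader(conf,
            SequenceFile.Reader.file(listingPath));
        try {
          Text relPath = new Text();
          CopyListingFileStatus status = new CopyListingFileStatus();
          while (reader.next(relPath, status)) {
            System.out.println(relPath + " -> " + status.getPath());
          }
        } finally {
          reader.close();
        }
      }
    }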

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java Fri May 16 18:32:15 2014
@@ -178,7 +178,7 @@ public class CopyCommitter extends FileO
 
     long preservedEntries = 0;
     try {
-      FileStatus srcFileStatus = new FileStatus();
+      CopyListingFileStatus srcFileStatus = new CopyListingFileStatus();
       Text srcRelPath = new Text();
 
       // Iterate over every source path that was copied.
@@ -246,9 +246,9 @@ public class CopyCommitter extends FileO
     // Delete all from target that doesn't also exist on source.
     long deletedEntries = 0;
     try {
-      FileStatus srcFileStatus = new FileStatus();
+      CopyListingFileStatus srcFileStatus = new CopyListingFileStatus();
       Text srcRelPath = new Text();
-      FileStatus trgtFileStatus = new FileStatus();
+      CopyListingFileStatus trgtFileStatus = new CopyListingFileStatus();
       Text trgtRelPath = new Text();
 
       FileSystem targetFS = targetFinalPath.getFileSystem(conf);

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java Fri May 16 18:32:15 2014
@@ -24,9 +24,11 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.tools.CopyListingFileStatus;
 import org.apache.hadoop.tools.DistCpConstants;
 import org.apache.hadoop.tools.DistCpOptionSwitch;
 import org.apache.hadoop.tools.DistCpOptions;
@@ -37,12 +39,13 @@ import org.apache.hadoop.util.StringUtil
 import java.io.*;
 import java.util.EnumSet;
 import java.util.Arrays;
+import java.util.List;
 
 /**
  * Mapper class that executes the DistCp copy operation.
  * Implements the o.a.h.mapreduce.Mapper<> interface.
  */
-public class CopyMapper extends Mapper<Text, FileStatus, Text, Text> {
+public class CopyMapper extends Mapper<Text, CopyListingFileStatus, Text, Text> {
 
   /**
    * Hadoop counters for the DistCp CopyMapper.
@@ -172,8 +175,8 @@ public class CopyMapper extends Mapper<T
    * @throws IOException
    */
   @Override
-  public void map(Text relPath, FileStatus sourceFileStatus, Context context)
-          throws IOException, InterruptedException {
+  public void map(Text relPath, CopyListingFileStatus sourceFileStatus,
+          Context context) throws IOException, InterruptedException {
     Path sourcePath = sourceFileStatus.getPath();
 
     if (LOG.isDebugEnabled())
@@ -191,11 +194,13 @@ public class CopyMapper extends Mapper<T
     LOG.info(description);
 
     try {
-      FileStatus sourceCurrStatus;
+      CopyListingFileStatus sourceCurrStatus;
       FileSystem sourceFS;
       try {
         sourceFS = sourcePath.getFileSystem(conf);
-        sourceCurrStatus = sourceFS.getFileStatus(sourcePath);
+        sourceCurrStatus = DistCpUtils.toCopyListingFileStatus(sourceFS,
+          sourceFS.getFileStatus(sourcePath),
+          fileAttributes.contains(FileAttribute.ACL));
       } catch (FileNotFoundException e) {
         throw new IOException(new RetriableFileCopyCommand.CopyReadException(e));
       }
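
A minimal sketch (not part of the patch) of feeding the retyped CopyMapper: callers, like the updated tests below, now wrap the source FileStatus in a CopyListingFileStatus before calling map(). The mapper is assumed to have been set up already, and the helper name is hypothetical.

    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.tools.CopyListingFileStatus;
    import org.apache.hadoop.tools.mapred.CopyMapper;

    public class MapOneFileSketch {
      // Hands one (relative path, file status) pair to an already set-up CopyMapper.
      public static void mapOneFile(CopyMapper copyMapper,
          Mapper<Text, CopyListingFileStatus, Text, Text>.Context context,
          FileSystem fs, Path sourcePath, String relativePath)
          throws IOException, InterruptedException {
        copyMapper.map(new Text(relativePath),
            new CopyListingFileStatus(fs.getFileStatus(sourcePath)), context);
      }
    }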

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/UniformSizeInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/UniformSizeInputFormat.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/UniformSizeInputFormat.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/UniformSizeInputFormat.java Fri May 16 18:32:15 2014
@@ -23,11 +23,11 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.tools.CopyListingFileStatus;
 import org.apache.hadoop.tools.DistCpConstants;
 import org.apache.hadoop.tools.util.DistCpUtils;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
@@ -44,7 +44,8 @@ import java.util.ArrayList;
  * that the total-number of bytes to be copied for each input split is
  * uniform.
  */
-public class UniformSizeInputFormat extends InputFormat<Text, FileStatus> {
+public class UniformSizeInputFormat
+    extends InputFormat<Text, CopyListingFileStatus> {
   private static final Log LOG
                 = LogFactory.getLog(UniformSizeInputFormat.class);
 
@@ -76,7 +77,7 @@ public class UniformSizeInputFormat exte
     List<InputSplit> splits = new ArrayList<InputSplit>(numSplits);
     long nBytesPerSplit = (long) Math.ceil(totalSizeBytes * 1.0 / numSplits);
 
-    FileStatus srcFileStatus = new FileStatus();
+    CopyListingFileStatus srcFileStatus = new CopyListingFileStatus();
     Text srcRelPath = new Text();
     long currentSplitSize = 0;
     long lastSplitStart = 0;
@@ -161,9 +162,9 @@ public class UniformSizeInputFormat exte
    * @throws InterruptedException
    */
   @Override
-  public RecordReader<Text, FileStatus> createRecordReader(InputSplit split,
-                                                     TaskAttemptContext context)
-                                      throws IOException, InterruptedException {
-    return new SequenceFileRecordReader<Text, FileStatus>();
+  public RecordReader<Text, CopyListingFileStatus> createRecordReader(
+      InputSplit split, TaskAttemptContext context)
+      throws IOException, InterruptedException {
+    return new SequenceFileRecordReader<Text, CopyListingFileStatus>();
   }
 }

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputChunk.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputChunk.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputChunk.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputChunk.java Fri May 16 18:32:15 2014
@@ -26,6 +26,7 @@ import org.apache.hadoop.io.SequenceFile
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.tools.CopyListingFileStatus;
 import org.apache.hadoop.tools.DistCpConstants;
 import org.apache.hadoop.tools.util.DistCpUtils;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileRecordReader;
@@ -90,7 +91,7 @@ class DynamicInputChunk<K, V> {
   private void openForWrite() throws IOException {
     writer = SequenceFile.createWriter(
             chunkFilePath.getFileSystem(configuration), configuration,
-            chunkFilePath, Text.class, FileStatus.class,
+            chunkFilePath, Text.class, CopyListingFileStatus.class,
             SequenceFile.CompressionType.NONE);
 
   }
@@ -117,7 +118,7 @@ class DynamicInputChunk<K, V> {
    * @param value Corresponding value from the listing file.
    * @throws IOException Exception onf failure to write to the file.
    */
-  public void write(Text key, FileStatus value) throws IOException {
+  public void write(Text key, CopyListingFileStatus value) throws IOException {
     writer.append(key, value);
   }
 

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputFormat.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputFormat.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputFormat.java Fri May 16 18:32:15 2014
@@ -29,7 +29,7 @@ import org.apache.hadoop.io.SequenceFile
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.tools.CopyListingFileStatus;
 
 import java.util.List;
 import java.util.ArrayList;
@@ -133,7 +133,7 @@ public class DynamicInputFormat<K, V> ex
     
     List<DynamicInputChunk> chunksFinal = new ArrayList<DynamicInputChunk>();
 
-    FileStatus fileStatus = new FileStatus();
+    CopyListingFileStatus fileStatus = new CopyListingFileStatus();
     Text relPath = new Text();
     int recordCounter = 0;
     int chunkCount = 0;

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java Fri May 16 18:32:15 2014
@@ -25,15 +25,21 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileChecksum;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclUtil;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.tools.CopyListingFileStatus;
 import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
 import org.apache.hadoop.tools.mapred.UniformSizeInputFormat;
+import org.apache.hadoop.tools.CopyListing.AclsNotSupportedException;
 import org.apache.hadoop.tools.DistCpOptions;
 import org.apache.hadoop.mapreduce.InputFormat;
 
 import java.io.IOException;
 import java.util.EnumSet;
+import java.util.List;
 import java.util.Locale;
 import java.text.DecimalFormat;
 import java.net.URI;
@@ -181,7 +187,7 @@ public class DistCpUtils {
    *                       change or any transient error)
    */
   public static void preserve(FileSystem targetFS, Path path,
-                              FileStatus srcFileStatus,
+                              CopyListingFileStatus srcFileStatus,
                               EnumSet<FileAttribute> attributes) throws IOException {
 
     FileStatus targetFileStatus = targetFS.getFileStatus(path);
@@ -189,7 +195,18 @@ public class DistCpUtils {
     String user = targetFileStatus.getOwner();
     boolean chown = false;
 
-    if (attributes.contains(FileAttribute.PERMISSION) &&
+    if (attributes.contains(FileAttribute.ACL)) {
+      List<AclEntry> srcAcl = srcFileStatus.getAclEntries();
+      List<AclEntry> targetAcl = getAcl(targetFS, targetFileStatus);
+      if (!srcAcl.equals(targetAcl)) {
+        targetFS.setAcl(path, srcAcl);
+      }
+      // setAcl can't preserve sticky bit, so also call setPermission if needed.
+      if (srcFileStatus.getPermission().getStickyBit() !=
+          targetFileStatus.getPermission().getStickyBit()) {
+        targetFS.setPermission(path, srcFileStatus.getPermission());
+      }
+    } else if (attributes.contains(FileAttribute.PERMISSION) &&
       !srcFileStatus.getPermission().equals(targetFileStatus.getPermission())) {
       targetFS.setPermission(path, srcFileStatus.getPermission());
     }
@@ -217,6 +234,46 @@ public class DistCpUtils {
   }
 
   /**
+   * Returns a file's full logical ACL.
+   *
+   * @param fileSystem FileSystem containing the file
+   * @param fileStatus FileStatus of file
+   * @return List<AclEntry> containing full logical ACL
+   * @throws IOException if there is an I/O error
+   */
+  public static List<AclEntry> getAcl(FileSystem fileSystem,
+      FileStatus fileStatus) throws IOException {
+    List<AclEntry> entries = fileSystem.getAclStatus(fileStatus.getPath())
+      .getEntries();
+    return AclUtil.getAclFromPermAndEntries(fileStatus.getPermission(), entries);
+  }
+
+  /**
+   * Converts a FileStatus to a CopyListingFileStatus.  If preserving ACLs,
+   * populates the CopyListingFileStatus with the ACLs.
+   *
+   * @param fileSystem FileSystem containing the file
+   * @param fileStatus FileStatus of file
+   * @param preserveAcls boolean true if preserving ACLs
+   * @throws IOException if there is an I/O error
+   */
+  public static CopyListingFileStatus toCopyListingFileStatus(
+      FileSystem fileSystem, FileStatus fileStatus, boolean preserveAcls)
+      throws IOException {
+    CopyListingFileStatus copyListingFileStatus =
+      new CopyListingFileStatus(fileStatus);
+    if (preserveAcls) {
+      FsPermission perm = fileStatus.getPermission();
+      if (perm.getAclBit()) {
+        List<AclEntry> aclEntries = fileSystem.getAclStatus(
+          fileStatus.getPath()).getEntries();
+        copyListingFileStatus.setAclEntries(aclEntries);
+      }
+    }
+    return copyListingFileStatus;
+  }
+
+  /**
   * Sort sequence file containing FileStatus and Text as key and value respectively
    *
    * @param fs - File System
@@ -227,7 +284,8 @@ public class DistCpUtils {
    */
   public static Path sortListing(FileSystem fs, Configuration conf, Path sourceListing)
       throws IOException {
-    SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs, Text.class, FileStatus.class, conf);
+    SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs, Text.class,
+      CopyListingFileStatus.class, conf);
     Path output = new Path(sourceListing.toString() +  "_sorted");
 
     if (fs.exists(output)) {
@@ -239,6 +297,25 @@ public class DistCpUtils {
   }
 
   /**
+   * Determines if a file system supports ACLs by running a canary getAclStatus
+   * request on the file system root.  This method is used before distcp job
+   * submission to fail fast if the user requested preserving ACLs, but the file
+   * system cannot support ACLs.
+   *
+   * @param fs FileSystem to check
+   * @throws AclsNotSupportedException if fs does not support ACLs
+   */
+  public static void checkFileSystemAclSupport(FileSystem fs)
+      throws AclsNotSupportedException {
+    try {
+      fs.getAclStatus(new Path(Path.SEPARATOR));
+    } catch (Exception e) {
+      throw new AclsNotSupportedException("ACLs not supported for file system: "
+        + fs.getUri());
+    }
+  }
+
+  /**
    * String utility to convert a number-of-bytes to human readable format.
    */
   private static ThreadLocal<DecimalFormat> FORMATTER
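
A caller-side sketch (not part of the patch) tying the new helpers together: probe the file system with checkFileSystemAclSupport(), then build a CopyListingFileStatus with ACL entries attached via toCopyListingFileStatus(). fs and path are assumed inputs, and the wrapper class name is hypothetical.

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.AclEntry;
    import org.apache.hadoop.tools.CopyListingFileStatus;
    import org.apache.hadoop.tools.util.DistCpUtils;

    public class AclStatusSketch {
      public static CopyListingFileStatus withAcls(FileSystem fs, Path path)
          throws IOException {
        // Fail fast if the file system cannot answer getAclStatus(), as the
        // DistCp driver now does before job submission.
        DistCpUtils.checkFileSystemAclSupport(fs);
        FileStatus stat = fs.getFileStatus(path);
        CopyListingFileStatus copyStat =
            DistCpUtils.toCopyListingFileStatus(fs, stat, true /* preserveAcls */);
        List<AclEntry> acls = copyStat.getAclEntries();
        System.out.println("ACL entries recorded: " + (acls == null ? 0 : acls.size()));
        return copyStat;
      }
    }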

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/StubContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/StubContext.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/StubContext.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/StubContext.java Fri May 16 18:32:15 2014
@@ -23,7 +23,6 @@ import org.apache.hadoop.mapreduce.task.
 import org.apache.hadoop.mapreduce.lib.map.WrappedMapper;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.conf.Configuration;
 
 import java.util.List;
@@ -33,18 +32,19 @@ import java.io.IOException;
 public class StubContext {
 
   private StubStatusReporter reporter = new StubStatusReporter();
-  private RecordReader<Text, FileStatus> reader;
+  private RecordReader<Text, CopyListingFileStatus> reader;
   private StubInMemoryWriter writer = new StubInMemoryWriter();
-  private Mapper<Text, FileStatus, Text, Text>.Context mapperContext;
+  private Mapper<Text, CopyListingFileStatus, Text, Text>.Context mapperContext;
 
-  public StubContext(Configuration conf, RecordReader<Text, FileStatus> reader,
-                     int taskId) throws IOException, InterruptedException {
+  public StubContext(Configuration conf,
+      RecordReader<Text, CopyListingFileStatus> reader, int taskId)
+      throws IOException, InterruptedException {
 
-    WrappedMapper<Text, FileStatus, Text, Text> wrappedMapper
-            = new WrappedMapper<Text, FileStatus, Text, Text>();
+    WrappedMapper<Text, CopyListingFileStatus, Text, Text> wrappedMapper
+            = new WrappedMapper<Text, CopyListingFileStatus, Text, Text>();
 
-    MapContextImpl<Text, FileStatus, Text, Text> contextImpl
-            = new MapContextImpl<Text, FileStatus, Text, Text>(conf,
+    MapContextImpl<Text, CopyListingFileStatus, Text, Text> contextImpl
+            = new MapContextImpl<Text, CopyListingFileStatus, Text, Text>(conf,
             getTaskAttemptID(taskId), reader, writer,
             null, reporter, null);
 
@@ -52,7 +52,7 @@ public class StubContext {
     this.mapperContext = wrappedMapper.getMapContext(contextImpl);
   }
 
-  public Mapper<Text, FileStatus, Text, Text>.Context getContext() {
+  public Mapper<Text, CopyListingFileStatus, Text, Text>.Context getContext() {
     return mapperContext;
   }
 
@@ -60,7 +60,7 @@ public class StubContext {
     return reporter;
   }
 
-  public RecordReader<Text, FileStatus> getReader() {
+  public RecordReader<Text, CopyListingFileStatus> getReader() {
     return reader;
   }
 

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java Fri May 16 18:32:15 2014
@@ -24,7 +24,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
 import org.apache.hadoop.tools.util.TestDistCpUtils;
@@ -106,7 +105,7 @@ public class TestCopyListing extends Sim
     Assert.assertEquals(listing.getNumberOfPaths(), 3);
     SequenceFile.Reader reader = new SequenceFile.Reader(getConf(),
         SequenceFile.Reader.file(listingFile));
-    FileStatus fileStatus = new FileStatus();
+    CopyListingFileStatus fileStatus = new CopyListingFileStatus();
     Text relativePath = new Text();
     Assert.assertTrue(reader.next(relativePath, fileStatus));
     Assert.assertEquals(relativePath.toString(), "/1");
@@ -274,7 +273,7 @@ public class TestCopyListing extends Sim
 
       reader = new SequenceFile.Reader(getConf(), SequenceFile.Reader.file(listFile));
 
-      FileStatus fileStatus = new FileStatus();
+      CopyListingFileStatus fileStatus = new CopyListingFileStatus();
       Text relativePath = new Text();
       Assert.assertTrue(reader.next(relativePath, fileStatus));
       Assert.assertTrue(relativePath.toString().equals(""));

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestFileBasedCopyListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestFileBasedCopyListing.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestFileBasedCopyListing.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestFileBasedCopyListing.java Fri May 16 18:32:15 2014
@@ -23,7 +23,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
@@ -531,7 +530,7 @@ public class TestFileBasedCopyListing {
                                             SequenceFile.Reader.file(listFile));
     try {
       Text relPath = new Text();
-      FileStatus fileStatus = new FileStatus();
+      CopyListingFileStatus fileStatus = new CopyListingFileStatus();
       while (reader.next(relPath, fileStatus)) {
         if (fileStatus.isDirectory() && relPath.toString().equals("")) {
           // ignore root with empty relPath, which is an entry to be 

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestGlobbedCopyListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestGlobbedCopyListing.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestGlobbedCopyListing.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestGlobbedCopyListing.java Fri May 16 18:32:15 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.tools;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -121,7 +120,7 @@ public class TestGlobbedCopyListing {
     SequenceFile.Reader reader = new SequenceFile.Reader(cluster.getFileSystem(),
                                               listingPath, new Configuration());
     Text key   = new Text();
-    FileStatus value = new FileStatus();
+    CopyListingFileStatus value = new CopyListingFileStatus();
     Map<String, String> actualValues = new HashMap<String, String>();
     while (reader.next(key, value)) {
       if (value.isDirectory() && key.toString().equals("")) {

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java Fri May 16 18:32:15 2014
@@ -410,6 +410,7 @@ public class TestOptionsParser {
     Assert.assertTrue(options.shouldPreserve(FileAttribute.USER));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.GROUP));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.CHECKSUMTYPE));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.ACL));
 
     options = OptionsParser.parse(new String[] {
         "-p",
@@ -421,6 +422,7 @@ public class TestOptionsParser {
     Assert.assertTrue(options.shouldPreserve(FileAttribute.USER));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.GROUP));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.CHECKSUMTYPE));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.ACL));
 
     options = OptionsParser.parse(new String[] {
         "-pbr",
@@ -433,6 +435,7 @@ public class TestOptionsParser {
     Assert.assertFalse(options.shouldPreserve(FileAttribute.USER));
     Assert.assertFalse(options.shouldPreserve(FileAttribute.GROUP));
     Assert.assertFalse(options.shouldPreserve(FileAttribute.CHECKSUMTYPE));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.ACL));
 
     options = OptionsParser.parse(new String[] {
         "-pbrgup",
@@ -445,9 +448,10 @@ public class TestOptionsParser {
     Assert.assertTrue(options.shouldPreserve(FileAttribute.USER));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.GROUP));
     Assert.assertFalse(options.shouldPreserve(FileAttribute.CHECKSUMTYPE));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.ACL));
 
     options = OptionsParser.parse(new String[] {
-        "-pbrgupc",
+        "-pbrgupca",
         "-f",
         "hdfs://localhost:8020/source/first",
         "hdfs://localhost:8020/target/"});
@@ -457,6 +461,7 @@ public class TestOptionsParser {
     Assert.assertTrue(options.shouldPreserve(FileAttribute.USER));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.GROUP));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.CHECKSUMTYPE));
+    Assert.assertTrue(options.shouldPreserve(FileAttribute.ACL));
 
     options = OptionsParser.parse(new String[] {
         "-pc",
@@ -469,6 +474,7 @@ public class TestOptionsParser {
     Assert.assertFalse(options.shouldPreserve(FileAttribute.USER));
     Assert.assertFalse(options.shouldPreserve(FileAttribute.GROUP));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.CHECKSUMTYPE));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.ACL));
 
     options = OptionsParser.parse(new String[] {
         "-p",
@@ -485,7 +491,7 @@ public class TestOptionsParser {
 
     try {
       OptionsParser.parse(new String[] {
-          "-pabc",
+          "-pabcd",
           "-f",
           "hdfs://localhost:8020/source/first",
           "hdfs://localhost:8020/target"});

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java Fri May 16 18:32:15 2014
@@ -42,6 +42,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.tools.CopyListingFileStatus;
 import org.apache.hadoop.tools.DistCpConstants;
 import org.apache.hadoop.tools.DistCpOptionSwitch;
 import org.apache.hadoop.tools.DistCpOptions;
@@ -222,7 +223,7 @@ public class TestCopyMapper {
       FileSystem fs = cluster.getFileSystem();
       CopyMapper copyMapper = new CopyMapper();
       StubContext stubContext = new StubContext(getConfiguration(), null, 0);
-      Mapper<Text, FileStatus, Text, Text>.Context context
+      Mapper<Text, CopyListingFileStatus, Text, Text>.Context context
               = stubContext.getContext();
 
       Configuration configuration = context.getConfiguration();
@@ -238,7 +239,7 @@ public class TestCopyMapper {
 
       for (Path path: pathList) {
         copyMapper.map(new Text(DistCpUtils.getRelativePath(new Path(SOURCE_PATH), path)),
-                fs.getFileStatus(path), context);
+                new CopyListingFileStatus(fs.getFileStatus(path)), context);
       }
 
       // Check that the maps worked.
@@ -283,12 +284,11 @@ public class TestCopyMapper {
   }
 
   private void testCopyingExistingFiles(FileSystem fs, CopyMapper copyMapper,
-                                        Mapper<Text, FileStatus, Text, Text>.Context context) {
-
+      Mapper<Text, CopyListingFileStatus, Text, Text>.Context context) {
     try {
       for (Path path : pathList) {
         copyMapper.map(new Text(DistCpUtils.getRelativePath(new Path(SOURCE_PATH), path)),
-                fs.getFileStatus(path), context);
+                new CopyListingFileStatus(fs.getFileStatus(path)), context);
       }
 
       Assert.assertEquals(nFiles,
@@ -309,7 +309,7 @@ public class TestCopyMapper {
       FileSystem fs = cluster.getFileSystem();
       CopyMapper copyMapper = new CopyMapper();
       StubContext stubContext = new StubContext(getConfiguration(), null, 0);
-      Mapper<Text, FileStatus, Text, Text>.Context context
+      Mapper<Text, CopyListingFileStatus, Text, Text>.Context context
               = stubContext.getContext();
 
       Configuration configuration = context.getConfiguration();
@@ -320,7 +320,7 @@ public class TestCopyMapper {
       copyMapper.setup(context);
 
       copyMapper.map(new Text(DistCpUtils.getRelativePath(new Path(SOURCE_PATH), pathList.get(0))),
-              fs.getFileStatus(pathList.get(0)), context);
+              new CopyListingFileStatus(fs.getFileStatus(pathList.get(0))), context);
 
       Assert.assertTrue("There should have been an exception.", false);
     }
@@ -343,7 +343,7 @@ public class TestCopyMapper {
       FileSystem fs = cluster.getFileSystem();
       CopyMapper copyMapper = new CopyMapper();
       StubContext stubContext = new StubContext(getConfiguration(), null, 0);
-      Mapper<Text, FileStatus, Text, Text>.Context context
+      Mapper<Text, CopyListingFileStatus, Text, Text>.Context context
               = stubContext.getContext();
 
       mkdirs(SOURCE_PATH + "/src/file");
@@ -351,7 +351,8 @@ public class TestCopyMapper {
       try {
         copyMapper.setup(context);
         copyMapper.map(new Text("/src/file"),
-            fs.getFileStatus(new Path(SOURCE_PATH + "/src/file")),
+            new CopyListingFileStatus(fs.getFileStatus(
+              new Path(SOURCE_PATH + "/src/file"))),
             context);
       } catch (IOException e) {
         Assert.assertTrue(e.getMessage().startsWith("Can't replace"));
@@ -372,22 +373,24 @@ public class TestCopyMapper {
 
       final CopyMapper copyMapper = new CopyMapper();
 
-      final Mapper<Text, FileStatus, Text, Text>.Context context =  tmpUser.
-          doAs(new PrivilegedAction<Mapper<Text, FileStatus, Text, Text>.Context>() {
-        @Override
-        public Mapper<Text, FileStatus, Text, Text>.Context run() {
-          try {
-            StubContext stubContext = new StubContext(getConfiguration(), null, 0);
-            return stubContext.getContext();
-          } catch (Exception e) {
-            LOG.error("Exception encountered ", e);
-            throw new RuntimeException(e);
-          }
-        }
-      });
+      final Mapper<Text, CopyListingFileStatus, Text, Text>.Context context =
+        tmpUser.doAs(
+          new PrivilegedAction<Mapper<Text, CopyListingFileStatus, Text, Text>.Context>() {
+            @Override
+            public Mapper<Text, CopyListingFileStatus, Text, Text>.Context run() {
+              try {
+                StubContext stubContext = new StubContext(getConfiguration(), null, 0);
+                return stubContext.getContext();
+              } catch (Exception e) {
+                LOG.error("Exception encountered ", e);
+                throw new RuntimeException(e);
+              }
+            }
+          });
 
       EnumSet<DistCpOptions.FileAttribute> preserveStatus =
           EnumSet.allOf(DistCpOptions.FileAttribute.class);
+      preserveStatus.remove(DistCpOptions.FileAttribute.ACL);
 
       context.getConfiguration().set(DistCpConstants.CONF_LABEL_PRESERVE_STATUS,
         DistCpUtils.packAttributes(preserveStatus));
@@ -415,7 +418,8 @@ public class TestCopyMapper {
           try {
             copyMapper.setup(context);
             copyMapper.map(new Text("/src/file"),
-                tmpFS.getFileStatus(new Path(SOURCE_PATH + "/src/file")),
+                new CopyListingFileStatus(tmpFS.getFileStatus(
+                  new Path(SOURCE_PATH + "/src/file"))),
                 context);
             Assert.fail("Expected copy to fail");
           } catch (AccessControlException e) {
@@ -442,19 +446,20 @@ public class TestCopyMapper {
 
       final CopyMapper copyMapper = new CopyMapper();
 
-      final Mapper<Text, FileStatus, Text, Text>.Context context =  tmpUser.
-          doAs(new PrivilegedAction<Mapper<Text, FileStatus, Text, Text>.Context>() {
-        @Override
-        public Mapper<Text, FileStatus, Text, Text>.Context run() {
-          try {
-            StubContext stubContext = new StubContext(getConfiguration(), null, 0);
-            return stubContext.getContext();
-          } catch (Exception e) {
-            LOG.error("Exception encountered ", e);
-            throw new RuntimeException(e);
-          }
-        }
-      });
+      final Mapper<Text, CopyListingFileStatus, Text, Text>.Context context =
+        tmpUser.doAs(
+          new PrivilegedAction<Mapper<Text, CopyListingFileStatus, Text, Text>.Context>() {
+            @Override
+            public Mapper<Text, CopyListingFileStatus, Text, Text>.Context run() {
+              try {
+                StubContext stubContext = new StubContext(getConfiguration(), null, 0);
+                return stubContext.getContext();
+              } catch (Exception e) {
+                LOG.error("Exception encountered ", e);
+                throw new RuntimeException(e);
+              }
+            }
+          });
 
       touchFile(SOURCE_PATH + "/src/file");
       mkdirs(TARGET_PATH);
@@ -481,7 +486,8 @@ public class TestCopyMapper {
           try {
             copyMapper.setup(context);
             copyMapper.map(new Text("/src/file"),
-                tmpFS.getFileStatus(new Path(SOURCE_PATH + "/src/file")),
+                new CopyListingFileStatus(tmpFS.getFileStatus(
+                  new Path(SOURCE_PATH + "/src/file"))),
                 context);
           } catch (Exception e) {
             throw new RuntimeException(e);
@@ -518,9 +524,11 @@ public class TestCopyMapper {
         }
       });
 
-      final Mapper<Text, FileStatus, Text, Text>.Context context = stubContext.getContext();
+      final Mapper<Text, CopyListingFileStatus, Text, Text>.Context context =
+        stubContext.getContext();
       EnumSet<DistCpOptions.FileAttribute> preserveStatus =
           EnumSet.allOf(DistCpOptions.FileAttribute.class);
+      preserveStatus.remove(DistCpOptions.FileAttribute.ACL);
 
       context.getConfiguration().set(DistCpConstants.CONF_LABEL_PRESERVE_STATUS,
         DistCpUtils.packAttributes(preserveStatus));
@@ -551,7 +559,8 @@ public class TestCopyMapper {
           try {
             copyMapper.setup(context);
             copyMapper.map(new Text("/src/file"),
-                tmpFS.getFileStatus(new Path(SOURCE_PATH + "/src/file")),
+                new CopyListingFileStatus(tmpFS.getFileStatus(
+                  new Path(SOURCE_PATH + "/src/file"))),
                 context);
             Assert.assertEquals(stubContext.getWriter().values().size(), 1);
             Assert.assertTrue(stubContext.getWriter().values().get(0).toString().startsWith("SKIP"));
@@ -594,8 +603,9 @@ public class TestCopyMapper {
 
       EnumSet<DistCpOptions.FileAttribute> preserveStatus =
           EnumSet.allOf(DistCpOptions.FileAttribute.class);
+      preserveStatus.remove(DistCpOptions.FileAttribute.ACL);
 
-      final Mapper<Text, FileStatus, Text, Text>.Context context
+      final Mapper<Text, CopyListingFileStatus, Text, Text>.Context context
               = stubContext.getContext();
 
       context.getConfiguration().set(DistCpConstants.CONF_LABEL_PRESERVE_STATUS,
@@ -629,7 +639,8 @@ public class TestCopyMapper {
           try {
             copyMapper.setup(context);
             copyMapper.map(new Text("/src/file"),
-                tmpFS.getFileStatus(new Path(SOURCE_PATH + "/src/file")),
+                new CopyListingFileStatus(tmpFS.getFileStatus(
+                  new Path(SOURCE_PATH + "/src/file"))),
                 context);
             Assert.fail("Didn't expect the file to be copied");
           } catch (AccessControlException ignore) {
@@ -661,7 +672,7 @@ public class TestCopyMapper {
       FileSystem fs = cluster.getFileSystem();
       CopyMapper copyMapper = new CopyMapper();
       StubContext stubContext = new StubContext(getConfiguration(), null, 0);
-      Mapper<Text, FileStatus, Text, Text>.Context context
+      Mapper<Text, CopyListingFileStatus, Text, Text>.Context context
               = stubContext.getContext();
 
       touchFile(SOURCE_PATH + "/src/file");
@@ -669,7 +680,8 @@ public class TestCopyMapper {
       try {
         copyMapper.setup(context);
         copyMapper.map(new Text("/src/file"),
-            fs.getFileStatus(new Path(SOURCE_PATH + "/src/file")),
+            new CopyListingFileStatus(fs.getFileStatus(
+              new Path(SOURCE_PATH + "/src/file"))),
             context);
       } catch (IOException e) {
         Assert.assertTrue(e.getMessage().startsWith("Can't replace"));
@@ -688,7 +700,7 @@ public class TestCopyMapper {
       FileSystem fs = cluster.getFileSystem();
       CopyMapper copyMapper = new CopyMapper();
       StubContext stubContext = new StubContext(getConfiguration(), null, 0);
-      Mapper<Text, FileStatus, Text, Text>.Context context
+      Mapper<Text, CopyListingFileStatus, Text, Text>.Context context
               = stubContext.getContext();
 
       Configuration configuration = context.getConfiguration();
@@ -705,7 +717,7 @@ public class TestCopyMapper {
         if (!fileStatus.isDirectory()) {
           fs.delete(path, true);
           copyMapper.map(new Text(DistCpUtils.getRelativePath(new Path(SOURCE_PATH), path)),
-                  fileStatus, context);
+                  new CopyListingFileStatus(fileStatus), context);
         }
       }
       if (ignoreFailures) {
@@ -745,7 +757,7 @@ public class TestCopyMapper {
       FileSystem fs = cluster.getFileSystem();
       CopyMapper copyMapper = new CopyMapper();
       StubContext stubContext = new StubContext(getConfiguration(), null, 0);
-      Mapper<Text, FileStatus, Text, Text>.Context context
+      Mapper<Text, CopyListingFileStatus, Text, Text>.Context context
           = stubContext.getContext();
 
       Configuration configuration = context.getConfiguration();
@@ -759,7 +771,7 @@ public class TestCopyMapper {
       for (Path path : pathList) {
         final FileStatus fileStatus = fs.getFileStatus(path);
         copyMapper.map(new Text(DistCpUtils.getRelativePath(new Path(SOURCE_PATH), path)),
-            fileStatus, context);
+            new CopyListingFileStatus(fileStatus), context);
       }
 
       Assert.fail("Copy should have failed because of block-size difference.");
@@ -780,7 +792,7 @@ public class TestCopyMapper {
       FileSystem fs = cluster.getFileSystem();
       CopyMapper copyMapper = new CopyMapper();
       StubContext stubContext = new StubContext(getConfiguration(), null, 0);
-      Mapper<Text, FileStatus, Text, Text>.Context context
+      Mapper<Text, CopyListingFileStatus, Text, Text>.Context context
               = stubContext.getContext();
 
       Configuration configuration = context.getConfiguration();
@@ -798,7 +810,7 @@ public class TestCopyMapper {
       for (Path path : pathList) {
         final FileStatus fileStatus = fs.getFileStatus(path);
         copyMapper.map(new Text(DistCpUtils.getRelativePath(new Path(SOURCE_PATH), path)),
-                fileStatus, context);
+                new CopyListingFileStatus(fileStatus), context);
       }
 
       // Check that the block-size/replication aren't preserved.
@@ -855,7 +867,7 @@ public class TestCopyMapper {
       FileSystem fs = cluster.getFileSystem();
       CopyMapper copyMapper = new CopyMapper();
       StubContext stubContext = new StubContext(getConfiguration(), null, 0);
-      Mapper<Text, FileStatus, Text, Text>.Context context
+      Mapper<Text, CopyListingFileStatus, Text, Text>.Context context
               = stubContext.getContext();
 
       context.getConfiguration().set(
@@ -863,7 +875,8 @@ public class TestCopyMapper {
               targetFilePath.getParent().toString()); // Parent directory.
       copyMapper.setup(context);
 
-      final FileStatus sourceFileStatus = fs.getFileStatus(sourceFilePath);
+      final CopyListingFileStatus sourceFileStatus = new CopyListingFileStatus(
+        fs.getFileStatus(sourceFilePath));
 
       long before = fs.getFileStatus(targetFilePath).getModificationTime();
       copyMapper.map(new Text(DistCpUtils.getRelativePath(
@@ -907,7 +920,7 @@ public class TestCopyMapper {
       FileSystem fs = cluster.getFileSystem();
       CopyMapper copyMapper = new CopyMapper();
       StubContext stubContext = new StubContext(getConfiguration(), null, 0);
-      Mapper<Text, FileStatus, Text, Text>.Context context
+      Mapper<Text, CopyListingFileStatus, Text, Text>.Context context
               = stubContext.getContext();
 
       Configuration configuration = context.getConfiguration();
@@ -926,7 +939,7 @@ public class TestCopyMapper {
       for (Path path : pathList) {
         final FileStatus fileStatus = fs.getFileStatus(path);
         copyMapper.map(new Text(DistCpUtils.getRelativePath(new Path(SOURCE_PATH), path)),
-                fileStatus, context);
+                new CopyListingFileStatus(fileStatus), context);
       }
 
       // Check that the user/group attributes are preserved
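
Two edits recur throughout the TestCopyMapper hunks above: the raw FileStatus returned by the FileSystem is now wrapped in a CopyListingFileStatus before being handed to CopyMapper.map(), and the EnumSet of preserved attributes drops ACL (the diff only shows the removal; presumably the mini cluster in these cases is not set up for ACLs). A minimal sketch of both patterns, assuming a FileSystem handle fs and a Configuration conf; the class and method names are hypothetical, while the DistCp calls are the ones used in the hunks:

    import java.io.IOException;
    import java.util.EnumSet;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.tools.CopyListingFileStatus;
    import org.apache.hadoop.tools.DistCpConstants;
    import org.apache.hadoop.tools.DistCpOptions;
    import org.apache.hadoop.tools.util.DistCpUtils;

    class CopyMapperTestPatterns {
      // Preserve every file attribute except ACL, packed into the job
      // configuration the same way the updated tests do.
      static void preserveAllButAcl(Configuration conf) {
        EnumSet<DistCpOptions.FileAttribute> preserveStatus =
            EnumSet.allOf(DistCpOptions.FileAttribute.class);
        preserveStatus.remove(DistCpOptions.FileAttribute.ACL);
        conf.set(DistCpConstants.CONF_LABEL_PRESERVE_STATUS,
            DistCpUtils.packAttributes(preserveStatus));
      }

      // Wrap a source path's FileStatus in a CopyListingFileStatus, the value
      // type CopyMapper.map() now expects.
      static CopyListingFileStatus listingStatusOf(FileSystem fs, Path source)
          throws IOException {
        return new CopyListingFileStatus(fs.getFileStatus(source));
      }
    }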

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java Fri May 16 18:32:15 2014
@@ -30,6 +30,7 @@ import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.task.JobContextImpl;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.apache.hadoop.tools.CopyListing;
+import org.apache.hadoop.tools.CopyListingFileStatus;
 import org.apache.hadoop.tools.DistCpOptions;
 import org.apache.hadoop.tools.StubContext;
 import org.apache.hadoop.security.Credentials;
@@ -122,8 +123,8 @@ public class TestUniformSizeInputFormat 
     for (int i=0; i<splits.size(); ++i) {
       InputSplit split = splits.get(i);
       int currentSplitSize = 0;
-      RecordReader<Text, FileStatus> recordReader = uniformSizeInputFormat.createRecordReader(
-              split, null);
+      RecordReader<Text, CopyListingFileStatus> recordReader =
+        uniformSizeInputFormat.createRecordReader(split, null);
       StubContext stubContext = new StubContext(jobContext.getConfiguration(),
                                                 recordReader, 0);
       final TaskAttemptContext taskAttemptContext
@@ -168,7 +169,7 @@ public class TestUniformSizeInputFormat 
 
     try {
       reader.seek(lastEnd);
-      FileStatus srcFileStatus = new FileStatus();
+      CopyListingFileStatus srcFileStatus = new CopyListingFileStatus();
       Text srcRelPath = new Text();
       Assert.assertFalse(reader.next(srcRelPath, srcFileStatus));
     } finally {
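
The TestUniformSizeInputFormat changes above retype both the record reader and the sentinel read at the end of the listing: the value class stored in the copy-listing SequenceFile is now CopyListingFileStatus. A minimal sketch of scanning such a listing; the class name, method name, and listingPath argument are hypothetical, and only the Text/CopyListingFileStatus record layout comes from the diff:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.tools.CopyListingFileStatus;

    class ListingScan {
      static void printListing(Configuration conf, Path listingPath) throws Exception {
        SequenceFile.Reader reader =
            new SequenceFile.Reader(conf, SequenceFile.Reader.file(listingPath));
        try {
          Text relPath = new Text();
          CopyListingFileStatus status = new CopyListingFileStatus();
          // Each record maps a source-relative path to its listing status.
          while (reader.next(relPath, status)) {
            System.out.println(relPath + " -> " + status.getPath());
          }
        } finally {
          reader.close();
        }
      }
    }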

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/lib/TestDynamicInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/lib/TestDynamicInputFormat.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/lib/TestDynamicInputFormat.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/lib/TestDynamicInputFormat.java Fri May 16 18:32:15 2014
@@ -25,13 +25,13 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.*;
 import org.apache.hadoop.mapreduce.task.JobContextImpl;
 import org.apache.hadoop.tools.CopyListing;
+import org.apache.hadoop.tools.CopyListingFileStatus;
 import org.apache.hadoop.tools.DistCpOptions;
 import org.apache.hadoop.tools.StubContext;
 import org.apache.hadoop.security.Credentials;
@@ -118,15 +118,15 @@ public class TestDynamicInputFormat {
                     +"/tmp/testDynInputFormat/fileList.seq"), options);
 
     JobContext jobContext = new JobContextImpl(configuration, new JobID());
-    DynamicInputFormat<Text, FileStatus> inputFormat =
-        new DynamicInputFormat<Text, FileStatus>();
+    DynamicInputFormat<Text, CopyListingFileStatus> inputFormat =
+        new DynamicInputFormat<Text, CopyListingFileStatus>();
     List<InputSplit> splits = inputFormat.getSplits(jobContext);
 
     int nFiles = 0;
     int taskId = 0;
 
     for (InputSplit split : splits) {
-      RecordReader<Text, FileStatus> recordReader =
+      RecordReader<Text, CopyListingFileStatus> recordReader =
            inputFormat.createRecordReader(split, null);
       StubContext stubContext = new StubContext(jobContext.getConfiguration(),
                                                 recordReader, taskId);
@@ -136,7 +136,7 @@ public class TestDynamicInputFormat {
       recordReader.initialize(splits.get(0), taskAttemptContext);
       float previousProgressValue = 0f;
       while (recordReader.nextKeyValue()) {
-        FileStatus fileStatus = recordReader.getCurrentValue();
+        CopyListingFileStatus fileStatus = recordReader.getCurrentValue();
         String source = fileStatus.getPath().toString();
         System.out.println(source);
         Assert.assertTrue(expectedFilePaths.contains(source));

Modified: hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java?rev=1595290&r1=1595289&r2=1595290&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java (original)
+++ hadoop/common/branches/branch-2/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java Fri May 16 18:32:15 2014
@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.tools.CopyListingFileStatus;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.junit.Assert;
@@ -106,7 +107,8 @@ public class TestDistCpUtils {
       Path src = new Path("/tmp/src");
       fs.mkdirs(path);
       fs.mkdirs(src);
-      FileStatus srcStatus = fs.getFileStatus(src);
+      CopyListingFileStatus srcStatus = new CopyListingFileStatus(
+        fs.getFileStatus(src));
 
       FsPermission noPerm = new FsPermission((short) 0);
       fs.setPermission(path, noPerm);
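
Across these tests the listing value type has to ride through SequenceFiles, which works because CopyListingFileStatus is Writable (the TestUniformSizeInputFormat hunk above reads one back with reader.next). A self-contained round-trip sketch; the class and method names are hypothetical:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.tools.CopyListingFileStatus;

    class ListingStatusRoundTrip {
      // Serialize a CopyListingFileStatus and read it back, exercising the
      // Writable contract the copy listing relies on.
      static CopyListingFileStatus roundTrip(CopyListingFileStatus in) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        in.write(new DataOutputStream(bytes));
        CopyListingFileStatus back = new CopyListingFileStatus();
        back.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        return back;
      }
    }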


