hadoop-common-commits mailing list archives

From: inigo...@apache.org
Subject: [16/50] [abbrv] hadoop git commit: HADOOP-14267. Make DistCpOptions immutable. Contributed by Mingliang Liu
Date: Fri, 07 Apr 2017 01:59:12 GMT
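This commit replaces the mutable DistCpOptions (configured through setters) with an
immutable DistCpOptions constructed via DistCpOptions.Builder, and threads the
per-run mutable state through a new DistCpContext. A minimal usage sketch of the
migration, not part of the commit itself, assuming only the builder methods and the
DistCpContext(DistCpOptions) constructor that appear in the diff below:

    // Before HADOOP-14267: options were mutable and passed around directly.
    DistCpOptions oldStyle = new DistCpOptions(srcPaths, target);
    oldStyle.setSyncFolder(true);
    listing.buildListing(listingFile, oldStyle);

    // After HADOOP-14267: options are built once and are immutable; mutable
    // run-time state (source path rewrites, the target-path-exists flag) moves
    // into DistCpContext, which CopyListing and DistCpSync now consume.
    DistCpOptions options = new DistCpOptions.Builder(srcPaths, target)
        .withSyncFolder(true)
        .build();
    DistCpContext context = new DistCpContext(options);
    listing.buildListing(listingFile, context);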
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1543f9c/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
index af91347..8111b04 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
@@ -123,10 +123,10 @@ public class SimpleCopyListing extends CopyListing {
   }
 
   @Override
-  protected void validatePaths(DistCpOptions options)
+  protected void validatePaths(DistCpContext context)
       throws IOException, InvalidInputException {
 
-    Path targetPath = options.getTargetPath();
+    Path targetPath = context.getTargetPath();
     FileSystem targetFS = targetPath.getFileSystem(getConf());
     boolean targetExists = false;
     boolean targetIsFile = false;
@@ -142,12 +142,12 @@ public class SimpleCopyListing extends CopyListing {
 
     //If target is a file, then source has to be single file
     if (targetIsFile) {
-      if (options.getSourcePaths().size() > 1) {
+      if (context.getSourcePaths().size() > 1) {
         throw new InvalidInputException("Multiple source being copied to a file: " +
             targetPath);
       }
 
-      Path srcPath = options.getSourcePaths().get(0);
+      Path srcPath = context.getSourcePaths().get(0);
       FileSystem sourceFS = srcPath.getFileSystem(getConf());
       if (!sourceFS.isFile(srcPath)) {
         throw new InvalidInputException("Cannot copy " + srcPath +
@@ -155,12 +155,12 @@ public class SimpleCopyListing extends CopyListing {
       }
     }
 
-    if (options.shouldAtomicCommit() && targetExists) {
+    if (context.shouldAtomicCommit() && targetExists) {
       throw new InvalidInputException("Target path for atomic-commit already exists: " +
         targetPath + ". Cannot atomic-commit to pre-existing target-path.");
     }
 
-    for (Path path: options.getSourcePaths()) {
+    for (Path path: context.getSourcePaths()) {
       FileSystem fs = path.getFileSystem(getConf());
       if (!fs.exists(path)) {
         throw new InvalidInputException(path + " doesn't exist");
@@ -184,7 +184,7 @@ public class SimpleCopyListing extends CopyListing {
     }
 
     if (targetIsReservedRaw) {
-      options.preserveRawXattrs();
+      context.setPreserveRawXattrs(true);
       getConf().setBoolean(DistCpConstants.CONF_LABEL_PRESERVE_RAWXATTRS, true);
     }
 
@@ -194,18 +194,19 @@ public class SimpleCopyListing extends CopyListing {
      */
     Credentials credentials = getCredentials();
     if (credentials != null) {
-      Path[] inputPaths = options.getSourcePaths().toArray(new Path[1]);
+      Path[] inputPaths = context.getSourcePaths()
+          .toArray(new Path[1]);
       TokenCache.obtainTokensForNamenodes(credentials, inputPaths, getConf());
     }
   }
 
   @Override
   protected void doBuildListing(Path pathToListingFile,
-                                DistCpOptions options) throws IOException {
-    if(options.shouldUseSnapshotDiff()) {
-      doBuildListingWithSnapshotDiff(getWriter(pathToListingFile), options);
-    }else {
-      doBuildListing(getWriter(pathToListingFile), options);
+                                DistCpContext context) throws IOException {
+    if (context.shouldUseSnapshotDiff()) {
+      doBuildListingWithSnapshotDiff(getWriter(pathToListingFile), context);
+    } else {
+      doBuildListing(getWriter(pathToListingFile), context);
     }
   }
 
@@ -232,22 +233,22 @@ public class SimpleCopyListing extends CopyListing {
    * @throws IOException
    */
   private void addToFileListing(SequenceFile.Writer fileListWriter,
-      Path sourceRoot, Path path, DistCpOptions options) throws IOException {
+      Path sourceRoot, Path path, DistCpContext context) throws IOException {
     sourceRoot = getPathWithSchemeAndAuthority(sourceRoot);
     path = getPathWithSchemeAndAuthority(path);
     path = makeQualified(path);
 
     FileSystem sourceFS = sourceRoot.getFileSystem(getConf());
     FileStatus fileStatus = sourceFS.getFileStatus(path);
-    final boolean preserveAcls = options.shouldPreserve(FileAttribute.ACL);
-    final boolean preserveXAttrs = options.shouldPreserve(FileAttribute.XATTR);
-    final boolean preserveRawXAttrs = options.shouldPreserveRawXattrs();
+    final boolean preserveAcls = context.shouldPreserve(FileAttribute.ACL);
+    final boolean preserveXAttrs = context.shouldPreserve(FileAttribute.XATTR);
+    final boolean preserveRawXAttrs = context.shouldPreserveRawXattrs();
     LinkedList<CopyListingFileStatus> fileCopyListingStatus =
         DistCpUtils.toCopyListingFileStatus(sourceFS, fileStatus,
             preserveAcls, preserveXAttrs, preserveRawXAttrs,
-            options.getBlocksPerChunk());
+            context.getBlocksPerChunk());
     writeToFileListingRoot(fileListWriter, fileCopyListingStatus,
-        sourceRoot, options);
+        sourceRoot, context);
   }
 
   /**
@@ -258,14 +259,16 @@ public class SimpleCopyListing extends CopyListing {
    * {@link org.apache.hadoop.tools.DistCpSync#sync}. An item can be
    * created/modified and renamed, in which case, the target path is put
    * into the list.
+   * @param fileListWriter the list for holding processed results
+   * @param context The DistCp context with associated input options
    * @throws IOException
    */
   @VisibleForTesting
   protected void doBuildListingWithSnapshotDiff(
-      SequenceFile.Writer fileListWriter, DistCpOptions options)
+      SequenceFile.Writer fileListWriter, DistCpContext context)
       throws IOException {
     ArrayList<DiffInfo> diffList = distCpSync.prepareDiffListForCopyListing();
-    Path sourceRoot = options.getSourcePaths().get(0);
+    Path sourceRoot = context.getSourcePaths().get(0);
     FileSystem sourceFS = sourceRoot.getFileSystem(getConf());
 
     try {
@@ -273,13 +276,13 @@ public class SimpleCopyListing extends CopyListing {
       for (DiffInfo diff : diffList) {
         // add snapshot paths prefix
         diff.setTarget(
-            new Path(options.getSourcePaths().get(0), diff.getTarget()));
+            new Path(context.getSourcePaths().get(0), diff.getTarget()));
         if (diff.getType() == SnapshotDiffReport.DiffType.MODIFY) {
           addToFileListing(fileListWriter,
-              sourceRoot, diff.getTarget(), options);
+              sourceRoot, diff.getTarget(), context);
         } else if (diff.getType() == SnapshotDiffReport.DiffType.CREATE) {
           addToFileListing(fileListWriter,
-              sourceRoot, diff.getTarget(), options);
+              sourceRoot, diff.getTarget(), context);
 
           FileStatus sourceStatus = sourceFS.getFileStatus(diff.getTarget());
           if (sourceStatus.isDirectory()) {
@@ -290,13 +293,13 @@ public class SimpleCopyListing extends CopyListing {
 
             HashSet<String> excludeList =
                 distCpSync.getTraverseExcludeList(diff.getSource(),
-                    options.getSourcePaths().get(0));
+                    context.getSourcePaths().get(0));
 
             ArrayList<FileStatus> sourceDirs = new ArrayList<>();
             sourceDirs.add(sourceStatus);
 
             traverseDirectory(fileListWriter, sourceFS, sourceDirs,
-                sourceRoot, options, excludeList, fileStatuses);
+                sourceRoot, context, excludeList, fileStatuses);
           }
         }
       }
@@ -325,27 +328,30 @@ public class SimpleCopyListing extends CopyListing {
    * See computeSourceRootPath method for how the root path of the source is
    *     computed.
    * @param fileListWriter
-   * @param options
+   * @param context The distcp context with associated input options
    * @throws IOException
    */
   @VisibleForTesting
   protected void doBuildListing(SequenceFile.Writer fileListWriter,
-      DistCpOptions options) throws IOException {
-    if (options.getNumListstatusThreads() > 0) {
-      numListstatusThreads = options.getNumListstatusThreads();
+      DistCpContext context) throws IOException {
+    if (context.getNumListstatusThreads() > 0) {
+      numListstatusThreads = context.getNumListstatusThreads();
     }
 
     try {
       List<FileStatusInfo> statusList = Lists.newArrayList();
-      for (Path path: options.getSourcePaths()) {
+      for (Path path: context.getSourcePaths()) {
         FileSystem sourceFS = path.getFileSystem(getConf());
-        final boolean preserveAcls = options.shouldPreserve(FileAttribute.ACL);
-        final boolean preserveXAttrs = options.shouldPreserve(FileAttribute.XATTR);
-        final boolean preserveRawXAttrs = options.shouldPreserveRawXattrs();
+        final boolean preserveAcls =
+            context.shouldPreserve(FileAttribute.ACL);
+        final boolean preserveXAttrs =
+            context.shouldPreserve(FileAttribute.XATTR);
+        final boolean preserveRawXAttrs =
+            context.shouldPreserveRawXattrs();
         path = makeQualified(path);
 
         FileStatus rootStatus = sourceFS.getFileStatus(path);
-        Path sourcePathRoot = computeSourceRootPath(rootStatus, options);
+        Path sourcePathRoot = computeSourceRootPath(rootStatus, context);
 
         FileStatus[] sourceFiles = sourceFS.listStatus(path);
         boolean explore = (sourceFiles != null && sourceFiles.length > 0);
@@ -353,9 +359,9 @@ public class SimpleCopyListing extends CopyListing {
           LinkedList<CopyListingFileStatus> rootCopyListingStatus =
               DistCpUtils.toCopyListingFileStatus(sourceFS, rootStatus,
                   preserveAcls, preserveXAttrs, preserveRawXAttrs,
-                  options.getBlocksPerChunk());
+                  context.getBlocksPerChunk());
           writeToFileListingRoot(fileListWriter, rootCopyListingStatus,
-              sourcePathRoot, options);
+              sourcePathRoot, context);
         }
         if (explore) {
           ArrayList<FileStatus> sourceDirs = new ArrayList<FileStatus>();
@@ -368,7 +374,7 @@ public class SimpleCopyListing extends CopyListing {
                     preserveAcls && sourceStatus.isDirectory(),
                     preserveXAttrs && sourceStatus.isDirectory(),
                     preserveRawXAttrs && sourceStatus.isDirectory(),
-                    options.getBlocksPerChunk());
+                    context.getBlocksPerChunk());
             for (CopyListingFileStatus fs : sourceCopyListingStatus) {
               if (randomizeFileListing) {
                 addToFileListing(statusList,
@@ -385,7 +391,7 @@ public class SimpleCopyListing extends CopyListing {
             }
           }
           traverseDirectory(fileListWriter, sourceFS, sourceDirs,
-              sourcePathRoot, options, null, statusList);
+              sourcePathRoot, context, null, statusList);
         }
       }
       if (randomizeFileListing) {
@@ -447,13 +453,13 @@ public class SimpleCopyListing extends CopyListing {
   }
 
   private Path computeSourceRootPath(FileStatus sourceStatus,
-                                     DistCpOptions options) throws IOException {
+      DistCpContext context) throws IOException {
 
-    Path target = options.getTargetPath();
+    Path target = context.getTargetPath();
     FileSystem targetFS = target.getFileSystem(getConf());
-    final boolean targetPathExists = options.getTargetPathExists();
+    final boolean targetPathExists = context.isTargetPathExists();
 
-    boolean solitaryFile = options.getSourcePaths().size() == 1
+    boolean solitaryFile = context.getSourcePaths().size() == 1
                                                 && !sourceStatus.isDirectory();
 
     if (solitaryFile) {
@@ -463,8 +469,11 @@ public class SimpleCopyListing extends CopyListing {
         return sourceStatus.getPath().getParent();
       }
     } else {
-      boolean specialHandling = (options.getSourcePaths().size() == 1 && !targetPathExists) ||
-          options.shouldSyncFolder() || options.shouldOverwrite();
+      boolean specialHandling =
+          (context.getSourcePaths().size() == 1 &&
+              !targetPathExists) ||
+              context.shouldSyncFolder() ||
+              context.shouldOverwrite();
 
       if ((specialHandling && sourceStatus.isDirectory()) ||
           sourceStatus.getPath().isRoot()) {
@@ -610,13 +619,13 @@ public class SimpleCopyListing extends CopyListing {
                                  FileSystem sourceFS,
                                  ArrayList<FileStatus> sourceDirs,
                                  Path sourcePathRoot,
-                                 DistCpOptions options,
+                                 DistCpContext context,
                                  HashSet<String> excludeList,
                                  List<FileStatusInfo> fileStatuses)
                                  throws IOException {
-    final boolean preserveAcls = options.shouldPreserve(FileAttribute.ACL);
-    final boolean preserveXAttrs = options.shouldPreserve(FileAttribute.XATTR);
-    final boolean preserveRawXattrs = options.shouldPreserveRawXattrs();
+    final boolean preserveAcls = context.shouldPreserve(FileAttribute.ACL);
+    final boolean preserveXAttrs = context.shouldPreserve(FileAttribute.XATTR);
+    final boolean preserveRawXattrs = context.shouldPreserveRawXattrs();
 
     assert numListstatusThreads > 0;
     if (LOG.isDebugEnabled()) {
@@ -649,7 +658,7 @@ public class SimpleCopyListing extends CopyListing {
                 preserveAcls && child.isDirectory(),
                 preserveXAttrs && child.isDirectory(),
                 preserveRawXattrs && child.isDirectory(),
-                options.getBlocksPerChunk());
+                context.getBlocksPerChunk());
 
             for (CopyListingFileStatus fs : childCopyListingStatus) {
               if (randomizeFileListing) {
@@ -681,9 +690,9 @@ public class SimpleCopyListing extends CopyListing {
 
   private void writeToFileListingRoot(SequenceFile.Writer fileListWriter,
       LinkedList<CopyListingFileStatus> fileStatus, Path sourcePathRoot,
-      DistCpOptions options) throws IOException {
-    boolean syncOrOverwrite = options.shouldSyncFolder() ||
-        options.shouldOverwrite();
+      DistCpContext context) throws IOException {
+    boolean syncOrOverwrite = context.shouldSyncFolder() ||
+        context.shouldOverwrite();
     for (CopyListingFileStatus fs : fileStatus) {
       if (fs.getPath().equals(sourcePathRoot) &&
           fs.isDirectory() && syncOrOverwrite) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1543f9c/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
index 6ddaab9..81c2be7 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.tools.CopyListing;
 import org.apache.hadoop.tools.CopyListingFileStatus;
 import org.apache.hadoop.tools.DistCpConstants;
 import org.apache.hadoop.tools.DistCpOptionSwitch;
+import org.apache.hadoop.tools.DistCpContext;
 import org.apache.hadoop.tools.DistCpOptions;
 import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
 import org.apache.hadoop.tools.GlobbedCopyListing;
@@ -354,16 +355,18 @@ public class CopyCommitter extends FileOutputCommitter {
     Path resultNonePath = Path.getPathWithoutSchemeAndAuthority(targetFinalPath)
         .toString().startsWith(DistCpConstants.HDFS_RESERVED_RAW_DIRECTORY_NAME)
         ? DistCpConstants.RAW_NONE_PATH : DistCpConstants.NONE_PATH;
-    DistCpOptions options = new DistCpOptions(targets, resultNonePath);
     //
     // Set up options to be the same from the CopyListing.buildListing's perspective,
     // so to collect similar listings as when doing the copy
     //
-    options.setOverwrite(overwrite);
-    options.setSyncFolder(syncFolder);
-    options.setTargetPathExists(targetPathExists);
-    
-    target.buildListing(targetListing, options);
+    DistCpOptions options = new DistCpOptions.Builder(targets, resultNonePath)
+        .withOverwrite(overwrite)
+        .withSyncFolder(syncFolder)
+        .build();
+    DistCpContext distCpContext = new DistCpContext(options);
+    distCpContext.setTargetPathExists(targetPathExists);
+
+    target.buildListing(targetListing, distCpContext);
     Path sortedTargetListing = DistCpUtils.sortListing(clusterFS, conf, targetListing);
     long totalLen = clusterFS.getFileStatus(sortedTargetListing).getLen();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1543f9c/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
index e315b84..dbe750a 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
@@ -39,7 +39,7 @@ import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.tools.CopyListing.AclsNotSupportedException;
 import org.apache.hadoop.tools.CopyListing.XAttrsNotSupportedException;
 import org.apache.hadoop.tools.CopyListingFileStatus;
-import org.apache.hadoop.tools.DistCpOptions;
+import org.apache.hadoop.tools.DistCpContext;
 import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
 import org.apache.hadoop.tools.mapred.UniformSizeInputFormat;
 import org.apache.hadoop.util.StringUtils;
@@ -116,13 +116,13 @@ public class DistCpUtils {
    * a particular strategy from distcp-default.xml
    *
    * @param conf - Configuration object
-   * @param options - Handle to input options
+   * @param context - Distcp context with associated input options
    * @return Class implementing the strategy specified in options.
    */
   public static Class<? extends InputFormat> getStrategy(Configuration conf,
-      DistCpOptions options) {
+      DistCpContext context) {
     String confLabel = "distcp."
-        + StringUtils.toLowerCase(options.getCopyStrategy())
+        + StringUtils.toLowerCase(context.getCopyStrategy())
         + ".strategy" + ".impl";
     return conf.getClass(confLabel, UniformSizeInputFormat.class, InputFormat.class);
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1543f9c/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java
index ea63e23..97a6f62 100644
--- a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java
+++ b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestCopyListing.java
@@ -103,20 +103,19 @@ public class TestCopyListing extends SimpleCopyListing {
       List<Path> srcPaths = new ArrayList<Path>();
       srcPaths.add(new Path("/tmp/in/1"));
       srcPaths.add(new Path("/tmp/in/2"));
-      Path target = new Path("/tmp/out/1");
+      final Path target = new Path("/tmp/out/1");
       TestDistCpUtils.createFile(fs, "/tmp/in/1");
       TestDistCpUtils.createFile(fs, "/tmp/in/2");
       fs.mkdirs(target);
-      DistCpOptions options = new DistCpOptions(srcPaths, target);
-      validatePaths(options);
+      final DistCpOptions options = new DistCpOptions.Builder(srcPaths, target)
+          .build();
+      validatePaths(new DistCpContext(options));
       TestDistCpUtils.delete(fs, "/tmp");
       //No errors
 
-      target = new Path("/tmp/out/1");
       fs.create(target).close();
-      options = new DistCpOptions(srcPaths, target);
       try {
-        validatePaths(options);
+        validatePaths(new DistCpContext(options));
         Assert.fail("Invalid inputs accepted");
       } catch (InvalidInputException ignore) { }
       TestDistCpUtils.delete(fs, "/tmp");
@@ -124,11 +123,9 @@ public class TestCopyListing extends SimpleCopyListing {
       srcPaths.clear();
       srcPaths.add(new Path("/tmp/in/1"));
       fs.mkdirs(new Path("/tmp/in/1"));
-      target = new Path("/tmp/out/1");
       fs.create(target).close();
-      options = new DistCpOptions(srcPaths, target);
       try {
-        validatePaths(options);
+        validatePaths(new DistCpContext(options));
         Assert.fail("Invalid inputs accepted");
       } catch (InvalidInputException ignore) { }
       TestDistCpUtils.delete(fs, "/tmp");
@@ -151,10 +148,13 @@ public class TestCopyListing extends SimpleCopyListing {
       TestDistCpUtils.createFile(fs, "/tmp/in/src2/1.txt");
       Path target = new Path("/tmp/out");
       Path listingFile = new Path("/tmp/list");
-      DistCpOptions options = new DistCpOptions(srcPaths, target);
-      CopyListing listing = CopyListing.getCopyListing(getConf(), CREDENTIALS, options);
+      final DistCpOptions options = new DistCpOptions.Builder(srcPaths, target)
+          .build();
+      final DistCpContext context = new DistCpContext(options);
+      CopyListing listing = CopyListing.getCopyListing(getConf(), CREDENTIALS,
+          context);
       try {
-        listing.buildListing(listingFile, options);
+        listing.buildListing(listingFile, context);
         Assert.fail("Duplicates not detected");
       } catch (DuplicateFileException ignore) {
       }
@@ -196,11 +196,12 @@ public class TestCopyListing extends SimpleCopyListing {
 
       Path listingFile = new Path("/tmp/file");
 
-      DistCpOptions options = new DistCpOptions(srcPaths, target);
-      options.setSyncFolder(true);
+      final DistCpOptions options = new DistCpOptions.Builder(srcPaths, target)
+          .withSyncFolder(true)
+          .build();
       CopyListing listing = new SimpleCopyListing(getConf(), CREDENTIALS);
       try {
-        listing.buildListing(listingFile, options);
+        listing.buildListing(listingFile, new DistCpContext(options));
         Assert.fail("Duplicates not detected");
       } catch (DuplicateFileException ignore) {
       }
@@ -209,7 +210,7 @@ public class TestCopyListing extends SimpleCopyListing {
       TestDistCpUtils.delete(fs, "/tmp");
 
       try {
-        listing.buildListing(listingFile, options);
+        listing.buildListing(listingFile, new DistCpContext(options));
         Assert.fail("Invalid input not detected");
       } catch (InvalidInputException ignore) {
       }
@@ -244,14 +245,14 @@ public class TestCopyListing extends SimpleCopyListing {
       }
 
       Path listingFile = new Path("/tmp/file");
-      DistCpOptions options = new DistCpOptions(srcPaths, target);
-      options.setSyncFolder(true);
+      final DistCpOptions options = new DistCpOptions.Builder(srcPaths, target)
+          .withSyncFolder(true).build();
 
       // Check without randomizing files
       getConf().setBoolean(
           DistCpConstants.CONF_LABEL_SIMPLE_LISTING_RANDOMIZE_FILES, false);
       SimpleCopyListing listing = new SimpleCopyListing(getConf(), CREDENTIALS);
-      listing.buildListing(listingFile, options);
+      listing.buildListing(listingFile, new DistCpContext(options));
 
       Assert.assertEquals(listing.getNumberOfPaths(), pathCount);
       validateFinalListing(listingFile, srcFiles);
@@ -265,7 +266,7 @@ public class TestCopyListing extends SimpleCopyListing {
       // Set the seed for randomness, so that it can be verified later
       long seed = System.nanoTime();
       listing.setSeedForRandomListing(seed);
-      listing.buildListing(listingFile, options);
+      listing.buildListing(listingFile, new DistCpContext(options));
       Assert.assertEquals(listing.getNumberOfPaths(), pathCount);
 
       // validate randomness
@@ -322,11 +323,12 @@ public class TestCopyListing extends SimpleCopyListing {
       List<Path> srcPaths = new ArrayList<Path>();
       srcPaths.add(sourceFile);
 
-      DistCpOptions options = new DistCpOptions(srcPaths, targetFile);
+      DistCpOptions options = new DistCpOptions.Builder(srcPaths, targetFile)
+          .build();
       CopyListing listing = new SimpleCopyListing(getConf(), CREDENTIALS);
 
       final Path listFile = new Path(testRoot, "/tmp/fileList.seq");
-      listing.buildListing(listFile, options);
+      listing.buildListing(listFile, new DistCpContext(options));
 
       reader = new SequenceFile.Reader(getConf(), SequenceFile.Reader.file(listFile));
 
@@ -359,10 +361,11 @@ public class TestCopyListing extends SimpleCopyListing {
     doThrow(expectedEx).when(writer).close();
     
     SimpleCopyListing listing = new SimpleCopyListing(getConf(), CREDENTIALS);
-    DistCpOptions options = new DistCpOptions(srcs, new Path(outFile.toURI()));
+    final DistCpOptions options = new DistCpOptions.Builder(srcs,
+        new Path(outFile.toURI())).build();
     Exception actualEx = null;
     try {
-      listing.doBuildListing(writer, options);
+      listing.doBuildListing(writer, new DistCpContext(options));
     } catch (Exception e) {
       actualEx = e;
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1543f9c/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpOptions.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpOptions.java b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpOptions.java
new file mode 100644
index 0000000..3525194
--- /dev/null
+++ b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpOptions.java
@@ -0,0 +1,500 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.tools;
+
+import java.util.Collections;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
+
+import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains;
+import static org.apache.hadoop.tools.DistCpOptions.MAX_NUM_LISTSTATUS_THREADS;
+import static org.junit.Assert.fail;
+
+/**
+ * This is to test constructing {@link DistCpOptions} manually with setters.
+ *
+ * The test cases in this class is very similar to the parser test, see
+ * {@link TestOptionsParser}.
+ */
+public class TestDistCpOptions {
+
+  private static final float DELTA = 0.001f;
+
+  @Test
+  public void testSetIgnoreFailure() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+        new Path("hdfs://localhost:8020/target/"));
+    Assert.assertFalse(builder.build().shouldIgnoreFailures());
+
+    builder.withIgnoreFailures(true);
+    Assert.assertTrue(builder.build().shouldIgnoreFailures());
+  }
+
+  @Test
+  public void testSetOverwrite() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+        new Path("hdfs://localhost:8020/target/"));
+    Assert.assertFalse(builder.build().shouldOverwrite());
+
+    builder.withOverwrite(true);
+    Assert.assertTrue(builder.build().shouldOverwrite());
+
+    try {
+      builder.withSyncFolder(true).build();
+      Assert.fail("Update and overwrite aren't allowed together");
+    } catch (IllegalArgumentException ignore) {
+    }
+  }
+
+  @Test
+  public void testLogPath() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+        new Path("hdfs://localhost:8020/target/"));
+    Assert.assertNull(builder.build().getLogPath());
+
+    final Path logPath = new Path("hdfs://localhost:8020/logs");
+    builder.withLogPath(logPath);
+    Assert.assertEquals(logPath, builder.build().getLogPath());
+  }
+
+  @Test
+  public void testSetBlokcing() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+        new Path("hdfs://localhost:8020/target/"));
+    Assert.assertTrue(builder.build().shouldBlock());
+
+    builder.withBlocking(false);
+    Assert.assertFalse(builder.build().shouldBlock());
+  }
+
+  @Test
+  public void testSetBandwidth() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+        new Path("hdfs://localhost:8020/target/"));
+    Assert.assertEquals(0, builder.build().getMapBandwidth(), DELTA);
+
+    builder.withMapBandwidth(11);
+    Assert.assertEquals(11, builder.build().getMapBandwidth(), DELTA);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testSetNonPositiveBandwidth() {
+    new DistCpOptions.Builder(
+        Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+        new Path("hdfs://localhost:8020/target/"))
+        .withMapBandwidth(-11)
+        .build();
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testSetZeroBandwidth() {
+    new DistCpOptions.Builder(
+        Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+        new Path("hdfs://localhost:8020/target/"))
+        .withMapBandwidth(0)
+        .build();
+  }
+
+  @Test
+  public void testSetSkipCRC() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+        new Path("hdfs://localhost:8020/target/"));
+    Assert.assertFalse(builder.build().shouldSkipCRC());
+
+    final DistCpOptions options = builder.withSyncFolder(true).withCRC(true)
+        .build();
+    Assert.assertTrue(options.shouldSyncFolder());
+    Assert.assertTrue(options.shouldSkipCRC());
+  }
+
+  @Test
+  public void testSetAtomicCommit() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+        new Path("hdfs://localhost:8020/target/"));
+    Assert.assertFalse(builder.build().shouldAtomicCommit());
+
+    builder.withAtomicCommit(true);
+    Assert.assertTrue(builder.build().shouldAtomicCommit());
+
+    try {
+      builder.withSyncFolder(true).build();
+      Assert.fail("Atomic and sync folders were mutually exclusive");
+    } catch (IllegalArgumentException ignore) {
+    }
+  }
+
+  @Test
+  public void testSetWorkPath() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+        new Path("hdfs://localhost:8020/target/"));
+    Assert.assertNull(builder.build().getAtomicWorkPath());
+
+    builder.withAtomicCommit(true);
+    Assert.assertNull(builder.build().getAtomicWorkPath());
+
+    final Path workPath = new Path("hdfs://localhost:8020/work");
+    builder.withAtomicWorkPath(workPath);
+    Assert.assertEquals(workPath, builder.build().getAtomicWorkPath());
+  }
+
+  @Test
+  public void testSetSyncFolders() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+        new Path("hdfs://localhost:8020/target/"));
+    Assert.assertFalse(builder.build().shouldSyncFolder());
+
+    builder.withSyncFolder(true);
+    Assert.assertTrue(builder.build().shouldSyncFolder());
+  }
+
+  @Test
+  public void testSetDeleteMissing() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+        new Path("hdfs://localhost:8020/target/"));
+    Assert.assertFalse(builder.build().shouldDeleteMissing());
+
+    DistCpOptions options = builder.withSyncFolder(true)
+        .withDeleteMissing(true)
+        .build();
+    Assert.assertTrue(options.shouldSyncFolder());
+    Assert.assertTrue(options.shouldDeleteMissing());
+
+    options = new DistCpOptions.Builder(
+        Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+        new Path("hdfs://localhost:8020/target/"))
+        .withOverwrite(true)
+        .withDeleteMissing(true)
+        .build();
+    Assert.assertTrue(options.shouldOverwrite());
+    Assert.assertTrue(options.shouldDeleteMissing());
+
+    try {
+      new DistCpOptions.Builder(
+          Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+          new Path("hdfs://localhost:8020/target/"))
+          .withDeleteMissing(true)
+          .build();
+      fail("Delete missing should fail without update or overwrite options");
+    } catch (IllegalArgumentException e) {
+      assertExceptionContains("Delete missing is applicable only with update " +
+          "or overwrite options", e);
+    }
+    try {
+      new DistCpOptions.Builder(
+          new Path("hdfs://localhost:8020/source/first"),
+          new Path("hdfs://localhost:8020/target/"))
+          .withSyncFolder(true)
+          .withDeleteMissing(true)
+          .withUseDiff("s1", "s2")
+          .build();
+      fail("Should have failed as -delete and -diff are mutually exclusive.");
+    } catch (IllegalArgumentException e) {
+      assertExceptionContains(
+          "-delete and -diff/-rdiff are mutually exclusive.", e);
+    }
+  }
+
+  @Test
+  public void testSetMaps() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+        new Path("hdfs://localhost:8020/target/"));
+    Assert.assertEquals(DistCpConstants.DEFAULT_MAPS,
+        builder.build().getMaxMaps());
+
+    builder.maxMaps(1);
+    Assert.assertEquals(1, builder.build().getMaxMaps());
+
+    builder.maxMaps(0);
+    Assert.assertEquals(1, builder.build().getMaxMaps());
+  }
+
+  @Test
+  public void testSetNumListtatusThreads() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        new Path("hdfs://localhost:8020/source/first"),
+        new Path("hdfs://localhost:8020/target/"));
+    // If command line argument isn't set, we expect .getNumListstatusThreads
+    // option to be zero (so that we know when to override conf properties).
+    Assert.assertEquals(0, builder.build().getNumListstatusThreads());
+
+    builder.withNumListstatusThreads(12);
+    Assert.assertEquals(12, builder.build().getNumListstatusThreads());
+
+    builder.withNumListstatusThreads(0);
+    Assert.assertEquals(0, builder.build().getNumListstatusThreads());
+
+    // Ignore large number of threads.
+    builder.withNumListstatusThreads(MAX_NUM_LISTSTATUS_THREADS * 2);
+    Assert.assertEquals(MAX_NUM_LISTSTATUS_THREADS,
+        builder.build().getNumListstatusThreads());
+  }
+
+  @Test
+  public void testSourceListing() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        new Path("hdfs://localhost:8020/source/first"),
+        new Path("hdfs://localhost:8020/target/"));
+    Assert.assertEquals(new Path("hdfs://localhost:8020/source/first"),
+        builder.build().getSourceFileListing());
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testMissingTarget() {
+    new DistCpOptions.Builder(new Path("hdfs://localhost:8020/source/first"),
+        null);
+  }
+
+  @Test
+  public void testToString() {
+    DistCpOptions option = new DistCpOptions.Builder(new Path("abc"),
+        new Path("xyz")).build();
+    String val = "DistCpOptions{atomicCommit=false, syncFolder=false, " +
+        "deleteMissing=false, ignoreFailures=false, overwrite=false, " +
+        "append=false, useDiff=false, useRdiff=false, " +
+        "fromSnapshot=null, toSnapshot=null, " +
+        "skipCRC=false, blocking=true, numListstatusThreads=0, maxMaps=20, " +
+        "mapBandwidth=0.0, copyStrategy='uniformsize', preserveStatus=[], " +
+        "atomicWorkPath=null, logPath=null, sourceFileListing=abc, " +
+        "sourcePaths=null, targetPath=xyz, filtersFile='null'," +
+        " blocksPerChunk=0}";
+    String optionString = option.toString();
+    Assert.assertEquals(val, optionString);
+    Assert.assertNotSame(DistCpOptionSwitch.ATOMIC_COMMIT.toString(),
+        DistCpOptionSwitch.ATOMIC_COMMIT.name());
+  }
+
+  @Test
+  public void testCopyStrategy() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        new Path("hdfs://localhost:8020/source/first"),
+        new Path("hdfs://localhost:8020/target/"));
+    Assert.assertEquals(DistCpConstants.UNIFORMSIZE,
+        builder.build().getCopyStrategy());
+    builder.withCopyStrategy("dynamic");
+    Assert.assertEquals("dynamic", builder.build().getCopyStrategy());
+  }
+
+  @Test
+  public void testTargetPath() {
+    final DistCpOptions options = new DistCpOptions.Builder(
+        new Path("hdfs://localhost:8020/source/first"),
+        new Path("hdfs://localhost:8020/target/")).build();
+    Assert.assertEquals(new Path("hdfs://localhost:8020/target/"),
+        options.getTargetPath());
+  }
+
+  @Test
+  public void testPreserve() {
+    DistCpOptions options = new DistCpOptions.Builder(
+        new Path("hdfs://localhost:8020/source/first"),
+        new Path("hdfs://localhost:8020/target/"))
+        .build();
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.BLOCKSIZE));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.REPLICATION));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.PERMISSION));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.USER));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.GROUP));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.CHECKSUMTYPE));
+
+    options = new DistCpOptions.Builder(
+        new Path("hdfs://localhost:8020/source/first"),
+        new Path("hdfs://localhost:8020/target/"))
+        .preserve(FileAttribute.ACL)
+        .build();
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.BLOCKSIZE));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.REPLICATION));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.PERMISSION));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.USER));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.GROUP));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.CHECKSUMTYPE));
+    Assert.assertTrue(options.shouldPreserve(FileAttribute.ACL));
+
+    options = new DistCpOptions.Builder(
+        new Path("hdfs://localhost:8020/source/first"),
+        new Path("hdfs://localhost:8020/target/"))
+        .preserve(FileAttribute.BLOCKSIZE)
+        .preserve(FileAttribute.REPLICATION)
+        .preserve(FileAttribute.PERMISSION)
+        .preserve(FileAttribute.USER)
+        .preserve(FileAttribute.GROUP)
+        .preserve(FileAttribute.CHECKSUMTYPE)
+        .build();
+
+    Assert.assertTrue(options.shouldPreserve(FileAttribute.BLOCKSIZE));
+    Assert.assertTrue(options.shouldPreserve(FileAttribute.REPLICATION));
+    Assert.assertTrue(options.shouldPreserve(FileAttribute.PERMISSION));
+    Assert.assertTrue(options.shouldPreserve(FileAttribute.USER));
+    Assert.assertTrue(options.shouldPreserve(FileAttribute.GROUP));
+    Assert.assertTrue(options.shouldPreserve(FileAttribute.CHECKSUMTYPE));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.XATTR));
+  }
+
+  @Test
+  public void testAppendOption() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+        new Path("hdfs://localhost:8020/target/"))
+        .withSyncFolder(true)
+        .withAppend(true);
+    Assert.assertTrue(builder.build().shouldAppend());
+
+    try {
+      // make sure -append is only valid when -update is specified
+      new DistCpOptions.Builder(
+          Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+          new Path("hdfs://localhost:8020/target/"))
+          .withAppend(true)
+          .build();
+      fail("Append should fail if update option is not specified");
+    } catch (IllegalArgumentException e) {
+      assertExceptionContains(
+          "Append is valid only with update options", e);
+    }
+
+    try {
+      // make sure -append is invalid when skipCrc is specified
+      new DistCpOptions.Builder(
+          Collections.singletonList(new Path("hdfs://localhost:8020/source")),
+          new Path("hdfs://localhost:8020/target/"))
+          .withSyncFolder(true)
+          .withAppend(true)
+          .withCRC(true)
+          .build();
+      fail("Append should fail if skipCrc option is specified");
+    } catch (IllegalArgumentException e) {
+      assertExceptionContains(
+          "Append is disallowed when skipping CRC", e);
+    }
+  }
+
+  @Test
+  public void testDiffOption() {
+    DistCpOptions options = new DistCpOptions.Builder(
+        new Path("hdfs://localhost:8020/source/first"),
+        new Path("hdfs://localhost:8020/target/"))
+        .withSyncFolder(true)
+        .withUseDiff("s1", "s2")
+        .build();
+    Assert.assertTrue(options.shouldUseDiff());
+    Assert.assertEquals("s1", options.getFromSnapshot());
+    Assert.assertEquals("s2", options.getToSnapshot());
+
+    options = new DistCpOptions.Builder(
+        new Path("hdfs://localhost:8020/source/first"),
+        new Path("hdfs://localhost:8020/target/"))
+        .withSyncFolder(true)
+        .withUseDiff("s1", ".")
+        .build();
+    Assert.assertTrue(options.shouldUseDiff());
+    Assert.assertEquals("s1", options.getFromSnapshot());
+    Assert.assertEquals(".", options.getToSnapshot());
+
+    // make sure -diff is only valid when -update is specified
+    try {
+      new DistCpOptions.Builder(new Path("hdfs://localhost:8020/source/first"),
+          new Path("hdfs://localhost:8020/target/"))
+          .withUseDiff("s1", "s2")
+          .build();
+      fail("-diff should fail if -update option is not specified");
+    } catch (IllegalArgumentException e) {
+      assertExceptionContains(
+          "-diff/-rdiff is valid only with -update option", e);
+    }
+
+    try {
+      new DistCpOptions.Builder(
+          new Path("hdfs://localhost:8020/source/first"),
+          new Path("hdfs://localhost:8020/target/"))
+          .withSyncFolder(true)
+          .withUseDiff("s1", "s2")
+          .withDeleteMissing(true)
+          .build();
+      fail("Should fail as -delete and -diff/-rdiff are mutually exclusive.");
+    } catch (IllegalArgumentException e) {
+      assertExceptionContains(
+          "-delete and -diff/-rdiff are mutually exclusive.", e);
+    }
+
+    try {
+      new DistCpOptions.Builder(new Path("hdfs://localhost:8020/source/first"),
+          new Path("hdfs://localhost:8020/target/"))
+          .withUseDiff("s1", "s2")
+          .withDeleteMissing(true)
+          .build();
+      fail("-diff should fail if -update option is not specified");
+    } catch (IllegalArgumentException e) {
+      assertExceptionContains(
+          "-delete and -diff/-rdiff are mutually exclusive.", e);
+    }
+
+    try {
+      new DistCpOptions.Builder(new Path("hdfs://localhost:8020/source/first"),
+          new Path("hdfs://localhost:8020/target/"))
+          .withDeleteMissing(true)
+          .withUseDiff("s1", "s2")
+          .build();
+      fail("Should have failed as -delete and -diff are mutually exclusive");
+    } catch (IllegalArgumentException e) {
+      assertExceptionContains(
+          "-delete and -diff/-rdiff are mutually exclusive", e);
+    }
+  }
+
+  @Test
+  public void testExclusionsOption() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        new Path("hdfs://localhost:8020/source/first"),
+        new Path("hdfs://localhost:8020/target/"));
+    Assert.assertNull(builder.build().getFiltersFile());
+
+    builder.withFiltersFile("/tmp/filters.txt");
+    Assert.assertEquals("/tmp/filters.txt", builder.build().getFiltersFile());
+  }
+
+  @Test
+  public void testSetOptionsForSplitLargeFile() {
+    final DistCpOptions.Builder builder = new DistCpOptions.Builder(
+        new Path("hdfs://localhost:8020/source/"),
+        new Path("hdfs://localhost:8020/target/"))
+        .withAppend(true)
+        .withSyncFolder(true);
+    Assert.assertFalse(builder.build().shouldPreserve(FileAttribute.BLOCKSIZE));
+    Assert.assertTrue(builder.build().shouldAppend());
+
+    builder.withBlocksPerChunk(5440);
+    Assert.assertTrue(builder.build().shouldPreserve(FileAttribute.BLOCKSIZE));
+    Assert.assertFalse(builder.build().shouldAppend());
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1543f9c/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpSync.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpSync.java b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpSync.java
index 94e8604..717b2f0 100644
--- a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpSync.java
+++ b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpSync.java
@@ -39,7 +39,7 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -47,7 +47,7 @@ public class TestDistCpSync {
   private MiniDFSCluster cluster;
   private final Configuration conf = new HdfsConfiguration();
   private DistributedFileSystem dfs;
-  private DistCpOptions options;
+  private DistCpContext context;
   private final Path source = new Path("/source");
   private final Path target = new Path("/target");
   private final long BLOCK_SIZE = 1024;
@@ -62,10 +62,13 @@ public class TestDistCpSync {
     dfs.mkdirs(source);
     dfs.mkdirs(target);
 
-    options = new DistCpOptions(Arrays.asList(source), target);
-    options.setSyncFolder(true);
-    options.setUseDiff("s1", "s2");
+    final DistCpOptions options = new DistCpOptions.Builder(
+        Collections.singletonList(source), target)
+        .withSyncFolder(true)
+        .withUseDiff("s1", "s2")
+        .build();
     options.appendToConf(conf);
+    context = new DistCpContext(options);
 
     conf.set(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH, target.toString());
     conf.set(DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH, target.toString());
@@ -92,34 +95,34 @@ public class TestDistCpSync {
     // make sure the source path has been updated to the snapshot path
     final Path spath = new Path(source,
         HdfsConstants.DOT_SNAPSHOT_DIR + Path.SEPARATOR + "s2");
-    Assert.assertEquals(spath, options.getSourcePaths().get(0));
+    Assert.assertEquals(spath, context.getSourcePaths().get(0));
 
     // reset source path in options
-    options.setSourcePaths(Arrays.asList(source));
+    context.setSourcePaths(Collections.singletonList(source));
     // the source/target does not have the given snapshots
     dfs.allowSnapshot(source);
     dfs.allowSnapshot(target);
     Assert.assertFalse(sync());
-    Assert.assertEquals(spath, options.getSourcePaths().get(0));
+    Assert.assertEquals(spath, context.getSourcePaths().get(0));
 
     // reset source path in options
-    options.setSourcePaths(Arrays.asList(source));
+    context.setSourcePaths(Collections.singletonList(source));
     dfs.createSnapshot(source, "s1");
     dfs.createSnapshot(source, "s2");
     dfs.createSnapshot(target, "s1");
     Assert.assertTrue(sync());
 
     // reset source paths in options
-    options.setSourcePaths(Arrays.asList(source));
+    context.setSourcePaths(Collections.singletonList(source));
     // changes have been made in target
     final Path subTarget = new Path(target, "sub");
     dfs.mkdirs(subTarget);
     Assert.assertFalse(sync());
     // make sure the source path has been updated to the snapshot path
-    Assert.assertEquals(spath, options.getSourcePaths().get(0));
+    Assert.assertEquals(spath, context.getSourcePaths().get(0));
 
     // reset source paths in options
-    options.setSourcePaths(Arrays.asList(source));
+    context.setSourcePaths(Collections.singletonList(source));
     dfs.delete(subTarget, true);
     Assert.assertTrue(sync());
   }
@@ -137,7 +140,7 @@ public class TestDistCpSync {
   }
 
   private boolean sync() throws Exception {
-    DistCpSync distCpSync = new DistCpSync(options, conf);
+    DistCpSync distCpSync = new DistCpSync(context, conf);
     return distCpSync.sync();
   }
 
@@ -231,7 +234,7 @@ public class TestDistCpSync {
     SnapshotDiffReport report = dfs.getSnapshotDiffReport(source, "s1", "s2");
     System.out.println(report);
 
-    DistCpSync distCpSync = new DistCpSync(options, conf);
+    DistCpSync distCpSync = new DistCpSync(context, conf);
 
     // do the sync
     Assert.assertTrue(distCpSync.sync());
@@ -239,24 +242,24 @@ public class TestDistCpSync {
     // make sure the source path has been updated to the snapshot path
     final Path spath = new Path(source,
             HdfsConstants.DOT_SNAPSHOT_DIR + Path.SEPARATOR + "s2");
-    Assert.assertEquals(spath, options.getSourcePaths().get(0));
+    Assert.assertEquals(spath, context.getSourcePaths().get(0));
 
     // build copy listing
     final Path listingPath = new Path("/tmp/META/fileList.seq");
     CopyListing listing = new SimpleCopyListing(conf, new Credentials(), distCpSync);
-    listing.buildListing(listingPath, options);
+    listing.buildListing(listingPath, context);
 
     Map<Text, CopyListingFileStatus> copyListing = getListing(listingPath);
     CopyMapper copyMapper = new CopyMapper();
     StubContext stubContext = new StubContext(conf, null, 0);
-    Mapper<Text, CopyListingFileStatus, Text, Text>.Context context =
+    Mapper<Text, CopyListingFileStatus, Text, Text>.Context mapContext =
         stubContext.getContext();
     // Enable append
-    context.getConfiguration().setBoolean(
+    mapContext.getConfiguration().setBoolean(
         DistCpOptionSwitch.APPEND.getConfigLabel(), true);
-    copyMapper.setup(context);
+    copyMapper.setup(mapContext);
     for (Map.Entry<Text, CopyListingFileStatus> entry : copyListing.entrySet()) {
-      copyMapper.map(entry.getKey(), entry.getValue(), context);
+      copyMapper.map(entry.getKey(), entry.getValue(), mapContext);
     }
 
     // verify that we only list modified and created files/directories
@@ -312,7 +315,12 @@ public class TestDistCpSync {
    */
   @Test
   public void testSyncWithCurrent() throws Exception {
-    options.setUseDiff("s1", ".");
+    final DistCpOptions options = new DistCpOptions.Builder(
+        Collections.singletonList(source), target)
+        .withSyncFolder(true)
+        .withUseDiff("s1", ".")
+        .build();
+    context = new DistCpContext(options);
     initData(source);
     initData(target);
     enableAndCreateFirstSnapshot();
@@ -323,7 +331,7 @@ public class TestDistCpSync {
     // do the sync
     sync();
     // make sure the source path is still unchanged
-    Assert.assertEquals(source, options.getSourcePaths().get(0));
+    Assert.assertEquals(source, context.getSourcePaths().get(0));
   }
 
   private void initData2(Path dir) throws Exception {
@@ -501,32 +509,32 @@ public class TestDistCpSync {
     SnapshotDiffReport report = dfs.getSnapshotDiffReport(source, "s1", "s2");
     System.out.println(report);
 
-    DistCpSync distCpSync = new DistCpSync(options, conf);
+    DistCpSync distCpSync = new DistCpSync(context, conf);
     // do the sync
     Assert.assertTrue(distCpSync.sync());
 
     // make sure the source path has been updated to the snapshot path
     final Path spath = new Path(source,
             HdfsConstants.DOT_SNAPSHOT_DIR + Path.SEPARATOR + "s2");
-    Assert.assertEquals(spath, options.getSourcePaths().get(0));
+    Assert.assertEquals(spath, context.getSourcePaths().get(0));
 
     // build copy listing
     final Path listingPath = new Path("/tmp/META/fileList.seq");
     CopyListing listing = new SimpleCopyListing(conf, new Credentials(), distCpSync);
-    listing.buildListing(listingPath, options);
+    listing.buildListing(listingPath, context);
 
     Map<Text, CopyListingFileStatus> copyListing = getListing(listingPath);
     CopyMapper copyMapper = new CopyMapper();
     StubContext stubContext = new StubContext(conf, null, 0);
-    Mapper<Text, CopyListingFileStatus, Text, Text>.Context context =
+    Mapper<Text, CopyListingFileStatus, Text, Text>.Context mapContext =
             stubContext.getContext();
     // Enable append
-    context.getConfiguration().setBoolean(
+    mapContext.getConfiguration().setBoolean(
             DistCpOptionSwitch.APPEND.getConfigLabel(), true);
-    copyMapper.setup(context);
+    copyMapper.setup(mapContext);
     for (Map.Entry<Text, CopyListingFileStatus> entry :
             copyListing.entrySet()) {
-      copyMapper.map(entry.getKey(), entry.getValue(), context);
+      copyMapper.map(entry.getKey(), entry.getValue(), mapContext);
     }
 
     // verify that we only list modified and created files/directories
@@ -729,7 +737,7 @@ public class TestDistCpSync {
 
     boolean threwException = false;
     try {
-      DistCpSync distCpSync = new DistCpSync(options, conf);
+      DistCpSync distCpSync = new DistCpSync(context, conf);
       // do the sync
       distCpSync.sync();
     } catch (HadoopIllegalArgumentException e) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1543f9c/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpSyncReverseBase.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpSyncReverseBase.java b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpSyncReverseBase.java
index fea374e..cca1c53 100644
--- a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpSyncReverseBase.java
+++ b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpSyncReverseBase.java
@@ -56,7 +56,8 @@ public abstract class TestDistCpSyncReverseBase {
   private MiniDFSCluster cluster;
   private final Configuration conf = new HdfsConfiguration();
   private DistributedFileSystem dfs;
-  private DistCpOptions options;
+  private DistCpOptions.Builder optionsBuilder;
+  private DistCpContext distCpContext;
   private Path source;
   private boolean isSrcNotSameAsTgt = true;
   private final Path target = new Path("/target");
@@ -139,10 +140,12 @@ public abstract class TestDistCpSyncReverseBase {
     }
     dfs.mkdirs(target);
 
-    options = new DistCpOptions(Arrays.asList(source), target);
-    options.setSyncFolder(true);
-    options.setUseRdiff("s2", "s1");
+    optionsBuilder = new DistCpOptions.Builder(Arrays.asList(source), target)
+        .withSyncFolder(true)
+        .withUseRdiff("s2", "s1");
+    final DistCpOptions options = optionsBuilder.build();
     options.appendToConf(conf);
+    distCpContext = new DistCpContext(options);
 
     conf.set(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH, target.toString());
     conf.set(DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH, target.toString());
@@ -169,33 +172,33 @@ public abstract class TestDistCpSyncReverseBase {
     // make sure the source path has been updated to the snapshot path
     final Path spath = new Path(source,
         HdfsConstants.DOT_SNAPSHOT_DIR + Path.SEPARATOR + "s1");
-    Assert.assertEquals(spath, options.getSourcePaths().get(0));
+    Assert.assertEquals(spath, distCpContext.getSourcePaths().get(0));
 
     // reset source path in options
-    options.setSourcePaths(Arrays.asList(source));
+    optionsBuilder.withSourcePaths(Arrays.asList(source));
     // the source/target does not have the given snapshots
     dfs.allowSnapshot(source);
     dfs.allowSnapshot(target);
     Assert.assertFalse(sync());
-    Assert.assertEquals(spath, options.getSourcePaths().get(0));
+    Assert.assertEquals(spath, distCpContext.getSourcePaths().get(0));
 
     // reset source path in options
-    options.setSourcePaths(Arrays.asList(source));
+    optionsBuilder.withSourcePaths(Arrays.asList(source));
     this.enableAndCreateFirstSnapshot();
     dfs.createSnapshot(target, "s2");
     Assert.assertTrue(sync());
 
     // reset source paths in options
-    options.setSourcePaths(Arrays.asList(source));
+    optionsBuilder.withSourcePaths(Arrays.asList(source));
     // changes have been made in target
     final Path subTarget = new Path(target, "sub");
     dfs.mkdirs(subTarget);
     Assert.assertFalse(sync());
     // make sure the source path has been updated to the snapshot path
-    Assert.assertEquals(spath, options.getSourcePaths().get(0));
+    Assert.assertEquals(spath, distCpContext.getSourcePaths().get(0));
 
     // reset source paths in options
-    options.setSourcePaths(Arrays.asList(source));
+    optionsBuilder.withSourcePaths(Arrays.asList(source));
     dfs.delete(subTarget, true);
     Assert.assertTrue(sync());
   }
@@ -215,7 +218,8 @@ public abstract class TestDistCpSyncReverseBase {
   }
 
   private boolean sync() throws Exception {
-    DistCpSync distCpSync = new DistCpSync(options, conf);
+    distCpContext = new DistCpContext(optionsBuilder.build());
+    final DistCpSync distCpSync = new DistCpSync(distCpContext, conf);
     return distCpSync.sync();
   }
 
@@ -328,7 +332,7 @@ public abstract class TestDistCpSyncReverseBase {
     SnapshotDiffReport report = dfs.getSnapshotDiffReport(target, "s2", "s1");
     System.out.println(report);
 
-    DistCpSync distCpSync = new DistCpSync(options, conf);
+    final DistCpSync distCpSync = new DistCpSync(distCpContext, conf);
 
     lsr("Before sync target: ", shell, target);
 
@@ -340,13 +344,13 @@ public abstract class TestDistCpSyncReverseBase {
     // make sure the source path has been updated to the snapshot path
     final Path spath = new Path(source,
         HdfsConstants.DOT_SNAPSHOT_DIR + Path.SEPARATOR + "s1");
-    Assert.assertEquals(spath, options.getSourcePaths().get(0));
+    Assert.assertEquals(spath, distCpContext.getSourcePaths().get(0));
 
     // build copy listing
     final Path listingPath = new Path("/tmp/META/fileList.seq");
     CopyListing listing = new SimpleCopyListing(conf, new Credentials(),
         distCpSync);
-    listing.buildListing(listingPath, options);
+    listing.buildListing(listingPath, distCpContext);
 
     Map<Text, CopyListingFileStatus> copyListing = getListing(listingPath);
     CopyMapper copyMapper = new CopyMapper();
@@ -425,7 +429,7 @@ public abstract class TestDistCpSyncReverseBase {
    */
   @Test
   public void testSyncWithCurrent() throws Exception {
-    options.setUseRdiff(".", "s1");
+    optionsBuilder.withUseRdiff(".", "s1");
     if (isSrcNotSameAsTgt) {
       initData(source);
     }
@@ -440,7 +444,7 @@ public abstract class TestDistCpSyncReverseBase {
     final Path spath = new Path(source,
         HdfsConstants.DOT_SNAPSHOT_DIR + Path.SEPARATOR + "s1");
     // make sure the source path is still unchanged
-    Assert.assertEquals(spath, options.getSourcePaths().get(0));
+    Assert.assertEquals(spath, distCpContext.getSourcePaths().get(0));
   }
 
   private void initData2(Path dir) throws Exception {
@@ -649,7 +653,7 @@ public abstract class TestDistCpSyncReverseBase {
     lsrSource("Before sync source: ", shell, source);
     lsr("Before sync target: ", shell, target);
 
-    DistCpSync distCpSync = new DistCpSync(options, conf);
+    DistCpSync distCpSync = new DistCpSync(distCpContext, conf);
     // do the sync
     distCpSync.sync();
 
@@ -658,12 +662,12 @@ public abstract class TestDistCpSyncReverseBase {
     // make sure the source path has been updated to the snapshot path
     final Path spath = new Path(source,
             HdfsConstants.DOT_SNAPSHOT_DIR + Path.SEPARATOR + "s1");
-    Assert.assertEquals(spath, options.getSourcePaths().get(0));
+    Assert.assertEquals(spath, distCpContext.getSourcePaths().get(0));
 
     // build copy listing
     final Path listingPath = new Path("/tmp/META/fileList.seq");
     CopyListing listing = new SimpleCopyListing(conf, new Credentials(), distCpSync);
-    listing.buildListing(listingPath, options);
+    listing.buildListing(listingPath, distCpContext);
 
     Map<Text, CopyListingFileStatus> copyListing = getListing(listingPath);
     CopyMapper copyMapper = new CopyMapper();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1543f9c/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
index 5511e09..d6d0542 100644
--- a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
+++ b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpViewFs.java
@@ -413,11 +413,13 @@ public class TestDistCpViewFs {
 
   private void runTest(Path listFile, Path target, boolean targetExists, 
       boolean sync) throws IOException {
-    DistCpOptions options = new DistCpOptions(listFile, target);
-    options.setSyncFolder(sync);
-    options.setTargetPathExists(targetExists);
+    final DistCpOptions options = new DistCpOptions.Builder(listFile, target)
+        .withSyncFolder(sync)
+        .build();
     try {
-      new DistCp(getConf(), options).execute();
+      final DistCp distcp = new DistCp(getConf(), options);
+      distcp.context.setTargetPathExists(targetExists);
+      distcp.execute();
     } catch (Exception e) {
       LOG.error("Exception encountered ", e);
       throw new IOException(e);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1543f9c/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestFileBasedCopyListing.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestFileBasedCopyListing.java b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestFileBasedCopyListing.java
index fe2c668..203de1a 100644
--- a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestFileBasedCopyListing.java
+++ b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestFileBasedCopyListing.java
@@ -514,10 +514,11 @@ public class TestFileBasedCopyListing {
   private void runTest(Path listFile, Path target, boolean targetExists,
       boolean sync) throws IOException {
     CopyListing listing = new FileBasedCopyListing(config, CREDENTIALS);
-    DistCpOptions options = new DistCpOptions(listFile, target);
-    options.setSyncFolder(sync);
-    options.setTargetPathExists(targetExists);
-    listing.buildListing(listFile, options);
+    final DistCpOptions options = new DistCpOptions.Builder(listFile, target)
+        .withSyncFolder(sync).build();
+    final DistCpContext context = new DistCpContext(options);
+    context.setTargetPathExists(targetExists);
+    listing.buildListing(listFile, context);
   }
 
   private void checkResult(Path listFile, int count) throws IOException {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1543f9c/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestGlobbedCopyListing.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestGlobbedCopyListing.java b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestGlobbedCopyListing.java
index 6c03b4e..1c92a9c 100644
--- a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestGlobbedCopyListing.java
+++ b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestGlobbedCopyListing.java
@@ -34,7 +34,7 @@ import org.junit.Test;
 
 import java.io.DataOutputStream;
 import java.net.URI;
-import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -109,9 +109,12 @@ public class TestGlobbedCopyListing {
     Path source = new Path(fileSystemPath.toString() + "/tmp/source");
     Path target = new Path(fileSystemPath.toString() + "/tmp/target");
     Path listingPath = new Path(fileSystemPath.toString() + "/tmp/META/fileList.seq");
-    DistCpOptions options = new DistCpOptions(Arrays.asList(source), target);
-    options.setTargetPathExists(false);
-    new GlobbedCopyListing(new Configuration(), CREDENTIALS).buildListing(listingPath, options);
+    DistCpOptions options = new DistCpOptions.Builder(
+        Collections.singletonList(source), target).build();
+    DistCpContext context = new DistCpContext(options);
+    context.setTargetPathExists(false);
+    new GlobbedCopyListing(new Configuration(), CREDENTIALS)
+        .buildListing(listingPath, context);
 
     verifyContents(listingPath);
   }

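The TestFileBasedCopyListing and TestGlobbedCopyListing hunks show the same pattern for listing-only callers: targetPathExists used to be flipped on the options, but as run-time state it now belongs to the DistCpContext. A minimal sketch under that assumption, not part of the patch, with the listing path purely illustrative.

import java.util.Collections;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.tools.DistCpContext;
import org.apache.hadoop.tools.DistCpOptions;
import org.apache.hadoop.tools.GlobbedCopyListing;

public class ListingContextSketch {
  static void buildListing(Configuration conf, Credentials credentials,
      Path source, Path target, boolean targetExists) throws Exception {
    DistCpOptions options = new DistCpOptions.Builder(
        Collections.singletonList(source), target).build();

    // targetPathExists is run-time state, so it is set on the context.
    DistCpContext context = new DistCpContext(options);
    context.setTargetPathExists(targetExists);

    new GlobbedCopyListing(conf, credentials)
        .buildListing(new Path("/tmp/META/fileList.seq"), context);
  }
}
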
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1543f9c/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java
index ee8e7cc..7574ded 100644
--- a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java
+++ b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.JobSubmissionFiles;
-import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.tools.util.TestDistCpUtils;
 import org.junit.Assert;
 import org.junit.BeforeClass;
@@ -493,7 +492,8 @@ public class TestIntegration {
       List<Path> sources = new ArrayList<Path>();
       sources.add(sourcePath);
 
-      DistCpOptions options = new DistCpOptions(sources, target);
+      DistCpOptions options = new DistCpOptions.Builder(sources, target)
+          .build();
 
       Configuration conf = getConf();
       Path stagingDir = JobSubmissionFiles.getStagingDir(
@@ -559,14 +559,16 @@ public class TestIntegration {
   private void runTest(Path listFile, Path target, boolean targetExists, 
       boolean sync, boolean delete,
       boolean overwrite) throws IOException {
-    DistCpOptions options = new DistCpOptions(listFile, target);
-    options.setSyncFolder(sync);
-    options.setDeleteMissing(delete);
-    options.setOverwrite(overwrite);
-    options.setTargetPathExists(targetExists);
-    options.setNumListstatusThreads(numListstatusThreads);
+    final DistCpOptions options = new DistCpOptions.Builder(listFile, target)
+        .withSyncFolder(sync)
+        .withDeleteMissing(delete)
+        .withOverwrite(overwrite)
+        .withNumListstatusThreads(numListstatusThreads)
+        .build();
     try {
-      new DistCp(getConf(), options).execute();
+      final DistCp distCp = new DistCp(getConf(), options);
+      distCp.context.setTargetPathExists(targetExists);
+      distCp.execute();
     } catch (Exception e) {
       LOG.error("Exception encountered ", e);
       throw new IOException(e);

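In the end-to-end tests (TestDistCpViewFs above and TestIntegration here) the DistCp driver owns its own context, so the tests set targetPathExists through distCp.context before calling execute(). A minimal sketch of that wiring, not part of the patch; it assumes the same-package visibility of the context field that the tests rely on, so it is test-only plumbing rather than a public API.

package org.apache.hadoop.tools;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class DriverContextSketch {
  // Placed in org.apache.hadoop.tools only because the context field is
  // package-visible, which is what the tests above rely on.
  static void run(Configuration conf, Path listFile, Path target,
      boolean targetExists, boolean sync) throws Exception {
    DistCpOptions options = new DistCpOptions.Builder(listFile, target)
        .withSyncFolder(sync)
        .build();
    DistCp distCp = new DistCp(conf, options);
    distCp.context.setTargetPathExists(targetExists);
    distCp.execute();
  }
}
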
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1543f9c/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
index f94ba97..e7fdc51 100644
--- a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
+++ b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.tools;
 
-import static org.junit.Assert.assertFalse;
+import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains;
 import static org.junit.Assert.fail;
 
 import org.junit.Assert;
@@ -28,7 +28,6 @@ import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.tools.DistCpOptions.*;
 import org.apache.hadoop.conf.Configuration;
 
-import java.util.Iterator;
 import java.util.NoSuchElementException;
 
 public class TestOptionsParser {
@@ -329,7 +328,7 @@ public class TestOptionsParser {
         "100",
         "hdfs://localhost:9820/source/first",
         "hdfs://localhost:9820/target/"});
-    Assert.assertEquals(DistCpOptions.maxNumListstatusThreads,
+    Assert.assertEquals(DistCpOptions.MAX_NUM_LISTSTATUS_THREADS,
                         options.getNumListstatusThreads());
   }
 
@@ -383,25 +382,6 @@ public class TestOptionsParser {
   }
 
   @Test
-  public void testToString() {
-    DistCpOptions option = new DistCpOptions(new Path("abc"), new Path("xyz"));
-    String val = "DistCpOptions{atomicCommit=false, syncFolder=false, "
-        + "deleteMissing=false, ignoreFailures=false, overwrite=false, "
-        + "append=false, useDiff=false, useRdiff=false, "
-        + "fromSnapshot=null, toSnapshot=null, "
-        + "skipCRC=false, blocking=true, numListstatusThreads=0, maxMaps=20, "
-        + "mapBandwidth=0.0, "
-        + "copyStrategy='uniformsize', preserveStatus=[], "
-        + "preserveRawXattrs=false, atomicWorkPath=null, logPath=null, "
-        + "sourceFileListing=abc, sourcePaths=null, targetPath=xyz, "
-        + "targetPathExists=true, filtersFile='null', blocksPerChunk=0}";
-    String optionString = option.toString();
-    Assert.assertEquals(val, optionString);
-    Assert.assertNotSame(DistCpOptionSwitch.ATOMIC_COMMIT.toString(),
-        DistCpOptionSwitch.ATOMIC_COMMIT.name());
-  }
-
-  @Test
   public void testCopyStrategy() {
     DistCpOptions options = OptionsParser.parse(new String[] {
         "-strategy",
@@ -529,13 +509,8 @@ public class TestOptionsParser {
         "-f",
         "hdfs://localhost:9820/source/first",
         "hdfs://localhost:9820/target/"});
-    int i = 0;
-    Iterator<FileAttribute> attribIterator = options.preserveAttributes();
-    while (attribIterator.hasNext()) {
-      attribIterator.next();
-      i++;
-    }
-    Assert.assertEquals(i, DistCpOptionSwitch.PRESERVE_STATUS_DEFAULT.length() - 2);
+    Assert.assertEquals(DistCpOptionSwitch.PRESERVE_STATUS_DEFAULT.length() - 2,
+        options.getPreserveAttributes().size());
 
     try {
       OptionsParser.parse(new String[] {
@@ -545,19 +520,18 @@ public class TestOptionsParser {
           "hdfs://localhost:9820/target"});
       Assert.fail("Invalid preserve attribute");
     }
-    catch (IllegalArgumentException ignore) {}
     catch (NoSuchElementException ignore) {}
 
-    options = OptionsParser.parse(new String[] {
-        "-f",
-        "hdfs://localhost:9820/source/first",
-        "hdfs://localhost:9820/target/"});
-    Assert.assertFalse(options.shouldPreserve(FileAttribute.PERMISSION));
-    options.preserve(FileAttribute.PERMISSION);
-    Assert.assertTrue(options.shouldPreserve(FileAttribute.PERMISSION));
+    Builder builder = new DistCpOptions.Builder(
+        new Path("hdfs://localhost:9820/source/first"),
+        new Path("hdfs://localhost:9820/target/"));
+    Assert.assertFalse(
+        builder.build().shouldPreserve(FileAttribute.PERMISSION));
+    builder.preserve(FileAttribute.PERMISSION);
+    Assert.assertTrue(builder.build().shouldPreserve(FileAttribute.PERMISSION));
 
-    options.preserve(FileAttribute.PERMISSION);
-    Assert.assertTrue(options.shouldPreserve(FileAttribute.PERMISSION));
+    builder.preserve(FileAttribute.PERMISSION);
+    Assert.assertTrue(builder.build().shouldPreserve(FileAttribute.PERMISSION));
   }
 
   @Test
@@ -756,28 +730,25 @@ public class TestOptionsParser {
     }
 
     try {
-      options = OptionsParser.parse(new String[] {
-          optionStr, "s1", "s2", "-update", "-delete",
+      OptionsParser.parse(new String[] {
+          "-diff", "s1", "s2", "-update", "-delete",
           "hdfs://localhost:9820/source/first",
           "hdfs://localhost:9820/target/" });
-      assertFalse("-delete should be ignored when "
-          + optionStr + " is specified",
-          options.shouldDeleteMissing());
+      fail("Should fail as -delete and -diff/-rdiff are mutually exclusive");
     } catch (IllegalArgumentException e) {
-      fail("Got unexpected IllegalArgumentException: " + e.getMessage());
+      assertExceptionContains(
+          "-delete and -diff/-rdiff are mutually exclusive", e);
     }
 
     try {
-      options = OptionsParser.parse(new String[] {
-          optionStr, "s1", "s2", "-delete",
+      OptionsParser.parse(new String[] {
+          "-diff", "s1", "s2", "-delete",
           "hdfs://localhost:9820/source/first",
           "hdfs://localhost:9820/target/" });
-      fail(optionStr + " should fail if -update option is not specified");
+      fail("Should fail as -delete and -diff/-rdiff are mutually exclusive");
     } catch (IllegalArgumentException e) {
-      assertFalse("-delete should be ignored when -diff is specified",
-          options.shouldDeleteMissing());
-      GenericTestUtils.assertExceptionContains(
-          "-diff/-rdiff is valid only with -update option", e);
+      assertExceptionContains(
+          "-delete and -diff/-rdiff are mutually exclusive", e);
     }
 
     try {
@@ -785,10 +756,10 @@ public class TestOptionsParser {
           "-delete", "-overwrite",
           "hdfs://localhost:9820/source/first",
           "hdfs://localhost:9820/target/" });
-      fail(optionStr + " should fail if -update option is not specified");
+      fail("Should fail as -delete and -diff are mutually exclusive");
     } catch (IllegalArgumentException e) {
-      GenericTestUtils.assertExceptionContains(
-          "-diff/-rdiff is valid only with -update option", e);
+      assertExceptionContains(
+          "-delete and -diff/-rdiff are mutually exclusive", e);
     }
 
     final String optionStrOther = isDiff? "-rdiff" : "-diff";

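The TestOptionsParser changes capture two behavioural points: preserve settings are now applied on the Builder, with each build() producing an independent immutable snapshot, and combining -delete with -diff or -rdiff fails fast instead of being silently ignored. A minimal sketch of the Builder side, mirroring the assertions above; the commented values are the expected results, not output captured from a run.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.tools.DistCpOptions;
import org.apache.hadoop.tools.DistCpOptions.FileAttribute;

public class PreserveBuilderSketch {
  public static void main(String[] args) {
    DistCpOptions.Builder builder = new DistCpOptions.Builder(
        new Path("hdfs://localhost:9820/source/first"),
        new Path("hdfs://localhost:9820/target/"));

    // No preserve flag yet, so the built snapshot should not preserve PERMISSION.
    System.out.println(
        builder.build().shouldPreserve(FileAttribute.PERMISSION));   // expected: false

    // preserve() mutates only the builder; each build() is an independent,
    // immutable snapshot, and repeated preserve() calls are harmless.
    builder.preserve(FileAttribute.PERMISSION);
    builder.preserve(FileAttribute.PERMISSION);
    System.out.println(
        builder.build().shouldPreserve(FileAttribute.PERMISSION));   // expected: true
  }
}
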
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1543f9c/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/contract/AbstractContractDistCpTest.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/contract/AbstractContractDistCpTest.java b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/contract/AbstractContractDistCpTest.java
index 21a14d3..fb1a64d 100644
--- a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/contract/AbstractContractDistCpTest.java
+++ b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/contract/AbstractContractDistCpTest.java
@@ -19,9 +19,8 @@
 package org.apache.hadoop.tools.contract;
 
 import static org.apache.hadoop.fs.contract.ContractTestUtils.*;
-import static org.junit.Assert.*;
 
-import java.util.Arrays;
+import java.util.Collections;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -184,7 +183,8 @@ public abstract class AbstractContractDistCpTest
    * @throws Exception if there is a failure
    */
   private void runDistCp(Path src, Path dst) throws Exception {
-    DistCpOptions options = new DistCpOptions(Arrays.asList(src), dst);
+    DistCpOptions options = new DistCpOptions.Builder(
+        Collections.singletonList(src), dst).build();
     Job job = new DistCp(conf, options).execute();
     assertNotNull("Unexpected null job returned from DistCp execution.", job);
     assertTrue("DistCp job did not complete.", job.isComplete());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1543f9c/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyCommitter.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyCommitter.java b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyCommitter.java
index 2452d6f..6ee37cc 100644
--- a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyCommitter.java
+++ b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyCommitter.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.mapreduce.task.JobContextImpl;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
 import org.apache.hadoop.tools.CopyListing;
 import org.apache.hadoop.tools.DistCpConstants;
+import org.apache.hadoop.tools.DistCpContext;
 import org.apache.hadoop.tools.DistCpOptions;
 import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
 import org.apache.hadoop.tools.GlobbedCopyListing;
@@ -146,15 +147,16 @@ public class TestCopyCommitter {
       sourceBase = TestDistCpUtils.createTestSetup(fs, sourcePerm);
       targetBase = TestDistCpUtils.createTestSetup(fs, initialPerm);
 
-      DistCpOptions options = new DistCpOptions(Arrays.asList(new Path(sourceBase)),
-          new Path("/out"));
-      options.preserve(FileAttribute.PERMISSION);
+      final DistCpOptions options = new DistCpOptions.Builder(
+          Collections.singletonList(new Path(sourceBase)), new Path("/out"))
+          .preserve(FileAttribute.PERMISSION).build();
       options.appendToConf(conf);
-      options.setTargetPathExists(false);
-      
+      final DistCpContext context = new DistCpContext(options);
+      context.setTargetPathExists(false);
+
       CopyListing listing = new GlobbedCopyListing(conf, CREDENTIALS);
       Path listingFile = new Path("/tmp1/" + String.valueOf(rand.nextLong()));
-      listing.buildListing(listingFile, options);
+      listing.buildListing(listingFile, context);
 
       conf.set(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH, targetBase);
 
@@ -197,15 +199,15 @@ public class TestCopyCommitter {
       String targetBaseAdd = TestDistCpUtils.createTestSetup(fs, FsPermission.getDefault());
       fs.rename(new Path(targetBaseAdd), new Path(targetBase));
 
-      DistCpOptions options = new DistCpOptions(Arrays.asList(new Path(sourceBase)),
-          new Path("/out"));
-      options.setSyncFolder(true);
-      options.setDeleteMissing(true);
+      final DistCpOptions options = new DistCpOptions.Builder(
+          Collections.singletonList(new Path(sourceBase)), new Path("/out"))
+          .withSyncFolder(true).withDeleteMissing(true).build();
       options.appendToConf(conf);
+      final DistCpContext context = new DistCpContext(options);
 
       CopyListing listing = new GlobbedCopyListing(conf, CREDENTIALS);
       Path listingFile = new Path("/tmp1/" + String.valueOf(rand.nextLong()));
-      listing.buildListing(listingFile, options);
+      listing.buildListing(listingFile, context);
 
       conf.set(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH, targetBase);
       conf.set(DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH, targetBase);
@@ -266,15 +268,15 @@ public class TestCopyCommitter {
       TestDistCpUtils.createFile(fs, targetBase + "/9");
       TestDistCpUtils.createFile(fs, targetBase + "/A");
 
-      DistCpOptions options = new DistCpOptions(Arrays.asList(new Path(sourceBase)), 
-          new Path("/out"));
-      options.setSyncFolder(true);
-      options.setDeleteMissing(true);
+      final DistCpOptions options = new DistCpOptions.Builder(
+          Collections.singletonList(new Path(sourceBase)), new Path("/out"))
+          .withSyncFolder(true).withDeleteMissing(true).build();
       options.appendToConf(conf);
+      final DistCpContext context = new DistCpContext(options);
 
       CopyListing listing = new GlobbedCopyListing(conf, CREDENTIALS);
       Path listingFile = new Path("/tmp1/" + String.valueOf(rand.nextLong()));
-      listing.buildListing(listingFile, options);
+      listing.buildListing(listingFile, context);
 
       conf.set(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH, targetBase);
       conf.set(DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH, targetBase);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1543f9c/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java
index 78e2262..5315137 100644
--- a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java
+++ b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.mapreduce.task.JobContextImpl;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.apache.hadoop.tools.CopyListing;
 import org.apache.hadoop.tools.CopyListingFileStatus;
+import org.apache.hadoop.tools.DistCpContext;
 import org.apache.hadoop.tools.DistCpOptions;
 import org.apache.hadoop.tools.StubContext;
 import org.apache.hadoop.security.Credentials;
@@ -74,9 +75,9 @@ public class TestUniformSizeInputFormat {
 
     List<Path> sourceList = new ArrayList<Path>();
     sourceList.add(sourcePath);
-    final DistCpOptions distCpOptions = new DistCpOptions(sourceList, targetPath);
-    distCpOptions.setMaxMaps(nMaps);
-    return distCpOptions;
+    return new DistCpOptions.Builder(sourceList, targetPath)
+        .maxMaps(nMaps)
+        .build();
   }
 
   private static int createFile(String path, int fileSize) throws Exception {
@@ -100,14 +101,14 @@ public class TestUniformSizeInputFormat {
   }
 
   public void testGetSplits(int nMaps) throws Exception {
-    DistCpOptions options = getOptions(nMaps);
+    DistCpContext context = new DistCpContext(getOptions(nMaps));
     Configuration configuration = new Configuration();
     configuration.set("mapred.map.tasks",
-                      String.valueOf(options.getMaxMaps()));
+                      String.valueOf(context.getMaxMaps()));
     Path listFile = new Path(cluster.getFileSystem().getUri().toString()
         + "/tmp/testGetSplits_1/fileList.seq");
-    CopyListing.getCopyListing(configuration, CREDENTIALS, options).
-        buildListing(listFile, options);
+    CopyListing.getCopyListing(configuration, CREDENTIALS, context)
+        .buildListing(listFile, context);
 
     JobContext jobContext = new JobContextImpl(configuration, new JobID());
     UniformSizeInputFormat uniformSizeInputFormat = new UniformSizeInputFormat();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1543f9c/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/lib/TestDynamicInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/lib/TestDynamicInputFormat.java b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/lib/TestDynamicInputFormat.java
index bb2dd9d..87290ca 100644
--- a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/lib/TestDynamicInputFormat.java
+++ b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/lib/TestDynamicInputFormat.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.tools.mapred.lib;
 
 import org.apache.hadoop.tools.DistCpConstants;
+import org.apache.hadoop.tools.DistCpContext;
 import org.junit.Assert;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -84,9 +85,9 @@ public class TestDynamicInputFormat {
 
     List<Path> sourceList = new ArrayList<Path>();
     sourceList.add(sourcePath);
-    DistCpOptions options = new DistCpOptions(sourceList, targetPath);
-    options.setMaxMaps(NUM_SPLITS);
-    return options;
+    return new DistCpOptions.Builder(sourceList, targetPath)
+        .maxMaps(NUM_SPLITS)
+        .build();
   }
 
   private static void createFile(String path) throws Exception {
@@ -110,13 +111,13 @@ public class TestDynamicInputFormat {
 
   @Test
   public void testGetSplits() throws Exception {
-    DistCpOptions options = getOptions();
+    final DistCpContext context = new DistCpContext(getOptions());
     Configuration configuration = new Configuration();
     configuration.set("mapred.map.tasks",
-                      String.valueOf(options.getMaxMaps()));
-    CopyListing.getCopyListing(configuration, CREDENTIALS, options).buildListing(
-            new Path(cluster.getFileSystem().getUri().toString()
-                    +"/tmp/testDynInputFormat/fileList.seq"), options);
+                      String.valueOf(context.getMaxMaps()));
+    CopyListing.getCopyListing(configuration, CREDENTIALS, context)
+        .buildListing(new Path(cluster.getFileSystem().getUri().toString()
+            +"/tmp/testDynInputFormat/fileList.seq"), context);
 
     JobContext jobContext = new JobContextImpl(configuration, new JobID());
     DynamicInputFormat<Text, CopyListingFileStatus> inputFormat =


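TestUniformSizeInputFormat and TestDynamicInputFormat both now build their copy listing through a DistCpContext and read settings such as the maximum map count back through the context's getters. A minimal sketch, not part of the patch, with the listing path illustrative.

import java.util.Collections;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.tools.CopyListing;
import org.apache.hadoop.tools.DistCpContext;
import org.apache.hadoop.tools.DistCpOptions;

public class SplitListingSketch {
  static void buildListingForSplits(Path source, Path target, int nMaps)
      throws Exception {
    DistCpContext context = new DistCpContext(
        new DistCpOptions.Builder(Collections.singletonList(source), target)
            .maxMaps(nMaps)
            .build());

    Configuration configuration = new Configuration();
    // The input formats size their splits from the context, not the options.
    configuration.set("mapred.map.tasks",
        String.valueOf(context.getMaxMaps()));

    CopyListing.getCopyListing(configuration, new Credentials(), context)
        .buildListing(new Path("/tmp/testGetSplits/fileList.seq"), context);
  }
}
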