Subject: svn commit: r1601151 - in /hadoop/common/branches/HDFS-5442: ./ hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/ hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/ hadoop-tools/hadoop-distcp/src/main/java/org/apac...
Date: Sat, 07 Jun 2014 16:29:15 -0000
To: common-commits@hadoop.apache.org
From: vinayakumarb@apache.org

Author: vinayakumarb
Date: Sat Jun 7 16:29:10 2014
New Revision: 1601151

URL: http://svn.apache.org/r1601151
Log:
Merged changes from trunk

Added:
    hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpWithXAttrs.java
      - copied unchanged from r1601150, hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpWithXAttrs.java
Modified:
    hadoop/common/branches/HDFS-5442/   (props changed)
    hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java
    hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java
    hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
    hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
    hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
    hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java
    hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
    hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
    hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
    hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
    hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
    hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java
    hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java
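The DistCp portion of this merge adds extended-attribute (XAttr) preservation, driven by a new 'x' letter in the -p option. As a minimal sketch of how that flag surfaces through the parser (this snippet is not part of the commit; the listing file and target URI are placeholders modeled on the tests further down):

    import org.apache.hadoop.tools.DistCpOptions;
    import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
    import org.apache.hadoop.tools.OptionsParser;

    public class PreserveXAttrFlagSketch {
      public static void main(String[] args) {
        // "-px" asks DistCp to preserve extended attributes only; the
        // listing file and target URI below are placeholders.
        DistCpOptions options = OptionsParser.parse(new String[] {
            "-px",
            "-f", "hdfs://localhost:8020/source/first",
            "hdfs://localhost:8020/target/"});
        // With 'x' mapped to FileAttribute.XATTR, this should print true.
        System.out.println(options.shouldPreserve(FileAttribute.XATTR));
      }
    }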
Propchange: hadoop/common/branches/HDFS-5442/
------------------------------------------------------------------------------
  Merged /hadoop/common/branches/branch-2:r1600970
  Merged /hadoop/common/trunk:r1598456-1601150

Modified: hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java?rev=1601151&r1=1601150&r2=1601151&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java Sat Jun 7 16:29:10 2014
@@ -129,6 +129,7 @@ public abstract class CopyListing extend
   /**
    * Validate the final resulting path listing. Checks if there are duplicate
    * entries. If preserving ACLs, checks that file system can support ACLs.
+   * If preserving XAttrs, checks that file system can support XAttrs.
    *
    * @param pathToListFile - path listing build by doBuildListing
    * @param options - Input options to distcp
@@ -151,6 +152,7 @@ public abstract class CopyListing extend
       Text currentKey = new Text();
       Set<URI> aclSupportCheckFsSet = Sets.newHashSet();
+      Set<URI> xAttrSupportCheckFsSet = Sets.newHashSet();
       while (reader.next(currentKey)) {
         if (currentKey.equals(lastKey)) {
           CopyListingFileStatus currentFileStatus = new CopyListingFileStatus();
@@ -167,6 +169,14 @@ public abstract class CopyListing extend
             aclSupportCheckFsSet.add(lastFsUri);
           }
         }
+        if (options.shouldPreserve(DistCpOptions.FileAttribute.XATTR)) {
+          FileSystem lastFs = lastFileStatus.getPath().getFileSystem(config);
+          URI lastFsUri = lastFs.getUri();
+          if (!xAttrSupportCheckFsSet.contains(lastFsUri)) {
+            DistCpUtils.checkFileSystemXAttrSupport(lastFs);
+            xAttrSupportCheckFsSet.add(lastFsUri);
+          }
+        }
         lastKey.set(currentKey);
       }
     } finally {
@@ -256,4 +266,10 @@ public abstract class CopyListing extend
       super(message);
     }
   }
+
+  public static class XAttrsNotSupportedException extends RuntimeException {
+    public XAttrsNotSupportedException(String message) {
+      super(message);
+    }
+  }
 }
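The validation above probes each distinct source file system only once, keyed by its URI, so a large listing does not trigger repeated support checks. A stand-alone sketch of that dedup-and-probe pattern, assuming hypothetical HDFS paths and using the same FileSystem.getXAttrs() call the commit relies on:

    import java.net.URI;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class XAttrSupportProbeSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        List<Path> paths = Arrays.asList(              // hypothetical inputs
            new Path("hdfs://nn1:8020/data/a"),
            new Path("hdfs://nn1:8020/data/b"));
        Set<URI> checked = new HashSet<URI>();         // one probe per file system
        for (Path p : paths) {
          FileSystem fs = p.getFileSystem(conf);
          if (checked.add(fs.getUri())) {
            // Fails (e.g. with an UnsupportedOperationException) on file systems
            // without XAttr support; DistCp wraps that failure in its new
            // XAttrsNotSupportedException.
            fs.getXAttrs(new Path(Path.SEPARATOR));
          }
        }
      }
    }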
Modified: hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java?rev=1601151&r1=1601150&r2=1601151&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java Sat Jun 7 16:29:10 2014
@@ -21,7 +21,10 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.util.Collections;
+import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.FileStatus;
@@ -34,6 +37,7 @@ import org.apache.hadoop.io.WritableUtil
 
 import com.google.common.base.Objects;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
 
 /**
  * CopyListingFileStatus is a specialized subclass of {@link FileStatus} for
@@ -45,6 +49,7 @@ import com.google.common.collect.Lists;
 public final class CopyListingFileStatus extends FileStatus {
 
   private static final byte NO_ACL_ENTRIES = -1;
+  private static final int NO_XATTRS = -1;
 
   // Retain static arrays of enum values to prevent repeated allocation of new
   // arrays during deserialization.
@@ -53,6 +58,7 @@ public final class CopyListingFileStatus
   private static final FsAction[] FS_ACTIONS = FsAction.values();
 
   private List<AclEntry> aclEntries;
+  private Map<String, byte[]> xAttrs;
 
   /**
    * Default constructor.
@@ -88,6 +94,24 @@ public class CopyListingFileStatus
   public void setAclEntries(List<AclEntry> aclEntries) {
     this.aclEntries = aclEntries;
   }
+
+  /**
+   * Returns all xAttrs.
+   *
+   * @return Map containing all xAttrs
+   */
+  public Map<String, byte[]> getXAttrs() {
+    return xAttrs;
+  }
+
+  /**
+   * Sets optional xAttrs.
+   *
+   * @param xAttrs Map containing all xAttrs
+   */
+  public void setXAttrs(Map<String, byte[]> xAttrs) {
+    this.xAttrs = xAttrs;
+  }
 
   @Override
   public void write(DataOutput out) throws IOException {
@@ -104,6 +128,26 @@ public class CopyListingFileStatus
     } else {
       out.writeByte(NO_ACL_ENTRIES);
     }
+
+    if (xAttrs != null) {
+      out.writeInt(xAttrs.size());
+      Iterator<Entry<String, byte[]>> iter = xAttrs.entrySet().iterator();
+      while (iter.hasNext()) {
+        Entry<String, byte[]> entry = iter.next();
+        WritableUtils.writeString(out, entry.getKey());
+        final byte[] value = entry.getValue();
+        if (value != null) {
+          out.writeInt(value.length);
+          if (value.length > 0) {
+            out.write(value);
+          }
+        } else {
+          out.writeInt(-1);
+        }
+      }
+    } else {
+      out.writeInt(NO_XATTRS);
+    }
   }
 
   @Override
@@ -123,6 +167,25 @@ public class CopyListingFileStatus
     } else {
       aclEntries = null;
     }
+
+    int xAttrsSize = in.readInt();
+    if (xAttrsSize != NO_XATTRS) {
+      xAttrs = Maps.newHashMap();
+      for (int i = 0; i < xAttrsSize; ++i) {
+        final String name = WritableUtils.readString(in);
+        final int valueLen = in.readInt();
+        byte[] value = null;
+        if (valueLen > -1) {
+          value = new byte[valueLen];
+          if (valueLen > 0) {
+            in.readFully(value);
+          }
+        }
+        xAttrs.put(name, value);
+      }
+    } else {
+      xAttrs = null;
+    }
   }
 
   @Override
@@ -134,12 +197,13 @@ public class CopyListingFileStatus
       return false;
     }
     CopyListingFileStatus other = (CopyListingFileStatus)o;
-    return Objects.equal(aclEntries, other.aclEntries);
+    return Objects.equal(aclEntries, other.aclEntries) &&
+        Objects.equal(xAttrs, other.xAttrs);
   }
 
   @Override
   public int hashCode() {
-    return Objects.hashCode(super.hashCode(), aclEntries);
+    return Objects.hashCode(super.hashCode(), aclEntries, xAttrs);
   }
 
   @Override
@@ -147,6 +211,7 @@ public class CopyListingFileStatus
     StringBuilder sb = new StringBuilder(super.toString());
     sb.append('{');
     sb.append("aclEntries = " + aclEntries);
+    sb.append(", xAttrs = " + xAttrs);
     sb.append('}');
     return sb.toString();
   }
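The write()/readFields() changes above serialize the xattr map as a count followed by one name/length/bytes record per entry, with -1 sentinels for "no map" and "null value". The following self-contained sketch mirrors that layout; it uses DataOutputStream.writeUTF instead of Hadoop's WritableUtils.writeString, so it is format-analogous rather than byte-compatible with CopyListingFileStatus:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class XAttrWireFormatSketch {

      // Count, then per entry: name, value length (-1 for null), raw bytes.
      static void writeXAttrs(DataOutputStream out, Map<String, byte[]> xAttrs)
          throws IOException {
        if (xAttrs == null) {
          out.writeInt(-1);                      // NO_XATTRS sentinel
          return;
        }
        out.writeInt(xAttrs.size());
        for (Map.Entry<String, byte[]> e : xAttrs.entrySet()) {
          out.writeUTF(e.getKey());
          byte[] value = e.getValue();
          out.writeInt(value == null ? -1 : value.length);
          if (value != null && value.length > 0) {
            out.write(value);
          }
        }
      }

      static Map<String, byte[]> readXAttrs(DataInputStream in) throws IOException {
        int size = in.readInt();
        if (size == -1) {
          return null;
        }
        Map<String, byte[]> xAttrs = new LinkedHashMap<String, byte[]>();
        for (int i = 0; i < size; i++) {
          String name = in.readUTF();
          int len = in.readInt();
          byte[] value = null;
          if (len > -1) {
            value = new byte[len];
            in.readFully(value, 0, len);
          }
          xAttrs.put(name, value);
        }
        return xAttrs;
      }

      public static void main(String[] args) throws IOException {
        Map<String, byte[]> original = new LinkedHashMap<String, byte[]>();
        original.put("user.owner", "alice".getBytes("UTF-8"));
        original.put("user.empty", new byte[0]);

        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        writeXAttrs(new DataOutputStream(bytes), original);
        Map<String, byte[]> copy = readXAttrs(
            new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(copy.keySet());       // [user.owner, user.empty]
      }
    }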
Modified: hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java?rev=1601151&r1=1601150&r2=1601151&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java Sat Jun 7 16:29:10 2014
@@ -128,6 +128,9 @@ public class DistCp extends Configured i
     } catch (AclsNotSupportedException e) {
       LOG.error("ACLs not supported on at least one file system: ", e);
       return DistCpConstants.ACLS_NOT_SUPPORTED;
+    } catch (XAttrsNotSupportedException e) {
+      LOG.error("XAttrs not supported on at least one file system: ", e);
+      return DistCpConstants.XATTRS_NOT_SUPPORTED;
     } catch (Exception e) {
       LOG.error("Exception encountered ", e);
       return DistCpConstants.UNKNOWN_ERROR;
@@ -304,6 +307,9 @@ public class DistCp extends Configured i
     if (inputOptions.shouldPreserve(DistCpOptions.FileAttribute.ACL)) {
       DistCpUtils.checkFileSystemAclSupport(targetFS);
     }
+    if (inputOptions.shouldPreserve(DistCpOptions.FileAttribute.XATTR)) {
+      DistCpUtils.checkFileSystemXAttrSupport(targetFS);
+    }
     if (inputOptions.shouldAtomicCommit()) {
       Path workDir = inputOptions.getAtomicWorkPath();
       if (workDir == null) {

Modified: hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java?rev=1601151&r1=1601150&r2=1601151&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java Sat Jun 7 16:29:10 2014
@@ -117,6 +117,7 @@ public class DistCpConstants {
   public static final int INVALID_ARGUMENT = -1;
   public static final int DUPLICATE_INPUT = -2;
   public static final int ACLS_NOT_SUPPORTED = -3;
+  public static final int XATTRS_NOT_SUPPORTED = -4;
   public static final int UNKNOWN_ERROR = -999;
 
   /**
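With the new XATTRS_NOT_SUPPORTED constant, a driver can distinguish the XAttr failure mode from other errors by exit code. A hedged sketch, assuming the DistCp(Configuration, DistCpOptions) constructor available on this branch and placeholder cluster URIs:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.tools.DistCp;
    import org.apache.hadoop.tools.DistCpConstants;
    import org.apache.hadoop.util.ToolRunner;

    public class DistCpExitCodeSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Source and target URIs are placeholders; -px requests XAttr preservation.
        String[] distcpArgs = new String[] {
            "-px",
            "hdfs://source-nn:8020/data",
            "hdfs://target-nn:8020/data"};
        int rc = ToolRunner.run(conf, new DistCp(conf, null), distcpArgs);
        if (rc == DistCpConstants.XATTRS_NOT_SUPPORTED) {   // -4 after this change
          System.err.println("At least one file system does not support XAttrs");
        }
      }
    }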
If " + - "-p is specified with no , then preserves replication, block " + - "size, user, group, permission and checksum type.")), + new Option("p", true, "preserve status (rbugpcax)(replication, " + + "block-size, user, group, permission, checksum-type, ACL, XATTR). " + + "If -p is specified with no , then preserves replication, " + + "block size, user, group, permission and checksum type.")), /** * Update target location by copying only files that are missing Modified: hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java?rev=1601151&r1=1601150&r2=1601151&view=diff ============================================================================== --- hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java (original) +++ hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java Sat Jun 7 16:29:10 2014 @@ -66,7 +66,7 @@ public class DistCpOptions { private boolean targetPathExists = true; public static enum FileAttribute{ - REPLICATION, BLOCKSIZE, USER, GROUP, PERMISSION, CHECKSUMTYPE, ACL; + REPLICATION, BLOCKSIZE, USER, GROUP, PERMISSION, CHECKSUMTYPE, ACL, XATTR; public static FileAttribute getAttribute(char symbol) { for (FileAttribute attribute : values()) { Modified: hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java?rev=1601151&r1=1601150&r2=1601151&view=diff ============================================================================== --- hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java (original) +++ hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java Sat Jun 7 16:29:10 2014 @@ -23,7 +23,6 @@ import org.apache.commons.logging.LogFac import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.IOUtils; @@ -36,7 +35,6 @@ import org.apache.hadoop.security.Creden import com.google.common.annotations.VisibleForTesting; import java.io.*; -import java.util.List; import java.util.Stack; /** @@ -123,7 +121,7 @@ public class SimpleCopyListing extends C * the the source root is a directory, then the source root entry is not * written to the sequence file, because only the contents of the source * directory need to be copied in this case. - * See {@link org.apache.hadoop.tools.util.DistCpUtils.getRelativePath} for + * See {@link org.apache.hadoop.tools.util.DistCpUtils#getRelativePath} for * how relative path is computed. * See computeSourceRootPath method for how the root path of the source is * computed. 
Modified: hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java?rev=1601151&r1=1601150&r2=1601151&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java Sat Jun 7 16:29:10 2014
@@ -23,7 +23,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.IOUtils;
@@ -36,7 +35,6 @@ import org.apache.hadoop.security.Creden
 import com.google.common.annotations.VisibleForTesting;
 
 import java.io.*;
-import java.util.List;
 import java.util.Stack;
 
 /**
@@ -123,7 +121,7 @@ public class SimpleCopyListing extends C
    * the the source root is a directory, then the source root entry is not
    * written to the sequence file, because only the contents of the source
    * directory need to be copied in this case.
-   * See {@link org.apache.hadoop.tools.util.DistCpUtils.getRelativePath} for
+   * See {@link org.apache.hadoop.tools.util.DistCpUtils#getRelativePath} for
    * how relative path is computed.
    * See computeSourceRootPath method for how the root path of the source is
    * computed.
@@ -147,7 +145,8 @@ public class SimpleCopyListing extends C
     if (!explore || rootStatus.isDirectory()) {
       CopyListingFileStatus rootCopyListingStatus =
           DistCpUtils.toCopyListingFileStatus(sourceFS, rootStatus,
-              options.shouldPreserve(FileAttribute.ACL));
+              options.shouldPreserve(FileAttribute.ACL),
+              options.shouldPreserve(FileAttribute.XATTR));
       writeToFileListingRoot(fileListWriter, rootCopyListingStatus,
           sourcePathRoot, options);
     }
@@ -159,7 +158,8 @@ public class SimpleCopyListing extends C
         CopyListingFileStatus sourceCopyListingStatus =
             DistCpUtils.toCopyListingFileStatus(sourceFS, sourceStatus,
                 options.shouldPreserve(FileAttribute.ACL) &&
-                sourceStatus.isDirectory());
+                sourceStatus.isDirectory(), options.shouldPreserve(
+                FileAttribute.XATTR) && sourceStatus.isDirectory());
         writeToFileListing(fileListWriter, sourceCopyListingStatus,
             sourcePathRoot, options);
 
@@ -271,7 +271,8 @@ public class SimpleCopyListing extends C
             + sourceStatus.getPath() + " for copy.");
       CopyListingFileStatus childCopyListingStatus =
           DistCpUtils.toCopyListingFileStatus(sourceFS, child,
-              options.shouldPreserve(FileAttribute.ACL) && child.isDirectory(),
+              options.shouldPreserve(FileAttribute.ACL) && child.isDirectory(),
+              options.shouldPreserve(FileAttribute.XATTR) && child.isDirectory());
       writeToFileListing(fileListWriter, childCopyListingStatus,
           sourcePathRoot, options);
       if (isDirectoryAndNotEmpty(sourceFS, child)) {
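Note that each toCopyListingFileStatus call above requests ACLs/XAttrs only when the listed entry is a directory; file entries have their attributes handled later on the mapper side. A small usage sketch of the widened helper, with a placeholder path:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.tools.CopyListingFileStatus;
    import org.apache.hadoop.tools.util.DistCpUtils;

    public class ListingEntrySketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path source = new Path("hdfs://source-nn:8020/data/dir");  // placeholder
        FileSystem fs = source.getFileSystem(conf);
        FileStatus status = fs.getFileStatus(source);
        // Mirrors SimpleCopyListing after this change: ACLs/XAttrs go into the
        // listing entry only when the status describes a directory.
        boolean wantAcl = true, wantXAttr = true;
        CopyListingFileStatus entry = DistCpUtils.toCopyListingFileStatus(
            fs, status,
            wantAcl && status.isDirectory(),
            wantXAttr && status.isDirectory());
        System.out.println(entry);
      }
    }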
Modified: hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java?rev=1601151&r1=1601150&r2=1601151&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java Sat Jun 7 16:29:10 2014
@@ -213,7 +213,8 @@ public class CopyMapper extends Mapper

Modified: hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java?rev=1601151&r1=1601150&r2=1601151&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java Sat Jun 7 16:29:10 2014
+      Map<String, byte[]> srcXAttrs = srcFileStatus.getXAttrs();
+      Map<String, byte[]> targetXAttrs = getXAttrs(targetFS, path);
+      if (!srcXAttrs.equals(targetXAttrs)) {
+        Iterator<Entry<String, byte[]>> iter = srcXAttrs.entrySet().iterator();
+        while (iter.hasNext()) {
+          Entry<String, byte[]> entry = iter.next();
+          targetFS.setXAttr(path, entry.getKey(), entry.getValue());
+        }
+      }
+    }
     if (attributes.contains(FileAttribute.REPLICATION) && !targetFileStatus.isDirectory() &&
         srcFileStatus.getReplication() != targetFileStatus.getReplication()) {
@@ -247,19 +263,34 @@ public class DistCpUtils {
         .getEntries();
     return AclUtil.getAclFromPermAndEntries(fileStatus.getPermission(), entries);
   }
+
+  /**
+   * Returns a file's all xAttrs.
+   *
+   * @param fileSystem FileSystem containing the file
+   * @param path file path
+   * @return Map containing all xAttrs
+   * @throws IOException if there is an I/O error
+   */
+  public static Map<String, byte[]> getXAttrs(FileSystem fileSystem,
+      Path path) throws IOException {
+    return fileSystem.getXAttrs(path);
+  }
 
   /**
    * Converts a FileStatus to a CopyListingFileStatus. If preserving ACLs,
-   * populates the CopyListingFileStatus with the ACLs.
+   * populates the CopyListingFileStatus with the ACLs. If preserving XAttrs,
+   * populates the CopyListingFileStatus with the XAttrs.
    *
    * @param fileSystem FileSystem containing the file
    * @param fileStatus FileStatus of file
   * @param preserveAcls boolean true if preserving ACLs
+   * @param preserveXAttrs boolean true if preserving XAttrs
    * @throws IOException if there is an I/O error
    */
   public static CopyListingFileStatus toCopyListingFileStatus(
-      FileSystem fileSystem, FileStatus fileStatus, boolean preserveAcls)
-      throws IOException {
+      FileSystem fileSystem, FileStatus fileStatus, boolean preserveAcls,
+      boolean preserveXAttrs) throws IOException {
     CopyListingFileStatus copyListingFileStatus =
       new CopyListingFileStatus(fileStatus);
     if (preserveAcls) {
@@ -270,6 +301,10 @@ public class DistCpUtils {
       copyListingFileStatus.setAclEntries(aclEntries);
       }
     }
+    if (preserveXAttrs) {
+      Map<String, byte[]> xAttrs = fileSystem.getXAttrs(fileStatus.getPath());
+      copyListingFileStatus.setXAttrs(xAttrs);
+    }
     return copyListingFileStatus;
   }
@@ -314,6 +349,25 @@ public class DistCpUtils {
           + fs.getUri());
     }
   }
+
+  /**
+   * Determines if a file system supports XAttrs by running a getXAttrs request
+   * on the file system root. This method is used before distcp job submission
+   * to fail fast if the user requested preserving XAttrs, but the file system
+   * cannot support XAttrs.
+   *
+   * @param fs FileSystem to check
+   * @throws XAttrsNotSupportedException if fs does not support XAttrs
+   */
+  public static void checkFileSystemXAttrSupport(FileSystem fs)
+      throws XAttrsNotSupportedException {
+    try {
+      fs.getXAttrs(new Path(Path.SEPARATOR));
+    } catch (Exception e) {
+      throw new XAttrsNotSupportedException("XAttrs not supported for file system: "
+          + fs.getUri());
+    }
+  }
 
   /**
    * String utility to convert a number-of-bytes to human readable format.
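The preserve-side change reads every xattr from the source and re-applies it on the target when the two maps differ. A stand-alone sketch of that copy step using the same FileSystem.getXAttrs()/setXAttr() calls, with placeholder paths:

    import java.util.Map;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class CopyXAttrsSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path src = new Path("hdfs://source-nn:8020/data/file");   // placeholders
        Path dst = new Path("hdfs://target-nn:8020/data/file");
        FileSystem srcFs = src.getFileSystem(conf);
        FileSystem dstFs = dst.getFileSystem(conf);

        // Same idea as the DistCpUtils.preserve() change: read each source
        // xattr and re-apply it on the target if the two maps differ.
        Map<String, byte[]> srcXAttrs = srcFs.getXAttrs(src);
        Map<String, byte[]> dstXAttrs = dstFs.getXAttrs(dst);
        if (!srcXAttrs.equals(dstXAttrs)) {
          for (Map.Entry<String, byte[]> e : srcXAttrs.entrySet()) {
            dstFs.setXAttr(dst, e.getKey(), e.getValue());
          }
        }
      }
    }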
Modified: hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java?rev=1601151&r1=1601150&r2=1601151&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java Sat Jun 7 16:29:10 2014
@@ -414,6 +414,7 @@ public class TestOptionsParser {
     Assert.assertTrue(options.shouldPreserve(FileAttribute.GROUP));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.CHECKSUMTYPE));
     Assert.assertFalse(options.shouldPreserve(FileAttribute.ACL));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.XATTR));
 
     options = OptionsParser.parse(new String[] {
         "-p",
@@ -426,6 +427,7 @@ public class TestOptionsParser {
     Assert.assertTrue(options.shouldPreserve(FileAttribute.GROUP));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.CHECKSUMTYPE));
     Assert.assertFalse(options.shouldPreserve(FileAttribute.ACL));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.XATTR));
 
     options = OptionsParser.parse(new String[] {
         "-pbr",
@@ -439,6 +441,7 @@ public class TestOptionsParser {
     Assert.assertFalse(options.shouldPreserve(FileAttribute.GROUP));
     Assert.assertFalse(options.shouldPreserve(FileAttribute.CHECKSUMTYPE));
     Assert.assertFalse(options.shouldPreserve(FileAttribute.ACL));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.XATTR));
 
     options = OptionsParser.parse(new String[] {
         "-pbrgup",
@@ -452,9 +455,10 @@ public class TestOptionsParser {
     Assert.assertTrue(options.shouldPreserve(FileAttribute.GROUP));
     Assert.assertFalse(options.shouldPreserve(FileAttribute.CHECKSUMTYPE));
     Assert.assertFalse(options.shouldPreserve(FileAttribute.ACL));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.XATTR));
 
     options = OptionsParser.parse(new String[] {
-        "-pbrgupca",
+        "-pbrgupcax",
         "-f",
         "hdfs://localhost:8020/source/first",
         "hdfs://localhost:8020/target/"});
@@ -465,6 +469,7 @@ public class TestOptionsParser {
     Assert.assertTrue(options.shouldPreserve(FileAttribute.GROUP));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.CHECKSUMTYPE));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.ACL));
+    Assert.assertTrue(options.shouldPreserve(FileAttribute.XATTR));
 
     options = OptionsParser.parse(new String[] {
         "-pc",
@@ -478,6 +483,7 @@ public class TestOptionsParser {
     Assert.assertFalse(options.shouldPreserve(FileAttribute.GROUP));
     Assert.assertTrue(options.shouldPreserve(FileAttribute.CHECKSUMTYPE));
     Assert.assertFalse(options.shouldPreserve(FileAttribute.ACL));
+    Assert.assertFalse(options.shouldPreserve(FileAttribute.XATTR));
 
     options = OptionsParser.parse(new String[] {
         "-p",

Modified: hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java?rev=1601151&r1=1601150&r2=1601151&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java Sat Jun 7 16:29:10 2014
@@ -450,6 +450,7 @@ public class TestCopyMapper {
           EnumSet<DistCpOptions.FileAttribute> preserveStatus = EnumSet.allOf(DistCpOptions.FileAttribute.class);
           preserveStatus.remove(DistCpOptions.FileAttribute.ACL);
+          preserveStatus.remove(DistCpOptions.FileAttribute.XATTR);
 
           context.getConfiguration().set(DistCpConstants.CONF_LABEL_PRESERVE_STATUS,
             DistCpUtils.packAttributes(preserveStatus));
@@ -588,6 +589,7 @@ public class TestCopyMapper {
           EnumSet<DistCpOptions.FileAttribute> preserveStatus = EnumSet.allOf(DistCpOptions.FileAttribute.class);
           preserveStatus.remove(DistCpOptions.FileAttribute.ACL);
+          preserveStatus.remove(DistCpOptions.FileAttribute.XATTR);
 
           context.getConfiguration().set(DistCpConstants.CONF_LABEL_PRESERVE_STATUS,
             DistCpUtils.packAttributes(preserveStatus));
@@ -663,6 +665,7 @@ public class TestCopyMapper {
       EnumSet<DistCpOptions.FileAttribute> preserveStatus = EnumSet.allOf(DistCpOptions.FileAttribute.class);
       preserveStatus.remove(DistCpOptions.FileAttribute.ACL);
+      preserveStatus.remove(DistCpOptions.FileAttribute.XATTR);
 
       final Mapper<Text, CopyListingFileStatus, Text, Text>.Context context = stubContext.getContext();
Modified: hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java?rev=1601151&r1=1601150&r2=1601151&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java Sat Jun 7 16:29:10 2014
@@ -927,7 +927,7 @@ public class SwiftNativeFileSystemStore
     }
 
     if (LOG.isDebugEnabled()) {
-      SwiftUtils.debug(LOG, SwiftUtils.fileStatsToString(statuses, "\n"));
+      SwiftUtils.debug(LOG, "%s", SwiftUtils.fileStatsToString(statuses, "\n"));
    }
 
     if (filecount == 1 && swiftPath.equals(statuses[0].getPath())) {
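The Swift change passes a constant "%s" format and the file listing as an argument, instead of using the listing itself as the format string (SwiftUtils.debug presumably formats its message via String.format). A stand-alone illustration of why that matters when the data contains '%':

    import java.util.UnknownFormatConversionException;

    public class FormatStringPitfallSketch {
      public static void main(String[] args) {
        // A file listing that happens to contain a '%' character.
        String listing = "report-100%.csv  1024 bytes";

        // Passing data as the format string (the pattern the change removes)
        // lets format specifiers in the data blow up or be misinterpreted.
        try {
          System.out.println(String.format(listing));
        } catch (UnknownFormatConversionException e) {
          System.out.println("broken: " + e);
        }

        // Passing a constant "%s" format and the data as an argument is safe.
        System.out.println(String.format("%s", listing));
      }
    }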
Modified: hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java?rev=1601151&r1=1601150&r2=1601151&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java (original)
+++ hadoop/common/branches/HDFS-5442/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java Sat Jun 7 16:29:10 2014
@@ -61,10 +61,13 @@ import org.apache.hadoop.yarn.server.res
 import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore;
 import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
 import org.apache.hadoop.yarn.server.resourcemanager.rmnode.UpdatedContainerInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Allocation;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppReport;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplicationAttempt;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNode;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNodeReport;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent;
@@ -89,11 +92,13 @@ import com.codahale.metrics.MetricRegist
 import com.codahale.metrics.SlidingWindowReservoir;
 import com.codahale.metrics.Timer;
 
-public class ResourceSchedulerWrapper implements
-    SchedulerWrapper,ResourceScheduler,Configurable {
+final public class ResourceSchedulerWrapper
+    extends AbstractYarnScheduler<SchedulerApplicationAttempt, SchedulerNode>
+    implements SchedulerWrapper, ResourceScheduler, Configurable {
   private static final String EOL = System.getProperty("line.separator");
   private static final int SAMPLING_SIZE = 60;
   private ScheduledExecutorService pool;
+  private RMContext rmContext;
   // counters for scheduler allocate/handle operations
   private Counter schedulerAllocateCounter;
   private Counter schedulerHandleCounter;
@@ -146,6 +151,7 @@ public class ResourceSchedulerWrapper im
   public final Logger LOG = Logger.getLogger(ResourceSchedulerWrapper.class);
 
   public ResourceSchedulerWrapper() {
+    super(ResourceSchedulerWrapper.class.getName());
     samplerLock = new ReentrantLock();
     queueLock = new ReentrantLock();
   }
@@ -794,10 +800,39 @@ public class ResourceSchedulerWrapper im
     return conf;
   }
 
+  @SuppressWarnings("unchecked")
   @Override
-  public void reinitialize(Configuration entries, RMContext rmContext)
-      throws IOException {
-    scheduler.reinitialize(entries, rmContext);
+  public void serviceInit(Configuration conf) throws Exception {
+    ((AbstractYarnScheduler<SchedulerApplicationAttempt, SchedulerNode>)
+        scheduler).init(conf);
+    super.serviceInit(conf);
+  }
+
+  @SuppressWarnings("unchecked")
+  @Override
+  public void serviceStart() throws Exception {
+    ((AbstractYarnScheduler<SchedulerApplicationAttempt, SchedulerNode>)
+        scheduler).start();
+    super.serviceStart();
+  }
+
+  @SuppressWarnings("unchecked")
+  @Override
+  public void serviceStop() throws Exception {
+    ((AbstractYarnScheduler<SchedulerApplicationAttempt, SchedulerNode>)
+        scheduler).stop();
+    super.serviceStop();
+  }
+
+  @Override
+  public void setRMContext(RMContext rmContext) {
+    scheduler.setRMContext(rmContext);
+  }
+
+  @Override
+  public void reinitialize(Configuration conf, RMContext rmContext)
+      throws IOException {
+    scheduler.reinitialize(conf, rmContext);
   }
 
   @Override
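The ResourceSchedulerWrapper change makes the SLS wrapper extend the same AbstractYarnScheduler base as real schedulers and forward init/start/stop to the scheduler it wraps, instead of only delegating reinitialize(). A minimal, self-contained model of that lifecycle-delegation pattern (not YARN code; all names here are illustrative):

    // Simplified model of the pattern: the wrapper shares the scheduler base
    // class and forwards each lifecycle step to its delegate before running
    // its own bookkeeping.
    public class LifecycleDelegationSketch {

      abstract static class AbstractScheduler {
        private final String name;
        AbstractScheduler(String name) { this.name = name; }
        void init(String conf) { System.out.println(name + " init(" + conf + ")"); }
        void start()           { System.out.println(name + " start()"); }
        void stop()            { System.out.println(name + " stop()"); }
      }

      static class FifoScheduler extends AbstractScheduler {
        FifoScheduler() { super("FifoScheduler"); }
      }

      static class SchedulerWrapper extends AbstractScheduler {
        private final AbstractScheduler delegate;

        SchedulerWrapper(AbstractScheduler delegate) {
          super(SchedulerWrapper.class.getSimpleName());
          this.delegate = delegate;
        }

        // Forward each lifecycle step to the wrapped scheduler, then run the
        // wrapper's own setup/teardown (metrics, sampling threads, ...).
        @Override void init(String conf) { delegate.init(conf); super.init(conf); }
        @Override void start()           { delegate.start();    super.start(); }
        @Override void stop()            { delegate.stop();     super.stop(); }
      }

      public static void main(String[] args) {
        SchedulerWrapper wrapper = new SchedulerWrapper(new FifoScheduler());
        wrapper.init("sls-runner.xml");   // hypothetical config name
        wrapper.start();
        wrapper.stop();
      }
    }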