From: cmccabe@apache.org
To: common-commits@hadoop.apache.org
Subject: svn commit: r1619012 [7/14] - in /hadoop/common/branches/HADOOP-10388/hadoop-common-project: ./ hadoop-auth/ hadoop-auth/dev-support/ hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/ hadoop-auth/src/main/java/org/apache/hadoo...
Date: Tue, 19 Aug 2014 23:50:11 -0000
Message-Id: <20140819235024.C89882388C64@eris.apache.org>

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java Tue Aug 19 23:49:39 2014 @@ -42,16 +42,22 @@ public class Count extends FsCommand { factory.addClass(Count.class, "-count"); } + private static final String OPTION_QUOTA = "q"; + private static final String OPTION_HUMAN = "h"; + public static final String NAME = "count"; - public static final String USAGE = "[-q] ..."; + public static final String USAGE = + "[-" + OPTION_QUOTA + "] [-" + OPTION_HUMAN + "] ..."; public static final String DESCRIPTION = "Count the number of directories, files and bytes under the paths\n" + "that match the specified file pattern.
The output columns are:\n" + "DIR_COUNT FILE_COUNT CONTENT_SIZE FILE_NAME or\n" + "QUOTA REMAINING_QUOTA SPACE_QUOTA REMAINING_SPACE_QUOTA \n" + - " DIR_COUNT FILE_COUNT CONTENT_SIZE FILE_NAME"; + " DIR_COUNT FILE_COUNT CONTENT_SIZE FILE_NAME\n" + + "The -h option shows file sizes in human readable format."; private boolean showQuotas; + private boolean humanReadable; /** Constructor */ public Count() {} @@ -70,17 +76,37 @@ public class Count extends FsCommand { @Override protected void processOptions(LinkedList args) { - CommandFormat cf = new CommandFormat(1, Integer.MAX_VALUE, "q"); + CommandFormat cf = new CommandFormat(1, Integer.MAX_VALUE, + OPTION_QUOTA, OPTION_HUMAN); cf.parse(args); if (args.isEmpty()) { // default path is the current working directory args.add("."); } - showQuotas = cf.getOpt("q"); + showQuotas = cf.getOpt(OPTION_QUOTA); + humanReadable = cf.getOpt(OPTION_HUMAN); } @Override protected void processPath(PathData src) throws IOException { ContentSummary summary = src.fs.getContentSummary(src.path); - out.println(summary.toString(showQuotas) + src); + out.println(summary.toString(showQuotas, isHumanReadable()) + src); + } + + /** + * Should quotas get shown as part of the report? + * @return if quotas should be shown then true otherwise false + */ + @InterfaceAudience.Private + boolean isShowQuotas() { + return showQuotas; + } + + /** + * Should sizes be shown in human readable format rather than bytes? + * @return true if human readable format + */ + @InterfaceAudience.Private + boolean isHumanReadable() { + return humanReadable; } }
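[Editor's sketch, not part of the commit. The Count change above adds an "-h" option: CommandFormat parses the flag and ContentSummary.toString(showQuotas, humanReadable) renders the sizes. The self-contained Java below illustrates that flag-parsing plus human-readable formatting pattern; HumanCountDemo, parseFlags and humanReadable are invented names, and only the option letters "q" and "h" come from the diff.]

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class HumanCountDemo {
  // Strip leading single-letter flags ("-q", "-h") from the argument list,
  // roughly what Count delegates to CommandFormat.parse(args).
  static List<String> parseFlags(List<String> args, List<String> flags) {
    List<String> rest = new ArrayList<>(args);
    while (!rest.isEmpty() && rest.get(0).length() > 1
        && rest.get(0).startsWith("-")) {
      flags.add(rest.remove(0).substring(1));
    }
    return rest;
  }

  // Format a byte count the way an "-h" option typically renders it.
  static String humanReadable(long bytes) {
    String[] units = { "", "K", "M", "G", "T", "P" };
    double value = bytes;
    int unit = 0;
    while (value >= 1024 && unit < units.length - 1) {
      value /= 1024;
      unit++;
    }
    return unit == 0 ? String.valueOf(bytes)
                     : String.format("%.1f %s", value, units[unit]);
  }

  public static void main(String[] argv) {
    List<String> flags = new ArrayList<>();
    List<String> paths = parseFlags(Arrays.asList("-q", "-h", "/user/foo"), flags);
    boolean showQuotas = flags.contains("q"); // cf.getOpt(OPTION_QUOTA)
    boolean human = flags.contains("h");      // cf.getOpt(OPTION_HUMAN)
    long contentSize = 1536L * 1024 * 1024;
    System.out.println(paths.get(0) + " quotas=" + showQuotas + " size="
        + (human ? humanReadable(contentSize) : Long.toString(contentSize)));
  }
}

One design point the diff itself makes: keeping the option letters in OPTION_QUOTA/OPTION_HUMAN constants lets the USAGE string and the parser share a single definition.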
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java Tue Aug 19 23:49:39 2014 @@ -51,13 +51,13 @@ class Delete { public static final String NAME = "rm"; public static final String USAGE = "[-f] [-r|-R] [-skipTrash] ..."; public static final String DESCRIPTION = - "Delete all files that match the specified file pattern.\n" + + "Delete all files that match the specified file pattern. " + "Equivalent to the Unix command \"rm \"\n" + - "-skipTrash option bypasses trash, if enabled, and immediately\n" + + "-skipTrash: option bypasses trash, if enabled, and immediately " + "deletes \n" + - " -f If the file does not exist, do not display a diagnostic\n" + - " message or modify the exit status to reflect an error.\n" + - " -[rR] Recursively deletes directories"; + "-f: If the file does not exist, do not display a diagnostic " + + "message or modify the exit status to reflect an error.\n" + + "-[rR]: Recursively deletes directories"; private boolean skipTrash = false; private boolean deleteDirs = false; @@ -118,7 +118,11 @@ class Delete { } catch(FileNotFoundException fnfe) { throw fnfe; } catch (IOException ioe) { - throw new IOException(ioe.getMessage() + ". Consider using -skipTrash option", ioe); + String msg = ioe.getMessage(); + if (ioe.getCause() != null) { + msg += ": " + ioe.getCause().getMessage(); + } + throw new IOException(msg + ". Consider using -skipTrash option", ioe); } } return success; @@ -147,7 +151,7 @@ class Delete { public static final String USAGE = "[--ignore-fail-on-non-empty] ..."; public static final String DESCRIPTION = - "Removes the directory entry specified by each directory argument,\n" + + "Removes the directory entry specified by each directory argument, " + "provided it is empty.\n"; private boolean ignoreNonEmpty = false; Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java Tue Aug 19 23:49:39 2014 @@ -75,7 +75,7 @@ class Display extends FsCommand { public static final String NAME = "cat"; public static final String USAGE = "[-ignoreCrc] ..."; public static final String DESCRIPTION = - "Fetch all files that match the file pattern \n" + + "Fetch all files that match the file pattern " + "and display their content on stdout.\n"; private boolean verifyChecksum = true; @@ -170,11 +170,11 @@ class Display extends FsCommand { public static final String NAME = "checksum"; public static final String USAGE = " ..."; public static final String DESCRIPTION = - "Dump checksum information for files that match the file\n" + - "pattern to stdout. Note that this requires a round-trip\n" + - "to a datanode storing each block of the file, and thus is not\n" + - "efficient to run on a large number of files. The checksum of a\n" + - "file depends on its content, block size and the checksum\n" + + "Dump checksum information for files that match the file " + + "pattern to stdout. Note that this requires a round-trip " + + "to a datanode storing each block of the file, and thus is not " + + "efficient to run on a large number of files.
The checksum of a " + + "file depends on its content, block size and the checksum " + "algorithm and parameters used for creating the file."; @Override Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsCommand.java Tue Aug 19 23:49:39 2014 @@ -59,6 +59,7 @@ abstract public class FsCommand extends factory.registerCommands(Test.class); factory.registerCommands(Touch.class); factory.registerCommands(SnapshotCommands.class); + factory.registerCommands(XAttrCommands.class); } protected FsCommand() {} Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/FsUsage.java Tue Aug 19 23:49:39 2014 @@ -57,12 +57,12 @@ class FsUsage extends FsCommand { public static final String NAME = "df"; public static final String USAGE = "[-h] [ ...]"; public static final String DESCRIPTION = - "Shows the capacity, free and used space of the filesystem.\n"+ - "If the filesystem has multiple partitions, and no path to a\n" + - "particular partition is specified, then the status of the root\n" + + "Shows the capacity, free and used space of the filesystem. "+ + "If the filesystem has multiple partitions, and no path to a " + + "particular partition is specified, then the status of the root " + "partitions will be shown.\n" + - " -h Formats the sizes of files in a human-readable fashion\n" + - " rather than a number of bytes.\n\n"; + "-h: Formats the sizes of files in a human-readable fashion " + + "rather than a number of bytes."; @Override protected void processOptions(LinkedList args) @@ -108,14 +108,14 @@ class FsUsage extends FsCommand { public static final String NAME = "du"; public static final String USAGE = "[-s] [-h] ..."; public static final String DESCRIPTION = - "Show the amount of space, in bytes, used by the files that\n" + + "Show the amount of space, in bytes, used by the files that " + "match the specified file pattern. 
The following flags are optional:\n" + - " -s Rather than showing the size of each individual file that\n" + - " matches the pattern, shows the total (summary) size.\n" + - " -h Formats the sizes of files in a human-readable fashion\n" + - " rather than a number of bytes.\n\n" + - "Note that, even without the -s option, this only shows size summaries\n" + - "one level deep into a directory.\n" + + "-s: Rather than showing the size of each individual file that" + + " matches the pattern, shows the total (summary) size.\n" + + "-h: Formats the sizes of files in a human-readable fashion" + + " rather than a number of bytes.\n\n" + + "Note that, even without the -s option, this only shows size summaries " + + "one level deep into a directory.\n\n" + "The output is in the form \n" + "\tsize\tname(full path)\n"; Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java Tue Aug 19 23:49:39 2014 @@ -19,22 +19,15 @@ package org.apache.hadoop.fs.shell; import java.io.IOException; -import java.net.URI; import java.text.SimpleDateFormat; import java.util.Date; import java.util.LinkedList; -import java.util.Set; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.ipc.RemoteException; -import org.apache.hadoop.ipc.RpcNoSuchMethodException; - -import com.google.common.collect.Sets; /** * Get a listing of all files in that match the file patterns. @@ -51,16 +44,16 @@ class Ls extends FsCommand { public static final String NAME = "ls"; public static final String USAGE = "[-d] [-h] [-R] [ ...]"; public static final String DESCRIPTION = - "List the contents that match the specified file pattern. If\n" + - "path is not specified, the contents of /user/\n" + - "will be listed. Directory entries are of the form \n" + - "\tpermissions - userid groupid size_of_directory(in bytes) modification_date(yyyy-MM-dd HH:mm) directoryName \n" + - "and file entries are of the form \n" + - "\tpermissions number_of_replicas userid groupid size_of_file(in bytes) modification_date(yyyy-MM-dd HH:mm) fileName \n" + - " -d Directories are listed as plain files.\n" + - " -h Formats the sizes of files in a human-readable fashion\n" + - " rather than a number of bytes.\n" + - " -R Recursively list the contents of directories."; + "List the contents that match the specified file pattern. If " + + "path is not specified, the contents of /user/ " + + "will be listed. 
Directory entries are of the form:\n" + + "\tpermissions - userId groupId sizeOfDirectory(in bytes) modificationDate(yyyy-MM-dd HH:mm) directoryName\n\n" + + "and file entries are of the form:\n" + + "\tpermissions numberOfReplicas userId groupId sizeOfFile(in bytes) modificationDate(yyyy-MM-dd HH:mm) fileName\n" + + "-d: Directories are listed as plain files.\n" + + "-h: Formats the sizes of files in a human-readable fashion " + + "rather than a number of bytes.\n" + + "-R: Recursively list the contents of directories."; @@ -72,7 +65,6 @@ class Ls extends FsCommand { protected boolean dirRecurse; protected boolean humanReadable = false; - private Set aclNotSupportedFsSet = Sets.newHashSet(); protected String formatSize(long size) { return humanReadable @@ -116,7 +108,7 @@ class Ls extends FsCommand { FileStatus stat = item.stat; String line = String.format(lineFormat, (stat.isDirectory() ? "d" : "-"), - stat.getPermission() + (hasAcl(item) ? "+" : " "), + stat.getPermission() + (stat.getPermission().getAclBit() ? "+" : " "), (stat.isFile() ? stat.getReplication() : "-"), stat.getOwner(), stat.getGroup(), @@ -153,49 +145,6 @@ class Ls extends FsCommand { lineFormat = fmt.toString(); } - /** - * Calls getAclStatus to determine if the given item has an ACL. For - * compatibility, this method traps errors caused by the RPC method missing - * from the server side. This would happen if the client was connected to an - * old NameNode that didn't have the ACL APIs. This method also traps the - * case of the client-side FileSystem not implementing the ACL APIs. - * FileSystem instances that do not support ACLs are remembered. This - * prevents the client from sending multiple failing RPC calls during a - * recursive ls. - * - * @param item PathData item to check - * @return boolean true if item has an ACL - * @throws IOException if there is a failure - */ - private boolean hasAcl(PathData item) throws IOException { - FileSystem fs = item.fs; - if (aclNotSupportedFsSet.contains(fs.getUri())) { - // This FileSystem failed to run the ACL API in an earlier iteration. - return false; - } - try { - return !fs.getAclStatus(item.path).getEntries().isEmpty(); - } catch (RemoteException e) { - // If this is a RpcNoSuchMethodException, then the client is connected to - // an older NameNode that doesn't support ACLs. Keep going. - IOException e2 = e.unwrapRemoteException(RpcNoSuchMethodException.class); - if (!(e2 instanceof RpcNoSuchMethodException)) { - throw e; - } - } catch (IOException e) { - // The NameNode supports ACLs, but they are not enabled. Keep going. - String message = e.getMessage(); - if (message != null && !message.contains("ACLs has been disabled")) { - throw e; - } - } catch (UnsupportedOperationException e) { - // The underlying FileSystem doesn't implement ACLs. Keep going. - } - // Remember that this FileSystem cannot support ACLs. - aclNotSupportedFsSet.add(fs.getUri()); - return false; - } - private int maxLength(int n, Object value) { return Math.max(n, (value != null) ? 
String.valueOf(value).length() : 0); } Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Mkdir.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Mkdir.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Mkdir.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Mkdir.java Tue Aug 19 23:49:39 2014 @@ -44,7 +44,7 @@ class Mkdir extends FsCommand { public static final String USAGE = "[-p] ..."; public static final String DESCRIPTION = "Create a directory in specified location.\n" + - " -p Do not fail if the directory already exists"; + "-p: Do not fail if the directory already exists"; private boolean createParents; Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/MoveCommands.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/MoveCommands.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/MoveCommands.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/MoveCommands.java Tue Aug 19 23:49:39 2014 @@ -45,7 +45,7 @@ class MoveCommands { public static final String NAME = "moveFromLocal"; public static final String USAGE = " ... "; public static final String DESCRIPTION = - "Same as -put, except that the source is\n" + + "Same as -put, except that the source is " + "deleted after it's copied."; @Override @@ -87,8 +87,8 @@ class MoveCommands { public static final String NAME = "mv"; public static final String USAGE = " ... "; public static final String DESCRIPTION = - "Move files that match the specified file pattern \n" + - "to a destination . When moving multiple files, the\n" + + "Move files that match the specified file pattern " + + "to a destination . When moving multiple files, the " + "destination must be a directory."; @Override @@ -104,6 +104,9 @@ class MoveCommands { throw new PathIOException(src.toString(), "Does not match target filesystem"); } + if (target.exists) { + throw new PathExistsException(target.toString()); + } if (!target.fs.rename(src.path, target.path)) { // we have no way to know the actual error... throw new PathIOException(src.toString());
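[Editor's sketch, not part of the commit. The MoveCommands hunk above makes "mv" throw PathExistsException up front rather than attempting a rename onto an existing target, where, as the diff's own comment notes, there is no way to know the actual error if rename() fails. A standalone java.nio version of that check-before-rename shape; SafeRenameDemo and safeRename are invented names.]

import java.io.IOException;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class SafeRenameDemo {
  // Fail fast if the destination exists, mirroring the new
  // "if (target.exists) throw new PathExistsException" guard.
  static void safeRename(Path src, Path dst) throws IOException {
    if (Files.exists(dst)) {
      throw new FileAlreadyExistsException(dst.toString()); // ~ PathExistsException
    }
    Files.move(src, dst); // ~ target.fs.rename(src.path, target.path)
  }

  public static void main(String[] args) throws IOException {
    Path src = Files.createTempFile("mv-demo", ".src");
    Path dst = Paths.get(src.toString() + ".moved");
    safeRename(src, dst);
    System.out.println("moved to " + dst);
    Files.delete(dst);
  }
}

Checking before the rename keeps the failure explicit instead of depending on a boolean result whose cause is unknowable.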
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/SetReplication.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/SetReplication.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/SetReplication.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/SetReplication.java Tue Aug 19 23:49:39 2014 @@ -41,12 +41,12 @@ class SetReplication extends FsCommand { public static final String NAME = "setrep"; public static final String USAGE = "[-R] [-w] ..."; public static final String DESCRIPTION = - "Set the replication level of a file. If is a directory\n" + - "then the command recursively changes the replication factor of\n" + + "Set the replication level of a file. If is a directory " + + "then the command recursively changes the replication factor of " + "all files under the directory tree rooted at .\n" + - "The -w flag requests that the command wait for the replication\n" + + "-w: It requests that the command waits for the replication " + "to complete. This can potentially take a very long time.\n" + - "The -R flag is accepted for backwards compatibility. It has no effect."; + "-R: It is accepted for backwards compatibility. It has no effect."; protected short newRep = 0; protected List waitList = new LinkedList(); Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Stat.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Stat.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Stat.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Stat.java Tue Aug 19 23:49:39 2014 @@ -51,8 +51,8 @@ class Stat extends FsCommand { public static final String NAME = "stat"; public static final String USAGE = "[format] ..."; public static final String DESCRIPTION = - "Print statistics about the file/directory at \n" + - "in the specified format. Format accepts filesize in blocks (%b), group name of owner(%g),\n" + + "Print statistics about the file/directory at " + + "in the specified format.
Format accepts filesize in blocks (%b), group name of owner(%g), " + "filename (%n), block size (%o), replication (%r), user name of owner(%u), modification date (%y, %Y)\n"; protected static final SimpleDateFormat timeFmt; Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Tail.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Tail.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Tail.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Tail.java Tue Aug 19 23:49:39 2014 @@ -43,7 +43,7 @@ class Tail extends FsCommand { public static final String USAGE = "[-f] "; public static final String DESCRIPTION = "Show the last 1KB of the file.\n" + - "\t\tThe -f option shows appended data as the file grows.\n"; + "-f: Shows appended data as the file grows.\n"; private long startingOffset = -1024; private boolean follow = false; Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java Tue Aug 19 23:49:39 2014 @@ -43,8 +43,7 @@ class Test extends FsCommand { " -e return 0 if exists.\n" + " -f return 0 if is a file.\n" + " -s return 0 if file is greater than zero bytes in size.\n" + - " -z return 0 if file is zero bytes in size.\n" + - "else, return 1."; + " -z return 0 if file is zero bytes in size, else return 1."; private char flag; Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Touchz.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Touchz.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Touchz.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Touchz.java Tue Aug 19 23:49:39 2014 @@ -47,8 +47,8 @@ class Touch extends FsCommand { public static final String NAME = "touchz"; public static final String USAGE = " ..."; public static final String DESCRIPTION = - "Creates a file of zero length\n" + - "at with current time as the timestamp of that .\n" + + "Creates a file of zero length " + + "at with current time as the timestamp of that . 
" + "An error is returned if the file exists with non-zero length\n"; @Override Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java Tue Aug 19 23:49:39 2014 @@ -21,6 +21,7 @@ import java.io.IOException; import java.net.URI; import java.util.EnumSet; import java.util.List; +import java.util.Map; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -37,9 +38,12 @@ import org.apache.hadoop.fs.FilterFileSy import org.apache.hadoop.fs.FsServerDefaults; import org.apache.hadoop.fs.FsStatus; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.XAttrSetFlag; import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.AclStatus; +import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.util.Progressable; /** @@ -221,6 +225,12 @@ class ChRootedFileSystem extends FilterF } @Override + public void access(Path path, FsAction mode) throws AccessControlException, + FileNotFoundException, IOException { + super.access(fullPath(path), mode); + } + + @Override public FsStatus getStatus(Path p) throws IOException { return super.getStatus(fullPath(p)); } @@ -314,6 +324,38 @@ class ChRootedFileSystem extends FilterF } @Override + public void setXAttr(Path path, String name, byte[] value, + EnumSet flag) throws IOException { + super.setXAttr(fullPath(path), name, value, flag); + } + + @Override + public byte[] getXAttr(Path path, String name) throws IOException { + return super.getXAttr(fullPath(path), name); + } + + @Override + public Map getXAttrs(Path path) throws IOException { + return super.getXAttrs(fullPath(path)); + } + + @Override + public Map getXAttrs(Path path, List names) + throws IOException { + return super.getXAttrs(fullPath(path), names); + } + + @Override + public List listXAttrs(Path path) throws IOException { + return super.listXAttrs(fullPath(path)); + } + + @Override + public void removeXAttr(Path path, String name) throws IOException { + super.removeXAttr(fullPath(path), name); + } + + @Override public Path resolvePath(final Path p) throws IOException { return super.resolvePath(fullPath(p)); } Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java (original) +++ 
hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java Tue Aug 19 23:49:39 2014 @@ -22,6 +22,7 @@ import java.net.URI; import java.net.URISyntaxException; import java.util.EnumSet; import java.util.List; +import java.util.Map; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -37,7 +38,12 @@ import org.apache.hadoop.fs.FsStatus; import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.UnresolvedLinkException; +import org.apache.hadoop.fs.XAttrSetFlag; +import org.apache.hadoop.fs.permission.AclEntry; +import org.apache.hadoop.fs.permission.AclStatus; +import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.Progressable; @@ -196,6 +202,11 @@ class ChRootedFs extends AbstractFileSys return myFs.getFileStatus(fullPath(f)); } + public void access(Path path, FsAction mode) throws AccessControlException, + FileNotFoundException, UnresolvedLinkException, IOException { + myFs.access(fullPath(path), mode); + } + @Override public FileStatus getFileLinkStatus(final Path f) throws IOException, UnresolvedLinkException { @@ -280,6 +291,70 @@ class ChRootedFs extends AbstractFileSys } @Override + public void modifyAclEntries(Path path, List aclSpec) + throws IOException { + myFs.modifyAclEntries(fullPath(path), aclSpec); + } + + @Override + public void removeAclEntries(Path path, List aclSpec) + throws IOException { + myFs.removeAclEntries(fullPath(path), aclSpec); + } + + @Override + public void removeDefaultAcl(Path path) throws IOException { + myFs.removeDefaultAcl(fullPath(path)); + } + + @Override + public void removeAcl(Path path) throws IOException { + myFs.removeAcl(fullPath(path)); + } + + @Override + public void setAcl(Path path, List aclSpec) throws IOException { + myFs.setAcl(fullPath(path), aclSpec); + } + + @Override + public AclStatus getAclStatus(Path path) throws IOException { + return myFs.getAclStatus(fullPath(path)); + } + + @Override + public void setXAttr(Path path, String name, byte[] value, + EnumSet flag) throws IOException { + myFs.setXAttr(fullPath(path), name, value, flag); + } + + @Override + public byte[] getXAttr(Path path, String name) throws IOException { + return myFs.getXAttr(fullPath(path), name); + } + + @Override + public Map getXAttrs(Path path) throws IOException { + return myFs.getXAttrs(fullPath(path)); + } + + @Override + public Map getXAttrs(Path path, List names) + throws IOException { + return myFs.getXAttrs(fullPath(path), names); + } + + @Override + public List listXAttrs(Path path) throws IOException { + return myFs.listXAttrs(fullPath(path)); + } + + @Override + public void removeXAttr(Path path, String name) throws IOException { + myFs.removeXAttr(fullPath(path), name); + } + + @Override public void setVerifyChecksum(final boolean verifyChecksum) throws IOException, UnresolvedLinkException { myFs.setVerifyChecksum(verifyChecksum); Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java Tue Aug 19 23:49:39 2014 @@ -27,6 +27,7 @@ import java.util.Arrays; import java.util.EnumSet; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.Map.Entry; @@ -46,8 +47,11 @@ import org.apache.hadoop.fs.FsConstants; import org.apache.hadoop.fs.FsServerDefaults; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.UnsupportedFileSystemException; +import org.apache.hadoop.fs.XAttrSetFlag; import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.AclStatus; +import org.apache.hadoop.fs.permission.AclUtil; +import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.viewfs.InodeTree.INode; import org.apache.hadoop.fs.viewfs.InodeTree.INodeLink; @@ -356,7 +360,14 @@ public class ViewFileSystem extends File return new ViewFsFileStatus(status, this.makeQualified(f)); } - + @Override + public void access(Path path, FsAction mode) throws AccessControlException, + FileNotFoundException, IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + res.targetFileSystem.access(res.remainingPath, mode); + } + @Override public FileStatus[] listStatus(final Path f) throws AccessControlException, FileNotFoundException, IOException { @@ -520,6 +531,50 @@ public class ViewFileSystem extends File } @Override + public void setXAttr(Path path, String name, byte[] value, + EnumSet flag) throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + res.targetFileSystem.setXAttr(res.remainingPath, name, value, flag); + } + + @Override + public byte[] getXAttr(Path path, String name) throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + return res.targetFileSystem.getXAttr(res.remainingPath, name); + } + + @Override + public Map getXAttrs(Path path) throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + return res.targetFileSystem.getXAttrs(res.remainingPath); + } + + @Override + public Map getXAttrs(Path path, List names) + throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + return res.targetFileSystem.getXAttrs(res.remainingPath, names); + } + + @Override + public List listXAttrs(Path path) throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + return res.targetFileSystem.listXAttrs(res.remainingPath); + } + + @Override + public void removeXAttr(Path path, String name) throws IOException { + InodeTree.ResolveResult res = fsState.resolve(getUriPath(path), + true); + res.targetFileSystem.removeXAttr(res.remainingPath, name); + } + + @Override public void setVerifyChecksum(final boolean verifyChecksum) { List> mountPoints = fsState.getMountPoints();
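[Editor's sketch, not part of the commit. Every ViewFileSystem override added above has the same shape: resolve the view path against the mount table, then forward the call to the backing filesystem with the remaining path. The standalone illustration below shows that resolve-then-delegate pattern; MountDemo, Resolved and the sample mount entries are invented stand-ins for InodeTree, ResolveResult and the real FileSystem targets.]

import java.util.TreeMap;

public class MountDemo {
  // A resolved mount entry: the backend that owns the path plus the path
  // remainder relative to it (res.targetFileSystem / res.remainingPath).
  static final class Resolved {
    final String target;
    final String remainingPath;
    Resolved(String target, String remainingPath) {
      this.target = target;
      this.remainingPath = remainingPath;
    }
  }

  static final TreeMap<String, String> MOUNTS = new TreeMap<>();
  static {
    MOUNTS.put("/user", "hdfs://nn1");
    MOUNTS.put("/tmp", "hdfs://nn2");
  }

  // Longest-prefix match over the mount table, analogous to
  // fsState.resolve(getUriPath(path), true).
  static Resolved resolve(String path) {
    for (String prefix : MOUNTS.descendingKeySet()) {
      if (path.startsWith(prefix)) {
        return new Resolved(MOUNTS.get(prefix), path.substring(prefix.length()));
      }
    }
    throw new IllegalArgumentException("not in mount table: " + path);
  }

  public static void main(String[] args) {
    Resolved res = resolve("/user/alice/part-0");
    // A real override would now forward, e.g.
    // res.targetFileSystem.getXAttrs(res.remainingPath);
    System.out.println(res.target + " <- " + res.remainingPath);
  }
}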
@@ -825,5 +880,80 @@ public class ViewFileSystem extends File public short getDefaultReplication(Path f) { throw new NotInMountpointException(f, "getDefaultReplication"); } + + @Override + public void modifyAclEntries(Path path, List aclSpec) + throws IOException { + checkPathIsSlash(path); + throw readOnlyMountTable("modifyAclEntries", path); + } + + @Override + public void removeAclEntries(Path path, List aclSpec) + throws IOException { + checkPathIsSlash(path); + throw readOnlyMountTable("removeAclEntries", path); + } + + @Override + public void removeDefaultAcl(Path path) throws IOException { + checkPathIsSlash(path); + throw readOnlyMountTable("removeDefaultAcl", path); + } + + @Override + public void removeAcl(Path path) throws IOException { + checkPathIsSlash(path); + throw readOnlyMountTable("removeAcl", path); + } + + @Override + public void setAcl(Path path, List aclSpec) throws IOException { + checkPathIsSlash(path); + throw readOnlyMountTable("setAcl", path); + } + + @Override + public AclStatus getAclStatus(Path path) throws IOException { + checkPathIsSlash(path); + return new AclStatus.Builder().owner(ugi.getUserName()) + .group(ugi.getGroupNames()[0]) + .addEntries(AclUtil.getMinimalAcl(PERMISSION_555)) + .stickyBit(false).build(); + } + + @Override + public void setXAttr(Path path, String name, byte[] value, + EnumSet flag) throws IOException { + checkPathIsSlash(path); + throw readOnlyMountTable("setXAttr", path); + } + + @Override + public byte[] getXAttr(Path path, String name) throws IOException { + throw new NotInMountpointException(path, "getXAttr"); + } + + @Override + public Map getXAttrs(Path path) throws IOException { + throw new NotInMountpointException(path, "getXAttrs"); + } + + @Override + public Map getXAttrs(Path path, List names) + throws IOException { + throw new NotInMountpointException(path, "getXAttrs"); + } + + @Override + public List listXAttrs(Path path) throws IOException { + throw new NotInMountpointException(path, "listXAttrs"); + } + + @Override + public void removeXAttr(Path path, String name) throws IOException { + checkPathIsSlash(path); + throw readOnlyMountTable("removeXAttr", path); + } } } Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java Tue Aug 19 23:49:39 2014 @@ -26,6 +26,7 @@ import java.net.URISyntaxException; import java.util.ArrayList; import java.util.EnumSet; import java.util.List; +import java.util.Map; import java.util.Map.Entry; import org.apache.hadoop.classification.InterfaceAudience; @@ -48,7 +49,12 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.fs.UnresolvedLinkException; import org.apache.hadoop.fs.UnsupportedFileSystemException; +import org.apache.hadoop.fs.XAttrSetFlag; import org.apache.hadoop.fs.local.LocalConfigKeys; +import org.apache.hadoop.fs.permission.AclEntry; +import org.apache.hadoop.fs.permission.AclUtil; +import org.apache.hadoop.fs.permission.AclStatus; +import org.apache.hadoop.fs.permission.FsAction; import
org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.viewfs.InodeTree.INode; import org.apache.hadoop.fs.viewfs.InodeTree.INodeLink; @@ -348,6 +354,14 @@ public class ViewFs extends AbstractFile } @Override + public void access(Path path, FsAction mode) throws AccessControlException, + FileNotFoundException, UnresolvedLinkException, IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + res.targetFileSystem.access(res.remainingPath, mode); + } + + @Override public FileStatus getFileLinkStatus(final Path f) throws AccessControlException, FileNotFoundException, UnsupportedFileSystemException, IOException { @@ -603,6 +617,95 @@ public class ViewFs extends AbstractFile return true; } + @Override + public void modifyAclEntries(Path path, List aclSpec) + throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + res.targetFileSystem.modifyAclEntries(res.remainingPath, aclSpec); + } + + @Override + public void removeAclEntries(Path path, List aclSpec) + throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + res.targetFileSystem.removeAclEntries(res.remainingPath, aclSpec); + } + + @Override + public void removeDefaultAcl(Path path) + throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + res.targetFileSystem.removeDefaultAcl(res.remainingPath); + } + + @Override + public void removeAcl(Path path) + throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + res.targetFileSystem.removeAcl(res.remainingPath); + } + + @Override + public void setAcl(Path path, List aclSpec) throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + res.targetFileSystem.setAcl(res.remainingPath, aclSpec); + } + + @Override + public AclStatus getAclStatus(Path path) throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + return res.targetFileSystem.getAclStatus(res.remainingPath); + } + + @Override + public void setXAttr(Path path, String name, byte[] value, + EnumSet flag) throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + res.targetFileSystem.setXAttr(res.remainingPath, name, value, flag); + } + + @Override + public byte[] getXAttr(Path path, String name) throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + return res.targetFileSystem.getXAttr(res.remainingPath, name); + } + + @Override + public Map getXAttrs(Path path) throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + return res.targetFileSystem.getXAttrs(res.remainingPath); + } + + @Override + public Map getXAttrs(Path path, List names) + throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + return res.targetFileSystem.getXAttrs(res.remainingPath, names); + } + + @Override + public List listXAttrs(Path path) throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + return res.targetFileSystem.listXAttrs(res.remainingPath); + } + + @Override + public void removeXAttr(Path path, String name) throws IOException { + InodeTree.ResolveResult res = + fsState.resolve(getUriPath(path), true); + res.targetFileSystem.removeXAttr(res.remainingPath, name); + } /* @@ -832,5 +935,80 @@ public class ViewFs extends AbstractFile throws AccessControlException { throw 
readOnlyMountTable("setVerifyChecksum", ""); } + + @Override + public void modifyAclEntries(Path path, List aclSpec) + throws IOException { + checkPathIsSlash(path); + throw readOnlyMountTable("modifyAclEntries", path); + } + + @Override + public void removeAclEntries(Path path, List aclSpec) + throws IOException { + checkPathIsSlash(path); + throw readOnlyMountTable("removeAclEntries", path); + } + + @Override + public void removeDefaultAcl(Path path) throws IOException { + checkPathIsSlash(path); + throw readOnlyMountTable("removeDefaultAcl", path); + } + + @Override + public void removeAcl(Path path) throws IOException { + checkPathIsSlash(path); + throw readOnlyMountTable("removeAcl", path); + } + + @Override + public void setAcl(Path path, List aclSpec) throws IOException { + checkPathIsSlash(path); + throw readOnlyMountTable("setAcl", path); + } + + @Override + public AclStatus getAclStatus(Path path) throws IOException { + checkPathIsSlash(path); + return new AclStatus.Builder().owner(ugi.getUserName()) + .group(ugi.getGroupNames()[0]) + .addEntries(AclUtil.getMinimalAcl(PERMISSION_555)) + .stickyBit(false).build(); + } + + @Override + public void setXAttr(Path path, String name, byte[] value, + EnumSet flag) throws IOException { + checkPathIsSlash(path); + throw readOnlyMountTable("setXAttr", path); + } + + @Override + public byte[] getXAttr(Path path, String name) throws IOException { + throw new NotInMountpointException(path, "getXAttr"); + } + + @Override + public Map getXAttrs(Path path) throws IOException { + throw new NotInMountpointException(path, "getXAttrs"); + } + + @Override + public Map getXAttrs(Path path, List names) + throws IOException { + throw new NotInMountpointException(path, "getXAttrs"); + } + + @Override + public List listXAttrs(Path path) throws IOException { + throw new NotInMountpointException(path, "listXAttrs"); + } + + @Override + public void removeXAttr(Path path, String name) throws IOException { + checkPathIsSlash(path); + throw readOnlyMountTable("removeXAttr", path); + } } } Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java Tue Aug 19 23:49:39 2014 @@ -143,7 +143,6 @@ public class ActiveStandbyElector implem public static final Log LOG = LogFactory.getLog(ActiveStandbyElector.class); - static int NUM_RETRIES = 3; private static final int SLEEP_AFTER_FAILURE_TO_BECOME_ACTIVE = 1000; private static enum ConnectionState { @@ -170,6 +169,7 @@ public class ActiveStandbyElector implem private final String zkLockFilePath; private final String zkBreadCrumbPath; private final String znodeWorkingDir; + private final int maxRetryNum; private Lock sessionReestablishLockForTests = new ReentrantLock(); private boolean wantToBeInElection; @@ -207,7 +207,7 @@ public class ActiveStandbyElector implem public ActiveStandbyElector(String zookeeperHostPorts, int zookeeperSessionTimeout, String parentZnodeName, 
List acl, List authInfo, - ActiveStandbyElectorCallback app) throws IOException, + ActiveStandbyElectorCallback app, int maxRetryNum) throws IOException, HadoopIllegalArgumentException, KeeperException { if (app == null || acl == null || parentZnodeName == null || zookeeperHostPorts == null || zookeeperSessionTimeout <= 0) { @@ -220,7 +220,8 @@ public class ActiveStandbyElector implem appClient = app; znodeWorkingDir = parentZnodeName; zkLockFilePath = znodeWorkingDir + "/" + LOCK_FILENAME; - zkBreadCrumbPath = znodeWorkingDir + "/" + BREADCRUMB_FILENAME; + zkBreadCrumbPath = znodeWorkingDir + "/" + BREADCRUMB_FILENAME; + this.maxRetryNum = maxRetryNum; // createConnection for future API calls createConnection(); @@ -439,7 +440,7 @@ public class ActiveStandbyElector implem LOG.debug(errorMessage); if (shouldRetry(code)) { - if (createRetryCount < NUM_RETRIES) { + if (createRetryCount < maxRetryNum) { LOG.debug("Retrying createNode createRetryCount: " + createRetryCount); ++createRetryCount; createLockNodeAsync(); @@ -500,7 +501,7 @@ public class ActiveStandbyElector implem LOG.debug(errorMessage); if (shouldRetry(code)) { - if (statRetryCount < NUM_RETRIES) { + if (statRetryCount < maxRetryNum) { ++statRetryCount; monitorLockNodeAsync(); return; @@ -735,7 +736,7 @@ public class ActiveStandbyElector implem private boolean reEstablishSession() { int connectionRetryCount = 0; boolean success = false; - while(!success && connectionRetryCount < NUM_RETRIES) { + while(!success && connectionRetryCount < maxRetryNum) { LOG.debug("Establishing zookeeper connection for " + this); try { createConnection(); @@ -972,14 +973,14 @@ public class ActiveStandbyElector implem }); } - private static T zkDoWithRetries(ZKAction action) - throws KeeperException, InterruptedException { + private T zkDoWithRetries(ZKAction action) throws KeeperException, + InterruptedException { int retry = 0; while (true) { try { return action.run(); } catch (KeeperException ke) { - if (shouldRetry(ke.code()) && ++retry < NUM_RETRIES) { + if (shouldRetry(ke.code()) && ++retry < maxRetryNum) { continue; } throw ke; Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java Tue Aug 19 23:49:39 2014 @@ -19,7 +19,9 @@ package org.apache.hadoop.ha; import java.io.IOException; import java.io.PrintStream; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Map; import org.apache.commons.cli.Options; @@ -33,6 +35,7 @@ import org.apache.hadoop.classification. 
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.CommonConfigurationKeys; +import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; import org.apache.hadoop.ha.HAServiceProtocol.StateChangeRequestInfo; import org.apache.hadoop.ha.HAServiceProtocol.RequestSource; import org.apache.hadoop.util.Tool; @@ -66,7 +69,7 @@ public abstract class HAAdmin extends Co protected final static Map USAGE = ImmutableMap.builder() .put("-transitionToActive", - new UsageInfo("", "Transitions the service into Active state")) + new UsageInfo(" [--"+FORCEACTIVE+"]", "Transitions the service into Active state")) .put("-transitionToStandby", new UsageInfo("", "Transitions the service into Standby state")) .put("-failover", @@ -100,6 +103,11 @@ public abstract class HAAdmin extends Co } protected abstract HAServiceTarget resolveTarget(String string); + + protected Collection getTargetIds(String targetNodeToActivate) { + return new ArrayList( + Arrays.asList(new String[]{targetNodeToActivate})); + } protected String getUsageString() { return "Usage: HAAdmin"; @@ -133,6 +141,11 @@ public abstract class HAAdmin extends Co printUsage(errOut, "-transitionToActive"); return -1; } + /* returns true if other target node is active or some exception occurred + and forceActive was not set */ + if(isOtherTargetNodeActive(argv[0], cmd.hasOption(FORCEACTIVE))) { + return -1; + } HAServiceTarget target = resolveTarget(argv[0]); if (!checkManualStateManagementOK(target)) { return -1; @@ -142,7 +155,48 @@ public abstract class HAAdmin extends Co HAServiceProtocolHelper.transitionToActive(proto, createReqInfo()); return 0; } - + + /** + * Checks whether other target node is active or not + * @param targetNodeToActivate + * @return true if other target node is active or some other exception + * occurred and forceActive was set otherwise false + * @throws IOException + */ + private boolean isOtherTargetNodeActive(String targetNodeToActivate, boolean forceActive) + throws IOException { + Collection targetIds = getTargetIds(targetNodeToActivate); + if(targetIds == null) { + errOut.println("transitionToActive: No target node in the " + + "current configuration"); + printUsage(errOut, "-transitionToActive"); + return true; + } + targetIds.remove(targetNodeToActivate); + for(String targetId : targetIds) { + HAServiceTarget target = resolveTarget(targetId); + if (!checkManualStateManagementOK(target)) { + return true; + } + try { + HAServiceProtocol proto = target.getProxy(getConf(), 5000); + if(proto.getServiceStatus().getState() == HAServiceState.ACTIVE) { + errOut.println("transitionToActive: Node " + targetId +" is already active"); + printUsage(errOut, "-transitionToActive"); + return true; + } + } catch (Exception e) { + //If forceActive switch is false then return true + if(!forceActive) { + errOut.println("Unexpected error occurred " + e.getMessage()); + printUsage(errOut, "-transitionToActive"); + return true; + } + } + } + return false; + } + private int transitionToStandby(final CommandLine cmd) throws IOException, ServiceFailedException { String[] argv = cmd.getArgs(); @@ -364,6 +418,9 @@ public abstract class HAAdmin extends Co if ("-failover".equals(cmd)) { addFailoverCliOpts(opts); } + if("-transitionToActive".equals(cmd)) { + addTransitionToActiveCliOpts(opts); + } // Mutative commands take FORCEMANUAL option if ("-transitionToActive".equals(cmd) || "-transitionToStandby".equals(cmd) || @@ -433,6 +490,14 @@ public abstract class HAAdmin extends Co // that 
change state. } + /** + * Add CLI options which are specific to the transitionToActive command and + * no others. + */ + private void addTransitionToActiveCliOpts(Options transitionToActiveCliOpts) { + transitionToActiveCliOpts.addOption(FORCEACTIVE, false, "force active"); + } + private CommandLine parseOpts(String cmdName, Options opts, String[] argv) { try { // Strip off the first arg, since that's just the command name Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java Tue Aug 19 23:49:39 2014 @@ -74,6 +74,9 @@ public class HealthMonitor { private List callbacks = Collections.synchronizedList( new LinkedList()); + private List serviceStateCallbacks = Collections + .synchronizedList(new LinkedList()); + private HAServiceStatus lastServiceState = new HAServiceStatus( HAServiceState.INITIALIZING); @@ -134,7 +137,15 @@ public class HealthMonitor { public void removeCallback(Callback cb) { callbacks.remove(cb); } - + + public synchronized void addServiceStateCallback(ServiceStateCallback cb) { + this.serviceStateCallbacks.add(cb); + } + + public synchronized void removeServiceStateCallback(ServiceStateCallback cb) { + serviceStateCallbacks.remove(cb); + } + public void shutdown() { LOG.info("Stopping HealthMonitor thread"); shouldRun = false; @@ -217,6 +228,9 @@ public class HealthMonitor { private synchronized void setLastServiceStatus(HAServiceStatus status) { this.lastServiceState = status; + for (ServiceStateCallback cb : serviceStateCallbacks) { + cb.reportServiceStatus(lastServiceState); + } } private synchronized void enterState(State newState) { @@ -293,4 +307,11 @@ public class HealthMonitor { static interface Callback { void enteredState(State newState); } + + /** + * Callback interface for service states. 
+ */ + static interface ServiceStateCallback { + void reportServiceStatus(HAServiceStatus status); + } } Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java Tue Aug 19 23:49:39 2014 @@ -32,8 +32,10 @@ import org.apache.commons.logging.LogFac import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.ha.ActiveStandbyElector.ActiveNotFoundException; import org.apache.hadoop.ha.ActiveStandbyElector.ActiveStandbyElectorCallback; +import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; import org.apache.hadoop.ha.HAServiceProtocol.StateChangeRequestInfo; import org.apache.hadoop.ha.HAServiceProtocol.RequestSource; import org.apache.hadoop.util.ZKUtil; @@ -105,6 +107,8 @@ public abstract class ZKFailoverControll private State lastHealthState = State.INITIALIZING; + private volatile HAServiceState serviceState = HAServiceState.INITIALIZING; + /** Set if a fatal error occurs */ private String fatalError = null; @@ -294,6 +298,7 @@ public abstract class ZKFailoverControll private void initHM() { healthMonitor = new HealthMonitor(conf, localTarget); healthMonitor.addCallback(new HealthCallbacks()); + healthMonitor.addServiceStateCallback(new ServiceStateCallBacks()); healthMonitor.start(); } @@ -337,10 +342,12 @@ public abstract class ZKFailoverControll Preconditions.checkArgument(zkTimeout > 0, "Invalid ZK session timeout %s", zkTimeout); - + int maxRetryNum = conf.getInt( + CommonConfigurationKeys.HA_FC_ELECTOR_ZK_OP_RETRIES_KEY, + CommonConfigurationKeys.HA_FC_ELECTOR_ZK_OP_RETRIES_DEFAULT); elector = new ActiveStandbyElector(zkQuorum, zkTimeout, getParentZnode(), zkAcls, zkAuths, - new ElectorCallbacks()); + new ElectorCallbacks(), maxRetryNum); } private String getParentZnode() { @@ -376,6 +383,7 @@ public abstract class ZKFailoverControll String msg = "Successfully transitioned " + localTarget + " to active state"; LOG.info(msg); + serviceState = HAServiceState.ACTIVE; recordActiveAttempt(new ActiveAttemptRecord(true, msg)); } catch (Throwable t) { @@ -484,6 +492,7 @@ public abstract class ZKFailoverControll // TODO handle this. It's a likely case since we probably got fenced // at the same time. 
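
Operationally, the retry count introduced above is read once, when the elector is created, so a deployment that wants more resilience to transient ZooKeeper errors sets it before the ZKFC starts. A sketch; the numeric value is illustrative only:

  Configuration conf = new Configuration();
  // Key taken from CommonConfigurationKeys.HA_FC_ELECTOR_ZK_OP_RETRIES_KEY;
  // when unset, HA_FC_ELECTOR_ZK_OP_RETRIES_DEFAULT applies.
  conf.setInt(CommonConfigurationKeys.HA_FC_ELECTOR_ZK_OP_RETRIES_KEY, 5);
  // initZK() then hands the value to ActiveStandbyElector as maxRetryNum.
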
} + serviceState = HAServiceState.STANDBY; } @@ -574,6 +583,7 @@ public abstract class ZKFailoverControll delayJoiningUntilNanotime = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(millisToCede); elector.quitElection(needFence); + serviceState = HAServiceState.INITIALIZING; } } recheckElectability(); @@ -739,12 +749,16 @@ public abstract class ZKFailoverControll switch (lastHealthState) { case SERVICE_HEALTHY: elector.joinElection(targetToData(localTarget)); + if (quitElectionOnBadState) { + quitElectionOnBadState = false; + } break; case INITIALIZING: LOG.info("Ensuring that " + localTarget + " does not " + "participate in active master election"); elector.quitElection(false); + serviceState = HAServiceState.INITIALIZING; break; case SERVICE_UNHEALTHY: @@ -752,6 +766,7 @@ public abstract class ZKFailoverControll LOG.info("Quitting master election for " + localTarget + " and marking that fencing is necessary"); elector.quitElection(true); + serviceState = HAServiceState.INITIALIZING; break; case HEALTH_MONITOR_FAILED: @@ -784,6 +799,44 @@ public abstract class ZKFailoverControll whenNanos, TimeUnit.NANOSECONDS); } + int serviceStateMismatchCount = 0; + boolean quitElectionOnBadState = false; + + void verifyChangedServiceState(HAServiceState changedState) { + synchronized (elector) { + synchronized (this) { + if (serviceState == HAServiceState.INITIALIZING) { + if (quitElectionOnBadState) { + LOG.debug("rechecking for electability from bad state"); + recheckElectability(); + } + return; + } + if (changedState == serviceState) { + serviceStateMismatchCount = 0; + return; + } + if (serviceStateMismatchCount == 0) { + // recheck one more time. As this might be due to parallel transition. + serviceStateMismatchCount++; + return; + } + // quit the election as the expected state and reported state + // mismatches. + LOG.error("Local service " + localTarget + + " has changed the serviceState to " + changedState + + ". Expected was " + serviceState + + ". Quitting election marking fencing necessary."); + delayJoiningUntilNanotime = System.nanoTime() + + TimeUnit.MILLISECONDS.toNanos(1000); + elector.quitElection(true); + quitElectionOnBadState = true; + serviceStateMismatchCount = 0; + serviceState = HAServiceState.INITIALIZING; + } + } + } + /** * @return the last health state passed to the FC * by the HealthMonitor. 
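
The state-verification logic above is compact but subtle; a condensed restatement (field names as in the patch; locking, the INITIALIZING special case, and the rejoin delay elided) may help review:

  // One mismatched report is tolerated, since a state transition may still
  // be in flight; a second consecutive mismatch quits the election with
  // fencing, because the local service can no longer be trusted to match
  // what the elector believes.
  void onReport(HAServiceState reported) {
    if (reported == serviceState) {
      serviceStateMismatchCount = 0;
      return;
    }
    if (serviceStateMismatchCount++ == 0) {
      return;                      // recheck once more before acting
    }
    elector.quitElection(true);    // mark fencing necessary
    quitElectionOnBadState = true;
    serviceStateMismatchCount = 0;
    serviceState = HAServiceState.INITIALIZING;
  }
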
@@ -855,7 +908,17 @@ public abstract class ZKFailoverControll recheckElectability(); } } - + + /** + * Callbacks for HAServiceStatus + */ + class ServiceStateCallBacks implements HealthMonitor.ServiceStateCallback { + @Override + public void reportServiceStatus(HAServiceStatus status) { + verifyChangedServiceState(status.getState()); + } + } + private static class ActiveAttemptRecord { private final boolean succeeded; private final String status; Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java Tue Aug 19 23:49:39 2014 @@ -1005,7 +1005,7 @@ public final class HttpServer2 implement String remoteUser = request.getRemoteUser(); if (remoteUser == null) { - response.sendError(HttpServletResponse.SC_UNAUTHORIZED, + response.sendError(HttpServletResponse.SC_FORBIDDEN, "Unauthenticated users are not " + "authorized to access this page."); return false; @@ -1013,7 +1013,7 @@ public final class HttpServer2 implement if (servletContext.getAttribute(ADMINS_ACL) != null && !userHasAdministratorAccess(servletContext, remoteUser)) { - response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User " + response.sendError(HttpServletResponse.SC_FORBIDDEN, "User " + remoteUser + " is unauthorized to access this page."); return false; } Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java Tue Aug 19 23:49:39 2014 @@ -256,7 +256,7 @@ public class MapFile { } else { keyClass= (Class) keyClassOption.getValue(); - this.comparator = WritableComparator.get(keyClass); + this.comparator = WritableComparator.get(keyClass, conf); } this.lastKey = comparator.newKey(); FileSystem fs = dirName.getFileSystem(conf); @@ -428,12 +428,13 @@ public class MapFile { this.data = createDataFileReader(dataFile, conf, options); this.firstPosition = data.getPosition(); - if (comparator == null) - this.comparator = - WritableComparator.get(data.getKeyClass(). 
- asSubclass(WritableComparable.class)); - else + if (comparator == null) { + Class cls; + cls = data.getKeyClass().asSubclass(WritableComparable.class); + this.comparator = WritableComparator.get(cls, conf); + } else { this.comparator = comparator; + } // open the index SequenceFile.Reader.Option[] indexOptions = Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java Tue Aug 19 23:49:39 2014 @@ -2676,7 +2676,7 @@ public class SequenceFile { /** Sort and merge files containing the named classes. */ public Sorter(FileSystem fs, Class keyClass, Class valClass, Configuration conf) { - this(fs, WritableComparator.get(keyClass), keyClass, valClass, conf); + this(fs, WritableComparator.get(keyClass, conf), keyClass, valClass, conf); } /** Sort and merge using an arbitrary {@link RawComparator}. */ Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java Tue Aug 19 23:49:39 2014 @@ -52,7 +52,7 @@ public class SetFile extends MapFile { Class keyClass, SequenceFile.CompressionType compress) throws IOException { - this(conf, fs, dirName, WritableComparator.get(keyClass), compress); + this(conf, fs, dirName, WritableComparator.get(keyClass, conf), compress); } /** Create a set naming the element comparator and compression type. 
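
The motivation for threading the Configuration through these MapFile, SequenceFile and SetFile call sites becomes visible with a comparator that is itself Configurable: as the WritableComparator changes further down show, get(keyClass, conf) now applies the caller's configuration to the instance it returns. A sketch, with an invented comparator class and configuration key:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.io.Text;
  import org.apache.hadoop.io.WritableComparable;
  import org.apache.hadoop.io.WritableComparator;

  // Hypothetical comparator whose ordering depends on configuration.
  public class CaseAwareComparator extends WritableComparator {
    private boolean ignoreCase;

    public CaseAwareComparator() {
      super(Text.class, true);
    }

    @Override
    public void setConf(Configuration conf) {
      super.setConf(conf);
      if (conf != null) {
        ignoreCase = conf.getBoolean("example.compare.ignore-case", false);
      }
    }

    @Override
    public int compare(WritableComparable a, WritableComparable b) {
      String x = a.toString(), y = b.toString();
      return ignoreCase ? x.compareToIgnoreCase(y) : x.compareTo(y);
    }
  }

Registered via WritableComparator.define(Text.class, ...), the instance handed back by get(Text.class, conf) has seen the caller's conf by the time MapFile or SequenceFile.Sorter uses it.
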
*/ Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java Tue Aug 19 23:49:39 2014 @@ -288,9 +288,7 @@ public class Text extends BinaryComparab @Override public void readFields(DataInput in) throws IOException { int newLength = WritableUtils.readVInt(in); - setCapacity(newLength, false); - in.readFully(bytes, 0, newLength); - length = newLength; + readWithKnownLength(in, newLength); } public void readFields(DataInput in, int maxLength) throws IOException { @@ -302,9 +300,7 @@ public class Text extends BinaryComparab throw new IOException("tried to deserialize " + newLength + " bytes of data, but maxLength = " + maxLength); } - setCapacity(newLength, false); - in.readFully(bytes, 0, newLength); - length = newLength; + readWithKnownLength(in, newLength); } /** Skips over one Text in the input. */ @@ -313,6 +309,17 @@ public class Text extends BinaryComparab WritableUtils.skipFully(in, length); } + /** + * Read a Text object whose length is already known. + * This allows creating Text from a stream which uses a different serialization + * format. + */ + public void readWithKnownLength(DataInput in, int len) throws IOException { + setCapacity(len, false); + in.readFully(bytes, 0, len); + length = len; + } + /** serialize * write this object to out * length uses zero-compressed encoding Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java Tue Aug 19 23:49:39 2014 @@ -24,6 +24,8 @@ import java.util.concurrent.ConcurrentHa import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configurable; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.ReflectionUtils; /** A Comparator for {@link WritableComparable}s. @@ -37,13 +39,21 @@ import org.apache.hadoop.util.Reflection */ @InterfaceAudience.Public @InterfaceStability.Stable -public class WritableComparator implements RawComparator { +public class WritableComparator implements RawComparator, Configurable { private static final ConcurrentHashMap comparators = new ConcurrentHashMap(); // registry - /** Get a comparator for a {@link WritableComparable} implementation. */ + private Configuration conf; + + /** For backwards compatibility. 
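
Before moving on: the Text.readWithKnownLength() method added above is the hook other serialization frameworks need when the byte length arrives out of band rather than as the vInt prefix that readFields() consumes. A usage sketch (stream construction invented for illustration):

  import java.io.ByteArrayInputStream;
  import java.io.DataInputStream;
  import java.nio.charset.StandardCharsets;
  import org.apache.hadoop.io.Text;

  byte[] utf8 = "hello".getBytes(StandardCharsets.UTF_8);
  DataInputStream in = new DataInputStream(new ByteArrayInputStream(utf8));
  Text t = new Text();
  t.readWithKnownLength(in, utf8.length);  // consumes bytes only; no vInt prefix
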
**/ public static WritableComparator get(Class c) { + return get(c, null); + } + + /** Get a comparator for a {@link WritableComparable} implementation. */ + public static WritableComparator get( + Class c, Configuration conf) { WritableComparator comparator = comparators.get(c); if (comparator == null) { // force the static initializers to run @@ -52,12 +62,24 @@ public class WritableComparator implemen comparator = comparators.get(c); // if not, use the generic one if (comparator == null) { - comparator = new WritableComparator(c, true); + comparator = new WritableComparator(c, conf, true); } } + // Newly passed Configuration objects should be used. + ReflectionUtils.setConf(comparator, conf); return comparator; } + @Override + public void setConf(Configuration conf) { + this.conf = conf; + } + + @Override + public Configuration getConf() { + return conf; + } + /** * Force initialization of the static members. * As of Java 5, referencing a class doesn't force it to initialize. Since @@ -91,12 +113,19 @@ public class WritableComparator implemen /** Construct for a {@link WritableComparable} implementation. */ protected WritableComparator(Class keyClass) { - this(keyClass, false); + this(keyClass, null, false); } protected WritableComparator(Class keyClass, boolean createInstances) { + this(keyClass, null, createInstances); + } + + protected WritableComparator(Class keyClass, + Configuration conf, + boolean createInstances) { this.keyClass = keyClass; + this.conf = (conf != null) ? conf : new Configuration(); if (createInstances) { key1 = newKey(); key2 = newKey(); @@ -112,7 +141,7 @@ public class WritableComparator implemen /** Construct a new {@link WritableComparable} instance. */ public WritableComparable newKey() { - return ReflectionUtils.newInstance(keyClass, null); + return ReflectionUtils.newInstance(keyClass, conf); } /** Optimization hook. Override this to make SequenceFile.Sorter's scream. Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java Tue Aug 19 23:49:39 2014 @@ -100,7 +100,8 @@ public class BZip2Codec implements Confi @Override public CompressionOutputStream createOutputStream(OutputStream out) throws IOException { - return createOutputStream(out, createCompressor()); + return CompressionCodec.Util. + createOutputStreamWithCodecPool(this, conf, out); } /** @@ -153,7 +154,8 @@ public class BZip2Codec implements Confi @Override public CompressionInputStream createInputStream(InputStream in) throws IOException { - return createInputStream(in, createDecompressor()); + return CompressionCodec.Util. 
+ createInputStreamWithCodecPool(this, conf, in); } /** Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java Tue Aug 19 23:49:39 2014 @@ -24,6 +24,7 @@ import java.io.OutputStream; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configuration; /** * This class encapsulates a streaming compression/decompression pair. @@ -113,4 +114,58 @@ public interface CompressionCodec { * @return the extension including the '.' */ String getDefaultExtension(); + + static class Util { + /** + * Create an output stream with a codec taken from the global CodecPool. + * + * @param codec The codec to use to create the output stream. + * @param conf The configuration to use if we need to create a new codec. + * @param out The output stream to wrap. + * @return The new output stream + * @throws IOException + */ + static CompressionOutputStream createOutputStreamWithCodecPool( + CompressionCodec codec, Configuration conf, OutputStream out) + throws IOException { + Compressor compressor = CodecPool.getCompressor(codec, conf); + CompressionOutputStream stream = null; + try { + stream = codec.createOutputStream(out, compressor); + } finally { + if (stream == null) { + CodecPool.returnCompressor(compressor); + } else { + stream.setTrackedCompressor(compressor); + } + } + return stream; + } + + /** + * Create an input stream with a codec taken from the global CodecPool. + * + * @param codec The codec to use to create the input stream. + * @param conf The configuration to use if we need to create a new codec. + * @param in The input stream to wrap. 
+ * @return The new input stream + * @throws IOException + */ + static CompressionInputStream createInputStreamWithCodecPool( + CompressionCodec codec, Configuration conf, InputStream in) + throws IOException { + Decompressor decompressor = CodecPool.getDecompressor(codec); + CompressionInputStream stream = null; + try { + stream = codec.createInputStream(in, decompressor); + } finally { + if (stream == null) { + CodecPool.returnDecompressor(decompressor); + } else { + stream.setTrackedDecompressor(decompressor); + } + } + return stream; + } + } } Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java Tue Aug 19 23:49:39 2014 @@ -41,6 +41,8 @@ public abstract class CompressionInputSt protected final InputStream in; protected long maxAvailableData = 0L; + private Decompressor trackedDecompressor; + /** * Create a compression input stream that reads * the decompressed bytes from the given stream. @@ -58,6 +60,10 @@ public abstract class CompressionInputSt @Override public void close() throws IOException { in.close(); + if (trackedDecompressor != null) { + CodecPool.returnDecompressor(trackedDecompressor); + trackedDecompressor = null; + } } /** @@ -112,4 +118,8 @@ public abstract class CompressionInputSt public boolean seekToNewSource(long targetPos) throws UnsupportedOperationException { throw new UnsupportedOperationException(); } + + void setTrackedDecompressor(Decompressor decompressor) { + trackedDecompressor = decompressor; + } } Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java?rev=1619012&r1=1619011&r2=1619012&view=diff ============================================================================== --- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java (original) +++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java Tue Aug 19 23:49:39 2014 @@ -34,7 +34,13 @@ public abstract class CompressionOutputS * The output stream to be compressed. */ protected final OutputStream out; - + + /** + * If non-null, this is the Compressor object that we should call + * CodecPool#returnCompressor on when this stream is closed. + */ + private Compressor trackedCompressor; + /** * Create a compression output stream that writes * the compressed bytes to the given stream. 
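
Seen from the caller's side, the CodecPool plumbing above makes the one-argument convenience overloads leak-free: the pooled compressor or decompressor is attached to the stream and handed back when the stream is closed. A sketch (the file name and surrounding code are invented):

  import java.io.FileInputStream;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.io.IOUtils;
  import org.apache.hadoop.io.compress.BZip2Codec;
  import org.apache.hadoop.io.compress.CompressionInputStream;

  Configuration conf = new Configuration();
  BZip2Codec codec = new BZip2Codec();
  codec.setConf(conf);
  CompressionInputStream in =
      codec.createInputStream(new FileInputStream("part-00000.bz2"));
  try {
    IOUtils.copyBytes(in, System.out, 4096, false);
  } finally {
    in.close();  // also returns the Decompressor taken from the CodecPool
  }
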
@@ -43,11 +49,19 @@ public abstract class CompressionOutputS
   protected CompressionOutputStream(OutputStream out) {
     this.out = out;
   }
-
+
+  void setTrackedCompressor(Compressor compressor) {
+    trackedCompressor = compressor;
+  }
+
   @Override
   public void close() throws IOException {
     finish();
     out.close();
+    if (trackedCompressor != null) {
+      CodecPool.returnCompressor(trackedCompressor);
+      trackedCompressor = null;
+    }
   }
 
   @Override

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java?rev=1619012&r1=1619011&r2=1619012&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressorStream.java Tue Aug 19 23:49:39 2014
@@ -102,9 +102,13 @@ public class CompressorStream extends Co
   @Override
   public void close() throws IOException {
     if (!closed) {
-      finish();
-      out.close();
-      closed = true;
+      try {
+        finish();
+      }
+      finally {
+        out.close();
+        closed = true;
+      }
     }
   }
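
The CompressorStream fix closes a small but real leak: previously, if finish() threw (for example from a failing native compressor), the underlying stream was never closed and the stream was never marked closed. With the try/finally form, callers can rely on plain try-with-resources. A minimal sketch:

  import java.io.IOException;
  import java.io.OutputStream;
  import org.apache.hadoop.io.compress.CompressionCodec;
  import org.apache.hadoop.io.compress.CompressionOutputStream;

  static void writeCompressed(CompressionCodec codec, OutputStream rawOut,
      byte[] data) throws IOException {
    try (CompressionOutputStream cos = codec.createOutputStream(rawOut)) {
      cos.write(data);
    }  // close() runs finish(), then always closes rawOut and marks the
       // stream closed, even if finish() throws
  }
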