hadoop-common-commits mailing list archives

From: a..@apache.org
Subject: svn commit: r1377092 [2/3] - in /hadoop/common/branches/HDFS-3077/hadoop-common-project: hadoop-annotations/ hadoop-auth-examples/ hadoop-auth/ hadoop-common/ hadoop-common/src/main/docs/ hadoop-common/src/main/java/ hadoop-common/src/main/java/org/apa...
Date: Fri, 24 Aug 2012 20:38:17 GMT
Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/LocalConfigKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/LocalConfigKeys.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/LocalConfigKeys.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/LocalConfigKeys.java Fri Aug 24 20:38:08 2012
@@ -24,11 +24,18 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FsServerDefaults;
+import org.apache.hadoop.util.DataChecksum;
 
 /** 
  * This class contains constants for configuration keys used
  * in the local file system, raw local fs and checksum fs.
+ *
+ * Note that the settings for unimplemented features are ignored.
+ * E.g., checksum-related settings are just placeholders. Even when
+ * wrapped with {@link ChecksumFileSystem}, these settings are not
+ * used.
  */
+
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class LocalConfigKeys extends CommonConfigurationKeys {
@@ -44,7 +51,9 @@ public class LocalConfigKeys extends Com
                                                 "file.client-write-packet-size";
   public static final int CLIENT_WRITE_PACKET_SIZE_DEFAULT = 64*1024;
   public static final boolean ENCRYPT_DATA_TRANSFER_DEFAULT = false;
-
+  public static final long FS_TRASH_INTERVAL_DEFAULT = 0;
+  public static final DataChecksum.Type CHECKSUM_TYPE_DEFAULT =
+      DataChecksum.Type.CRC32;
   public static FsServerDefaults getServerDefaults() throws IOException {
     return new FsServerDefaults(
         BLOCK_SIZE_DEFAULT,
@@ -52,7 +61,9 @@ public class LocalConfigKeys extends Com
         CLIENT_WRITE_PACKET_SIZE_DEFAULT,
         REPLICATION_DEFAULT,
         STREAM_BUFFER_SIZE_DEFAULT,
-        ENCRYPT_DATA_TRANSFER_DEFAULT);
+        ENCRYPT_DATA_TRANSFER_DEFAULT,
+        FS_TRASH_INTERVAL_DEFAULT,
+        CHECKSUM_TYPE_DEFAULT);
   }
 }
   
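For illustration, a minimal sketch (not part of the patch) of how client code
might consume the two new server-default fields, assuming FsServerDefaults
exposes the matching getters getTrashInterval() and getChecksumType():

    import org.apache.hadoop.fs.FsServerDefaults;
    import org.apache.hadoop.fs.local.LocalConfigKeys;

    public class ServerDefaultsSketch {
      public static void main(String[] args) throws Exception {
        FsServerDefaults defaults = LocalConfigKeys.getServerDefaults();
        // The two fields added by this change; for the local fs they are
        // placeholders (trash disabled, CRC32).
        System.out.println("trash interval = " + defaults.getTrashInterval());
        System.out.println("checksum type  = " + defaults.getChecksumType());
      }
    }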

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java Fri Aug 24 20:38:08 2012
@@ -44,12 +44,12 @@ import org.apache.hadoop.util.StringUtil
 @InterfaceStability.Evolving
 
 abstract public class Command extends Configured {
-  /** default name of the command */
-  public static String NAME;
-  /** the command's usage switches and arguments format */
-  public static String USAGE;
-  /** the command's long description */
-  public static String DESCRIPTION;
+  /** field name indicating the default name of the command */
+  public static final String COMMAND_NAME_FIELD = "NAME";
+  /** field name indicating the command's usage switches and arguments format */
+  public static final String COMMAND_USAGE_FIELD = "USAGE";
+  /** field name indicating the command's long description */
+  public static final String COMMAND_DESCRIPTION_FIELD = "DESCRIPTION";
     
   protected String[] args;
   protected String name;
@@ -397,7 +397,7 @@ abstract public class Command extends Co
    */
   public String getName() {
     return (name == null)
-      ? getCommandField("NAME")
+      ? getCommandField(COMMAND_NAME_FIELD)
       : name.startsWith("-") ? name.substring(1) : name;
   }
 
@@ -415,7 +415,7 @@ abstract public class Command extends Co
    */
   public String getUsage() {
     String cmd = "-" + getName();
-    String usage = isDeprecated() ? "" : getCommandField("USAGE");
+    String usage = isDeprecated() ? "" : getCommandField(COMMAND_USAGE_FIELD);
     return usage.isEmpty() ? cmd : cmd + " " + usage; 
   }
 
@@ -426,7 +426,7 @@ abstract public class Command extends Co
   public String getDescription() {
     return isDeprecated()
       ? "(DEPRECATED) Same as '" + getReplacementCommand() + "'"
-      : getCommandField("DESCRIPTION");
+      : getCommandField(COMMAND_DESCRIPTION_FIELD);
   }
 
   /**

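The new constants matter because Command resolves NAME, USAGE and DESCRIPTION
reflectively (via getCommandField) from the concrete subclass rather than from
static fields shadowed on Command itself. A hypothetical subclass sketch of
the convention the base class expects:

    import org.apache.hadoop.fs.shell.Command;

    // Hypothetical command: the static field values still live in the
    // subclass; the new constants only name the fields the base class
    // looks up via getCommandField(COMMAND_NAME_FIELD), etc.
    abstract class HypotheticalCount extends Command {
      public static final String NAME = "count";
      public static final String USAGE = "[-q] <path> ...";
      public static final String DESCRIPTION =
          "Count the directories, files and bytes under the given paths.";
    }
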
Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFs.java Fri Aug 24 20:38:08 2012
@@ -34,6 +34,7 @@ import org.apache.hadoop.fs.FileChecksum
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.FsStatus;
+import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -159,11 +160,11 @@ class ChRootedFs extends AbstractFileSys
   public FSDataOutputStream createInternal(final Path f,
       final EnumSet<CreateFlag> flag, final FsPermission absolutePermission,
       final int bufferSize, final short replication, final long blockSize,
-      final Progressable progress, final int bytesPerChecksum,
+      final Progressable progress, final ChecksumOpt checksumOpt,
       final boolean createParent) throws IOException, UnresolvedLinkException {
     return myFs.createInternal(fullPath(f), flag,
         absolutePermission, bufferSize,
-        replication, blockSize, progress, bytesPerChecksum, createParent);
+        replication, blockSize, progress, checksumOpt, createParent);
   }
 
   @Override

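The bare bytesPerChecksum int is replaced by Options.ChecksumOpt, which
bundles the checksum type and chunk size. A sketch of building one, assuming
the (type, bytesPerChecksum) constructor implied by the new imports:

    import org.apache.hadoop.fs.Options.ChecksumOpt;
    import org.apache.hadoop.util.DataChecksum;

    public class ChecksumOptSketch {
      public static void main(String[] args) {
        // Request CRC32C with one checksum per 512 bytes of data.
        ChecksumOpt opt = new ChecksumOpt(DataChecksum.Type.CRC32C, 512);
        System.out.println(opt);
      }
    }
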
Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java Fri Aug 24 20:38:08 2012
@@ -23,7 +23,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -49,11 +49,8 @@ import org.apache.hadoop.fs.UnsupportedF
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.viewfs.InodeTree.INode;
 import org.apache.hadoop.fs.viewfs.InodeTree.INodeLink;
-import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.AccessControlException;
-import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.Time;
 
@@ -236,11 +233,6 @@ public class ViewFileSystem extends File
   }
   
   @Override
-  public String getCanonicalServiceName() {
-    return null;
-  }
-
-  @Override
   public URI getUri() {
     return myUri;
   }
@@ -549,6 +541,18 @@ public class ViewFileSystem extends File
     }
   }
 
+  @Override
+  public FileSystem[] getChildFileSystems() {
+    List<InodeTree.MountPoint<FileSystem>> mountPoints =
+        fsState.getMountPoints();
+    Set<FileSystem> children = new HashSet<FileSystem>();
+    for (InodeTree.MountPoint<FileSystem> mountPoint : mountPoints) {
+      FileSystem targetFs = mountPoint.target.targetFileSystem;
+      children.addAll(Arrays.asList(targetFs.getChildFileSystems()));
+    }
+    return children.toArray(new FileSystem[]{});
+  }
+  
   public MountPoint[] getMountPoints() {
     List<InodeTree.MountPoint<FileSystem>> mountPoints = 
                   fsState.getMountPoints();
@@ -561,59 +565,6 @@ public class ViewFileSystem extends File
     return result;
   }
   
- 
-  @Override
-  public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
-    List<InodeTree.MountPoint<FileSystem>> mountPoints = 
-                fsState.getMountPoints();
-    int initialListSize  = 0;
-    for (InodeTree.MountPoint<FileSystem> im : mountPoints) {
-      initialListSize += im.target.targetDirLinkList.length; 
-    }
-    List<Token<?>> result = new ArrayList<Token<?>>(initialListSize);
-    for ( int i = 0; i < mountPoints.size(); ++i ) {
-      List<Token<?>> tokens = 
-        mountPoints.get(i).target.targetFileSystem.getDelegationTokens(renewer);
-      if (tokens != null) {
-        result.addAll(tokens);
-      }
-    }
-    return result;
-  }
-
-  @Override
-  public List<Token<?>> getDelegationTokens(String renewer,
-      Credentials credentials) throws IOException {
-    List<InodeTree.MountPoint<FileSystem>> mountPoints =
-        fsState.getMountPoints();
-    int initialListSize = 0;
-    for (InodeTree.MountPoint<FileSystem> im : mountPoints) {
-      initialListSize += im.target.targetDirLinkList.length;
-    }
-    Set<String> seenServiceNames = new HashSet<String>();
-    List<Token<?>> result = new ArrayList<Token<?>>(initialListSize);
-    for (int i = 0; i < mountPoints.size(); ++i) {
-      String serviceName =
-          mountPoints.get(i).target.targetFileSystem.getCanonicalServiceName();
-      if (serviceName == null || seenServiceNames.contains(serviceName)) {
-        continue;
-      }
-      seenServiceNames.add(serviceName);
-      Token<?> knownToken = credentials.getToken(new Text(serviceName));
-      if (knownToken != null) {
-        result.add(knownToken);
-      } else {
-        List<Token<?>> tokens =
-            mountPoints.get(i).target.targetFileSystem
-                .getDelegationTokens(renewer);
-        if (tokens != null) {
-          result.addAll(tokens);
-        }
-      }
-    }
-    return result;
-  }
-
   /*
    * An instance of this class represents an internal dir of the viewFs 
    * that is internal dir of the mount table.

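With the per-mount-point token methods deleted, callers now reach the
underlying filesystems through the generic child-filesystem traversal
instead. A hedged sketch of the new pattern, assuming fs.defaultFS points at
a viewfs:// mount table:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class ChildFsSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem viewFs = FileSystem.get(conf);
        // getChildFileSystems() returns the de-duplicated set of mount
        // targets, recursing into nested children as the override does.
        for (FileSystem child : viewFs.getChildFileSystems()) {
          System.out.println(child.getUri());
        }
      }
    }
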
Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java Fri Aug 24 20:38:08 2012
@@ -42,6 +42,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FsConstants;
 import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.FsStatus;
+import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
@@ -265,7 +266,7 @@ public class ViewFs extends AbstractFile
   public FSDataOutputStream createInternal(final Path f,
       final EnumSet<CreateFlag> flag, final FsPermission absolutePermission,
       final int bufferSize, final short replication, final long blockSize,
-      final Progressable progress, final int bytesPerChecksum,
+      final Progressable progress, final ChecksumOpt checksumOpt,
       final boolean createParent) throws AccessControlException,
       FileAlreadyExistsException, FileNotFoundException,
       ParentNotDirectoryException, UnsupportedFileSystemException,
@@ -283,7 +284,7 @@ public class ViewFs extends AbstractFile
     assert(res.remainingPath != null);
     return res.targetFileSystem.createInternal(res.remainingPath, flag,
         absolutePermission, bufferSize, replication,
-        blockSize, progress, bytesPerChecksum,
+        blockSize, progress, checksumOpt,
         createParent);
   }
 
@@ -632,7 +633,7 @@ public class ViewFs extends AbstractFile
     public FSDataOutputStream createInternal(final Path f,
         final EnumSet<CreateFlag> flag, final FsPermission absolutePermission,
         final int bufferSize, final short replication, final long blockSize,
-        final Progressable progress, final int bytesPerChecksum,
+        final Progressable progress, final ChecksumOpt checksumOpt,
         final boolean createParent) throws AccessControlException,
         FileAlreadyExistsException, FileNotFoundException,
         ParentNotDirectoryException, UnsupportedFileSystemException,

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java Fri Aug 24 20:38:08 2012
@@ -49,16 +49,34 @@ public class FailoverController {
   private final int rpcTimeoutToNewActive;
   
   private final Configuration conf;
+  /*
+   * Need a copy of the conf for graceful fencing, so that we can set
+   * configurable retries for the IPC client. See HDFS-3561.
+   */
+  private final Configuration gracefulFenceConf;
 
   private final RequestSource requestSource;
   
   public FailoverController(Configuration conf,
       RequestSource source) {
     this.conf = conf;
+    this.gracefulFenceConf = new Configuration(conf);
     this.requestSource = source;
     
     this.gracefulFenceTimeout = getGracefulFenceTimeout(conf);
     this.rpcTimeoutToNewActive = getRpcTimeoutToNewActive(conf);
+    
+    // Configure fewer retries for graceful fencing
+    int gracefulFenceConnectRetries = conf.getInt(
+        CommonConfigurationKeys.HA_FC_GRACEFUL_FENCE_CONNECTION_RETRIES,
+        CommonConfigurationKeys.HA_FC_GRACEFUL_FENCE_CONNECTION_RETRIES_DEFAULT);
+    gracefulFenceConf.setInt(
+        CommonConfigurationKeys.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY,
+        gracefulFenceConnectRetries);
+    gracefulFenceConf.setInt(
+        CommonConfigurationKeys.IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SOCKET_TIMEOUTS_KEY,
+        gracefulFenceConnectRetries);
   }
 
   static int getGracefulFenceTimeout(Configuration conf) {
@@ -150,7 +168,7 @@ public class FailoverController {
   boolean tryGracefulFence(HAServiceTarget svc) {
     HAServiceProtocol proxy = null;
     try {
-      proxy = svc.getProxy(conf, gracefulFenceTimeout);
+      proxy = svc.getProxy(gracefulFenceConf, gracefulFenceTimeout);
       proxy.transitionToStandby(createReqInfo());
       return true;
     } catch (ServiceFailedException sfe) {

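A small sketch of tuning the new knob; the key constant is the one read
above, and the value 1 is only an example:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;

    public class GracefulFenceSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Allow a single connect attempt when gracefully fencing a
        // possibly-dead active node, so failover does not stall.
        conf.setInt(
            CommonConfigurationKeys.HA_FC_GRACEFUL_FENCE_CONNECTION_RETRIES,
            1);
      }
    }
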
Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java Fri Aug 24 20:38:08 2012
@@ -330,6 +330,12 @@ public class HttpServer implements Filte
       Context logContext = new Context(parent, "/logs");
       logContext.setResourceBase(logDir);
       logContext.addServlet(AdminAuthorizedServlet.class, "/*");
+      if (conf.getBoolean(
+          CommonConfigurationKeys.HADOOP_JETTY_LOGS_SERVE_ALIASES,
+          CommonConfigurationKeys.DEFAULT_HADOOP_JETTY_LOGS_SERVE_ALIASES)) {
+        logContext.getInitParams().put(
+            "org.mortbay.jetty.servlet.Default.aliases", "true");
+      }
       logContext.setDisplayName("logs");
       setContextAttributes(logContext, conf);
       defaultContexts.put(logContext, true);

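A sketch of toggling alias serving for the /logs context via the new key
(useful when the log directory is reached through symlinks); the key
constants are the ones read above:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;

    public class JettyAliasesSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.setBoolean(
            CommonConfigurationKeys.HADOOP_JETTY_LOGS_SERVE_ALIASES, true);
      }
    }
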
Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java Fri Aug 24 20:38:08 2012
@@ -206,12 +206,20 @@ public class IOUtils {
    * for any reason (including EOF)
    */
   public static void skipFully(InputStream in, long len) throws IOException {
-    while (len > 0) {
-      long ret = in.skip(len);
-      if (ret < 0) {
-        throw new IOException( "Premature EOF from inputStream");
+    long amt = len;
+    while (amt > 0) {
+      long ret = in.skip(amt);
+      if (ret == 0) {
+        // skip may return 0 even if we're not at EOF.  Luckily, we can 
+        // use the read() method to figure out if we're at the end.
+        int b = in.read();
+        if (b == -1) {
+          throw new EOFException( "Premature EOF from inputStream after " +
+              "skipping " + (len - amt) + " byte(s).");
+        }
+        ret = 1;
       }
-      len -= ret;
+      amt -= ret;
     }
   }
   

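Behavior change worth noting: a skip() that returns 0 now triggers a one-byte
read() probe, so hitting EOF raises EOFException instead of the old loop
spinning (many streams return 0, not a negative value, at EOF). A
self-contained usage sketch:

    import java.io.ByteArrayInputStream;
    import java.io.EOFException;
    import java.io.InputStream;
    import org.apache.hadoop.io.IOUtils;

    public class SkipFullySketch {
      public static void main(String[] args) throws Exception {
        InputStream in = new ByteArrayInputStream(new byte[8]);
        IOUtils.skipFully(in, 8);    // consumes the whole stream
        try {
          IOUtils.skipFully(in, 1);  // one byte past EOF
        } catch (EOFException e) {
          System.out.println("caught: " + e.getMessage());
        }
      }
    }
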
Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Fri Aug 24 20:38:08 2012
@@ -46,11 +46,13 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
+import java.util.Set;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingQueue;
@@ -108,6 +110,42 @@ import com.google.common.annotations.Vis
 public abstract class Server {
   private final boolean authorize;
   private boolean isSecurityEnabled;
+  private ExceptionsHandler exceptionsHandler = new ExceptionsHandler();
+  
+  public void addTerseExceptions(Class<?>... exceptionClass) {
+    exceptionsHandler.addTerseExceptions(exceptionClass);
+  }
+
+  /**
+   * ExceptionsHandler manages exception groups for special handling,
+   * e.g., the terse exception group for concise logging messages.
+   */
+  static class ExceptionsHandler {
+    private volatile Set<String> terseExceptions = new HashSet<String>();
+
+    /**
+     * Add exception classes so the server won't log their stack traces.
+     * Modifying terseExceptions through this method is thread-safe.
+     *
+     * @param exceptionClass exception classes 
+     */
+    void addTerseExceptions(Class<?>... exceptionClass) {
+
+      // Make a copy of terseExceptions for modification
+      final HashSet<String> newSet = new HashSet<String>(terseExceptions);
+
+      // Add all class names into the HashSet
+      for (Class<?> name : exceptionClass) {
+        newSet.add(name.toString());
+      }
+      // Replace the terseExceptions set
+      terseExceptions = Collections.unmodifiableSet(newSet);
+    }
+
+    boolean isTerse(Class<?> t) {
+      return terseExceptions.contains(t.toString());
+    }
+  }
   
   /**
    * The first four bytes of Hadoop RPC connections
@@ -1704,8 +1742,8 @@ public abstract class Server {
               // on the server side, as opposed to just a normal exceptional
               // result.
               LOG.warn(logMsg, e);
-            } else if (e instanceof StandbyException) {
-              // Don't log the whole stack trace of these exceptions.
+            } else if (exceptionsHandler.isTerse(e.getClass())) {
+              // Don't log the whole stack trace of these exceptions.
               // Way too noisy!
               LOG.info(logMsg);
             } else {
@@ -1844,6 +1882,8 @@ public abstract class Server {
     if (isSecurityEnabled) {
       SaslRpcServer.init(conf);
     }
+    
+    this.exceptionsHandler.addTerseExceptions(StandbyException.class);
   }
 
   private void closeConnection(Connection connection) {

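StandbyException keeps its former terse treatment via the registration in the
last hunk; servers can register more classes through the new public hook. A
hedged sketch (the Server instance is assumed to already exist):

    import org.apache.hadoop.ipc.Server;

    public class TerseExceptionsSketch {
      // Suppress stack traces for exception types that are expected and
      // noisy; only the log message is emitted for these.
      static void quiet(Server server) {
        server.addTerseExceptions(java.io.FileNotFoundException.class,
            org.apache.hadoop.security.AccessControlException.class);
      }
    }
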
Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java Fri Aug 24 20:38:08 2012
@@ -57,6 +57,20 @@ public class Credentials implements Writ
     new HashMap<Text, Token<? extends TokenIdentifier>>(); 
 
   /**
+   * Create an empty credentials instance
+   */
+  public Credentials() {
+  }
+  
+  /**
+   * Create a copy of the given credentials
+   * @param credentials to copy
+   */
+  public Credentials(Credentials credentials) {
+    this.addAll(credentials);
+  }
+  
+  /**
    * Returns the key bytes for the alias
    * @param alias the alias for the key
    * @return key for this alias
@@ -260,4 +274,10 @@ public class Credentials implements Writ
       }
     }
   }
+  
+  public void addTokensToUGI(UserGroupInformation ugi) {
+    for (Map.Entry<Text, Token<?>> token: tokenMap.entrySet()) {
+      ugi.addToken(token.getKey(), token.getValue());
+    }
+  }
 }

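A sketch combining the two additions: copy-construct a defensive snapshot,
then push all of its tokens into a UGI in one call:

    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.security.UserGroupInformation;

    public class CredentialsSketch {
      public static void main(String[] args) throws Exception {
        Credentials original = new Credentials();
        Credentials copy = new Credentials(original);  // new copy constructor
        // New convenience method; replaces hand-written addToken loops
        // (see the UserGroupInformation change below).
        copy.addTokensToUGI(UserGroupInformation.getCurrentUser());
      }
    }
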
Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Fri Aug 24 20:38:08 2012
@@ -55,6 +55,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.metrics2.annotation.Metric;
 import org.apache.hadoop.metrics2.annotation.Metrics;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
@@ -641,14 +642,12 @@ public class UserGroupInformation {
                                           AuthenticationMethod.SIMPLE);
         loginUser = new UserGroupInformation(login.getSubject());
         String fileLocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
-        if (fileLocation != null && isSecurityEnabled()) {
+        if (fileLocation != null) {
           // load the token storage file and put all of the tokens into the
           // user.
           Credentials cred = Credentials.readTokenStorageFile(
               new Path("file:///" + fileLocation), conf);
-          for (Token<?> token: cred.getAllTokens()) {
-            loginUser.addToken(token);
-          }
+          cred.addTokensToUGI(loginUser);
         }
         loginUser.spawnAutoRenewalThreadForUserCreds();
       } catch (LoginException le) {
@@ -1177,6 +1176,41 @@ public class UserGroupInformation {
   public synchronized Set<TokenIdentifier> getTokenIdentifiers() {
     return subject.getPublicCredentials(TokenIdentifier.class);
   }
+
+  // wrapper to retain the creds key for the token
+  private class NamedToken {
+    Text alias;
+    Token<? extends TokenIdentifier> token;
+    NamedToken(Text alias, Token<? extends TokenIdentifier> token) {
+      this.alias = alias;
+      this.token = token;
+    }
+    @Override
+    public boolean equals(Object o) {
+      boolean equals;
+      if (o == this) {
+        equals = true;
+      } else if (!(o instanceof NamedToken)) {
+        equals = false;
+      } else {
+        Text otherAlias = ((NamedToken)o).alias;
+        if (alias == otherAlias) {
+          equals = true;
+        } else {
+          equals = (otherAlias != null && otherAlias.equals(alias));
+        }
+      }
+      return equals;
+    }
+    @Override
+    public int hashCode() {
+      return (alias != null) ? alias.hashCode() : -1; 
+    }
+    @Override
+    public String toString() {
+      return "NamedToken: alias="+alias+" token="+token;
+    }
+  }
   
   /**
    * Add a token to this UGI
@@ -1185,7 +1219,22 @@ public class UserGroupInformation {
    * @return true on successful add of new token
    */
   public synchronized boolean addToken(Token<? extends TokenIdentifier> token) {
-    return subject.getPrivateCredentials().add(token);
+    return addToken(token.getService(), token);
+  }
+
+  /**
+   * Add a named token to this UGI
+   * 
+   * @param alias Name of the token
+   * @param token Token to be added
+   * @return true on successful add of new token
+   */
+  public synchronized boolean addToken(Text alias,
+                                       Token<? extends TokenIdentifier> token) {
+    NamedToken namedToken = new NamedToken(alias, token);
+    Collection<Object> ugiCreds = subject.getPrivateCredentials();
+    ugiCreds.remove(namedToken); // allow token to be replaced
+    return ugiCreds.add(new NamedToken(alias, token));
   }
   
   /**
@@ -1195,14 +1244,23 @@ public class UserGroupInformation {
    */
   public synchronized
   Collection<Token<? extends TokenIdentifier>> getTokens() {
-    Set<Object> creds = subject.getPrivateCredentials();
-    List<Token<?>> result = new ArrayList<Token<?>>(creds.size());
-    for(Object o: creds) {
-      if (o instanceof Token<?>) {
-        result.add((Token<?>) o);
-      }
+    return Collections.unmodifiableList(
+        new ArrayList<Token<?>>(getCredentials().getAllTokens()));
+  }
+
+  /**
+   * Obtain the tokens in credentials form associated with this user.
+   * 
+   * @return Credentials of tokens associated with this user
+   */
+  public synchronized Credentials getCredentials() {
+    final Credentials credentials = new Credentials();
+    final Set<NamedToken> namedTokens =
+        subject.getPrivateCredentials(NamedToken.class);
+    for (final NamedToken namedToken : namedTokens) {
+      credentials.addToken(namedToken.alias, namedToken.token);
     }
-    return Collections.unmodifiableList(result);
+    return credentials;
   }
 
   /**

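The NamedToken wrapper makes the alias part of the credential's identity in
the Subject, so re-adding under the same alias replaces the old token instead
of accumulating duplicates. A small sketch of the round trip (the alias
string and empty token are arbitrary examples):

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.token.Token;
    import org.apache.hadoop.security.token.TokenIdentifier;

    public class NamedTokenSketch {
      public static void main(String[] args) throws Exception {
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        Token<? extends TokenIdentifier> token = new Token<TokenIdentifier>();
        ugi.addToken(new Text("my-service"), token);
        ugi.addToken(new Text("my-service"), token);  // replaces, no duplicate
        Credentials creds = ugi.getCredentials();      // alias-preserving view
        System.out.println(creds.getAllTokens().size());  // prints 1
      }
    }
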
Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java Fri Aug 24 20:38:08 2012
@@ -113,6 +113,16 @@ extends AbstractDelegationTokenIdentifie
     }
   }
   
+  /**
+   * Reset all data structures and mutable state.
+   */
+  public synchronized void reset() {
+    currentId = 0;
+    allKeys.clear();
+    delegationTokenSequenceNumber = 0;
+    currentTokens.clear();
+  }
+  
   /** 
    * Add a previously used master key to cache (when NN restarts), 
    * should be called before activate().
@@ -190,7 +200,6 @@ extends AbstractDelegationTokenIdentifie
   
   @Override
   protected synchronized byte[] createPassword(TokenIdent identifier) {
-    LOG.info("Creating password for identifier: "+identifier);
     int sequenceNum;
     long now = Time.now();
     sequenceNum = ++delegationTokenSequenceNumber;
@@ -198,6 +207,7 @@ extends AbstractDelegationTokenIdentifie
     identifier.setMaxDate(now + tokenMaxLifetime);
     identifier.setMasterKeyId(currentId);
     identifier.setSequenceNumber(sequenceNum);
+    LOG.info("Creating password for identifier: " + identifier);
     byte[] password = createPassword(identifier.getBytes(), currentKey.getKey());
     currentTokens.put(identifier, new DelegationTokenInformation(now
         + tokenRenewInterval, password));

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java Fri Aug 24 20:38:08 2012
@@ -43,31 +43,48 @@ public class DataChecksum implements Che
   public static final int CHECKSUM_NULL    = 0;
   public static final int CHECKSUM_CRC32   = 1;
   public static final int CHECKSUM_CRC32C  = 2;
-  
-  private static String[] NAMES = new String[] {
-    "NULL", "CRC32", "CRC32C"
-  };
-  
-  private static final int CHECKSUM_NULL_SIZE  = 0;
-  private static final int CHECKSUM_CRC32_SIZE = 4;
-  private static final int CHECKSUM_CRC32C_SIZE = 4;
-  
-  
-  public static DataChecksum newDataChecksum( int type, int bytesPerChecksum ) {
+  public static final int CHECKSUM_DEFAULT = 3; 
+  public static final int CHECKSUM_MIXED   = 4;
+ 
+  /** The checksum types */
+  public static enum Type {
+    NULL  (CHECKSUM_NULL, 0),
+    CRC32 (CHECKSUM_CRC32, 4),
+    CRC32C(CHECKSUM_CRC32C, 4),
+    DEFAULT(CHECKSUM_DEFAULT, 0), // This cannot be used to create DataChecksum
+    MIXED (CHECKSUM_MIXED, 0); // This cannot be used to create DataChecksum
+
+    public final int id;
+    public final int size;
+    
+    private Type(int id, int size) {
+      this.id = id;
+      this.size = size;
+    }
+
+    /** @return the type corresponding to the id. */
+    public static Type valueOf(int id) {
+      if (id < 0 || id >= values().length) {
+        throw new IllegalArgumentException("id=" + id
+            + " out of range [0, " + values().length + ")");
+      }
+      return values()[id];
+    }
+  }
+
+
+  public static DataChecksum newDataChecksum(Type type, int bytesPerChecksum ) {
     if ( bytesPerChecksum <= 0 ) {
       return null;
     }
     
     switch ( type ) {
-    case CHECKSUM_NULL :
-      return new DataChecksum( CHECKSUM_NULL, new ChecksumNull(), 
-                               CHECKSUM_NULL_SIZE, bytesPerChecksum );
-    case CHECKSUM_CRC32 :
-      return new DataChecksum( CHECKSUM_CRC32, new PureJavaCrc32(), 
-                               CHECKSUM_CRC32_SIZE, bytesPerChecksum );
-    case CHECKSUM_CRC32C:
-      return new DataChecksum( CHECKSUM_CRC32C, new PureJavaCrc32C(),
-                               CHECKSUM_CRC32C_SIZE, bytesPerChecksum);
+    case NULL :
+      return new DataChecksum(type, new ChecksumNull(), bytesPerChecksum );
+    case CRC32 :
+      return new DataChecksum(type, new PureJavaCrc32(), bytesPerChecksum );
+    case CRC32C:
+      return new DataChecksum(type, new PureJavaCrc32C(), bytesPerChecksum);
     default:
       return null;  
     }
@@ -87,7 +104,7 @@ public class DataChecksum implements Che
                            ( (bytes[offset+2] & 0xff) << 16 ) |
                            ( (bytes[offset+3] & 0xff) << 8 )  |
                            ( (bytes[offset+4] & 0xff) );
-    return newDataChecksum( bytes[0], bytesPerChecksum );
+    return newDataChecksum( Type.valueOf(bytes[0]), bytesPerChecksum );
   }
   
   /**
@@ -98,7 +115,7 @@ public class DataChecksum implements Che
                                  throws IOException {
     int type = in.readByte();
     int bpc = in.readInt();
-    DataChecksum summer = newDataChecksum( type, bpc );
+    DataChecksum summer = newDataChecksum(Type.valueOf(type), bpc );
     if ( summer == null ) {
       throw new IOException( "Could not create DataChecksum of type " +
                              type + " with bytesPerChecksum " + bpc );
@@ -111,13 +128,13 @@ public class DataChecksum implements Che
    */
   public void writeHeader( DataOutputStream out ) 
                            throws IOException { 
-    out.writeByte( type );
+    out.writeByte( type.id );
     out.writeInt( bytesPerChecksum );
   }
 
   public byte[] getHeader() {
     byte[] header = new byte[DataChecksum.HEADER_LEN];
-    header[0] = (byte) (type & 0xff);
+    header[0] = (byte) (type.id & 0xff);
     // Writing in buffer just like DataOutput.WriteInt()
     header[1+0] = (byte) ((bytesPerChecksum >>> 24) & 0xff);
     header[1+1] = (byte) ((bytesPerChecksum >>> 16) & 0xff);
@@ -133,11 +150,11 @@ public class DataChecksum implements Che
    */
    public int writeValue( DataOutputStream out, boolean reset )
                           throws IOException {
-     if ( size <= 0 ) {
+     if ( type.size <= 0 ) {
        return 0;
      }
 
-     if ( size == 4 ) {
+     if ( type.size == 4 ) {
        out.writeInt( (int) summer.getValue() );
      } else {
        throw new IOException( "Unknown Checksum " + type );
@@ -147,7 +164,7 @@ public class DataChecksum implements Che
        reset();
      }
      
-     return size;
+     return type.size;
    }
    
    /**
@@ -157,11 +174,11 @@ public class DataChecksum implements Che
     */
     public int writeValue( byte[] buf, int offset, boolean reset )
                            throws IOException {
-      if ( size <= 0 ) {
+      if ( type.size <= 0 ) {
         return 0;
       }
 
-      if ( size == 4 ) {
+      if ( type.size == 4 ) {
         int checksum = (int) summer.getValue();
         buf[offset+0] = (byte) ((checksum >>> 24) & 0xff);
         buf[offset+1] = (byte) ((checksum >>> 16) & 0xff);
@@ -175,7 +192,7 @@ public class DataChecksum implements Che
         reset();
       }
       
-      return size;
+      return type.size;
     }
    
    /**
@@ -183,36 +200,33 @@ public class DataChecksum implements Che
     * @return true if the checksum matches and false otherwise.
     */
    public boolean compare( byte buf[], int offset ) {
-     if ( size == 4 ) {
+     if ( type.size == 4 ) {
        int checksum = ( (buf[offset+0] & 0xff) << 24 ) | 
                       ( (buf[offset+1] & 0xff) << 16 ) |
                       ( (buf[offset+2] & 0xff) << 8 )  |
                       ( (buf[offset+3] & 0xff) );
        return checksum == (int) summer.getValue();
      }
-     return size == 0;
+     return type.size == 0;
    }
    
-  private final int type;
-  private final int size;
+  private final Type type;
   private final Checksum summer;
   private final int bytesPerChecksum;
   private int inSum = 0;
   
-  private DataChecksum( int checksumType, Checksum checksum,
-                        int sumSize, int chunkSize ) {
-    type = checksumType;
+  private DataChecksum( Type type, Checksum checksum, int chunkSize ) {
+    this.type = type;
     summer = checksum;
-    size = sumSize;
     bytesPerChecksum = chunkSize;
   }
   
   // Accessors
-  public int getChecksumType() {
+  public Type getChecksumType() {
     return type;
   }
   public int getChecksumSize() {
-    return size;
+    return type.size;
   }
   public int getBytesPerChecksum() {
     return bytesPerChecksum;
@@ -260,7 +274,7 @@ public class DataChecksum implements Che
   public void verifyChunkedSums(ByteBuffer data, ByteBuffer checksums,
       String fileName, long basePos)
   throws ChecksumException {
-    if (size == 0) return;
+    if (type.size == 0) return;
     
     if (data.hasArray() && checksums.hasArray()) {
       verifyChunkedSums(
@@ -270,7 +284,7 @@ public class DataChecksum implements Che
       return;
     }
     if (NativeCrc32.isAvailable()) {
-      NativeCrc32.verifyChunkedSums(bytesPerChecksum, type, checksums, data,
+      NativeCrc32.verifyChunkedSums(bytesPerChecksum, type.id, checksums, data,
           fileName, basePos);
       return;
     }
@@ -280,7 +294,7 @@ public class DataChecksum implements Che
     checksums.mark();
     try {
       byte[] buf = new byte[bytesPerChecksum];
-      byte[] sum = new byte[size];
+      byte[] sum = new byte[type.size];
       while (data.remaining() > 0) {
         int n = Math.min(data.remaining(), bytesPerChecksum);
         checksums.get(sum);
@@ -351,7 +365,7 @@ public class DataChecksum implements Che
    *                  buffer to put the checksums.
    */
   public void calculateChunkedSums(ByteBuffer data, ByteBuffer checksums) {
-    if (size == 0) return;
+    if (type.size == 0) return;
     
     if (data.hasArray() && checksums.hasArray()) {
       calculateChunkedSums(data.array(), data.arrayOffset() + data.position(), data.remaining(),
@@ -411,18 +425,12 @@ public class DataChecksum implements Che
   
   @Override
   public int hashCode() {
-    return (this.type + 31) * this.bytesPerChecksum;
+    return (this.type.id + 31) * this.bytesPerChecksum;
   }
   
   @Override
   public String toString() {
-    String strType;
-    if (type < NAMES.length && type > 0) {
-      strType = NAMES[type];
-    } else {
-      strType = String.valueOf(type);
-    }
-    return "DataChecksum(type=" + strType +
+    return "DataChecksum(type=" + type +
       ", chunkSize=" + bytesPerChecksum + ")";
   }
   

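The new Type enum carries both the wire id and the checksum width, which is
what lets the separate size field and NAMES array disappear. A usage sketch:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import org.apache.hadoop.util.DataChecksum;

    public class DataChecksumSketch {
      public static void main(String[] args) throws Exception {
        DataChecksum sum =
            DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 512);
        System.out.println(sum.getChecksumType());  // CRC32C
        System.out.println(sum.getChecksumSize());  // 4, from Type.size
        DataOutputStream out =
            new DataOutputStream(new ByteArrayOutputStream());
        sum.writeHeader(out);  // one byte of type.id, then bytesPerChecksum
        // Type.DEFAULT and Type.MIXED are markers only;
        // newDataChecksum() returns null for them.
      }
    }
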
Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LineReader.java Fri Aug 24 20:38:08 2012
@@ -204,11 +204,13 @@ public class LineReader {
       int startPosn = bufferPosn; //starting from where we left off the last time
       if (bufferPosn >= bufferLength) {
         startPosn = bufferPosn = 0;
-        if (prevCharCR)
+        if (prevCharCR) {
           ++bytesConsumed; //account for CR from previous read
+        }
         bufferLength = in.read(buffer);
-        if (bufferLength <= 0)
+        if (bufferLength <= 0) {
           break; // EOF
+        }
       }
       for (; bufferPosn < bufferLength; ++bufferPosn) { //search for newline
         if (buffer[bufferPosn] == LF) {
@@ -223,8 +225,9 @@ public class LineReader {
         prevCharCR = (buffer[bufferPosn] == CR);
       }
       int readLength = bufferPosn - startPosn;
-      if (prevCharCR && newlineLength == 0)
+      if (prevCharCR && newlineLength == 0) {
         --readLength; //CR at the end of the buffer
+      }
       bytesConsumed += readLength;
       int appendLength = readLength - newlineLength;
       if (appendLength > maxLineLength - txtLength) {
@@ -236,8 +239,9 @@ public class LineReader {
       }
     } while (newlineLength == 0 && bytesConsumed < maxBytesToConsume);
 
-    if (bytesConsumed > (long)Integer.MAX_VALUE)
-      throw new IOException("Too many bytes before newline: " + bytesConsumed);    
+    if (bytesConsumed > (long)Integer.MAX_VALUE) {
+      throw new IOException("Too many bytes before newline: " + bytesConsumed);
+    }
     return (int)bytesConsumed;
   }
 
@@ -246,18 +250,56 @@ public class LineReader {
    */
   private int readCustomLine(Text str, int maxLineLength, int maxBytesToConsume)
       throws IOException {
+   /* We're reading data from the input stream, but the head of the stream
+    * may already have been captured in the previous buffer, so we have
+    * several cases:
+    *
+    * 1. The buffer tail does not contain any character sequence that
+    *    matches the head of the delimiter. We count it as an
+    *    ambiguous byte count = 0.
+    *
+    * 2. The buffer tail contains X characters that form a sequence
+    *    matching the head of the delimiter. We count ambiguous
+    *    byte count = X.
+    *
+    *    // *** e.g., a segment of the input file is as follows:
+    *
+    *    " record 1792: I found this bug very interesting and
+    *     I have completely read about it. record 1793: This bug
+    *     can be solved easily record 1794: This ."
+    *
+    *    delimiter = "record";
+    *
+    *    Suppose the string at the end of the buffer is
+    *    "I found this bug very interesting and I have completely re";
+    *    the next buffer then starts with "ad about it. record 179       ....".
+    *
+    *    The matching characters in the buffer tail and the delimiter
+    *    head are "re"; therefore, ambiguous byte count = 2. ***   //
+    *
+    *    2.1 If the following bytes are the remaining characters of
+    *        the delimiter, then we have to capture only up to the starting
+    *        position of the delimiter. That means we need not include the
+    *        ambiguous characters in str.
+    *
+    *    2.2 If the following bytes are not the remaining characters of
+    *        the delimiter (as in the example above),
+    *        then we have to include the ambiguous characters in str.
+    */
     str.clear();
     int txtLength = 0; // tracks str.getLength(), as an optimization
     long bytesConsumed = 0;
     int delPosn = 0;
+    int ambiguousByteCount = 0; // counts ambiguous bytes carried over
     do {
-      int startPosn = bufferPosn; // starting from where we left off the last
-      // time
+      int startPosn = bufferPosn; // Start from previous end position
       if (bufferPosn >= bufferLength) {
         startPosn = bufferPosn = 0;
         bufferLength = in.read(buffer);
-        if (bufferLength <= 0)
+        if (bufferLength <= 0) {
+          str.append(recordDelimiterBytes, 0, ambiguousByteCount);
           break; // EOF
+        }
       }
       for (; bufferPosn < bufferLength; ++bufferPosn) {
         if (buffer[bufferPosn] == recordDelimiterBytes[delPosn]) {
@@ -266,7 +308,8 @@ public class LineReader {
             bufferPosn++;
             break;
           }
-        } else {
+        } else if (delPosn != 0) {
+          bufferPosn--;
           delPosn = 0;
         }
       }
@@ -277,14 +320,27 @@ public class LineReader {
         appendLength = maxLineLength - txtLength;
       }
       if (appendLength > 0) {
+        if (ambiguousByteCount > 0) {
+          str.append(recordDelimiterBytes, 0, ambiguousByteCount);
+          // appending the ambiguous characters (see case 2.2)
+          bytesConsumed += ambiguousByteCount;
+          ambiguousByteCount=0;
+        }
         str.append(buffer, startPosn, appendLength);
         txtLength += appendLength;
       }
-    } while (delPosn < recordDelimiterBytes.length
+      if (bufferPosn >= bufferLength) {
+        if (delPosn > 0 && delPosn < recordDelimiterBytes.length) {
+          ambiguousByteCount = delPosn;
+          bytesConsumed -= ambiguousByteCount; // to be consumed in the next read
+        }
+      }
+    } while (delPosn < recordDelimiterBytes.length 
         && bytesConsumed < maxBytesToConsume);
-    if (bytesConsumed > (long) Integer.MAX_VALUE)
+    if (bytesConsumed > (long) Integer.MAX_VALUE) {
       throw new IOException("Too many bytes before delimiter: " + bytesConsumed);
-    return (int) bytesConsumed;
+    }
+    return (int) bytesConsumed; 
   }
 
   /**
@@ -296,7 +352,7 @@ public class LineReader {
    */
   public int readLine(Text str, int maxLineLength) throws IOException {
     return readLine(str, maxLineLength, Integer.MAX_VALUE);
-}
+  }
 
   /**
    * Read from the InputStream into the given Text.
@@ -307,5 +363,4 @@ public class LineReader {
   public int readLine(Text str) throws IOException {
     return readLine(str, Integer.MAX_VALUE, Integer.MAX_VALUE);
   }
-
 }

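A usage sketch of the path this fix covers: reading with a custom delimiter
that may straddle two internal buffer fills (it assumes the byte[]-delimiter
constructor; the input text is an arbitrary example):

    import java.io.ByteArrayInputStream;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.util.LineReader;

    public class CustomDelimiterSketch {
      public static void main(String[] args) throws Exception {
        byte[] data = "record 1792: ... record 1793: ...".getBytes("UTF-8");
        LineReader reader = new LineReader(
            new ByteArrayInputStream(data), "record".getBytes("UTF-8"));
        Text line = new Text();
        // With the ambiguous-byte tracking above, a delimiter split as
        // "re" | "cord" across two reads is still matched exactly once.
        while (reader.readLine(line) > 0) {
          System.out.println("[" + line + "]");
        }
        reader.close();
      }
    }
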
Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java Fri Aug 24 20:38:08 2012
@@ -47,7 +47,7 @@ public class NativeCodeLoader {
     }
     try {
       System.loadLibrary("hadoop");
-      LOG.info("Loaded the native-hadoop library");
+      LOG.debug("Loaded the native-hadoop library");
       nativeCodeLoaded = true;
     } catch (Throwable t) {
       // Ignore failure to load

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c Fri Aug 24 20:38:08 2012
@@ -24,7 +24,7 @@
 // Simple Functions
 //****************************
 
-extern int LZ4_compress   (char* source, char* dest, int isize);
+extern int LZ4_compress   (const char* source, char* dest, int isize);
 
 /*
 LZ4_compress() :

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c Fri Aug 24 20:38:08 2012
@@ -20,7 +20,7 @@
 #include "org_apache_hadoop.h"
 #include "org_apache_hadoop_io_compress_lz4_Lz4Decompressor.h"
 
-int LZ4_uncompress_unknownOutputSize (char* source, char* dest, int isize, int maxOutputSize);
+int LZ4_uncompress_unknownOutputSize(const char* source, char* dest, int isize, int maxOutputSize);
 
 /*
 LZ4_uncompress_unknownOutputSize() :

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c Fri Aug 24 20:38:08 2012
@@ -25,6 +25,8 @@
 #include "org_apache_hadoop_io_compress_snappy.h"
 #include "org_apache_hadoop_io_compress_snappy_SnappyCompressor.h"
 
+#define JINT_MAX 0x7fffffff
+
 static jfieldID SnappyCompressor_clazz;
 static jfieldID SnappyCompressor_uncompressedDirectBuf;
 static jfieldID SnappyCompressor_uncompressedDirectBufLen;
@@ -39,7 +41,7 @@ JNIEXPORT void JNICALL Java_org_apache_h
   // Load libsnappy.so
   void *libsnappy = dlopen(HADOOP_SNAPPY_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
   if (!libsnappy) {
-    char* msg = (char*)malloc(1000);
+    char msg[1000];
     snprintf(msg, 1000, "%s (%s)!", "Cannot load " HADOOP_SNAPPY_LIBRARY, dlerror());
     THROW(env, "java/lang/UnsatisfiedLinkError", msg);
     return;
@@ -71,6 +73,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
   jint uncompressed_direct_buf_len = (*env)->GetIntField(env, thisj, SnappyCompressor_uncompressedDirectBufLen);
   jobject compressed_direct_buf = (*env)->GetObjectField(env, thisj, SnappyCompressor_compressedDirectBuf);
   jint compressed_direct_buf_len = (*env)->GetIntField(env, thisj, SnappyCompressor_directBufferSize);
+  size_t buf_len;
 
   // Get the input direct buffer
   LOCK_CLASS(env, clazz, "SnappyCompressor");
@@ -78,7 +81,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
   UNLOCK_CLASS(env, clazz, "SnappyCompressor");
 
   if (uncompressed_bytes == 0) {
-    return (jint)0;
+    return 0;
   }
 
   // Get the output direct buffer
@@ -87,15 +90,22 @@ JNIEXPORT jint JNICALL Java_org_apache_h
   UNLOCK_CLASS(env, clazz, "SnappyCompressor");
 
   if (compressed_bytes == 0) {
-    return (jint)0;
+    return 0;
   }
 
-  snappy_status ret = dlsym_snappy_compress(uncompressed_bytes, uncompressed_direct_buf_len, compressed_bytes, &compressed_direct_buf_len);
+  /* size_t should always be 4 bytes or larger. */
+  buf_len = (size_t)compressed_direct_buf_len;
+  snappy_status ret = dlsym_snappy_compress(uncompressed_bytes,
+        uncompressed_direct_buf_len, compressed_bytes, &buf_len);
   if (ret != SNAPPY_OK){
     THROW(env, "Ljava/lang/InternalError", "Could not compress data. Buffer length is too small.");
+    return 0;
+  }
+  if (buf_len > JINT_MAX) {
+    THROW(env, "Ljava/lang/InternalError", "Invalid return buffer length.");
+    return 0;
   }
 
   (*env)->SetIntField(env, thisj, SnappyCompressor_uncompressedDirectBufLen, 0);
-
-  return (jint)compressed_direct_buf_len;
+  return (jint)buf_len;
 }

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Fri Aug 24 20:38:08 2012
@@ -16,6 +16,8 @@
  * limitations under the License.
  */
 
+#define _GNU_SOURCE
+
 #include <assert.h>
 #include <errno.h>
 #include <fcntl.h>
@@ -366,23 +368,15 @@ Java_org_apache_hadoop_io_nativeio_Nativ
  */
 static void throw_ioe(JNIEnv* env, int errnum)
 {
-  const char* message;
-  char buffer[80];
+  char message[80];
   jstring jstr_message;
 
-  buffer[0] = 0;
-#ifdef STRERROR_R_CHAR_P
-  // GNU strerror_r
-  message = strerror_r(errnum, buffer, sizeof(buffer));
-  assert (message != NULL);
-#else
-  int ret = strerror_r(errnum, buffer, sizeof(buffer));
-  if (ret == 0) {
-    message = buffer;
+  if ((errnum >= 0) && (errnum < sys_nerr)) {
+    snprintf(message, sizeof(message), "%s", sys_errlist[errnum]);
   } else {
-    message = "Unknown error";
+    snprintf(message, sizeof(message), "Unknown error %d", errnum);
   }
-#endif
+
   jobject errno_obj = errno_to_enum(env, errnum);
 
   if ((jstr_message = (*env)->NewStringUTF(env, message)) == NULL)

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c Fri Aug 24 20:38:08 2012
@@ -40,8 +40,8 @@ Java_org_apache_hadoop_security_JniBased
 (JNIEnv *env, jobject jobj, jstring juser) {
   extern int getGroupIDList(const char *user, int *ngroups, gid_t **groups);
   extern int getGroupDetails(gid_t group, char **grpBuf);
-
-  jobjectArray jgroups; 
+  const char *cuser = NULL;
+  jobjectArray jgroups = NULL;
   int error = -1;
 
   if (emptyGroups == NULL) {
@@ -56,7 +56,7 @@ Java_org_apache_hadoop_security_JniBased
     }
   }
   char *grpBuf = NULL;
-  const char *cuser = (*env)->GetStringUTFChars(env, juser, NULL);
+  cuser = (*env)->GetStringUTFChars(env, juser, NULL);
   if (cuser == NULL) {
     goto cleanup;
   }

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c Fri Aug 24 20:38:08 2012
@@ -45,6 +45,8 @@ typedef struct listElement UserList;
 JNIEXPORT jobjectArray JNICALL 
 Java_org_apache_hadoop_security_JniBasedUnixGroupsNetgroupMapping_getUsersForNetgroupJNI
 (JNIEnv *env, jobject jobj, jstring jgroup) {
+  UserList *userListHead = NULL;
+  int       userListSize = 0;
 
   // pointers to free at the end
   const char *cgroup  = NULL;
@@ -65,9 +67,6 @@ Java_org_apache_hadoop_security_JniBased
   // get users
   // see man pages for setnetgrent, getnetgrent and endnetgrent
 
-  UserList *userListHead = NULL;
-  int       userListSize = 0;
-
   // set the name of the group for subsequent calls to getnetgrent
   // note that we want to end group lookup regardless of whether setnetgrent
   // was successful or not (as long as it was called we need to call

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c Fri Aug 24 20:38:08 2012
@@ -18,6 +18,7 @@
 
 #include <arpa/inet.h>
 #include <assert.h>
+#include <inttypes.h>
 #include <stdlib.h>
 #include <stdint.h>
 #include <string.h>
@@ -50,7 +51,7 @@ static void throw_checksum_exception(JNI
 
   // Format error message
   snprintf(message, sizeof(message),
-    "Checksum error: %s at %ld exp: %d got: %d",
+    "Checksum error: %s at %"PRId64" exp: %"PRId32" got: %"PRId32,
     filename, pos, expected_crc, got_crc);
   if ((jstr_message = (*env)->NewStringUTF(env, message)) == NULL) {
     goto cleanup;

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c Fri Aug 24 20:38:08 2012
@@ -41,7 +41,7 @@ static uint32_t crc32c_sb8(uint32_t crc,
 
 #ifdef USE_PIPELINED
 static void pipelined_crc32c(uint32_t *crc1, uint32_t *crc2, uint32_t *crc3, const uint8_t *p_buf, size_t block_size, int num_blocks);
-#endif USE_PIPELINED
+#endif
 static int cached_cpu_supports_crc32; // initialized by constructor below
 static uint32_t crc32c_hardware(uint32_t crc, const uint8_t* data, size_t length);
 

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Fri Aug 24 20:38:08 2012
@@ -351,8 +351,12 @@
   <name>fs.trash.interval</name>
   <value>0</value>
   <description>Number of minutes after which the checkpoint
-  gets deleted.
-  If zero, the trash feature is disabled.
+  gets deleted.  If zero, the trash feature is disabled.
+  This option may be configured both on the server and the
+  client. If trash is disabled server side then the client
+  side configuration is checked. If trash is enabled on the
+  server side then the value configured on the server is
+  used and the client configuration value is ignored.
   </description>
 </property>
 
@@ -360,7 +364,8 @@
   <name>fs.trash.checkpoint.interval</name>
   <value>0</value>
   <description>Number of minutes between trash checkpoints.
-  Should be smaller or equal to fs.trash.interval.
+  Should be smaller than or equal to fs.trash.interval. If zero,
+  the value is set to the value of fs.trash.interval.
   Every time the checkpointer runs it creates a new checkpoint 
   out of current and removes checkpoints created more than 
   fs.trash.interval minutes ago.
@@ -1083,4 +1088,13 @@
   </description>
 </property>
 
+<property>
+  <name>hadoop.jetty.logs.serve.aliases</name>
+  <value>true</value>
+  <description>
+    Enable/disable serving aliases from Jetty
+  </description>
+</property>
+
+
 </configuration>
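
Two of the keys documented above can be exercised directly from client code. A minimal sketch, assuming the standard Configuration and Trash APIs (note new Trash(conf) can throw IOException, so run this inside a method that declares it):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Trash;

    Configuration conf = new Configuration();
    // fs.trash.interval: any non-zero value (in minutes) enables client-side trash
    conf.setLong("fs.trash.interval", 10);
    boolean trashEnabled = new Trash(conf).isEnabled();   // true with the value above
    // hadoop.jetty.logs.serve.aliases: plain boolean, default true per this file
    boolean serveAliases = conf.getBoolean("hadoop.jetty.logs.serve.aliases", true);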

Propchange: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1373573-1377085

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java Fri Aug 24 20:38:08 2012
@@ -39,6 +39,7 @@ import java.util.regex.Pattern;
 
 import junit.framework.TestCase;
 import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertNotNull;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
@@ -1157,6 +1158,12 @@ public class TestConfiguration extends T
         configuration.getPattern("testPattern", Pattern.compile("")).pattern());
   }
   
+  public void testGetClassByNameOrNull() throws Exception {
+   Configuration config = new Configuration();
+   Class<?> clazz = config.getClassByNameOrNull("java.lang.Object");
+   assertNotNull(clazz);
+  }
+  
   public static void main(String[] argv) throws Exception {
     junit.textui.TestRunner.main(new String[]{
       TestConfiguration.class.getName()

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java Fri Aug 24 20:38:08 2012
@@ -364,15 +364,17 @@ public abstract class FSMainOperationsBa
   }
   
   @Test
-  public void testGlobStatusThrowsExceptionForNonExistentFile() throws Exception {
-    try {
-      // This should throw a FileNotFoundException
-      fSys.globStatus(
-          getTestRootPath(fSys, "test/hadoopfsdf/?"));
-      Assert.fail("Should throw FileNotFoundException");
-    } catch (FileNotFoundException fnfe) {
-      // expected
-    }
+  public void testGlobStatusNonExistentFile() throws Exception {
+    FileStatus[] paths = fSys.globStatus(
+        getTestRootPath(fSys, "test/hadoopfsdf"));
+    Assert.assertNull(paths);
+
+    paths = fSys.globStatus(
+        getTestRootPath(fSys, "test/hadoopfsdf/?"));
+    Assert.assertEquals(0, paths.length);
+    paths = fSys.globStatus(
+        getTestRootPath(fSys, "test/hadoopfsdf/xyz*/?"));
+    Assert.assertEquals(0, paths.length);
   }
   
   @Test

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java Fri Aug 24 20:38:08 2012
@@ -360,15 +360,17 @@ public abstract class FileContextMainOpe
   }
   
   @Test
-  public void testGlobStatusThrowsExceptionForNonExistentFile() throws Exception {
-    try {
-      // This should throw a FileNotFoundException
-      fc.util().globStatus(
-          getTestRootPath(fc, "test/hadoopfsdf/?"));
-      Assert.fail("Should throw FileNotFoundException");
-    } catch (FileNotFoundException fnfe) {
-      // expected
-    }
+  public void testGlobStatusNonExistentFile() throws Exception {
+    FileStatus[] paths = fc.util().globStatus(
+          getTestRootPath(fc, "test/hadoopfsdf"));
+    Assert.assertNull(paths);
+
+    paths = fc.util().globStatus(
+        getTestRootPath(fc, "test/hadoopfsdf/?"));
+    Assert.assertEquals(0, paths.length);
+    paths = fc.util().globStatus(
+        getTestRootPath(fc, "test/hadoopfsdf/xyz*/?"));
+    Assert.assertEquals(0, paths.length);
   }
   
   @Test
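
Taken together, the two rewritten tests pin down the globStatus contract: null for a literal (glob-free) path that does not exist, and a zero-length array for a pattern that matches nothing. In sketch form, with fs standing for any FileSystem:

    FileStatus[] a = fs.globStatus(new Path("test/hadoopfsdf"));    // null: literal path, absent
    FileStatus[] b = fs.globStatus(new Path("test/hadoopfsdf/?"));  // empty array: no match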

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java Fri Aug 24 20:38:08 2012
@@ -24,8 +24,10 @@ import java.util.Random;
 
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.token.Token;
 import org.junit.Assert;
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.mock;
 
 /**
  * Helper class for unit tests.
@@ -218,4 +220,39 @@ public final class FileSystemTestHelper 
     }
     Assert.assertEquals(aFs.makeQualified(new Path(path)), s.getPath());
   }
+  
+  /**
+   * Class to enable easier mocking of a FileSystem
+   * Use getRawFileSystem to retrieve the mock
+   */
+  public static class MockFileSystem extends FilterFileSystem {
+    public MockFileSystem() {
+      // it's a bit awkward to mock ourselves, but it allows the visibility
+      // of methods to be increased
+      super(mock(MockFileSystem.class));
+    }
+    @Override
+    public MockFileSystem getRawFileSystem() {
+      return (MockFileSystem) super.getRawFileSystem();
+      
+    }
+    // these basic methods need to directly propagate to the mock to be
+    // more transparent
+    @Override
+    public void initialize(URI uri, Configuration conf) throws IOException {
+      fs.initialize(uri, conf);
+    }
+    @Override
+    public String getCanonicalServiceName() {
+      return fs.getCanonicalServiceName();
+    }
+    @Override
+    public FileSystem[] getChildFileSystems() {
+      return fs.getChildFileSystems();
+    }
+    @Override // publicly expose for mocking
+    public Token<?> getDelegationToken(String renewer) throws IOException {
+      return fs.getDelegationToken(renewer);
+    }    
+  }
 }
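
A sketch of intended MockFileSystem usage (Mockito when/verify static imports assumed; the stubbed service name is made up):

    MockFileSystem fs = new FileSystemTestHelper.MockFileSystem();
    MockFileSystem mock = fs.getRawFileSystem();            // the underlying Mockito mock
    when(mock.getCanonicalServiceName()).thenReturn("svc");
    assertEquals("svc", fs.getCanonicalServiceName());      // forwarded straight to the mock
    verify(mock).getCanonicalServiceName();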

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestAfsCheckPath.java Fri Aug 24 20:38:08 2012
@@ -24,6 +24,7 @@ import java.net.URISyntaxException;
 import java.util.EnumSet;
 
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.util.Progressable;
 import org.junit.Test;
@@ -76,7 +77,7 @@ public class TestAfsCheckPath {
     @Override
     public FSDataOutputStream createInternal(Path f, EnumSet<CreateFlag> flag,
         FsPermission absolutePermission, int bufferSize, short replication,
-        long blockSize, Progressable progress, int bytesPerChecksum,
+        long blockSize, Progressable progress, ChecksumOpt checksumOpt,
         boolean createParent) throws IOException {
       // deliberately empty
       return null;
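
createInternal now takes a ChecksumOpt rather than a bare bytesPerChecksum, so the checksum type and chunk size travel together. Construction would look roughly like this (constructor shape per Options.ChecksumOpt elsewhere in this patch series; treat the exact signature as an assumption):

    import org.apache.hadoop.fs.Options.ChecksumOpt;
    import org.apache.hadoop.util.DataChecksum;

    // checksum type + bytes-per-checksum bundled instead of a lone int
    ChecksumOpt opt = new ChecksumOpt(DataChecksum.Type.CRC32C, 512);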

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileSystemCanonicalization.java Fri Aug 24 20:38:08 2012
@@ -18,18 +18,20 @@
 
 package org.apache.hadoop.fs;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
 import java.io.IOException;
 import java.net.URI;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.NetUtilsTestResolver;
 import org.apache.hadoop.util.Progressable;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
-public class TestFileSystemCanonicalization extends TestCase {
+public class TestFileSystemCanonicalization {
   static String[] authorities = {
     "myfs://host",
     "myfs://host.a",
@@ -41,8 +43,8 @@ public class TestFileSystemCanonicalizat
   };
 
 
-  @Test
-  public void testSetupResolver() throws Exception {
+  @BeforeClass
+  public static void initialize() throws Exception {
     NetUtilsTestResolver.install();
   }
 

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java Fri Aug 24 20:38:08 2012
@@ -32,8 +32,10 @@ import java.util.Iterator;
 import org.apache.commons.logging.Log;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.fs.Options.CreateOpts;
 import org.apache.hadoop.fs.Options.Rename;
+import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Progressable;
 import org.junit.BeforeClass;
@@ -79,6 +81,11 @@ public class TestFilterFileSystem {
             Progressable progress) throws IOException {
       return null;
     }
+    public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
+            EnumSet<CreateFlag> flags, int bufferSize, short replication, long blockSize,
+            Progressable progress, ChecksumOpt checksumOpt) throws IOException {
+      return null;
+    }
     public boolean mkdirs(Path f) { return false; }
     public FSDataInputStream open(Path f) { return null; }
     public FSDataOutputStream create(Path f) { return null; }
@@ -137,6 +144,16 @@ public class TestFilterFileSystem {
         Progressable progress) throws IOException {
       return null;
     }
+    public FSDataOutputStream create(Path f,
+        FsPermission permission,
+        EnumSet<CreateFlag> flags,
+        int bufferSize,
+        short replication,
+        long blockSize,
+        Progressable progress,
+        ChecksumOpt checksumOpt) throws IOException {
+      return null;
+    }
     public String getName() { return null; }
     public boolean delete(Path f) { return false; }
     public short getReplication(Path src) { return 0 ; }
@@ -185,6 +202,10 @@ public class TestFilterFileSystem {
     public boolean cancelDeleteOnExit(Path f) throws IOException {
       return false;
     }
+    public Token<?>[] addDelegationTokens(String renewer, Credentials creds)
+        throws IOException {
+      return null;
+    }
     public String getScheme() {
       return "dontcheck";
     }
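
The stubs added in this file keep TestFilterFileSystem's reflection check in step with new FileSystem methods: the test walks FileSystem's methods and requires FilterFileSystem to override each one unless the stub class declares it. A hedged sketch of that loop (the real check sits outside this hunk; declaresMethod and stubClass are hypothetical):

    import java.lang.reflect.Method;
    import java.lang.reflect.Modifier;

    for (Method m : FileSystem.class.getDeclaredMethods()) {
      if (Modifier.isStatic(m.getModifiers())) continue;     // instance methods only
      boolean exempt = declaresMethod(stubClass, m);         // stubbed above => exempt
      boolean overridden = declaresMethod(FilterFileSystem.class, m);
      assertTrue("method not delegated: " + m, exempt || overridden);
    }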

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java Fri Aug 24 20:38:08 2012
@@ -32,7 +32,6 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.ftpserver.command.impl.STAT;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.shell.CommandFactory;
 import org.apache.hadoop.fs.shell.FsCommand;

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java Fri Aug 24 20:38:08 2012
@@ -343,14 +343,18 @@ public class TestLocalDirAllocator {
   @Test
   public void testRemoveContext() throws IOException {
     String dir = buildBufferDir(ROOT, 0);
-    String contextCfgItemName = "application_1340842292563_0004.app.cache.dirs";
-    conf.set(contextCfgItemName, dir);
-    LocalDirAllocator localDirAllocator = new LocalDirAllocator(
-        contextCfgItemName);
-    localDirAllocator.getLocalPathForWrite("p1/x", SMALL_FILE_SIZE, conf);
-    assertTrue(LocalDirAllocator.isContextValid(contextCfgItemName));
-    LocalDirAllocator.removeContext(contextCfgItemName);
-    assertFalse(LocalDirAllocator.isContextValid(contextCfgItemName));
+    try {
+      String contextCfgItemName = "application_1340842292563_0004.app.cache.dirs";
+      conf.set(contextCfgItemName, dir);
+      LocalDirAllocator localDirAllocator = new LocalDirAllocator(
+          contextCfgItemName);
+      localDirAllocator.getLocalPathForWrite("p1/x", SMALL_FILE_SIZE, conf);
+      assertTrue(LocalDirAllocator.isContextValid(contextCfgItemName));
+      LocalDirAllocator.removeContext(contextCfgItemName);
+      assertFalse(LocalDirAllocator.isContextValid(contextCfgItemName));
+    } finally {
+      rmBufferDirs();
+    }
   }
 
 }

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java Fri Aug 24 20:38:08 2012
@@ -33,8 +33,8 @@ import org.junit.Test;
  * This class tests the local file system via the FileSystem abstraction.
  */
 public class TestLocalFileSystem {
-  private static String TEST_ROOT_DIR
-    = System.getProperty("test.build.data","build/test/data/work-dir/localfs");
+  private static final String TEST_ROOT_DIR
+    = System.getProperty("test.build.data","build/test/data") + "/work-dir/localfs";
 
   private Configuration conf;
   private FileSystem fileSys;

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java Fri Aug 24 20:38:08 2012
@@ -111,10 +111,10 @@ public class TestTrash extends TestCase 
       throws IOException {
     FileSystem fs = FileSystem.get(conf);
 
-    conf.set(FS_TRASH_INTERVAL_KEY, "0"); // disabled
+    conf.setLong(FS_TRASH_INTERVAL_KEY, 0); // disabled
     assertFalse(new Trash(conf).isEnabled());
 
-    conf.set(FS_TRASH_INTERVAL_KEY, "10"); // 10 minute
+    conf.setLong(FS_TRASH_INTERVAL_KEY, 10); // 10 minute
     assertTrue(new Trash(conf).isEnabled());
 
     FsShell shell = new FsShell();
@@ -435,7 +435,7 @@ public class TestTrash extends TestCase 
   }
 
   public static void trashNonDefaultFS(Configuration conf) throws IOException {
-    conf.set(FS_TRASH_INTERVAL_KEY, "10"); // 10 minute
+    conf.setLong(FS_TRASH_INTERVAL_KEY, 10); // 10 minute
     // attempt non-default FileSystem trash
     {
       final FileSystem lfs = FileSystem.getLocal(conf);
@@ -580,7 +580,7 @@ public class TestTrash extends TestCase 
     FileSystem fs = FileSystem.getLocal(conf);
     
     conf.set("fs.defaultFS", fs.getUri().toString());
-    conf.set(FS_TRASH_INTERVAL_KEY, "10"); //minutes..
+    conf.setLong(FS_TRASH_INTERVAL_KEY, 10); //minutes..
     FsShell shell = new FsShell();
     shell.setConf(conf);
     //Path trashRoot = null;

Modified: hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java?rev=1377092&r1=1377091&r2=1377092&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java (original)
+++ hadoop/common/branches/HDFS-3077/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCommandFactory.java Fri Aug 24 20:38:08 2012
@@ -48,6 +48,10 @@ public class TestCommandFactory {
     factory.addClass(TestCommand3.class, "tc3");
     names = factory.getNames();
     assertArrayEquals(new String []{"tc1", "tc2", "tc2.1", "tc3"}, names);
+    
+    factory.addClass(TestCommand4.class, (new TestCommand4()).getName());
+    names = factory.getNames();
+    assertArrayEquals(new String[]{"tc1", "tc2", "tc2.1", "tc3", "tc4"}, names);
   }
   
   @Test
@@ -72,8 +76,17 @@ public class TestCommandFactory {
     assertNotNull(instance);
     assertEquals(TestCommand2.class, instance.getClass());    
     assertEquals("tc2.1", instance.getCommandName());
+    
+    factory.addClass(TestCommand4.class, "tc4");
+    instance = factory.getInstance("tc4");
+    assertNotNull(instance);
+    assertEquals(TestCommand4.class, instance.getClass());    
+    assertEquals("tc4", instance.getCommandName());
+    String usage = instance.getUsage();
+    assertEquals("-tc4 tc4_usage", usage);
+    assertEquals("tc4_description", instance.getDescription());
   }
-  
+
   static class TestRegistrar {
     public static void registerCommands(CommandFactory factory) {
       factory.addClass(TestCommand1.class, "tc1");
@@ -84,4 +97,10 @@ public class TestCommandFactory {
   static class TestCommand1 extends FsCommand {}
   static class TestCommand2 extends FsCommand {}
   static class TestCommand3 extends FsCommand {}
+  
+  static class TestCommand4 extends FsCommand {
+    static final String NAME = "tc4";
+    static final String USAGE = "tc4_usage";
+    static final String DESCRIPTION = "tc4_description";
+  }
 }
\ No newline at end of file
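
TestCommand4 shows that CommandFactory derives a command's name, usage, and description reflectively from its static NAME, USAGE, and DESCRIPTION fields. Distilled from the assertions above (factory as created in the test's setup):

    factory.addClass(TestCommand4.class, "tc4");
    Command tc4 = factory.getInstance("tc4");
    assertEquals("tc4", tc4.getCommandName());
    assertEquals("-tc4 tc4_usage", tc4.getUsage());          // "-" + NAME + " " + USAGE
    assertEquals("tc4_description", tc4.getDescription());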


