hadoop-common-commits mailing list archives

From: sur...@apache.org
Subject: svn commit: r1390763 [2/3] - in /hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common: ./ src/ src/contrib/bash-tab-completion/ src/main/bin/ src/main/docs/ src/main/docs/src/documentation/content/xdocs/ src/main/java/ src/main/j...
Date: Wed, 26 Sep 2012 22:55:16 GMT
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java Wed Sep 26 22:55:00 2012
@@ -48,6 +48,8 @@ import org.apache.hadoop.security.SaslRp
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Time;
@@ -72,6 +74,8 @@ import com.google.protobuf.BlockingServi
  * All methods in the protocol should throw only IOException.  No field data of
  * the protocol instance is transmitted.
  */
+@InterfaceAudience.LimitedPrivate(value = { "Common", "HDFS", "MapReduce", "Yarn" })
+@InterfaceStability.Evolving
 public class RPC {
   public enum RpcKind {
     RPC_BUILTIN ((short) 1),         // Used for built in calls by tests
@@ -629,7 +633,7 @@ public class RPC {
 
   /** Construct a server for a protocol implementation instance listening on a
    * port and address.
-   * @deprecated protocol interface should be passed.
+   * @deprecated Please use {@link Builder} to build the {@link Server}
    */
   @Deprecated
   public static Server getServer(final Object instance, final String bindAddress, final int port, Configuration conf) 
@@ -639,7 +643,7 @@ public class RPC {
 
   /** Construct a server for a protocol implementation instance listening on a
    * port and address.
-   * @deprecated protocol interface should be passed.
+   * @deprecated Please use {@link Builder} to build the {@link Server}
    */
   @Deprecated
   public static Server getServer(final Object instance, final String bindAddress, final int port,
@@ -651,7 +655,10 @@ public class RPC {
                      null);
   }
 
-  /** Construct a server for a protocol implementation instance. */
+  /** Construct a server for a protocol implementation instance.
+   *  @deprecated Please use {@link Builder} to build the {@link Server}
+   */
+  @Deprecated
   public static Server getServer(Class<?> protocol,
                                  Object instance, String bindAddress,
                                  int port, Configuration conf) 
@@ -661,7 +668,7 @@ public class RPC {
   }
 
   /** Construct a server for a protocol implementation instance.
-   * @deprecated secretManager should be passed.
+   * @deprecated Please use {@link Builder} to build the {@link Server}
    */
   @Deprecated
   public static Server getServer(Class<?> protocol,
@@ -674,7 +681,10 @@ public class RPC {
                  conf, null, null);
   }
   
-  /** Construct a server for a protocol implementation instance. */
+  /** Construct a server for a protocol implementation instance. 
+   *  @deprecated Please use {@link Builder} to build the {@link Server}
+   */
+  @Deprecated
   public static Server getServer(Class<?> protocol,
                                  Object instance, String bindAddress, int port,
                                  int numHandlers,
@@ -685,6 +695,10 @@ public class RPC {
         conf, secretManager, null);
   }
   
+  /**
+   *  @deprecated Please use {@link Builder} to build the {@link Server}
+   */
+  @Deprecated
   public static Server getServer(Class<?> protocol,
       Object instance, String bindAddress, int port,
       int numHandlers,
@@ -697,8 +711,10 @@ public class RPC {
                  verbose, conf, secretManager, portRangeConfig);
   }
 
-  /** Construct a server for a protocol implementation instance. */
-
+  /** Construct a server for a protocol implementation instance.
+   *  @deprecated Please use {@link Builder} to build the {@link Server}
+   */
+  @Deprecated
   public static <PROTO extends VersionedProtocol, IMPL extends PROTO> 
         Server getServer(Class<PROTO> protocol,
                                  IMPL instance, String bindAddress, int port,
@@ -713,6 +729,110 @@ public class RPC {
                  null);
   }
 
+  /**
+   * Class to construct instances of RPC server with specific options.
+   */
+  public static class Builder {
+    private Class<?> protocol = null;
+    private Object instance = null;
+    private String bindAddress = "0.0.0.0";
+    private int port = 0;
+    private int numHandlers = 1;
+    private int numReaders = -1;
+    private int queueSizePerHandler = -1;
+    private boolean verbose = false;
+    private final Configuration conf;    
+    private SecretManager<? extends TokenIdentifier> secretManager = null;
+    private String portRangeConfig = null;
+    
+    public Builder(Configuration conf) {
+      this.conf = conf;
+    }
+
+    /** Mandatory field */
+    public Builder setProtocol(Class<?> protocol) {
+      this.protocol = protocol;
+      return this;
+    }
+    
+    /** Mandatory field */
+    public Builder setInstance(Object instance) {
+      this.instance = instance;
+      return this;
+    }
+    
+    /** Default: 0.0.0.0 */
+    public Builder setBindAddress(String bindAddress) {
+      this.bindAddress = bindAddress;
+      return this;
+    }
+    
+    /** Default: 0 */
+    public Builder setPort(int port) {
+      this.port = port;
+      return this;
+    }
+    
+    /** Default: 1 */
+    public Builder setNumHandlers(int numHandlers) {
+      this.numHandlers = numHandlers;
+      return this;
+    }
+    
+    /** Default: -1 */
+    public Builder setnumReaders(int numReaders) {
+      this.numReaders = numReaders;
+      return this;
+    }
+    
+    /** Default: -1 */
+    public Builder setQueueSizePerHandler(int queueSizePerHandler) {
+      this.queueSizePerHandler = queueSizePerHandler;
+      return this;
+    }
+    
+    /** Default: false */
+    public Builder setVerbose(boolean verbose) {
+      this.verbose = verbose;
+      return this;
+    }
+    
+    /** Default: null */
+    public Builder setSecretManager(
+        SecretManager<? extends TokenIdentifier> secretManager) {
+      this.secretManager = secretManager;
+      return this;
+    }
+    
+    /** Default: null */
+    public Builder setPortRangeConfig(String portRangeConfig) {
+      this.portRangeConfig = portRangeConfig;
+      return this;
+    }
+    
+    /**
+     * Build the RPC Server. 
+     * @throws IOException on error
+     * @throws HadoopIllegalArgumentException when mandatory fields are not set
+     */
+    public Server build() throws IOException, HadoopIllegalArgumentException {
+      if (this.conf == null) {
+        throw new HadoopIllegalArgumentException("conf is not set");
+      }
+      if (this.protocol == null) {
+        throw new HadoopIllegalArgumentException("protocol is not set");
+      }
+      if (this.instance == null) {
+        throw new HadoopIllegalArgumentException("instance is not set");
+      }
+      
+      return getProtocolEngine(this.protocol, this.conf).getServer(
+          this.protocol, this.instance, this.bindAddress, this.port,
+          this.numHandlers, this.numReaders, this.queueSizePerHandler,
+          this.verbose, this.conf, this.secretManager, this.portRangeConfig);
+    }
+  }
+  
   /** An RPC Server. */
   public abstract static class Server extends org.apache.hadoop.ipc.Server {
    boolean verbose;
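
A minimal migration sketch for the deprecated getServer() overloads above
(MyProtocol and impl are hypothetical placeholders for a protocol interface
and its implementation; only setProtocol and setInstance are mandatory, the
rest fall back to the defaults documented on the setters):

    Configuration conf = new Configuration();
    RPC.Server server = new RPC.Builder(conf)
        .setProtocol(MyProtocol.class)   // mandatory
        .setInstance(impl)               // mandatory
        .setBindAddress("0.0.0.0")       // default, shown for clarity
        .setPort(0)                      // 0 = pick any free port
        .setNumHandlers(5)
        .build();                        // throws HadoopIllegalArgumentException
                                         // if a mandatory field is unset
    server.start();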

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Wed Sep 26 22:55:00 2012
@@ -64,6 +64,7 @@ import javax.security.sasl.SaslServer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -107,6 +108,8 @@ import com.google.common.annotations.Vis
  * 
  * @see Client
  */
+@InterfaceAudience.LimitedPrivate(value = { "Common", "HDFS", "MapReduce", "Yarn" })
+@InterfaceStability.Evolving
 public abstract class Server {
   private final boolean authorize;
   private boolean isSecurityEnabled;

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsDynamicMBeanBase.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsDynamicMBeanBase.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsDynamicMBeanBase.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsDynamicMBeanBase.java Wed Sep 26 22:55:00 2012
@@ -129,7 +129,7 @@ public abstract class MetricsDynamicMBea
   @Override
   public Object getAttribute(String attributeName) throws AttributeNotFoundException,
       MBeanException, ReflectionException {
-    if (attributeName == null || attributeName.equals("")) 
+    if (attributeName == null || attributeName.isEmpty()) 
       throw new IllegalArgumentException();
     
     updateMbeanInfoIfMetricsListChanged();
@@ -197,7 +197,7 @@ public abstract class MetricsDynamicMBea
   public Object invoke(String actionName, Object[] parms, String[] signature)
       throws MBeanException, ReflectionException {
     
-    if (actionName == null || actionName.equals("")) 
+    if (actionName == null || actionName.isEmpty()) 
       throw new IllegalArgumentException();
     
     

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableQuantiles.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableQuantiles.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableQuantiles.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableQuantiles.java Wed Sep 26 22:55:00 2012
@@ -35,6 +35,7 @@ import org.apache.hadoop.metrics2.util.Q
 import org.apache.hadoop.metrics2.util.SampleQuantiles;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 /**
  * Watches a stream of long values, maintaining online estimates of specific
@@ -60,8 +61,9 @@ public class MutableQuantiles extends Mu
   @VisibleForTesting
   protected Map<Quantile, Long> previousSnapshot = null;
 
-  private final ScheduledExecutorService scheduler = Executors
-      .newScheduledThreadPool(1);
+  private static final ScheduledExecutorService scheduler = Executors
+      .newScheduledThreadPool(1, new ThreadFactoryBuilder().setDaemon(true)
+          .setNameFormat("MutableQuantiles-%d").build());
 
   /**
    * Instantiates a new {@link MutableQuantiles} for a metric that rolls itself
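
The executor change above does two things: the scheduler is now a single
static pool shared by all MutableQuantiles instances, and its thread is
created as a daemon so a metric that is never stopped cannot keep the JVM
from exiting. A standalone sketch of the same idiom:

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import com.google.common.util.concurrent.ThreadFactoryBuilder;

    // One scheduler per class, not per instance; setDaemon(true) keeps
    // the rollover thread from blocking JVM shutdown.
    static final ScheduledExecutorService SCHEDULER =
        Executors.newScheduledThreadPool(1,
            new ThreadFactoryBuilder()
                .setDaemon(true)
                .setNameFormat("MutableQuantiles-%d") // numbered thread names
                .build());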

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java Wed Sep 26 22:55:00 2012
@@ -355,9 +355,8 @@ public class NetUtils {
   }
   
   /**
-   * Returns the InetSocketAddress that a client can use to connect to the
-   * given listening address.  This returns "hostname:port" of the server,
-   * or "127.0.0.1:port" when given a wildcard address of "0.0.0.0:port".
+   * Returns an InetSocketAddress that a client can use to connect to the
+   * given listening address.
    * 
    * @param addr of a listener
    * @return socket address that a client can use to connect to the server.

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java Wed Sep 26 22:55:00 2012
@@ -144,7 +144,7 @@ public class CsvRecordInput implements R
     
   @Override
   public void startRecord(String tag) throws IOException {
-    if (tag != null && !"".equals(tag)) {
+    if (tag != null && !tag.isEmpty()) {
       char c1 = (char) stream.read();
       char c2 = (char) stream.read();
       if (c1 != 's' || c2 != '{') {
@@ -156,7 +156,7 @@ public class CsvRecordInput implements R
   @Override
   public void endRecord(String tag) throws IOException {
     char c = (char) stream.read();
-    if (tag == null || "".equals(tag)) {
+    if (tag == null || tag.isEmpty()) {
       if (c != '\n' && c != '\r') {
         throw new IOException("Error deserializing record.");
       } else {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java Wed Sep 26 22:55:00 2012
@@ -115,7 +115,7 @@ public class CsvRecordOutput implements 
     
   @Override
   public void startRecord(Record r, String tag) throws IOException {
-    if (tag != null && !"".equals(tag)) {
+    if (tag != null && ! tag.isEmpty()) {
       printCommaUnlessFirst();
       stream.print("s{");
       isFirst = true;
@@ -124,7 +124,7 @@ public class CsvRecordOutput implements 
     
   @Override
   public void endRecord(Record r, String tag) throws IOException {
-    if (tag == null || "".equals(tag)) {
+    if (tag == null || tag.isEmpty()) {
       stream.print("\n");
       isFirst = true;
     } else {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java Wed Sep 26 22:55:00 2012
@@ -25,6 +25,7 @@ import java.net.URLConnection;
 import java.net.UnknownHostException;
 import java.security.AccessController;
 import java.security.PrivilegedAction;
+import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
 import java.util.List;
 import java.util.ServiceLoader;
@@ -212,7 +213,7 @@ public class SecurityUtil {
   private static String replacePattern(String[] components, String hostname)
       throws IOException {
     String fqdn = hostname;
-    if (fqdn == null || fqdn.equals("") || fqdn.equals("0.0.0.0")) {
+    if (fqdn == null || fqdn.isEmpty() || fqdn.equals("0.0.0.0")) {
       fqdn = getLocalHostName();
     }
     return components[0] + "/" + fqdn.toLowerCase() + "@" + components[2];
@@ -451,6 +452,41 @@ public class SecurityUtil {
       return action.run();
     }
   }
+  
+  /**
+   * Perform the given action as the daemon's login user. If an
+   * InterruptedException is thrown, it is converted to an IOException.
+   *
+   * @param action the action to perform
+   * @return the result of the action
+   * @throws IOException in the event of error
+   */
+  public static <T> T doAsLoginUser(PrivilegedExceptionAction<T> action)
+      throws IOException {
+    return doAsUser(UserGroupInformation.getLoginUser(), action);
+  }
+
+  /**
+   * Perform the given action as the daemon's current user. If an
+   * InterruptedException is thrown, it is converted to an IOException.
+   *
+   * @param action the action to perform
+   * @return the result of the action
+   * @throws IOException in the event of error
+   */
+  public static <T> T doAsCurrentUser(PrivilegedExceptionAction<T> action)
+      throws IOException {
+    return doAsUser(UserGroupInformation.getCurrentUser(), action);
+  }
+
+  private static <T> T doAsUser(UserGroupInformation ugi,
+      PrivilegedExceptionAction<T> action) throws IOException {
+    try {
+      return ugi.doAs(action);
+    } catch (InterruptedException ie) {
+      throw new IOException(ie);
+    }
+  }
 
   /**
    * Open a (if need be) secure connection to a URL in a secure environment
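
A usage sketch for the new helpers (conf is an assumed in-scope
Configuration; the FileSystem lookup is just an example of privileged work):

    FileSystem fs = SecurityUtil.doAsLoginUser(
        new PrivilegedExceptionAction<FileSystem>() {
          @Override
          public FileSystem run() throws Exception {
            // Runs with the daemon's login credentials; an
            // InterruptedException from doAs() surfaces as an IOException.
            return FileSystem.get(conf);
          }
        });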

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Wed Sep 26 22:55:00 2012
@@ -18,6 +18,8 @@
 package org.apache.hadoop.security;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION;
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN;
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT;
 
 import java.io.IOException;
 import java.lang.reflect.UndeclaredThrowableException;
@@ -192,13 +194,12 @@ public class UserGroupInformation {
   private static boolean useKerberos;
   /** Server-side groups fetching service */
   private static Groups groups;
+  /** Min time (in seconds) before relogin for Kerberos */
+  private static long kerberosMinSecondsBeforeRelogin;
   /** The configuration to use */
   private static Configuration conf;
 
   
-  /** Leave 10 minutes between relogin attempts. */
-  private static final long MIN_TIME_BEFORE_RELOGIN = 10 * 60 * 1000L;
-  
   /**Environment variable pointing to the token cache file*/
   public static final String HADOOP_TOKEN_FILE_LOCATION = 
     "HADOOP_TOKEN_FILE_LOCATION";
@@ -245,6 +246,16 @@ public class UserGroupInformation {
                                          HADOOP_SECURITY_AUTHENTICATION + 
                                          " of " + value);
     }
+    try {
+        kerberosMinSecondsBeforeRelogin = 1000L * conf.getLong(
+                HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN,
+                HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT);
+    }
+    catch(NumberFormatException nfe) {
+        throw new IllegalArgumentException("Invalid attribute value for " +
+                HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN + " of " +
+                conf.get(HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN));
+    }
     // If we haven't set up testing groups, use the configuration to find it
     if (!(groups instanceof TestingGroups)) {
       groups = Groups.getUserToGroupsMappingService(conf);
@@ -729,7 +740,7 @@ public class UserGroupInformation {
                   return;
                 }
                 nextRefresh = Math.max(getRefreshTime(tgt),
-                                       now + MIN_TIME_BEFORE_RELOGIN);
+                                       now + kerberosMinSecondsBeforeRelogin);
               } catch (InterruptedException ie) {
                 LOG.warn("Terminating renewal thread");
                 return;
@@ -964,10 +975,10 @@ public class UserGroupInformation {
   }
 
   private boolean hasSufficientTimeElapsed(long now) {
-    if (now - user.getLastLogin() < MIN_TIME_BEFORE_RELOGIN ) {
+    if (now - user.getLastLogin() < kerberosMinSecondsBeforeRelogin ) {
       LOG.warn("Not attempting to re-login since the last re-login was " +
-          "attempted less than " + (MIN_TIME_BEFORE_RELOGIN/1000) + " seconds"+
-          " before.");
+          "attempted less than " + (kerberosMinSecondsBeforeRelogin/1000) +
+          " seconds before.");
       return false;
     }
     return true;
@@ -992,7 +1003,7 @@ public class UserGroupInformation {
   @InterfaceAudience.Public
   @InterfaceStability.Evolving
   public static UserGroupInformation createRemoteUser(String user) {
-    if (user == null || "".equals(user)) {
+    if (user == null || user.isEmpty()) {
       throw new IllegalArgumentException("Null user");
     }
     Subject subject = new Subject();
@@ -1027,7 +1038,7 @@ public class UserGroupInformation {
   @InterfaceStability.Evolving
   public static UserGroupInformation createProxyUser(String user,
       UserGroupInformation realUser) {
-    if (user == null || "".equals(user)) {
+    if (user == null || user.isEmpty()) {
       throw new IllegalArgumentException("Null user");
     }
     if (realUser == null) {
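
The hard-coded ten-minute MIN_TIME_BEFORE_RELOGIN is now configurable. A
sketch of overriding it before any UGI login happens (the property name and
its 60-second default come from the core-default.xml change later in this
commit):

    Configuration conf = new Configuration();
    // Allow Kerberos relogin attempts every two minutes.
    conf.setLong("hadoop.kerberos.min.seconds.before.relogin", 120);
    UserGroupInformation.setConfiguration(conf);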

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java Wed Sep 26 22:55:00 2012
@@ -88,7 +88,7 @@ public class ServiceAuthorizationManager
     String clientPrincipal = null; 
     if (krbInfo != null) {
       String clientKey = krbInfo.clientPrincipal();
-      if (clientKey != null && !clientKey.equals("")) {
+      if (clientKey != null && !clientKey.isEmpty()) {
         try {
           clientPrincipal = SecurityUtil.getServerPrincipal(
               conf.get(clientKey), addr);

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/SecretManager.java Wed Sep 26 22:55:00 2012
@@ -93,7 +93,7 @@ public abstract class SecretManager<T ex
   /**
    * The length of the random keys to use.
    */
-  private static final int KEY_LENGTH = 20;
+  private static final int KEY_LENGTH = 64;
 
   /**
    * A thread local store for the Macs.

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java Wed Sep 26 22:55:00 2012
@@ -87,12 +87,12 @@ extends TokenIdentifier {
    */
   @Override
   public UserGroupInformation getUser() {
-    if ( (owner == null) || ("".equals(owner.toString()))) {
+    if ( (owner == null) || (owner.toString().isEmpty())) {
       return null;
     }
     final UserGroupInformation realUgi;
     final UserGroupInformation ugi;
-    if ((realUser == null) || ("".equals(realUser.toString()))
+    if ((realUser == null) || (realUser.toString().isEmpty())
         || realUser.equals(owner)) {
       ugi = realUgi = UserGroupInformation.createRemoteUser(owner.toString());
     } else {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java Wed Sep 26 22:55:00 2012
@@ -265,7 +265,7 @@ extends AbstractDelegationTokenIdentifie
       throw new InvalidToken("User " + renewer + 
                              " tried to renew an expired token");
     }
-    if ((id.getRenewer() == null) || ("".equals(id.getRenewer().toString()))) {
+    if ((id.getRenewer() == null) || (id.getRenewer().toString().isEmpty())) {
       throw new AccessControlException("User " + renewer + 
                                        " tried to renew a token without " +
                                        "a renewer");
@@ -321,7 +321,7 @@ extends AbstractDelegationTokenIdentifie
     HadoopKerberosName cancelerKrbName = new HadoopKerberosName(canceller);
     String cancelerShortName = cancelerKrbName.getShortName();
     if (!canceller.equals(owner)
-        && (renewer == null || "".equals(renewer.toString()) || !cancelerShortName
+        && (renewer == null || renewer.toString().isEmpty() || !cancelerShortName
             .equals(renewer.toString()))) {
       throw new AccessControlException(canceller
           + " is not authorized to cancel the token");

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ExitUtil.java Wed Sep 26 22:55:00 2012
@@ -101,7 +101,7 @@ public final class ExitUtil {
    * @throws ExitException if System.exit is disabled for test purposes
    */
   public static void terminate(int status, Throwable t) throws ExitException {
-    terminate(status, t.getMessage());
+    terminate(status, StringUtils.stringifyException(t));
   }
 
   /**

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java Wed Sep 26 22:55:00 2012
@@ -63,7 +63,7 @@ public class HostsFileReader {
               // Everything from now on is a comment
               break;
             }
-            if (!nodes[i].equals("")) {
+            if (!nodes[i].isEmpty()) {
               LOG.info("Adding " + nodes[i] + " to the list of hosts from " + filename);
               set.add(nodes[i]);  // might need to add canonical name
             }
@@ -80,13 +80,13 @@ public class HostsFileReader {
 
   public synchronized void refresh() throws IOException {
     LOG.info("Refreshing hosts (include/exclude) list");
-    if (!includesFile.equals("")) {
+    if (!includesFile.isEmpty()) {
       Set<String> newIncludes = new HashSet<String>();
       readFileToSet(includesFile, newIncludes);
       // switch the new hosts that are to be included
       includes = newIncludes;
     }
-    if (!excludesFile.equals("")) {
+    if (!excludesFile.isEmpty()) {
       Set<String> newExcludes = new HashSet<String>();
       readFileToSet(excludesFile, newExcludes);
       // switch the excluded hosts

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java Wed Sep 26 22:55:00 2012
@@ -348,7 +348,7 @@ public class StringUtils {
    * @return an array of <code>String</code> values
    */
   public static String[] getTrimmedStrings(String str){
-    if (null == str || "".equals(str.trim())) {
+    if (null == str || str.trim().isEmpty()) {
       return emptyStringArray;
     }
 
@@ -408,7 +408,7 @@ public class StringUtils {
       String str, char separator) {
     // String.split returns a single empty result for splitting the empty
     // string.
-    if ("".equals(str)) {
+    if (str.isEmpty()) {
       return new String[]{""};
     }
     ArrayList<String> strList = new ArrayList<String>();

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c Wed Sep 26 22:55:00 2012
@@ -88,7 +88,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
 
   compressed_direct_buf_len = LZ4_compress(uncompressed_bytes, compressed_bytes, uncompressed_direct_buf_len);
   if (compressed_direct_buf_len < 0){
-    THROW(env, "Ljava/lang/InternalError", "LZ4_compress failed");
+    THROW(env, "java/lang/InternalError", "LZ4_compress failed");
   }
 
   (*env)->SetIntField(env, thisj, Lz4Compressor_uncompressedDirectBufLen, 0);

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c Wed Sep 26 22:55:00 2012
@@ -85,7 +85,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
 
   uncompressed_direct_buf_len = LZ4_uncompress_unknownOutputSize(compressed_bytes, uncompressed_bytes, compressed_direct_buf_len, uncompressed_direct_buf_len);
   if (uncompressed_direct_buf_len < 0) {
-    THROW(env, "Ljava/lang/InternalError", "LZ4_uncompress_unknownOutputSize failed.");
+    THROW(env, "java/lang/InternalError", "LZ4_uncompress_unknownOutputSize failed.");
   }
 
   (*env)->SetIntField(env, thisj, Lz4Decompressor_compressedDirectBufLen, 0);

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c Wed Sep 26 22:55:00 2012
@@ -98,11 +98,11 @@ JNIEXPORT jint JNICALL Java_org_apache_h
   snappy_status ret = dlsym_snappy_compress(uncompressed_bytes,
         uncompressed_direct_buf_len, compressed_bytes, &buf_len);
   if (ret != SNAPPY_OK){
-    THROW(env, "Ljava/lang/InternalError", "Could not compress data. Buffer length is too small.");
+    THROW(env, "java/lang/InternalError", "Could not compress data. Buffer length is too small.");
     return 0;
   }
   if (buf_len > JINT_MAX) {
-    THROW(env, "Ljava/lang/InternalError", "Invalid return buffer length.");
+    THROW(env, "java/lang/InternalError", "Invalid return buffer length.");
     return 0;
   }
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c Wed Sep 26 22:55:00 2012
@@ -92,11 +92,11 @@ JNIEXPORT jint JNICALL Java_org_apache_h
 
   snappy_status ret = dlsym_snappy_uncompress(compressed_bytes, compressed_direct_buf_len, uncompressed_bytes, &uncompressed_direct_buf_len);
   if (ret == SNAPPY_BUFFER_TOO_SMALL){
-    THROW(env, "Ljava/lang/InternalError", "Could not decompress data. Buffer length is too small.");
+    THROW(env, "java/lang/InternalError", "Could not decompress data. Buffer length is too small.");
   } else if (ret == SNAPPY_INVALID_INPUT){
-    THROW(env, "Ljava/lang/InternalError", "Could not decompress data. Input is invalid.");
+    THROW(env, "java/lang/InternalError", "Could not decompress data. Input is invalid.");
   } else if (ret != SNAPPY_OK){
-    THROW(env, "Ljava/lang/InternalError", "Could not decompress data.");
+    THROW(env, "java/lang/InternalError", "Could not decompress data.");
   }
 
   (*env)->SetIntField(env, thisj, SnappyDecompressor_compressedDirectBufLen, 0);

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c Wed Sep 26 22:55:00 2012
@@ -23,6 +23,7 @@
  */
 #include <assert.h>
 #include <arpa/inet.h>
+#include <errno.h>
 #include <stdint.h>
 #include <unistd.h>
 
@@ -33,9 +34,10 @@
 
 #define USE_PIPELINED
 
+#define CRC_INITIAL_VAL 0xffffffff
+
 typedef uint32_t (*crc_update_func_t)(uint32_t, const uint8_t *, size_t);
-static uint32_t crc_init();
-static uint32_t crc_val(uint32_t crc);
+static inline uint32_t crc_val(uint32_t crc);
 static uint32_t crc32_zlib_sb8(uint32_t crc, const uint8_t *buf, size_t length);
 static uint32_t crc32c_sb8(uint32_t crc, const uint8_t *buf, size_t length);
 
@@ -45,6 +47,35 @@ static void pipelined_crc32c(uint32_t *c
 static int cached_cpu_supports_crc32; // initialized by constructor below
 static uint32_t crc32c_hardware(uint32_t crc, const uint8_t* data, size_t length);
 
+int bulk_calculate_crc(const uint8_t *data, size_t data_len,
+                    uint32_t *sums, int checksum_type,
+                    int bytes_per_checksum) {
+  uint32_t crc;
+  crc_update_func_t crc_update_func;
+
+  switch (checksum_type) {
+    case CRC32_ZLIB_POLYNOMIAL:
+      crc_update_func = crc32_zlib_sb8;
+      break;
+    case CRC32C_POLYNOMIAL:
+      crc_update_func = crc32c_sb8;
+      break;
+    default:
+      return -EINVAL;
+      break;
+  }
+  while (likely(data_len > 0)) {
+    int len = likely(data_len >= bytes_per_checksum) ? bytes_per_checksum : data_len;
+    crc = CRC_INITIAL_VAL;
+    crc = crc_update_func(crc, data, len);
+    *sums = ntohl(crc_val(crc));
+    data += len;
+    data_len -= len;
+    sums++;
+  }
+  return 0;
+}
+
 int bulk_verify_crc(const uint8_t *data, size_t data_len,
                     const uint32_t *sums, int checksum_type,
                     int bytes_per_checksum,
@@ -80,7 +111,7 @@ int bulk_verify_crc(const uint8_t *data,
   if (do_pipelined) {
     /* Process three blocks at a time */
     while (likely(n_blocks >= 3)) {
-      crc1 = crc2 = crc3 = crc_init();  
+      crc1 = crc2 = crc3 = CRC_INITIAL_VAL;
       pipelined_crc32c(&crc1, &crc2, &crc3, data, bytes_per_checksum, 3);
 
       crc = ntohl(crc_val(crc1));
@@ -101,7 +132,7 @@ int bulk_verify_crc(const uint8_t *data,
 
     /* One or two blocks */
     if (n_blocks) {
-      crc1 = crc2 = crc_init();
+      crc1 = crc2 = crc3 = CRC_INITIAL_VAL;
       pipelined_crc32c(&crc1, &crc2, &crc3, data, bytes_per_checksum, n_blocks);
 
       if ((crc = ntohl(crc_val(crc1))) != *sums)
@@ -118,7 +149,7 @@ int bulk_verify_crc(const uint8_t *data,
  
     /* For something smaller than a block */
     if (remainder) {
-      crc1 = crc_init();
+      crc1 = crc2 = crc3 = CRC_INITIAL_VAL;
       pipelined_crc32c(&crc1, &crc2, &crc3, data, remainder, 1);
 
       if ((crc = ntohl(crc_val(crc1))) != *sums)
@@ -130,7 +161,7 @@ int bulk_verify_crc(const uint8_t *data,
 
   while (likely(data_len > 0)) {
     int len = likely(data_len >= bytes_per_checksum) ? bytes_per_checksum : data_len;
-    crc = crc_init();
+    crc = CRC_INITIAL_VAL;
     crc = crc_update_func(crc, data, len);
     crc = ntohl(crc_val(crc));
     if (unlikely(crc != *sums)) {
@@ -151,18 +182,10 @@ return_crc_error:
   return INVALID_CHECKSUM_DETECTED;
 }
 
-
-/**
- * Initialize a CRC
- */
-static uint32_t crc_init() {
-  return 0xffffffff;
-}
-
 /**
  * Extract the final result of a CRC
  */
-static uint32_t crc_val(uint32_t crc) {
+static inline uint32_t crc_val(uint32_t crc) {
   return ~crc;
 }
 
@@ -398,7 +421,7 @@ static void pipelined_crc32c(uint32_t *c
         counter--;
       }
 
-      /* Take care of the remainder. They are only up to three bytes,
+      /* Take care of the remainder. They are only up to seven bytes,
        * so performing byte-level crc32 won't take much time.
        */
       bdata = (uint8_t*)data;
@@ -433,7 +456,7 @@ static void pipelined_crc32c(uint32_t *c
         "crc32b (%5), %0;\n\t"
         "crc32b (%5,%4,1), %1;\n\t"
          : "=r"(c1), "=r"(c2) 
-         : "r"(c1), "r"(c2), "r"(c3), "r"(block_size), "r"(bdata)
+         : "r"(c1), "r"(c2), "r"(block_size), "r"(bdata)
         );
         bdata++;
         remainder--;
@@ -593,7 +616,7 @@ static void pipelined_crc32c(uint32_t *c
         "crc32b (%5), %0;\n\t"
         "crc32b (%5,%4,1), %1;\n\t"
          : "=r"(c1), "=r"(c2) 
-         : "r"(c1), "r"(c2), "r"(c3), "r"(block_size), "r"(bdata)
+         : "r"(c1), "r"(c2), "r"(block_size), "r"(bdata)
         );
         bdata++;
         remainder--;
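
bulk_calculate_crc() walks the buffer one bytes_per_checksum chunk at a
time, resets the CRC to CRC_INITIAL_VAL for each chunk, and emits one
32-bit sum per chunk. A Java analogue of the same chunking loop, using
java.util.zip.CRC32 (the CRC32_ZLIB_POLYNOMIAL case only, and ignoring the
ntohl() byte-order conversion the native code applies when storing sums):

    import java.util.zip.CRC32;

    static int[] bulkCalculateCrc(byte[] data, int bytesPerChecksum) {
      int nSums = (data.length + bytesPerChecksum - 1) / bytesPerChecksum;
      int[] sums = new int[nSums];
      CRC32 crc = new CRC32();
      for (int i = 0, off = 0; off < data.length; i++, off += bytesPerChecksum) {
        int len = Math.min(bytesPerChecksum, data.length - off);
        crc.reset();                    // same role as crc = CRC_INITIAL_VAL
        crc.update(data, off, len);     // crc_update_func over one chunk
        sums[i] = (int) crc.getValue(); // one checksum per chunk
      }
      return sums;
    }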

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h Wed Sep 26 22:55:00 2012
@@ -19,6 +19,7 @@
 #define BULK_CRC32_H_INCLUDED
 
 #include <stdint.h>
+#include <unistd.h> /* for size_t */
 
 // Constants for different CRC algorithms
 #define CRC32C_POLYNOMIAL 1
@@ -42,16 +43,45 @@ typedef struct crc32_error {
  * of bytes_per_checksum bytes. The checksums are each 32 bits
  * and are stored in sequential indexes of the 'sums' array.
  *
- *  checksum_type - one of the CRC32 constants defined above
- *  error_info - if non-NULL, will be filled in if an error
- *               is detected
+ * @param data                  The data to checksum
+ * @param dataLen               Length of the data buffer
+ * @param sums                  (out param) buffer to write checksums into.
+ *                              It must contain at least dataLen * 4 bytes.
+ * @param checksum_type         One of the CRC32 algorithm constants defined 
+ *                              above
+ * @param bytes_per_checksum    How many bytes of data to process per checksum.
+ * @param error_info            If non-NULL, will be filled in if an error
+ *                              is detected
  *
- * Returns: 0 for success, non-zero for an error, result codes
- *          for which are defined above
+ * @return                      0 for success, non-zero for an error, result codes
+ *                              for which are defined above
  */
 extern int bulk_verify_crc(const uint8_t *data, size_t data_len,
     const uint32_t *sums, int checksum_type,
     int bytes_per_checksum,
     crc32_error_t *error_info);
 
+/**
+ * Calculate checksums for some data.
+ *
+ * The checksums are each 32 bits and are stored in sequential indexes of the
+ * 'sums' array.
+ *
+ * This function is not (yet) optimized.  It is provided for testing purposes
+ * only.
+ *
+ * @param data                  The data to checksum
+ * @param dataLen               Length of the data buffer
+ * @param sums                  (out param) buffer to write checksums into.
+ *                              It must contain at least dataLen * 4 bytes.
+ * @param checksum_type         One of the CRC32 algorithm constants defined 
+ *                              above
+ * @param bytesPerChecksum      How many bytes of data to process per checksum.
+ *
+ * @return                      0 for success, non-zero for an error
+ */
+int bulk_calculate_crc(const uint8_t *data, size_t data_len,
+                    uint32_t *sums, int checksum_type,
+                    int bytes_per_checksum);
+
 #endif

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Wed Sep 26 22:55:00 2012
@@ -251,6 +251,14 @@
 </property>
 
 <property>
+    <name>hadoop.kerberos.min.seconds.before.relogin</name>
+    <value>60</value>
+    <description>The minimum time between relogin attempts for Kerberos, in
+    seconds.
+    </description>
+</property>
+
+<property>
   <name>hadoop.security.auth_to_local</name>
   <value></value>
   <description>Maps kerberos principals to local user names</description>
@@ -1096,5 +1104,15 @@
   </description>
 </property>
 
+<property>
+  <name>fs.permissions.umask-mode</name>
+  <value>022</value>
+  <description>
+    The umask used when creating files and directories.
+    Can be in octal or in symbolic. Examples are:
+    "022" (octal for u=rwx,g=r-x,o=r-x in symbolic),
+    or "u=rwx,g=rwx,o=" (symbolic for 007 in octal).
+  </description>
+</property>
 
 </configuration>
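
fs.permissions.umask-mode replaces the deprecated dfs.umaskmode (see the
DeprecatedProperties change below). A sketch of how the umask combines with
a requested mode, assuming Hadoop's FsPermission.getUMask()/applyUMask()
helpers:

    Configuration conf = new Configuration();
    conf.set("fs.permissions.umask-mode", "022");     // or "u=rwx,g=rwx,o="
    FsPermission umask = FsPermission.getUMask(conf);
    // 0777 masked by 022 yields 0755: group and other lose write.
    FsPermission effective = new FsPermission((short) 0777).applyUMask(umask);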

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/DeprecatedProperties.apt.vm Wed Sep 26 22:55:00 2012
@@ -24,8 +24,6 @@ Deprecated Properties
 *-------------------------------+-----------------------+
 || <<Deprecated property name>> || <<New property name>>|
 *-------------------------------+-----------------------+
-|StorageId | dfs.datanode.StorageId
-*---+---+
 |create.empty.dir.if.nonexist | mapreduce.jobcontrol.createdir.ifnotexist
 *---+---+
 |dfs.access.time.precision | dfs.namenode.accesstime.precision
@@ -38,14 +36,16 @@ Deprecated Properties
 *---+---+
 |dfs.block.size | dfs.blocksize
 *---+---+
-|dfs.client.buffer.dir | fs.client.buffer.dir
-*---+---+
 |dfs.data.dir | dfs.datanode.data.dir
 *---+---+
 |dfs.datanode.max.xcievers | dfs.datanode.max.transfer.threads
 *---+---+
 |dfs.df.interval | fs.df.interval
 *---+---+
+|dfs.federation.nameservice.id | dfs.nameservice.id
+*---+---+
+|dfs.federation.nameservices | dfs.nameservices
+*---+---+
 |dfs.http.address | dfs.namenode.http-address
 *---+---+
 |dfs.https.address | dfs.namenode.https-address
@@ -54,10 +54,10 @@ Deprecated Properties
 *---+---+
 |dfs.https.need.client.auth | dfs.client.https.need-auth
 *---+---+
-|dfs.max-repl-streams | dfs.namenode.replication.max-streams
-*---+---+
 |dfs.max.objects | dfs.namenode.max.objects
 *---+---+
+|dfs.max-repl-streams | dfs.namenode.replication.max-streams
+*---+---+
 |dfs.name.dir | dfs.namenode.name.dir
 *---+---+
 |dfs.name.dir.restore | dfs.namenode.name.dir.restore
@@ -86,6 +86,8 @@ Deprecated Properties
 *---+---+
 |dfs.socket.timeout | dfs.client.socket-timeout
 *---+---+
+|dfs.umaskmode | fs.permissions.umask-mode
+*---+---+
 |dfs.write.packet.size | dfs.client-write-packet-size
 *---+---+
 |fs.checkpoint.dir | dfs.namenode.checkpoint.dir
@@ -106,10 +108,10 @@ Deprecated Properties
 *---+---+
 |hadoop.pipes.command-file.keep | mapreduce.pipes.commandfile.preserve
 *---+---+
-|hadoop.pipes.executable | mapreduce.pipes.executable
-*---+---+
 |hadoop.pipes.executable.interpretor | mapreduce.pipes.executable.interpretor
 *---+---+
+|hadoop.pipes.executable | mapreduce.pipes.executable
+*---+---+
 |hadoop.pipes.java.mapper | mapreduce.pipes.isjavamapper
 *---+---+
 |hadoop.pipes.java.recordreader | mapreduce.pipes.isjavarecordreader
@@ -130,6 +132,12 @@ Deprecated Properties
 *---+---+
 |io.sort.spill.percent | mapreduce.map.sort.spill.percent
 *---+---+
+|jobclient.completion.poll.interval | mapreduce.client.completion.pollinterval
+*---+---+
+|jobclient.output.filter | mapreduce.client.output.filter
+*---+---+
+|jobclient.progress.monitor.poll.interval | mapreduce.client.progressmonitor.pollinterval
+*---+---+
 |job.end.notification.url | mapreduce.job.end-notification.url
 *---+---+
 |job.end.retry.attempts | mapreduce.job.end-notification.retry.attempts
@@ -138,12 +146,6 @@ Deprecated Properties
 *---+---+
 |job.local.dir | mapreduce.job.local.dir
 *---+---+
-|jobclient.completion.poll.interval | mapreduce.client.completion.pollinterval
-*---+---+
-|jobclient.output.filter | mapreduce.client.output.filter
-*---+---+
-|jobclient.progress.monitor.poll.interval | mapreduce.client.progressmonitor.pollinterval
-*---+---+
 |keep.failed.task.files | mapreduce.task.files.preserve.failedtasks
 *---+---+
 |keep.task.files.pattern | mapreduce.task.files.preserve.filepattern
@@ -196,10 +198,6 @@ Deprecated Properties
 *---+---+
 |mapred.compress.map.output | mapreduce.map.output.compress
 *---+---+
-|mapred.create.symlink | NONE - symlinking is always on
-*---+---+
-|mapreduce.job.cache.symlink.create | NONE - symlinking is always on
-*---+---+
 |mapred.data.field.separator | mapreduce.fieldsel.data.field.separator
 *---+---+
 |mapred.debug.out.lines | mapreduce.task.debugout.lines
@@ -214,18 +212,18 @@ Deprecated Properties
 *---+---+
 |mapred.heartbeats.in.second | mapreduce.jobtracker.heartbeats.in.second
 *---+---+
-|mapred.hosts | mapreduce.jobtracker.hosts.filename
-*---+---+
 |mapred.hosts.exclude | mapreduce.jobtracker.hosts.exclude.filename
 *---+---+
-|mapred.inmem.merge.threshold | mapreduce.reduce.merge.inmem.threshold
+|mapred.hosts | mapreduce.jobtracker.hosts.filename
 *---+---+
-|mapred.input.dir | mapreduce.input.fileinputformat.inputdir
+|mapred.inmem.merge.threshold | mapreduce.reduce.merge.inmem.threshold
 *---+---+
 |mapred.input.dir.formats | mapreduce.input.multipleinputs.dir.formats
 *---+---+
 |mapred.input.dir.mappers | mapreduce.input.multipleinputs.dir.mappers
 *---+---+
+|mapred.input.dir | mapreduce.input.fileinputformat.inputdir
+*---+---+
 |mapred.input.pathFilter.class | mapreduce.input.pathFilter.class
 *---+---+
 |mapred.jar | mapreduce.job.jar
@@ -236,6 +234,8 @@ Deprecated Properties
 *---+---+
 |mapred.job.id | mapreduce.job.id
 *---+---+
+|mapred.jobinit.threads | mapreduce.jobtracker.jobinit.threads
+*---+---+
 |mapred.job.map.memory.mb | mapreduce.map.memory.mb
 *---+---+
 |mapred.job.name | mapreduce.job.name
@@ -258,42 +258,40 @@ Deprecated Properties
 *---+---+
 |mapred.job.shuffle.merge.percent | mapreduce.reduce.shuffle.merge.percent
 *---+---+
-|mapred.job.tracker | mapreduce.jobtracker.address
-*---+---+
 |mapred.job.tracker.handler.count | mapreduce.jobtracker.handler.count
 *---+---+
 |mapred.job.tracker.history.completed.location | mapreduce.jobtracker.jobhistory.completed.location
 *---+---+
 |mapred.job.tracker.http.address | mapreduce.jobtracker.http.address
 *---+---+
+|mapred.jobtracker.instrumentation | mapreduce.jobtracker.instrumentation
+*---+---+
+|mapred.jobtracker.job.history.block.size | mapreduce.jobtracker.jobhistory.block.size
+*---+---+
 |mapred.job.tracker.jobhistory.lru.cache.size | mapreduce.jobtracker.jobhistory.lru.cache.size
 *---+---+
+|mapred.job.tracker | mapreduce.jobtracker.address
+*---+---+
+|mapred.jobtracker.maxtasks.per.job | mapreduce.jobtracker.maxtasks.perjob
+*---+---+
 |mapred.job.tracker.persist.jobstatus.active | mapreduce.jobtracker.persist.jobstatus.active
 *---+---+
 |mapred.job.tracker.persist.jobstatus.dir | mapreduce.jobtracker.persist.jobstatus.dir
 *---+---+
 |mapred.job.tracker.persist.jobstatus.hours | mapreduce.jobtracker.persist.jobstatus.hours
 *---+---+
-|mapred.job.tracker.retire.jobs | mapreduce.jobtracker.retirejobs
+|mapred.jobtracker.restart.recover | mapreduce.jobtracker.restart.recover
 *---+---+
 |mapred.job.tracker.retiredjobs.cache.size | mapreduce.jobtracker.retiredjobs.cache.size
 *---+---+
-|mapred.jobinit.threads | mapreduce.jobtracker.jobinit.threads
-*---+---+
-|mapred.jobtracker.instrumentation | mapreduce.jobtracker.instrumentation
-*---+---+
-|mapred.jobtracker.job.history.block.size | mapreduce.jobtracker.jobhistory.block.size
-*---+---+
-|mapred.jobtracker.maxtasks.per.job | mapreduce.jobtracker.maxtasks.perjob
+|mapred.job.tracker.retire.jobs | mapreduce.jobtracker.retirejobs
 *---+---+
-|mapred.jobtracker.restart.recover | mapreduce.jobtracker.restart.recover
+|mapred.jobtracker.taskalloc.capacitypad | mapreduce.jobtracker.taskscheduler.taskalloc.capacitypad
 *---+---+
 |mapred.jobtracker.taskScheduler | mapreduce.jobtracker.taskscheduler
 *---+---+
 |mapred.jobtracker.taskScheduler.maxRunningTasksPerJob | mapreduce.jobtracker.taskscheduler.maxrunningtasks.perjob
 *---+---+
-|mapred.jobtracker.taskalloc.capacitypad | mapreduce.jobtracker.taskscheduler.taskalloc.capacitypad
-*---+---+
 |mapred.join.expr | mapreduce.join.expr
 *---+---+
 |mapred.join.keycomparator | mapreduce.join.keycomparator
@@ -320,19 +318,19 @@ Deprecated Properties
 *---+---+
 |mapred.map.output.compression.codec | mapreduce.map.output.compress.codec
 *---+---+
-|mapred.map.task.debug.script | mapreduce.map.debug.script
-*---+---+
-|mapred.map.tasks | mapreduce.job.maps
-*---+---+
-|mapred.map.tasks.speculative.execution | mapreduce.map.speculative
-*---+---+
 |mapred.mapoutput.key.class | mapreduce.map.output.key.class
 *---+---+
 |mapred.mapoutput.value.class | mapreduce.map.output.value.class
 *---+---+
+|mapred.mapper.regex.group | mapreduce.mapper.regexmapper..group
+*---+---+
 |mapred.mapper.regex | mapreduce.mapper.regex
 *---+---+
-|mapred.mapper.regex.group | mapreduce.mapper.regexmapper..group
+|mapred.map.task.debug.script | mapreduce.map.debug.script
+*---+---+
+|mapred.map.tasks | mapreduce.job.maps
+*---+---+
+|mapred.map.tasks.speculative.execution | mapreduce.map.speculative
 *---+---+
 |mapred.max.map.failures.percent | mapreduce.map.failures.maxpercent
 *---+---+
@@ -352,12 +350,12 @@ Deprecated Properties
 *---+---+
 |mapred.min.split.size.per.rack | mapreduce.input.fileinputformat.split.minsize.per.rack
 *---+---+
-|mapred.output.compress | mapreduce.output.fileoutputformat.compress
-*---+---+
 |mapred.output.compression.codec | mapreduce.output.fileoutputformat.compress.codec
 *---+---+
 |mapred.output.compression.type | mapreduce.output.fileoutputformat.compress.type
 *---+---+
+|mapred.output.compress | mapreduce.output.fileoutputformat.compress
+*---+---+
 |mapred.output.dir | mapreduce.output.fileoutputformat.outputdir
 *---+---+
 |mapred.output.key.class | mapreduce.job.output.key.class
@@ -440,12 +438,6 @@ Deprecated Properties
 *---+---+
 |mapred.task.timeout | mapreduce.task.timeout
 *---+---+
-|mapred.task.tracker.http.address | mapreduce.tasktracker.http.address
-*---+---+
-|mapred.task.tracker.report.address | mapreduce.tasktracker.report.address
-*---+---+
-|mapred.task.tracker.task-controller | mapreduce.tasktracker.taskcontroller
-*---+---+
 |mapred.tasktracker.dns.interface | mapreduce.tasktracker.dns.interface
 *---+---+
 |mapred.tasktracker.dns.nameserver | mapreduce.tasktracker.dns.nameserver
@@ -454,6 +446,8 @@ Deprecated Properties
 *---+---+
 |mapred.tasktracker.expiry.interval | mapreduce.jobtracker.expire.trackers.interval
 *---+---+
+|mapred.task.tracker.http.address | mapreduce.tasktracker.http.address
+*---+---+
 |mapred.tasktracker.indexcache.mb | mapreduce.tasktracker.indexcache.mb
 *---+---+
 |mapred.tasktracker.instrumentation | mapreduce.tasktracker.instrumentation
@@ -466,6 +460,10 @@ Deprecated Properties
 *---+---+
 |mapred.tasktracker.reduce.tasks.maximum | mapreduce.tasktracker.reduce.tasks.maximum
 *---+---+
+|mapred.task.tracker.report.address | mapreduce.tasktracker.report.address
+*---+---+
+|mapred.task.tracker.task-controller | mapreduce.tasktracker.taskcontroller
+*---+---+
 |mapred.tasktracker.taskmemorymanager.monitoring-interval | mapreduce.tasktracker.taskmemorymanager.monitoringinterval
 *---+---+
 |mapred.tasktracker.tasks.sleeptime-before-sigkill | mapreduce.tasktracker.tasks.sleeptimebeforesigkill
@@ -480,20 +478,12 @@ Deprecated Properties
 *---+---+
 |mapred.tip.id | mapreduce.task.id
 *---+---+
-|mapred.used.genericoptionsparser | mapreduce.client.genericoptionsparser.used
-*---+---+
-|mapred.userlog.limit.kb | mapreduce.task.userlog.limit.kb
-*---+---+
-|mapred.userlog.retain.hours | mapreduce.job.userlog.retain.hours
-*---+---+
-|mapred.work.output.dir | mapreduce.task.output.dir
-*---+---+
-|mapred.working.dir | mapreduce.job.working.dir
-*---+---+
 |mapreduce.combine.class | mapreduce.job.combine.class
 *---+---+
 |mapreduce.inputformat.class | mapreduce.job.inputformat.class
 *---+---+
+|mapreduce.job.counters.limit | mapreduce.job.counters.max
+*---+---+
 |mapreduce.jobtracker.permissions.supergroup | mapreduce.cluster.permissions.supergroup
 *---+---+
 |mapreduce.map.class | mapreduce.job.map.class
@@ -504,6 +494,16 @@ Deprecated Properties
 *---+---+
 |mapreduce.reduce.class | mapreduce.job.reduce.class
 *---+---+
+|mapred.used.genericoptionsparser | mapreduce.client.genericoptionsparser.used
+*---+---+
+|mapred.userlog.limit.kb | mapreduce.task.userlog.limit.kb
+*---+---+
+|mapred.userlog.retain.hours | mapreduce.job.userlog.retain.hours
+*---+---+
+|mapred.working.dir | mapreduce.job.working.dir
+*---+---+
+|mapred.work.output.dir | mapreduce.task.output.dir
+*---+---+
 |min.num.spills.for.combine | mapreduce.map.combine.minspills
 *---+---+
 |reduce.output.key.value.fields.spec | mapreduce.fieldsel.reduce.output.key.value.fields.spec
@@ -538,3 +538,13 @@ Deprecated Properties
 *---+---+
 |webinterface.private.actions | mapreduce.jobtracker.webinterface.trusted
 *---+---+
+
+  The following table lists additional changes to some configuration properties:
+
+*-------------------------------+-----------------------+
+|| <<Deprecated property name>> || <<New property name>>|
+*-------------------------------+-----------------------+
+|mapred.create.symlink | NONE - symlinking is always on
+*---+---+
+|mapreduce.job.cache.symlink.create | NONE - symlinking is always on
+*---+---+
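
  For readers applying this table, here is a minimal sketch of how a
  deprecated key behaves at runtime. The key pair is taken from the table
  above; note that the mapping itself is registered on the MapReduce side
  (loading org.apache.hadoop.mapred.JobConf does this), so a bare
  hadoop-common JVM may not have it:

    import org.apache.hadoop.conf.Configuration;

    public class DeprecatedKeySketch {
      public static void main(String[] args) {
        // Assumes the MapReduce deprecation mappings are registered.
        Configuration conf = new Configuration(false);
        conf.set("mapred.job.tracker", "jt.example.com:8021");
        // A value set under the deprecated name is readable under the
        // new name, because Configuration resolves deprecated keys.
        System.out.println(conf.get("mapreduce.jobtracker.address"));
      }
    }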

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1379224-1390762

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java Wed Sep 26 22:55:00 2012
@@ -358,6 +358,36 @@ public class TestConfiguration extends T
     tearDown();
   }
 
+  public void testRelativeIncludes() throws Exception {
+    tearDown();
+    String relConfig = new File("./tmp/test-config.xml").getAbsolutePath();
+    String relConfig2 = new File("./tmp/test-config2.xml").getAbsolutePath();
+
+    new File(new File(relConfig).getParent()).mkdirs();
+    out = new BufferedWriter(new FileWriter(relConfig2));
+    startConfig();
+    appendProperty("a", "b");
+    endConfig();
+
+    out = new BufferedWriter(new FileWriter(relConfig));
+    startConfig();
+    // Add the relative path instead of the absolute one.
+    addInclude(new File(relConfig2).getName());
+    appendProperty("c", "d");
+    endConfig();
+
+    // verify that properties from both the including and the included
+    // file are visible
+    Path fileResource = new Path(relConfig);
+    conf.addResource(fileResource);
+    assertEquals("b", conf.get("a"));
+    assertEquals("d", conf.get("c"));
+
+    // Cleanup
+    new File(relConfig).delete();
+    new File(relConfig2).delete();
+    new File(new File(relConfig).getParent()).delete();
+  }
+
   BufferedWriter out;
 	
   public void testIntegerRanges() {

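The new testRelativeIncludes case is easier to follow with the generated files
in view. Below is a sketch of what the helpers plausibly write and how the
relative href resolves; the file names match the test, but the exact XML
emitted by startConfig/addInclude/appendProperty is an assumption:

    import java.io.File;
    import java.io.FileWriter;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    public class RelativeIncludeSketch {
      public static void main(String[] args) throws Exception {
        new File("tmp").mkdirs();
        FileWriter w = new FileWriter("tmp/test-config2.xml");
        w.write("<?xml version=\"1.0\"?>\n<configuration>\n"
            + "  <property><name>a</name><value>b</value></property>\n"
            + "</configuration>\n");
        w.close();
        // The href below is relative; resolving it against the location
        // of the including file is the behavior the test pins down.
        w = new FileWriter("tmp/test-config.xml");
        w.write("<?xml version=\"1.0\"?>\n"
            + "<configuration xmlns:xi=\"http://www.w3.org/2001/XInclude\">\n"
            + "  <xi:include href=\"test-config2.xml\"/>\n"
            + "  <property><name>c</name><value>d</value></property>\n"
            + "</configuration>\n");
        w.close();
        Configuration conf = new Configuration(false);
        conf.addResource(new Path("tmp/test-config.xml"));
        System.out.println(conf.get("a") + " " + conf.get("c")); // b d
      }
    }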
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java Wed Sep 26 22:55:00 2012
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.fs;
 
+import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.EnumSet;
@@ -61,8 +62,15 @@ public abstract class FileContextMainOpe
   private static String TEST_DIR_AXX = "test/hadoop/axx";
   private static int numBlocks = 2;
   
-  static  final String LOCAL_FS_ROOT_URI = "file:///tmp/test";
-  
+  public static final Path LOCAL_FS_ROOT_PATH;
+      
+  static {
+    File testBuildData = new File(System.getProperty("test.build.data",
+                                    "build/test/data"));
+    Path localFsRootPath = new Path(testBuildData.getAbsolutePath(), 
+                                    "root-uri");
+    LOCAL_FS_ROOT_PATH = localFsRootPath.makeQualified(LocalFileSystem.NAME, null);
+  }
   
   protected static FileContext fc;
   
@@ -95,7 +103,7 @@ public abstract class FileContextMainOpe
   @After
   public void tearDown() throws Exception {
     fc.delete(new Path(getAbsoluteTestRootPath(fc), new Path("test")), true);
-    fc.delete(new Path(LOCAL_FS_ROOT_URI), true);
+    fc.delete(LOCAL_FS_ROOT_PATH, true);
   }
   
   
@@ -176,7 +184,7 @@ public abstract class FileContextMainOpe
     
     // Try a URI
 
-    absoluteDir = new Path(LOCAL_FS_ROOT_URI + "/existingDir");
+    absoluteDir = new Path(LOCAL_FS_ROOT_PATH, "existingDir");
     fc.mkdir(absoluteDir, FileContext.DEFAULT_PERM, true);
     fc.setWorkingDirectory(absoluteDir);
     Assert.assertEquals(absoluteDir, fc.getWorkingDirectory());

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java Wed Sep 26 22:55:00 2012
@@ -293,6 +293,23 @@ public class TestLocalDirAllocator {
     }
   }
 
+  /*
+   * Test that getLocalPathForWrite fails with a descriptive IOException,
+   * not an NPE, when mapred.local.dir is not configured.
+   */
+  @Test
+  public void testShouldNotThrowNPE() throws Exception {
+    Configuration conf1 = new Configuration();
+    try {
+      dirAllocator.getLocalPathForWrite("/test", conf1);
+      fail("Exception not thrown when " + CONTEXT + " is not set");
+    } catch (IOException e) {
+      assertEquals(CONTEXT + " not configured", e.getMessage());
+    } catch (NullPointerException e) {
+      fail("Lack of configuration should not have thrown an NPE.");
+    }
+  }
+
   /** Test no side effect files are left over. After creating a temp
    * file, remove both the temp file and its parent. Verify that
    * no files or directories are left over as can happen when File objects

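For contrast with the misconfigured case in testShouldNotThrowNPE above, here
is a minimal sketch of the configured path. It assumes CONTEXT is the
mapred.local.dir key, as the test's comment suggests; the directories are
illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.LocalDirAllocator;
    import org.apache.hadoop.fs.Path;

    public class LocalDirAllocatorSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(false);
        conf.set("mapred.local.dir", "/tmp/local1,/tmp/local2");
        LocalDirAllocator allocator = new LocalDirAllocator("mapred.local.dir");
        // Picks one of the configured directories with enough room and
        // returns a path under it; with no directories configured the
        // call fails with the IOException the test asserts.
        Path p = allocator.getLocalPathForWrite("scratch/part-0", conf);
        System.out.println(p);
      }
    }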
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java Wed Sep 26 22:55:00 2012
@@ -26,6 +26,8 @@ import java.io.File;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.net.URI;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
 import java.util.HashSet;
 import java.util.Set;
 
@@ -99,7 +101,6 @@ public class TestTrash extends TestCase 
   }
 
   /**
-   * 
    * Test trash for the shell's delete command for the default file system
   * specified in the parameter conf
    * @param conf 
@@ -429,8 +430,40 @@ public class TestTrash extends TestCase 
       String output = byteStream.toString();
       System.setOut(stdout);
       System.setErr(stderr);
-      assertTrue("skipTrash wasn't suggested as remedy to failed rm command",
-        output.indexOf(("Consider using -skipTrash option")) != -1 );
+      assertTrue("skipTrash wasn't suggested as remedy to failed rm command" +
+          " or we deleted / even though we could not get server defaults",
+          output.indexOf("Consider using -skipTrash option") != -1 ||
+          output.indexOf("Failed to determine server trash configuration") != -1);
+    }
+
+    // Verify old checkpoint format is recognized
+    {
+      // emulate two old trash checkpoint directories, one that is old enough
+      // to be deleted on the next expunge and one that isn't.
+      long trashInterval = conf.getLong(FS_TRASH_INTERVAL_KEY,
+          FS_TRASH_INTERVAL_DEFAULT);
+      long now = Time.now();
+      DateFormat oldCheckpointFormat = new SimpleDateFormat("yyMMddHHmm");
+      Path dirToDelete = new Path(trashRoot.getParent(),
+          oldCheckpointFormat.format(now - (trashInterval * 60 * 1000) - 1));
+      Path dirToKeep = new Path(trashRoot.getParent(),
+          oldCheckpointFormat.format(now));
+      mkdir(trashRootFs, dirToDelete);
+      mkdir(trashRootFs, dirToKeep);
+
+      // Clear out trash
+      int rc = -1;
+      try {
+        rc = shell.run(new String [] { "-expunge" } );
+      } catch (Exception e) {
+        System.err.println("Exception raised from fs expunge " +
+            e.getLocalizedMessage());
+      }
+      assertEquals(0, rc);
+      assertFalse("old checkpoint format not recognized",
+          trashRootFs.exists(dirToDelete));
+      assertTrue("old checkpoint format directory should not be removed",
+          trashRootFs.exists(dirToKeep));
     }
 
   }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java Wed Sep 26 22:55:00 2012
@@ -35,6 +35,7 @@ import javax.servlet.http.HttpServletReq
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;
 
 public class TestServletFilter extends HttpServerFunctionalTest {
@@ -163,7 +164,7 @@ public class TestServletFilter extends H
   @Test
   public void testServletFilterWhenInitThrowsException() throws Exception {
     Configuration conf = new Configuration();
-    // start a http server with CountingFilter
+    // start a http server with ErrorFilter
     conf.set(HttpServer.FILTER_INITIALIZER_PROPERTY,
         ErrorFilter.Initializer.class.getName());
     HttpServer http = createTestServer(conf);
@@ -174,4 +175,25 @@ public class TestServletFilter extends H
       assertTrue( e.getMessage().contains("Problem in starting http server. Server handlers failed"));
     }
   }
+  
+  /**
+   * Similar to the above test case, except that it uses a different API to add the
+   * filter. Regression test for HADOOP-8786.
+   */
+  @Test
+  public void testContextSpecificServletFilterWhenInitThrowsException()
+      throws Exception {
+    Configuration conf = new Configuration();
+    HttpServer http = createTestServer(conf);
+    http.defineFilter(http.webAppContext,
+        "ErrorFilter", ErrorFilter.class.getName(),
+        null, null);
+    try {
+      http.start();
+      fail("expecting exception");
+    } catch (IOException e) {
+      GenericTestUtils.assertExceptionContains("Unable to initialize WebAppContext", e);
+    }
+  }
+
 }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/RandomDatum.java Wed Sep 26 22:55:00 2012
@@ -21,6 +21,7 @@ package org.apache.hadoop.io;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Random;
 
 
@@ -65,6 +66,11 @@ public class RandomDatum implements Writ
     return compareTo((RandomDatum)o) == 0;
   }
 
+  @Override
+  public int hashCode() {
+    return Arrays.hashCode(this.data);
+  }
+  
   private static final char[] HEX_DIGITS =
   {'0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f'};
 

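The hashCode override matters because equal RandomDatum instances must also
hash alike before they can serve as HashMap keys, which the TestCodec change
below exercises. A minimal illustration of that contract; the Datum class
here is hypothetical:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    public class HashContractSketch {
      static class Datum {
        final byte[] data;
        Datum(byte[] data) { this.data = data; }
        @Override public boolean equals(Object o) {
          return o instanceof Datum && Arrays.equals(data, ((Datum) o).data);
        }
        // Without this override, an equal copy would hash to a different
        // bucket and HashMap lookups by value would miss.
        @Override public int hashCode() { return Arrays.hashCode(data); }
      }

      public static void main(String[] args) {
        Map<Datum, String> m = new HashMap<Datum, String>();
        m.put(new Datum(new byte[] {1, 2, 3}), "found");
        System.out.println(m.get(new Datum(new byte[] {1, 2, 3}))); // found
      }
    }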
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java Wed Sep 26 22:55:00 2012
@@ -67,6 +67,36 @@ public class TestIOUtils {
   }
 
   @Test
+  public void testCopyBytesShouldCloseInputStreamWhenOutputStreamCloseThrowsRuntimeException()
+      throws Exception {
+    InputStream inputStream = Mockito.mock(InputStream.class);
+    OutputStream outputStream = Mockito.mock(OutputStream.class);
+    Mockito.doReturn(-1).when(inputStream).read(new byte[1]);
+    Mockito.doThrow(new RuntimeException()).when(outputStream).close();
+    try {
+      IOUtils.copyBytes(inputStream, outputStream, 1, true);
+      fail("Didn't throw exception");
+    } catch (RuntimeException e) {
+      // expected: the failure from outputStream.close() must propagate
+    }
+    Mockito.verify(outputStream, Mockito.atLeastOnce()).close();
+  }
+
+  @Test
+  public void testCopyBytesShouldCloseInputStreamWhenInputStreamCloseThrowsRuntimeException()
+      throws Exception {
+    InputStream inputStream = Mockito.mock(InputStream.class);
+    OutputStream outputStream = Mockito.mock(OutputStream.class);
+    Mockito.doReturn(-1).when(inputStream).read(new byte[1]);
+    Mockito.doThrow(new RuntimeException()).when(inputStream).close();
+    try {
+      IOUtils.copyBytes(inputStream, outputStream, 1, true);
+      fail("Didn't throw exception");
+    } catch (RuntimeException e) {
+      // expected: the failure from inputStream.close() must propagate
+    }
+    Mockito.verify(inputStream, Mockito.atLeastOnce()).close();
+  }
+
+  @Test
   public void testCopyBytesShouldNotCloseStreamsWhenCloseIsFalse()
       throws Exception {
     InputStream inputStream = Mockito.mock(InputStream.class);
@@ -76,7 +106,7 @@ public class TestIOUtils {
     Mockito.verify(inputStream, Mockito.atMost(0)).close();
     Mockito.verify(outputStream, Mockito.atMost(0)).close();
   }
-  
+
   @Test
   public void testCopyBytesWithCountShouldCloseStreamsWhenCloseIsTrue()
       throws Exception {
@@ -117,7 +147,7 @@ public class TestIOUtils {
     Mockito.verify(inputStream, Mockito.atLeastOnce()).close();
     Mockito.verify(outputStream, Mockito.atLeastOnce()).close();
   }
-  
+
   @Test
   public void testWriteFully() throws IOException {
     final int INPUT_BUFFER_LEN = 10000;
@@ -148,6 +178,7 @@ public class TestIOUtils {
       for (int i = HALFWAY; i < input.length; i++) {
         assertEquals(input[i - HALFWAY], output[i]);
       }
+      raf.close();
     } finally {
       File f = new File(TEST_FILE_NAME);
       if (f.exists()) {
@@ -177,7 +208,7 @@ public class TestIOUtils {
           "Error while reading compressed data", ioe);
     }
   }
-  
+
   @Test
   public void testSkipFully() throws IOException {
     byte inArray[] = new byte[] {0, 1, 2, 3, 4};

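The two new close-propagation tests pin down a cleanup contract: when asked to
close, copyBytes must close both streams even if one close() throws, and the
thrown exception must still reach the caller. Here is a sketch of a
guarded-close pattern that satisfies those assertions, written against
IOUtils.closeStream; it illustrates the contract and is not the actual
IOUtils.copyBytes source:

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;
    import org.apache.hadoop.io.IOUtils;

    public class GuardedCopySketch {
      static void copyThenClose(InputStream in, OutputStream out,
          int bufSize, boolean close) throws IOException {
        try {
          IOUtils.copyBytes(in, out, bufSize, false); // copy, don't close
          if (close) {
            out.close(); // a close failure propagates to the caller...
            out = null;
            in.close();
            in = null;
          }
        } finally {
          if (close) {
            // ...but the other stream is still closed; closeStream
            // swallows secondary failures and tolerates null.
            IOUtils.closeStream(out);
            IOUtils.closeStream(in);
          }
        }
      }
    }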
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java Wed Sep 26 22:55:00 2012
@@ -34,6 +34,8 @@ import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
 import java.util.Random;
 import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;
@@ -202,6 +204,15 @@ public class TestCodec {
       v2.readFields(inflateIn);
       assertTrue("original and compressed-then-decompressed-output not equal",
                  k1.equals(k2) && v1.equals(v2));
+      
+      // original and compressed-then-decompressed-output have the same hashCode
+      Map<RandomDatum, String> m = new HashMap<RandomDatum, String>();
+      m.put(k1, k1.toString());
+      m.put(v1, v1.toString());
+      String result = m.get(k2);
+      assertEquals("k1 and k2 hashcode not equal", k1.toString(), result);
+      result = m.get(v2);
+      assertEquals("v1 and v2 hashcode not equal", v1.toString(), result);
     }
 
     // De-compress data byte-at-a-time

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java Wed Sep 26 22:55:00 2012
@@ -67,7 +67,7 @@ public class TestNativeIO {
 
     assertEquals(System.getProperty("user.name"), stat.getOwner());
     assertNotNull(stat.getGroup());
-    assertTrue(!"".equals(stat.getGroup()));
+    assertTrue(!stat.getGroup().isEmpty());
     assertEquals("Stat mode field should indicate a regular file",
       NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT);
   }
@@ -96,7 +96,7 @@ public class TestNativeIO {
               NativeIO.Stat stat = NativeIO.fstat(fos.getFD());
               assertEquals(System.getProperty("user.name"), stat.getOwner());
               assertNotNull(stat.getGroup());
-              assertTrue(!"".equals(stat.getGroup()));
+              assertTrue(!stat.getGroup().isEmpty());
               assertEquals("Stat mode field should indicate a regular file",
                 NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT);
             } catch (Throwable t) {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestSerializationFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestSerializationFactory.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestSerializationFactory.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestSerializationFactory.java Wed Sep 26 22:55:00 2012
@@ -17,27 +17,62 @@
  */
 package org.apache.hadoop.io.serializer;
 
+import org.junit.BeforeClass;
 import org.junit.Test;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertNotNull;
 
+import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.Writable;
+import org.apache.log4j.Level;
 
 public class TestSerializationFactory {
 
+  static {
+    ((Log4JLogger) SerializationFactory.LOG).getLogger().setLevel(Level.ALL);
+  }
+
+  static Configuration conf;
+  static SerializationFactory factory;
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    conf = new Configuration();
+    factory = new SerializationFactory(conf);
+  }
+
+  @Test
+  public void testSerializationKeyIsEmpty() {
+    Configuration conf = new Configuration();
+    conf.set(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY, "");
+    SerializationFactory factory = new SerializationFactory(conf);
+  }
+
   @Test
-  public void testSerializerAvailability() {
+  public void testSerializationKeyIsInvalid() {
     Configuration conf = new Configuration();
+    conf.set(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY, "INVALID_KEY_XXX");
     SerializationFactory factory = new SerializationFactory(conf);
+  }
+
+  @Test
+  public void testGetSerializer() {
     // Test that a valid serializer class is returned when it's present
-    assertNotNull("A valid class must be returned for default Writable Serde",
+    assertNotNull("A valid class must be returned for default Writable SerDe",
         factory.getSerializer(Writable.class));
-    assertNotNull("A valid class must be returned for default Writable serDe",
-        factory.getDeserializer(Writable.class));
     // Test that a null is returned when none can be found.
     assertNull("A null should be returned if there are no serializers found.",
         factory.getSerializer(TestSerializationFactory.class));
+  }
+
+  @Test
+  public void testGetDeserializer() {
+    // Test that a valid serializer class is returned when it's present
+    assertNotNull("A valid class must be returned for default Writable SerDe",
+        factory.getDeserializer(Writable.class));
+    // Test that a null is returned when none can be found.
     assertNull("A null should be returned if there are no deserializers found",
         factory.getDeserializer(TestSerializationFactory.class));
   }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java Wed Sep 26 22:55:00 2012
@@ -165,8 +165,10 @@ public class MiniRPCBenchmark {
         new TestDelegationTokenSecretManager(24*60*60*1000,
             7*24*60*60*1000,24*60*60*1000,3600000);
       secretManager.startThreads();
-      rpcServer = RPC.getServer(MiniProtocol.class,
-          this, DEFAULT_SERVER_ADDRESS, 0, 1, false, conf, secretManager);
+      rpcServer = new RPC.Builder(conf).setProtocol(MiniProtocol.class)
+          .setInstance(this).setBindAddress(DEFAULT_SERVER_ADDRESS).setPort(0)
+          .setNumHandlers(1).setVerbose(false).setSecretManager(secretManager)
+          .build();
       rpcServer.start();
     }
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java?rev=1390763&r1=1390762&r2=1390763&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java Wed Sep 26 22:55:00 2012
@@ -227,11 +227,14 @@ public class RPCCallBenchmark implements
       BlockingService service = TestProtobufRpcProto
           .newReflectiveBlockingService(serverImpl);
 
-      server = RPC.getServer(TestRpcService.class, service,
-          opts.host, opts.port, opts.serverThreads, false, conf, null);
+      server = new RPC.Builder(conf).setProtocol(TestRpcService.class)
+          .setInstance(service).setBindAddress(opts.host).setPort(opts.port)
+          .setNumHandlers(opts.serverThreads).setVerbose(false).build();
     } else if (opts.rpcEngine == WritableRpcEngine.class) {
-      server = RPC.getServer(TestProtocol.class, new TestRPC.TestImpl(),
-          opts.host, opts.port, opts.serverThreads, false, conf, null);
+      server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
+          .setInstance(new TestRPC.TestImpl()).setBindAddress(opts.host)
+          .setPort(opts.port).setNumHandlers(opts.serverThreads)
+          .setVerbose(false).build();
     } else {
       throw new RuntimeException("Bad engine: " + opts.rpcEngine);
     }

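Both benchmarks above migrate from the deprecated RPC.getServer overloads to
the new Builder. Here is a consolidated sketch of the pattern: the protocol
class, instance, and bind address are placeholders, while the Builder calls
mirror the hunks above:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.ipc.RPC;

    public class RpcBuilderSketch {
      static RPC.Server buildServer(Configuration conf, Class<?> protocol,
          Object impl) throws IOException {
        return new RPC.Builder(conf)
            .setProtocol(protocol)        // the protocol interface
            .setInstance(impl)            // its implementation
            .setBindAddress("0.0.0.0")    // placeholder bind address
            .setPort(0)                   // 0 = pick any free port
            .setNumHandlers(1)
            .setVerbose(false)
            // .setSecretManager(...) when delegation tokens are involved
            .build();
      }
    }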

