hadoop-common-commits mailing list archives

From: szets...@apache.org
Subject: svn commit: r1479733 [1/2] - in /hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common: ./ src/main/conf/ src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/conf/ src/main/java/org/apache/hadoop/fs/local/ src/main/java/org/apa...
Date: Tue, 07 May 2013 00:17:53 GMT
Author: szetszwo
Date: Tue May  7 00:17:50 2013
New Revision: 1479733

URL: http://svn.apache.org/r1479733
Log:
Merge r1477868 through r1479732 from trunk.

Added:
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcNoSuchMethodException.java
      - copied unchanged from r1479732, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcNoSuchMethodException.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcNoSuchProtocolException.java
      - copied unchanged from r1479732, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcNoSuchProtocolException.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/readlink.c
      - copied unchanged from r1479732, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/winutils/readlink.c
Modified:
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.cmd
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/main.c
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/symlink.c
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextSymlink.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt Tue May  7 00:17:50 2013
@@ -19,6 +19,8 @@ Trunk (Unreleased)
 
     HADOOP-9194. RPC Support for QoS. (Junping Du via llu)
 
+    HADOOP-9425 Add error codes to rpc-response (sanjay Radia)
+
   NEW FEATURES
     
     HADOOP-8561. Introduce HADOOP_PROXY_USER for secure impersonation in child
@@ -166,6 +168,8 @@ Trunk (Unreleased)
     HADOOP-9486. Promoted Windows and Shell related utils from YARN to Hadoop
     Common. (Chris Nauroth via vinodkv)
 
+    HADOOP-8608. Add Configuration API for parsing time durations. (cdouglas)
+
   BUG FIXES
 
     HADOOP-9451. Fault single-layer config if node group topology is enabled.
@@ -534,6 +538,15 @@ Trunk (Unreleased)
 
     HADOOP-9413. Add common utils for File#setReadable/Writable/Executable &
     File#canRead/Write/Execute that work cross-platform. (Ivan Mitic via suresh)
+
+    HADOOP-9532. HADOOP_CLIENT_OPTS is appended twice by Windows cmd scripts.
+    (Chris Nauroth via suresh)
+
+    HADOOP-9043. Disallow in winutils creating symlinks with forwards slashes.
+    (Chris Nauroth and Arpit Agarwal via suresh)
+
+    HADOOP-9483. winutils support for readlink command.
+    (Arpit Agarwal via suresh)
     
 Release 2.0.5-beta - UNRELEASED
 
@@ -583,6 +596,13 @@ Release 2.0.5-beta - UNRELEASED
     HADOOP-9322. LdapGroupsMapping doesn't seem to set a timeout for
     its directory search. (harsh)
 
+    HADOOP-9523. Provide a generic IBM java vendor flag in PlatformName.java
+    to support non-Sun JREs. (Tian Hong Wang via suresh)
+
+    HADOOP-9511. Adding support for additional input streams (FSDataInputStream
+    and RandomAccessFile) in SecureIOUtils so as to help YARN-578. (Omkar Vinit
+    Joshi via vinodkv)
+
   OPTIMIZATIONS
 
     HADOOP-9150. Avoid unnecessary DNS resolution attempts for logical URIs
@@ -668,6 +688,9 @@ Release 2.0.5-beta - UNRELEASED
     HADOOP-9504. MetricsDynamicMBeanBase has concurrency issues in
     createMBeanInfo (Liang Xie via jlowe)
 
+    HADOOP-9455. HADOOP_CLIENT_OPTS appended twice causes JVM failures.
+    (Chris Nauroth via suresh)
+
 Release 2.0.4-alpha - UNRELEASED
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1477868-1479732

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.cmd
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.cmd?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.cmd (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.cmd Tue May  7 00:17:50 2013
@@ -43,7 +43,7 @@ if exist %HADOOP_HOME%\contrib\capacity-
 @rem set HADOOP_NAMENODE_INIT_HEAPSIZE=""
 
 @rem Extra Java runtime options.  Empty by default.
-@rem set HADOOP_OPTS=-Djava.net.preferIPv4Stack=true %HADOOP_CLIENT_OPTS%
+@rem set HADOOP_OPTS=-Djava.net.preferIPv4Stack=true
 
 @rem Command specific options appended to HADOOP_OPTS when specified
 if not defined HADOOP_SECURITY_LOGGER (

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh Tue May  7 00:17:50 2013
@@ -45,7 +45,7 @@ done
 #export HADOOP_NAMENODE_INIT_HEAPSIZE=""
 
 # Extra Java runtime options.  Empty by default.
-export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true $HADOOP_CLIENT_OPTS"
+export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true"
 
 MAC_OSX=false
 case "`uname`" in

Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1477868-1479732

Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1477868-1479732

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Tue May  7 00:17:50 2013
@@ -54,6 +54,7 @@ import java.util.concurrent.CopyOnWriteA
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
+import java.util.concurrent.TimeUnit;
 
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
@@ -1145,6 +1146,93 @@ public class Configuration implements It
       : Enum.valueOf(defaultValue.getDeclaringClass(), val);
   }
 
+  enum ParsedTimeDuration {
+    NS {
+      TimeUnit unit() { return TimeUnit.NANOSECONDS; }
+      String suffix() { return "ns"; }
+    },
+    US {
+      TimeUnit unit() { return TimeUnit.MICROSECONDS; }
+      String suffix() { return "us"; }
+    },
+    MS {
+      TimeUnit unit() { return TimeUnit.MILLISECONDS; }
+      String suffix() { return "ms"; }
+    },
+    S {
+      TimeUnit unit() { return TimeUnit.SECONDS; }
+      String suffix() { return "s"; }
+    },
+    M {
+      TimeUnit unit() { return TimeUnit.MINUTES; }
+      String suffix() { return "m"; }
+    },
+    H {
+      TimeUnit unit() { return TimeUnit.HOURS; }
+      String suffix() { return "h"; }
+    },
+    D {
+      TimeUnit unit() { return TimeUnit.DAYS; }
+      String suffix() { return "d"; }
+    };
+    abstract TimeUnit unit();
+    abstract String suffix();
+    static ParsedTimeDuration unitFor(String s) {
+      for (ParsedTimeDuration ptd : values()) {
+        // iteration order is in decl order, so SECONDS matched last
+        if (s.endsWith(ptd.suffix())) {
+          return ptd;
+        }
+      }
+      return null;
+    }
+    static ParsedTimeDuration unitFor(TimeUnit unit) {
+      for (ParsedTimeDuration ptd : values()) {
+        if (ptd.unit() == unit) {
+          return ptd;
+        }
+      }
+      return null;
+    }
+  }
+
+  /**
+   * Set the value of <code>name</code> to the given time duration. This
+   * is equivalent to <code>set(&lt;name&gt;, value + &lt;time suffix&gt;)</code>.
+   * @param name Property name
+   * @param value Time duration
+   * @param unit Unit of time
+   */
+  public void setTimeDuration(String name, long value, TimeUnit unit) {
+    set(name, value + ParsedTimeDuration.unitFor(unit).suffix());
+  }
+
+  /**
+   * Return time duration in the given time unit. Valid units are encoded in
+   * properties as suffixes: nanoseconds (ns), microseconds (us), milliseconds
+   * (ms), seconds (s), minutes (m), hours (h), and days (d).
+   * @param name Property name
+   * @param defaultValue Value returned if no mapping exists.
+   * @param unit Unit to convert the stored property, if it exists.
+   * @throws NumberFormatException If the property stripped of its unit is not
+   *         a number
+   */
+  public long getTimeDuration(String name, long defaultValue, TimeUnit unit) {
+    String vStr = get(name);
+    if (null == vStr) {
+      return defaultValue;
+    }
+    vStr = vStr.trim();
+    ParsedTimeDuration vUnit = ParsedTimeDuration.unitFor(vStr);
+    if (null == vUnit) {
+      LOG.warn("No unit for " + name + "(" + vStr + ") assuming " + unit);
+      vUnit = ParsedTimeDuration.unitFor(unit);
+    } else {
+      vStr = vStr.substring(0, vStr.lastIndexOf(vUnit.suffix()));
+    }
+    return unit.convert(Long.parseLong(vStr), vUnit.unit());
+  }
+
   /**
    * Get the value of the <code>name</code> property as a <code>Pattern</code>.
    * If no such property is specified, or if the specified value is not a valid

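The getTimeDuration/setTimeDuration pair above (HADOOP-8608) stores durations as numbers with a unit suffix and converts on read. A minimal usage sketch against a stock Configuration; the property names are hypothetical:

    import java.util.concurrent.TimeUnit;
    import org.apache.hadoop.conf.Configuration;

    public class TimeDurationExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Written to the config as the string "30s".
        conf.setTimeDuration("example.heartbeat.interval", 30, TimeUnit.SECONDS);

        // Read back in a different unit: "30s" -> 30000 ms.
        long ms = conf.getTimeDuration("example.heartbeat.interval",
            1000, TimeUnit.MILLISECONDS);
        System.out.println(ms);  // prints 30000

        // A bare number has no suffix, so the caller's unit is assumed
        // (and a warning is logged).
        conf.set("example.timeout", "45");
        long s = conf.getTimeDuration("example.timeout", 0, TimeUnit.SECONDS);
        System.out.println(s);   // prints 45
      }
    }
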
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java Tue May  7 00:17:50 2013
@@ -18,6 +18,7 @@
 package org.apache.hadoop.fs.local;
 
 import java.io.IOException;
+import java.io.File;
 import java.io.FileNotFoundException;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -90,8 +91,8 @@ public class RawLocalFs extends Delegate
     // NB: Use createSymbolicLink in java.nio.file.Path once available
     try {
       Shell.execCommand(Shell.getSymlinkCommand(
-        getPathWithoutSchemeAndAuthority(target),
-        getPathWithoutSchemeAndAuthority(link)));
+        getPathWithoutSchemeAndAuthority(target).getPath(),
+        getPathWithoutSchemeAndAuthority(link).getPath()));
     } catch (IOException x) {
       throw new IOException("Unable to create symlink: "+x.getMessage());
     }
@@ -175,12 +176,12 @@ public class RawLocalFs extends Delegate
     throw new AssertionError();
   }
 
-  private static String getPathWithoutSchemeAndAuthority(Path path) {
-    // This code depends on Path.toString() to remove the leading slash before
-    // the drive specification on Windows.
+  private static File getPathWithoutSchemeAndAuthority(Path path) {
     Path newPath = path.isUriPathAbsolute() ?
       new Path(null, null, path.toUri().getPath()) :
       path;
-    return newPath.toString();
+
+    // Path.toString() removes leading slash before drive spec on Windows.
+    return new File(newPath.toString());
   }
 }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java Tue May  7 00:17:50 2013
@@ -18,22 +18,23 @@
 package org.apache.hadoop.io;
 
 import java.io.File;
-import java.io.FileDescriptor;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.RandomAccessFile;
 import java.util.Arrays;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.io.nativeio.Errno;
 import org.apache.hadoop.io.nativeio.NativeIO;
-import org.apache.hadoop.io.nativeio.NativeIOException;
 import org.apache.hadoop.io.nativeio.NativeIO.POSIX.Stat;
 import org.apache.hadoop.security.UserGroupInformation;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
  * This class provides secure APIs for opening and creating files on the local
  * disk. The main issue this class tries to handle is that of symlink traversal.
@@ -90,6 +91,95 @@ public class SecureIOUtils {
   private final static FileSystem rawFilesystem;
 
   /**
+   * Open the given File for random read access, verifying the expected user/
+   * group constraints if security is enabled.
+   * 
+   * Note that this function provides no additional security checks if hadoop
+   * security is disabled, since doing the checks would be too expensive when
+   * native libraries are not available.
+   * 
+   * @param f file that we are trying to open
+   * @param mode mode in which we want to open the random access file
+   * @param expectedOwner the expected user owner for the file
+   * @param expectedGroup the expected group owner for the file
+   * @throws IOException if an IO error occurred or if the user/group does
+   * not match when security is enabled.
+   */
+  public static RandomAccessFile openForRandomRead(File f,
+      String mode, String expectedOwner, String expectedGroup)
+      throws IOException {
+    if (!UserGroupInformation.isSecurityEnabled()) {
+      return new RandomAccessFile(f, mode);
+    }
+    return forceSecureOpenForRandomRead(f, mode, expectedOwner, expectedGroup);
+  }
+
+  /**
+   * Same as openForRandomRead except that it will run even if security is off.
+   * This is used by unit tests.
+   */
+  @VisibleForTesting
+  protected static RandomAccessFile forceSecureOpenForRandomRead(File f,
+      String mode, String expectedOwner, String expectedGroup)
+      throws IOException {
+    RandomAccessFile raf = new RandomAccessFile(f, mode);
+    boolean success = false;
+    try {
+      Stat stat = NativeIO.POSIX.getFstat(raf.getFD());
+      checkStat(f, stat.getOwner(), stat.getGroup(), expectedOwner,
+          expectedGroup);
+      success = true;
+      return raf;
+    } finally {
+      if (!success) {
+        raf.close();
+      }
+    }
+  }
+
+  /**
+   * Opens the {@link FSDataInputStream} on the requested file on local file
+   * system, verifying the expected user/group constraints if security is
+   * enabled.
+   * @param file absolute path of the file
+   * @param expectedOwner the expected user owner for the file
+   * @param expectedGroup the expected group owner for the file
+   * @throws IOException if an IO Error occurred or the user/group does not
+   * match if security is enabled
+   */
+  public static FSDataInputStream openFSDataInputStream(File file,
+      String expectedOwner, String expectedGroup) throws IOException {
+    if (!UserGroupInformation.isSecurityEnabled()) {
+      return rawFilesystem.open(new Path(file.getAbsolutePath()));
+    }
+    return forceSecureOpenFSDataInputStream(file, expectedOwner, expectedGroup);
+  }
+
+  /**
+   * Same as openFSDataInputStream except that it will run even if security is
+   * off. This is used by unit tests.
+   */
+  @VisibleForTesting
+  protected static FSDataInputStream forceSecureOpenFSDataInputStream(
+      File file,
+      String expectedOwner, String expectedGroup) throws IOException {
+    final FSDataInputStream in =
+        rawFilesystem.open(new Path(file.getAbsolutePath()));
+    boolean success = false;
+    try {
+      Stat stat = NativeIO.POSIX.getFstat(in.getFileDescriptor());
+      checkStat(file, stat.getOwner(), stat.getGroup(), expectedOwner,
+          expectedGroup);
+      success = true;
+      return in;
+    } finally {
+      if (!success) {
+        in.close();
+      }
+    }
+  }
+
+  /**
    * Open the given File for read access, verifying the expected user/group
    * constraints if security is enabled.
    *
@@ -115,7 +205,8 @@ public class SecureIOUtils {
    * Same as openForRead() except that it will run even if security is off.
    * This is used by unit tests.
    */
-  static FileInputStream forceSecureOpenForRead(File f, String expectedOwner,
+  @VisibleForTesting
+  protected static FileInputStream forceSecureOpenForRead(File f, String expectedOwner,
       String expectedGroup) throws IOException {
 
     FileInputStream fis = new FileInputStream(f);

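The two new SecureIOUtils entry points mirror the existing openForRead pattern: with security off they simply open the file; with security on they fstat the already-open descriptor and compare owner/group, which avoids a check-then-open race on the path. A hedged usage sketch; the path and expected principals are hypothetical:

    import java.io.File;
    import java.io.IOException;
    import java.io.RandomAccessFile;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.io.SecureIOUtils;

    public class SecureOpenExample {
      public static void main(String[] args) throws IOException {
        File logFile = new File("/var/log/hadoop/container.log"); // hypothetical

        // Seekable read, verifying the expected owner/group when security is on.
        RandomAccessFile raf =
            SecureIOUtils.openForRandomRead(logFile, "r", "yarn", "hadoop");
        raf.seek(1024);
        raf.close();

        // Same check for an FSDataInputStream (the YARN-578 use case).
        FSDataInputStream in =
            SecureIOUtils.openFSDataInputStream(logFile, "yarn", "hadoop");
        in.close();
      }
    }
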
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java Tue May  7 00:17:50 2013
@@ -25,6 +25,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.io.compress.zlib.*;
+import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
 /**
  * This class creates gzip compressors/decompressors. 
@@ -41,10 +42,9 @@ public class GzipCodec extends DefaultCo
 
     private static class ResetableGZIPOutputStream extends GZIPOutputStream {
       private static final int TRAILER_SIZE = 8;
-      public static final String JVMVendor= System.getProperty("java.vendor");
       public static final String JVMVersion= System.getProperty("java.version");
       private static final boolean HAS_BROKEN_FINISH =
-          (JVMVendor.contains("IBM") && JVMVersion.contains("1.6.0"));
+          (IBM_JAVA && JVMVersion.contains("1.6.0"));
 
       public ResetableGZIPOutputStream(OutputStream out) throws IOException {
         super(out);

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Tue May  7 00:17:50 2013
@@ -65,6 +65,7 @@ import org.apache.hadoop.io.retry.RetryP
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;
 import org.apache.hadoop.net.ConnectTimeoutException;
 import org.apache.hadoop.net.NetUtils;
@@ -987,8 +988,15 @@ public class Client {
                   "ServerDidNotSetExceptionClassName";
           final String errorMsg = header.hasErrorMsg() ? 
                 header.getErrorMsg() : "ServerDidNotSetErrorMsg" ;
+          final RpcErrorCodeProto erCode = 
+                    (header.hasErrorDetail() ? header.getErrorDetail() : null);
+          if (erCode == null) {
+             LOG.warn("Detailed error code not set by server on rpc error");
+          }
           RemoteException re = 
-              new RemoteException(exceptionClassName, errorMsg);
+              ( (erCode == null) ? 
+                  new RemoteException(exceptionClassName, errorMsg) :
+              new RemoteException(exceptionClassName, errorMsg, erCode));
           if (status == RpcStatusProto.ERROR) {
             call.setException(re);
             calls.remove(callId);

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java Tue May  7 00:17:50 2013
@@ -437,8 +437,8 @@ public class ProtobufRpcEngine implement
      */
     static class ProtoBufRpcInvoker implements RpcInvoker {
       private static ProtoClassProtoImpl getProtocolImpl(RPC.Server server,
-          String protoName, long version) throws IOException {
-        ProtoNameVer pv = new ProtoNameVer(protoName, version);
+          String protoName, long clientVersion) throws RpcServerException {
+        ProtoNameVer pv = new ProtoNameVer(protoName, clientVersion);
         ProtoClassProtoImpl impl = 
             server.getProtocolImplMap(RPC.RpcKind.RPC_PROTOCOL_BUFFER).get(pv);
         if (impl == null) { // no match for Protocol AND Version
@@ -446,10 +446,11 @@ public class ProtobufRpcEngine implement
               server.getHighestSupportedProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, 
                   protoName);
           if (highest == null) {
-            throw new IOException("Unknown protocol: " + protoName);
+            throw new RpcNoSuchProtocolException(
+                "Unknown protocol: " + protoName);
           }
           // protocol supported but not the version that client wants
-          throw new RPC.VersionMismatch(protoName, version,
+          throw new RPC.VersionMismatch(protoName, clientVersion,
               highest.version);
         }
         return impl;
@@ -513,7 +514,7 @@ public class ProtobufRpcEngine implement
           String msg = "Unknown method " + methodName + " called on " 
                                 + connectionProtocolName + " protocol.";
           LOG.warn(msg);
-          throw new RpcServerException(msg);
+          throw new RpcNoSuchMethodException(msg);
         }
         Message prototype = service.getRequestPrototype(methodDescriptor);
         Message param = prototype.newBuilderForType()

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java Tue May  7 00:17:50 2013
@@ -43,6 +43,8 @@ import org.apache.hadoop.io.*;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolInfoService;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -213,7 +215,7 @@ public class RPC {
   /**
    * A version mismatch for the RPC protocol.
    */
-  public static class VersionMismatch extends IOException {
+  public static class VersionMismatch extends RpcServerException {
     private static final long serialVersionUID = 0;
 
     private String interfaceName;
@@ -257,6 +259,19 @@ public class RPC {
     public long getServerVersion() {
       return serverVersion;
     }
+    /**
+     * get the rpc status corresponding to this exception
+     */
+    public RpcStatusProto getRpcStatusProto() {
+      return RpcStatusProto.ERROR;
+    }
+
+    /**
+     * get the detailed rpc status corresponding to this exception
+     */
+    public RpcErrorCodeProto getRpcErrorCodeProto() {
+      return RpcErrorCodeProto.ERROR_RPC_VERSION_MISMATCH;
+    }
   }
 
   /**

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java Tue May  7 00:17:50 2013
@@ -21,22 +21,38 @@ package org.apache.hadoop.ipc;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
 
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto;
 import org.xml.sax.Attributes;
 
 public class RemoteException extends IOException {
   /** For java.io.Serializable */
   private static final long serialVersionUID = 1L;
+  private final int errorCode;
 
   private String className;
   
   public RemoteException(String className, String msg) {
     super(msg);
     this.className = className;
+    errorCode = -1;
+  }
+  
+  public RemoteException(String className, String msg, RpcErrorCodeProto erCode) {
+    super(msg);
+    this.className = className;
+    if (erCode != null)
+      errorCode = erCode.getNumber();
+    else 
+      errorCode = -1;
   }
   
   public String getClassName() {
     return className;
   }
+  
+  public RpcErrorCodeProto getErrorCode() {
+    return RpcErrorCodeProto.valueOf(errorCode);
+  }
 
   /**
    * If this remote exception wraps up one of the lookupTypes

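With the new field, client code can branch on a machine-readable code instead of matching the exception class name string. A sketch of the caller side; the protocol interface and RPC method are hypothetical:

    import java.io.IOException;
    import org.apache.hadoop.ipc.RemoteException;
    import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto;

    public class ErrorCodeHandling {
      // SomeProtocol stands in for any RPC proxy interface; hypothetical.
      static void callWithDiagnostics(SomeProtocol proxy) throws IOException {
        try {
          proxy.ping();  // hypothetical RPC method
        } catch (RemoteException re) {
          // Returns null when the server did not set the errorDetail field.
          RpcErrorCodeProto code = re.getErrorCode();
          if (code == RpcErrorCodeProto.ERROR_RPC_VERSION_MISMATCH) {
            // e.g. fall back to an older protocol version
            return;
          }
          throw re.unwrapRemoteException();
        }
      }
    }
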
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java Tue May  7 00:17:50 2013
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.ipc;
 
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;
+
 /**
  * Indicates an exception on the RPC server 
  */
@@ -42,4 +45,18 @@ public class RpcServerException extends 
   public RpcServerException(final String message, final Throwable cause) {
     super(message, cause);
   }
+  
+  /**
+   * get the rpc status corresponding to this exception
+   */
+  public RpcStatusProto getRpcStatusProto() {
+    return RpcStatusProto.ERROR;
+  }
+
+  /**
+   * get the detailed rpc status corresponding to this exception
+   */
+  public RpcErrorCodeProto getRpcErrorCodeProto() {
+    return RpcErrorCodeProto.ERROR_RPC_SERVER;
+  }
 }

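RpcServerException defaults the detailed code to ERROR_RPC_SERVER; the subclasses added in this merge (RpcNoSuchMethodException and RpcNoSuchProtocolException, copied unchanged from trunk above) refine it by overriding getRpcErrorCodeProto. A sketch of the shape such a subclass takes; the ERROR_NO_SUCH_METHOD constant is an assumption about RpcHeader.proto, not confirmed by the hunks shown here:

    import org.apache.hadoop.ipc.RpcServerException;
    import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto;

    /** Sketch: a no-such-method RPC error carrying its own detailed code. */
    public class RpcNoSuchMethodExceptionSketch extends RpcServerException {
      private static final long serialVersionUID = 1L;

      public RpcNoSuchMethodExceptionSketch(final String message) {
        super(message);
      }

      @Override
      public RpcErrorCodeProto getRpcErrorCodeProto() {
        // Assumed enum constant; the base class returns ERROR_RPC_SERVER.
        return RpcErrorCodeProto.ERROR_NO_SUCH_METHOD;
      }
    }
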
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Tue May  7 00:17:50 2013
@@ -83,6 +83,7 @@ import org.apache.hadoop.ipc.metrics.Rpc
 import org.apache.hadoop.ipc.metrics.RpcMetrics;
 import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.*;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
@@ -1438,7 +1439,8 @@ public abstract class Server {
         final String ioeMessage  = ioe.getLocalizedMessage();
         if (authMethod == AuthMethod.SIMPLE) {
           setupResponse(authFailedResponse, authFailedCall,
-              RpcStatusProto.FATAL, null, ioeClass, ioeMessage);
+              RpcStatusProto.FATAL, RpcErrorCodeProto.FATAL_UNAUTHORIZED, 
+              null, ioeClass, ioeMessage);
           responder.doRespond(authFailedCall);
         } else {
           doSaslReply(SaslStatus.ERROR, null, ioeClass, ioeMessage);
@@ -1528,7 +1530,8 @@ public abstract class Server {
       if (clientVersion >= 9) {
         // Versions >>9  understand the normal response
         Call fakeCall =  new Call(-1, null, this);
-        setupResponse(buffer, fakeCall, RpcStatusProto.FATAL,
+        setupResponse(buffer, fakeCall, 
+            RpcStatusProto.FATAL, RpcErrorCodeProto.FATAL_VERSION_MISMATCH,
             null, VersionMismatch.class.getName(), errMsg);
         responder.doRespond(fakeCall);
       } else if (clientVersion >= 3) {
@@ -1557,8 +1560,9 @@ public abstract class Server {
       ByteArrayOutputStream buffer = new ByteArrayOutputStream();
 
       Call fakeCall = new Call(-1, null, this);
-      setupResponse(buffer, fakeCall, RpcStatusProto.FATAL, null,
-          IpcException.class.getName(), errMsg);
+      setupResponse(buffer, fakeCall, 
+       RpcStatusProto.FATAL,  RpcErrorCodeProto.FATAL_UNSUPPORTED_SERIALIZATION,
+       null, IpcException.class.getName(), errMsg);
       responder.doRespond(fakeCall);
     }
     
@@ -1647,7 +1651,7 @@ public abstract class Server {
     private void processOneRpc(byte[] buf) throws IOException,
         InterruptedException {
       if (connectionContextRead) {
-        processData(buf);
+        processRpcRequest(buf);
       } else {
         processConnectionContext(buf);
         connectionContextRead = true;
@@ -1659,7 +1663,17 @@ public abstract class Server {
       }
     }
     
-    private void processData(byte[] buf) throws  IOException, InterruptedException {
+    /**
+     * Process an RPC Request - the connection headers and context have been
+     * read
+     * @param buf - contains the RPC request header and the rpc request
+     * @throws RpcServerException due to fatal rpc layer issues such as
+     *   invalid header. In this case a RPC fatal status response is sent back
+     *   to client.
+     */
+    
+    private void processRpcRequest(byte[] buf) 
+        throws  RpcServerException, IOException, InterruptedException {
       DataInputStream dis =
         new DataInputStream(new ByteArrayInputStream(buf));
       RpcRequestHeaderProto header = RpcRequestHeaderProto.parseDelimitedFrom(dis);
@@ -1667,51 +1681,58 @@ public abstract class Server {
       if (LOG.isDebugEnabled())
         LOG.debug(" got #" + header.getCallId());
       if (!header.hasRpcOp()) {
-        throw new IOException(" IPC Server: No rpc op in rpcRequestHeader");
+        String err = " IPC Server: No rpc op in rpcRequestHeader";
+        respondBadRpcHeader(new Call(header.getCallId(), null, this),
+            RpcServerException.class.getName(), err);
+        throw new RpcServerException(err);
       }
       if (header.getRpcOp() != 
           RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET) {
-        throw new IOException("IPC Server does not implement operation" + 
-              header.getRpcOp());
+        String err = "IPC Server does not implement rpc header operation" + 
+                header.getRpcOp();
+        respondBadRpcHeader(new Call(header.getCallId(), null, this),
+            RpcServerException.class.getName(), err);
+        throw new RpcServerException(err);
       }
       // If we know the rpc kind, get its class so that we can deserialize
       // (Note it would make more sense to have the handler deserialize but 
       // we continue with this original design.
       if (!header.hasRpcKind()) {
-        throw new IOException(" IPC Server: No rpc kind in rpcRequestHeader");
+        String err = " IPC Server: No rpc kind in rpcRequestHeader";
+        respondBadRpcHeader(new Call(header.getCallId(), null, this),
+            RpcServerException.class.getName(), err);
+        throw new RpcServerException(err);
       }
       Class<? extends Writable> rpcRequestClass = 
           getRpcRequestWrapper(header.getRpcKind());
       if (rpcRequestClass == null) {
         LOG.warn("Unknown rpc kind "  + header.getRpcKind() + 
             " from client " + getHostAddress());
-        final Call readParamsFailedCall = 
-            new Call(header.getCallId(), null, this);
-        ByteArrayOutputStream responseBuffer = new ByteArrayOutputStream();
-
-        setupResponse(responseBuffer, readParamsFailedCall, RpcStatusProto.FATAL, null,
-            IOException.class.getName(),
-            "Unknown rpc kind "  + header.getRpcKind());
-        responder.doRespond(readParamsFailedCall);
-        return;   
+        final String err = "Unknown rpc kind in rpc header"  + 
+            header.getRpcKind();
+        respondBadRpcHeader(new Call(header.getCallId(), null, this),
+            RpcServerException.class.getName(), err);
+        throw new RpcServerException(err);   
       }
       Writable rpcRequest;
       try { //Read the rpc request
         rpcRequest = ReflectionUtils.newInstance(rpcRequestClass, conf);
         rpcRequest.readFields(dis);
-      } catch (Throwable t) {
+      } catch (Throwable t) { // includes runtime exception from newInstance
         LOG.warn("Unable to read call parameters for client " +
                  getHostAddress() + "on connection protocol " +
             this.protocolName + " for rpcKind " + header.getRpcKind(),  t);
         final Call readParamsFailedCall = 
             new Call(header.getCallId(), null, this);
         ByteArrayOutputStream responseBuffer = new ByteArrayOutputStream();
+        String err = "IPC server unable to read call parameters: "+ t.getMessage();
 
-        setupResponse(responseBuffer, readParamsFailedCall, RpcStatusProto.FATAL, null,
-            t.getClass().getName(),
-            "IPC server unable to read call parameters: " + t.getMessage());
+        setupResponse(responseBuffer, readParamsFailedCall, 
+            RpcStatusProto.FATAL, RpcErrorCodeProto.FATAL_DESERIALIZING_REQUEST,
+            null, t.getClass().getName(),
+            err);
         responder.doRespond(readParamsFailedCall);
-        return;
+        throw new RpcServerException(err, t);
       }
         
       Call call = new Call(header.getCallId(), rpcRequest, this, 
@@ -1737,7 +1758,8 @@ public abstract class Server {
         rpcMetrics.incrAuthorizationSuccesses();
       } catch (AuthorizationException ae) {
         rpcMetrics.incrAuthorizationFailures();
-        setupResponse(authFailedResponse, authFailedCall, RpcStatusProto.FATAL, null,
+        setupResponse(authFailedResponse, authFailedCall, 
+            RpcStatusProto.FATAL,  RpcErrorCodeProto.FATAL_UNAUTHORIZED, null,
             ae.getClass().getName(), ae.getMessage());
         responder.doRespond(authFailedCall);
         return false;
@@ -1799,6 +1821,8 @@ public abstract class Server {
           }
           String errorClass = null;
           String error = null;
+          RpcStatusProto returnStatus = RpcStatusProto.SUCCESS;
+          RpcErrorCodeProto detailedErr = null;
           Writable value = null;
 
           CurCall.set(call);
@@ -1839,7 +1863,14 @@ public abstract class Server {
             } else {
               LOG.info(logMsg, e);
             }
-
+            if (e instanceof RpcServerException) {
+              RpcServerException rse = ((RpcServerException)e); 
+              returnStatus = rse.getRpcStatusProto();
+              detailedErr = rse.getRpcErrorCodeProto();
+            } else {
+              returnStatus = RpcStatusProto.ERROR;
+              detailedErr = RpcErrorCodeProto.ERROR_APPLICATION;
+            }
             errorClass = e.getClass().getName();
             error = StringUtils.stringifyException(e);
             // Remove redundant error class name from the beginning of the stack trace
@@ -1854,8 +1885,8 @@ public abstract class Server {
             // responder.doResponse() since setupResponse may use
             // SASL to encrypt response data and SASL enforces
             // its own message ordering.
-            setupResponse(buf, call, (error == null) ? RpcStatusProto.SUCCESS
-                : RpcStatusProto.ERROR, value, errorClass, error);
+            setupResponse(buf, call, returnStatus, detailedErr, 
+                value, errorClass, error);
             
             // Discard the large buf and reset it back to smaller size 
             // to free up heap
@@ -2026,7 +2057,7 @@ public abstract class Server {
    * @throws IOException
    */
   private void setupResponse(ByteArrayOutputStream responseBuf,
-                             Call call, RpcStatusProto status, 
+                             Call call, RpcStatusProto status, RpcErrorCodeProto erCode,
                              Writable rv, String errorClass, String error) 
   throws IOException {
     responseBuf.reset();
@@ -2065,6 +2096,7 @@ public abstract class Server {
         // buffer is reset at the top, and since status is changed
         // to ERROR it won't infinite loop.
         setupResponse(responseBuf, call, RpcStatusProto.ERROR,
+            RpcErrorCodeProto.ERROR_SERIALIZING_RESPONSE,
             null, t.getClass().getName(),
             StringUtils.stringifyException(t));
         return;
@@ -2072,6 +2104,7 @@ public abstract class Server {
     } else { // Rpc Failure
       headerBuilder.setExceptionClassName(errorClass);
       headerBuilder.setErrorMsg(error);
+      headerBuilder.setErrorDetail(erCode);
       RpcResponseHeaderProto header = headerBuilder.build();
       int headerLen = header.getSerializedSize();
       final int fullLength  = 
@@ -2116,6 +2149,19 @@ public abstract class Server {
     call.setResponse(ByteBuffer.wrap(response.toByteArray()));
   }
   
+  
+  private void respondBadRpcHeader(Call call, String errorClass, String error)
+      throws IOException
+  {
+    ByteArrayOutputStream responseBuf = new ByteArrayOutputStream();
+    setupResponse(responseBuf, call, 
+        RpcStatusProto.FATAL,  RpcErrorCodeProto.FATAL_INVALID_RPC_HEADER,
+        null, errorClass, error);
+    responder.doRespond(call);
+    return; 
+    
+  }
+  
   private void wrapWithSasl(ByteArrayOutputStream response, Call call)
       throws IOException {
     if (call.connection.saslServer != null) {

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java Tue May  7 00:17:50 2013
@@ -416,62 +416,62 @@ public class WritableRpcEngine implement
      @Override
       public Writable call(org.apache.hadoop.ipc.RPC.Server server,
           String protocolName, Writable rpcRequest, long receivedTime)
-          throws IOException {
-        try {
-          Invocation call = (Invocation)rpcRequest;
-          if (server.verbose) log("Call: " + call);
-
-          // Verify rpc version
-          if (call.getRpcVersion() != writableRpcVersion) {
-            // Client is using a different version of WritableRpc
-            throw new IOException(
-                "WritableRpc version mismatch, client side version="
-                    + call.getRpcVersion() + ", server side version="
-                    + writableRpcVersion);
-          }
+          throws IOException, RPC.VersionMismatch {
 
-          long clientVersion = call.getProtocolVersion();
-          final String protoName;
-          ProtoClassProtoImpl protocolImpl;
-          if (call.declaringClassProtocolName.equals(VersionedProtocol.class.getName())) {
-            // VersionProtocol methods are often used by client to figure out
-            // which version of protocol to use.
-            //
-            // Versioned protocol methods should go the protocolName protocol
-            // rather than the declaring class of the method since the
-            // the declaring class is VersionedProtocol which is not 
-            // registered directly.
-            // Send the call to the highest  protocol version
-            VerProtocolImpl highest = server.getHighestSupportedProtocol(
-                RPC.RpcKind.RPC_WRITABLE, protocolName);
-            if (highest == null) {
-              throw new IOException("Unknown protocol: " + protocolName);
-            }
-            protocolImpl = highest.protocolTarget;
-          } else {
-            protoName = call.declaringClassProtocolName;
+        Invocation call = (Invocation)rpcRequest;
+        if (server.verbose) log("Call: " + call);
+
+        // Verify writable rpc version
+        if (call.getRpcVersion() != writableRpcVersion) {
+          // Client is using a different version of WritableRpc
+          throw new RpcServerException(
+              "WritableRpc version mismatch, client side version="
+                  + call.getRpcVersion() + ", server side version="
+                  + writableRpcVersion);
+        }
 
-            // Find the right impl for the protocol based on client version.
-            ProtoNameVer pv = 
-                new ProtoNameVer(call.declaringClassProtocolName, clientVersion);
-            protocolImpl = 
-                server.getProtocolImplMap(RPC.RpcKind.RPC_WRITABLE).get(pv);
-            if (protocolImpl == null) { // no match for Protocol AND Version
-               VerProtocolImpl highest = 
-                   server.getHighestSupportedProtocol(RPC.RpcKind.RPC_WRITABLE, 
-                       protoName);
-              if (highest == null) {
-                throw new IOException("Unknown protocol: " + protoName);
-              } else { // protocol supported but not the version that client wants
-                throw new RPC.VersionMismatch(protoName, clientVersion,
-                  highest.version);
-              }
+        long clientVersion = call.getProtocolVersion();
+        final String protoName;
+        ProtoClassProtoImpl protocolImpl;
+        if (call.declaringClassProtocolName.equals(VersionedProtocol.class.getName())) {
+          // VersionProtocol methods are often used by client to figure out
+          // which version of protocol to use.
+          //
+          // Versioned protocol methods should go the protocolName protocol
+          // rather than the declaring class of the method since the
+          // the declaring class is VersionedProtocol which is not 
+          // registered directly.
+          // Send the call to the highest  protocol version
+          VerProtocolImpl highest = server.getHighestSupportedProtocol(
+              RPC.RpcKind.RPC_WRITABLE, protocolName);
+          if (highest == null) {
+            throw new RpcServerException("Unknown protocol: " + protocolName);
+          }
+          protocolImpl = highest.protocolTarget;
+        } else {
+          protoName = call.declaringClassProtocolName;
+
+          // Find the right impl for the protocol based on client version.
+          ProtoNameVer pv = 
+              new ProtoNameVer(call.declaringClassProtocolName, clientVersion);
+          protocolImpl = 
+              server.getProtocolImplMap(RPC.RpcKind.RPC_WRITABLE).get(pv);
+          if (protocolImpl == null) { // no match for Protocol AND Version
+             VerProtocolImpl highest = 
+                 server.getHighestSupportedProtocol(RPC.RpcKind.RPC_WRITABLE, 
+                     protoName);
+            if (highest == null) {
+              throw new RpcServerException("Unknown protocol: " + protoName);
+            } else { // protocol supported but not the version that client wants
+              throw new RPC.VersionMismatch(protoName, clientVersion,
+                highest.version);
             }
           }
+        }
           
 
           // Invoke the protocol method
-
+       try {
           long startTime = Time.now();
           Method method = 
               protocolImpl.protocolClass.getMethod(call.getMethodName(),

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Tue May  7 00:17:50 2013
@@ -64,6 +64,7 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Time;
+import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -306,12 +307,11 @@ public class UserGroupInformation {
       System.getProperty("os.name").startsWith("Windows");
   private static final boolean is64Bit =
       System.getProperty("os.arch").contains("64");
-  private static final boolean ibmJava = System.getProperty("java.vendor").contains("IBM");
   private static final boolean aix = System.getProperty("os.name").equals("AIX");
 
   /* Return the OS login module class name */
   private static String getOSLoginModuleName() {
-    if (ibmJava) {
+    if (IBM_JAVA) {
       if (windows) {
         return is64Bit ? "com.ibm.security.auth.module.Win64LoginModule"
             : "com.ibm.security.auth.module.NTLoginModule";
@@ -333,7 +333,7 @@ public class UserGroupInformation {
     ClassLoader cl = ClassLoader.getSystemClassLoader();
     try {
       String principalClass = null;
-      if (ibmJava) {
+      if (IBM_JAVA) {
         if (is64Bit) {
           principalClass = "com.ibm.security.auth.UsernamePrincipal";
         } else {
@@ -430,7 +430,7 @@ public class UserGroupInformation {
     private static final Map<String,String> USER_KERBEROS_OPTIONS = 
       new HashMap<String,String>();
     static {
-      if (ibmJava) {
+      if (IBM_JAVA) {
         USER_KERBEROS_OPTIONS.put("useDefaultCcache", "true");
       } else {
         USER_KERBEROS_OPTIONS.put("doNotPrompt", "true");
@@ -439,7 +439,7 @@ public class UserGroupInformation {
       }
       String ticketCache = System.getenv("KRB5CCNAME");
       if (ticketCache != null) {
-        if (ibmJava) {
+        if (IBM_JAVA) {
           // The first value searched when "useDefaultCcache" is used.
           System.setProperty("KRB5CCNAME", ticketCache);
         } else {
@@ -455,7 +455,7 @@ public class UserGroupInformation {
     private static final Map<String,String> KEYTAB_KERBEROS_OPTIONS = 
       new HashMap<String,String>();
     static {
-      if (ibmJava) {
+      if (IBM_JAVA) {
         KEYTAB_KERBEROS_OPTIONS.put("credsType", "both");
       } else {
         KEYTAB_KERBEROS_OPTIONS.put("doNotPrompt", "true");
@@ -487,7 +487,7 @@ public class UserGroupInformation {
       } else if (USER_KERBEROS_CONFIG_NAME.equals(appName)) {
         return USER_KERBEROS_CONF;
       } else if (KEYTAB_KERBEROS_CONFIG_NAME.equals(appName)) {
-        if (ibmJava) {
+        if (IBM_JAVA) {
           KEYTAB_KERBEROS_OPTIONS.put("useKeytab",
               prependFileAuthority(keytabFile));
         } else {

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java Tue May  7 00:17:50 2013
@@ -22,6 +22,7 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
 import org.apache.hadoop.util.ReflectionUtils;
+import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
 import javax.net.ssl.HostnameVerifier;
 import javax.net.ssl.HttpsURLConnection;
@@ -58,9 +59,7 @@ public class SSLFactory implements Conne
     "hadoop.ssl.client.conf";
   public static final String SSL_SERVER_CONF_KEY =
     "hadoop.ssl.server.conf";
-  private static final boolean IBMJAVA = 
-      System.getProperty("java.vendor").contains("IBM");
-  public static final String SSLCERTIFICATE = IBMJAVA?"ibmX509":"SunX509"; 
+  public static final String SSLCERTIFICATE = IBM_JAVA?"ibmX509":"SunX509"; 
 
   public static final boolean DEFAULT_SSL_REQUIRE_CLIENT_CERT = false;
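
SSLCERTIFICATE names the certificate algorithm passed to the JSSE key/trust manager factories, which differs between the Sun and IBM providers. An illustrative sketch of how such a constant is consumed (assumption: the factory lookups below are standard JSSE calls, not code from this commit):

    import javax.net.ssl.KeyManagerFactory;
    import javax.net.ssl.TrustManagerFactory;

    import org.apache.hadoop.security.ssl.SSLFactory;

    public class SslCertificateSketch {
      public static void main(String[] args) throws Exception {
        // "SunX509" / "ibmX509" are algorithm names understood by the
        // respective vendors' JSSE providers.
        KeyManagerFactory kmf =
            KeyManagerFactory.getInstance(SSLFactory.SSLCERTIFICATE);
        TrustManagerFactory tmf =
            TrustManagerFactory.getInstance(SSLFactory.SSLCERTIFICATE);
        System.out.println(kmf.getAlgorithm() + " / " + tmf.getAlgorithm());
      }
    }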
 

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java Tue May  7 00:17:50 2013
@@ -32,20 +32,24 @@ public class PlatformName {
    * The complete platform 'name' to identify the platform as 
    * per the java-vm.
    */
-  private static final String platformName =
+  public static final String PLATFORM_NAME =
       (Shell.WINDOWS ? System.getenv("os") : System.getProperty("os.name"))
       + "-" + System.getProperty("os.arch")
       + "-" + System.getProperty("sun.arch.data.model");
   
   /**
-   * Get the complete platform as per the java-vm.
-   * @return returns the complete platform as per the java-vm.
+   * The java vendor name used on this platform.
    */
-  public static String getPlatformName() {
-    return platformName;
-  }
+  public static final String JAVA_VENDOR_NAME = System.getProperty("java.vendor");
+
+  /**
+   * A public static variable indicating whether the current java vendor
+   * is IBM java.
+   */
+  public static final boolean IBM_JAVA = JAVA_VENDOR_NAME.contains("IBM");
   
   public static void main(String[] args) {
-    System.out.println(platformName);
+    System.out.println("platform name: " + PLATFORM_NAME);
+    System.out.println("java vendor name: " + JAVA_VENDOR_NAME);
   }
 }
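
With the accessor replaced by public constants, call sites can reference the values directly. A minimal illustrative sketch (not part of this commit) of the consolidated vendor check that UserGroupInformation, SSLFactory and TestConfiguration now share:

    import static org.apache.hadoop.util.PlatformName.IBM_JAVA;

    import org.apache.hadoop.util.PlatformName;

    public class PlatformNameSketch {
      public static void main(String[] args) {
        // The constants are initialized once from system properties at class load.
        System.out.println(PlatformName.PLATFORM_NAME);
        System.out.println(PlatformName.JAVA_VENDOR_NAME);
        // Vendor-specific behavior now keys off a single shared flag.
        System.out.println(IBM_JAVA ? "ibmX509" : "SunX509");
      }
    }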

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto Tue May  7 00:17:50 2013
@@ -1,4 +1,4 @@
-/**DER
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto Tue May  7 00:17:50 2013
@@ -62,27 +62,55 @@ message RpcRequestHeaderProto { // the h
 
 /**
  * Rpc Response Header
- * ** If request is successfull response is returned as below ********
  * +------------------------------------------------------------------+
- * | Rpc reponse length in bytes (4 bytes int)                        |
+ * | Rpc total response length in bytes (4 bytes int)                 |
  * |  (sum of next two parts)                                         |
  * +------------------------------------------------------------------+
  * | RpcResponseHeaderProto - serialized delimited ie has len         |
  * +------------------------------------------------------------------+
  * | if request is successful:                                        |
  * |   - RpcResponse -  The actual rpc response  bytes follow         |
- *       the response header                                          |
+ * |     the response header                                          |
  * |     This response is serialized based on RpcKindProto            |
  * | if request fails :                                               |
  * |   The rpc response header contains the necessary info            |
  * +------------------------------------------------------------------+
  *
+ * Note that the rpc response header is also used when connection setup fails.
+ * I.e. the response looks like an rpc response with a fake callId.
  */
 message RpcResponseHeaderProto {
+  /**
+   * RpcStatusProto - success or failure.
+   * The response header's errorDetail, exceptionClassName and errorMsg
+   * contain further details on the error.
+   **/
+
   enum RpcStatusProto {
    SUCCESS = 0;  // RPC succeeded
-   ERROR = 1;    // RPC Failed
-   FATAL = 2;    // Fatal error - connection is closed
+   ERROR = 1;    // RPC error - connection left open for future calls
+   FATAL = 2;    // Fatal error - connection closed
+  }
+
+  enum RpcErrorCodeProto {
+
+   // Non-fatal Rpc error - connection left open for future rpc calls
+   ERROR_APPLICATION = 1;      // RPC Failed - rpc app threw exception
+   ERROR_NO_SUCH_METHOD = 2;   // Rpc error - no such method
+   ERROR_NO_SUCH_PROTOCOL = 3; // Rpc error - no such protocol
+   ERROR_RPC_SERVER  = 4;      // Rpc error on server side
+   ERROR_SERIALIZING_RESPONSE = 5; // error serializing response
+   ERROR_RPC_VERSION_MISMATCH = 6; // Rpc protocol version mismatch
+
+
+   // Fatal Server side Rpc error - connection closed
+   FATAL_UNKNOWN = 10;                   // unknown Fatal error
+   FATAL_UNSUPPORTED_SERIALIZATION = 11; // IPC layer serialization type invalid
+   FATAL_INVALID_RPC_HEADER = 12;        // fields of RpcHeader are invalid
+   FATAL_DESERIALIZING_REQUEST = 13;     // could not deserialize rpc request
+   FATAL_VERSION_MISMATCH = 14;          // Ipc Layer version mismatch
+   FATAL_UNAUTHORIZED = 15;              // Auth failed
   }
 
   required uint32 callId = 1; // callId used in Request
@@ -90,4 +118,5 @@ message RpcResponseHeaderProto {
   optional uint32 serverIpcVersionNum = 3; // Sent if success or fail
   optional string exceptionClassName = 4;  // if request fails
   optional string errorMsg = 5;  // if request fails, often contains stack trace
+  optional RpcErrorCodeProto errorDetail = 6; // in case of error
 }
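
For illustration, a client-side sketch of consuming a response framed as described above (assumptions: this is not code from the commit; the header's status field is elided in the hunk above; parseDelimitedFrom matches the "serialized delimited" framing):

    import java.io.DataInputStream;
    import java.io.IOException;

    import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto;
    import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;

    public class RpcResponseSketch {
      static void readResponse(DataInputStream in) throws IOException {
        int totalLen = in.readInt();  // 4-byte total length (header + response)
        RpcResponseHeaderProto header =
            RpcResponseHeaderProto.parseDelimitedFrom(in);
        if (header.getStatus() == RpcStatusProto.SUCCESS) {
          // The actual rpc response bytes follow the header, serialized
          // according to RpcKindProto.
        } else if (header.hasErrorDetail()) {
          // FATAL_* codes (10 and up) mean the server closed the connection.
          System.err.println(header.getErrorDetail() + ": "
              + header.getErrorMsg());
        }
      }
    }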

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h Tue May  7 00:17:50 2013
@@ -104,6 +104,9 @@ void TaskUsage();
 int Symlink(__in int argc, __in_ecount(argc) wchar_t *argv[]);
 void SymlinkUsage();
 
+int Readlink(__in int argc, __in_ecount(argc) wchar_t *argv[]);
+void ReadlinkUsage();
+
 int SystemInfo();
 void SystemInfoUsage();
 

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/main.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/main.c?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/main.c (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/main.c Tue May  7 00:17:50 2013
@@ -55,6 +55,10 @@ int wmain(__in int argc, __in_ecount(arg
   {
     return Symlink(argc - 1, argv + 1);
   }
+  else if (wcscmp(L"readlink", cmd) == 0)
+  {
+    return Readlink(argc - 1, argv + 1);
+  }
   else if (wcscmp(L"task", cmd) == 0)
   {
     return Task(argc - 1, argv + 1);
@@ -105,6 +109,10 @@ The available commands and their usages 
   SymlinkUsage();
   fwprintf(stdout, L"\n\n");
 
+  fwprintf(stdout, L"%-10s%s\n\n", L"readlink", L"Print the target of a symbolic link.");
+  ReadlinkUsage();
+  fwprintf(stdout, L"\n\n");
+
   fwprintf(stdout, L"%-15s%s\n\n", L"systeminfo", L"System information.");
   SystemInfoUsage();
   fwprintf(stdout, L"\n\n");
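
The new command follows the existing winutils invocation pattern; a hypothetical example, which prints the target of the given symbolic link:

    winutils.exe readlink D:\tmp\linkToFile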

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/symlink.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/symlink.c?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/symlink.c (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/symlink.c Tue May  7 00:17:50 2013
@@ -60,6 +60,17 @@ int Symlink(__in int argc, __in_ecount(a
     goto SymlinkEnd;
   }
 
+  if (wcschr(longLinkName, L'/') != NULL || wcschr(longFileName, L'/') != NULL)
+  {
+    // Reject forward-slash separated paths as they result in unusable symlinks.
+    //
+    fwprintf(stderr,
+      L"Rejecting forward-slash separated path which would result in an "
+      L"unusable symlink: link = %s, target = %s\n", longLinkName, longFileName);
+    ret = FAILURE;
+    goto SymlinkEnd;
+  }
+
   // Check if the process's access token has the privilege to create
   // symbolic links. Without this step, the call to CreateSymbolicLink() from
   // users who have the privilege to create symbolic links will still succeed.

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj Tue May  7 00:17:50 2013
@@ -160,6 +160,7 @@
     </Link>
   </ItemDefinitionGroup>
   <ItemGroup>
+    <ClCompile Include="readlink.c" />
     <ClCompile Include="symlink.c" />
     <ClCompile Include="systeminfo.c" />
     <ClCompile Include="chmod.c" />
@@ -178,4 +179,4 @@
   <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
   <ImportGroup Label="ExtensionTargets">
   </ImportGroup>
-</Project>
+</Project>
\ No newline at end of file

Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1477868-1479732

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java Tue May  7 00:17:50 2013
@@ -36,6 +36,7 @@ import java.util.Map;
 import java.util.Random;
 import java.util.Set;
 import java.util.regex.Pattern;
+import static java.util.concurrent.TimeUnit.*;
 
 import junit.framework.TestCase;
 import static org.junit.Assert.assertArrayEquals;
@@ -43,6 +44,7 @@ import org.apache.commons.lang.StringUti
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.net.NetUtils;
+import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 import org.codehaus.jackson.map.ObjectMapper; 
 
 public class TestConfiguration extends TestCase {
@@ -51,9 +53,8 @@ public class TestConfiguration extends T
   final static String CONFIG = new File("./test-config.xml").getAbsolutePath();
   final static String CONFIG2 = new File("./test-config2.xml").getAbsolutePath();
   final static Random RAN = new Random();
-  final static boolean IBMJAVA = System.getProperty("java.vendor").contains("IBM"); 
   final static String XMLHEADER = 
-            IBMJAVA?"<?xml version=\"1.0\" encoding=\"UTF-8\"?><configuration>":
+            IBM_JAVA?"<?xml version=\"1.0\" encoding=\"UTF-8\"?><configuration>":
   "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?><configuration>";
 
   @Override
@@ -693,6 +694,37 @@ public class TestConfiguration extends T
     assertTrue(fail);
   }
 
+  public void testTimeDuration() {
+    Configuration conf = new Configuration(false);
+    conf.setTimeDuration("test.time.a", 7L, SECONDS);
+    assertEquals("7s", conf.get("test.time.a"));
+    assertEquals(0L, conf.getTimeDuration("test.time.a", 30, MINUTES));
+    assertEquals(7L, conf.getTimeDuration("test.time.a", 30, SECONDS));
+    assertEquals(7000L, conf.getTimeDuration("test.time.a", 30, MILLISECONDS));
+    assertEquals(7000000L,
+        conf.getTimeDuration("test.time.a", 30, MICROSECONDS));
+    assertEquals(7000000000L,
+        conf.getTimeDuration("test.time.a", 30, NANOSECONDS));
+    conf.setTimeDuration("test.time.b", 1, DAYS);
+    assertEquals("1d", conf.get("test.time.b"));
+    assertEquals(1, conf.getTimeDuration("test.time.b", 1, DAYS));
+    assertEquals(24, conf.getTimeDuration("test.time.b", 1, HOURS));
+    assertEquals(MINUTES.convert(1, DAYS),
+        conf.getTimeDuration("test.time.b", 1, MINUTES));
+
+    // check default
+    assertEquals(30L, conf.getTimeDuration("test.time.X", 30, SECONDS));
+    conf.set("test.time.X", "30");
+    assertEquals(30L, conf.getTimeDuration("test.time.X", 40, SECONDS));
+
+    for (Configuration.ParsedTimeDuration ptd :
+         Configuration.ParsedTimeDuration.values()) {
+      conf.setTimeDuration("test.time.unit", 1, ptd.unit());
+      assertEquals(1 + ptd.suffix(), conf.get("test.time.unit"));
+      assertEquals(1, conf.getTimeDuration("test.time.unit", 2, ptd.unit()));
+    }
+  }
+
   public void testPattern() throws IOException {
     out = new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
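
Outside the test, the new time-duration API reads as in the following sketch (the key name is hypothetical). Values are stored with a unit suffix and converted to the caller's unit on read; an unsuffixed value like "30" is interpreted in the requested unit:

    import java.util.concurrent.TimeUnit;

    import org.apache.hadoop.conf.Configuration;

    public class TimeDurationSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        // Hypothetical key, stored as "30s".
        conf.setTimeDuration("example.heartbeat.interval", 30, TimeUnit.SECONDS);
        long millis = conf.getTimeDuration("example.heartbeat.interval",
            0, TimeUnit.MILLISECONDS);
        System.out.println(millis);  // prints 30000
      }
    }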

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java Tue May  7 00:17:50 2013
@@ -202,8 +202,6 @@ public abstract class FileContextPermiss
   
   
   /*
-   * Some filesystem like HDFS ignore the "x" bit if the permission.
-   * Others like localFs does not.
   * Override the method below if the file system being tested masks out
   * certain bits for file permissions.
    */

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextSymlink.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextSymlink.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextSymlink.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFSFileContextSymlink.java Tue May  7 00:17:50 2013
@@ -27,6 +27,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileUtil;
 import static org.apache.hadoop.fs.FileContextTestHelper.*;
 import static org.junit.Assert.*;
+import static org.junit.Assume.assumeTrue;
 import org.junit.Test;
 import org.junit.Before;
 
@@ -65,7 +66,44 @@ public class TestLocalFSFileContextSymli
     fc = FileContext.getLocalFSFileContext();
     super.setUp();
   }
-  
+
+  @Override
+  public void testCreateDanglingLink() throws IOException {
+    // Dangling symlinks are not supported on the Windows local file system.
+    assumeTrue(!Path.WINDOWS);
+    super.testCreateDanglingLink();
+  }
+
+  @Override
+  public void testCreateFileViaDanglingLinkParent() throws IOException {
+    assumeTrue(!Path.WINDOWS);
+    super.testCreateFileViaDanglingLinkParent();
+  }
+
+  @Override
+  public void testOpenResolvesLinks() throws IOException {
+    assumeTrue(!Path.WINDOWS);
+    super.testOpenResolvesLinks();
+  }
+
+  @Override
+  public void testRecursiveLinks() throws IOException {
+    assumeTrue(!Path.WINDOWS);
+    super.testRecursiveLinks();
+  }
+
+  @Override
+  public void testRenameDirToDanglingSymlink() throws IOException {
+    assumeTrue(!Path.WINDOWS);
+    super.testRenameDirToDanglingSymlink();
+  }
+
+  @Override  
+  public void testStatDanglingLink() throws IOException {
+    assumeTrue(!Path.WINDOWS);
+    super.testStatDanglingLink();
+  }
+
   @Test
  /** lstat a non-existent file using a partially qualified path */
   public void testDanglingLinkFilePartQual() throws IOException {
@@ -87,6 +125,7 @@ public class TestLocalFSFileContextSymli
   @Test
   /** Stat and lstat a dangling link */
   public void testDanglingLink() throws IOException {
+    assumeTrue(!Path.WINDOWS);
     Path fileAbs  = new Path(testBaseDir1()+"/file");    
     Path fileQual = new Path(testURI().toString(), fileAbs);    
     Path link     = new Path(testBaseDir1()+"/linkToFile");

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java Tue May  7 00:17:50 2013
@@ -17,73 +17,133 @@
  */
 package org.apache.hadoop.io;
 
+import static org.junit.Assert.fail;
+import static org.junit.Assume.assumeTrue;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.nativeio.NativeIO;
-
+import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import static org.junit.Assume.*;
-import static org.junit.Assert.*;
-import java.io.IOException;
-import java.io.File;
-import java.io.FileOutputStream;
 
 public class TestSecureIOUtils {
-  private static String realOwner, realGroup; 
-  private static final File testFilePath =
-      new File(System.getProperty("test.build.data"), "TestSecureIOContext");
+
+  private static String realOwner, realGroup;
+  private static File testFilePathIs;
+  private static File testFilePathRaf;
+  private static File testFilePathFadis;
+  private static FileSystem fs;
 
   @BeforeClass
   public static void makeTestFile() throws Exception {
-    FileOutputStream fos = new FileOutputStream(testFilePath);
-    fos.write("hello".getBytes("UTF-8"));
-    fos.close();
-
     Configuration conf = new Configuration();
-    FileSystem rawFS = FileSystem.getLocal(conf).getRaw();
-    FileStatus stat = rawFS.getFileStatus(
-      new Path(testFilePath.toString()));
+    fs = FileSystem.getLocal(conf).getRaw();
+    testFilePathIs =
+        new File((new Path("target", TestSecureIOUtils.class.getSimpleName()
+            + "1")).toUri().getRawPath());
+    testFilePathRaf =
+        new File((new Path("target", TestSecureIOUtils.class.getSimpleName()
+            + "2")).toUri().getRawPath());
+    testFilePathFadis =
+        new File((new Path("target", TestSecureIOUtils.class.getSimpleName()
+            + "3")).toUri().getRawPath());
+    for (File f : new File[] { testFilePathIs, testFilePathRaf,
+        testFilePathFadis }) {
+      FileOutputStream fos = new FileOutputStream(f);
+      fos.write("hello".getBytes("UTF-8"));
+      fos.close();
+    }
+
+    FileStatus stat = fs.getFileStatus(
+        new Path(testFilePathIs.toString()));
+    // RealOwner and RealGroup would be the same for all three files.
     realOwner = stat.getOwner();
     realGroup = stat.getGroup();
   }
 
-  @Test
+  @Test(timeout = 10000)
   public void testReadUnrestricted() throws IOException {
-    SecureIOUtils.openForRead(testFilePath, null, null).close();
+    SecureIOUtils.openForRead(testFilePathIs, null, null).close();
+    SecureIOUtils.openFSDataInputStream(testFilePathFadis, null, null).close();
+    SecureIOUtils.openForRandomRead(testFilePathRaf, "r", null, null).close();
   }
 
-  @Test
+  @Test(timeout = 10000)
   public void testReadCorrectlyRestrictedWithSecurity() throws IOException {
     SecureIOUtils
-      .openForRead(testFilePath, realOwner, realGroup).close();
+        .openForRead(testFilePathIs, realOwner, realGroup).close();
+    SecureIOUtils
+        .openFSDataInputStream(testFilePathFadis, realOwner, realGroup).close();
+    SecureIOUtils.openForRandomRead(testFilePathRaf, "r", realOwner, realGroup)
+        .close();
   }
 
-  @Test
+  @Test(timeout = 10000)
   public void testReadIncorrectlyRestrictedWithSecurity() throws IOException {
     // this will only run if libs are available
     assumeTrue(NativeIO.isAvailable());
 
     System.out.println("Running test with native libs...");
+    String invalidUser = "InvalidUser";
+
+    // We need to make sure that the forceSecureOpen* calls work only if
+    // the file belongs to expectedOwner.
 
+    // InputStream
     try {
       SecureIOUtils
-        .forceSecureOpenForRead(testFilePath, "invalidUser", null).close();
-      fail("Didn't throw expection for wrong ownership!");
+          .forceSecureOpenForRead(testFilePathIs, invalidUser, realGroup)
+          .close();
+      fail("Didn't throw expection for wrong user ownership!");
+
+    } catch (IOException ioe) {
+      // expected
+    }
+
+    // FSDataInputStream
+    try {
+      SecureIOUtils
+          .forceSecureOpenFSDataInputStream(testFilePathFadis, invalidUser,
+              realGroup).close();
+      fail("Didn't throw expection for wrong user ownership!");
+    } catch (IOException ioe) {
+      // expected
+    }
+
+    // RandomAccessFile
+    try {
+      SecureIOUtils
+          .forceSecureOpenForRandomRead(testFilePathRaf, "r", invalidUser,
+              realGroup).close();
+      fail("Didn't throw expection for wrong user ownership!");
     } catch (IOException ioe) {
       // expected
     }
   }
 
-  @Test
+  @Test(timeout = 10000)
   public void testCreateForWrite() throws IOException {
     try {
-      SecureIOUtils.createForWrite(testFilePath, 0777);
-      fail("Was able to create file at " + testFilePath);
+      SecureIOUtils.createForWrite(testFilePathIs, 0777);
+      fail("Was able to create file at " + testFilePathIs);
     } catch (SecureIOUtils.AlreadyExistsException aee) {
       // expected
     }
   }
+
+  @AfterClass
+  public static void removeTestFile() throws Exception {
+    // cleaning files
+    for (File f : new File[] { testFilePathIs, testFilePathRaf,
+        testFilePathFadis }) {
+      f.delete();
+    }
+  }
 }
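
The three stream types exercised above correspond to three open methods. A hedged caller-side sketch (the path is hypothetical; passing null owner/group skips the ownership check, as in testReadUnrestricted):

    import java.io.File;
    import java.io.IOException;

    import org.apache.hadoop.io.SecureIOUtils;

    public class SecureOpenSketch {
      public static void main(String[] args) throws IOException {
        File f = new File("/tmp/example");  // hypothetical, must already exist
        // InputStream, FSDataInputStream and RandomAccessFile variants; pass an
        // expected owner/group to enforce ownership, or null to skip the check.
        SecureIOUtils.openForRead(f, null, null).close();
        SecureIOUtils.openFSDataInputStream(f, null, null).close();
        SecureIOUtils.openForRandomRead(f, "r", null, null).close();
      }
    }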

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java Tue May  7 00:17:50 2013
@@ -25,6 +25,7 @@ import java.net.InetSocketAddress;
 import java.net.URISyntaxException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto;
 import org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto;
 import org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto;
 import org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto;
@@ -183,6 +184,8 @@ public class TestProtoBufRpc {
       RemoteException re = (RemoteException)e.getCause();
       RpcServerException rse = (RpcServerException) re
           .unwrapRemoteException(RpcServerException.class);
+      Assert.assertTrue(re.getErrorCode().equals(
+          RpcErrorCodeProto.ERROR_RPC_SERVER));
     }
   }
   
@@ -223,6 +226,8 @@ public class TestProtoBufRpc {
       Assert.assertTrue(re.getClassName().equals(
           URISyntaxException.class.getName()));
       Assert.assertTrue(re.getMessage().contains("testException"));
+      Assert.assertTrue(
+          re.getErrorCode().equals(RpcErrorCodeProto.ERROR_APPLICATION));
     }
   }
 }
\ No newline at end of file

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java?rev=1479733&r1=1479732&r2=1479733&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java Tue May  7 00:17:50 2013
@@ -34,6 +34,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto;
 import org.apache.hadoop.net.NetUtils;
 import org.junit.After;
 import org.junit.Before;
@@ -310,9 +311,13 @@ System.out.println("echo int is NOT supp
     try {
       proxy.echo(21);
       fail("The call must throw VersionMismatch exception");
-    } catch (IOException ex) {
-      Assert.assertTrue("Expected version mismatch but got " + ex.getMessage(), 
-          ex.getMessage().contains("VersionMismatch"));
+    } catch (RemoteException ex) {
+      Assert.assertEquals(RPC.VersionMismatch.class.getName(), 
+          ex.getClassName());
+      Assert.assertTrue(ex.getErrorCode().equals(
+          RpcErrorCodeProto.ERROR_RPC_VERSION_MISMATCH));
+    }  catch (IOException ex) {
+      fail("Expected version mismatch but got " + ex);
     }
   }
   


