hadoop-common-commits mailing list archives

From: a..@apache.org
Subject: svn commit: r1213389 - in /hadoop/common/branches/HDFS-1623/hadoop-common-project: hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/ hadoop-common/ hadoop-common/dev-support/jdiff/ hadoop-common/src/main/docs/ hadoop-common/sr...
Date: Mon, 12 Dec 2011 19:41:28 GMT
Author: atm
Date: Mon Dec 12 19:41:20 2011
New Revision: 1213389

URL: http://svn.apache.org/viewvc?rev=1213389&view=rev
Log:
Merge trunk into HA branch.

Added:
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/dev-support/jdiff/hadoop-core_0.22.0.xml
      - copied unchanged from r1213339, hadoop/common/trunk/hadoop-common-project/hadoop-common/dev-support/jdiff/hadoop-core_0.22.0.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/proto/
      - copied from r1213339, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/proto/hadoop_rpc.proto
      - copied unchanged from r1213339, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/hadoop_rpc.proto
Removed:
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/HadoopRpcProtos.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/package-info.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/proto/
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestProtos.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestRpcServiceProtos.java
Modified:
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationToken.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/pom.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationToken.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationToken.java?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationToken.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationToken.java Mon Dec 12 19:41:20 2011
@@ -23,12 +23,16 @@ import java.util.Map;
 import java.util.Set;
 import java.util.StringTokenizer;
 
+import javax.servlet.http.HttpServletRequest;
+
 /**
- * The {@link AuthenticationToken} contains information about an authenticated HTTP client and doubles
- * as the {@link Principal} to be returned by authenticated {@link HttpServletRequest}s
+ * The {@link AuthenticationToken} contains information about an authenticated
+ * HTTP client and doubles as the {@link Principal} to be returned by
+ * authenticated {@link HttpServletRequest}s
  * <p/>
- * The token can be serialized/deserialized to and from a string as it is sent and received in HTTP client
- * responses and requests as a HTTP cookie (this is done by the {@link AuthenticationFilter}).
+ * The token can be serialized/deserialized to and from a string as it is sent
+ * and received in HTTP client responses and requests as a HTTP cookie (this is
+ * done by the {@link AuthenticationFilter}).
  */
 public class AuthenticationToken implements Principal {
 

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt Mon Dec 12 19:41:20 2011
@@ -16,26 +16,26 @@ Trunk (unreleased changes)
     HADOOP-7595. Upgrade dependency to Avro 1.5.3. (Alejandro Abdelnur via atm)
 
     HADOOP-7524. Change RPC to allow multiple protocols including multuple
-                 versions of the same protocol (sanjay Radia)
+    versions of the same protocol (sanjay Radia)
 
     HADOOP-7607. Simplify the RPC proxy cleanup process. (atm)
 
     HADOOP-7635. RetryInvocationHandler should release underlying resources on
-                 close (atm)
+    close (atm)
 
     HADOOP-7687 Make getProtocolSignature public  (sanjay)
 
     HADOOP-7693. Enhance AvroRpcEngine to support the new #addProtocol
-                 interface introduced in HADOOP-7524.  (cutting)
+    interface introduced in HADOOP-7524.  (cutting)
 
     HADOOP-7716. RPC protocol registration on SS does not log the protocol name
-                 (only the class which may be different) (sanjay)
+    (only the class which may be different) (sanjay)
 
     HADOOP-7717. Move handling of concurrent client fail-overs to
-                 RetryInvocationHandler (atm)
+    RetryInvocationHandler (atm)
 
     HADOOP-6490. Use StringUtils over String#replace in Path#normalizePath.
-                 (Uma Maheswara Rao G via harsh)
+    (Uma Maheswara Rao G via harsh)
 
     HADOOP-7736. Remove duplicate Path#normalizePath call. (harsh)
 
@@ -74,8 +74,13 @@ Trunk (unreleased changes)
 
     HADOOP-7886. Add toString to FileStatus. (SreeHari via jghoman)
 
+    HADOOP-7899. Generate proto java files as part of the build. (tucu)
+
   BUGS
 
+    HADOOP-7851. Configuration.getClasses() never returns the default value. 
+                 (Uma Maheswara Rao G via amarrk)
+
     HADOOP-7606. Upgrade Jackson to version 1.7.1 to match the version required
     by Jersey (Alejandro Abdelnur via atm)
 
@@ -121,7 +126,13 @@ Trunk (unreleased changes)
     KerberosName name rules from configuration. (tucu)
 
     HADOOP-7888. TestFailoverProxy fails intermittently on trunk. (Jason Lowe
-                 via atm)
+    via atm)
+
+    HADOOP-7897. ProtobufRpcEngine client side exception mechanism is not
+    consistent with WritableRpcEngine. (suresh)
+
+    HADOOP-7902. skipping name rules setting (if already set) should be done 
+    on UGI initialization only. (tucu)
 
   OPTIMIZATIONS
 
@@ -155,6 +166,12 @@ Release 0.23.1 - Unreleased
     HADOOP-7877. Update balancer CLI usage documentation to include the new
     -policy option.  (szetszwo)
 
+    HADOOP-6840. Support non-recursive create() in FileSystem and 
+    SequenceFile.Writer. (jitendra and eli via eli)
+
+    HADOOP-6886. LocalFileSystem Needs createNonRecursive API.
+    (Nicolas Spiegelberg and eli via eli)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -175,6 +192,14 @@ Release 0.23.1 - Unreleased
 
    HADOOP-7854. UGI getCurrentUser is not synchronized. (Daryn Sharp via jitendra)
 
+   HADOOP-7870. fix SequenceFile#createWriter with boolean
+   createParent arg to respect createParent. (Jon Hsieh via eli)
+
+   HADOOP-7898. Fix javadoc warnings in AuthenticationToken.java. (suresh)
+
+   HADOOP-7878  Regression: HADOOP-7777 switch changes break HDFS tests when the
+   isSingleSwitch() predicate is used. (stevel)
+
 Release 0.23.0 - 2011-11-01 
 
   INCOMPATIBLE CHANGES
@@ -884,7 +909,19 @@ Release 0.23.0 - 2011-11-01 
     HADOOP-7797. Fix top-level pom.xml to refer to correct staging maven
     repository. (omalley via acmurthy) 
 
-Release 0.22.0 - Unreleased
+Release 0.22.1 - Unreleased
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+  IMPROVEMENTS
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
+Release 0.22.0 - 2011-11-29
 
   INCOMPATIBLE CHANGES
 

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Dec 12 19:41:20 2011
@@ -1,5 +1,5 @@
 /hadoop/common/branches/yahoo-merge/CHANGES.txt:1079157,1079163-1079164,1079167
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161333-1211747
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161333-1213339
 /hadoop/core/branches/branch-0.18/CHANGES.txt:727226
 /hadoop/core/branches/branch-0.19/CHANGES.txt:713112
 /hadoop/core/trunk/CHANGES.txt:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/pom.xml?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/pom.xml Mon Dec 12 19:41:20 2011
@@ -264,6 +264,11 @@
       <artifactId>hadoop-auth</artifactId>
       <scope>compile</scope>
     </dependency>
+    <dependency>
+      <groupId>com.googlecode.json-simple</groupId>
+      <artifactId>json-simple</artifactId>
+      <scope>compile</scope>
+    </dependency>
   </dependencies>
 
   <build>
@@ -290,6 +295,52 @@
         <artifactId>maven-antrun-plugin</artifactId>
         <executions>
           <execution>
+            <id>compile-proto</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <echo file="${project.build.directory}/compile-proto.sh">
+                  PROTO_DIR=${basedir}/src/main/proto
+                  ls $PROTO_DIR &amp;> /dev/null
+                  if [ $? = 0 ]; then
+                    JAVA_DIR=${project.build.directory}/generated-sources/java
+                    mkdir -p $JAVA_DIR
+                    ls $PROTO_DIR/*.proto | xargs -n 1 protoc -I$PROTO_DIR --java_out=$JAVA_DIR
+                  fi
+                </echo>
+                <exec executable="sh" dir="${project.build.directory}" failonerror="true">
+                  <arg line="./compile-proto.sh"/>
+                </exec>
+              </target>
+            </configuration>
+          </execution>
+          <execution>
+            <id>compile-test-proto</id>
+            <phase>generate-test-sources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <echo file="${project.build.directory}/compile-test-proto.sh">
+                  PROTO_DIR=${basedir}/src/test/proto
+                  ls $PROTO_DIR &amp;> /dev/null
+                  if [ $? = 0 ]; then
+                    JAVA_DIR=${project.build.directory}/generated-test-sources/java
+                    mkdir -p $JAVA_DIR
+                    ls $PROTO_DIR/*.proto | xargs -n 1 protoc -I$PROTO_DIR --java_out=$JAVA_DIR
+                  fi
+                </echo>
+                <exec executable="sh" dir="${project.build.directory}" failonerror="true">
+                  <arg line="./compile-test-proto.sh"/>
+                </exec>
+              </target>
+            </configuration>
+          </execution>
+          <execution>
             <id>save-version</id>
             <phase>generate-sources</phase>
             <goals>

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Dec 12 19:41:20 2011
@@ -1,2 +1,2 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1152502-1211747
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1152502-1213339
 /hadoop/core/branches/branch-0.19/src/docs:713112

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Dec 12 19:41:20 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1152502-1211747
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1152502-1213339
 /hadoop/core/branches/branch-0.19/core/src/java:713112
 /hadoop/core/trunk/src/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Mon Dec 12 19:41:20 2011
@@ -1145,9 +1145,11 @@ public class Configuration implements It
    *         or <code>defaultValue</code>. 
    */
   public Class<?>[] getClasses(String name, Class<?> ... defaultValue) {
-    String[] classnames = getTrimmedStrings(name);
-    if (classnames == null)
+    String valueString = getRaw(name);
+    if (null == valueString) {
       return defaultValue;
+    }
+    String[] classnames = getTrimmedStrings(name);
     try {
       Class<?>[] classes = new Class<?>[classnames.length];
       for(int i = 0; i < classnames.length; i++) {
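
For illustration, a minimal sketch (not part of this commit) of the getClasses() semantics that HADOOP-7851 restores; the key name "demo.classes" is hypothetical. getTrimmedStrings() returns an empty array for an unset key, never null, so the old null check could never return the default; checking getRaw() for null first distinguishes an unset key from a key explicitly set to the empty string:

    import org.apache.hadoop.conf.Configuration;

    public class GetClassesDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Unset key: the supplied default is returned (previously this
        // branch was unreachable because getTrimmedStrings() never
        // returned null).
        Class<?>[] classes = conf.getClasses("demo.classes", Configuration.class);
        System.out.println(classes.length);   // 1 -> Configuration.class
        // Key set to the empty string: an empty array, not the default.
        conf.set("demo.classes", "");
        System.out.println(
            conf.getClasses("demo.classes", Configuration.class).length); // 0
      }
    }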

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java Mon Dec 12 19:41:20 2011
@@ -20,8 +20,6 @@ package org.apache.hadoop.fs;
 
 import java.io.*;
 import java.util.Arrays;
-import java.util.Iterator;
-import java.util.zip.CRC32;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -31,7 +29,6 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.PureJavaCrc32;
-import org.apache.hadoop.util.StringUtils;
 
 /****************************************************************
  * Abstract Checksumed FileSystem.
@@ -389,9 +386,22 @@ public abstract class ChecksumFileSystem
   public FSDataOutputStream create(Path f, FsPermission permission,
       boolean overwrite, int bufferSize, short replication, long blockSize,
       Progressable progress) throws IOException {
+    return create(f, permission, overwrite, true, bufferSize,
+        replication, blockSize, progress);
+  }
+
+  private FSDataOutputStream create(Path f, FsPermission permission,
+      boolean overwrite, boolean createParent, int bufferSize,
+      short replication, long blockSize,
+      Progressable progress) throws IOException {
     Path parent = f.getParent();
-    if (parent != null && !mkdirs(parent)) {
-      throw new IOException("Mkdirs failed to create " + parent);
+    if (parent != null) {
+      if (!createParent && !exists(parent)) {
+        throw new FileNotFoundException("Parent directory doesn't exist: "
+            + parent);
+      } else if (!mkdirs(parent)) {
+        throw new IOException("Mkdirs failed to create " + parent);
+      }
     }
     final FSDataOutputStream out = new FSDataOutputStream(
         new ChecksumFSOutputSummer(this, f, overwrite, bufferSize, replication,
@@ -402,6 +412,15 @@ public abstract class ChecksumFileSystem
     return out;
   }
 
+  /** {@inheritDoc} */
+  @Override
+  public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
+      boolean overwrite, int bufferSize, short replication, long blockSize,
+      Progressable progress) throws IOException {
+    return create(f, permission, overwrite, false, bufferSize, replication,
+        blockSize, progress);
+  }
+
   /**
    * Set replication for an existing file.
    * Implement the abstract <tt>setReplication</tt> of <tt>FileSystem</tt>

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java Mon Dec 12 19:41:20 2011
@@ -829,6 +829,53 @@ public abstract class FileSystem extends
     }
   }
 
+  /**
+   * Opens an FSDataOutputStream at the indicated Path with write-progress
+   * reporting. Same as create(), except fails if parent directory doesn't
+   * already exist.
+   * @param f the file name to open
+   * @param overwrite if a file with this name already exists, then if true,
+   * the file will be overwritten, and if false an error will be thrown.
+   * @param bufferSize the size of the buffer to be used.
+   * @param replication required block replication for the file.
+   * @param blockSize
+   * @param progress
+   * @throws IOException
+   * @see #setPermission(Path, FsPermission)
+   * @deprecated API only for 0.20-append
+   */
+  @Deprecated
+  public FSDataOutputStream createNonRecursive(Path f,
+      boolean overwrite,
+      int bufferSize, short replication, long blockSize,
+      Progressable progress) throws IOException {
+    return this.createNonRecursive(f, FsPermission.getDefault(),
+        overwrite, bufferSize, replication, blockSize, progress);
+  }
+
+  /**
+   * Opens an FSDataOutputStream at the indicated Path with write-progress
+   * reporting. Same as create(), except fails if parent directory doesn't
+   * already exist.
+   * @param f the file name to open
+   * @param permission
+   * @param overwrite if a file with this name already exists, then if true,
+   * the file will be overwritten, and if false an error will be thrown.
+   * @param bufferSize the size of the buffer to be used.
+   * @param replication required block replication for the file.
+   * @param blockSize
+   * @param progress
+   * @throws IOException
+   * @see #setPermission(Path, FsPermission)
+   * @deprecated API only for 0.20-append
+   */
+   @Deprecated
+   public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
+       boolean overwrite, int bufferSize, short replication, long blockSize,
+       Progressable progress) throws IOException {
+     throw new IOException("createNonRecursive unsupported for this filesystem "
+         + this.getClass());
+   }
 
   /**
    * Creates the given Path as a brand-new zero-length file.  If
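
A hedged usage sketch of the createNonRecursive() API added above (the path is illustrative, and the API is marked @Deprecated for 0.20-append compatibility): unlike create(), it fails rather than creating missing parent directories via mkdirs().

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;

    public class CreateNonRecursiveDemo {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        Path file = new Path("/tmp/no-such-parent/part-00000");
        try {
          FSDataOutputStream out = fs.createNonRecursive(file,
              FsPermission.getDefault(), true, 4096, (short) 1,
              fs.getDefaultBlockSize(), null);
          out.close();
        } catch (IOException e) {
          // Expected when the parent directory does not exist; create()
          // would have made the parent with mkdirs() instead.
          System.err.println("createNonRecursive failed: " + e.getMessage());
        }
      }
    }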

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java Mon Dec 12 19:41:20 2011
@@ -29,7 +29,6 @@ import java.io.OutputStream;
 import java.net.URI;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
-import java.util.EnumSet;
 import java.util.StringTokenizer;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -238,9 +237,16 @@ public class RawLocalFileSystem extends 
   }
 
   /** {@inheritDoc} */
+  @Override
   public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize,
     short replication, long blockSize, Progressable progress)
     throws IOException {
+    return create(f, overwrite, true, bufferSize, replication, blockSize, progress);
+  }
+
+  private FSDataOutputStream create(Path f, boolean overwrite,
+      boolean createParent, int bufferSize, short replication, long blockSize,
+      Progressable progress) throws IOException {
     if (exists(f) && !overwrite) {
       throw new IOException("File already exists: "+f);
     }
@@ -263,7 +269,19 @@ public class RawLocalFileSystem extends 
     setPermission(f, permission);
     return out;
   }
-  
+
+  /** {@inheritDoc} */
+  @Override
+  public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
+      boolean overwrite,
+      int bufferSize, short replication, long blockSize,
+      Progressable progress) throws IOException {
+    FSDataOutputStream out = create(f,
+        overwrite, false, bufferSize, replication, blockSize, progress);
+    setPermission(f, permission);
+    return out;
+  }
+
   public boolean rename(Path src, Path dst) throws IOException {
     if (pathToFile(src).renameTo(pathToFile(dst))) {
       return true;

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java Mon Dec 12 19:41:20 2011
@@ -25,6 +25,7 @@ import java.security.MessageDigest;
 import org.apache.commons.logging.*;
 import org.apache.hadoop.util.Options;
 import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.Options.CreateOpts;
 import org.apache.hadoop.io.compress.CodecPool;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionInputStream;
@@ -442,6 +443,67 @@ public class SequenceFile {
 
   /**
    * Construct the preferred type of SequenceFile Writer.
+   * @param fs The configured filesystem.
+   * @param conf The configuration.
+   * @param name The name of the file.
+   * @param keyClass The 'key' type.
+   * @param valClass The 'value' type.
+   * @param bufferSize buffer size for the underlaying outputstream.
+   * @param replication replication factor for the file.
+   * @param blockSize block size for the file.
+   * @param createParent create parent directory if non-existent
+   * @param compressionType The compression type.
+   * @param codec The compression codec.
+   * @param metadata The metadata of the file.
+   * @return Returns the handle to the constructed SequenceFile Writer.
+   * @throws IOException
+   */
+  @Deprecated
+  public static Writer
+  createWriter(FileSystem fs, Configuration conf, Path name,
+               Class keyClass, Class valClass, int bufferSize,
+               short replication, long blockSize, boolean createParent,
+               CompressionType compressionType, CompressionCodec codec,
+               Metadata metadata) throws IOException {
+    return createWriter(FileContext.getFileContext(fs.getUri(), conf),
+        conf, name, keyClass, valClass, compressionType, codec,
+        metadata, EnumSet.of(CreateFlag.CREATE),
+        CreateOpts.bufferSize(bufferSize),
+        createParent ? CreateOpts.createParent()
+                     : CreateOpts.donotCreateParent(),
+        CreateOpts.repFac(replication),
+        CreateOpts.blockSize(blockSize)
+      );
+  }
+
+  /**
+   * Construct the preferred type of SequenceFile Writer.
+   * @param fc The context for the specified file.
+   * @param conf The configuration.
+   * @param name The name of the file.
+   * @param keyClass The 'key' type.
+   * @param valClass The 'value' type.
+   * @param compressionType The compression type.
+   * @param codec The compression codec.
+   * @param metadata The metadata of the file.
+   * @param createFlag gives the semantics of create: overwrite, append etc.
+   * @param opts file creation options; see {@link CreateOpts}.
+   * @return Returns the handle to the constructed SequenceFile Writer.
+   * @throws IOException
+   */
+  public static Writer
+  createWriter(FileContext fc, Configuration conf, Path name,
+               Class keyClass, Class valClass,
+               CompressionType compressionType, CompressionCodec codec,
+               Metadata metadata,
+               final EnumSet<CreateFlag> createFlag, CreateOpts... opts)
+               throws IOException {
+    return createWriter(conf, fc.create(name, createFlag, opts),
+          keyClass, valClass, compressionType, codec, metadata).ownStream();
+  }
+
+  /**
+   * Construct the preferred type of SequenceFile Writer.
    * @param fs The configured filesystem. 
    * @param conf The configuration.
    * @param name The name of the file. 
@@ -1063,6 +1125,8 @@ public class SequenceFile {
     boolean isCompressed() { return compress != CompressionType.NONE; }
     boolean isBlockCompressed() { return compress == CompressionType.BLOCK; }
     
+    Writer ownStream() { this.ownOutputStream = true; return this;  }
+
     /** Write and flush the file header. */
     private void writeFileHeader() 
       throws IOException {
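
A hedged sketch of the new FileContext-based createWriter() overload added above (path and key/value types are illustrative): CreateOpts.createParent() / donotCreateParent() select the recursive-create behavior that the deprecated boolean-flag overload maps onto.

    import java.io.IOException;
    import java.util.EnumSet;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CreateFlag;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Options.CreateOpts;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;

    public class FileContextWriterDemo {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileContext fc = FileContext.getLocalFSFileContext(conf);
        SequenceFile.Writer writer = SequenceFile.createWriter(fc, conf,
            new Path("/tmp/demo.seq"), LongWritable.class, Text.class,
            SequenceFile.CompressionType.NONE, null,
            new SequenceFile.Metadata(),
            EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
            CreateOpts.createParent());   // create missing parent directories
        writer.append(new LongWritable(1L), new Text("hello"));
        writer.close();
      }
    }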

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java Mon Dec 12 19:41:20 2011
@@ -33,14 +33,17 @@ public class ProtobufHelper {
   }
 
   /**
-   * Return the RemoteException wrapped in ServiceException as cause.
-   * @param se ServiceException that wraps RemoteException
-   * @return RemoteException wrapped in ServiceException or
-   *         a new IOException that wraps unexpected ServiceException.
+   * Return the IOException thrown by the remote server wrapped in 
+   * ServiceException as cause.
+   * @param se ServiceException that wraps IO exception thrown by the server
+   * @return Exception wrapped in ServiceException or
+   *         a new IOException that wraps the unexpected ServiceException.
    */
   public static IOException getRemoteException(ServiceException se) {
     Throwable e = se.getCause();
-    return ((e instanceof RemoteException) ? (IOException) e : 
-      new IOException(se));
+    if (e == null) {
+      return new IOException(se);
+    }
+    return e instanceof IOException ? (IOException) e : new IOException(se);
   }
 }
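
A minimal, self-contained sketch (not part of the commit) of the revised unwrapping behavior: an IOException cause is returned as-is, while a null or non-IOException cause produces a new IOException wrapping the ServiceException.

    import java.io.IOException;

    import com.google.protobuf.ServiceException;
    import org.apache.hadoop.ipc.ProtobufHelper;

    public class UnwrapDemo {
      public static void main(String[] args) {
        ServiceException se =
            new ServiceException(new IOException("remote failure"));
        IOException ioe = ProtobufHelper.getRemoteException(se);
        // The IOException cause is returned directly, not re-wrapped.
        System.out.println(ioe.getMessage());   // remote failure
      }
    }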

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java Mon Dec 12 19:41:20 2011
@@ -144,9 +144,10 @@ public class ProtobufRpcEngine implement
      * 
      * ServiceException has the following causes:
      * <ol>
-     * <li>Exceptions encountered in this methods are thrown as
-     * RpcClientException, wrapped in RemoteException</li>
-     * <li>Remote exceptions are thrown wrapped in RemoteException</li>
+     * <li>Exceptions encountered on the client side in this method are 
+     * set as cause in ServiceException as is.</li>
+     * <li>Exceptions from the server are wrapped in RemoteException and are
+     * set as cause in ServiceException</li>
      * </ol>
      * 
      * Note that the client calling protobuf RPC methods, must handle
@@ -167,9 +168,8 @@ public class ProtobufRpcEngine implement
       try {
         val = (RpcResponseWritable) client.call(RpcKind.RPC_PROTOCOL_BUFFER,
             new RpcRequestWritable(rpcRequest), remoteId);
-      } catch (Exception e) {
-        RpcClientException ce = new RpcClientException("Client exception", e);
-        throw new ServiceException(getRemoteException(ce));
+      } catch (Throwable e) {
+        throw new ServiceException(e);
       }
 
       HadoopRpcResponseProto response = val.message;
@@ -197,9 +197,8 @@ public class ProtobufRpcEngine implement
       try {
         returnMessage = prototype.newBuilderForType()
             .mergeFrom(response.getResponse()).build();
-      } catch (InvalidProtocolBufferException e) {
-        RpcClientException ce = new RpcClientException("Client exception", e);
-        throw new ServiceException(getRemoteException(ce));
+      } catch (Throwable e) {
+        throw new ServiceException(e);
       }
       return returnMessage;
     }
@@ -309,11 +308,6 @@ public class ProtobufRpcEngine implement
         numHandlers, numReaders, queueSizePerHandler, verbose, secretManager);
   }
   
-  private static RemoteException getRemoteException(Exception e) {
-    return new RemoteException(e.getClass().getName(),
-        StringUtils.stringifyException(e));
-  }
-
   public static class Server extends RPC.Server {
     /**
      * Construct an RPC server.
@@ -335,8 +329,8 @@ public class ProtobufRpcEngine implement
           numReaders, queueSizePerHandler, conf, classNameBase(protocolImpl
               .getClass().getName()), secretManager);
       this.verbose = verbose;  
-      registerProtocolAndImpl(RpcKind.RPC_PROTOCOL_BUFFER, 
-          protocolClass, protocolImpl);
+      registerProtocolAndImpl(RpcKind.RPC_PROTOCOL_BUFFER, protocolClass,
+          protocolImpl);
     }
 
     private static RpcResponseWritable handleException(Throwable e) {
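
Given the cause taxonomy the revised javadoc above documents (server failures wrapped in RemoteException, client-side failures set as the raw cause), a caller can distinguish the two cases; a hedged sketch of such a translation helper, with a hypothetical class name:

    import java.io.IOException;

    import com.google.protobuf.ServiceException;
    import org.apache.hadoop.ipc.RemoteException;

    public class RpcErrors {
      /** Map a ServiceException from a protobuf stub call to an IOException. */
      static IOException translate(ServiceException se) {
        Throwable cause = se.getCause();
        if (cause instanceof RemoteException) {
          // Server-side failure: unwrap to the original exception class.
          return ((RemoteException) cause).unwrapRemoteException();
        }
        // Client-side failure: the raw cause was set on the ServiceException.
        return new IOException(se);
      }
    }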

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HadoopKerberosName.java Mon Dec 12 19:41:20 2011
@@ -65,10 +65,8 @@ public class HadoopKerberosName extends 
    * @throws IOException
    */
   public static void setConfiguration(Configuration conf) throws IOException {
-    if (!hasRulesBeenSet()) {
-      String ruleString = conf.get("hadoop.security.auth_to_local", "DEFAULT");
-      setRules(ruleString);
-    }
+    String ruleString = conf.get("hadoop.security.auth_to_local", "DEFAULT");
+    setRules(ruleString);
   }
 
   public static void main(String[] args) throws Exception {

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Mon Dec 12 19:41:20 2011
@@ -57,6 +57,7 @@ import org.apache.hadoop.metrics2.annota
 import org.apache.hadoop.metrics2.annotation.Metrics;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.lib.MutableRate;
+import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Shell;
@@ -200,7 +201,7 @@ public class UserGroupInformation {
    */
   private static synchronized void ensureInitialized() {
     if (!isInitialized) {
-      initialize(new Configuration());
+        initialize(new Configuration(), KerberosName.hasRulesBeenSet());
     }
   }
 
@@ -208,11 +209,13 @@ public class UserGroupInformation {
    * Initialize UGI and related classes.
    * @param conf the configuration to use
    */
-  private static synchronized void initialize(Configuration conf) {
+  private static synchronized void initialize(Configuration conf, boolean skipRulesSetting) {
     initUGI(conf);
     // give the configuration on how to translate Kerberos names
     try {
-      HadoopKerberosName.setConfiguration(conf);
+      if (!skipRulesSetting) {
+        HadoopKerberosName.setConfiguration(conf);
+      }
     } catch (IOException ioe) {
       throw new RuntimeException("Problem with Kerberos auth_to_local name " +
           "configuration", ioe);
@@ -249,7 +252,7 @@ public class UserGroupInformation {
    * @param conf the configuration to use
    */
   public static void setConfiguration(Configuration conf) {
-    initialize(conf);
+    initialize(conf, false);
   }
   
   /**
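
A hedged sketch of the behavior HADOOP-7902 preserves (the rule string is illustrative): name rules installed explicitly, e.g. by a test, survive an implicit UGI initialization, while an explicit setConfiguration() still reapplies the configured rules.

    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.authentication.util.KerberosName;

    public class NameRulesDemo {
      public static void main(String[] args) throws Exception {
        KerberosName.setRules("RULE:[1:$1]\nDEFAULT");
        // Implicit initialization via ensureInitialized() now skips
        // HadoopKerberosName.setConfiguration(), so the rules above survive.
        UserGroupInformation.getCurrentUser();
      }
    }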

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Mon Dec 12 19:41:20 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:1152502-1211747
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:1152502-1213339
 /hadoop/core/branches/branch-0.19/core/src/test/core:713112
 /hadoop/core/trunk/src/test/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java Mon Dec 12 19:41:20 2011
@@ -837,6 +837,27 @@ public class TestConfiguration extends T
     assertTrue("Picked out wrong key " + key4, !res.containsKey(key4));
   }
 
+  public void testGetClassesShouldReturnDefaultValue() throws Exception {
+    Configuration config = new Configuration();
+    Class<?>[] classes = 
+      config.getClasses("testClassName", Configuration.class);
+    assertEquals(
+        "Not returning expected number of classes. Number of returned classes ="
+            + classes.length, 1, classes.length);
+    assertEquals("Not returning the default class Name", Configuration.class,
+        classes[0]);
+  }
+
+  public void testGetClassesShouldReturnEmptyArray()
+      throws Exception {
+    Configuration config = new Configuration();
+    config.set("testClassName", "");
+    Class<?>[] classes = config.getClasses("testClassName", Configuration.class);
+    assertEquals(
+        "Not returning expected number of classes. Number of returned classes ="
+            + classes.length, 0, classes.length);
+  }
+  
   public static void main(String[] argv) throws Exception {
     junit.textui.TestRunner.main(new String[]{
       TestConfiguration.class.getName()

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java Mon Dec 12 19:41:20 2011
@@ -29,7 +29,6 @@ import org.apache.commons.logging.Log;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.Options.CreateOpts;
 import org.apache.hadoop.fs.Options.Rename;
-import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Progressable;
 
@@ -49,6 +48,17 @@ public class TestFilterFileSystem extend
     public boolean isDirectory(Path f) { return false; }
     public boolean isFile(Path f) { return false; }
     public boolean createNewFile(Path f) { return false; }
+    public FSDataOutputStream createNonRecursive(Path f,
+        boolean overwrite,
+        int bufferSize, short replication, long blockSize,
+        Progressable progress) throws IOException {
+      return null;
+    }
+    public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
+        boolean overwrite, int bufferSize, short replication, long blockSize,
+        Progressable progress) throws IOException {
+      return null;
+    }
     public boolean mkdirs(Path f) { return false; }
     public FSDataInputStream open(Path f) { return null; }
     public FSDataOutputStream create(Path f) { return null; }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java Mon Dec 12 19:41:20 2011
@@ -26,6 +26,7 @@ import org.apache.commons.logging.*;
 
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.io.SequenceFile.Metadata;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -516,6 +517,29 @@ public class TestSequenceFile extends Te
     assertTrue("InputStream for " + path + " should have been closed.", openedFile[0].isClosed());
   }
 
+  public void testRecursiveSeqFileCreate() throws IOException {
+    Configuration conf = new Configuration();
+    FileSystem fs = FileSystem.getLocal(conf);
+    Path name = new Path(new Path(System.getProperty("test.build.data","."),
+        "recursiveCreateDir") , "file");
+    boolean createParent = false;
+
+    try {
+      SequenceFile.createWriter(fs, conf, name, RandomDatum.class,
+          RandomDatum.class, 512, (short) 1, 4096, createParent,
+          CompressionType.NONE, null, new Metadata());
+      fail("Expected an IOException due to missing parent");
+    } catch (IOException ioe) {
+      // Expected
+    }
+
+    createParent = true;
+    SequenceFile.createWriter(fs, conf, name, RandomDatum.class,
+        RandomDatum.class, 512, (short) 1, 4096, createParent,
+        CompressionType.NONE, null, new Metadata());
+    // should succeed, fails if exception thrown
+  }
+
   /** For debugging and testing. */
   public static void main(String[] args) throws Exception {
     int count = 1024 * 1024;

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java Mon Dec 12 19:41:20 2011
@@ -38,9 +38,11 @@ import java.util.Map;
 public class StaticMapping extends AbstractDNSToSwitchMapping  {
 
   /**
-   * key to define the node mapping as a comma-delimited list of host=rack
+   * Key to define the node mapping as a comma-delimited list of host=rack
    * mappings, e.g. <code>host1=r1,host2=r1,host3=r2</code>.
-   * </p>
+   * <p/>
+   * Value: {@value}
+   * <p/>
    * <b>Important: </b>spaces not trimmed and are considered significant.
    */
   public static final String KEY_HADOOP_CONFIGURED_NODE_MAPPING =
@@ -107,18 +109,16 @@ public class StaticMapping extends Abstr
   }
 
   /**
-   * This mapping is only single switch if the map is empty
-   * @return the current switching status
+   * Declare that this mapping is always multi-switch
+   * @return false, always
    */
   @Override
   public boolean isSingleSwitch() {
-    synchronized (nameToRackMap) {
-      return nameToRackMap.isEmpty();
-    }
+    return false;
   }
 
   /**
-   * Clear the map and revert to being a single switch
+   * Clear the map
    */
   public static void resetMap() {
     synchronized (nameToRackMap) {

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java?rev=1213389&r1=1213388&r2=1213389&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java Mon Dec 12 19:41:20 2011
@@ -44,7 +44,8 @@ public class TestStaticMapping extends A
   @Test
   public void testStaticIsSingleSwitch() throws Throwable {
     StaticMapping mapping = newInstance();
-    assertTrue("Empty maps are not single switch", mapping.isSingleSwitch());
+    assertFalse("Empty maps should not be not single switch",
+                mapping.isSingleSwitch());
   }
 
 
@@ -53,10 +54,8 @@ public class TestStaticMapping extends A
     StaticMapping staticMapping = newInstance();
     CachedDNSToSwitchMapping mapping =
         new CachedDNSToSwitchMapping(staticMapping);
-    assertTrue("Expected single switch", mapping.isSingleSwitch());
     StaticMapping.addNodeToRack("n1", "r1");
-    assertFalse("Expected to be multi switch",
-                mapping.isSingleSwitch());
+    assertFalse("Expected multi switch", mapping.isSingleSwitch());
   }
 
   @Test
@@ -96,8 +95,9 @@ public class TestStaticMapping extends A
   public void testNullConfiguration() throws Throwable {
     StaticMapping mapping = newInstance();
     mapping.setConf(null);
-    assertTrue("Null maps is not single switch", mapping.isSingleSwitch());
-    assertTrue("Expected to be single switch",
+    assertFalse("Null maps are expected to be multi switch",
+                mapping.isSingleSwitch());
+    assertFalse("Expected to be multi switch",
                AbstractDNSToSwitchMapping.isMappingSingleSwitch(mapping));
   }
 }


