hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From t...@apache.org
Subject svn commit: r1143559 [2/2] - in /hadoop/common/branches/HDFS-1073/common: ./ bin/ src/ src/docs/ src/java/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/fs/s3/ src/java/org/apache/hadoop/fs/shell/ src/java/org/apache/hadoop/http/ src/java/o...
Date Wed, 06 Jul 2011 20:45:23 GMT
Modified: hadoop/common/branches/HDFS-1073/common/src/saveVersion.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/saveVersion.sh?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/saveVersion.sh (original)
+++ hadoop/common/branches/HDFS-1073/common/src/saveVersion.sh Wed Jul  6 20:45:21 2011
@@ -26,7 +26,7 @@ build_dir=$2
 user=`whoami | tr '\n\r' '\n'`
 date=`date`
 cwd=`pwd`
-if [ -d .git ]; then
+if git rev-parse HEAD 2>/dev/null > /dev/null ; then
   revision=`git log -1 --pretty=format:"%H"`
   hostname=`hostname`
   branch=`git branch | sed -n -e 's/^* //p'`

Modified: hadoop/common/branches/HDFS-1073/common/src/test/bin/test-patch.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/bin/test-patch.sh?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/bin/test-patch.sh (original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/bin/test-patch.sh Wed Jul  6 20:45:21
2011
@@ -18,7 +18,8 @@ ulimit -n 1024
 ### Setup some variables.  
 ### SVN_REVISION and BUILD_URL are set by Hudson if it is run by patch process
 ### Read variables from properties file
-. `dirname $0`/../test-patch.properties
+bindir=$(dirname $0)
+. $bindir/../test-patch.properties
 
 ###############################################################################
 parseArgs() {
@@ -270,7 +271,8 @@ applyPatch () {
   echo "======================================================================"
   echo ""
   echo ""
-  $PATCH -E -p0 < $PATCH_DIR/patch
+  export PATCH
+  $bindir/smart-apply-patch.sh $PATCH_DIR/patch
   if [[ $? != 0 ]] ; then
     echo "PATCH APPLICATION FAILED"
     JIRA_COMMENT="$JIRA_COMMENT

Propchange: hadoop/common/branches/HDFS-1073/common/src/test/core/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jul  6 20:45:21 2011
@@ -1,2 +1,3 @@
+/hadoop/common/trunk/common/src/test/core:1134995-1143556
 /hadoop/core/branches/branch-0.19/core/src/test/core:713112
 /hadoop/core/trunk/src/test/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/conf/TestConfiguration.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/conf/TestConfiguration.java
(original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/conf/TestConfiguration.java
Wed Jul  6 20:45:21 2011
@@ -33,6 +33,7 @@ import java.util.regex.Pattern;
 import junit.framework.TestCase;
 import static org.junit.Assert.assertArrayEquals;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.Path;
 import org.codehaus.jackson.map.ObjectMapper; 
 
@@ -246,7 +247,12 @@ public class TestConfiguration extends T
 
   public void testGetLocalPath() throws IOException {
     Configuration conf = new Configuration();
-    conf.set("dirs", "a, b, c ");
+    String[] dirs = new String[]{"a", "b", "c"};
+    for (int i = 0; i < dirs.length; i++) {
+      dirs[i] = new Path(System.getProperty("test.build.data"), dirs[i])
+          .toString();
+    }
+    conf.set("dirs", StringUtils.join(dirs, ","));
     for (int i = 0; i < 1000; i++) {
       String localPath = conf.getLocalPath("dirs", "dir" + i).toString();
       assertTrue("Path doesn't end in specified dir: " + localPath,
@@ -258,7 +264,12 @@ public class TestConfiguration extends T
   
   public void testGetFile() throws IOException {
     Configuration conf = new Configuration();
-    conf.set("dirs", "a, b, c ");
+    String[] dirs = new String[]{"a", "b", "c"};
+    for (int i = 0; i < dirs.length; i++) {
+      dirs[i] = new Path(System.getProperty("test.build.data"), dirs[i])
+          .toString();
+    }
+    conf.set("dirs", StringUtils.join(dirs, ","));
     for (int i = 0; i < 1000; i++) {
       String localPath = conf.getFile("dirs", "dir" + i).toString();
       assertTrue("Path doesn't end in specified dir: " + localPath,

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/fs/FSMainOperationsBaseTest.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
(original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
Wed Jul  6 20:45:21 2011
@@ -25,6 +25,7 @@ import java.io.InputStream;
 
 
 import org.apache.hadoop.fs.Options.Rename;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -252,8 +253,9 @@ public abstract class FSMainOperationsBa
     }
   } 
   
+  @Test
   public void testListStatusThrowsExceptionForNonExistentFile()
-                                                    throws Exception {
+  throws Exception {
     try {
       fSys.listStatus(getTestRootPath(fSys, "test/hadoop/file"));
       Assert.fail("Should throw FileNotFoundException");
@@ -262,6 +264,27 @@ public abstract class FSMainOperationsBa
     }
   }
   
+  // TODO: update after fixing HADOOP-7352
+  @Test
+  public void testListStatusThrowsExceptionForUnreadableDir()
+  throws Exception {
+    Path testRootDir = getTestRootPath(fSys, "test/hadoop/dir");
+    Path obscuredDir = new Path(testRootDir, "foo");
+    Path subDir = new Path(obscuredDir, "bar"); //so foo is non-empty
+    fSys.mkdirs(subDir);
+    fSys.setPermission(obscuredDir, new FsPermission((short)0)); //no access
+    try {
+      fSys.listStatus(obscuredDir);
+      Assert.fail("Should throw IOException");
+    } catch (IOException ioe) {
+      // expected
+    } finally {
+      // make sure the test directory can be deleted
+      fSys.setPermission(obscuredDir, new FsPermission((short)0755)); //default
+    }
+  }
+
+
   @Test
   public void testListStatus() throws Exception {
     Path[] testDirs = {
@@ -315,6 +338,7 @@ public abstract class FSMainOperationsBa
     
   }
   
+  @Test
   public void testListStatusFilterWithSomeMatches() throws Exception {
     Path[] testDirs = {
         getTestRootPath(fSys, TEST_DIR_AAA),
@@ -919,12 +943,13 @@ public abstract class FSMainOperationsBa
 
   @Test
   public void testRenameDirectoryAsNonExistentDirectory() throws Exception {
-    testRenameDirectoryAsNonExistentDirectory(Rename.NONE);
+    doTestRenameDirectoryAsNonExistentDirectory(Rename.NONE);
     tearDown();
-    testRenameDirectoryAsNonExistentDirectory(Rename.OVERWRITE);
+    doTestRenameDirectoryAsNonExistentDirectory(Rename.OVERWRITE);
   }
 
-  private void testRenameDirectoryAsNonExistentDirectory(Rename... options) throws Exception
{
+  private void doTestRenameDirectoryAsNonExistentDirectory(Rename... options) 
+  throws Exception {
     if (!renameSupported()) return;
     
     Path src = getTestRootPath(fSys, "test/hadoop/dir");

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/http/TestHtmlQuoting.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/http/TestHtmlQuoting.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/http/TestHtmlQuoting.java
(original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/http/TestHtmlQuoting.java
Wed Jul  6 20:45:21 2011
@@ -17,11 +17,12 @@
  */
 package org.apache.hadoop.http;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.*;
+
+import javax.servlet.http.HttpServletRequest;
 
 import org.junit.Test;
+import org.mockito.Mockito;
 
 public class TestHtmlQuoting {
 
@@ -62,4 +63,28 @@ public class TestHtmlQuoting {
     }
     runRoundTrip(buffer.toString());
   }
+  
+
+  @Test
+  public void testRequestQuoting() throws Exception {
+    HttpServletRequest mockReq = Mockito.mock(HttpServletRequest.class);
+    HttpServer.QuotingInputFilter.RequestQuoter quoter =
+      new HttpServer.QuotingInputFilter.RequestQuoter(mockReq);
+    
+    Mockito.doReturn("a<b").when(mockReq).getParameter("x");
+    assertEquals("Test simple param quoting",
+        "a&lt;b", quoter.getParameter("x"));
+    
+    Mockito.doReturn(null).when(mockReq).getParameter("x");
+    assertEquals("Test that missing parameters dont cause NPE",
+        null, quoter.getParameter("x"));
+
+    Mockito.doReturn(new String[]{"a<b", "b"}).when(mockReq).getParameterValues("x");
+    assertArrayEquals("Test escaping of an array",
+        new String[]{"a&lt;b", "b"}, quoter.getParameterValues("x"));
+
+    Mockito.doReturn(null).when(mockReq).getParameterValues("x");
+    assertArrayEquals("Test that missing parameters dont cause NPE for array",
+        null, quoter.getParameterValues("x"));
+  }
 }

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/http/TestHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/http/TestHttpServer.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/http/TestHttpServer.java
(original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/http/TestHttpServer.java
Wed Jul  6 20:45:21 2011
@@ -45,16 +45,20 @@ import javax.servlet.http.HttpServletReq
 import javax.servlet.http.HttpServletRequestWrapper;
 import javax.servlet.http.HttpServletResponse;
 
+import junit.framework.Assert;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.http.HttpServer.QuotingInputFilter.RequestQuoter;
 import org.apache.hadoop.security.Groups;
 import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.mockito.Mockito;
 
 public class TestHttpServer extends HttpServerFunctionalTest {
   private static HttpServer server;
@@ -379,4 +383,26 @@ public class TestHttpServer extends Http
     }
     myServer.stop();
   }
+  
+  @Test
+  public void testRequestQuoterWithNull() throws Exception {
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    Mockito.doReturn(null).when(request).getParameterValues("dummy");
+    RequestQuoter requestQuoter = new RequestQuoter(request);
+    String[] parameterValues = requestQuoter.getParameterValues("dummy");
+    Assert.assertEquals("It should return null "
+        + "when there are no values for the parameter", null, parameterValues);
+  }
+
+  @Test
+  public void testRequestQuoterWithNotNull() throws Exception {
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    String[] values = new String[] { "abc", "def" };
+    Mockito.doReturn(values).when(request).getParameterValues("dummy");
+    RequestQuoter requestQuoter = new RequestQuoter(request);
+    String[] parameterValues = requestQuoter.getParameterValues("dummy");
+    Assert.assertTrue("It should return Parameter Values", Arrays.equals(
+        values, parameterValues));
+  }
+
 }

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/TestBytesWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/TestBytesWritable.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/TestBytesWritable.java
(original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/TestBytesWritable.java
Wed Jul  6 20:45:21 2011
@@ -17,13 +17,17 @@
  */
 package org.apache.hadoop.io;
 
-import junit.framework.TestCase;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 
 /**
  * This is the unit test for BytesWritable.
  */
-public class TestBytesWritable extends TestCase {
+public class TestBytesWritable {
 
+  @Test
   public void testSizeChange() throws Exception {
     byte[] hadoop = "hadoop".getBytes();
     BytesWritable buf = new BytesWritable(hadoop);
@@ -50,6 +54,7 @@ public class TestBytesWritable extends T
     assertEquals(hadoop[0], buf.getBytes()[0]);
   }
   
+  @Test
   public void testHash() throws Exception {
     byte[] owen = "owen".getBytes();
     BytesWritable buf = new BytesWritable(owen);
@@ -60,6 +65,7 @@ public class TestBytesWritable extends T
     assertEquals(1, buf.hashCode());
   }
   
+  @Test
   public void testCompare() throws Exception {
     byte[][] values = new byte[][]{"abc".getBytes(), 
                                    "ad".getBytes(),
@@ -88,10 +94,44 @@ public class TestBytesWritable extends T
     assertEquals(expected, actual);
   }
 
+  @Test
   public void testToString() {
     checkToString(new byte[]{0,1,2,0x10}, "00 01 02 10");
     checkToString(new byte[]{-0x80, -0x7f, -0x1, -0x2, 1, 0}, 
                   "80 81 ff fe 01 00");
   }
+  /**
+   * This test was written as result of adding the new zero
+   * copy constructor and set method to BytesWritable. These
+   * methods allow users to specify the backing buffer of the
+   * BytesWritable instance and a length. 
+   */
+  @Test
+  public void testZeroCopy() {
+    byte[] bytes = "brock".getBytes();
+    BytesWritable zeroBuf = new BytesWritable(bytes, bytes.length); // new
+    BytesWritable copyBuf = new BytesWritable(bytes); // old
+    // using zero copy constructor shouldn't result in a copy
+    assertTrue("copy took place, backing array != array passed to constructor",
+      bytes == zeroBuf.getBytes());
+    assertTrue("length of BW should backing byte array", zeroBuf.getLength() == bytes.length);
+    assertEquals("objects with same backing array should be equal", zeroBuf, copyBuf);
+    assertEquals("string repr of objects with same backing array should be equal", 
+        zeroBuf.toString(), copyBuf.toString());
+    assertTrue("compare order objects with same backing array should be equal", 
+        zeroBuf.compareTo(copyBuf) == 0);
+    assertTrue("hash of objects with same backing array should be equal",
+        zeroBuf.hashCode() == copyBuf.hashCode());
+    
+    // ensure expanding buffer is handled correctly
+    // for buffers created with zero copy api
+    byte[] buffer = new byte[bytes.length * 5];
+    zeroBuf.set(buffer, 0, buffer.length); // expand internal buffer
+    zeroBuf.set(bytes, 0, bytes.length); // set back to normal contents
+    assertEquals("buffer created with (array, len) has bad contents", 
+        zeroBuf, copyBuf);
+    assertTrue("buffer created with (array, len) has bad length",
+        zeroBuf.getLength() == copyBuf.getLength());
+  }
 }
 

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/TestIOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/TestIOUtils.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/TestIOUtils.java
(original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/TestIOUtils.java
Wed Jul  6 20:45:21 2011
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.io;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -65,4 +68,46 @@ public class TestIOUtils {
     Mockito.verify(inputStream, Mockito.atMost(0)).close();
     Mockito.verify(outputStream, Mockito.atMost(0)).close();
   }
+  
+  @Test
+  public void testCopyBytesWithCountShouldCloseStreamsWhenCloseIsTrue()
+      throws Exception {
+    InputStream inputStream = Mockito.mock(InputStream.class);
+    OutputStream outputStream = Mockito.mock(OutputStream.class);
+    Mockito.doReturn(-1).when(inputStream).read(new byte[4096], 0, 1);
+    IOUtils.copyBytes(inputStream, outputStream, (long) 1, true);
+    Mockito.verify(inputStream, Mockito.atLeastOnce()).close();
+    Mockito.verify(outputStream, Mockito.atLeastOnce()).close();
+  }
+
+  @Test
+  public void testCopyBytesWithCountShouldNotCloseStreamsWhenCloseIsFalse()
+      throws Exception {
+    InputStream inputStream = Mockito.mock(InputStream.class);
+    OutputStream outputStream = Mockito.mock(OutputStream.class);
+    Mockito.doReturn(-1).when(inputStream).read(new byte[4096], 0, 1);
+    IOUtils.copyBytes(inputStream, outputStream, (long) 1, false);
+    Mockito.verify(inputStream, Mockito.atMost(0)).close();
+    Mockito.verify(outputStream, Mockito.atMost(0)).close();
+  }
+
+  @Test
+  public void testCopyBytesWithCountShouldThrowOutTheStreamClosureExceptions()
+      throws Exception {
+    InputStream inputStream = Mockito.mock(InputStream.class);
+    OutputStream outputStream = Mockito.mock(OutputStream.class);
+    Mockito.doReturn(-1).when(inputStream).read(new byte[4096], 0, 1);
+    Mockito.doThrow(new IOException("Exception in closing the stream")).when(
+        outputStream).close();
+    try {
+      IOUtils.copyBytes(inputStream, outputStream, (long) 1, true);
+      fail("Should throw out the exception");
+    } catch (IOException e) {
+      assertEquals("Not throwing the expected exception.",
+          "Exception in closing the stream", e.getMessage());
+    }
+    Mockito.verify(inputStream, Mockito.atLeastOnce()).close();
+    Mockito.verify(outputStream, Mockito.atLeastOnce()).close();
+  }
+  
 }

Propchange: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/TestSequenceFile.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jul  6 20:45:21 2011
@@ -1,2 +1,3 @@
+/hadoop/common/trunk/common/src/test/core/org/apache/hadoop/io/TestSequenceFile.java:1134995-1143556
 /hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/io/TestSequenceFile.java:713112
 /hadoop/core/trunk/src/test/mapred/org/apache/hadoop/io/TestSequenceFile.java:776175-785643

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/compress/TestCodec.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/compress/TestCodec.java
(original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/compress/TestCodec.java
Wed Jul  6 20:45:21 2011
@@ -40,7 +40,6 @@ import java.util.zip.GZIPOutputStream;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -52,8 +51,7 @@ import org.apache.hadoop.io.SequenceFile
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
-import org.apache.hadoop.io.compress.CompressionOutputStream;
-import org.apache.hadoop.io.compress.CompressorStream;
+import org.apache.hadoop.io.compress.snappy.LoadSnappy;
 import org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor;
 import org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater;
 import org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater;
@@ -68,6 +66,7 @@ import org.apache.commons.codec.binary.B
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import org.junit.Assert;
 import org.junit.Test;
 import static org.junit.Assert.*;
 
@@ -96,6 +95,19 @@ public class TestCodec {
     codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.BZip2Codec");
     codecTest(conf, seed, count, "org.apache.hadoop.io.compress.BZip2Codec");
   }
+  
+  @Test
+  public void testSnappyCodec() throws IOException {
+    if (LoadSnappy.isAvailable()) {
+      if (LoadSnappy.isLoaded()) {
+        codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.SnappyCodec");
+        codecTest(conf, seed, count, "org.apache.hadoop.io.compress.SnappyCodec");
+      }
+      else {
+        Assert.fail("Snappy native available but Hadoop native not");
+      }
+    }
+  }
 
   @Test
   public void testDeflateCodec() throws IOException {

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java
(original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java
Wed Jul  6 20:45:21 2011
@@ -18,8 +18,14 @@
 
 package org.apache.hadoop.ipc;
 
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION;
+
+import java.io.IOException;
 import java.net.InetSocketAddress;
 
+import javax.security.sasl.Sasl;
+
+import junit.framework.Assert;
 import junit.framework.TestCase;
 
 import org.apache.avro.ipc.AvroRemoteException;
@@ -27,7 +33,16 @@ import org.apache.avro.util.Utf8;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.ipc.TestSaslRPC.CustomSecurityInfo;
+import org.apache.hadoop.ipc.TestSaslRPC.TestTokenIdentifier;
+import org.apache.hadoop.ipc.TestSaslRPC.TestTokenSecretManager;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.SaslRpcServer;
+import org.apache.hadoop.security.SecurityInfo;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
 
 /** Unit tests for AvroRpc. */
 public class TestAvroRpc extends TestCase {
@@ -36,8 +51,6 @@ public class TestAvroRpc extends TestCas
   public static final Log LOG =
     LogFactory.getLog(TestAvroRpc.class);
   
-  private static Configuration conf = new Configuration();
-
   int datasize = 1024*100;
   int numThreads = 50;
 
@@ -56,19 +69,47 @@ public class TestAvroRpc extends TestCas
     }
   }
 
-  public void testCalls() throws Exception {
+  public void testReflect() throws Exception {
+    testReflect(false);
+  }
+
+  public void testSecureReflect() throws Exception {
+    testReflect(true);
+  }
+
+  public void testSpecific() throws Exception {
+    testSpecific(false);
+  }
+
+  public void testSecureSpecific() throws Exception {
+    testSpecific(true);
+  }
+
+  private void testReflect(boolean secure) throws Exception {
     Configuration conf = new Configuration();
+    TestTokenSecretManager sm = null;
+    if (secure) {
+      makeSecure(conf);
+      sm = new TestTokenSecretManager();
+    }
+    UserGroupInformation.setConfiguration(conf);
     RPC.setProtocolEngine(conf, AvroTestProtocol.class, AvroRpcEngine.class);
     Server server = RPC.getServer(AvroTestProtocol.class,
-                                  new TestImpl(), ADDRESS, 0, conf);
-    AvroTestProtocol proxy = null;
+                                  new TestImpl(), ADDRESS, 0, 5, true, 
+                                  conf, sm);
     try {
       server.start();
-
       InetSocketAddress addr = NetUtils.getConnectAddress(server);
-      proxy =
+
+      if (secure) {
+        addToken(sm, addr);
+        //QOP must be auth
+        Assert.assertEquals("auth", SaslRpcServer.SASL_PROPS.get(Sasl.QOP));
+      }
+
+      AvroTestProtocol proxy =
         (AvroTestProtocol)RPC.getProxy(AvroTestProtocol.class, 0, addr, conf);
-      
+
       proxy.ping();
 
       String echo = proxy.echo("hello world");
@@ -89,23 +130,62 @@ public class TestAvroRpc extends TestCas
       assertTrue(caught);
 
     } finally {
+      resetSecurity();
       server.stop();
     }
   }
 
-  public void testAvroSpecificRpc() throws Exception {
+  private void makeSecure(Configuration conf) {
+    conf.set(HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+    conf.set("hadoop.rpc.socket.factory.class.default", "");
+    //Avro doesn't work with security annotations on protocol.
+    //Avro works ONLY with custom security context
+    SecurityUtil.setSecurityInfoProviders(new CustomSecurityInfo());
+  }
+  
+  private void resetSecurity() {
+    SecurityUtil.setSecurityInfoProviders(new SecurityInfo[0]);
+  }
+
+  private void addToken(TestTokenSecretManager sm, 
+      InetSocketAddress addr) throws IOException {
+    final UserGroupInformation current = UserGroupInformation.getCurrentUser();
+    
+    TestTokenIdentifier tokenId = new TestTokenIdentifier(new Text(current
+        .getUserName()));
+    Token<TestTokenIdentifier> token = new Token<TestTokenIdentifier>(tokenId,
+        sm);
+    Text host = new Text(addr.getAddress().getHostAddress() + ":"
+        + addr.getPort());
+    token.setService(host);
+    LOG.info("Service IP address for token is " + host);
+    current.addToken(token);
+  }
+
+  private void testSpecific(boolean secure) throws Exception {
     Configuration conf = new Configuration();
+    TestTokenSecretManager sm = null;
+    if (secure) {
+      makeSecure(conf);
+      sm = new TestTokenSecretManager();
+    }
+    UserGroupInformation.setConfiguration(conf);
     RPC.setProtocolEngine(conf, AvroSpecificTestProtocol.class, 
         AvroSpecificRpcEngine.class);
     Server server = RPC.getServer(AvroSpecificTestProtocol.class,
-                                  new AvroSpecificTestProtocolImpl(), 
-                                  ADDRESS, 0, conf);
-    AvroSpecificTestProtocol proxy = null;
+        new AvroSpecificTestProtocolImpl(), ADDRESS, 0, 5, true, 
+        conf, sm);
     try {
       server.start();
-
       InetSocketAddress addr = NetUtils.getConnectAddress(server);
-      proxy =
+
+      if (secure) {
+        addToken(sm, addr);
+        //QOP must be auth
+        Assert.assertEquals("auth", SaslRpcServer.SASL_PROPS.get(Sasl.QOP));
+      }
+
+      AvroSpecificTestProtocol proxy =
         (AvroSpecificTestProtocol)RPC.getProxy(AvroSpecificTestProtocol.class, 
             0, addr, conf);
       
@@ -116,6 +196,7 @@ public class TestAvroRpc extends TestCas
       assertEquals(3, intResult);
 
     } finally {
+      resetSecurity();
       server.stop();
     }
   }
@@ -134,5 +215,5 @@ public class TestAvroRpc extends TestCas
     }
     
   }
-  
+
 }

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestIPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestIPC.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestIPC.java
(original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestIPC.java
Wed Jul  6 20:45:21 2011
@@ -23,6 +23,7 @@ import org.apache.commons.logging.*;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.net.NetUtils;
 
@@ -45,6 +46,9 @@ import static org.mockito.Mockito.*;
 
 import org.apache.hadoop.conf.Configuration;
 import org.junit.Assume;
+import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
 
 import com.google.common.primitives.Bytes;
 import com.google.common.primitives.Ints;
@@ -469,6 +473,53 @@ public class TestIPC {
     }
   }
 
+  /**
+   * Test that, if a RuntimeException is thrown after creating a socket
+   * but before successfully connecting to the IPC server, that the
+   * failure is handled properly. This is a regression test for
+   * HADOOP-7428.
+   */
+  @Test
+  public void testRTEDuringConnectionSetup() throws Exception {
+    // Set up a socket factory which returns sockets which
+    // throw an RTE when setSoTimeout is called.
+    SocketFactory spyFactory = spy(NetUtils.getDefaultSocketFactory(conf));
+    Mockito.doAnswer(new Answer<Socket>() {
+      @Override
+      public Socket answer(InvocationOnMock invocation) throws Throwable {
+        Socket s = spy((Socket)invocation.callRealMethod());
+        doThrow(new RuntimeException("Injected fault")).when(s)
+          .setSoTimeout(anyInt());
+        return s;
+      }
+    }).when(spyFactory).createSocket();
+      
+    Server server = new TestServer(1, true);
+    server.start();
+    try {
+      // Call should fail due to injected exception.
+      InetSocketAddress address = NetUtils.getConnectAddress(server);
+      Client client = new Client(LongWritable.class, conf, spyFactory);
+      try {
+        client.call(new LongWritable(RANDOM.nextLong()),
+                address, null, null, 0, conf);
+        fail("Expected an exception to have been thrown");
+      } catch (Exception e) {
+        LOG.info("caught expected exception", e);
+        assertTrue(StringUtils.stringifyException(e).contains(
+            "Injected fault"));
+      }
+      // Resetting to the normal socket behavior should succeed
+      // (i.e. it should not have cached a half-constructed connection)
+  
+      Mockito.reset(spyFactory);
+      client.call(new LongWritable(RANDOM.nextLong()),
+          address, null, null, 0, conf);
+    } finally {
+      server.stop();
+    }
+  }
+  
   @Test
   public void testIpcTimeout() throws Exception {
     // start server

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestRPC.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestRPC.java
(original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestRPC.java
Wed Jul  6 20:45:21 2011
@@ -40,6 +40,10 @@ import org.apache.hadoop.security.author
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.authorize.Service;
 import org.apache.hadoop.security.AccessControlException;
+
+import com.google.protobuf.DescriptorProtos;
+import com.google.protobuf.DescriptorProtos.EnumDescriptorProto;
+
 import static org.apache.hadoop.test.MetricsAsserts.*;
 
 import static org.mockito.Mockito.*;
@@ -71,6 +75,9 @@ public class TestRPC extends TestCase {
     int error() throws IOException;
     void testServerGet() throws IOException;
     int[] exchange(int[] values) throws IOException;
+    
+    DescriptorProtos.EnumDescriptorProto exchangeProto(
+        DescriptorProtos.EnumDescriptorProto arg);
   }
 
   public static class TestImpl implements TestProtocol {
@@ -136,6 +143,11 @@ public class TestRPC extends TestCase {
       }
       return values;
     }
+
+    @Override
+    public EnumDescriptorProto exchangeProto(EnumDescriptorProto arg) {
+      return arg;
+    }
   }
 
   //
@@ -314,6 +326,13 @@ public class TestRPC extends TestCase {
 
     intResult = proxy.add(new int[] {1, 2});
     assertEquals(intResult, 3);
+    
+    // Test protobufs
+    EnumDescriptorProto sendProto =
+      EnumDescriptorProto.newBuilder().setName("test").build();
+    EnumDescriptorProto retProto = proxy.exchangeProto(sendProto);
+    assertEquals(sendProto, retProto);
+    assertNotSame(sendProto, retProto);
 
     boolean caught = false;
     try {

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestSaslRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestSaslRPC.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestSaslRPC.java
(original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestSaslRPC.java
Wed Jul  6 20:45:21 2011
@@ -18,12 +18,15 @@
 
 package org.apache.hadoop.ipc;
 
-import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION;
-import static org.junit.Assert.*;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.lang.annotation.Annotation;
 import java.net.InetSocketAddress;
 import java.security.PrivilegedExceptionAction;
 import java.util.Collection;
@@ -33,28 +36,28 @@ import javax.security.sasl.Sasl;
 
 import junit.framework.Assert;
 
-import org.apache.commons.logging.*;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.KerberosInfo;
-import org.apache.hadoop.security.token.SecretManager;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.security.token.TokenInfo;
-import org.apache.hadoop.security.token.TokenSelector;
-import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.SaslInputStream;
 import org.apache.hadoop.security.SaslRpcClient;
 import org.apache.hadoop.security.SaslRpcServer;
+import org.apache.hadoop.security.SecurityInfo;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.TestUserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-
+import org.apache.hadoop.security.token.SecretManager;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.security.token.TokenInfo;
+import org.apache.hadoop.security.token.TokenSelector;
+import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.log4j.Level;
 import org.junit.Test;
 
@@ -187,6 +190,42 @@ public class TestSaslRPC {
     }
   }
 
+  public static class CustomSecurityInfo extends SecurityInfo {
+
+    @Override
+    public KerberosInfo getKerberosInfo(Class<?> protocol) {
+      return new KerberosInfo() {
+        @Override
+        public Class<? extends Annotation> annotationType() {
+          return null;
+        }
+        @Override
+        public String serverPrincipal() {
+          return SERVER_PRINCIPAL_KEY;
+        }
+        @Override
+        public String clientPrincipal() {
+          return null;
+        }
+      };
+    }
+
+    @Override
+    public TokenInfo getTokenInfo(Class<?> protocol) {
+      return new TokenInfo() {
+        @Override
+        public Class<? extends TokenSelector<? extends 
+            TokenIdentifier>> value() {
+          return TestTokenSelector.class;
+        }
+        @Override
+        public Class<? extends Annotation> annotationType() {
+          return null;
+        }
+      };
+    }
+  }
+
   @Test
   public void testDigestRpc() throws Exception {
     TestTokenSecretManager sm = new TestTokenSecretManager();
@@ -195,7 +234,21 @@ public class TestSaslRPC {
 
     doDigestRpc(server, sm);
   }
-  
+
+  @Test
+  public void testDigestRpcWithoutAnnotation() throws Exception {
+    TestTokenSecretManager sm = new TestTokenSecretManager();
+    try {
+      SecurityUtil.setSecurityInfoProviders(new CustomSecurityInfo());
+      final Server server = RPC.getServer(TestSaslProtocol.class,
+                                          new TestSaslImpl(), ADDRESS, 0, 5, 
+                                          true, conf, sm);
+      doDigestRpc(server, sm);
+    } finally {
+      SecurityUtil.setSecurityInfoProviders(new SecurityInfo[0]);
+    }
+  }
+
   @Test
   public void testSecureToInsecureRpc() throws Exception {
     Server server = RPC.getServer(TestSaslProtocol.class,
@@ -223,8 +276,8 @@ public class TestSaslRPC {
     assertTrue(succeeded);
   }
   
-  private void doDigestRpc(Server server, TestTokenSecretManager sm)
-      throws Exception {
+  private void doDigestRpc(Server server, TestTokenSecretManager sm
+                           ) throws Exception {
     server.start();
 
     final UserGroupInformation current = UserGroupInformation.getCurrentUser();

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/jmx/TestJMXJsonServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/jmx/TestJMXJsonServlet.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/jmx/TestJMXJsonServlet.java
(original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/jmx/TestJMXJsonServlet.java
Wed Jul  6 20:45:21 2011
@@ -65,5 +65,18 @@ public class TestJMXJsonServlet extends 
     result = readOutput(new URL(baseUrl, "/jmx"));
     LOG.info("/jmx RESULT: "+result);
     assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
+    
+    // test getting an attribute of an MBean
+    result = readOutput(new URL(baseUrl, 
+        "/jmx?get=java.lang:type=Memory::HeapMemoryUsage"));
+    LOG.info("/jmx RESULT: "+result);
+    assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
+    assertReFind("\"committed\"\\s*:", result);
+    
+    // negative test: getting an attribute of an MBean with an empty attribute name
+    result = readOutput(new URL(baseUrl, 
+        "/jmx?get=java.lang:type=Memory::"));
+    LOG.info("/jmx RESULT: "+result);
+    assertReFind("\"ERROR\"", result);
   }
 }

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java
(original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java
Wed Jul  6 20:45:21 2011
@@ -138,6 +138,6 @@ public class TestMetricsConfig {
    * @return the filename
    */
   public static String getTestFilename(String basename) {
-    return "build/classes/"+ basename +".properties";
+    return "build/test/"+ basename +".properties";
   }
 }

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/security/TestUserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/security/TestUserGroupInformation.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/security/TestUserGroupInformation.java
(original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/security/TestUserGroupInformation.java
Wed Jul  6 20:45:21 2011
@@ -62,6 +62,29 @@ public class TestUserGroupInformation {
         + "DEFAULT");
     UserGroupInformation.setConfiguration(conf);
   }
+  
+  /** Test login method */
+  @Test
+  public void testLogin() throws Exception {
+    // login from unix
+    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+    assertEquals(UserGroupInformation.getCurrentUser(),
+                 UserGroupInformation.getLoginUser());
+    assertTrue(ugi.getGroupNames().length >= 1);
+
+    // ensure that doAs works correctly
+    UserGroupInformation userGroupInfo = 
+      UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES);
+    UserGroupInformation curUGI = 
+      userGroupInfo.doAs(new PrivilegedExceptionAction<UserGroupInformation>(){
+        public UserGroupInformation run() throws IOException {
+          return UserGroupInformation.getCurrentUser();
+        }});
+    // make sure in the scope of the doAs, the right user is current
+    assertEquals(curUGI, userGroupInfo);
+    // make sure it is not the same as the login user
+    assertFalse(curUGI.equals(UserGroupInformation.getLoginUser()));
+  }
 
   /**
    * given user name - get all the groups.
@@ -107,29 +130,6 @@ public class TestUserGroupInformation {
       }});
   }
 
-  /** Test login method */
-  @Test
-  public void testLogin() throws Exception {
-    // login from unix
-    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
-    assertEquals(UserGroupInformation.getCurrentUser(),
-                 UserGroupInformation.getLoginUser());
-    assertTrue(ugi.getGroupNames().length >= 1);
-
-    // ensure that doAs works correctly
-    UserGroupInformation userGroupInfo = 
-      UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES);
-    UserGroupInformation curUGI = 
-      userGroupInfo.doAs(new PrivilegedExceptionAction<UserGroupInformation>(){
-        public UserGroupInformation run() throws IOException {
-          return UserGroupInformation.getCurrentUser();
-        }});
-    // make sure in the scope of the doAs, the right user is current
-    assertEquals(curUGI, userGroupInfo);
-    // make sure it is not the same as the login user
-    assertFalse(curUGI.equals(UserGroupInformation.getLoginUser()));
-  }
-
   /** test constructor */
   @Test
   public void testConstructor() throws Exception {

Modified: hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/security/token/delegation/TestDelegationToken.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
(original)
+++ hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
Wed Jul  6 20:45:21 2011
@@ -164,7 +164,7 @@ public class TestDelegationToken {
       action.run();
       Assert.fail("action did not throw " + except);
     } catch (Throwable th) {
-      LOG.info("Caught an exception: " + StringUtils.stringifyException(th));
+      LOG.info("Caught an exception: ", th);
       assertEquals("action threw wrong exception", except, th.getClass());
     }
   }



Mime
View raw message