hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cmcc...@apache.org
Subject svn commit: r1588509 [4/4] - in /hadoop/common/branches/HADOOP-10388/hadoop-common-project: hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/ hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/ hadoop-a...
Date Fri, 18 Apr 2014 16:32:44 GMT
Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java Fri Apr 18 16:32:35 2014
@@ -19,12 +19,21 @@ package org.apache.hadoop.crypto.key;
 
 import java.io.File;
 import java.io.IOException;
+import java.net.URI;
 import java.util.List;
+import java.util.UUID;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.Assert;
+import org.junit.Before;
 import org.junit.Test;
 
 import static org.junit.Assert.assertArrayEquals;
@@ -33,8 +42,14 @@ import static org.junit.Assert.assertTru
 
 public class TestKeyProviderFactory {
 
-  private static final File tmpDir =
-      new File(System.getProperty("test.build.data", "/tmp"), "key");
+  private static File tmpDir;
+
+  @Before
+  public void setup() {
+    tmpDir = new File(System.getProperty("test.build.data", "target"),
+        UUID.randomUUID().toString());
+    tmpDir.mkdirs();
+  }
 
   @Test
   public void testFactory() throws Exception {
@@ -193,10 +208,87 @@ public class TestKeyProviderFactory {
     Configuration conf = new Configuration();
     final String ourUrl =
         JavaKeyStoreProvider.SCHEME_NAME + "://file" + tmpDir + "/test.jks";
+
     File file = new File(tmpDir, "test.jks");
     file.delete();
     conf.set(KeyProviderFactory.KEY_PROVIDER_PATH, ourUrl);
     checkSpecificProvider(conf, ourUrl);
+    Path path = KeyProvider.unnestUri(new URI(ourUrl));
+    FileSystem fs = path.getFileSystem(conf);
+    FileStatus s = fs.getFileStatus(path);
+    assertTrue(s.getPermission().toString().equals("rwx------"));
     assertTrue(file + " should exist", file.isFile());
+
+    // check permission retention after explicit change
+    fs.setPermission(path, new FsPermission("777"));
+    checkPermissionRetention(conf, ourUrl, path);
+  }
+
+  public void checkPermissionRetention(Configuration conf, String ourUrl, Path path) throws Exception {
+    KeyProvider provider = KeyProviderFactory.getProviders(conf).get(0);
+    // let's add a new key and flush and check that permissions are still set to 777
+    byte[] key = new byte[32];
+    for(int i =0; i < key.length; ++i) {
+      key[i] = (byte) i;
+    }
+    // create a new key
+    try {
+      provider.createKey("key5", key, KeyProvider.options(conf));
+    } catch (Exception e) {
+      e.printStackTrace();
+      throw e;
+    }
+    provider.flush();
+    // get a new instance of the provider to ensure it was saved correctly
+    provider = KeyProviderFactory.getProviders(conf).get(0);
+    assertArrayEquals(key, provider.getCurrentKey("key5").getMaterial());
+
+    FileSystem fs = path.getFileSystem(conf);
+    FileStatus s = fs.getFileStatus(path);
+    assertTrue("Permissions should have been retained from the preexisting keystore.", s.getPermission().toString().equals("rwxrwxrwx"));
   }
+
+  @Test
+  public void testJksProviderPasswordViaConfig() throws Exception {
+    Configuration conf = new Configuration();
+    final String ourUrl =
+        JavaKeyStoreProvider.SCHEME_NAME + "://file" + tmpDir + "/test.jks";
+    File file = new File(tmpDir, "test.jks");
+    file.delete();
+    try {
+      conf.set(KeyProviderFactory.KEY_PROVIDER_PATH, ourUrl);
+      conf.set(JavaKeyStoreProvider.KEYSTORE_PASSWORD_FILE_KEY,
+          "javakeystoreprovider.password");
+      KeyProvider provider = KeyProviderFactory.getProviders(conf).get(0);
+      provider.createKey("key3", new byte[32], KeyProvider.options(conf));
+      provider.flush();
+    } catch (Exception ex) {
+      Assert.fail("could not create keystore with password file");
+    }
+    KeyProvider provider = KeyProviderFactory.getProviders(conf).get(0);
+    Assert.assertNotNull(provider.getCurrentKey("key3"));
+
+    try {
+      conf.set(JavaKeyStoreProvider.KEYSTORE_PASSWORD_FILE_KEY, "bar");
+      KeyProviderFactory.getProviders(conf).get(0);
+      Assert.fail("using non existing password file, it should fail");
+    } catch (IOException ex) {
+      //NOP
+    }
+    try {
+      conf.set(JavaKeyStoreProvider.KEYSTORE_PASSWORD_FILE_KEY, "core-site.xml");
+      KeyProviderFactory.getProviders(conf).get(0);
+      Assert.fail("using different password file, it should fail");
+    } catch (IOException ex) {
+      //NOP
+    }
+    try {
+      conf.unset(JavaKeyStoreProvider.KEYSTORE_PASSWORD_FILE_KEY);
+      KeyProviderFactory.getProviders(conf).get(0);
+      Assert.fail("No password file property, env not set, it should fail");
+    } catch (IOException ex) {
+      //NOP
+    }
+  }
+
 }

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java Fri Apr 18 16:32:35 2014
@@ -22,23 +22,42 @@ import static org.junit.Assert.*;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.PrintStream;
+import java.util.UUID;
 
 import org.apache.hadoop.conf.Configuration;
+import org.junit.After;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
 public class TestKeyShell {
   private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
   private final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
-  private static final File tmpDir =
-      new File(System.getProperty("test.build.data", "/tmp"), "key");
-  
+
+  private static File tmpDir;
+
+  private PrintStream initialStdOut;
+  private PrintStream initialStdErr;
+
   @Before
   public void setup() throws Exception {
+    outContent.reset();
+    errContent.reset();
+    tmpDir = new File(System.getProperty("test.build.data", "target"),
+        UUID.randomUUID().toString());
+    tmpDir.mkdirs();
+    initialStdOut = System.out;
+    initialStdErr = System.err;
     System.setOut(new PrintStream(outContent));
     System.setErr(new PrintStream(errContent));
   }
-  
+
+  @After
+  public void cleanUp() throws Exception {
+    System.setOut(initialStdOut);
+    System.setErr(initialStdErr);
+  }
+
   @Test
   public void testKeySuccessfulKeyLifecycle() throws Exception {
     outContent.reset();
@@ -53,13 +72,22 @@ public class TestKeyShell {
     		"created."));
 
     outContent.reset();
-    String[] args2 = {"list", "--provider", 
+    String[] args2 = {"list", "--provider",
         "jceks://file" + tmpDir + "/keystore.jceks"};
     rc = ks.run(args2);
     assertEquals(0, rc);
     assertTrue(outContent.toString().contains("key1"));
 
     outContent.reset();
+    String[] args2a = {"list", "--metadata", "--provider",
+                      "jceks://file" + tmpDir + "/keystore.jceks"};
+    rc = ks.run(args2a);
+    assertEquals(0, rc);
+    assertTrue(outContent.toString().contains("key1"));
+    assertTrue(outContent.toString().contains("description"));
+    assertTrue(outContent.toString().contains("created"));
+
+    outContent.reset();
     String[] args3 = {"roll", "key1", "--provider", 
         "jceks://file" + tmpDir + "/keystore.jceks"};
     rc = ks.run(args3);

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java Fri Apr 18 16:32:35 2014
@@ -90,6 +90,10 @@ public abstract class FSMainOperationsBa
   public FSMainOperationsBaseTest() {
   }
   
+  public FSMainOperationsBaseTest(String testRootDir) {
+      super(testRootDir);
+  }
+  
   @Before
   public void setUp() throws Exception {
     fSys = createFileSystem();

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java Fri Apr 18 16:32:35 2014
@@ -49,7 +49,7 @@ public final class FileContextTestHelper
   /**
    * Create a context with the given test root
    */
-  private FileContextTestHelper(String testRootDir) {
+  public FileContextTestHelper(String testRootDir) {
     this.testRootDir = testRootDir;
   }
   

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java Fri Apr 18 16:32:35 2014
@@ -52,7 +52,7 @@ public class FileSystemTestHelper {
   /**
    * Create helper with the specified test root dir
    */
-  private FileSystemTestHelper(String testRootDir) {
+  public FileSystemTestHelper(String testRootDir) {
       this.testRootDir = testRootDir;
   }
 

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java Fri Apr 18 16:32:35 2014
@@ -25,7 +25,6 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.StringReader;
-import java.util.EnumSet;
 import java.util.Random;
 
 import org.apache.hadoop.test.GenericTestUtils;
@@ -48,16 +47,19 @@ public class TestDFVariations {
   }
 
   @Test(timeout=5000)
-  public void testMountAndFileSystem() throws Exception {
+  public void testMount() throws Exception {
     XXDF df = new XXDF();
     String expectedMount =
         Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/foo/bar";
-    String expectedFileSystem =
-        Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/dev/sda3";
-
     assertEquals("Invalid mount point",
         expectedMount, df.getMount());
+  }
 
+  @Test(timeout=5000)
+  public void testFileSystem() throws Exception {
+    XXDF df = new XXDF();
+    String expectedFileSystem =
+        Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/dev/sda3";
     assertEquals("Invalid filesystem",
         expectedFileSystem, df.getFilesystem());
   }

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java Fri Apr 18 16:32:35 2014
@@ -589,7 +589,7 @@ public class TestFileUtil {
         // should never happen since that method never throws InterruptedException.      
         assertNull(ie);  
       }
-      assertFalse(notADirectory.canRead());
+      assertFalse(FileUtil.canRead(notADirectory));
       final long du3 = FileUtil.getDU(partitioned);
       assertEquals(expected, du3);
 
@@ -600,7 +600,7 @@ public class TestFileUtil {
         // should never happen since that method never throws InterruptedException.      
         assertNull(ie);  
       }
-      assertFalse(partitioned.canRead());
+      assertFalse(FileUtil.canRead(partitioned));
       final long du4 = FileUtil.getDU(partitioned);
       assertEquals(0, du4);
     } finally {

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java Fri Apr 18 16:32:35 2014
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.io;
 
+import static org.mockito.Mockito.*;
+
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -31,6 +33,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionInputStream;
@@ -63,39 +66,44 @@ public class TestBloomMapFile extends Te
     FileSystem fs = FileSystem.getLocal(conf);
     Path qualifiedDirName = fs.makeQualified(TEST_DIR);
     conf.setInt("io.mapfile.bloom.size", 2048);
-    BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, fs,
-        qualifiedDirName.toString(), IntWritable.class, Text.class);
-    IntWritable key = new IntWritable();
-    Text value = new Text();
-    for (int i = 0; i < 2000; i += 2) {
-      key.set(i);
-      value.set("00" + i);
-      writer.append(key, value);
-    }
-    writer.close();
-
-    BloomMapFile.Reader reader = new BloomMapFile.Reader(fs,
-        qualifiedDirName.toString(), conf);
-    // check false positives rate
-    int falsePos = 0;
-    int falseNeg = 0;
-    for (int i = 0; i < 2000; i++) {
-      key.set(i);
-      boolean exists = reader.probablyHasKey(key);
-      if (i % 2 == 0) {
-        if (!exists)
-          falseNeg++;
-      } else {
-        if (exists)
-          falsePos++;
+    BloomMapFile.Writer writer = null;
+    BloomMapFile.Reader reader = null;
+    try {
+      writer = new BloomMapFile.Writer(conf, fs, qualifiedDirName.toString(),
+        IntWritable.class, Text.class);
+      IntWritable key = new IntWritable();
+      Text value = new Text();
+      for (int i = 0; i < 2000; i += 2) {
+        key.set(i);
+        value.set("00" + i);
+        writer.append(key, value);
+      }
+      writer.close();
+
+      reader = new BloomMapFile.Reader(fs, qualifiedDirName.toString(), conf);
+      // check false positives rate
+      int falsePos = 0;
+      int falseNeg = 0;
+      for (int i = 0; i < 2000; i++) {
+        key.set(i);
+        boolean exists = reader.probablyHasKey(key);
+        if (i % 2 == 0) {
+          if (!exists)
+            falseNeg++;
+        } else {
+          if (exists)
+            falsePos++;
+        }
       }
+      reader.close();
+      fs.delete(qualifiedDirName, true);
+      System.out.println("False negatives: " + falseNeg);
+      assertEquals(0, falseNeg);
+      System.out.println("False positives: " + falsePos);
+      assertTrue(falsePos < 2);
+    } finally {
+      IOUtils.cleanup(null, writer, reader);
     }
-    reader.close();
-    fs.delete(qualifiedDirName, true);
-    System.out.println("False negatives: " + falseNeg);
-    assertEquals(0, falseNeg);
-    System.out.println("False positives: " + falsePos);
-    assertTrue(falsePos < 2);
   }
 
   @SuppressWarnings("deprecation")
@@ -103,23 +111,28 @@ public class TestBloomMapFile extends Te
       throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
     Path qualifiedDirName = fs.makeQualified(TEST_DIR);
-    BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, fs,
-        qualifiedDirName.toString(), Text.class, NullWritable.class);
-    for (Text key : keys) {
-      writer.append(key, NullWritable.get());
-    }
-    writer.close();
-
-    // will check for membership in the opposite order of how keys were inserted
-    BloomMapFile.Reader reader = new BloomMapFile.Reader(fs,
-        qualifiedDirName.toString(), conf);
-    Collections.reverse(keys);
-    for (Text key : keys) {
-      assertTrue("False negative for existing key " + key,
+    BloomMapFile.Writer writer = null;
+    BloomMapFile.Reader reader = null;
+    try {
+      writer = new BloomMapFile.Writer(conf, fs, qualifiedDirName.toString(),
+        Text.class, NullWritable.class);
+      for (Text key : keys) {
+        writer.append(key, NullWritable.get());
+      }
+      writer.close();
+
+      // will check for membership in opposite order of how keys were inserted
+      reader = new BloomMapFile.Reader(fs, qualifiedDirName.toString(), conf);
+      Collections.reverse(keys);
+      for (Text key : keys) {
+        assertTrue("False negative for existing key " + key,
           reader.probablyHasKey(key));
+      }
+      reader.close();
+      fs.delete(qualifiedDirName, true);
+    } finally {
+      IOUtils.cleanup(null, writer, reader);
     }
-    reader.close();
-    fs.delete(qualifiedDirName, true);
   }
 
   public void testMembershipVaryingSizedKeysTest1() throws Exception {
@@ -140,15 +153,19 @@ public class TestBloomMapFile extends Te
    * test {@code BloomMapFile.delete()} method
    */
   public void testDeleteFile() {
+    BloomMapFile.Writer writer = null;
     try {
       FileSystem fs = FileSystem.getLocal(conf);
-      BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, TEST_FILE,
+      writer = new BloomMapFile.Writer(conf, TEST_FILE,
           MapFile.Writer.keyClass(IntWritable.class),
           MapFile.Writer.valueClass(Text.class));
       assertNotNull("testDeleteFile error !!!", writer);
-      BloomMapFile.delete(fs, "." + TEST_FILE);
+      writer.close();
+      BloomMapFile.delete(fs, TEST_FILE.toString());
     } catch (Exception ex) {
       fail("unexpect ex in testDeleteFile !!!");
+    } finally {
+      IOUtils.cleanup(null, writer);
     }
   }
   
@@ -157,24 +174,26 @@ public class TestBloomMapFile extends Te
    * IOException
    */
   public void testIOExceptionInWriterConstructor() {
-    Path dirNameSpy = org.mockito.Mockito.spy(TEST_FILE);
+    Path dirNameSpy = spy(TEST_FILE);
+    BloomMapFile.Reader reader = null;
+    BloomMapFile.Writer writer = null;
     try {
-      BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, TEST_FILE,
+      writer = new BloomMapFile.Writer(conf, TEST_FILE,
           MapFile.Writer.keyClass(IntWritable.class),
           MapFile.Writer.valueClass(Text.class));
       writer.append(new IntWritable(1), new Text("123124142"));
       writer.close();
 
-      org.mockito.Mockito.when(dirNameSpy.getFileSystem(conf)).thenThrow(
-          new IOException());
-      BloomMapFile.Reader reader = new BloomMapFile.Reader(dirNameSpy, conf,
+      when(dirNameSpy.getFileSystem(conf)).thenThrow(new IOException());
+      reader = new BloomMapFile.Reader(dirNameSpy, conf,
           MapFile.Reader.comparator(new WritableComparator(IntWritable.class)));
 
       assertNull("testIOExceptionInWriterConstructor error !!!",
           reader.getBloomFilter());
-      reader.close();
     } catch (Exception ex) {
       fail("unexpect ex in testIOExceptionInWriterConstructor !!!");
+    } finally {
+      IOUtils.cleanup(null, writer, reader);
     }
   }
 
@@ -183,8 +202,10 @@ public class TestBloomMapFile extends Te
    */
   public void testGetBloomMapFile() {
     int SIZE = 10;
+    BloomMapFile.Reader reader = null;
+    BloomMapFile.Writer writer = null;
     try {
-      BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, TEST_FILE,
+      writer = new BloomMapFile.Writer(conf, TEST_FILE,
           MapFile.Writer.keyClass(IntWritable.class),
           MapFile.Writer.valueClass(Text.class));
 
@@ -193,7 +214,7 @@ public class TestBloomMapFile extends Te
       }
       writer.close();
 
-      BloomMapFile.Reader reader = new BloomMapFile.Reader(TEST_FILE, conf,
+      reader = new BloomMapFile.Reader(TEST_FILE, conf,
           MapFile.Reader.comparator(new WritableComparator(IntWritable.class)));
 
       for (int i = 0; i < SIZE; i++) {
@@ -203,9 +224,10 @@ public class TestBloomMapFile extends Te
             
       assertNull("testGetBloomMapFile error !!!",
           reader.get(new IntWritable(SIZE + 5), new Text()));
-      reader.close();
     } catch (Exception ex) {
       fail("unexpect ex in testGetBloomMapFile !!!");
+    } finally {
+      IOUtils.cleanup(null, writer, reader);
     }
   }
 
@@ -214,36 +236,46 @@ public class TestBloomMapFile extends Te
    */
   @SuppressWarnings("deprecation")
   public void testBloomMapFileConstructors() {
+    BloomMapFile.Writer writer = null;
     try {
       FileSystem ts = FileSystem.get(conf);
       String testFileName = TEST_FILE.toString();
-      BloomMapFile.Writer writer1 = new BloomMapFile.Writer(conf, ts,
+      writer = new BloomMapFile.Writer(conf, ts,
           testFileName, IntWritable.class, Text.class, CompressionType.BLOCK,
           defaultCodec, defaultProgress);
-      assertNotNull("testBloomMapFileConstructors error !!!", writer1);
-      BloomMapFile.Writer writer2 = new BloomMapFile.Writer(conf, ts,
+      assertNotNull("testBloomMapFileConstructors error !!!", writer);
+      writer.close();
+      writer = new BloomMapFile.Writer(conf, ts,
           testFileName, IntWritable.class, Text.class, CompressionType.BLOCK,
           defaultProgress);
-      assertNotNull("testBloomMapFileConstructors error !!!", writer2);
-      BloomMapFile.Writer writer3 = new BloomMapFile.Writer(conf, ts,
+      assertNotNull("testBloomMapFileConstructors error !!!", writer);
+      writer.close();
+      writer = new BloomMapFile.Writer(conf, ts,
           testFileName, IntWritable.class, Text.class, CompressionType.BLOCK);
-      assertNotNull("testBloomMapFileConstructors error !!!", writer3);
-      BloomMapFile.Writer writer4 = new BloomMapFile.Writer(conf, ts,
+      assertNotNull("testBloomMapFileConstructors error !!!", writer);
+      writer.close();
+      writer = new BloomMapFile.Writer(conf, ts,
           testFileName, IntWritable.class, Text.class, CompressionType.RECORD,
           defaultCodec, defaultProgress);
-      assertNotNull("testBloomMapFileConstructors error !!!", writer4);
-      BloomMapFile.Writer writer5 = new BloomMapFile.Writer(conf, ts,
+      assertNotNull("testBloomMapFileConstructors error !!!", writer);
+      writer.close();
+      writer = new BloomMapFile.Writer(conf, ts,
           testFileName, IntWritable.class, Text.class, CompressionType.RECORD,
           defaultProgress);
-      assertNotNull("testBloomMapFileConstructors error !!!", writer5);
-      BloomMapFile.Writer writer6 = new BloomMapFile.Writer(conf, ts,
+      assertNotNull("testBloomMapFileConstructors error !!!", writer);
+      writer.close();
+      writer = new BloomMapFile.Writer(conf, ts,
           testFileName, IntWritable.class, Text.class, CompressionType.RECORD);
-      assertNotNull("testBloomMapFileConstructors error !!!", writer6);
-      BloomMapFile.Writer writer7 = new BloomMapFile.Writer(conf, ts,
+      assertNotNull("testBloomMapFileConstructors error !!!", writer);
+      writer.close();
+      writer = new BloomMapFile.Writer(conf, ts,
           testFileName, WritableComparator.get(Text.class), Text.class);
-      assertNotNull("testBloomMapFileConstructors error !!!", writer7);
+      assertNotNull("testBloomMapFileConstructors error !!!", writer);
+      writer.close();
     } catch (Exception ex) {
       fail("testBloomMapFileConstructors error !!!");
+    } finally {
+      IOUtils.cleanup(null, writer);
     }
   }
 
@@ -272,13 +304,13 @@ public class TestBloomMapFile extends Te
     @Override
     public CompressionOutputStream createOutputStream(OutputStream out,
         Compressor compressor) throws IOException {
-      return null;
+      return mock(CompressionOutputStream.class);
     }
 
     @Override
     public CompressionOutputStream createOutputStream(OutputStream out)
         throws IOException {
-      return null;
+      return mock(CompressionOutputStream.class);
     }
 
     @Override

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapFile.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapFile.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapFile.java Fri Apr 18 16:32:35 2014
@@ -26,6 +26,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionInputStream;
@@ -68,13 +69,13 @@ public class TestMapFile {
     @Override
     public CompressionOutputStream createOutputStream(OutputStream out)
         throws IOException {
-      return null;
+      return mock(CompressionOutputStream.class);
     }
 
     @Override
     public CompressionOutputStream createOutputStream(OutputStream out,
         Compressor compressor) throws IOException {
-      return null;
+      return mock(CompressionOutputStream.class);
     }
 
     @Override
@@ -138,46 +139,52 @@ public class TestMapFile {
   @Test
   public void testGetClosestOnCurrentApi() throws Exception {
     final String TEST_PREFIX = "testGetClosestOnCurrentApi.mapfile";
-    MapFile.Writer writer = createWriter(TEST_PREFIX, Text.class, Text.class);
-    int FIRST_KEY = 1;
-    // Test keys: 11,21,31,...,91
-    for (int i = FIRST_KEY; i < 100; i += 10) {      
-      Text t = new Text(Integer.toString(i));
-      writer.append(t, t);
-    }
-    writer.close();
+    MapFile.Writer writer = null;
+    MapFile.Reader reader = null;
+    try {
+      writer = createWriter(TEST_PREFIX, Text.class, Text.class);
+      int FIRST_KEY = 1;
+      // Test keys: 11,21,31,...,91
+      for (int i = FIRST_KEY; i < 100; i += 10) {      
+        Text t = new Text(Integer.toString(i));
+        writer.append(t, t);
+      }
+      writer.close();
 
-    MapFile.Reader reader = createReader(TEST_PREFIX, Text.class);
-    Text key = new Text("55");
-    Text value = new Text();
-
-    // Test get closest with step forward
-    Text closest = (Text) reader.getClosest(key, value);
-    assertEquals(new Text("61"), closest);
-
-    // Test get closest with step back
-    closest = (Text) reader.getClosest(key, value, true);
-    assertEquals(new Text("51"), closest);
-
-    // Test get closest when we pass explicit key
-    final Text explicitKey = new Text("21");
-    closest = (Text) reader.getClosest(explicitKey, value);
-    assertEquals(new Text("21"), explicitKey);
-
-    // Test what happens at boundaries. Assert if searching a key that is
-    // less than first key in the mapfile, that the first key is returned.
-    key = new Text("00");
-    closest = (Text) reader.getClosest(key, value);
-    assertEquals(FIRST_KEY, Integer.parseInt(closest.toString()));
-
-    // Assert that null is returned if key is > last entry in mapfile.
-    key = new Text("92");
-    closest = (Text) reader.getClosest(key, value);
-    assertNull("Not null key in testGetClosestWithNewCode", closest);
-
-    // If we were looking for the key before, we should get the last key
-    closest = (Text) reader.getClosest(key, value, true);
-    assertEquals(new Text("91"), closest);
+      reader = createReader(TEST_PREFIX, Text.class);
+      Text key = new Text("55");
+      Text value = new Text();
+
+      // Test get closest with step forward
+      Text closest = (Text) reader.getClosest(key, value);
+      assertEquals(new Text("61"), closest);
+
+      // Test get closest with step back
+      closest = (Text) reader.getClosest(key, value, true);
+      assertEquals(new Text("51"), closest);
+
+      // Test get closest when we pass explicit key
+      final Text explicitKey = new Text("21");
+      closest = (Text) reader.getClosest(explicitKey, value);
+      assertEquals(new Text("21"), explicitKey);
+
+      // Test what happens at boundaries. Assert if searching a key that is
+      // less than first key in the mapfile, that the first key is returned.
+      key = new Text("00");
+      closest = (Text) reader.getClosest(key, value);
+      assertEquals(FIRST_KEY, Integer.parseInt(closest.toString()));
+
+      // Assert that null is returned if key is > last entry in mapfile.
+      key = new Text("92");
+      closest = (Text) reader.getClosest(key, value);
+      assertNull("Not null key in testGetClosestWithNewCode", closest);
+
+      // If we were looking for the key before, we should get the last key
+      closest = (Text) reader.getClosest(key, value, true);
+      assertEquals(new Text("91"), closest);
+    } finally {
+      IOUtils.cleanup(null, writer, reader);
+    }
   }
   
   /**
@@ -187,16 +194,21 @@ public class TestMapFile {
   public void testMidKeyOnCurrentApi() throws Exception {
     // Write a mapfile of simple data: keys are
     final String TEST_PREFIX = "testMidKeyOnCurrentApi.mapfile";
-    MapFile.Writer writer = createWriter(TEST_PREFIX, IntWritable.class,
-        IntWritable.class);
-    // 0,1,....9
-    int SIZE = 10;
-    for (int i = 0; i < SIZE; i++)
-      writer.append(new IntWritable(i), new IntWritable(i));
-    writer.close();
+    MapFile.Writer writer = null;
+    MapFile.Reader reader = null;
+    try {
+      writer = createWriter(TEST_PREFIX, IntWritable.class, IntWritable.class);
+      // 0,1,....9
+      int SIZE = 10;
+      for (int i = 0; i < SIZE; i++)
+        writer.append(new IntWritable(i), new IntWritable(i));
+      writer.close();
 
-    MapFile.Reader reader = createReader(TEST_PREFIX, IntWritable.class);
-    assertEquals(new IntWritable((SIZE - 1) / 2), reader.midKey());
+      reader = createReader(TEST_PREFIX, IntWritable.class);
+      assertEquals(new IntWritable((SIZE - 1) / 2), reader.midKey());
+    } finally {
+      IOUtils.cleanup(null, writer, reader);
+    }
   }
   
   /**
@@ -206,16 +218,18 @@ public class TestMapFile {
   public void testRename() {
     final String NEW_FILE_NAME = "test-new.mapfile";
     final String OLD_FILE_NAME = "test-old.mapfile";
+    MapFile.Writer writer = null;
     try {
       FileSystem fs = FileSystem.getLocal(conf);
-      MapFile.Writer writer = createWriter(OLD_FILE_NAME, IntWritable.class,
-          IntWritable.class);
+      writer = createWriter(OLD_FILE_NAME, IntWritable.class, IntWritable.class);
       writer.close();
       MapFile.rename(fs, new Path(TEST_DIR, OLD_FILE_NAME).toString(), 
           new Path(TEST_DIR, NEW_FILE_NAME).toString());
       MapFile.delete(fs, new Path(TEST_DIR, NEW_FILE_NAME).toString());
     } catch (IOException ex) {
       fail("testRename error " + ex);
+    } finally {
+      IOUtils.cleanup(null, writer);
     }
   }
   
@@ -228,12 +242,12 @@ public class TestMapFile {
     final String ERROR_MESSAGE = "Can't rename file";
     final String NEW_FILE_NAME = "test-new.mapfile";
     final String OLD_FILE_NAME = "test-old.mapfile";
+    MapFile.Writer writer = null;
     try {
       FileSystem fs = FileSystem.getLocal(conf);
       FileSystem spyFs = spy(fs);
 
-      MapFile.Writer writer = createWriter(OLD_FILE_NAME, IntWritable.class,
-          IntWritable.class);
+      writer = createWriter(OLD_FILE_NAME, IntWritable.class, IntWritable.class);
       writer.close();
 
       Path oldDir = new Path(TEST_DIR, OLD_FILE_NAME);
@@ -246,6 +260,8 @@ public class TestMapFile {
     } catch (IOException ex) {
       assertEquals("testRenameWithException invalid IOExceptionMessage !!!",
           ex.getMessage(), ERROR_MESSAGE);
+    } finally {
+      IOUtils.cleanup(null, writer);
     }
   }
 
@@ -254,12 +270,12 @@ public class TestMapFile {
     final String ERROR_MESSAGE = "Could not rename";
     final String NEW_FILE_NAME = "test-new.mapfile";
     final String OLD_FILE_NAME = "test-old.mapfile";
+    MapFile.Writer writer = null;
     try {
       FileSystem fs = FileSystem.getLocal(conf);
       FileSystem spyFs = spy(fs);
 
-      MapFile.Writer writer = createWriter(OLD_FILE_NAME, IntWritable.class,
-          IntWritable.class);
+      writer = createWriter(OLD_FILE_NAME, IntWritable.class, IntWritable.class);
       writer.close();
 
       Path oldDir = new Path(TEST_DIR, OLD_FILE_NAME);
@@ -271,6 +287,8 @@ public class TestMapFile {
     } catch (IOException ex) {
       assertTrue("testRenameWithFalse invalid IOExceptionMessage error !!!", ex
           .getMessage().startsWith(ERROR_MESSAGE));
+    } finally {
+      IOUtils.cleanup(null, writer);
     }
   }
   
@@ -297,11 +315,7 @@ public class TestMapFile {
       assertTrue("testWriteWithFailDirCreation ex error !!!", ex.getMessage()
           .startsWith(ERROR_MESSAGE));
     } finally {
-      if (writer != null)
-        try {
-          writer.close();
-        } catch (IOException e) {
-        }
+      IOUtils.cleanup(null, writer);
     }
   }
 
@@ -312,20 +326,24 @@ public class TestMapFile {
   public void testOnFinalKey() {
     final String TEST_METHOD_KEY = "testOnFinalKey.mapfile";
     int SIZE = 10;
+    MapFile.Writer writer = null;
+    MapFile.Reader reader = null;
     try {
-      MapFile.Writer writer = createWriter(TEST_METHOD_KEY, IntWritable.class,
-          IntWritable.class);
+      writer = createWriter(TEST_METHOD_KEY, IntWritable.class,
+        IntWritable.class);
       for (int i = 0; i < SIZE; i++)
         writer.append(new IntWritable(i), new IntWritable(i));
       writer.close();
 
-      MapFile.Reader reader = createReader(TEST_METHOD_KEY, IntWritable.class);
+      reader = createReader(TEST_METHOD_KEY, IntWritable.class);
       IntWritable expectedKey = new IntWritable(0);
       reader.finalKey(expectedKey);
       assertEquals("testOnFinalKey not same !!!", expectedKey, new IntWritable(
           9));
     } catch (IOException ex) {
       fail("testOnFinalKey error !!!");
+    } finally {
+      IOUtils.cleanup(null, writer, reader);
     }
   }
   
@@ -338,7 +356,8 @@ public class TestMapFile {
     Class<? extends WritableComparable<?>> keyClass = IntWritable.class;
     Class<?> valueClass = Text.class;
     try {
-      createWriter("testKeyValueClasses.mapfile", IntWritable.class, Text.class);
+      createWriter("testKeyValueClasses.mapfile", IntWritable.class, Text.class)
+        .close();
       assertNotNull("writer key class null error !!!",
           MapFile.Writer.keyClass(keyClass));
       assertNotNull("writer value class null error !!!",
@@ -354,19 +373,22 @@ public class TestMapFile {
   @Test
   public void testReaderGetClosest() throws Exception {
     final String TEST_METHOD_KEY = "testReaderWithWrongKeyClass.mapfile";
+    MapFile.Writer writer = null;
+    MapFile.Reader reader = null;
     try {
-      MapFile.Writer writer = createWriter(TEST_METHOD_KEY, IntWritable.class,
-          Text.class);
+      writer = createWriter(TEST_METHOD_KEY, IntWritable.class, Text.class);
 
       for (int i = 0; i < 10; i++)
         writer.append(new IntWritable(i), new Text("value" + i));
       writer.close();
 
-      MapFile.Reader reader = createReader(TEST_METHOD_KEY, Text.class);
+      reader = createReader(TEST_METHOD_KEY, Text.class);
       reader.getClosest(new Text("2"), new Text(""));
       fail("no excepted exception in testReaderWithWrongKeyClass !!!");
     } catch (IOException ex) {
       /* Should be thrown to pass the test */
+    } finally {
+      IOUtils.cleanup(null, writer, reader);
     }
   }
   
@@ -376,13 +398,15 @@ public class TestMapFile {
   @Test
   public void testReaderWithWrongValueClass() {
     final String TEST_METHOD_KEY = "testReaderWithWrongValueClass.mapfile";
+    MapFile.Writer writer = null;
     try {
-      MapFile.Writer writer = createWriter(TEST_METHOD_KEY, IntWritable.class,
-          Text.class);
+      writer = createWriter(TEST_METHOD_KEY, IntWritable.class, Text.class);
       writer.append(new IntWritable(0), new IntWritable(0));
       fail("no excepted exception in testReaderWithWrongKeyClass !!!");
     } catch (IOException ex) {
       /* Should be thrown to pass the test */
+    } finally {
+      IOUtils.cleanup(null, writer);
     }
   }
   
@@ -394,15 +418,16 @@ public class TestMapFile {
     final String TEST_METHOD_KEY = "testReaderKeyIteration.mapfile";
     int SIZE = 10;
     int ITERATIONS = 5;
+    MapFile.Writer writer = null;
+    MapFile.Reader reader = null;
     try {
-      MapFile.Writer writer = createWriter(TEST_METHOD_KEY, IntWritable.class,
-          Text.class);
+      writer = createWriter(TEST_METHOD_KEY, IntWritable.class, Text.class);
       int start = 0;
       for (int i = 0; i < SIZE; i++)
         writer.append(new IntWritable(i), new Text("Value:" + i));
       writer.close();
 
-      MapFile.Reader reader = createReader(TEST_METHOD_KEY, IntWritable.class);
+      reader = createReader(TEST_METHOD_KEY, IntWritable.class);
       // test iteration
       Writable startValue = new Text("Value:" + start);
       int i = 0;
@@ -421,6 +446,8 @@ public class TestMapFile {
           reader.seek(new IntWritable(SIZE * 2)));
     } catch (IOException ex) {
       fail("reader seek error !!!");
+    } finally {
+      IOUtils.cleanup(null, writer, reader);
     }
   }
 
@@ -431,11 +458,11 @@ public class TestMapFile {
   public void testFix() {
     final String INDEX_LESS_MAP_FILE = "testFix.mapfile";
     int PAIR_SIZE = 20;
+    MapFile.Writer writer = null;
     try {
       FileSystem fs = FileSystem.getLocal(conf);
       Path dir = new Path(TEST_DIR, INDEX_LESS_MAP_FILE);
-      MapFile.Writer writer = createWriter(INDEX_LESS_MAP_FILE,
-          IntWritable.class, Text.class);
+      writer = createWriter(INDEX_LESS_MAP_FILE, IntWritable.class, Text.class);
       for (int i = 0; i < PAIR_SIZE; i++)
         writer.append(new IntWritable(0), new Text("value"));
       writer.close();
@@ -450,6 +477,8 @@ public class TestMapFile {
             MapFile.fix(fs, dir, IntWritable.class, Text.class, true, conf) == PAIR_SIZE);
     } catch (Exception ex) {
       fail("testFix error !!!");
+    } finally {
+      IOUtils.cleanup(null, writer);
     }
   }
   /**
@@ -459,38 +488,46 @@ public class TestMapFile {
   @SuppressWarnings("deprecation")
   public void testDeprecatedConstructors() {
     String path = new Path(TEST_DIR, "writes.mapfile").toString();
+    MapFile.Writer writer = null;
+    MapFile.Reader reader = null;
     try {
       FileSystem fs = FileSystem.getLocal(conf);
-      MapFile.Writer writer = new MapFile.Writer(conf, fs, path,
+      writer = new MapFile.Writer(conf, fs, path,
           IntWritable.class, Text.class, CompressionType.RECORD);
       assertNotNull(writer);
+      writer.close();
       writer = new MapFile.Writer(conf, fs, path, IntWritable.class,
           Text.class, CompressionType.RECORD, defaultProgressable);
       assertNotNull(writer);
+      writer.close();
       writer = new MapFile.Writer(conf, fs, path, IntWritable.class,
           Text.class, CompressionType.RECORD, defaultCodec, defaultProgressable);
       assertNotNull(writer);
+      writer.close();
       writer = new MapFile.Writer(conf, fs, path,
           WritableComparator.get(Text.class), Text.class);
       assertNotNull(writer);
+      writer.close();
       writer = new MapFile.Writer(conf, fs, path,
           WritableComparator.get(Text.class), Text.class,
           SequenceFile.CompressionType.RECORD);
       assertNotNull(writer);
+      writer.close();
       writer = new MapFile.Writer(conf, fs, path,
           WritableComparator.get(Text.class), Text.class,
           CompressionType.RECORD, defaultProgressable);
       assertNotNull(writer);
       writer.close();
 
-      MapFile.Reader reader = new MapFile.Reader(fs, path,
+      reader = new MapFile.Reader(fs, path,
           WritableComparator.get(IntWritable.class), conf);
       assertNotNull(reader);
       assertNotNull("reader key is null !!!", reader.getKeyClass());
       assertNotNull("reader value in null", reader.getValueClass());
-
     } catch (IOException e) {
       fail(e.getMessage());
+    } finally {
+      IOUtils.cleanup(null, writer, reader);
     }
   }
   
@@ -509,11 +546,7 @@ public class TestMapFile {
     } catch (Exception e) {
       fail("fail in testKeyLessWriterCreation. Other ex !!!");
     } finally {
-      if (writer != null)
-        try {
-          writer.close();
-        } catch (IOException e) {
-        }
+      IOUtils.cleanup(null, writer);
     }
   }
   /**
@@ -542,11 +575,7 @@ public class TestMapFile {
     } catch (Exception e) {
       fail("fail in testPathExplosionWriterCreation. Other ex !!!");
     } finally {
-      if (writer != null)
-        try {
-          writer.close();
-        } catch (IOException e) {
-        }
+      IOUtils.cleanup(null, writer);
     }
   }
 
@@ -555,9 +584,9 @@ public class TestMapFile {
    */
   @Test
   public void testDescOrderWithThrowExceptionWriterAppend() {
+    MapFile.Writer writer = null;
     try {
-      MapFile.Writer writer = createWriter(".mapfile", IntWritable.class,
-          Text.class);
+      writer = createWriter(".mapfile", IntWritable.class, Text.class);
       writer.append(new IntWritable(2), new Text("value: " + 1));
       writer.append(new IntWritable(2), new Text("value: " + 2));
       writer.append(new IntWritable(2), new Text("value: " + 4));
@@ -566,6 +595,8 @@ public class TestMapFile {
     } catch (IOException ex) {
     } catch (Exception e) {
       fail("testDescOrderWithThrowExceptionWriterAppend other ex throw !!!");
+    } finally {
+      IOUtils.cleanup(null, writer);
     }
   }
 
@@ -575,15 +606,17 @@ public class TestMapFile {
     String inFile = "mainMethodMapFile.mapfile";
     String outFile = "mainMethodMapFile.mapfile";
     String[] args = { path, outFile };
+    MapFile.Writer writer = null;
     try {
-      MapFile.Writer writer = createWriter(inFile, IntWritable.class,
-          Text.class);
+      writer = createWriter(inFile, IntWritable.class, Text.class);
       writer.append(new IntWritable(1), new Text("test_text1"));
       writer.append(new IntWritable(2), new Text("test_text2"));
       writer.close();
       MapFile.main(args);
     } catch (Exception ex) {
       fail("testMainMethodMapFile error !!!");
+    } finally {
+      IOUtils.cleanup(null, writer);
     }
   }
 
@@ -601,56 +634,58 @@ public class TestMapFile {
     Path qualifiedDirName = fs.makeQualified(dirName);
     // Make an index entry for every third insertion.
     MapFile.Writer.setIndexInterval(conf, 3);
-    MapFile.Writer writer = new MapFile.Writer(conf, fs,
-        qualifiedDirName.toString(), Text.class, Text.class);
-    // Assert that the index interval is 1
-    assertEquals(3, writer.getIndexInterval());
-    // Add entries up to 100 in intervals of ten.
-    final int FIRST_KEY = 10;
-    for (int i = FIRST_KEY; i < 100; i += 10) {
-      String iStr = Integer.toString(i);
-      Text t = new Text("00".substring(iStr.length()) + iStr);
-      writer.append(t, t);
-    }
-    writer.close();
-    // Now do getClosest on created mapfile.
-    MapFile.Reader reader = new MapFile.Reader(qualifiedDirName, conf);
+    MapFile.Writer writer = null;
+    MapFile.Reader reader = null;
     try {
-    Text key = new Text("55");
-    Text value = new Text();
-    Text closest = (Text) reader.getClosest(key, value);
-    // Assert that closest after 55 is 60
-    assertEquals(new Text("60"), closest);
-    // Get closest that falls before the passed key: 50
-    closest = (Text) reader.getClosest(key, value, true);
-    assertEquals(new Text("50"), closest);
-    // Test get closest when we pass explicit key
-    final Text TWENTY = new Text("20");
-    closest = (Text) reader.getClosest(TWENTY, value);
-    assertEquals(TWENTY, closest);
-    closest = (Text) reader.getClosest(TWENTY, value, true);
-    assertEquals(TWENTY, closest);
-    // Test what happens at boundaries. Assert if searching a key that is
-    // less than first key in the mapfile, that the first key is returned.
-    key = new Text("00");
-    closest = (Text) reader.getClosest(key, value);
-    assertEquals(FIRST_KEY, Integer.parseInt(closest.toString()));
-
-    // If we're looking for the first key before, and we pass in a key before
-    // the first key in the file, we should get null
-    closest = (Text) reader.getClosest(key, value, true);
-    assertNull(closest);
-
-    // Assert that null is returned if key is > last entry in mapfile.
-    key = new Text("99");
-    closest = (Text) reader.getClosest(key, value);
-    assertNull(closest);
-
-    // If we were looking for the key before, we should get the last key
-    closest = (Text) reader.getClosest(key, value, true);
-    assertEquals(new Text("90"), closest);
+      writer = new MapFile.Writer(conf, fs, qualifiedDirName.toString(),
+        Text.class, Text.class);
+      // Assert that the index interval is 3
+      assertEquals(3, writer.getIndexInterval());
+      // Add entries up to 100 in intervals of ten.
+      final int FIRST_KEY = 10;
+      for (int i = FIRST_KEY; i < 100; i += 10) {
+        String iStr = Integer.toString(i);
+        Text t = new Text("00".substring(iStr.length()) + iStr);
+        writer.append(t, t);
+      }
+      writer.close();
+      // Now do getClosest on created mapfile.
+      reader = new MapFile.Reader(qualifiedDirName, conf);
+      Text key = new Text("55");
+      Text value = new Text();
+      Text closest = (Text) reader.getClosest(key, value);
+      // Assert that closest after 55 is 60
+      assertEquals(new Text("60"), closest);
+      // Get closest that falls before the passed key: 50
+      closest = (Text) reader.getClosest(key, value, true);
+      assertEquals(new Text("50"), closest);
+      // Test get closest when we pass explicit key
+      final Text TWENTY = new Text("20");
+      closest = (Text) reader.getClosest(TWENTY, value);
+      assertEquals(TWENTY, closest);
+      closest = (Text) reader.getClosest(TWENTY, value, true);
+      assertEquals(TWENTY, closest);
+      // Test what happens at boundaries. Assert if searching a key that is
+      // less than first key in the mapfile, that the first key is returned.
+      key = new Text("00");
+      closest = (Text) reader.getClosest(key, value);
+      assertEquals(FIRST_KEY, Integer.parseInt(closest.toString()));
+
+      // If we're looking for the first key before, and we pass in a key before
+      // the first key in the file, we should get null
+      closest = (Text) reader.getClosest(key, value, true);
+      assertNull(closest);
+
+      // Assert that null is returned if key is > last entry in mapfile.
+      key = new Text("99");
+      closest = (Text) reader.getClosest(key, value);
+      assertNull(closest);
+
+      // If we were looking for the key before, we should get the last key
+      closest = (Text) reader.getClosest(key, value, true);
+      assertEquals(new Text("90"), closest);
     } finally {
-      reader.close();
+      IOUtils.cleanup(null, writer, reader);
     }
   }
 
@@ -662,16 +697,18 @@ public class TestMapFile {
     FileSystem fs = FileSystem.getLocal(conf);
     Path qualifiedDirName = fs.makeQualified(dirName);
 
-    MapFile.Writer writer = new MapFile.Writer(conf, fs,
-        qualifiedDirName.toString(), IntWritable.class, IntWritable.class);
-    writer.append(new IntWritable(1), new IntWritable(1));
-    writer.close();
-    // Now do getClosest on created mapfile.
-    MapFile.Reader reader = new MapFile.Reader(qualifiedDirName, conf);
+    MapFile.Writer writer = null;
+    MapFile.Reader reader = null;
     try {
+      writer = new MapFile.Writer(conf, fs, qualifiedDirName.toString(),
+        IntWritable.class, IntWritable.class);
+      writer.append(new IntWritable(1), new IntWritable(1));
+      writer.close();
+      // Now do getClosest on created mapfile.
+      reader = new MapFile.Reader(qualifiedDirName, conf);
       assertEquals(new IntWritable(1), reader.midKey());
     } finally {
-      reader.close();
+      IOUtils.cleanup(null, writer, reader);
     }
   }
 

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestCallQueueManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestCallQueueManager.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestCallQueueManager.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestCallQueueManager.java Fri Apr 18 16:32:35 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.ipc;
 
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -49,7 +48,7 @@ public class TestCallQueueManager {
     public volatile int callsAdded = 0; // How many calls we added, accurate unless interrupted
     private final int maxCalls;
 
-    private boolean isRunning = true;
+    private volatile boolean isRunning = true;
 
     public Putter(CallQueueManager<FakeCall> aCq, int maxCalls, int tag) {
       this.maxCalls = maxCalls;
@@ -201,16 +200,22 @@ public class TestCallQueueManager {
 
     // Ensure no calls were dropped
     long totalCallsCreated = 0;
-    long totalCallsConsumed = 0;
-
     for (Putter p : producers) {
-      totalCallsCreated += p.callsAdded;
       threads.get(p).interrupt();
     }
+    for (Putter p : producers) {
+      threads.get(p).join();
+      totalCallsCreated += p.callsAdded;
+    }
+    
+    long totalCallsConsumed = 0;
     for (Taker t : consumers) {
-      totalCallsConsumed += t.callsTaken;
       threads.get(t).interrupt();
     }
+    for (Taker t : consumers) {
+      threads.get(t).join();
+      totalCallsConsumed += t.callsTaken;
+    }
 
     assertEquals(totalCallsConsumed, totalCallsCreated);
   }

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java Fri Apr 18 16:32:35 2014
@@ -60,11 +60,12 @@ public class TestGangliaMetrics {
   @Test
   public void testTagsForPrefix() throws Exception {
     ConfigBuilder cb = new ConfigBuilder()
-      .add("test.sink.ganglia.tagsForPrefix.all", "*")
-      .add("test.sink.ganglia.tagsForPrefix.some", "NumActiveSinks, NumActiveSources")
-      .add("test.sink.ganglia.tagsForPrefix.none", "");
+      .add("Test.sink.ganglia.tagsForPrefix.all", "*")
+      .add("Test.sink.ganglia.tagsForPrefix.some", "NumActiveSinks, " +
+              "NumActiveSources")
+      .add("Test.sink.ganglia.tagsForPrefix.none", "");
     GangliaSink30 sink = new GangliaSink30();
-    sink.init(cb.subset("test.sink.ganglia"));
+    sink.init(cb.subset("Test.sink.ganglia"));
 
     List<MetricsTag> tags = new ArrayList<MetricsTag>();
     tags.add(new MetricsTag(MsInfo.Context, "all"));
@@ -97,8 +98,8 @@ public class TestGangliaMetrics {
   
   @Test public void testGangliaMetrics2() throws Exception {
     ConfigBuilder cb = new ConfigBuilder().add("default.period", 10)
-        .add("test.sink.gsink30.context", "test") // filter out only "test"
-        .add("test.sink.gsink31.context", "test") // filter out only "test"
+        .add("Test.sink.gsink30.context", "test") // filter out only "test"
+        .add("Test.sink.gsink31.context", "test") // filter out only "test"
         .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
 
     MetricsSystemImpl ms = new MetricsSystemImpl("Test");

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java Fri Apr 18 16:32:35 2014
@@ -88,11 +88,11 @@ public class TestMetricsSystemImpl {
     DefaultMetricsSystem.shutdown();
     new ConfigBuilder().add("*.period", 8)
         //.add("test.sink.plugin.urls", getPluginUrlsAsString())
-        .add("test.sink.test.class", TestSink.class.getName())
-        .add("test.*.source.filter.exclude", "s0")
-        .add("test.source.s1.metric.filter.exclude", "X*")
-        .add("test.sink.sink1.metric.filter.exclude", "Y*")
-        .add("test.sink.sink2.metric.filter.exclude", "Y*")
+        .add("Test.sink.test.class", TestSink.class.getName())
+        .add("Test.*.source.filter.exclude", "s0")
+        .add("Test.source.s1.metric.filter.exclude", "X*")
+        .add("Test.sink.sink1.metric.filter.exclude", "Y*")
+        .add("Test.sink.sink2.metric.filter.exclude", "Y*")
         .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
     MetricsSystemImpl ms = new MetricsSystemImpl("Test");
     ms.start();
@@ -130,11 +130,11 @@ public class TestMetricsSystemImpl {
     DefaultMetricsSystem.shutdown(); 
     new ConfigBuilder().add("*.period", 8)
         //.add("test.sink.plugin.urls", getPluginUrlsAsString())
-        .add("test.sink.test.class", TestSink.class.getName())
-        .add("test.*.source.filter.exclude", "s0")
-        .add("test.source.s1.metric.filter.exclude", "X*")
-        .add("test.sink.sink1.metric.filter.exclude", "Y*")
-        .add("test.sink.sink2.metric.filter.exclude", "Y*")
+        .add("Test.sink.test.class", TestSink.class.getName())
+        .add("Test.*.source.filter.exclude", "s0")
+        .add("Test.source.s1.metric.filter.exclude", "X*")
+        .add("Test.sink.sink1.metric.filter.exclude", "Y*")
+        .add("Test.sink.sink2.metric.filter.exclude", "Y*")
         .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
     MetricsSystemImpl ms = new MetricsSystemImpl("Test");
     ms.start();
@@ -167,12 +167,13 @@ public class TestMetricsSystemImpl {
   }
   
   @Test public void testMultiThreadedPublish() throws Exception {
+    final int numThreads = 10;
     new ConfigBuilder().add("*.period", 80)
-      .add("test.sink.Collector.queue.capacity", "20")
+      .add("Test.sink.Collector." + MetricsConfig.QUEUE_CAPACITY_KEY,
+              numThreads)
       .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
     final MetricsSystemImpl ms = new MetricsSystemImpl("Test");
     ms.start();
-    final int numThreads = 10;
     final CollectingSink sink = new CollectingSink(numThreads);
     ms.registerSink("Collector",
         "Collector of values from all threads.", sink);
@@ -279,10 +280,10 @@ public class TestMetricsSystemImpl {
 
   @Test public void testHangingSink() {
     new ConfigBuilder().add("*.period", 8)
-      .add("test.sink.test.class", TestSink.class.getName())
-      .add("test.sink.hanging.retry.delay", "1")
-      .add("test.sink.hanging.retry.backoff", "1.01")
-      .add("test.sink.hanging.retry.count", "0")
+      .add("Test.sink.test.class", TestSink.class.getName())
+      .add("Test.sink.hanging.retry.delay", "1")
+      .add("Test.sink.hanging.retry.backoff", "1.01")
+      .add("Test.sink.hanging.retry.count", "0")
       .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
     MetricsSystemImpl ms = new MetricsSystemImpl("Test");
     ms.start();

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java Fri Apr 18 16:32:35 2014
@@ -105,11 +105,18 @@ public class TestFileSink {
     ms.publishMetricsNow(); // publish the metrics
     ms.stop();
     ms.shutdown();
-    
-    InputStream is = new FileInputStream(outFile);
-    ByteArrayOutputStream baos = new ByteArrayOutputStream((int)outFile.length());
-    IOUtils.copyBytes(is, baos, 1024, true);
-    String outFileContent = new String(baos.toByteArray(), "UTF-8");
+
+    InputStream is = null;
+    ByteArrayOutputStream baos = null;
+    String outFileContent = null;
+    try {
+      is = new FileInputStream(outFile);
+      baos = new ByteArrayOutputStream((int)outFile.length());
+      IOUtils.copyBytes(is, baos, 1024, true);
+      outFileContent = new String(baos.toByteArray(), "UTF-8");
+    } finally {
+      IOUtils.cleanup(null, baos, is);
+    }
 
     // Check the out file content. Should be something like the following:
     //1360244820087 test1.testRecord1: Context=test1, testTag1=testTagValue1, testTag2=testTagValue2, Hostname=myhost, testMetric1=1, testMetric2=2

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java Fri Apr 18 16:32:35 2014
@@ -74,8 +74,9 @@ public class TestDoAsEffectiveUser {
   }
 
   @Before
-  public void setMasterConf() {
+  public void setMasterConf() throws IOException {
     UserGroupInformation.setConfiguration(masterConf);
+    refreshConf(masterConf);
   }
 
   private void configureSuperUserIPAddresses(Configuration conf,
@@ -297,6 +298,8 @@ public class TestDoAsEffectiveUser {
         .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)
         .setNumHandlers(2).setVerbose(false).build();
 
+    refreshConf(conf);
+
     try {
       server.start();
 
@@ -379,6 +382,8 @@ public class TestDoAsEffectiveUser {
         .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)
         .setNumHandlers(2).setVerbose(false).build();
     
+    refreshConf(conf);
+
     try {
       server.start();
 

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java Fri Apr 18 16:32:35 2014
@@ -19,6 +19,7 @@ package org.apache.hadoop.security.autho
 
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.Collection;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -223,9 +224,54 @@ public class TestProxyUsers {
     assertNotAuthorized(proxyUserUgi, "1.2.3.5");
   }
 
+  @Test
+  public void testWithDuplicateProxyGroups() throws Exception {
+    Configuration conf = new Configuration();
+    conf.set(
+      ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      StringUtils.join(",", Arrays.asList(GROUP_NAMES,GROUP_NAMES)));
+    conf.set(
+      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      PROXY_IP);
+    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
+    
+    Collection<String> groupsToBeProxied = ProxyUsers.getProxyGroups().get(
+        ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME));
+    
+    assertEquals (1,groupsToBeProxied.size());
+  }
+  
+  @Test
+  public void testWithDuplicateProxyHosts() throws Exception {
+    Configuration conf = new Configuration();
+    conf.set(
+      ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
+    conf.set(
+      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      StringUtils.join(",", Arrays.asList(PROXY_IP,PROXY_IP)));
+    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
+    
+    Collection<String> hosts = ProxyUsers.getProxyHosts().get(
+        ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME));
+    
+    assertEquals (1,hosts.size());
+  }
+
+  @Test
+  public void testProxyServer() {
+    Configuration conf = new Configuration();
+    assertFalse(ProxyUsers.isProxyServer("1.1.1.1"));
+    conf.set(ProxyUsers.CONF_HADOOP_PROXYSERVERS, "2.2.2.2, 3.3.3.3");
+    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
+    assertFalse(ProxyUsers.isProxyServer("1.1.1.1"));
+    assertTrue(ProxyUsers.isProxyServer("2.2.2.2"));
+    assertTrue(ProxyUsers.isProxyServer("3.3.3.3"));
+  }
+
   private void assertNotAuthorized(UserGroupInformation proxyUgi, String host) {
     try {
-      ProxyUsers.authorize(proxyUgi, host, null);
+      ProxyUsers.authorize(proxyUgi, host);
       fail("Allowed authorization of " + proxyUgi + " from " + host);
     } catch (AuthorizationException e) {
       // Expected
@@ -234,7 +280,7 @@ public class TestProxyUsers {
   
   private void assertAuthorized(UserGroupInformation proxyUgi, String host) {
     try {
-      ProxyUsers.authorize(proxyUgi, host, null);
+      ProxyUsers.authorize(proxyUgi, host);
     } catch (AuthorizationException e) {
      fail("Did not allow authorization of " + proxyUgi + " from " + host);
     }

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java Fri Apr 18 16:32:35 2014
@@ -22,9 +22,12 @@ import static org.apache.hadoop.util.Str
 import static org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.string2long;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -401,6 +404,14 @@ public class TestStringUtils extends Uni
       "begin %foo%_%bar%_%baz% end", pattern, replacements));
   }
 
+  @Test 
+  public void testGetUniqueNonEmptyTrimmedStrings (){
+    final String TO_SPLIT = ",foo, bar,baz,,blah,blah,bar,";
+    Collection<String> col = StringUtils.getTrimmedStringCollection(TO_SPLIT);
+    assertEquals(4, col.size());
+    assertTrue(col.containsAll(Arrays.asList(new String[]{"foo","bar","baz","blah"})));
+  }
+
   // Benchmark for StringUtils split
   public static void main(String []args) {
     final String TO_SPLIT = "foo,bar,baz,blah,blah";

Modified: hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java?rev=1588509&r1=1588508&r2=1588509&view=diff
==============================================================================
--- hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java (original)
+++ hadoop/common/branches/HADOOP-10388/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java Fri Apr 18 16:32:35 2014
@@ -273,6 +273,85 @@ public class TestWinUtils {
     assertTrue(aExe.delete());
   }
 
+  /** Validate behavior of chmod commands on directories on Windows. */
+  @Test (timeout = 30000)
+  public void testBasicChmodOnDir() throws IOException {
+    // Validate that listing a directory with no read permission fails
+    File a = new File(TEST_DIR, "a");
+    File b = new File(a, "b");
+    a.mkdirs();
+    assertTrue(b.createNewFile());
+
+    // Remove read permissions on directory a
+    chmod("300", a);
+    String[] files = a.list();
+    assertTrue("Listing a directory without read permission should fail",
+        null == files);
+
+    // restore permissions
+    chmod("700", a);
+    // validate that the directory can be listed now
+    files = a.list();
+    assertEquals("b", files[0]);
+
+    // Remove write permissions on the directory and validate the
+    // behavior for adding, deleting and renaming files
+    chmod("500", a);
+    File c = new File(a, "c");
+ 
+    try {
+      // Adding a new file will fail as expected because the
+      // FILE_WRITE_DATA/FILE_ADD_FILE privilege is denied on
+      // the dir.
+      c.createNewFile();
+      assertFalse("writeFile should have failed!", true);
+    } catch (IOException ex) {
+      LOG.info("Expected: Failed to create a file when directory "
+          + "permissions are 500");
+    }
+
+    // Deleting a file will succeed even if write permissions are not present
+    // on the parent dir. Check the following link for additional details:
+    // http://support.microsoft.com/kb/238018
+    assertTrue("Special behavior: deleting a file will succeed on Windows "
+        + "even if a user does not have write permissions on the parent dir",
+        b.delete());
+
+    assertFalse("Renaming a file should fail on the dir where a user does "
+        + "not have write permissions", b.renameTo(new File(a, "d")));
+
+    // restore permissions
+    chmod("700", a);
+
+    // Make sure adding new files and rename succeeds now
+    assertTrue(c.createNewFile());
+    File d = new File(a, "d");
+    assertTrue(c.renameTo(d));
+    // at this point in the test, d is the only remaining file in directory a
+
+    // Removing execute permissions does not have the same behavior on
+    // Windows as on Linux. Adding, renaming, deleting and listing files
+    // will still succeed. Windows default behavior is to bypass directory
+    // traverse checking (BYPASS_TRAVERSE_CHECKING privilege) for all users.
+    // See the following link for additional details:
+    // http://msdn.microsoft.com/en-us/library/windows/desktop/aa364399(v=vs.85).aspx
+    chmod("600", a);
+
+    // validate directory listing
+    files = a.list();
+    assertEquals("d", files[0]);
+    // validate delete
+    assertTrue(d.delete());
+    // validate add
+    File e = new File(a, "e");
+    assertTrue(e.createNewFile());
+    // validate rename
+    assertTrue(e.renameTo(new File(a, "f")));
+
+    // restore permissions
+    chmod("700", a);
+  }
+
   @Test (timeout = 30000)
   public void testChmod() throws IOException {
     testChmodInternal("7", "-------rwx");



Mime
View raw message