hadoop-common-commits mailing list archives

From su...@apache.org
Subject svn commit: r1613514 [3/4] - in /hadoop/common/branches/YARN-1051/hadoop-common-project: hadoop-auth/ hadoop-common/ hadoop-common/src/main/bin/ hadoop-common/src/main/java/ hadoop-common/src/main/java/org/apache/hadoop/crypto/key/ hadoop-common/src/ma...
Date Fri, 25 Jul 2014 20:33:18 GMT
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java Fri Jul 25 20:33:09 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.viewfs;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.ArrayList;
 import java.util.List;
 
 
@@ -28,9 +29,16 @@ import org.apache.hadoop.fs.BlockLocatio
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystemTestHelper;
 import static org.apache.hadoop.fs.FileSystemTestHelper.*;
+import org.apache.hadoop.fs.permission.AclEntry;
+import static org.apache.hadoop.fs.viewfs.Constants.PERMISSION_555;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FsConstants;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.AclUtil;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.viewfs.ConfigUtil;
@@ -38,6 +46,7 @@ import org.apache.hadoop.fs.viewfs.ViewF
 import org.apache.hadoop.fs.viewfs.ViewFileSystem.MountPoint;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.junit.After;
 import org.junit.Assert;
@@ -96,7 +105,6 @@ public class ViewFileSystemBaseTest {
     // in the test root
     
     // Set up the defaultMT in the config with our mount point links
-    //Configuration conf = new Configuration();
     conf = ViewFileSystemTestSetup.createConfig();
     setupMountPoints();
     fsView = FileSystem.get(FsConstants.VIEWFS_URI, conf);
@@ -720,4 +728,79 @@ public class ViewFileSystemBaseTest {
     Assert.assertTrue("Other-readable permission not set!",
         perms.getOtherAction().implies(FsAction.READ));
   }
+
+  /**
+   * Verify the behavior of ACL operations on paths above the root of
+   * any mount table entry.
+   */
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalModifyAclEntries() throws IOException {
+    fsView.modifyAclEntries(new Path("/internalDir"),
+        new ArrayList<AclEntry>());
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalRemoveAclEntries() throws IOException {
+    fsView.removeAclEntries(new Path("/internalDir"),
+        new ArrayList<AclEntry>());
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalRemoveDefaultAcl() throws IOException {
+    fsView.removeDefaultAcl(new Path("/internalDir"));
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalRemoveAcl() throws IOException {
+    fsView.removeAcl(new Path("/internalDir"));
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalSetAcl() throws IOException {
+    fsView.setAcl(new Path("/internalDir"), new ArrayList<AclEntry>());
+  }
+
+  @Test
+  public void testInternalGetAclStatus() throws IOException {
+    final UserGroupInformation currentUser =
+        UserGroupInformation.getCurrentUser();
+    AclStatus aclStatus = fsView.getAclStatus(new Path("/internalDir"));
+    assertEquals(aclStatus.getOwner(), currentUser.getUserName());
+    assertEquals(aclStatus.getGroup(), currentUser.getGroupNames()[0]);
+    assertEquals(aclStatus.getEntries(),
+        AclUtil.getMinimalAcl(PERMISSION_555));
+    assertFalse(aclStatus.isStickyBit());
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalSetXAttr() throws IOException {
+    fsView.setXAttr(new Path("/internalDir"), "xattrName", null);
+  }
+
+  @Test(expected=NotInMountpointException.class)
+  public void testInternalGetXAttr() throws IOException {
+    fsView.getXAttr(new Path("/internalDir"), "xattrName");
+  }
+
+  @Test(expected=NotInMountpointException.class)
+  public void testInternalGetXAttrs() throws IOException {
+    fsView.getXAttrs(new Path("/internalDir"));
+  }
+
+  @Test(expected=NotInMountpointException.class)
+  public void testInternalGetXAttrsWithNames() throws IOException {
+    fsView.getXAttrs(new Path("/internalDir"), new ArrayList<String>());
+  }
+
+  @Test(expected=NotInMountpointException.class)
+  public void testInternalListXAttr() throws IOException {
+    fsView.listXAttrs(new Path("/internalDir"));
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalRemoveXAttr() throws IOException {
+    fsView.removeXAttr(new Path("/internalDir"), "xattrName");
+  }
+
 }
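
A note on the pattern above: ViewFileSystem synthesizes every directory that
sits above a mount point (such as "/internalDir") as a read-only "internal"
directory. The new tests pin that behavior down for the ACL and XAttr APIs:
mutating calls fail with AccessControlException, getAclStatus() answers with
a synthetic minimal ACL derived from PERMISSION_555, and the XAttr getters
throw NotInMountpointException because there is no backing file system to
ask. A minimal sketch of how such an internal directory arises (the link
target URI is illustrative, and exception handling is omitted):

    Configuration conf = ViewFileSystemTestSetup.createConfig();
    // Registering a link below "/internalDir" makes "/internalDir" itself an
    // internal, mount-table-only directory with no backing file system.
    ConfigUtil.addLink(conf, "/internalDir/linkToDir2",
        new URI("file:///tmp/dir2"));
    FileSystem fsView = FileSystem.get(FsConstants.VIEWFS_URI, conf);
    // fsView.setAcl(new Path("/internalDir"), ...)   -> AccessControlException
    // fsView.getXAttr(new Path("/internalDir"), "n") -> NotInMountpointException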

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsBaseTest.java Fri Jul 25 20:33:09 2014
@@ -22,10 +22,14 @@ import static org.apache.hadoop.fs.FileC
 import static org.apache.hadoop.fs.FileContextTestHelper.exists;
 import static org.apache.hadoop.fs.FileContextTestHelper.isDir;
 import static org.apache.hadoop.fs.FileContextTestHelper.isFile;
+import static org.apache.hadoop.fs.viewfs.Constants.PERMISSION_555;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
@@ -39,8 +43,12 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FsConstants;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.UnresolvedLinkException;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.AclUtil;
 import org.apache.hadoop.fs.viewfs.ViewFs.MountPoint;
 import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.junit.After;
 import org.junit.Assert;
@@ -695,4 +703,78 @@ public class ViewFsBaseTest {
   public void testInternalSetOwner() throws IOException {
     fcView.setOwner(new Path("/internalDir"), "foo", "bar");
   }
+
+  /**
+   * Verify the behavior of ACL operations on paths above the root of
+   * any mount table entry.
+   */
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalModifyAclEntries() throws IOException {
+    fcView.modifyAclEntries(new Path("/internalDir"),
+        new ArrayList<AclEntry>());
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalRemoveAclEntries() throws IOException {
+    fcView.removeAclEntries(new Path("/internalDir"),
+        new ArrayList<AclEntry>());
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalRemoveDefaultAcl() throws IOException {
+    fcView.removeDefaultAcl(new Path("/internalDir"));
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalRemoveAcl() throws IOException {
+    fcView.removeAcl(new Path("/internalDir"));
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalSetAcl() throws IOException {
+    fcView.setAcl(new Path("/internalDir"), new ArrayList<AclEntry>());
+  }
+
+  @Test
+  public void testInternalGetAclStatus() throws IOException {
+    final UserGroupInformation currentUser =
+        UserGroupInformation.getCurrentUser();
+    AclStatus aclStatus = fcView.getAclStatus(new Path("/internalDir"));
+    assertEquals(aclStatus.getOwner(), currentUser.getUserName());
+    assertEquals(aclStatus.getGroup(), currentUser.getGroupNames()[0]);
+    assertEquals(aclStatus.getEntries(),
+        AclUtil.getMinimalAcl(PERMISSION_555));
+    assertFalse(aclStatus.isStickyBit());
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalSetXAttr() throws IOException {
+    fcView.setXAttr(new Path("/internalDir"), "xattrName", null);
+  }
+
+  @Test(expected=NotInMountpointException.class)
+  public void testInternalGetXAttr() throws IOException {
+    fcView.getXAttr(new Path("/internalDir"), "xattrName");
+  }
+
+  @Test(expected=NotInMountpointException.class)
+  public void testInternalGetXAttrs() throws IOException {
+    fcView.getXAttrs(new Path("/internalDir"));
+  }
+
+  @Test(expected=NotInMountpointException.class)
+  public void testInternalGetXAttrsWithNames() throws IOException {
+    fcView.getXAttrs(new Path("/internalDir"), new ArrayList<String>());
+  }
+
+  @Test(expected=NotInMountpointException.class)
+  public void testInternalListXAttr() throws IOException {
+    fcView.listXAttrs(new Path("/internalDir"));
+  }
+
+  @Test(expected=AccessControlException.class)
+  public void testInternalRemoveXAttr() throws IOException {
+    fcView.removeXAttr(new Path("/internalDir"), "xattrName");
+  }
 }

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java Fri Jul 25 20:33:09 2014
@@ -24,6 +24,7 @@ import java.nio.BufferUnderflowException
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
 import java.util.Random;
+import com.google.common.base.Charsets;
 import com.google.common.primitives.Bytes;
 
 /** Unit tests for LargeUTF8. */
@@ -363,6 +364,27 @@ public class TestText extends TestCase {
       fail("testReadWriteOperations error !!!");
     }        
   }
+
+  public void testReadWithKnownLength() throws IOException {
+    String line = "hello world";
+    byte[] inputBytes = line.getBytes(Charsets.UTF_8);
+    DataInputBuffer in = new DataInputBuffer();
+    Text text = new Text();
+
+    in.reset(inputBytes, inputBytes.length);
+    text.readWithKnownLength(in, 5);
+    assertEquals("hello", text.toString());
+
+    // Read longer length, make sure it lengthens
+    in.reset(inputBytes, inputBytes.length);
+    text.readWithKnownLength(in, 7);
+    assertEquals("hello w", text.toString());
+
+    // Read shorter length, make sure it shortens
+    in.reset(inputBytes, inputBytes.length);
+    text.readWithKnownLength(in, 2);
+    assertEquals("he", text.toString());
+  }
   
   /**
    * test {@code Text.bytesToCodePoint(bytes) } 
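
For context on readWithKnownLength(): Text's standard serialization, which
readFields() consumes, is a vint-encoded byte length followed by the UTF-8
bytes. readWithKnownLength(in, len) is for streams that carry the bytes only,
with the length known to the caller out of band; as the test shows, each call
replaces the Text contents with exactly len bytes. A minimal sketch (names
are illustrative):

    DataInputBuffer in = new DataInputBuffer();
    byte[] raw = "hello world".getBytes(Charsets.UTF_8);
    in.reset(raw, raw.length);
    Text t = new Text();
    t.readWithKnownLength(in, 5);  // consumes no length prefix; t is "hello"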

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java Fri Jul 25 20:33:09 2014
@@ -327,8 +327,8 @@ public class MiniRPCBenchmark {
     String shortUserName =
       UserGroupInformation.createRemoteUser(user).getShortUserName();
     try {
-      conf.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(shortUserName),
-          GROUP_NAME_1);
+      conf.setStrings(DefaultImpersonationProvider.getTestProvider().
+              getProxySuperuserGroupConfKey(shortUserName), GROUP_NAME_1);
       configureSuperUserIPAddresses(conf, shortUserName);
       // start the server
       miniServer = new MiniServer(conf, user, keytabFile);
@@ -411,7 +411,7 @@ public class MiniRPCBenchmark {
     }
     builder.append("127.0.1.1,");
     builder.append(InetAddress.getLocalHost().getCanonicalHostName());
-    conf.setStrings(DefaultImpersonationProvider.getProxySuperuserIpConfKey(superUserShortName),
-        builder.toString());
+    conf.setStrings(DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserIpConfKey(superUserShortName), builder.toString());
   }
 }

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java Fri Jul 25 20:33:09 2014
@@ -496,6 +496,8 @@ public class TestRPC {
       caught = true;
     }
     assertTrue(caught);
+    rb = getMetrics(server.rpcDetailedMetrics.name());
+    assertCounter("IOExceptionNumOps", 1L, rb);
 
     proxy.testServerGet();
 

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java Fri Jul 25 20:33:09 2014
@@ -60,12 +60,12 @@ public class TestGangliaMetrics {
   @Test
   public void testTagsForPrefix() throws Exception {
     ConfigBuilder cb = new ConfigBuilder()
-      .add("Test.sink.ganglia.tagsForPrefix.all", "*")
-      .add("Test.sink.ganglia.tagsForPrefix.some", "NumActiveSinks, " +
+      .add("test.sink.ganglia.tagsForPrefix.all", "*")
+      .add("test.sink.ganglia.tagsForPrefix.some", "NumActiveSinks, " +
               "NumActiveSources")
-      .add("Test.sink.ganglia.tagsForPrefix.none", "");
+      .add("test.sink.ganglia.tagsForPrefix.none", "");
     GangliaSink30 sink = new GangliaSink30();
-    sink.init(cb.subset("Test.sink.ganglia"));
+    sink.init(cb.subset("test.sink.ganglia"));
 
     List<MetricsTag> tags = new ArrayList<MetricsTag>();
     tags.add(new MetricsTag(MsInfo.Context, "all"));
@@ -98,8 +98,8 @@ public class TestGangliaMetrics {
   
   @Test public void testGangliaMetrics2() throws Exception {
     ConfigBuilder cb = new ConfigBuilder().add("default.period", 10)
-        .add("Test.sink.gsink30.context", "test") // filter out only "test"
-        .add("Test.sink.gsink31.context", "test") // filter out only "test"
+        .add("test.sink.gsink30.context", "test") // filter out only "test"
+        .add("test.sink.gsink31.context", "test") // filter out only "test"
         .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
 
     MetricsSystemImpl ms = new MetricsSystemImpl("Test");

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java Fri Jul 25 20:33:09 2014
@@ -88,11 +88,11 @@ public class TestMetricsSystemImpl {
     DefaultMetricsSystem.shutdown();
     new ConfigBuilder().add("*.period", 8)
         //.add("test.sink.plugin.urls", getPluginUrlsAsString())
-        .add("Test.sink.test.class", TestSink.class.getName())
-        .add("Test.*.source.filter.exclude", "s0")
-        .add("Test.source.s1.metric.filter.exclude", "X*")
-        .add("Test.sink.sink1.metric.filter.exclude", "Y*")
-        .add("Test.sink.sink2.metric.filter.exclude", "Y*")
+        .add("test.sink.test.class", TestSink.class.getName())
+        .add("test.*.source.filter.exclude", "s0")
+        .add("test.source.s1.metric.filter.exclude", "X*")
+        .add("test.sink.sink1.metric.filter.exclude", "Y*")
+        .add("test.sink.sink2.metric.filter.exclude", "Y*")
         .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
     MetricsSystemImpl ms = new MetricsSystemImpl("Test");
     ms.start();
@@ -130,11 +130,11 @@ public class TestMetricsSystemImpl {
     DefaultMetricsSystem.shutdown(); 
     new ConfigBuilder().add("*.period", 8)
         //.add("test.sink.plugin.urls", getPluginUrlsAsString())
-        .add("Test.sink.test.class", TestSink.class.getName())
-        .add("Test.*.source.filter.exclude", "s0")
-        .add("Test.source.s1.metric.filter.exclude", "X*")
-        .add("Test.sink.sink1.metric.filter.exclude", "Y*")
-        .add("Test.sink.sink2.metric.filter.exclude", "Y*")
+        .add("test.sink.test.class", TestSink.class.getName())
+        .add("test.*.source.filter.exclude", "s0")
+        .add("test.source.s1.metric.filter.exclude", "X*")
+        .add("test.sink.sink1.metric.filter.exclude", "Y*")
+        .add("test.sink.sink2.metric.filter.exclude", "Y*")
         .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
     MetricsSystemImpl ms = new MetricsSystemImpl("Test");
     ms.start();
@@ -169,13 +169,14 @@ public class TestMetricsSystemImpl {
   @Test public void testMultiThreadedPublish() throws Exception {
     final int numThreads = 10;
     new ConfigBuilder().add("*.period", 80)
-      .add("Test.sink.Collector." + MetricsConfig.QUEUE_CAPACITY_KEY,
+      .add("test.sink.collector." + MetricsConfig.QUEUE_CAPACITY_KEY,
               numThreads)
       .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
     final MetricsSystemImpl ms = new MetricsSystemImpl("Test");
     ms.start();
+
     final CollectingSink sink = new CollectingSink(numThreads);
-    ms.registerSink("Collector",
+    ms.registerSink("collector",
         "Collector of values from all threads.", sink);
     final TestSource[] sources = new TestSource[numThreads];
     final Thread[] threads = new Thread[numThreads];
@@ -280,10 +281,10 @@ public class TestMetricsSystemImpl {
 
   @Test public void testHangingSink() {
     new ConfigBuilder().add("*.period", 8)
-      .add("Test.sink.test.class", TestSink.class.getName())
-      .add("Test.sink.hanging.retry.delay", "1")
-      .add("Test.sink.hanging.retry.backoff", "1.01")
-      .add("Test.sink.hanging.retry.count", "0")
+      .add("test.sink.test.class", TestSink.class.getName())
+      .add("test.sink.hanging.retry.delay", "1")
+      .add("test.sink.hanging.retry.backoff", "1.01")
+      .add("test.sink.hanging.retry.count", "0")
       .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
     MetricsSystemImpl ms = new MetricsSystemImpl("Test");
     ms.start();
@@ -379,6 +380,23 @@ public class TestMetricsSystemImpl {
     ms.shutdown();
   }
 
+  @Test public void testUnregisterSource() {
+    MetricsSystem ms = new MetricsSystemImpl();
+    TestSource ts1 = new TestSource("ts1");
+    TestSource ts2 = new TestSource("ts2");
+    ms.register("ts1", "", ts1);
+    ms.register("ts2", "", ts2);
+    MetricsSource s1 = ms.getSource("ts1");
+    assertNotNull(s1);
+    // should work when metrics system is not started
+    ms.unregisterSource("ts1");
+    s1 = ms.getSource("ts1");
+    assertNull(s1);
+    MetricsSource s2 = ms.getSource("ts2");
+    assertNotNull(s2);
+    ms.shutdown();
+  }
+
   private void checkMetricsRecords(List<MetricsRecord> recs) {
     LOG.debug(recs);
     MetricsRecord r = recs.get(0);

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java Fri Jul 25 20:33:09 2014
@@ -105,7 +105,7 @@ public class TestNetworkTopologyWithNode
     testNodes[2] = dataNodes[3];
     testNodes[3] = dataNodes[0];
     cluster.sortByDistance(dataNodes[0], testNodes,
-        testNodes.length, 0xDEADBEEF);
+        testNodes.length, 0xDEADBEEF, false);
     assertTrue(testNodes[0] == dataNodes[0]);
     assertTrue(testNodes[1] == dataNodes[1]);
     assertTrue(testNodes[2] == dataNodes[2]);
@@ -117,7 +117,7 @@ public class TestNetworkTopologyWithNode
     testNodes[2] = dataNodes[1];
     testNodes[3] = dataNodes[0];
     cluster.sortByDistance(dataNodes[0], testNodes,
-        testNodes.length, 0xDEADBEEF);
+        testNodes.length, 0xDEADBEEF, false);
     assertTrue(testNodes[0] == dataNodes[0]);
     assertTrue(testNodes[1] == dataNodes[1]);
 
@@ -127,7 +127,7 @@ public class TestNetworkTopologyWithNode
     testNodes[2] = dataNodes[2];
     testNodes[3] = dataNodes[0];
     cluster.sortByDistance(dataNodes[0], testNodes,
-        testNodes.length, 0xDEADBEEF);
+        testNodes.length, 0xDEADBEEF, false);
     assertTrue(testNodes[0] == dataNodes[0]);
     assertTrue(testNodes[1] == dataNodes[2]);
 
@@ -137,7 +137,7 @@ public class TestNetworkTopologyWithNode
     testNodes[2] = dataNodes[2];
     testNodes[3] = dataNodes[0];
     cluster.sortByDistance(computeNode, testNodes,
-        testNodes.length, 0xDEADBEEF);
+        testNodes.length, 0xDEADBEEF, false);
     assertTrue(testNodes[0] == dataNodes[0]);
     assertTrue(testNodes[1] == dataNodes[2]);
   }

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java Fri Jul 25 20:33:09 2014
@@ -101,7 +101,8 @@ public class TestDoAsEffectiveUser {
     builder.append("127.0.1.1,");
     builder.append(InetAddress.getLocalHost().getCanonicalHostName());
     LOG.info("Local Ip addresses: "+builder.toString());
-    conf.setStrings(DefaultImpersonationProvider.getProxySuperuserIpConfKey(superUserShortName),
+    conf.setStrings(DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserIpConfKey(superUserShortName),
         builder.toString());
   }
   
@@ -181,8 +182,8 @@ public class TestDoAsEffectiveUser {
   @Test(timeout=4000)
   public void testRealUserSetup() throws IOException {
     final Configuration conf = new Configuration();
-    conf.setStrings(DefaultImpersonationProvider
-        .getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group1");
+    conf.setStrings(DefaultImpersonationProvider.getTestProvider().
+        getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group1");
     configureSuperUserIPAddresses(conf, REAL_USER_SHORT_NAME);
     Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
         .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)
@@ -214,7 +215,8 @@ public class TestDoAsEffectiveUser {
   public void testRealUserAuthorizationSuccess() throws IOException {
     final Configuration conf = new Configuration();
     configureSuperUserIPAddresses(conf, REAL_USER_SHORT_NAME);
-    conf.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
+    conf.setStrings(DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
         "group1");
     Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
         .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)
@@ -248,9 +250,11 @@ public class TestDoAsEffectiveUser {
   @Test
   public void testRealUserIPAuthorizationFailure() throws IOException {
     final Configuration conf = new Configuration();
-    conf.setStrings(DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_SHORT_NAME),
+    conf.setStrings(DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserIpConfKey(REAL_USER_SHORT_NAME),
         "20.20.20.20"); //Authorized IP address
-    conf.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
+    conf.setStrings(DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
         "group1");
     Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
         .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)
@@ -293,8 +297,8 @@ public class TestDoAsEffectiveUser {
   @Test
   public void testRealUserIPNotSpecified() throws IOException {
     final Configuration conf = new Configuration();
-    conf.setStrings(DefaultImpersonationProvider
-        .getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group1");
+    conf.setStrings(DefaultImpersonationProvider.getTestProvider().
+        getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group1");
     Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
         .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)
         .setNumHandlers(2).setVerbose(false).build();
@@ -377,7 +381,8 @@ public class TestDoAsEffectiveUser {
   public void testRealUserGroupAuthorizationFailure() throws IOException {
     final Configuration conf = new Configuration();
     configureSuperUserIPAddresses(conf, REAL_USER_SHORT_NAME);
-    conf.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
+    conf.setStrings(DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
         "group3");
     Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
         .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java Fri Jul 25 20:33:09 2014
@@ -26,8 +26,11 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Set;
 
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.util.FakeTimer;
 import org.junit.Before;
 import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.fail;
@@ -94,6 +97,9 @@ public class TestGroupsCaching {
 
   @Test
   public void testGroupsCaching() throws Exception {
+    // Disable negative cache.
+    conf.setLong(
+        CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 0);
     Groups groups = new Groups(conf);
     groups.cacheGroupsAdd(Arrays.asList(myGroups));
     groups.refresh();
@@ -163,4 +169,54 @@ public class TestGroupsCaching {
         FakeunPrivilegedGroupMapping.invoked);
 
   }
+
+  @Test
+  public void testNegativeGroupCaching() throws Exception {
+    final String user = "negcache";
+    final String failMessage = "Did not throw IOException: ";
+    conf.setLong(
+        CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 2);
+    FakeTimer timer = new FakeTimer();
+    Groups groups = new Groups(conf, timer);
+    groups.cacheGroupsAdd(Arrays.asList(myGroups));
+    groups.refresh();
+    FakeGroupMapping.addToBlackList(user);
+
+    // In the first attempt, the user will be put in the negative cache.
+    try {
+      groups.getGroups(user);
+      fail(failMessage + "Failed to obtain groups from FakeGroupMapping.");
+    } catch (IOException e) {
+      // Expect an exception on the first lookup; as a side effect the user
+      // is put into the negative cache.
+      GenericTestUtils.assertExceptionContains("No groups found for user", e);
+    }
+
+    // The second time, the user is in the negative cache.
+    try {
+      groups.getGroups(user);
+      fail(failMessage + "The user is in the negative cache.");
+    } catch (IOException e) {
+      GenericTestUtils.assertExceptionContains("No groups found for user", e);
+    }
+
+    // Brings back the backend user-group mapping service.
+    FakeGroupMapping.clearBlackList();
+
+    // It should still get groups from the negative cache.
+    try {
+      groups.getGroups(user);
+      fail(failMessage + "The user is still in the negative cache, even " +
+          "FakeGroupMapping has resumed.");
+    } catch (IOException e) {
+      GenericTestUtils.assertExceptionContains("No groups found for user", e);
+    }
+
+    // Let the elements in the negative cache expire.
+    timer.advance(4 * 1000);
+
+    // The user's entry in the negative cache has expired, so a fresh copy
+    // of the groups is fetched.
+    assertEquals(Arrays.asList(myGroups), groups.getGroups(user));
+  }
 }
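
The negative cache exercised above is controlled by a single knob: a positive
value for HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS makes Groups remember,
for that many seconds, users for whom the mapping service found no groups, so
repeated lookups fail fast; 0 disables it, which is why testGroupsCaching now
sets it to 0 explicitly. The Groups(conf, timer) constructor taking a
FakeTimer lets the test advance the clock deterministically instead of
sleeping. A minimal sketch of enabling the cache in ordinary code
(illustrative TTL value):

    Configuration conf = new Configuration();
    // Cache "no groups found" results for 30 seconds.
    conf.setLong(
        CommonConfigurationKeys.HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS, 30);
    Groups groups = new Groups(conf);
    // groups.getGroups("unknownUser") now fails immediately on repeat
    // lookups until the 30-second entry expires.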

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java Fri Jul 25 20:33:09 2014
@@ -128,6 +128,22 @@ public class TestCredShell {
   }
   
   @Test
+  public void testPromptForCredentialWithEmptyPasswd() throws Exception {
+    String[] args1 = {"create", "credential1", "--provider", 
+        "jceks://file" + tmpDir + "/credstore.jceks"};
+    ArrayList<String> passwords = new ArrayList<String>();
+    passwords.add(null);
+    passwords.add("p@ssw0rd");
+    int rc = 0;
+    CredentialShell shell = new CredentialShell();
+    shell.setConf(new Configuration());
+    shell.setPasswordReader(new MockPasswordReader(passwords));
+    rc = shell.run(args1);
+    assertEquals(outContent.toString(), -1, rc);
+    assertTrue(outContent.toString().contains("Passwords don't match"));
+  }
+
+  @Test
   public void testPromptForCredential() throws Exception {
     String[] args1 = {"create", "credential1", "--provider", 
         "jceks://file" + tmpDir + "/credstore.jceks"};
@@ -142,7 +158,7 @@ public class TestCredShell {
     assertEquals(0, rc);
     assertTrue(outContent.toString().contains("credential1 has been successfully " +
         "created."));
-
+    
     String[] args2 = {"delete", "credential1", "--provider", 
         "jceks://file" + tmpDir + "/credstore.jceks"};
     rc = shell.run(args2);
@@ -162,7 +178,7 @@ public class TestCredShell {
     public char[] readPassword(String prompt) {
       if (passwords.size() == 0) return null;
       String pass = passwords.remove(0);
-      return pass.toCharArray();
+      return pass == null ? null : pass.toCharArray();
     }
 
     @Override

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java Fri Jul 25 20:33:09 2014
@@ -111,10 +111,12 @@ public class TestProxyUsers {
       groupMappingClassName);
 
     conf.set(
-        DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+        DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserGroupConfKey(REAL_USER_NAME),
         StringUtils.join(",", Arrays.asList(NETGROUP_NAMES)));
     conf.set(
-        DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+        DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserIpConfKey(REAL_USER_NAME),
         PROXY_IP);
     
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
@@ -135,10 +137,12 @@ public class TestProxyUsers {
   public void testProxyUsers() throws Exception {
     Configuration conf = new Configuration();
     conf.set(
-      DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getTestProvider().
+          getProxySuperuserGroupConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
     conf.set(
-      DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getTestProvider().
+          getProxySuperuserIpConfKey(REAL_USER_NAME),
       PROXY_IP);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
 
@@ -168,10 +172,12 @@ public class TestProxyUsers {
   public void testProxyUsersWithUserConf() throws Exception {
     Configuration conf = new Configuration();
     conf.set(
-        DefaultImpersonationProvider.getProxySuperuserUserConfKey(REAL_USER_NAME),
+        DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserUserConfKey(REAL_USER_NAME),
         StringUtils.join(",", Arrays.asList(AUTHORIZED_PROXY_USER_NAME)));
     conf.set(
-        DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+        DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserIpConfKey(REAL_USER_NAME),
         PROXY_IP);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
 
@@ -202,10 +208,12 @@ public class TestProxyUsers {
   public void testWildcardGroup() {
     Configuration conf = new Configuration();
     conf.set(
-      DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getTestProvider().
+          getProxySuperuserGroupConfKey(REAL_USER_NAME),
       "*");
     conf.set(
-      DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getTestProvider().
+          getProxySuperuserIpConfKey(REAL_USER_NAME),
       PROXY_IP);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
 
@@ -236,10 +244,12 @@ public class TestProxyUsers {
   public void testWildcardUser() {
     Configuration conf = new Configuration();
     conf.set(
-      DefaultImpersonationProvider.getProxySuperuserUserConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getTestProvider().
+          getProxySuperuserUserConfKey(REAL_USER_NAME),
       "*");
     conf.set(
-      DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getTestProvider().
+          getProxySuperuserIpConfKey(REAL_USER_NAME),
       PROXY_IP);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
 
@@ -270,10 +280,12 @@ public class TestProxyUsers {
   public void testWildcardIP() {
     Configuration conf = new Configuration();
     conf.set(
-      DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getTestProvider().
+          getProxySuperuserGroupConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
     conf.set(
-      DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getTestProvider().
+          getProxySuperuserIpConfKey(REAL_USER_NAME),
       "*");
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
 
@@ -301,10 +313,12 @@ public class TestProxyUsers {
   public void testIPRange() {
     Configuration conf = new Configuration();
     conf.set(
-        DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+        DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserGroupConfKey(REAL_USER_NAME),
         "*");
     conf.set(
-        DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+        DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserIpConfKey(REAL_USER_NAME),
         PROXY_IP_RANGE);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
 
@@ -324,16 +338,19 @@ public class TestProxyUsers {
   public void testWithDuplicateProxyGroups() throws Exception {
     Configuration conf = new Configuration();
     conf.set(
-      DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getTestProvider().
+          getProxySuperuserGroupConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(GROUP_NAMES,GROUP_NAMES)));
     conf.set(
-      DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getTestProvider().
+          getProxySuperuserIpConfKey(REAL_USER_NAME),
       PROXY_IP);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
     
     Collection<String> groupsToBeProxied = 
         ProxyUsers.getDefaultImpersonationProvider().getProxyGroups().get(
-        DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME));
+        DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserGroupConfKey(REAL_USER_NAME));
     
     assertEquals (1,groupsToBeProxied.size());
   }
@@ -342,16 +359,19 @@ public class TestProxyUsers {
   public void testWithDuplicateProxyHosts() throws Exception {
     Configuration conf = new Configuration();
     conf.set(
-      DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getTestProvider()
+          .getProxySuperuserGroupConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
     conf.set(
-      DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getTestProvider().
+          getProxySuperuserIpConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(PROXY_IP,PROXY_IP)));
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
     
     Collection<String> hosts = 
         ProxyUsers.getDefaultImpersonationProvider().getProxyHosts().get(
-        DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME));
+        DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserIpConfKey(REAL_USER_NAME));
     
     assertEquals (1,hosts.size());
   }
@@ -391,26 +411,73 @@ public class TestProxyUsers {
   public void testWithProxyGroupsAndUsersWithSpaces() throws Exception {
     Configuration conf = new Configuration();
     conf.set(
-        DefaultImpersonationProvider.getProxySuperuserUserConfKey(REAL_USER_NAME),
+        DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserUserConfKey(REAL_USER_NAME),
         StringUtils.join(",", Arrays.asList(PROXY_USER_NAME + " ",AUTHORIZED_PROXY_USER_NAME, "ONEMORE")));
 
     conf.set(
-      DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getTestProvider().
+          getProxySuperuserGroupConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
     
     conf.set(
-      DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      DefaultImpersonationProvider.getTestProvider().
+          getProxySuperuserIpConfKey(REAL_USER_NAME),
       PROXY_IP);
     
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
     
     Collection<String> groupsToBeProxied = 
         ProxyUsers.getDefaultImpersonationProvider().getProxyGroups().get(
-        DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME));
+        DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserGroupConfKey(REAL_USER_NAME));
     
     assertEquals (GROUP_NAMES.length, groupsToBeProxied.size());
   }
 
+  @Test(expected = IllegalArgumentException.class)
+  public void testProxyUsersWithNullPrefix() throws Exception {
+    ProxyUsers.refreshSuperUserGroupsConfiguration(new Configuration(false), 
+        null);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testProxyUsersWithEmptyPrefix() throws Exception {
+    ProxyUsers.refreshSuperUserGroupsConfiguration(new Configuration(false), 
+        "");
+  }
+
+  @Test
+  public void testProxyUsersWithCustomPrefix() throws Exception {
+    Configuration conf = new Configuration(false);
+    conf.set("x." + REAL_USER_NAME + ".users",
+        StringUtils.join(",", Arrays.asList(AUTHORIZED_PROXY_USER_NAME)));
+    conf.set("x." + REAL_USER_NAME+ ".hosts", PROXY_IP);
+    ProxyUsers.refreshSuperUserGroupsConfiguration(conf, "x");
+
+
+    // First try proxying a user that's allowed
+    UserGroupInformation realUserUgi = UserGroupInformation
+        .createRemoteUser(REAL_USER_NAME);
+    UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+        AUTHORIZED_PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
+
+    // From good IP
+    assertAuthorized(proxyUserUgi, "1.2.3.4");
+    // From bad IP
+    assertNotAuthorized(proxyUserUgi, "1.2.3.5");
+
+    // Now try proxying a user that's not allowed
+    realUserUgi = UserGroupInformation.createRemoteUser(REAL_USER_NAME);
+    proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+        PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
+
+    // From good IP
+    assertNotAuthorized(proxyUserUgi, "1.2.3.4");
+    // From bad IP
+    assertNotAuthorized(proxyUserUgi, "1.2.3.5");
+  }
+
 
   private void assertNotAuthorized(UserGroupInformation proxyUgi, String host) {
     try {
@@ -430,6 +497,11 @@ public class TestProxyUsers {
   }
 
   static class TestDummyImpersonationProvider implements ImpersonationProvider {
+
+    @Override
+    public void init(String configurationPrefix) {
+    }
+
     /**
      * Authorize a user (superuser) to impersonate another user (user1) if the 
      * superuser belongs to the group "sudo_user1" .
@@ -460,11 +532,13 @@ public class TestProxyUsers {
   public static void loadTest(String ipString, int testRange) {
     Configuration conf = new Configuration();
     conf.set(
-        DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+        DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserGroupConfKey(REAL_USER_NAME),
         StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
 
     conf.set(
-        DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+        DefaultImpersonationProvider.getTestProvider().
+            getProxySuperuserIpConfKey(REAL_USER_NAME),
         ipString
         );
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
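
The mechanical change running through MiniRPCBenchmark, TestDoAsEffectiveUser
and this file is the same: the getProxySuperuser*ConfKey helpers moved from
static methods to instance methods, reached in tests through
DefaultImpersonationProvider.getTestProvider(). The reason is visible in the
new tests: ImpersonationProvider now carries an init(configurationPrefix)
step and refreshSuperUserGroupsConfiguration() accepts a prefix, so the
proxy-user keys are no longer hard-wired to one namespace (the custom prefix
"x" above yields "x.<user>.users" and "x.<user>.hosts"). A minimal sketch of
the key shapes, assuming the default "hadoop.proxyuser" prefix and an
illustrative user name:

    Configuration conf = new Configuration();
    // Classic fixed-prefix keys:
    conf.set("hadoop.proxyuser.realUser.groups", "group1");
    conf.set("hadoop.proxyuser.realUser.hosts", "1.2.3.4");
    // Equivalent prefix-aware form used throughout these tests:
    conf.set(DefaultImpersonationProvider.getTestProvider()
        .getProxySuperuserGroupConfKey("realUser"), "group1");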

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/conf/kms-acls.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/conf/kms-acls.xml?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/conf/kms-acls.xml (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/conf/kms-acls.xml Fri Jul 25 20:33:09 2014
@@ -79,4 +79,19 @@
     </description>
   </property>
 
+  <property>
+    <name>hadoop.kms.acl.GENERATE_EEK</name>
+    <value>*</value>
+    <description>
+      ACL for generateEncryptedKey CryptoExtension operations
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.acl.DECRYPT_EEK</name>
+    <value>*</value>
+    <description>
+      ACL for decryptEncryptedKey CryptoExtension operations
+    </description>
+  </property>
 </configuration>

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java Fri Jul 25 20:33:09 2014
@@ -20,6 +20,8 @@ package org.apache.hadoop.crypto.key.kms
 import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.crypto.key.KeyProvider;
+import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
+import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;
 import org.apache.hadoop.crypto.key.kms.KMSRESTConstants;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
@@ -29,6 +31,7 @@ import org.apache.hadoop.util.StringUtil
 
 import javax.ws.rs.Consumes;
 import javax.ws.rs.DELETE;
+import javax.ws.rs.DefaultValue;
 import javax.ws.rs.GET;
 import javax.ws.rs.POST;
 import javax.ws.rs.Path;
@@ -39,10 +42,14 @@ import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import javax.ws.rs.core.SecurityContext;
+
+import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.security.Principal;
 import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 
@@ -61,8 +68,10 @@ public class KMS {
   private static final String GET_CURRENT_KEY = "GET_CURRENT_KEY";
   private static final String GET_KEY_VERSIONS = "GET_KEY_VERSIONS";
   private static final String GET_METADATA = "GET_METADATA";
+  private static final String GENERATE_EEK = "GENERATE_EEK";
+  private static final String DECRYPT_EEK = "DECRYPT_EEK";
 
-  private KeyProvider provider;
+  private KeyProviderCryptoExtension provider;
 
   public KMS() throws Exception {
     provider = KMSWebApp.getKeyProvider();
@@ -289,6 +298,92 @@ public class KMS {
     return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build();
   }
 
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @GET
+  @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" +
+      KMSRESTConstants.EEK_SUB_RESOURCE)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response generateEncryptedKeys(
+          @Context SecurityContext securityContext,
+          @PathParam("name") String name,
+          @QueryParam(KMSRESTConstants.EEK_OP) String edekOp,
+          @DefaultValue("1")
+          @QueryParam(KMSRESTConstants.EEK_NUM_KEYS) int numKeys)
+          throws Exception {
+    Principal user = getPrincipal(securityContext);
+    KMSClientProvider.checkNotEmpty(name, "name");
+    KMSClientProvider.checkNotNull(edekOp, "eekOp");
+
+    Object retJSON;
+    if (edekOp.equals(KMSRESTConstants.EEK_GENERATE)) {
+      assertAccess(KMSACLs.Type.GENERATE_EEK, user, GENERATE_EEK, name);
+
+      List<EncryptedKeyVersion> retEdeks =
+          new LinkedList<EncryptedKeyVersion>();
+      try {
+        for (int i = 0; i < numKeys; i ++) {
+          retEdeks.add(provider.generateEncryptedKey(name));
+        }
+      } catch (Exception e) {
+        throw new IOException(e);
+      }
+      KMSAudit.ok(user, GENERATE_EEK, name, "");
+      retJSON = new ArrayList();
+      for (EncryptedKeyVersion edek : retEdeks) {
+        ((ArrayList)retJSON).add(KMSServerJSONUtils.toJSON(edek));
+      }
+    } else {
+      throw new IllegalArgumentException("Wrong " + KMSRESTConstants.EEK_OP +
+          " value, it must be " + KMSRESTConstants.EEK_GENERATE + " or " +
+          KMSRESTConstants.EEK_DECRYPT);
+    }
+    KMSWebApp.getGenerateEEKCallsMeter().mark();
+    return Response.ok().type(MediaType.APPLICATION_JSON).entity(retJSON)
+        .build();
+  }
+
+  @SuppressWarnings("rawtypes")
+  @POST
+  @Path(KMSRESTConstants.KEY_VERSION_RESOURCE + "/{versionName:.*}/" +
+      KMSRESTConstants.EEK_SUB_RESOURCE)
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response decryptEncryptedKey(@Context SecurityContext securityContext,
+      @PathParam("versionName") String versionName,
+      @QueryParam(KMSRESTConstants.EEK_OP) String eekOp,
+      Map jsonPayload)
+      throws Exception {
+    Principal user = getPrincipal(securityContext);
+    KMSClientProvider.checkNotEmpty(versionName, "versionName");
+    KMSClientProvider.checkNotNull(eekOp, "eekOp");
+
+    String keyName = (String) jsonPayload.get(KMSRESTConstants.NAME_FIELD);
+    String ivStr = (String) jsonPayload.get(KMSRESTConstants.IV_FIELD);
+    String encMaterialStr = 
+        (String) jsonPayload.get(KMSRESTConstants.MATERIAL_FIELD);
+    Object retJSON;
+    if (eekOp.equals(KMSRESTConstants.EEK_DECRYPT)) {
+      assertAccess(KMSACLs.Type.DECRYPT_EEK, user, DECRYPT_EEK, versionName);
+      KMSClientProvider.checkNotNull(ivStr, KMSRESTConstants.IV_FIELD);
+      byte[] iv = Base64.decodeBase64(ivStr);
+      KMSClientProvider.checkNotNull(encMaterialStr,
+          KMSRESTConstants.MATERIAL_FIELD);
+      byte[] encMaterial = Base64.decodeBase64(encMaterialStr);
+      KeyProvider.KeyVersion retKeyVersion =
+          provider.decryptEncryptedKey(
+              new KMSClientProvider.KMSEncryptedKeyVersion(keyName, versionName,
+                  iv, KeyProviderCryptoExtension.EEK, encMaterial));
+      retJSON = KMSServerJSONUtils.toJSON(retKeyVersion);
+      KMSAudit.ok(user, DECRYPT_EEK, versionName, "");
+    } else {
+      throw new IllegalArgumentException("Wrong " + KMSRESTConstants.EEK_OP +
+          " value, it must be " + KMSRESTConstants.EEK_GENERATE + " or " +
+          KMSRESTConstants.EEK_DECRYPT);
+    }
+    KMSWebApp.getDecryptEEKCallsMeter().mark();
+    return Response.ok().type(MediaType.APPLICATION_JSON).entity(retJSON)
+        .build();
+  }
+
   @GET
   @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" +
       KMSRESTConstants.VERSIONS_SUB_RESOURCE)
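
The two new endpoints surface KeyProviderCryptoExtension's encrypted-key
operations over REST, each gated by the matching new KMSACLs type
(GENERATE_EEK, DECRYPT_EEK), audited, and metered. A minimal sketch of the
provider-level round trip the servlet delegates to (key name illustrative,
exception handling omitted):

    KeyProviderCryptoExtension kp = KMSWebApp.getKeyProvider();
    // What generateEncryptedKeys() loops over, once per requested key:
    EncryptedKeyVersion edek = kp.generateEncryptedKey("mykey");
    // What decryptEncryptedKey() performs after its ACL and input checks:
    KeyProvider.KeyVersion dek = kp.decryptEncryptedKey(edek);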

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java Fri Jul 25 20:33:09 2014
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.crypto.key.kms.server;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
@@ -28,20 +29,20 @@ import java.util.Map;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 /**
  * Provides access to the <code>AccessControlList</code>s used by KMS,
  * hot-reloading them if the <code>kms-acls.xml</code> file where the ACLs
  * are defined has been updated.
  */
+@InterfaceAudience.Private
 public class KMSACLs implements Runnable {
   private static final Logger LOG = LoggerFactory.getLogger(KMSACLs.class);
 
 
   public enum Type {
-    CREATE, DELETE, ROLLOVER, GET, GET_KEYS, GET_METADATA, SET_KEY_MATERIAL;
+    CREATE, DELETE, ROLLOVER, GET, GET_KEYS, GET_METADATA,
+    SET_KEY_MATERIAL, GENERATE_EEK, DECRYPT_EEK;
 
     public String getConfigKey() {
       return KMSConfiguration.CONFIG_PREFIX + "acl." + this.toString();
@@ -52,13 +53,11 @@ public class KMSACLs implements Runnable
 
   public static final int RELOADER_SLEEP_MILLIS = 1000;
 
-  Map<Type, AccessControlList> acls;
-  private ReadWriteLock lock;
+  private volatile Map<Type, AccessControlList> acls;
   private ScheduledExecutorService executorService;
   private long lastReload;
 
   KMSACLs(Configuration conf) {
-    lock = new ReentrantReadWriteLock();
     if (conf == null) {
       conf = loadACLs();
     }
@@ -70,17 +69,13 @@ public class KMSACLs implements Runnable
   }
 
   private void setACLs(Configuration conf) {
-    lock.writeLock().lock();
-    try {
-      acls = new HashMap<Type, AccessControlList>();
-      for (Type aclType : Type.values()) {
-        String aclStr = conf.get(aclType.getConfigKey(), ACL_DEFAULT);
-        acls.put(aclType, new AccessControlList(aclStr));
-        LOG.info("'{}' ACL '{}'", aclType, aclStr);
-      }
-    } finally {
-      lock.writeLock().unlock();
+    Map<Type, AccessControlList> tempAcls = new HashMap<Type, AccessControlList>();
+    for (Type aclType : Type.values()) {
+      String aclStr = conf.get(aclType.getConfigKey(), ACL_DEFAULT);
+      tempAcls.put(aclType, new AccessControlList(aclStr));
+      LOG.info("'{}' ACL '{}'", aclType, aclStr);
     }
+    acls = tempAcls;
   }
 
   @Override
@@ -120,14 +115,7 @@ public class KMSACLs implements Runnable
 
   public boolean hasAccess(Type type, String user) {
     UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
-    AccessControlList acl = null;
-    lock.readLock().lock();
-    try {
-      acl = acls.get(type);
-    } finally {
-      lock.readLock().unlock();
-    }
-    return acl.isUserAllowed(ugi);
+    return acls.get(type).isUserAllowed(ugi);
   }
 
 }
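
The removal of the ReadWriteLock is safe because setACLs() now builds the
replacement map completely before publishing it with a single write to the
volatile field, so readers see either the old or the new map, never a
partially populated one. A standalone sketch of the idiom (plain Java types,
not the KMS classes):

    import java.util.HashMap;
    import java.util.Map;

    // Lock-free hot-reload: build privately, publish with one volatile write.
    class HotReloadable {
      private volatile Map<String, String> settings =
          new HashMap<String, String>();

      void reload(Map<String, String> source) {
        Map<String, String> next = new HashMap<String, String>(source);
        settings = next; // single volatile write publishes the whole map
      }

      String get(String key) {
        return settings.get(key); // old map or new map, never a mix
      }
    }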

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java Fri Jul 25 20:33:09 2014
@@ -34,9 +34,21 @@ public class KMSConfiguration {
 
   public static final String CONFIG_PREFIX = "hadoop.kms.";
 
+  // Property to Enable/Disable Caching
+  public static final String KEY_CACHE_ENABLE = CONFIG_PREFIX +
+      "cache.enable";
+  // Timeout for the Key and Metadata Cache
   public static final String KEY_CACHE_TIMEOUT_KEY = CONFIG_PREFIX +
       "cache.timeout.ms";
-  public static final long KEY_CACHE_TIMEOUT_DEFAULT = 10 * 1000; // 10 secs
+  // Timeout for the Current Key cache
+  public static final String CURR_KEY_CACHE_TIMEOUT_KEY = CONFIG_PREFIX +
+      "current.key.cache.timeout.ms";
+
+  public static final boolean KEY_CACHE_ENABLE_DEFAULT = true;
+  // 10 mins
+  public static final long KEY_CACHE_TIMEOUT_DEFAULT = 10 * 60 * 1000;
+  // 30 secs
+  public static final long CURR_KEY_CACHE_TIMEOUT_DEFAULT = 30 * 1000;
 
   static Configuration getConfiguration(boolean loadHadoopDefaults,
       String ... resources) {

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java Fri Jul 25 20:33:09 2014
@@ -17,8 +17,10 @@
  */
 package org.apache.hadoop.crypto.key.kms.server;
 
+import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.crypto.key.KeyProvider;
+import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;
 import org.apache.hadoop.crypto.key.kms.KMSRESTConstants;
 
 import java.util.ArrayList;
@@ -39,7 +41,9 @@ public class KMSServerJSONUtils {
           keyVersion.getName());
       json.put(KMSRESTConstants.VERSION_NAME_FIELD,
           keyVersion.getVersionName());
-      json.put(KMSRESTConstants.MATERIAL_FIELD, keyVersion.getMaterial());
+      json.put(KMSRESTConstants.MATERIAL_FIELD,
+          Base64.encodeBase64URLSafeString(
+              keyVersion.getMaterial()));
     }
     return json;
   }
@@ -56,6 +60,21 @@ public class KMSServerJSONUtils {
   }
 
   @SuppressWarnings("unchecked")
+  public static Map toJSON(EncryptedKeyVersion encryptedKeyVersion) {
+    Map json = new LinkedHashMap();
+    if (encryptedKeyVersion != null) {
+      json.put(KMSRESTConstants.VERSION_NAME_FIELD,
+          encryptedKeyVersion.getEncryptionKeyVersionName());
+      json.put(KMSRESTConstants.IV_FIELD,
+          Base64.encodeBase64URLSafeString(
+              encryptedKeyVersion.getEncryptedKeyIv()));
+      json.put(KMSRESTConstants.ENCRYPTED_KEY_VERSION_FIELD,
+          toJSON(encryptedKeyVersion.getEncryptedKeyVersion()));
+    }
+    return json;
+  }
+
+  @SuppressWarnings("unchecked")
   public static Map toJSON(String keyName, KeyProvider.Metadata meta) {
     Map json = new LinkedHashMap();
     if (meta != null) {

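The material and IV above are emitted with URL-safe Base64 ('-' and '_'
instead of '+' and '/', no padding), which keeps them safe in query strings.
commons-codec's decoder accepts both alphabets, so a plain decodeBase64 call
round-trips either form; a quick standalone check:

    import java.util.Arrays;
    import org.apache.commons.codec.binary.Base64;

    public class UrlSafeBase64Demo {
      public static void main(String[] args) {
        byte[] material = new byte[] {(byte) 0xfb, (byte) 0xef, (byte) 0xff};
        // URL-safe variant encodes as "--__"; the standard form is "++//".
        String s = Base64.encodeBase64URLSafeString(material);
        byte[] back = Base64.decodeBase64(s); // accepts both alphabets
        System.out.println(s + " round-trips: " + Arrays.equals(material, back));
      }
    }
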
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java Fri Jul 25 20:33:09 2014
@@ -20,9 +20,12 @@ package org.apache.hadoop.crypto.key.kms
 import com.codahale.metrics.JmxReporter;
 import com.codahale.metrics.Meter;
 import com.codahale.metrics.MetricRegistry;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.crypto.key.CachingKeyProvider;
 import org.apache.hadoop.crypto.key.KeyProvider;
+import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
 import org.apache.hadoop.crypto.key.KeyProviderFactory;
 import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.security.authorize.AccessControlList;
@@ -34,6 +37,7 @@ import org.slf4j.bridge.SLF4JBridgeHandl
 
 import javax.servlet.ServletContextEvent;
 import javax.servlet.ServletContextListener;
+
 import java.io.File;
 import java.net.URL;
 import java.util.List;
@@ -54,6 +58,10 @@ public class KMSWebApp implements Servle
       "unauthorized.calls.meter";
   private static final String UNAUTHENTICATED_CALLS_METER = METRICS_PREFIX +
       "unauthenticated.calls.meter";
+  private static final String GENERATE_EEK_METER = METRICS_PREFIX +
+      "generate_eek.calls.meter";
+  private static final String DECRYPT_EEK_METER = METRICS_PREFIX +
+      "decrypt_eek.calls.meter";
 
   private static Logger LOG;
   private static MetricRegistry metricRegistry;
@@ -65,8 +73,10 @@ public class KMSWebApp implements Servle
   private static Meter keyCallsMeter;
   private static Meter unauthorizedCallsMeter;
   private static Meter unauthenticatedCallsMeter;
+  private static Meter decryptEEKCallsMeter;
+  private static Meter generateEEKCallsMeter;
   private static Meter invalidCallsMeter;
-  private static KeyProvider keyProvider;
+  private static KeyProviderCryptoExtension keyProviderCryptoExtension;
 
   static {
     SLF4JBridgeHandler.removeHandlersForRootLogger();
@@ -121,6 +131,10 @@ public class KMSWebApp implements Servle
       metricRegistry = new MetricRegistry();
       jmxReporter = JmxReporter.forRegistry(metricRegistry).build();
       jmxReporter.start();
+      generateEEKCallsMeter = metricRegistry.register(GENERATE_EEK_METER,
+          new Meter());
+      decryptEEKCallsMeter = metricRegistry.register(DECRYPT_EEK_METER,
+          new Meter());
       adminCallsMeter = metricRegistry.register(ADMIN_CALLS_METER, new Meter());
       keyCallsMeter = metricRegistry.register(KEY_CALLS_METER, new Meter());
       invalidCallsMeter = metricRegistry.register(INVALID_CALLS_METER,
@@ -149,11 +163,23 @@ public class KMSWebApp implements Servle
             "the first provider",
             kmsConf.get(KeyProviderFactory.KEY_PROVIDER_PATH));
       }
-      keyProvider = providers.get(0);
-      long timeOutMillis =
-          kmsConf.getLong(KMSConfiguration.KEY_CACHE_TIMEOUT_KEY,
-              KMSConfiguration.KEY_CACHE_TIMEOUT_DEFAULT);
-      keyProvider = new KMSCacheKeyProvider(keyProvider, timeOutMillis);
+      KeyProvider keyProvider = providers.get(0);
+      if (kmsConf.getBoolean(KMSConfiguration.KEY_CACHE_ENABLE,
+          KMSConfiguration.KEY_CACHE_ENABLE_DEFAULT)) {
+        long keyTimeOutMillis =
+            kmsConf.getLong(KMSConfiguration.KEY_CACHE_TIMEOUT_KEY,
+                KMSConfiguration.KEY_CACHE_TIMEOUT_DEFAULT);
+        long currKeyTimeOutMillis =
+            kmsConf.getLong(KMSConfiguration.CURR_KEY_CACHE_TIMEOUT_KEY,
+                KMSConfiguration.CURR_KEY_CACHE_TIMEOUT_DEFAULT);
+        keyProvider = new CachingKeyProvider(keyProvider, keyTimeOutMillis,
+            currKeyTimeOutMillis);
+      }
+      keyProviderCryptoExtension = KeyProviderCryptoExtension.
+          createKeyProviderCryptoExtension(keyProvider);
+      keyProviderCryptoExtension = 
+          new EagerKeyGeneratorKeyProviderCryptoExtension(kmsConf, 
+              keyProviderCryptoExtension);
 
       LOG.info("KMS Started");
     } catch (Throwable ex) {
@@ -200,6 +226,14 @@ public class KMSWebApp implements Servle
     return invalidCallsMeter;
   }
 
+  public static Meter getGenerateEEKCallsMeter() {
+    return generateEEKCallsMeter;
+  }
+
+  public static Meter getDecryptEEKCallsMeter() {
+    return decryptEEKCallsMeter;
+  }
+
   public static Meter getUnauthorizedCallsMeter() {
     return unauthorizedCallsMeter;
   }
@@ -208,7 +242,7 @@ public class KMSWebApp implements Servle
     return unauthenticatedCallsMeter;
   }
 
-  public static KeyProvider getKeyProvider() {
-    return keyProvider;
+  public static KeyProviderCryptoExtension getKeyProvider() {
+    return keyProviderCryptoExtension;
   }
 }
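
The provider setup in contextInitialized() is a three-layer decorator stack:
the backing provider, an optional caching layer, and the crypto extension
wrapped for eager EEK generation. Factored into a helper for readability (an
illustrative sketch using only names from this patch, not part of the change):

    static KeyProviderCryptoExtension buildProvider(KeyProvider base,
        Configuration kmsConf) {
      KeyProvider kp = base;
      if (kmsConf.getBoolean(KMSConfiguration.KEY_CACHE_ENABLE,
          KMSConfiguration.KEY_CACHE_ENABLE_DEFAULT)) {
        // Caching sits closest to the backing store so repeated key
        // lookups are served from memory; timeouts come from kms-site.xml.
        kp = new CachingKeyProvider(kp,
            kmsConf.getLong(KMSConfiguration.KEY_CACHE_TIMEOUT_KEY,
                KMSConfiguration.KEY_CACHE_TIMEOUT_DEFAULT),
            kmsConf.getLong(KMSConfiguration.CURR_KEY_CACHE_TIMEOUT_KEY,
                KMSConfiguration.CURR_KEY_CACHE_TIMEOUT_DEFAULT));
      }
      // Crypto extension on top, wrapped for eager EEK generation.
      return new EagerKeyGeneratorKeyProviderCryptoExtension(kmsConf,
          KeyProviderCryptoExtension.createKeyProviderCryptoExtension(kp));
    }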

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm Fri Jul 25 20:33:09 2014
@@ -72,22 +72,35 @@ Hadoop Key Management Server (KMS) - Doc
   KMS caches keys for a short period of time to avoid excessive hits to the
   underlying key provider.
 
-  The cache is used with the following 2 methods only, <<<getCurrentKey()>>>
-  and <<<getKeyVersion()>>>.
+  The cache is enabled by default (it can be disabled by setting the
+  <<<hadoop.kms.cache.enable>>> boolean property to false).
+
+  The cache is used with the following 3 methods only: <<<getCurrentKey()>>>,
+  <<<getKeyVersion()>>> and <<<getMetadata()>>>.
 
   For the <<<getCurrentKey()>>> method, cached entries are kept for a maximum
-  of 1000 millisecond regardless the number of times the key is being access
+  of 30000 milliseconds, regardless of the number of times the key is accessed
   (to avoid stale keys being considered current).
 
   For the <<<getKeyVersion()>>> method, cached entries are kept with a default
-  inactivity timeout of 10000 milliseconds. This time out is configurable via
-  the following property in the <<<etc/hadoop/kms-site.xml>>> configuration
-  file:
+  inactivity timeout of 600000 milliseconds (10 mins). This timeout is
+  configurable via the following property in the <<<etc/hadoop/kms-site.xml>>>
+  configuration file:
 
 +---+
   <property>
+    <name>hadoop.kms.cache.enable</name>
+    <value>true</value>
+  </property>
+
+  <property>
     <name>hadoop.kms.cache.timeout.ms</name>
-    <value>10000</value>
+    <value>600000</value>
+  </property>
+
+  <property>
+    <name>hadoop.kms.current.key.cache.timeout.ms</name>
+    <value>30000</value>
   </property>
 +---+
 
@@ -266,6 +279,25 @@ $ keytool -genkey -alias tomcat -keyalg 
         to provide the key material when creating or rolling a key.
     </description>
   </property>
+
+  <property>
+    <name>hadoop.kms.acl.GENERATE_EEK</name>
+    <value>*</value>
+    <description>
+      ACL for generateEncryptedKey
+      CryptoExtension operations
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.acl.DECRYPT_EEK</name>
+    <value>*</value>
+    <description>
+      ACL for decryptEncryptedKey
+      CryptoExtension operations
+    </description>
+  </property>
+</configuration>
 +---+
 
 ** KMS HTTP REST API
@@ -383,6 +415,70 @@ Content-Type: application/json
 }
 +---+
 
+
+*** Generate Encrypted Key for Current KeyVersion
+
+  <REQUEST:>
+
++---+
+GET http://HOST:PORT/kms/v1/key/<key-name>/_eek?eek_op=generate&num_keys=<number-of-keys-to-generate>
++---+
+
+  <RESPONSE:>
+
++---+
+200 OK
+Content-Type: application/json
+[
+  {
+    "versionName"         : "encryptionVersionName",
+    "iv"                  : "<iv>",          //base64
+    "encryptedKeyVersion" : {
+        "versionName"       : "EEK",
+        "material"          : "<material>"     //base64
+    }
+  },
+  {
+    "versionName"         : "encryptionVersionName",
+    "iv"                  : "<iv>",          //base64
+    "encryptedKeyVersion" : {
+        "versionName"       : "EEK",
+        "material"          : "<material>"     //base64
+    }
+  },
+  ...
+]
++---+
+
+*** Decrypt Encrypted Key
+
+  <REQUEST:>
+
++---+
+POST http://HOST:PORT/kms/v1/keyversion/<version-name>/_eek?eek_op=decrypt
+Content-Type: application/json
+
+{
+  "name"        : "<key-name>",
+  "iv"          : "<iv>",          //base64
+  "material"    : "<material>"     //base64
+}
+
++---+
+
+  <RESPONSE:>
+
++---+
+200 OK
+Content-Type: application/json
+
+{
+  "name"        : "EK",
+  "material"    : "<material>"     //base64
+}
++---+
+
+
 *** Get Key Version
 
   <REQUEST:>

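Both REST calls documented above are also reachable through the Java client
API, as exercised by the TestKMS changes below. A minimal sketch (the KMS URI
and key name are placeholders):

    // Assumed imports: java.net.URI, org.apache.hadoop.conf.Configuration,
    // org.apache.hadoop.crypto.key.*, org.apache.hadoop.crypto.key.kms.*.
    static void eekRoundTrip() throws Exception {
      Configuration conf = new Configuration();
      KeyProvider kp = new KMSClientProvider(
          new URI("kms://http@kmshost:16000/kms"), conf); // placeholder URI
      KeyProviderCryptoExtension kpExt =
          KeyProviderCryptoExtension.createKeyProviderCryptoExtension(kp);
      // GET .../key/k1/_eek?eek_op=generate&num_keys=...
      EncryptedKeyVersion eek = kpExt.generateEncryptedKey("k1");
      // POST .../keyversion/<version-name>/_eek?eek_op=decrypt
      KeyProvider.KeyVersion ek = kpExt.decryptEncryptedKey(eek);
    }
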
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java Fri Jul 25 20:33:09 2014
@@ -19,6 +19,9 @@ package org.apache.hadoop.crypto.key.kms
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider;
+import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
+import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
+import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;
 import org.apache.hadoop.crypto.key.kms.KMSClientProvider;
 import org.apache.hadoop.minikdc.MiniKdc;
 import org.apache.hadoop.security.authorize.AuthorizationException;
@@ -36,6 +39,7 @@ import javax.security.auth.Subject;
 import javax.security.auth.kerberos.KerberosPrincipal;
 import javax.security.auth.login.AppConfigurationEntry;
 import javax.security.auth.login.LoginContext;
+
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
@@ -267,7 +271,7 @@ public class TestKMS {
     }
   }
 
-  private void doAs(String user, final PrivilegedExceptionAction<Void> action)
+  private <T> T doAs(String user, final PrivilegedExceptionAction<T> action)
       throws Exception {
     Set<Principal> principals = new HashSet<Principal>();
     principals.add(new KerberosPrincipal(user));
@@ -280,7 +284,7 @@ public class TestKMS {
     try {
       loginContext.login();
       subject = loginContext.getSubject();
-      Subject.doAs(subject, action);
+      return Subject.doAs(subject, action);
     } finally {
       loginContext.logout();
     }
@@ -474,6 +478,32 @@ public class TestKMS {
         Assert.assertNotNull(kms1[0].getCreated());
         Assert.assertTrue(started.before(kms1[0].getCreated()));
 
+        // test generate and decryption of EEK
+        KeyProvider.KeyVersion kv = kp.getCurrentKey("k1");
+        KeyProviderCryptoExtension kpExt =
+            KeyProviderCryptoExtension.createKeyProviderCryptoExtension(kp);
+
+        EncryptedKeyVersion ek1 = kpExt.generateEncryptedKey(kv.getName());
+        Assert.assertEquals(KeyProviderCryptoExtension.EEK,
+            ek1.getEncryptedKeyVersion().getVersionName());
+        Assert.assertNotNull(ek1.getEncryptedKeyVersion().getMaterial());
+        Assert.assertEquals(kv.getMaterial().length,
+            ek1.getEncryptedKeyVersion().getMaterial().length);
+        KeyProvider.KeyVersion k1 = kpExt.decryptEncryptedKey(ek1);
+        Assert.assertEquals(KeyProviderCryptoExtension.EK, k1.getVersionName());
+        KeyProvider.KeyVersion k1a = kpExt.decryptEncryptedKey(ek1);
+        Assert.assertArrayEquals(k1.getMaterial(), k1a.getMaterial());
+        Assert.assertEquals(kv.getMaterial().length, k1.getMaterial().length);
+
+        EncryptedKeyVersion ek2 = kpExt.generateEncryptedKey(kv.getName());
+        KeyProvider.KeyVersion k2 = kpExt.decryptEncryptedKey(ek2);
+        boolean isEq = true;
+        for (int i = 0; isEq && i < ek2.getEncryptedKeyVersion()
+            .getMaterial().length; i++) {
+          isEq = k2.getMaterial()[i] == k1.getMaterial()[i];
+        }
+        Assert.assertFalse(isEq);
+
         // deleteKey()
         kp.deleteKey("k1");
 
@@ -565,7 +595,7 @@ public class TestKMS {
       @Override
       public Void call() throws Exception {
         final Configuration conf = new Configuration();
-        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64);
+        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
         URI uri = createKMSUri(getKMSUrl());
         final KeyProvider kp = new KMSClientProvider(uri, conf);
 
@@ -582,7 +612,7 @@ public class TestKMS {
               Assert.fail(ex.toString());
             }
             try {
-              kp.createKey("k", new byte[8], new KeyProvider.Options(conf));
+              kp.createKey("k", new byte[16], new KeyProvider.Options(conf));
               Assert.fail();
             } catch (AuthorizationException ex) {
               //NOP
@@ -598,7 +628,7 @@ public class TestKMS {
               Assert.fail(ex.toString());
             }
             try {
-              kp.rollNewVersion("k", new byte[8]);
+              kp.rollNewVersion("k", new byte[16]);
               Assert.fail();
             } catch (AuthorizationException ex) {
               //NOP
@@ -690,7 +720,7 @@ public class TestKMS {
           @Override
           public Void run() throws Exception {
             try {
-              KeyProvider.KeyVersion kv = kp.createKey("k1", new byte[8],
+              KeyProvider.KeyVersion kv = kp.createKey("k1", new byte[16],
                   new KeyProvider.Options(conf));
               Assert.assertNull(kv.getMaterial());
             } catch (Exception ex) {
@@ -717,7 +747,8 @@ public class TestKMS {
           @Override
           public Void run() throws Exception {
             try {
-              KeyProvider.KeyVersion kv = kp.rollNewVersion("k1", new byte[8]);
+              KeyProvider.KeyVersion kv =
+                  kp.rollNewVersion("k1", new byte[16]);
               Assert.assertNull(kv.getMaterial());
             } catch (Exception ex) {
               Assert.fail(ex.toString());
@@ -726,12 +757,46 @@ public class TestKMS {
           }
         });
 
-        doAs("GET", new PrivilegedExceptionAction<Void>() {
+        final KeyVersion currKv =
+            doAs("GET", new PrivilegedExceptionAction<KeyVersion>() {
           @Override
-          public Void run() throws Exception {
+          public KeyVersion run() throws Exception {
             try {
               kp.getKeyVersion("k1@0");
-              kp.getCurrentKey("k1");
+              KeyVersion kv = kp.getCurrentKey("k1");
+              return kv;
+            } catch (Exception ex) {
+              Assert.fail(ex.toString());
+            }
+            return null;
+          }
+        });
+
+        final EncryptedKeyVersion encKv =
+            doAs("GENERATE_EEK",
+                new PrivilegedExceptionAction<EncryptedKeyVersion>() {
+          @Override
+          public EncryptedKeyVersion run() throws Exception {
+            try {
+              KeyProviderCryptoExtension kpCE = KeyProviderCryptoExtension.
+                      createKeyProviderCryptoExtension(kp);
+              EncryptedKeyVersion ek1 =
+                  kpCE.generateEncryptedKey(currKv.getName());
+              return ek1;
+            } catch (Exception ex) {
+              Assert.fail(ex.toString());
+            }
+            return null;
+          }
+        });
+
+        doAs("DECRYPT_EEK", new PrivilegedExceptionAction<Void>() {
+          @Override
+          public Void run() throws Exception {
+            try {
+              KeyProviderCryptoExtension kpCE = KeyProviderCryptoExtension.
+                      createKeyProviderCryptoExtension(kp);
+              kpCE.decryptEncryptedKey(encKv);
             } catch (Exception ex) {
               Assert.fail(ex.toString());
             }
@@ -817,7 +882,7 @@ public class TestKMS {
       @Override
       public Void call() throws Exception {
         final Configuration conf = new Configuration();
-        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64);
+        conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
         URI uri = createKMSUri(getKMSUrl());
         final KeyProvider kp = new KMSClientProvider(uri, conf);
 
@@ -889,6 +954,30 @@ public class TestKMS {
       Assert.assertTrue("Caught unexpected exception" + e.toString(), false);
     }
 
+    caughtTimeout = false;
+    try {
+      KeyProvider kp = new KMSClientProvider(uri, conf);
+      KeyProviderCryptoExtension.createKeyProviderCryptoExtension(kp)
+          .generateEncryptedKey("a");
+    } catch (SocketTimeoutException e) {
+      caughtTimeout = true;
+    } catch (IOException e) {
+      Assert.assertTrue("Caught unexpected exception " + e.toString(), false);
+    }
+
+    caughtTimeout = false;
+    try {
+      KeyProvider kp = new KMSClientProvider(uri, conf);
+      KeyProviderCryptoExtension.createKeyProviderCryptoExtension(kp)
+          .decryptEncryptedKey(
+              new KMSClientProvider.KMSEncryptedKeyVersion("a",
+                  "a", new byte[] {1, 2}, "EEK", new byte[] {1, 2}));
+    } catch (SocketTimeoutException e) {
+      caughtTimeout = true;
+    } catch (IOException e) {
+      Assert.assertTrue("Caught unexpected exception " + e.toString(), false);
+    }
+
     Assert.assertTrue(caughtTimeout);
 
     sock.close();

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java Fri Jul 25 20:33:09 2014
@@ -19,12 +19,16 @@ package org.apache.hadoop.mount;
 
 import java.io.IOException;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.RpcProgram;
 import org.apache.hadoop.oncrpc.SimpleTcpServer;
 import org.apache.hadoop.oncrpc.SimpleUdpServer;
 import org.apache.hadoop.portmap.PortmapMapping;
 import org.apache.hadoop.util.ShutdownHookManager;
 
+import static org.apache.hadoop.util.ExitUtil.terminate;
+
 /**
  * Main class for starting mountd daemon. This daemon implements the NFS
  * mount protocol. When receiving a MOUNT request from an NFS client, it checks
@@ -33,6 +37,7 @@ import org.apache.hadoop.util.ShutdownHo
  * handle for requested directory and returns it to the client.
  */
 abstract public class MountdBase {
+  public static final Log LOG = LogFactory.getLog(MountdBase.class);
   private final RpcProgram rpcProgram;
   private int udpBoundPort; // Will set after server starts
   private int tcpBoundPort; // Will set after server starts
@@ -40,11 +45,11 @@ abstract public class MountdBase {
   public RpcProgram getRpcProgram() {
     return rpcProgram;
   }
-  
+
   /**
    * Constructor
    * @param program
-   * @throws IOException 
+   * @throws IOException
    */
   public MountdBase(RpcProgram program) throws IOException {
     rpcProgram = program;
@@ -74,11 +79,16 @@ abstract public class MountdBase {
     if (register) {
       ShutdownHookManager.get().addShutdownHook(new Unregister(),
           SHUTDOWN_HOOK_PRIORITY);
-      rpcProgram.register(PortmapMapping.TRANSPORT_UDP, udpBoundPort);
-      rpcProgram.register(PortmapMapping.TRANSPORT_TCP, tcpBoundPort);
+      try {
+        rpcProgram.register(PortmapMapping.TRANSPORT_UDP, udpBoundPort);
+        rpcProgram.register(PortmapMapping.TRANSPORT_TCP, tcpBoundPort);
+      } catch (Throwable e) {
+        LOG.fatal("Failed to start the server. Cause:", e);
+        terminate(1, e);
+      }
     }
   }
-  
+
   /**
    * Priority of the mountd shutdown hook.
    */
@@ -91,5 +101,5 @@ abstract public class MountdBase {
       rpcProgram.unregister(PortmapMapping.TRANSPORT_TCP, tcpBoundPort);
     }
   }
-  
+
 }

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java Fri Jul 25 20:33:09 2014
@@ -53,7 +53,12 @@ public class NfsExports {
       long expirationPeriodNano = conf.getLong(
           Nfs3Constant.NFS_EXPORTS_CACHE_EXPIRYTIME_MILLIS_KEY,
           Nfs3Constant.NFS_EXPORTS_CACHE_EXPIRYTIME_MILLIS_DEFAULT) * 1000 * 1000;
-      exports = new NfsExports(cacheSize, expirationPeriodNano, matchHosts);
+      try {
+        exports = new NfsExports(cacheSize, expirationPeriodNano, matchHosts);
+      } catch (IllegalArgumentException e) {
+        LOG.error("Invalid NFS Exports provided: ", e);
+        return exports;
+      }
     }
     return exports;
   }
@@ -71,7 +76,16 @@ public class NfsExports {
   
   private static final Pattern CIDR_FORMAT_LONG = 
       Pattern.compile(SLASH_FORMAT_LONG);
-  
+
+  // Hostnames are composed of a series of 'labels' concatenated with dots.
+  // Labels can be between 1-63 characters long, and can only contain
+  // letters, digits and hyphens. They cannot start or end with a hyphen. For
+  // more details, refer to RFC-1123 & http://en.wikipedia.org/wiki/Hostname
+  private static final String LABEL_FORMAT =
+      "[a-zA-Z0-9]([a-zA-Z0-9\\-]{0,61}[a-zA-Z0-9])?";
+  private static final Pattern HOSTNAME_FORMAT =
+      Pattern.compile("^(" + LABEL_FORMAT + "\\.)*" + LABEL_FORMAT + "$");
+
   static class AccessCacheEntry implements LightWeightCache.Entry{
     private final String hostAddr;
     private AccessPrivilege access;
@@ -381,10 +395,14 @@ public class NfsExports {
         LOG.debug("Using Regex match for '" + host + "' and " + privilege);
       }
       return new RegexMatch(privilege, host);
+    } else if (HOSTNAME_FORMAT.matcher(host).matches()) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Using exact match for '" + host + "' and " + privilege);
+      }
+      return new ExactMatch(privilege, host);
+    } else {
+      throw new IllegalArgumentException("Invalid hostname provided '" + host
+          + "'");
     }
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("Using exact match for '" + host + "' and " + privilege);
-    }
-    return new ExactMatch(privilege, host);
   }
-}
\ No newline at end of file
+}
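
The new HOSTNAME_FORMAT pattern can be sanity-checked in isolation; a
standalone sketch mirroring the two constants above:

    import java.util.regex.Pattern;

    public class HostnameFormatDemo {
      private static final String LABEL_FORMAT =
          "[a-zA-Z0-9]([a-zA-Z0-9\\-]{0,61}[a-zA-Z0-9])?";
      private static final Pattern HOSTNAME_FORMAT =
          Pattern.compile("^(" + LABEL_FORMAT + "\\.)*" + LABEL_FORMAT + "$");

      public static void main(String[] args) {
        // Labels must start and end with a letter or digit.
        String[] hosts = {"host1", "a-b.example.com",
            "-bad.example.com", "bad-.com"};
        for (String h : hosts) {
          System.out.println(h + " -> " + HOSTNAME_FORMAT.matcher(h).matches());
        }
      }
    }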

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java Fri Jul 25 20:33:09 2014
@@ -25,6 +25,8 @@ import org.apache.hadoop.oncrpc.SimpleTc
 import org.apache.hadoop.portmap.PortmapMapping;
 import org.apache.hadoop.util.ShutdownHookManager;
 
+import static org.apache.hadoop.util.ExitUtil.terminate;
+
 /**
  * Nfs server. Supports NFS v3 using {@link RpcProgram}.
  * Currently Mountd program is also started inside this class.
@@ -34,7 +36,7 @@ public abstract class Nfs3Base {
   public static final Log LOG = LogFactory.getLog(Nfs3Base.class);
   private final RpcProgram rpcProgram;
   private int nfsBoundPort; // Will set after server starts
-    
+
   public RpcProgram getRpcProgram() {
     return rpcProgram;
   }
@@ -46,11 +48,16 @@ public abstract class Nfs3Base {
 
   public void start(boolean register) {
     startTCPServer(); // Start TCP server
-    
+
     if (register) {
       ShutdownHookManager.get().addShutdownHook(new Unregister(),
           SHUTDOWN_HOOK_PRIORITY);
-      rpcProgram.register(PortmapMapping.TRANSPORT_TCP, nfsBoundPort);
+      try {
+        rpcProgram.register(PortmapMapping.TRANSPORT_TCP, nfsBoundPort);
+      } catch (Throwable e) {
+        LOG.fatal("Failed to start the server. Cause:", e);
+        terminate(1, e);
+      }
     }
   }
 
@@ -61,7 +68,7 @@ public abstract class Nfs3Base {
     tcpServer.run();
     nfsBoundPort = tcpServer.getBoundPort();
   }
-  
+
   /**
    * Priority of the nfsd shutdown hook.
    */

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java?rev=1613514&r1=1613513&r2=1613514&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java Fri Jul 25 20:33:09 2014
@@ -131,7 +131,7 @@ public abstract class RpcProgram extends
     } catch (IOException e) {
       String request = set ? "Registration" : "Unregistration";
       LOG.error(request + " failure with " + host + ":" + port
-          + ", portmap entry: " + mapEntry, e);
+          + ", portmap entry: " + mapEntry);
       throw new RuntimeException(request + " failure", e);
     }
   }


