From: todd@apache.org
To: common-commits@hadoop.apache.org
Reply-To: common-dev@hadoop.apache.org
Date: Sat, 15 Jan 2011 02:40:51 -0000
Subject: svn commit: r1059235 - in /hadoop/common/trunk: ./ src/java/org/apache/hadoop/ipc/ src/java/org/apache/hadoop/security/ src/java/org/apache/hadoop/security/authorize/ src/test/core/org/apache/hadoop/security/
Message-Id: <20110115024052.1D8202388999@eris.apache.org>
X-Mailer: svnmailer-1.0.8

Author: todd
Date: Sat Jan 15 02:40:51 2011
New Revision: 1059235

URL: http://svn.apache.org/viewvc?rev=1059235&view=rev
Log:
HADOOP-7104. Remove unnecessary DNS reverse lookups from RPC layer. Contributed by Kan Zhang

Modified:
    hadoop/common/trunk/CHANGES.txt
    hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java
    hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
    hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestSecurityUtil.java

Modified: hadoop/common/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=1059235&r1=1059234&r2=1059235&view=diff
==============================================================================
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Sat Jan 15 02:40:51 2011
@@ -42,6 +42,9 @@ Trunk (unreleased changes)
     HADOOP-6995. Allow wildcards to be used in ProxyUsers configurations.
     (todd)
 
+    HADOOP-7104. Remove unnecessary DNS reverse lookups from RPC layer
+    (Kan Zhang via todd)
+
   OPTIMIZATIONS
 
   BUG FIXES
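For readers unfamiliar with the cost being removed: InetAddress.getHostAddress() is a pure in-memory string conversion, while InetAddress.getCanonicalHostName() issues a reverse DNS (PTR) lookup the first time it is called and can block when the resolver is slow or misconfigured. The standalone snippet below is not part of this commit; the address literal and the timing harness are illustrative only, but they make the per-connection cost visible.

    import java.net.InetAddress;

    public class ReverseLookupCost {
      public static void main(String[] args) throws Exception {
        // Any reachable address works; 8.8.8.8 is just an example.
        InetAddress addr = InetAddress.getByName("8.8.8.8");

        long t0 = System.nanoTime();
        String ip = addr.getHostAddress();           // no network I/O
        long t1 = System.nanoTime();
        String fqdn = addr.getCanonicalHostName();   // reverse DNS lookup
        long t2 = System.nanoTime();

        System.out.printf("getHostAddress():       %s  (%d us)%n", ip, (t1 - t0) / 1000);
        System.out.printf("getCanonicalHostName(): %s  (%d us)%n", fqdn, (t2 - t1) / 1000);
      }
    }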
Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Client.java?rev=1059235&r1=1059234&r2=1059235&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Client.java Sat Jan 15 02:40:51 2011
@@ -1268,7 +1268,7 @@ public class Client {
             + protocol.getCanonicalName());
       }
       return SecurityUtil.getServerPrincipal(conf.get(serverKey), address
-          .getAddress().getCanonicalHostName());
+          .getAddress());
     }
     return null;
   }

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java?rev=1059235&r1=1059234&r2=1059235&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java Sat Jan 15 02:40:51 2011
@@ -851,8 +851,8 @@ public abstract class Server {
     // Cache the remote host & port info so that even if the socket is
     // disconnected, we can say where it used to connect to.
     private String hostAddress;
-    private String hostName;
     private int remotePort;
+    private InetAddress addr;

     ConnectionHeader header = new ConnectionHeader();
     Class protocol;
@@ -889,12 +889,11 @@ public abstract class Server {
       this.unwrappedData = null;
       this.unwrappedDataLengthBuffer = ByteBuffer.allocate(4);
       this.socket = channel.socket();
-      InetAddress addr = socket.getInetAddress();
+      this.addr = socket.getInetAddress();
       if (addr == null) {
         this.hostAddress = "*Unknown*";
       } else {
         this.hostAddress = addr.getHostAddress();
-        this.hostName = addr.getCanonicalHostName();
       }
       this.remotePort = socket.getPort();
       this.responseQueue = new LinkedList();
@@ -917,8 +916,8 @@ public abstract class Server {
       return hostAddress;
     }

-    public String getHostName() {
-      return hostName;
+    public InetAddress getHostInetAddress() {
+      return addr;
     }

     public void setLastContact(long lastContact) {
@@ -1326,7 +1325,7 @@ public abstract class Server {
               && (authMethod != AuthMethod.DIGEST)) {
             ProxyUsers.authorize(user, this.getHostAddress(), conf);
           }
-          authorize(user, header, getHostName());
+          authorize(user, header, getHostInetAddress());
           if (LOG.isDebugEnabled()) {
             LOG.debug("Successfully authorized " + header);
           }
@@ -1666,12 +1665,12 @@ public abstract class Server {
    * 
    * @param user client user
    * @param connection incoming connection
-   * @param hostname fully-qualified domain name of incoming connection
+   * @param addr InetAddress of incoming connection
   * @throws AuthorizationException when the client isn't authorized to talk the protocol
   */
  public void authorize(UserGroupInformation user, 
                        ConnectionHeader connection,
-                       String hostname
+                       InetAddress addr
                        ) throws AuthorizationException {
    if (authorize) {
      Class protocol = null;
@@ -1681,7 +1680,7 @@ public abstract class Server {
        throw new AuthorizationException("Unknown protocol: " + 
                                         connection.getProtocol());
      }
-      serviceAuthorizationManager.authorize(user, protocol, getConf(), hostname);
+      serviceAuthorizationManager.authorize(user, protocol, getConf(), addr);
    }
  }

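The Server change above follows a simple pattern: the Connection keeps the raw InetAddress from the socket and leaves any reverse resolution to the rare caller that truly needs an FQDN. A minimal sketch of that pattern follows; the class and field names here are illustrative, not Hadoop's.

    import java.net.InetAddress;

    // Sketch only: keep the address, defer any reverse DNS to the caller.
    class ConnectionInfo {
      private final InetAddress addr;    // cheap to hold, no DNS traffic
      private final String hostAddress;  // textual IP, also cheap

      ConnectionInfo(InetAddress addr) {
        this.addr = addr;
        this.hostAddress = (addr == null) ? "*Unknown*" : addr.getHostAddress();
      }

      // Used for logging and IP-based checks; never touches DNS.
      String getHostAddress() {
        return hostAddress;
      }

      // Callers that need a hostname (e.g. Kerberos principal substitution)
      // invoke addr.getCanonicalHostName() themselves, and only when required.
      InetAddress getHostInetAddress() {
        return addr;
      }
    }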
Modified: hadoop/common/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java?rev=1059235&r1=1059234&r2=1059235&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java Sat Jan 15 02:40:51 2011
@@ -135,8 +135,8 @@ public class SecurityUtil {
   }

   /**
-   * Convert Kerberos principal name conf values to valid Kerberos principal
-   * names. It replaces $host in the conf values with hostname, which should be
+   * Convert Kerberos principal name pattern to valid Kerberos principal
+   * names. It replaces hostname pattern with hostname, which should be
    * fully-qualified domain name. If hostname is null or "0.0.0.0", it uses
    * dynamically looked-up fqdn of the current host instead.
    *
@@ -149,24 +149,57 @@ public class SecurityUtil {
    */
   public static String getServerPrincipal(String principalConfig,
       String hostname) throws IOException {
-    if (principalConfig == null)
-      return null;
-    String[] components = principalConfig.split("[/@]");
-    if (components.length != 3) {
-      throw new IOException(
-          "Kerberos service principal name isn't configured properly "
-          + "(should have 3 parts): " + principalConfig);
-    }
-
-    if (components[1].equals(HOSTNAME_PATTERN)) {
-      String fqdn = hostname;
-      if (fqdn == null || fqdn.equals("") || fqdn.equals("0.0.0.0")) {
-        fqdn = getLocalHostName();
-      }
-      return components[0] + "/" + fqdn + "@" + components[2];
+    String[] components = getComponents(principalConfig);
+    if (components == null || components.length != 3
+        || !components[1].equals(HOSTNAME_PATTERN)) {
+      return principalConfig;
     } else {
+      return replacePattern(components, hostname);
+    }
+  }
+
+  /**
+   * Convert Kerberos principal name pattern to valid Kerberos principal names.
+   * This method is similar to {@link #getServerPrincipal(String, String)},
+   * except 1) the reverse DNS lookup from addr to hostname is done only when
+   * necessary, 2) param addr can't be null (no default behavior of using local
+   * hostname when addr is null).
+   *
+   * @param principalConfig
+   *          Kerberos principal name pattern to convert
+   * @param addr
+   *          InetAddress of the host used for substitution
+   * @return converted Kerberos principal name
+   * @throws IOException
+   */
+  public static String getServerPrincipal(String principalConfig,
+      InetAddress addr) throws IOException {
+    String[] components = getComponents(principalConfig);
+    if (components == null || components.length != 3
+        || !components[1].equals(HOSTNAME_PATTERN)) {
       return principalConfig;
+    } else {
+      if (addr == null) {
+        throw new IOException("Can't replace " + HOSTNAME_PATTERN
+            + " pattern since client address is null");
+      }
+      return replacePattern(components, addr.getCanonicalHostName());
+    }
+  }
+
+  private static String[] getComponents(String principalConfig) {
+    if (principalConfig == null)
+      return null;
+    return principalConfig.split("[/@]");
+  }
+
+  private static String replacePattern(String[] components, String hostname)
+      throws IOException {
+    String fqdn = hostname;
+    if (fqdn == null || fqdn.equals("") || fqdn.equals("0.0.0.0")) {
+      fqdn = getLocalHostName();
     }
+    return components[0] + "/" + fqdn + "@" + components[2];
   }

   static String getLocalHostName() throws UnknownHostException {
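Taken together, the two getServerPrincipal overloads mean a reverse lookup now happens only when the configured principal actually contains the _HOST pattern. A small usage sketch, assuming the patched SecurityUtil is on the classpath; the address and realm values are examples, not defaults:

    import java.net.InetAddress;

    import org.apache.hadoop.security.SecurityUtil;

    public class PrincipalPatternDemo {
      public static void main(String[] args) throws Exception {
        InetAddress client = InetAddress.getByName("10.0.0.1");  // example address

        // Pattern contains _HOST: the address is reverse-resolved, but only
        // because the substitution is genuinely needed.
        System.out.println(SecurityUtil.getServerPrincipal(
            "hdfs/_HOST@EXAMPLE.COM", client));

        // No _HOST pattern: the string is returned untouched and
        // getCanonicalHostName() is never called on the address.
        System.out.println(SecurityUtil.getServerPrincipal(
            "hdfs/nn1.example.com@EXAMPLE.COM", client));
      }
    }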
Modified: hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java?rev=1059235&r1=1059234&r2=1059235&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java Sat Jan 15 02:40:51 2011
@@ -18,6 +18,7 @@
 package org.apache.hadoop.security.authorize;

 import java.io.IOException;
+import java.net.InetAddress;
 import java.util.IdentityHashMap;
 import java.util.Map;
 import java.util.Set;
@@ -30,7 +31,6 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.KerberosName;
 import org.apache.hadoop.security.UserGroupInformation;

 /**
@@ -71,13 +71,13 @@ public class ServiceAuthorizationManager
   * @param user user accessing the service
   * @param protocol service being accessed
   * @param conf configuration to use
-  * @param hostname fully qualified domain name of the client
+  * @param addr InetAddress of the client
   * @throws AuthorizationException on authorization failure
   */
  public void authorize(UserGroupInformation user, 
                        Class protocol,
                        Configuration conf,
-                       String hostname
+                       InetAddress addr
                        ) throws AuthorizationException {
    AccessControlList acl = protocolToAcl.get(protocol);
    if (acl == null) {
@@ -91,41 +91,24 @@ public class ServiceAuthorizationManager
    if (krbInfo != null) {
      String clientKey = krbInfo.clientPrincipal();
      if (clientKey != null && !clientKey.equals("")) {
-        if (hostname == null) {
-          throw new AuthorizationException(
-              "Can't authorize client when client hostname is null");
-        }
        try {
          clientPrincipal = SecurityUtil.getServerPrincipal(
-              conf.get(clientKey), hostname);
+              conf.get(clientKey), addr);
        } catch (IOException e) {
          throw (AuthorizationException) new AuthorizationException(
              "Can't figure out Kerberos principal name for connection from "
-              + hostname + " for user=" + user + " protocol=" + protocol)
+              + addr + " for user=" + user + " protocol=" + protocol)
              .initCause(e);
        }
      }
    }
-    // when authorizing use the short name only
-    String shortName = clientPrincipal;
-    if(clientPrincipal != null ) {
-      try {
-        shortName = new KerberosName(clientPrincipal).getShortName();
-      } catch (IOException e) {
-        LOG.warn("couldn't get short name from " + clientPrincipal, e);
-        // just keep going
-      }
-    }
-    if(LOG.isDebugEnabled()) {
-      LOG.debug("for protocol authorization compare (" + clientPrincipal +
-          "): " + shortName + " with " + user.getShortUserName());
-    }
-    if((shortName != null && !shortName.equals(user.getShortUserName())) ||
+    if((clientPrincipal != null && !clientPrincipal.equals(user.getUserName())) ||
       !acl.isUserAllowed(user)) {
-      AUDITLOG.warn(AUTHZ_FAILED_FOR + user + " for protocol="+protocol);
+      AUDITLOG.warn(AUTHZ_FAILED_FOR + user + " for protocol=" + protocol
+          + ", expected client Kerberos principal is " + clientPrincipal);
      throw new AuthorizationException("User " + user +
-          " is not authorized for protocol " +
-          protocol);
+          " is not authorized for protocol " + protocol +
+          ", expected client Kerberos principal is " + clientPrincipal);
    }
    AUDITLOG.info(AUTHZ_SUCCESSFULL_FOR + user + " for protocol="+protocol);
  }
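Besides taking an InetAddress, the authorization path above also drops the KerberosName short-name translation: the expected client principal is compared against the caller's full principal name, and the expected principal is included in the audit log and exception text. A condensed restatement of the resulting check, for reading convenience only (configuration lookup and audit logging omitted; the helper class and method names are not Hadoop's):

    import java.net.InetAddress;

    import org.apache.hadoop.security.SecurityUtil;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.authorize.AccessControlList;
    import org.apache.hadoop.security.authorize.AuthorizationException;

    final class AuthorizeSketch {
      // clientPrincipalPattern is the value of the protocol's clientPrincipal
      // config key, e.g. "nn/_HOST@REALM"; null means no Kerberos check.
      static void check(UserGroupInformation user, AccessControlList acl,
          String clientPrincipalPattern, InetAddress addr) throws Exception {
        String clientPrincipal = (clientPrincipalPattern == null) ? null
            : SecurityUtil.getServerPrincipal(clientPrincipalPattern, addr);

        // Full principal names are compared; KerberosName short-name mapping
        // is no longer applied on this path.
        if ((clientPrincipal != null && !clientPrincipal.equals(user.getUserName()))
            || !acl.isUserAllowed(user)) {
          throw new AuthorizationException("User " + user
              + " is not authorized, expected client Kerberos principal is "
              + clientPrincipal);
        }
      }
    }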
Modified: hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestSecurityUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestSecurityUtil.java?rev=1059235&r1=1059234&r2=1059235&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestSecurityUtil.java (original)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestSecurityUtil.java Sat Jan 15 02:40:51 2011
@@ -18,14 +18,16 @@
 package org.apache.hadoop.security;

 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;

 import java.io.IOException;
+import java.net.InetAddress;

 import javax.security.auth.kerberos.KerberosPrincipal;

 import org.apache.hadoop.conf.Configuration;
-import org.junit.Assert;
 import org.junit.Test;
+import org.mockito.Mockito;

 public class TestSecurityUtil {
   @Test
@@ -50,28 +52,47 @@ public class TestSecurityUtil {
   private void verify(String original, String hostname, String expected)
       throws IOException {
-    assertTrue(SecurityUtil.getServerPrincipal(original, hostname).equals(
-        expected));
-    assertTrue(SecurityUtil.getServerPrincipal(original, null).equals(
-        expected));
-    assertTrue(SecurityUtil.getServerPrincipal(original, "").equals(
-        expected));
-    assertTrue(SecurityUtil.getServerPrincipal(original, "0.0.0.0").equals(
-        expected));
+    assertEquals(expected,
+        SecurityUtil.getServerPrincipal(original, hostname));
+
+    InetAddress addr = mockAddr(hostname);
+    assertEquals(expected,
+        SecurityUtil.getServerPrincipal(original, addr));
   }

+  private InetAddress mockAddr(String reverseTo) {
+    InetAddress mock = Mockito.mock(InetAddress.class);
+    Mockito.doReturn(reverseTo).when(mock).getCanonicalHostName();
+    return mock;
+  }
+
   @Test
   public void testGetServerPrincipal() throws IOException {
     String service = "hdfs/";
     String realm = "@REALM";
-    String hostname = SecurityUtil.getLocalHostName();
+    String hostname = "foohost";
+    String userPrincipal = "foo@FOOREALM";
     String shouldReplace = service + SecurityUtil.HOSTNAME_PATTERN + realm;
     String replaced = service + hostname + realm;
     verify(shouldReplace, hostname, replaced);
     String shouldNotReplace = service + SecurityUtil.HOSTNAME_PATTERN + "NAME"
         + realm;
     verify(shouldNotReplace, hostname, shouldNotReplace);
-    verify(shouldNotReplace, shouldNotReplace, shouldNotReplace);
+    verify(userPrincipal, hostname, userPrincipal);
+    // testing reverse DNS lookup doesn't happen
+    InetAddress notUsed = Mockito.mock(InetAddress.class);
+    assertEquals(shouldNotReplace,
+        SecurityUtil.getServerPrincipal(shouldNotReplace, notUsed));
+    Mockito.verify(notUsed, Mockito.never()).getCanonicalHostName();
+  }
+
+  @Test
+  public void testLocalHostNameForNullOrWild() throws Exception {
+    String local = SecurityUtil.getLocalHostName();
+    assertEquals("hdfs/" + local + "@REALM",
+        SecurityUtil.getServerPrincipal("hdfs/_HOST@REALM", (String)null));
+    assertEquals("hdfs/" + local + "@REALM",
+        SecurityUtil.getServerPrincipal("hdfs/_HOST@REALM", "0.0.0.0"));
   }

   @Test