Return-Path: Delivered-To: apmail-hadoop-common-commits-archive@www.apache.org Received: (qmail 9212 invoked from network); 27 Jan 2010 08:09:14 -0000 Received: from hermes.apache.org (HELO mail.apache.org) (140.211.11.3) by minotaur.apache.org with SMTP; 27 Jan 2010 08:09:14 -0000 Received: (qmail 2609 invoked by uid 500); 27 Jan 2010 08:09:13 -0000 Delivered-To: apmail-hadoop-common-commits-archive@hadoop.apache.org Received: (qmail 2541 invoked by uid 500); 27 Jan 2010 08:09:12 -0000 Mailing-List: contact common-commits-help@hadoop.apache.org; run by ezmlm Precedence: bulk List-Help: List-Unsubscribe: List-Post: List-Id: Reply-To: common-dev@hadoop.apache.org Delivered-To: mailing list common-commits@hadoop.apache.org Received: (qmail 2532 invoked by uid 99); 27 Jan 2010 08:09:12 -0000 Received: from nike.apache.org (HELO nike.apache.org) (192.87.106.230) by apache.org (qpsmtpd/0.29) with ESMTP; Wed, 27 Jan 2010 08:09:12 +0000 X-ASF-Spam-Status: No, hits=-2000.0 required=10.0 tests=ALL_TRUSTED X-Spam-Check-By: apache.org Received: from [140.211.11.4] (HELO eris.apache.org) (140.211.11.4) by apache.org (qpsmtpd/0.29) with ESMTP; Wed, 27 Jan 2010 08:09:02 +0000 Received: by eris.apache.org (Postfix, from userid 65534) id 1EC5D23888C2; Wed, 27 Jan 2010 08:08:32 +0000 (UTC) Content-Type: text/plain; charset="utf-8" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: svn commit: r903560 - in /hadoop/common/trunk: ./ .eclipse.templates/ ivy/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/ipc/ src/java/org/apache/hadoop/security/ src/java/org/apache/hadoop/security/authorize/ src/test/core/org/apache/hadoo... 
Date: Wed, 27 Jan 2010 08:08:31 -0000 To: common-commits@hadoop.apache.org From: omalley@apache.org X-Mailer: svnmailer-1.0.8 Message-Id: <20100127080832.1EC5D23888C2@eris.apache.org> X-Virus-Checked: Checked by ClamAV on apache.org Author: omalley Date: Wed Jan 27 08:08:29 2010 New Revision: 903560 URL: http://svn.apache.org/viewvc?rev=903560&view=rev Log: HADOOP-6299. Reimplement the UserGroupInformation to use the OS specific and Kerberos JAAS login. (omalley) Added: hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/AccessControlList.java hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestUserGroupInformation.java hadoop/common/trunk/src/test/core/org/apache/hadoop/security/authorize/TestAccessControlList.java Removed: hadoop/common/trunk/src/java/org/apache/hadoop/security/Group.java hadoop/common/trunk/src/java/org/apache/hadoop/security/PermissionChecker.java hadoop/common/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java hadoop/common/trunk/src/java/org/apache/hadoop/security/UnixUserGroupInformation.java hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ConfiguredPolicy.java hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ConnectionPermission.java hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestAccessControlList.java hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestUnixUserGroupInformation.java hadoop/common/trunk/src/test/core/org/apache/hadoop/security/authorize/TestConfiguredPolicy.java Modified: hadoop/common/trunk/.eclipse.templates/.classpath hadoop/common/trunk/CHANGES.txt hadoop/common/trunk/ivy.xml hadoop/common/trunk/ivy/libraries.properties hadoop/common/trunk/src/java/org/apache/hadoop/fs/CommonConfigurationKeys.java hadoop/common/trunk/src/java/org/apache/hadoop/fs/FileSystem.java hadoop/common/trunk/src/java/org/apache/hadoop/ipc/ConnectionHeader.java hadoop/common/trunk/src/java/org/apache/hadoop/ipc/RPC.java 
hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java hadoop/common/trunk/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java hadoop/common/trunk/src/java/org/apache/hadoop/security/GroupMappingServiceProvider.java hadoop/common/trunk/src/java/org/apache/hadoop/security/Groups.java hadoop/common/trunk/src/java/org/apache/hadoop/security/RefreshUserToGroupMappingsProtocol.java hadoop/common/trunk/src/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java hadoop/common/trunk/src/java/org/apache/hadoop/security/User.java hadoop/common/trunk/src/java/org/apache/hadoop/security/UserGroupInformation.java hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/Service.java hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java Modified: hadoop/common/trunk/.eclipse.templates/.classpath URL: http://svn.apache.org/viewvc/hadoop/common/trunk/.eclipse.templates/.classpath?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/.eclipse.templates/.classpath (original) +++ hadoop/common/trunk/.eclipse.templates/.classpath Wed Jan 27 08:08:29 2010 @@ -35,7 +35,8 @@ - + + Modified: hadoop/common/trunk/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/CHANGES.txt (original) +++ hadoop/common/trunk/CHANGES.txt Wed Jan 27 08:08:29 2010 @@ -4,6 +4,9 @@ INCOMPATIBLE CHANGES + HADOOP-6299. Reimplement the UserGroupInformation to use the OS + specific and Kerberos JAAS login. (omalley) + NEW FEATURES HADOOP-6284. 
Add a new parameter, HADOOP_JAVA_PLATFORM_OPTS, to Modified: hadoop/common/trunk/ivy.xml URL: http://svn.apache.org/viewvc/hadoop/common/trunk/ivy.xml?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/ivy.xml (original) +++ hadoop/common/trunk/ivy.xml Wed Jan 27 08:08:29 2010 @@ -294,5 +294,10 @@ rev="${aspectj.version}" conf="common->default"> + + Modified: hadoop/common/trunk/ivy/libraries.properties URL: http://svn.apache.org/viewvc/hadoop/common/trunk/ivy/libraries.properties?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/ivy/libraries.properties (original) +++ hadoop/common/trunk/ivy/libraries.properties Wed Jan 27 08:08:29 2010 @@ -79,3 +79,6 @@ xerces.version=1.4.4 aspectj.version=1.6.5 + +mockito-all.version=1.8.0 + Modified: hadoop/common/trunk/src/java/org/apache/hadoop/fs/CommonConfigurationKeys.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/src/java/org/apache/hadoop/fs/CommonConfigurationKeys.java (original) +++ hadoop/common/trunk/src/java/org/apache/hadoop/fs/CommonConfigurationKeys.java Wed Jan 27 08:08:29 2010 @@ -128,5 +128,6 @@ public static final String HADOOP_UTIL_HASH_TYPE_DEFAULT = "murmur"; public static final String HADOOP_SECURITY_GROUP_MAPPING = "hadoop.security.group.mapping"; public static final String HADOOP_SECURITY_GROUPS_CACHE_SECS = "hadoop.security.groups.cache.secs"; + public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication"; } Modified: hadoop/common/trunk/src/java/org/apache/hadoop/fs/FileSystem.java URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/FileSystem.java?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/src/java/org/apache/hadoop/fs/FileSystem.java (original) +++ hadoop/common/trunk/src/java/org/apache/hadoop/fs/FileSystem.java Wed Jan 27 08:08:29 2010 @@ -35,8 +35,6 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.regex.Pattern; -import javax.security.auth.login.LoginException; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -1318,9 +1316,6 @@ /** Default pattern character: Character set close. */ private static final char PAT_SET_CLOSE = ']'; - GlobFilter() { - } - GlobFilter(String filePattern) throws IOException { setRegex(filePattern); } @@ -1870,15 +1865,9 @@ scheme = uri.getScheme()==null?"":uri.getScheme().toLowerCase(); authority = uri.getAuthority()==null?"":uri.getAuthority().toLowerCase(); this.unique = unique; - UserGroupInformation ugi = UserGroupInformation.readFrom(conf); - if (ugi == null) { - try { - ugi = UserGroupInformation.login(conf); - } catch(LoginException e) { - LOG.warn("uri=" + uri, e); - } - } - username = ugi == null? 
null: ugi.getUserName(); + + UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); + username = ugi.getUserName(); } /** {@inheritDoc} */ Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/ConnectionHeader.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/ConnectionHeader.java?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/ConnectionHeader.java (original) +++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/ConnectionHeader.java Wed Jan 27 08:08:29 2010 @@ -25,7 +25,6 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; -import org.apache.hadoop.security.UnixUserGroupInformation; import org.apache.hadoop.security.UserGroupInformation; /** @@ -36,7 +35,7 @@ public static final Log LOG = LogFactory.getLog(ConnectionHeader.class); private String protocol; - private UserGroupInformation ugi = new UnixUserGroupInformation(); + private UserGroupInformation ugi = null; public ConnectionHeader() {} @@ -60,9 +59,10 @@ protocol = null; } - boolean ugiPresent = in.readBoolean(); - if (ugiPresent) { - ugi.readFields(in); + boolean ugiUsernamePresent = in.readBoolean(); + if (ugiUsernamePresent) { + String username = in.readUTF(); + ugi = UserGroupInformation.createRemoteUser(username); } else { ugi = null; } @@ -73,7 +73,7 @@ Text.writeString(out, (protocol == null) ? 
"" : protocol); if (ugi != null) { out.writeBoolean(true); - ugi.write(out); + out.writeUTF(ugi.getUserName()); } else { out.writeBoolean(false); } Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/RPC.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/RPC.java?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/RPC.java (original) +++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/RPC.java Wed Jan 27 08:08:29 2010 @@ -29,7 +29,6 @@ import java.util.HashMap; import javax.net.SocketFactory; -import javax.security.auth.login.LoginException; import org.apache.commons.logging.*; @@ -205,12 +204,7 @@ public static Object getProxy(Class protocol, long clientVersion, InetSocketAddress addr, Configuration conf, SocketFactory factory) throws IOException { - UserGroupInformation ugi = null; - try { - ugi = UserGroupInformation.login(conf); - } catch (LoginException le) { - throw new RuntimeException("Couldn't login!"); - } + UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); return getProxy(protocol, clientVersion, addr, ugi, conf, factory); } Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java (original) +++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/Server.java Wed Jan 27 08:08:29 2010 @@ -18,12 +18,18 @@ package org.apache.hadoop.ipc; -import java.io.IOException; -import java.io.DataInputStream; -import java.io.DataOutputStream; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; - +import java.io.DataInputStream; +import java.io.DataOutputStream; 
+import java.io.IOException; +import java.net.BindException; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.ServerSocket; +import java.net.Socket; +import java.net.SocketException; +import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.nio.channels.CancelledKeyException; import java.nio.channels.ClosedChannelException; @@ -33,41 +39,30 @@ import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.nio.channels.WritableByteChannel; - -import java.net.BindException; -import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.net.ServerSocket; -import java.net.Socket; -import java.net.SocketException; -import java.net.UnknownHostException; - import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Collections; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; -import java.util.Iterator; import java.util.Map; import java.util.Random; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.LinkedBlockingQueue; -import javax.security.auth.Subject; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableUtils; -import org.apache.hadoop.util.ReflectionUtils; -import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.ipc.metrics.RpcMetrics; -import org.apache.hadoop.security.authorize.AuthorizationException; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authorize.AuthorizationException; +import org.apache.hadoop.security.authorize.ServiceAuthorizationManager; +import org.apache.hadoop.util.ReflectionUtils; +import 
org.apache.hadoop.util.StringUtils; /** An abstract IPC service. IPC calls take a single {@link Writable} as a * parameter, and return a {@link Writable} as their value. A service runs on @@ -76,6 +71,7 @@ * @see Client */ public abstract class Server { + private final boolean authorize; /** * The first four bytes of Hadoop RPC connections @@ -728,7 +724,7 @@ ConnectionHeader header = new ConnectionHeader(); Class protocol; - Subject user = null; + UserGroupInformation user = null; // Fake 'call' for failed authorization response private static final int AUTHROIZATION_FAILED_CALLID = -1; @@ -899,14 +895,7 @@ throw new IOException("Unknown protocol: " + header.getProtocol()); } - // TODO: Get the user name from the GSS API for Kerberbos-based security - // Create the user subject; however use the groups as defined on the - // server-side, don't trust the user groups provided by the client - UserGroupInformation ugi = header.getUgi(); - user = null; - if(ugi != null) { - user = SecurityUtil.getSubject(conf, header.getUgi().getUserName()); - } + user = header.getUgi(); } private void processData() throws IOException, InterruptedException { @@ -968,24 +957,23 @@ try { // Make the call as the user via Subject.doAs, thus associating // the call with the Subject - value = - Subject.doAs(call.connection.user, - new PrivilegedExceptionAction() { - @Override - public Writable run() throws Exception { - // make the call - return call(call.connection.protocol, - call.param, call.timestamp); - - } - } - ); - - } catch (PrivilegedActionException pae) { - Exception e = pae.getException(); - LOG.info(getName()+", call "+call+": error: " + e, e); - errorClass = e.getClass().getName(); - error = StringUtils.stringifyException(e); + if (call.connection.user == null) { + value = call(call.connection.protocol, call.param, + call.timestamp); + } else { + value = + call.connection.user.doAs + (new PrivilegedExceptionAction() { + @Override + public Writable run() throws Exception { + // 
make the call + return call(call.connection.protocol, + call.param, call.timestamp); + + } + } + ); + } } catch (Throwable e) { LOG.info(getName()+", call "+call+": error: " + e, e); errorClass = e.getClass().getName(); @@ -1045,6 +1033,9 @@ this.maxIdleTime = 2*conf.getInt("ipc.client.connection.maxidletime", 1000); this.maxConnectionsToNuke = conf.getInt("ipc.client.kill.max", 10); this.thresholdIdleConnections = conf.getInt("ipc.client.idlethreshold", 4000); + this.authorize = + conf.getBoolean(ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG, + false); // Start the listener here and let it bind to the port listener = new Listener(); @@ -1176,8 +1167,20 @@ * @param connection incoming connection * @throws AuthorizationException when the client isn't authorized to talk the protocol */ - public void authorize(Subject user, ConnectionHeader connection) - throws AuthorizationException {} + public void authorize(UserGroupInformation user, + ConnectionHeader connection + ) throws AuthorizationException { + if (authorize) { + Class protocol = null; + try { + protocol = getProtocolClass(connection.getProtocol(), getConf()); + } catch (ClassNotFoundException cfne) { + throw new AuthorizationException("Unknown protocol: " + + connection.getProtocol()); + } + ServiceAuthorizationManager.authorize(user, protocol); + } + } /** * The number of open RPC conections Modified: hadoop/common/trunk/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java (original) +++ hadoop/common/trunk/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java Wed Jan 27 08:08:29 2010 @@ -24,23 +24,17 @@ import java.lang.reflect.InvocationHandler; import 
java.lang.reflect.InvocationTargetException; -import java.net.ConnectException; import java.net.InetSocketAddress; -import java.net.SocketTimeoutException; import java.io.*; import java.util.Map; import java.util.HashMap; import javax.net.SocketFactory; -import javax.security.auth.Subject; -import javax.security.auth.login.LoginException; import org.apache.commons.logging.*; import org.apache.hadoop.io.*; -import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.security.authorize.AuthorizationException; import org.apache.hadoop.security.authorize.ServiceAuthorizationManager; import org.apache.hadoop.conf.*; import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate; @@ -291,7 +285,6 @@ public static class Server extends RPC.Server { private Object instance; private boolean verbose; - private boolean authorize = false; /** Construct an RPC server. * @param instance the instance whose methods will be called @@ -325,9 +318,6 @@ super(bindAddress, port, Invocation.class, numHandlers, conf, classNameBase(instance.getClass().getName())); this.instance = instance; this.verbose = verbose; - this.authorize = - conf.getBoolean(ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG, - false); } public Writable call(Class protocol, Writable param, long receivedTime) @@ -390,21 +380,6 @@ throw ioe; } } - - @Override - public void authorize(Subject user, ConnectionHeader connection) - throws AuthorizationException { - if (authorize) { - Class protocol = null; - try { - protocol = getProtocolClass(connection.getProtocol(), getConf()); - } catch (ClassNotFoundException cfne) { - throw new AuthorizationException("Unknown protocol: " + - connection.getProtocol()); - } - ServiceAuthorizationManager.authorize(user, protocol); - } - } } private static void log(String value) { Modified: hadoop/common/trunk/src/java/org/apache/hadoop/security/GroupMappingServiceProvider.java URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/GroupMappingServiceProvider.java?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/src/java/org/apache/hadoop/security/GroupMappingServiceProvider.java (original) +++ hadoop/common/trunk/src/java/org/apache/hadoop/security/GroupMappingServiceProvider.java Wed Jan 27 08:08:29 2010 @@ -27,10 +27,10 @@ interface GroupMappingServiceProvider { /** - * Get all various {@link Group} memberships of a given {@link User}. + * Get all various group memberships of a given user. * Returns EMPTY list in case of non-existing user - * @param user User name - * @return Group memberships of user + * @param user User's name + * @return group memberships of user * @throws IOException */ public List getGroups(String user) throws IOException; Modified: hadoop/common/trunk/src/java/org/apache/hadoop/security/Groups.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/Groups.java?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/src/java/org/apache/hadoop/security/Groups.java (original) +++ hadoop/common/trunk/src/java/org/apache/hadoop/security/Groups.java Wed Jan 27 08:08:29 2010 @@ -32,10 +32,10 @@ /** * A user-to-groups mapping service. * - * {@link Groups} allows for server to get the various {@link Group} memberships - * of a given {@link User} via the {@link #getGroups(String)} call, thus ensuring - * a consistent user-to-groups mapping and protects against vagaries of different - * mappings on servers and clients in a Hadoop cluster. 
+ * {@link Groups} allows for server to get the various group memberships + * of a given user via the {@link #getGroups(String)} call, thus ensuring + * a consistent user-to-groups mapping and protects against vagaries of + * different mappings on servers and clients in a Hadoop cluster. */ public class Groups { private static final Log LOG = LogFactory.getLog(Groups.class); @@ -62,9 +62,9 @@ } /** - * Get the {@link Group} memberships of a given {@link User}. - * @param user User name - * @return the Group memberships of user + * Get the group memberships of a given user. + * @param user User's name + * @return the group memberships of the user * @throws IOException */ public List getGroups(String user) throws IOException { @@ -109,4 +109,18 @@ return groups; } } + + private static Groups GROUPS = null; + + /** + * Get the groups being used to map user-to-groups. + * @return the groups being used to map user-to-groups. + */ + public static Groups getUserToGroupsMappingService(Configuration conf) { + if(GROUPS == null) { + LOG.debug(" Creating new Groups object"); + GROUPS = new Groups(conf); + } + return GROUPS; + } } Modified: hadoop/common/trunk/src/java/org/apache/hadoop/security/RefreshUserToGroupMappingsProtocol.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/RefreshUserToGroupMappingsProtocol.java?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/src/java/org/apache/hadoop/security/RefreshUserToGroupMappingsProtocol.java (original) +++ hadoop/common/trunk/src/java/org/apache/hadoop/security/RefreshUserToGroupMappingsProtocol.java Wed Jan 27 08:08:29 2010 @@ -34,7 +34,7 @@ public static final long versionID = 1L; /** - * Refresh {@link User} to {@link Group} mappings. + * Refresh user to group mappings. 
* @param conf * @throws IOException */ Modified: hadoop/common/trunk/src/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/src/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java (original) +++ hadoop/common/trunk/src/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java Wed Jan 27 08:08:29 2010 @@ -30,9 +30,9 @@ import org.apache.hadoop.util.Shell.ExitCodeException; /** - * A simple shell-based implementation of {@link GroupMappingServiceProvider} which - * exec's the groups shell command to fetch the {@link Group} - * memberships of a given {@link User}. + * A simple shell-based implementation of {@link GroupMappingServiceProvider} + * that exec's the groups shell command to fetch the group + * memberships of a given user. */ public class ShellBasedUnixGroupsMapping implements GroupMappingServiceProvider { Map> userGroups = Modified: hadoop/common/trunk/src/java/org/apache/hadoop/security/User.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/User.java?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/src/java/org/apache/hadoop/security/User.java (original) +++ hadoop/common/trunk/src/java/org/apache/hadoop/security/User.java Wed Jan 27 08:08:29 2010 @@ -20,51 +20,62 @@ import java.security.Principal; /** - * The username of a user. + * Save the full and short name of the user as a principal. This allows us to + * have a single type that we always look for when picking up user names. 
*/ -public class User implements Principal { - final String user; +class User implements Principal { + private final String fullName; + private final String shortName; + + public User(String name) { + fullName = name; + int atIdx = name.indexOf('@'); + if (atIdx == -1) { + shortName = name; + } else { + int slashIdx = name.indexOf('/'); + if (slashIdx == -1 || atIdx < slashIdx) { + shortName = name.substring(0, atIdx); + } else { + shortName = name.substring(0, slashIdx); + } + } + } /** - * Create a new User with the given username. - * @param user user name + * Get the full name of the user. */ - public User(String user) { - this.user = user; - } - @Override public String getName() { - return user; + return fullName; } - + + /** + * Get the user name up to the first '/' or '@' + * @return the leading part of the user name + */ + public String getShortName() { + return shortName; + } + @Override - public String toString() { - return user; + public boolean equals(Object o) { + if (this == o) { + return true; + } else if (o == null || getClass() != o.getClass()) { + return false; + } else { + return fullName.equals(((User) o).fullName); + } } - + @Override public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((user == null) ? 
0 : user.hashCode()); - return result; + return fullName.hashCode(); } - + @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - User other = (User) obj; - if (user == null) { - if (other.user != null) - return false; - } else if (!user.equals(other.user)) - return false; - return true; + public String toString() { + return fullName; } } Modified: hadoop/common/trunk/src/java/org/apache/hadoop/security/UserGroupInformation.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/UserGroupInformation.java?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/src/java/org/apache/hadoop/security/UserGroupInformation.java (original) +++ hadoop/common/trunk/src/java/org/apache/hadoop/security/UserGroupInformation.java Wed Jan 27 08:08:29 2010 @@ -17,113 +17,569 @@ */ package org.apache.hadoop.security; +import static org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.Project.HDFS; +import static org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.Project.MAPREDUCE; +import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION; + import java.io.IOException; +import java.lang.reflect.UndeclaredThrowableException; +import java.security.AccessControlContext; import java.security.AccessController; import java.security.Principal; +import java.security.PrivilegedAction; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; import java.util.Set; import javax.security.auth.Subject; +import 
javax.security.auth.callback.CallbackHandler; +import javax.security.auth.kerberos.KerberosPrincipal; +import javax.security.auth.login.AppConfigurationEntry; +import javax.security.auth.login.LoginContext; import javax.security.auth.login.LoginException; +import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag; +import javax.security.auth.spi.LoginModule; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.Writable; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.security.token.TokenIdentifier; + +import com.sun.security.auth.NTUserPrincipal; +import com.sun.security.auth.UnixPrincipal; +import com.sun.security.auth.module.Krb5LoginModule; -/** A {@link Writable} abstract class for storing user and groups information. +/** + * User and group information for Hadoop. + * This class wraps around a JAAS Subject and provides methods to determine the + * user's username and groups. It supports both the Windows, Unix and Kerberos + * login modules. */ -public abstract class UserGroupInformation implements Writable, Principal { - public static final Log LOG = LogFactory.getLog(UserGroupInformation.class); - private static UserGroupInformation LOGIN_UGI = null; - - private static final ThreadLocal currentUser = - new ThreadLocal(); - - /** @return the {@link UserGroupInformation} for the current thread */ - public static UserGroupInformation getCurrentUGI() { - Subject user = getCurrentUser(); - - if (user == null) { - user = currentUser.get(); +public class UserGroupInformation { + private static final Log LOG = LogFactory.getLog(UserGroupInformation.class); + + /** + * A login module that looks at the Kerberos, Unix, or Windows principal and + * adds the corresponding UserName. 
+ */ + @InterfaceAudience.Private + public static class HadoopLoginModule implements LoginModule { + private Subject subject; + + @Override + public boolean abort() throws LoginException { + return true; + } + + private T getCanonicalUser(Class cls) { + for(T user: subject.getPrincipals(cls)) { + return user; + } + return null; + } + + @Override + public boolean commit() throws LoginException { + Principal user = null; + // if we are using kerberos, try it out + if (useKerberos) { + user = getCanonicalUser(KerberosPrincipal.class); + } + // if we don't have a kerberos user, use the OS user if (user == null) { - return null; + user = getCanonicalUser(OS_PRINCIPAL_CLASS); } + // if we found the user, add our principal + if (user != null) { + subject.getPrincipals().add(new User(user.getName())); + return true; + } + LOG.error("Can't find user in " + subject); + throw new LoginException("Can't find user name"); + } + + @Override + public void initialize(Subject subject, CallbackHandler callbackHandler, + Map sharedState, Map options) { + this.subject = subject; + } + + @Override + public boolean login() throws LoginException { + return true; + } + + @Override + public boolean logout() throws LoginException { + return true; + } + } + + /** Are the static variables that depend on configuration initialized? */ + private static boolean isInitialized = false; + /** Should we use Kerberos configuration? */ + private static boolean useKerberos; + /** Server-side groups fetching service */ + private static Groups groups; + + /** + * A method to initialize the fields that depend on a configuration. + * Must be called before useKerberos or groups is used. + */ + private static synchronized void ensureInitialized() { + if (!isInitialized) { + initialize(new Configuration()); + } + } + + /** + * Set the configuration values for UGI. 
+ * @param conf the configuration to use + */ + private static synchronized void initialize(Configuration conf) { + String value = conf.get(HADOOP_SECURITY_AUTHENTICATION); + if ("simple".equals(value)) { + useKerberos = false; + } else if (value == null || "kerberos".equals(value)) { + useKerberos = true; + } else { + throw new IllegalArgumentException("Invalid attribute value for " + + HADOOP_SECURITY_AUTHENTICATION + + " of " + value); + } + // If we haven't set up testing groups, use the configuration to find it + if (!(groups instanceof TestingGroups)) { + groups = Groups.getUserToGroupsMappingService(conf); + } + // Set the configuration for JAAS to be the Hadoop configuration. + // This is done here rather than a static initializer to avoid a + // circular dependence. + javax.security.auth.login.Configuration.setConfiguration + (new HadoopConfiguration()); + isInitialized = true; + } + + /** + * Set the static configuration for UGI. + * In particular, set the security authentication mechanism and the + * group look up service. + * @param conf the configuration to use + */ + public static void setConfiguration(Configuration conf) { + initialize(conf); + } + + /** + * Determine if UserGroupInformation is using Kerberos to determine + * user identities or is relying on simple authentication + * + * @return true if UGI is working in a secure environment + */ + public static boolean isSecurityEnabled() { + ensureInitialized(); + return useKerberos; + } + + /** + * Information about the logged in user. 
+ */ + private static UserGroupInformation loginUser = null; + private static String keytabPrincipal = null; + private static String keytabFile = null; + + private final Subject subject; + private final Set> tokens = + new LinkedHashSet>(); + + private static final String OS_LOGIN_MODULE_NAME; + private static final Class OS_PRINCIPAL_CLASS; + private static final boolean windows = + System.getProperty("os.name").startsWith("Windows"); + static { + if (windows) { + OS_LOGIN_MODULE_NAME = "com.sun.security.auth.module.NTLoginModule"; + OS_PRINCIPAL_CLASS = NTUserPrincipal.class; + } else { + OS_LOGIN_MODULE_NAME = "com.sun.security.auth.module.UnixLoginModule"; + OS_PRINCIPAL_CLASS = UnixPrincipal.class; } + } + + /** + * A JAAS configuration that defines the login modules that we want + * to use for login. + */ + private static class HadoopConfiguration + extends javax.security.auth.login.Configuration { + private static final String SIMPLE_CONFIG_NAME = "hadoop-simple"; + private static final String USER_KERBEROS_CONFIG_NAME = + "hadoop-user-kerberos"; + private static final String KEYTAB_KERBEROS_CONFIG_NAME = + "hadoop-keytab-kerberos"; - Set ugiPrincipals = - user.getPrincipals(UserGroupInformation.class); + private static final AppConfigurationEntry OS_SPECIFIC_LOGIN = + new AppConfigurationEntry(OS_LOGIN_MODULE_NAME, + LoginModuleControlFlag.REQUIRED, + new HashMap()); + private static final AppConfigurationEntry HADOOP_LOGIN = + new AppConfigurationEntry(HadoopLoginModule.class.getName(), + LoginModuleControlFlag.REQUIRED, + new HashMap()); + private static final Map USER_KERBEROS_OPTIONS = + new HashMap(); + static { + USER_KERBEROS_OPTIONS.put("doNotPrompt", "true"); + USER_KERBEROS_OPTIONS.put("useTicketCache", "true"); + } + private static final AppConfigurationEntry USER_KERBEROS_LOGIN = + new AppConfigurationEntry(Krb5LoginModule.class.getName(), + LoginModuleControlFlag.OPTIONAL, + USER_KERBEROS_OPTIONS); + private static final Map 
KEYTAB_KERBEROS_OPTIONS = + new HashMap(); + static { + KEYTAB_KERBEROS_OPTIONS.put("doNotPrompt", "true"); + KEYTAB_KERBEROS_OPTIONS.put("useKeyTab", "true"); + KEYTAB_KERBEROS_OPTIONS.put("storeKey", "true"); + } + private static final AppConfigurationEntry KEYTAB_KERBEROS_LOGIN = + new AppConfigurationEntry(Krb5LoginModule.class.getName(), + LoginModuleControlFlag.REQUIRED, + KEYTAB_KERBEROS_OPTIONS); - UserGroupInformation ugi = null; - if (ugiPrincipals != null && ugiPrincipals.size() == 1) { - ugi = ugiPrincipals.iterator().next(); - if (ugi == null) { - throw new RuntimeException("Cannot find _current user_ UGI in the Subject!"); + private static final AppConfigurationEntry[] SIMPLE_CONF = + new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, HADOOP_LOGIN}; + + private static final AppConfigurationEntry[] USER_KERBEROS_CONF = + new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN, USER_KERBEROS_LOGIN, + HADOOP_LOGIN}; + + private static final AppConfigurationEntry[] KEYTAB_KERBEROS_CONF = + new AppConfigurationEntry[]{KEYTAB_KERBEROS_LOGIN, HADOOP_LOGIN}; + + @Override + public AppConfigurationEntry[] getAppConfigurationEntry(String appName) { + if (SIMPLE_CONFIG_NAME.equals(appName)) { + return SIMPLE_CONF; + } else if (USER_KERBEROS_CONFIG_NAME.equals(appName)) { + return USER_KERBEROS_CONF; + } else if (KEYTAB_KERBEROS_CONFIG_NAME.equals(appName)) { + KEYTAB_KERBEROS_OPTIONS.put("keyTab", keytabFile); + KEYTAB_KERBEROS_OPTIONS.put("principal", keytabPrincipal); + return KEYTAB_KERBEROS_CONF; } - } else { - throw new RuntimeException("Cannot resolve current user from subject, " + - "which had " + ugiPrincipals.size() + - " UGI principals!"); + return null; } - return ugi; } - /** - * Set the {@link UserGroupInformation} for the current thread - * @deprecated Use {@link #setCurrentUser(UserGroupInformation)} - */ - @Deprecated - public static void setCurrentUGI(UserGroupInformation ugi) { - setCurrentUser(ugi); + /** + * Create a UserGroupInformation for the given 
subject. + * This does not change the subject or acquire new credentials. + * @param subject the user's subject + */ + UserGroupInformation(Subject subject) { + this.subject = subject; } /** - * Return the current user Subject. - * @return the current user Subject + * Return the current user, including any doAs in the current stack. + * @return the current user + * @throws IOException if login fails */ - static Subject getCurrentUser() { - return Subject.getSubject(AccessController.getContext()); + public static UserGroupInformation getCurrentUser() throws IOException { + AccessControlContext context = AccessController.getContext(); + Subject subject = Subject.getSubject(context); + return subject == null ? getLoginUser() : new UserGroupInformation(subject); } - + + /** + * Get the currently logged in user. + * @return the logged in user + * @throws IOException if login fails + */ + public synchronized + static UserGroupInformation getLoginUser() throws IOException { + if (loginUser == null) { + try { + LoginContext login; + if (isSecurityEnabled()) { + login = new LoginContext(HadoopConfiguration.USER_KERBEROS_CONFIG_NAME); + } else { + login = new LoginContext(HadoopConfiguration.SIMPLE_CONFIG_NAME); + } + login.login(); + loginUser = new UserGroupInformation(login.getSubject()); + } catch (LoginException le) { + throw new IOException("failure to login", le); + } + } + return loginUser; + } + + /** + * Log a user in from a keytab file. Loads a user identity from a keytab + * file and login them in. They become the currently logged-in user. 
+ * @param user the principal name to load from the keytab + * @param path the path to the keytab file + * @throws IOException if the keytab file can't be read + */ + public synchronized + static void loginUserFromKeytab(String user, + String path + ) throws IOException { + if (!isSecurityEnabled()) + return; + + keytabFile = path; + keytabPrincipal = user; + try { + LoginContext login = + new LoginContext(HadoopConfiguration.KEYTAB_KERBEROS_CONFIG_NAME); + login.login(); + loginUser = new UserGroupInformation(login.getSubject()); + } catch (LoginException le) { + throw new IOException("Login failure for " + user + " from keytab " + + path, le); + } + } + /** - * Set the {@link UserGroupInformation} for the current thread - * WARNING - This method should be used only in test cases and other exceptional - * cases! - * @param ugi {@link UserGroupInformation} for the current thread + * Create a user from a login name. It is intended to be used for remote + * users in RPC, since it won't have any credentials. + * @param user the full user principal name, must not be empty or null + * @return the UserGroupInformation for the remote user. */ - public static void setCurrentUser(UserGroupInformation ugi) { - Subject user = SecurityUtil.getSubject(ugi); - currentUser.set(user); + public static UserGroupInformation createRemoteUser(String user) { + if (user == null || "".equals(user)) { + throw new IllegalArgumentException("Null user"); + } + Subject subject = new Subject(); + subject.getPrincipals().add(new User(user)); + return new UserGroupInformation(subject); } - /** Get username + /** + * This class is used for storing the groups for testing. It stores a local + * map that has the translation of usernames to groups. 
+ */ + private static class TestingGroups extends Groups { + private final Map> userToGroupsMapping = + new HashMap>(); + + private TestingGroups() { + super(new org.apache.hadoop.conf.Configuration()); + } + + @Override + public List getGroups(String user) { + List result = userToGroupsMapping.get(user); + if (result == null) { + result = new ArrayList(); + } + return result; + } + + private void setUserGroups(String user, String[] groups) { + userToGroupsMapping.put(user, Arrays.asList(groups)); + } + } + + /** + * Create a UGI for testing HDFS and MapReduce + * @param user the full user principal name + * @param userGroups the names of the groups that the user belongs to + * @return a fake user for running unit tests + */ + @InterfaceAudience.LimitedPrivate({HDFS, MAPREDUCE}) + public static UserGroupInformation createUserForTesting(String user, + String[] userGroups) { + ensureInitialized(); + UserGroupInformation ugi = createRemoteUser(user); + // make sure that the testing object is setup + if (!(groups instanceof TestingGroups)) { + groups = new TestingGroups(); + } + // add the user groups + ((TestingGroups) groups).setUserGroups(ugi.getShortUserName(), userGroups); + return ugi; + } + + /** + * Get the user's login name. + * @return the user's name up to the first '/' or '@'. + */ + public String getShortUserName() { + for (User p: subject.getPrincipals(User.class)) { + return p.getShortName(); + } + return null; + } + + /** + * Get the user's full principal name. + * @return the user's full principal name. 
+ */ + public String getUserName() { + for (User p: subject.getPrincipals(User.class)) { + return p.getName(); + } + return null; + } + + /** + * Add a token to this UGI * - * @return the user's name + * @param token Token to be added + * @return true on successful add of new token */ - public abstract String getUserName(); + public synchronized boolean addToken(Token token) { + return tokens.add(token); + } - /** Get the name of the groups that the user belong to + /** + * Obtain the collection of tokens associated with this user. * - * @return an array of group names + * @return an unmodifiable collection of tokens associated with user + */ + public synchronized Collection> getTokens() { + return Collections.unmodifiableSet(tokens); + } + + /** + * Get the group names for this user. + * @return the list of users with the primary group first. If the command + * fails, it returns an empty list. + */ + public synchronized String[] getGroupNames() { + ensureInitialized(); + try { + List result = groups.getGroups(getShortUserName()); + return result.toArray(new String[result.size()]); + } catch (IOException ie) { + LOG.warn("No groups available for user " + getShortUserName()); + return new String[0]; + } + } + + /** + * Return the username. */ - public abstract String[] getGroupNames(); + @Override + public String toString() { + return getUserName(); + } - /** Login and return a UserGroupInformation object. */ - public static UserGroupInformation login(Configuration conf - ) throws LoginException { - if (LOGIN_UGI == null) { - LOGIN_UGI = UnixUserGroupInformation.login(conf); + /** + * Compare the subjects to see if they are equal to each other. 
+ */ + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } else if (o == null || getClass() != o.getClass()) { + return false; + } else { + return subject.equals(((UserGroupInformation) o).subject); } - return LOGIN_UGI; } - /** Read a {@link UserGroupInformation} from conf */ - public static UserGroupInformation readFrom(Configuration conf - ) throws IOException { + /** + * Return the hash of the subject. + */ + @Override + public int hashCode() { + return subject.hashCode(); + } + + /** + * Get the underlying subject from this ugi. + * @return the subject that represents this user. + */ + protected Subject getSubject() { + return subject; + } + + /** + * Run the given action as the user. + * @param the return type of the run method + * @param action the method to execute + * @return the value from the run method + */ + public T doAs(PrivilegedAction action) { + return Subject.doAs(subject, action); + } + + /** + * Run the given action as the user, potentially throwing an exception. 
+ * @param the return type of the run method + * @param action the method to execute + * @return the value from the run method + * @throws IOException if the action throws an IOException + * @throws Error if the action throws an Error + * @throws RuntimeException if the action throws a RuntimeException + * @throws InterruptedException if the action throws an InterruptedException + * @throws UndeclaredThrowableException if the action throws something else + */ + public T doAs(PrivilegedExceptionAction action + ) throws IOException, InterruptedException { try { - return UnixUserGroupInformation.readFromConf(conf, - UnixUserGroupInformation.UGI_PROPERTY_NAME); - } catch (LoginException e) { - throw (IOException)new IOException().initCause(e); + return Subject.doAs(subject, action); + } catch (PrivilegedActionException pae) { + Throwable cause = pae.getCause(); + if (cause instanceof IOException) { + throw (IOException) cause; + } else if (cause instanceof Error) { + throw (Error) cause; + } else if (cause instanceof RuntimeException) { + throw (RuntimeException) cause; + } else if (cause instanceof InterruptedException) { + throw (InterruptedException) cause; + } else { + throw new UndeclaredThrowableException(pae,"Unknown exception in doAs"); + } + } + } + + private void print() throws IOException { + System.out.println("User: " + getUserName()); + System.out.print("Group Ids: "); + System.out.println(); + String[] groups = getGroupNames(); + System.out.print("Groups: "); + for(int i=0; i < groups.length; i++) { + System.out.print(groups[i] + " "); + } + System.out.println(); + } + + /** + * A test method to print out the current user's UGI. + * @param args if there are two arguments, read the user from the keytab + * and print it out. 
+ * @throws Exception + */ + public static void main(String [] args) throws Exception { + System.out.println("Getting UGI for current user"); + UserGroupInformation ugi = getCurrentUser(); + ugi.print(); + System.out.println("UGI: " + ugi); + System.out.println("============================================================"); + + if (args.length == 2) { + System.out.println("Getting UGI from keytab...."); + loginUserFromKeytab(args[0], args[1]); + getCurrentUser().print(); + System.out.println("Keytab: " + ugi); } } } Added: hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/AccessControlList.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/AccessControlList.java?rev=903560&view=auto ============================================================================== --- hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/AccessControlList.java (added) +++ hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/AccessControlList.java Wed Jan 27 08:08:29 2010 @@ -0,0 +1,143 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.security.authorize; + +import java.util.Iterator; +import java.util.Set; +import java.util.TreeSet; + +import org.apache.hadoop.security.UserGroupInformation; + +/** + * Class representing a configured access control list. + */ +public class AccessControlList { + + // Indicates an ACL string that represents access to all users + public static final String WILDCARD_ACL_VALUE = "*"; + + // Set of users who are granted access. + private Set users; + // Set of groups which are granted access + private Set groups; + // Whether all users are granted access. + private boolean allAllowed; + + /** + * Construct a new ACL from a String representation of the same. + * + * The String is a comma separated list of users and groups. + * The user list comes first and is separated by a space followed + * by the group list. E.g. "user1,user2 group1,group2" + * + * @param aclString String representation of the ACL + */ + public AccessControlList(String aclString) { + users = new TreeSet(); + groups = new TreeSet(); + if (aclString.contains(WILDCARD_ACL_VALUE) && + aclString.trim().equals(WILDCARD_ACL_VALUE)) { + allAllowed = true; + } else { + String[] userGroupStrings = aclString.split(" ", 2); + + if (userGroupStrings.length >= 1) { + String[] usersStr = userGroupStrings[0].split(","); + if (usersStr.length >= 1) { + addToSet(users, usersStr); + } + } + + if (userGroupStrings.length == 2) { + String[] groupsStr = userGroupStrings[1].split(","); + if (groupsStr.length >= 1) { + addToSet(groups, groupsStr); + } + } + } + } + + public boolean isAllAllowed() { + return allAllowed; + } + + /** + * Get the names of users allowed for this service. + * @return the set of user names. the set must not be modified. + */ + Set getUsers() { + return users; + } + + /** + * Get the names of user groups allowed for this service. + * @return the set of group names. the set must not be modified. 
+ */ + Set getGroups() { + return groups; + } + + public boolean isUserAllowed(UserGroupInformation ugi) { + if (allAllowed || users.contains(ugi.getUserName())) { + return true; + } else { + for(String group: ugi.getGroupNames()) { + if (groups.contains(group)) { + return true; + } + } + } + return false; + } + + private static final void addToSet(Set set, String[] strings) { + for (String s : strings) { + s = s.trim(); + if (s.length() > 0) { + set.add(s); + } + } + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + boolean first = true; + for(String user: users) { + if (!first) { + sb.append(","); + } else { + first = false; + } + sb.append(user); + } + if (!groups.isEmpty()) { + sb.append(" "); + } + first = true; + for(String group: groups) { + if (!first) { + sb.append(","); + } else { + first = false; + } + sb.append(group); + } + return sb.toString(); + } +} \ No newline at end of file Modified: hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/Service.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/Service.java?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/Service.java (original) +++ hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/Service.java Wed Jan 27 08:08:29 2010 @@ -28,11 +28,11 @@ */ public class Service { private String key; - private Permission permission; + private Class protocol; public Service(String key, Class protocol) { this.key = key; - this.permission = new ConnectionPermission(protocol); + this.protocol = protocol; } /** @@ -44,10 +44,10 @@ } /** - * Get the {@link Permission} required to access the service. 
- * @return the {@link Permission} required to access the service + * Get the protocol for the service + * @return the {@link Class} for the protocol */ - public Permission getPermission() { - return permission; + public Class getProtocol() { + return protocol; } } Modified: hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java (original) +++ hadoop/common/trunk/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java Wed Jan 27 08:08:29 2010 @@ -17,19 +17,10 @@ */ package org.apache.hadoop.security.authorize; -import java.security.AccessControlException; -import java.security.AccessController; -import java.security.Permission; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; -import java.util.Collections; -import java.util.HashMap; +import java.util.IdentityHashMap; import java.util.Map; -import javax.security.auth.Subject; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; /** @@ -37,9 +28,10 @@ * for incoming service requests. */ public class ServiceAuthorizationManager { + private static final String HADOOP_POLICY_FILE = "hadoop-policy.xml"; - private static final Log LOG = - LogFactory.getLog(ServiceAuthorizationManager.class); + private static Map, AccessControlList> protocolToAcl = + new IdentityHashMap, AccessControlList>(); /** * Configuration key for controlling service-level authorization for Hadoop. 
@@ -47,9 +39,6 @@ public static final String SERVICE_AUTHORIZATION_CONFIG = "hadoop.security.authorization"; - private static Map, Permission> protocolToPermissionMap = - Collections.synchronizedMap(new HashMap, Permission>()); - /** * Authorize the user to access the protocol being used. * @@ -57,49 +46,48 @@ * @param protocol service being accessed * @throws AuthorizationException on authorization failure */ - public static void authorize(Subject user, Class protocol) - throws AuthorizationException { - Permission permission = protocolToPermissionMap.get(protocol); - if (permission == null) { - permission = new ConnectionPermission(protocol); - protocolToPermissionMap.put(protocol, permission); + public static void authorize(UserGroupInformation user, + Class protocol + ) throws AuthorizationException { + AccessControlList acl = protocolToAcl.get(protocol); + if (acl == null) { + throw new AuthorizationException("Protocol " + protocol + + " is not known."); + } + if (!acl.isUserAllowed(user)) { + throw new AuthorizationException("User " + user.toString() + + " is not authorized for protocol " + + protocol); } - - checkPermission(user, permission); } - - /** - * Check if the given {@link Subject} has all of necessary {@link Permission} - * set. - * - * @param user Subject to be authorized - * @param permissions Permission set - * @throws AuthorizationException if the authorization failed - */ - private static void checkPermission(final Subject user, - final Permission... 
permissions) - throws AuthorizationException { - try{ - Subject.doAs(user, - new PrivilegedExceptionAction() { - @Override - public Void run() throws Exception { - try { - for(Permission permission : permissions) { - AccessController.checkPermission(permission); - } - } catch (AccessControlException ace) { - LOG.info("Authorization failed for " + - UserGroupInformation.getCurrentUGI(), ace); - throw new AuthorizationException(ace); - } - return null; - } - } - ); - } catch (PrivilegedActionException e) { - throw new AuthorizationException(e.getException()); + + public static synchronized void refresh(Configuration conf, + PolicyProvider provider) { + // Get the system property 'hadoop.policy.file' + String policyFile = + System.getProperty("hadoop.policy.file", HADOOP_POLICY_FILE); + + // Make a copy of the original config, and load the policy file + Configuration policyConf = new Configuration(conf); + policyConf.addResource(policyFile); + + final Map, AccessControlList> newAcls = + new IdentityHashMap, AccessControlList>(); + + // Parse the config file + Service[] services = provider.getServices(); + if (services != null) { + for (Service service : services) { + AccessControlList acl = + new AccessControlList( + policyConf.get(service.getServiceKey(), + AccessControlList.WILDCARD_ACL_VALUE) + ); + newAcls.put(service.getProtocol(), acl); + } } + + // Flip to the newly parsed permissions + protocolToAcl = newAcls; } - } Modified: hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java?rev=903560&r1=903559&r2=903560&view=diff ============================================================================== --- hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java (original) +++ hadoop/common/trunk/src/test/core/org/apache/hadoop/ipc/TestRPC.java Wed Jan 27 08:08:29 2010 @@ -34,9 +34,7 @@ import org.apache.hadoop.io.Writable; import 
org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.authorize.AuthorizationException; -import org.apache.hadoop.security.authorize.ConfiguredPolicy; import org.apache.hadoop.security.authorize.PolicyProvider; import org.apache.hadoop.security.authorize.Service; import org.apache.hadoop.security.authorize.ServiceAuthorizationManager; @@ -338,7 +336,7 @@ } private void doRPCs(Configuration conf, boolean expectFailure) throws Exception { - SecurityUtil.setPolicy(new ConfiguredPolicy(conf, new TestPolicyProvider())); + ServiceAuthorizationManager.refresh(conf, new TestPolicyProvider()); Server server = RPC.getServer(TestProtocol.class, new TestImpl(), ADDRESS, 0, 5, true, conf); Added: hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestUserGroupInformation.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestUserGroupInformation.java?rev=903560&view=auto ============================================================================== --- hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestUserGroupInformation.java (added) +++ hadoop/common/trunk/src/test/core/org/apache/hadoop/security/TestUserGroupInformation.java Wed Jan 27 08:08:29 2010 @@ -0,0 +1,189 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package org.apache.hadoop.security; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import static org.mockito.Mockito.mock; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.security.token.TokenIdentifier; +import org.junit.Test; + +public class TestUserGroupInformation { + final private static String USER_NAME = "user1@HADOOP.APACHE.ORG"; + final private static String GROUP1_NAME = "group1"; + final private static String GROUP2_NAME = "group2"; + final private static String GROUP3_NAME = "group3"; + final private static String[] GROUP_NAMES = + new String[]{GROUP1_NAME, GROUP2_NAME, GROUP3_NAME}; + + /** + * given user name - get all the groups. 
+ * Needs to happen before creating the test users + */ + @Test + public void testGetServerSideGroups() throws IOException, + InterruptedException { + // get the user name + Process pp = Runtime.getRuntime().exec("whoami"); + BufferedReader br = new BufferedReader + (new InputStreamReader(pp.getInputStream())); + String userName = br.readLine().trim(); + // get the groups + pp = Runtime.getRuntime().exec("id -Gn"); + br = new BufferedReader(new InputStreamReader(pp.getInputStream())); + String line = br.readLine(); + System.out.println(userName + ":" + line); + + List groups = new ArrayList (); + for(String s: line.split("[\\s]")) { + groups.add(s); + } + + final UserGroupInformation login = UserGroupInformation.getCurrentUser(); + assertEquals(userName, login.getShortUserName()); + String[] gi = login.getGroupNames(); + assertEquals(groups.size(), gi.length); + for(int i=0; i < gi.length; i++) { + assertEquals(groups.get(i), gi[i]); + } + + final UserGroupInformation fakeUser = + UserGroupInformation.createRemoteUser("foo.bar"); + fakeUser.doAs(new PrivilegedExceptionAction(){ + @Override + public Object run() throws IOException { + UserGroupInformation current = UserGroupInformation.getCurrentUser(); + assertFalse(current.equals(login)); + assertEquals(current, fakeUser); + assertEquals(0, current.getGroupNames().length); + return null; + }}); + } + + /** Test login method */ + @Test + public void testLogin() throws Exception { + // login from unix + UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); + assertEquals(UserGroupInformation.getCurrentUser(), + UserGroupInformation.getLoginUser()); + assertTrue(ugi.getGroupNames().length >= 1); + + // ensure that doAs works correctly + UserGroupInformation userGroupInfo = + UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES); + UserGroupInformation curUGI = + userGroupInfo.doAs(new PrivilegedExceptionAction(){ + public UserGroupInformation run() throws IOException { + return 
UserGroupInformation.getCurrentUser(); + }}); + // make sure in the scope of the doAs, the right user is current + assertEquals(curUGI, userGroupInfo); + // make sure it is not the same as the login user + assertFalse(curUGI.equals(UserGroupInformation.getLoginUser())); + } + + /** test constructor */ + @Test + public void testConstructor() throws Exception { + UserGroupInformation ugi = + UserGroupInformation.createUserForTesting("user2/cron@HADOOP.APACHE.ORG", + GROUP_NAMES); + // make sure the short and full user names are correct + assertEquals("user2/cron@HADOOP.APACHE.ORG", ugi.getUserName()); + assertEquals("user2", ugi.getShortUserName()); + ugi = UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES); + assertEquals("user1", ugi.getShortUserName()); + + // failure test + testConstructorFailures(null); + testConstructorFailures(""); + } + + private void testConstructorFailures(String userName) { + boolean gotException = false; + try { + UserGroupInformation.createRemoteUser(userName); + } catch (Exception e) { + gotException = true; + } + assertTrue(gotException); + } + + @Test + public void testEquals() throws Exception { + UserGroupInformation uugi = + UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES); + + assertEquals(uugi, uugi); + // The subjects should be equal, so this should work + assertTrue(uugi.equals( + UserGroupInformation.createUserForTesting + (USER_NAME, GROUP_NAMES))); + // ensure that different UGI with the same subject are equal + assertEquals(uugi, new UserGroupInformation(uugi.getSubject())); + } + + @Test + public void testGettingGroups() throws Exception { + UserGroupInformation uugi = + UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES); + assertEquals(USER_NAME, uugi.getUserName()); + assertArrayEquals(new String[]{GROUP1_NAME, GROUP2_NAME, GROUP3_NAME}, + uugi.getGroupNames()); + } + + @SuppressWarnings("unchecked") // from Mockito mocks + @Test + public void testUGITokens() { + 
UserGroupInformation ugi = + UserGroupInformation.createUserForTesting("TheDoctor", + new String [] { "TheTARDIS"}); + Token t1 = mock(Token.class); + Token t2 = mock(Token.class); + + ugi.addToken(t1); + ugi.addToken(t2); + + Collection> z = ugi.getTokens(); + assertTrue(z.contains(t1)); + assertTrue(z.contains(t2)); + assertEquals(2, z.size()); + + try { + z.remove(t1); + fail("Shouldn't be able to modify token collection from UGI"); + } catch(UnsupportedOperationException uoe) { + // Can't modify tokens + } + } +} Added: hadoop/common/trunk/src/test/core/org/apache/hadoop/security/authorize/TestAccessControlList.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/security/authorize/TestAccessControlList.java?rev=903560&view=auto ============================================================================== --- hadoop/common/trunk/src/test/core/org/apache/hadoop/security/authorize/TestAccessControlList.java (added) +++ hadoop/common/trunk/src/test/core/org/apache/hadoop/security/authorize/TestAccessControlList.java Wed Jan 27 08:08:29 2010 @@ -0,0 +1,105 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.security.authorize;

import java.util.Iterator;
import java.util.Set;

import org.apache.hadoop.security.authorize.AccessControlList;

import junit.framework.TestCase;

/**
 * Tests {@link AccessControlList} parsing of the
 * "user1,user2 group1,group2" configuration format.
 */
public class TestAccessControlList extends TestCase {

  /** A single "*" entry (with any surrounding whitespace) allows everyone. */
  public void testWildCardAccessControlList() throws Exception {
    AccessControlList acl;

    acl = new AccessControlList("*");
    assertTrue(acl.isAllAllowed());

    acl = new AccessControlList("  * ");
    assertTrue(acl.isAllAllowed());

    acl = new AccessControlList(" *");
    assertTrue(acl.isAllAllowed());

    acl = new AccessControlList("*  ");
    assertTrue(acl.isAllAllowed());
  }

  /**
   * Users come before the first space, groups after; both lists are
   * comma separated and whitespace around entries is ignored.
   */
  public void testAccessControlList() throws Exception {
    AccessControlList acl;
    Set<String> users;
    Set<String> groups;

    acl = new AccessControlList("drwho tardis");
    users = acl.getUsers();
    assertEquals(1, users.size());
    assertEquals("drwho", users.iterator().next());
    groups = acl.getGroups();
    assertEquals(1, groups.size());
    assertEquals("tardis", groups.iterator().next());

    // users only, no group part
    acl = new AccessControlList("drwho");
    users = acl.getUsers();
    assertEquals(1, users.size());
    assertEquals("drwho", users.iterator().next());
    groups = acl.getGroups();
    assertEquals(0, groups.size());

    // trailing space means an empty group part
    acl = new AccessControlList("drwho ");
    users = acl.getUsers();
    assertEquals(1, users.size());
    assertEquals("drwho", users.iterator().next());
    groups = acl.getGroups();
    assertEquals(0, groups.size());

    // leading space means an empty user part
    acl = new AccessControlList(" tardis");
    users = acl.getUsers();
    assertEquals(0, users.size());
    groups = acl.getGroups();
    assertEquals(1, groups.size());
    assertEquals("tardis", groups.iterator().next());

    Iterator<String> iter;
    acl = new AccessControlList("drwho,joe tardis,users");
    users = acl.getUsers();
    assertEquals(2, users.size());
    iter = users.iterator();
    assertEquals("drwho", iter.next());
    assertEquals("joe", iter.next());
    groups = acl.getGroups();
    assertEquals(2, groups.size());
    iter = groups.iterator();
    assertEquals("tardis", iter.next());
    assertEquals("users", iter.next());

    // whitespace after the comma is tolerated
    acl = new AccessControlList("drwho,joe tardis, users");
    users = acl.getUsers();
    assertEquals(2, users.size());
    iter = users.iterator();
    assertEquals("drwho", iter.next());
    assertEquals("joe", iter.next());
    groups = acl.getGroups();
    assertEquals(2, groups.size());
    iter = groups.iterator();
    assertEquals("tardis", iter.next());
    assertEquals("users", iter.next());
  }
}