Subject: svn commit: r965697 - in /hadoop/hdfs/trunk: ./ src/java/org/apache/hadoop/hdfs/ src/java/org/apache/hadoop/hdfs/server/datanode/ src/java/org/apache/hadoop/hdfs/server/namenode/ src/java/org/apache/hadoop/hdfs/tools/ src/test/hdfs/org/apache/hadoop/hd...
Date: Tue, 20 Jul 2010 00:48:33 -0000
To: hdfs-commits@hadoop.apache.org
From: ddas@apache.org
Reply-To: hdfs-dev@hadoop.apache.org
Message-Id: <20100720004834.1FF6E238896F@eris.apache.org>

Author: ddas
Date: Tue Jul 20 00:48:33 2010
New Revision: 965697

URL: http://svn.apache.org/viewvc?rev=965697&view=rev
Log:
HDFS-1201. The HDFS component for HADOOP-6632. Contributed by Kan Zhang & Jitendra Pandey.

Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java
    hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/security/TestDelegationToken.java
    hadoop/hdfs/trunk/src/test/unit/org/apache/hadoop/hdfs/server/namenode/TestGetImageServlet.java

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=965697&r1=965696&r2=965697&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Tue Jul 20 00:48:33 2010
@@ -78,6 +78,9 @@ Trunk (unreleased changes)
     HDFS-1298 - Add support in HDFS for new statistics added in FileSystem
     to track the file system operations. (suresh)
+
+    HDFS-1201. The HDFS component for HADOOP-6632.
+    (Kan Zhang & Jitendra Pandey via ddas)
 
   OPTIMIZATIONS
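
The thread running through every file below is the move from DFSUtil.login(), which used configured principal names verbatim, to SecurityUtil.login() and SecurityUtil.getServerPrincipal(), which first resolve the configured principal against a concrete hostname. As a rough illustration of the substitution involved (a stand-alone sketch only; the class and method names here are hypothetical, and the real SecurityUtil implementation may differ):

    // Hypothetical sketch of _HOST expansion in a server principal.
    // SecurityUtil.getServerPrincipal() is expected to perform a
    // substitution of this general shape.
    public class PrincipalSketch {
      static final String HOSTNAME_PATTERN = "_HOST";

      /** e.g. "nn/_HOST@EXAMPLE.COM" + "nn01.example.com"
       *  yields "nn/nn01.example.com@EXAMPLE.COM". */
      static String serverPrincipal(String configured, String hostname) {
        if (configured == null) {
          return null;               // key was absent from the Configuration
        }
        return configured.replace(HOSTNAME_PATTERN, hostname);
      }

      public static void main(String[] args) {
        System.out.println(
            serverPrincipal("nn/_HOST@EXAMPLE.COM", "nn01.example.com"));
      }
    }
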
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java?rev=965697&r1=965696&r2=965697&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/DFSUtil.java Tue Jul 20 00:48:33 2010
@@ -18,14 +18,11 @@
 package org.apache.hadoop.hdfs;
 
-import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.util.StringTokenizer;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UserGroupInformation;
 
 @InterfaceAudience.Private
 public class DFSUtil {
@@ -84,22 +81,6 @@ public class DFSUtil {
       simulation[index] = false;
     }
   }
-
-  /**
-   * If a keytab has been provided, login as that user.
-   */
-  public static void login(final Configuration conf,
-      final String keytabFileKey,
-      final String userNameKey)
-      throws IOException {
-    String keytabFilename = conf.get(keytabFileKey);
-
-    if(keytabFilename == null)
-      return;
-
-    String user = conf.get(userNameKey, System.getProperty("user.name"));
-    UserGroupInformation.loginUserFromKeytab(user, keytabFilename);
-  }
 
   /**
   * Converts a byte array to a string using UTF8 encoding.

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java?rev=965697&r1=965696&r2=965697&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java Tue Jul 20 00:48:33 2010
@@ -106,6 +106,7 @@ import org.apache.hadoop.ipc.RemoteExcep
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.net.DNS;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
 import org.apache.hadoop.util.Daemon;
@@ -1394,8 +1395,7 @@ public class DataNode extends Configured
     dnThreadName = "DataNode: ["
        + StringUtils.uriToString(dataDirs.toArray(new URI[0])) + "]";
     UserGroupInformation.setConfiguration(conf);
-    DFSUtil.login(conf,
-        DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY,
+    SecurityUtil.login(conf, DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY,
         DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY);
     return makeInstance(dataDirs, conf);
   }
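
With login() gone from DFSUtil, every daemon now follows the same startup sequence: point UserGroupInformation at its Configuration, then ask SecurityUtil to log in from the configured keytab. A condensed sketch of the DataNode form follows; the wrapper class and method are only for illustration. The NameNode and SecondaryNameNode calls in this patch additionally pass a hostname so that _HOST in the principal can be resolved.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.security.SecurityUtil;
    import org.apache.hadoop.security.UserGroupInformation;

    class StartupLoginSketch {
      static void loginLikeDataNode(Configuration conf) throws IOException {
        UserGroupInformation.setConfiguration(conf);     // pick up security mode
        SecurityUtil.login(conf,
            DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY,  // key naming the keytab
            DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY);   // key naming the principal
      }
    }
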
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=965697&r1=965696&r2=965697&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Tue Jul 20 00:48:33 2010
@@ -4565,7 +4565,7 @@ public class FSNamesystem implements FSC
     if (isInSafeMode()) {
       throw new SafeModeException("Cannot cancel delegation token", safeMode);
     }
-    String canceller = UserGroupInformation.getCurrentUser().getShortUserName();
+    String canceller = UserGroupInformation.getCurrentUser().getUserName();
     DelegationTokenIdentifier id = dtSecretManager
         .cancelToken(token, canceller);
     logCancelDelegationToken(id);
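
The one-line change above is easy to miss but matters: the canceller recorded for a delegation token is now the full user name (for a Kerberos caller, the complete principal) rather than the auth_to_local short name, so it can be compared against the renewer stored in the token identifier. A small sketch of the two name forms, using a hypothetical principal:

    import org.apache.hadoop.security.UserGroupInformation;

    class NameFormsSketch {
      static void show() {
        UserGroupInformation ugi =
            UserGroupInformation.createRemoteUser("JobTracker/foo.com@FOO.COM");
        String full = ugi.getUserName();       // the complete name, as above
        String brief = ugi.getShortUserName(); // mapped via hadoop.security.auth_to_local
        System.out.println(full + " / " + brief);
      }
    }
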
LOG.warn("Received null remoteUser while authorizing access to getImage servlet"); return false; } - String [] validRequestors = {conf.get(DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY), - conf.get(DFS_NAMENODE_USER_NAME_KEY), - conf.get(DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY), - conf.get(DFS_SECONDARY_NAMENODE_USER_NAME_KEY) }; + String[] validRequestors = { + SecurityUtil.getServerPrincipal(conf + .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY), NameNode + .getAddress(conf).getHostName()), + SecurityUtil.getServerPrincipal(conf + .get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY), NameNode + .getAddress(conf).getHostName()), + SecurityUtil.getServerPrincipal(conf + .get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY), + SecondaryNameNode.getHttpAddress(conf).getHostName()), + SecurityUtil.getServerPrincipal(conf + .get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_USER_NAME_KEY), + SecondaryNameNode.getHttpAddress(conf).getHostName()) }; for(String v : validRequestors) { if(v != null && v.equals(remoteUser)) { Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=965697&r1=965696&r2=965697&view=diff ============================================================================== --- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original) +++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java Tue Jul 20 00:48:33 2010 @@ -95,6 +95,7 @@ import org.apache.hadoop.security.author import org.apache.hadoop.security.authorize.ServiceAuthorizationManager; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.SecretManager.InvalidToken; +import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.util.ServicePlugin; import org.apache.hadoop.util.StringUtils; @@ -341,6 +342,9 @@ public class NameNode implements Namenod */ protected void initialize(Configuration conf) throws IOException { InetSocketAddress socAddr = getRpcServerAddress(conf); + UserGroupInformation.setConfiguration(conf); + SecurityUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, + DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, socAddr.getHostName()); int handlerCount = conf.getInt("dfs.namenode.handler.count", 10); // set service-level authorization security policy @@ -414,28 +418,39 @@ public class NameNode implements Namenod this.emptier.start(); } + public static String getInfoServer(Configuration conf) { + return UserGroupInformation.isSecurityEnabled() ? conf.get( + DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, + DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT) : conf.get( + DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, + DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_DEFAULT); + } + private void startHttpServer(final Configuration conf) throws IOException { + final InetSocketAddress infoSocAddr = getHttpServerAddress(conf); + final String infoHost = infoSocAddr.getHostName(); if(UserGroupInformation.isSecurityEnabled()) { - String httpsUser = conf.get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY); - if(httpsUser == null) { - LOG.warn(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY + - " not defined in config. Starting http server as " - + conf.get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY) - + ": Kerberized SSL may be not function correctly."); - } else { - // Kerberized SSL servers must be run from the host principal... 
- LOG.info("Logging in as " + httpsUser + " to start http server."); - DFSUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, - DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY); - } + String httpsUser = SecurityUtil.getServerPrincipal(conf + .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY), infoHost); + if (httpsUser == null) { + LOG.warn(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY + + " not defined in config. Starting http server as " + + SecurityUtil.getServerPrincipal(conf + .get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY), rpcAddress + .getHostName()) + + ": Kerberized SSL may be not function correctly."); + } else { + // Kerberized SSL servers must be run from the host principal... + LOG.info("Logging in as " + httpsUser + " to start http server."); + SecurityUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, + DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY, infoHost); + } } UserGroupInformation ugi = UserGroupInformation.getLoginUser(); try { this.httpServer = ugi.doAs(new PrivilegedExceptionAction() { @Override public HttpServer run() throws IOException, InterruptedException { - InetSocketAddress infoSocAddr = getHttpServerAddress(conf); - String infoHost = infoSocAddr.getHostName(); int infoPort = infoSocAddr.getPort(); httpServer = new HttpServer("hdfs", infoHost, infoPort, infoPort == 0, conf); @@ -447,8 +462,8 @@ public class NameNode implements Namenod DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY, DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT); InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf - .get(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, infoHost - + ":" + 0)); + .get(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, + DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT)); Configuration sslConf = new HdfsConfiguration(false); if (certSSL) { sslConf.addResource(conf.get( @@ -498,11 +513,12 @@ public class NameNode implements Namenod if(UserGroupInformation.isSecurityEnabled() && conf.get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY) != null) { // Go back to being the correct Namenode principal - LOG.info("Logging back in as " - + conf.get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY) - + " following http server start."); - DFSUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, - DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY); + LOG.info("Logging back in as " + + SecurityUtil.getServerPrincipal(conf + .get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY), rpcAddress + .getHostName()) + " following http server start."); + SecurityUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, + DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, rpcAddress.getHostName()); } } } @@ -540,11 +556,6 @@ public class NameNode implements Namenod protected NameNode(Configuration conf, NamenodeRole role) throws IOException { - UserGroupInformation.setConfiguration(conf); - DFSUtil.login(conf, - DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, - DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY); - this.role = role; try { initialize(conf); Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java?rev=965697&r1=965696&r2=965697&view=diff ============================================================================== --- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java (original) +++ 
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java?rev=965697&r1=965696&r2=965697&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java Tue Jul 20 00:48:33 2010
@@ -21,6 +21,7 @@ import java.io.File;
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.URI;
+import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -49,6 +50,7 @@ import org.apache.hadoop.ipc.RemoteExcep
 import org.apache.hadoop.metrics.jvm.JvmMetrics;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector;
+import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Daemon;
@@ -71,6 +73,10 @@ import org.apache.hadoop.util.StringUtil
 @InterfaceAudience.Private
 public class SecondaryNameNode implements Runnable {
+  static{
+    Configuration.addDefaultResource("hdfs-default.xml");
+    Configuration.addDefaultResource("hdfs-site.xml");
+  }
   public static final Log LOG =
     LogFactory.getLog(SecondaryNameNode.class.getName());
@@ -114,11 +120,6 @@ public class SecondaryNameNode implement
    * Create a connection to the primary namenode.
    */
   public SecondaryNameNode(Configuration conf)  throws IOException {
-    UserGroupInformation.setConfiguration(conf);
-    DFSUtil.login(conf,
-        DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY,
-        DFSConfigKeys.DFS_SECONDARY_NAMENODE_USER_NAME_KEY);
-
     try {
       initialize(conf);
     } catch(IOException e) {
@@ -126,11 +127,26 @@ public class SecondaryNameNode implement
       throw e;
     }
   }
-
+
+  public static InetSocketAddress getHttpAddress(Configuration conf) {
+    return NetUtils.createSocketAddr(conf.get(
+        DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
+        DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_DEFAULT));
+  }
+
   /**
    * Initialize SecondaryNameNode.
    */
   private void initialize(final Configuration conf) throws IOException {
+    final InetSocketAddress infoSocAddr = getHttpAddress(conf);
+    infoBindAddress = infoSocAddr.getHostName();
+    UserGroupInformation.setConfiguration(conf);
+    if (UserGroupInformation.isSecurityEnabled()) {
+      SecurityUtil.login(conf,
+          DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY,
+          DFSConfigKeys.DFS_SECONDARY_NAMENODE_USER_NAME_KEY,
+          infoBindAddress);
+    }
     // initiate Java VM metrics
     JvmMetrics.init("SecondaryNameNode",
         conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY));
@@ -162,7 +178,9 @@ public class SecondaryNameNode implement
       // Kerberized SSL servers must be run from the host principal...
       UserGroupInformation httpUGI =
         UserGroupInformation.loginUserFromKeytabAndReturnUGI(
-          conf.get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
+          SecurityUtil.getServerPrincipal(conf
+              .get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
+              infoBindAddress),
          conf.get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY));
       try {
         infoServer = httpUGI.doAs(new PrivilegedExceptionAction<HttpServer>() {
@@ -170,11 +188,7 @@ public class SecondaryNameNode implement
           public HttpServer run() throws IOException, InterruptedException {
             LOG.info("Starting web server as: " +
                 UserGroupInformation.getCurrentUser().getUserName());
-            InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(
-                conf.get(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
-                         DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_DEFAULT));
-            infoBindAddress = infoSocAddr.getHostName();
             int tmpInfoPort = infoSocAddr.getPort();
             infoServer = new HttpServer("secondary", infoBindAddress, tmpInfoPort,
                 tmpInfoPort == 0, conf);
@@ -235,10 +249,31 @@ public class SecondaryNameNode implement
     }
   }
 
+  public void run() {
+    if (UserGroupInformation.isSecurityEnabled()) {
+      UserGroupInformation ugi = null;
+      try {
+        ugi = UserGroupInformation.getLoginUser();
+      } catch (IOException e) {
+        LOG.error(StringUtils.stringifyException(e));
+        e.printStackTrace();
+        Runtime.getRuntime().exit(-1);
+      }
+      ugi.doAs(new PrivilegedAction<Object>() {
+        @Override
+        public Object run() {
+          doWork();
+          return null;
+        }
+      });
+    } else {
+      doWork();
+    }
+  }
   //
   // The main work loop
   //
-  public void run() {
+  public void doWork() {
 
     //
     // Poll the Namenode (once every 5 minutes) to find the size of the
@@ -343,11 +378,7 @@ public class SecondaryNameNode implement
       throw new IOException("This is not a DFS");
     }
 
-    String configuredAddress = UserGroupInformation.isSecurityEnabled() ?
-        conf.get(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
-            DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT)
-        : conf.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,
-            DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_DEFAULT);
+    String configuredAddress = NameNode.getInfoServer(conf);
     InetSocketAddress sockAddr = NetUtils.createSocketAddr(configuredAddress);
     if (sockAddr.getAddress().isAnyLocalAddress()) {
       if(UserGroupInformation.isSecurityEnabled()) {
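
The run()/doWork() split is the other notable pattern here: the checkpointing loop itself is unchanged, but when security is on it now executes inside a doAs() as the keytab login user, so RPCs to the NameNode carry the right credentials. The same pattern in isolation (the runAs() helper name is mine, for illustration only):

    import java.io.IOException;
    import java.security.PrivilegedAction;
    import org.apache.hadoop.security.UserGroupInformation;

    class RunAsSketch {
      static void runAs(final Runnable work) throws IOException {
        if (UserGroupInformation.isSecurityEnabled()) {
          UserGroupInformation ugi = UserGroupInformation.getLoginUser();
          ugi.doAs(new PrivilegedAction<Void>() {
            @Override
            public Void run() {
              work.run();        // doWork() in SecondaryNameNode
              return null;
            }
          });
        } else {
          work.run();
        }
      }
    }
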
Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java?rev=965697&r1=965696&r2=965697&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java Tue Jul 20 00:48:33 2010
@@ -29,6 +29,7 @@ import java.security.PrivilegedException
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.NamenodeFsck;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -79,15 +80,6 @@ public class DFSck extends Configured im
     super(conf);
     this.ugi = UserGroupInformation.getCurrentUser();
   }
-
-  private String getInfoServer() {
-    Configuration conf = getConf();
-    return UserGroupInformation.isSecurityEnabled() ? conf.get(
-        DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
-        DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT) : conf.get(
-        DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,
-        DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_DEFAULT);
-  }
 
   /**
   * Print fsck usage information
@@ -140,7 +132,7 @@ public class DFSck extends Configured im
       proto = "https://";
     }
     final StringBuilder url = new StringBuilder(proto);
-    url.append(getInfoServer());
+    url.append(NameNode.getInfoServer(getConf()));
     url.append("/fsck?ugi=").append(ugi.getShortUserName()).append("&path=");
 
     String dir = "/";
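
DFSck's private copy of the http-vs-https selection is gone; the fsck URL is now assembled from the shared NameNode helper. Roughly, condensed from the surrounding code (ugi and getConf() are the fields of the real class, and the https choice in the real code is driven by the security check shown in the hunk):

    String proto = UserGroupInformation.isSecurityEnabled() ? "https://"
                                                            : "http://";
    StringBuilder url = new StringBuilder(proto);
    url.append(NameNode.getInfoServer(getConf()));   // info server host:port
    url.append("/fsck?ugi=").append(ugi.getShortUserName()).append("&path=");
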
Modified: hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/security/TestDelegationToken.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/security/TestDelegationToken.java?rev=965697&r1=965696&r2=965697&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/security/TestDelegationToken.java (original)
+++ hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/security/TestDelegationToken.java Tue Jul 20 00:48:33 2010
@@ -22,6 +22,8 @@ package org.apache.hadoop.hdfs.security;
 
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
 
 import junit.framework.Assert;
@@ -33,6 +35,7 @@ import org.apache.hadoop.hdfs.HdfsConfig
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
@@ -51,6 +54,8 @@ public class TestDelegationToken {
     config = new HdfsConfiguration();
     config.setLong(DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_MAX_LIFETIME_KEY, 10000);
     config.setLong(DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_RENEW_INTERVAL_KEY, 5000);
+    config.set("hadoop.security.auth_to_local",
+        "RULE:[2:$1@$0](JobTracker@.*FOO.COM)s/@.*//" + "DEFAULT");
     FileSystem.setDefaultUri(config, "hdfs://localhost:" + "0");
     cluster = new MiniDFSCluster(0, config, 1, true, true, true,  null, null, null, null);
     cluster.waitActive();
@@ -148,5 +153,50 @@ public class TestDelegationToken {
     Assert.assertTrue(null != dtSecretManager.retrievePassword(identifier));
     dtSecretManager.renewToken(token, "JobTracker");
   }
+
+  @Test
+  public void testDelegationTokenWithDoAs() throws Exception {
+    final DistributedFileSystem dfs = (DistributedFileSystem) cluster.getFileSystem();
+    final Token<DelegationTokenIdentifier> token = dfs.getDelegationToken(new Text(
+        "JobTracker"));
+    final UserGroupInformation longUgi = UserGroupInformation
+        .createRemoteUser("JobTracker/foo.com@FOO.COM");
+    final UserGroupInformation shortUgi = UserGroupInformation
+        .createRemoteUser("JobTracker");
+    longUgi.doAs(new PrivilegedExceptionAction<Object>() {
+      public Object run() throws IOException {
+        final DistributedFileSystem dfs = (DistributedFileSystem) cluster
+            .getFileSystem();
+        try {
+          //try renew with long name
+          dfs.renewDelegationToken(token);
+        } catch (IOException e) {
+          Assert.fail("Could not renew delegation token for user "+longUgi);
+        }
+        return null;
+      }
+    });
+    shortUgi.doAs(new PrivilegedExceptionAction<Object>() {
+      public Object run() throws IOException {
+        final DistributedFileSystem dfs = (DistributedFileSystem) cluster
+            .getFileSystem();
+        dfs.renewDelegationToken(token);
+        return null;
+      }
+    });
+    longUgi.doAs(new PrivilegedExceptionAction<Object>() {
+      public Object run() throws IOException {
+        final DistributedFileSystem dfs = (DistributedFileSystem) cluster
+            .getFileSystem();
+        try {
+          //try cancel with long name
+          dfs.cancelDelegationToken(token);
+        } catch (IOException e) {
+          Assert.fail("Could not cancel delegation token for user "+longUgi);
+        }
+        return null;
+      }
+    });
+  }
 }
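
Why the test's renewals succeed for both identities comes down to the auth_to_local rule it installs in setup. Walking it through (my reading of the rule; the rule string itself is taken from the hunk above):

    // RULE:[2:$1@$0](JobTracker@.*FOO.COM)s/@.*//
    // applied to the two-component principal "JobTracker/foo.com@FOO.COM":
    //   [2:$1@$0]              -> builds the candidate "JobTracker@FOO.COM"
    //   (JobTracker@.*FOO.COM) -> the filter matches the candidate
    //   s/@.*//                -> strips the realm, leaving "JobTracker"
    // So longUgi maps to the same short name as shortUgi, and both match the
    // renewer "JobTracker" recorded in the token.
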
Modified: hadoop/hdfs/trunk/src/test/unit/org/apache/hadoop/hdfs/server/namenode/TestGetImageServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/unit/org/apache/hadoop/hdfs/server/namenode/TestGetImageServlet.java?rev=965697&r1=965696&r2=965697&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/unit/org/apache/hadoop/hdfs/server/namenode/TestGetImageServlet.java (original)
+++ hadoop/hdfs/trunk/src/test/unit/org/apache/hadoop/hdfs/server/namenode/TestGetImageServlet.java Tue Jul 20 00:48:33 2010
@@ -23,30 +23,46 @@ import static org.apache.hadoop.hdfs.DFS
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SECONDARY_NAMENODE_USER_NAME_KEY;
 import static org.junit.Assert.*;
 
+import java.io.IOException;
+
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.security.SecurityUtil;
 import org.junit.Test;
 
 public class TestGetImageServlet {
+  private static final String HOST = "foo.com";
+  private static final String KERBEROS_DOMAIN = "@HADOOP.ORG";
+
+  private static Configuration getConf() {
+    Configuration conf = new Configuration();
+    FileSystem.setDefaultUri(conf, "hdfs://" + HOST);
+    conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, HOST
+        + ":50090");
+    return conf;
+  }
 
   // Worker class to poke the isValidRequestor method with verifying it accepts
   // or rejects with these standard allowed principals
   private void verifyIsValidReqBehavior(GetImageServlet gim,
-      boolean shouldSucceed, String msg) {
+      boolean shouldSucceed, String msg)
+      throws IOException {
     final String [] validRequestors = {DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY,
                                        DFS_NAMENODE_USER_NAME_KEY,
                                        DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY,
                                        DFS_SECONDARY_NAMENODE_USER_NAME_KEY };
 
+    Configuration conf = getConf();
     for(String v : validRequestors) {
-      Configuration conf = new Configuration();
-      conf.set(v, "a");
-      assertEquals(msg + v, gim.isValidRequestor(shouldSucceed ? "a" : "b", conf),
-          shouldSucceed);
+      conf.set(v, "a/" + SecurityUtil.HOSTNAME_PATTERN + KERBEROS_DOMAIN);
+      assertEquals(msg + v, gim.isValidRequestor(shouldSucceed ? "a/" + HOST
+          + KERBEROS_DOMAIN : "b/" + HOST + KERBEROS_DOMAIN, conf),
+          shouldSucceed);
     }
   }
 
   @Test
-  public void IsValidRequestorAcceptsCorrectly() {
+  public void IsValidRequestorAcceptsCorrectly() throws IOException {
     GetImageServlet gim = new GetImageServlet();
 
     verifyIsValidReqBehavior(gim, true,
@@ -54,12 +70,12 @@ public class TestGetImageServlet {
   }
 
   @Test
-  public void IsValidRequestorRejectsCorrectly() {
+  public void IsValidRequestorRejectsCorrectly() throws IOException {
     GetImageServlet gim = new GetImageServlet();
 
     // Don't set any valid requestors
     assertFalse("isValidRequestor allowed a requestor despite no values being set",
-        gim.isValidRequestor("not set", new Configuration()));
+        gim.isValidRequestor("not set", getConf()));
 
     verifyIsValidReqBehavior(gim, false,
         "isValidRequestor has allowed an invalid requestor: ");
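
The updated servlet test also shows the configuration style the patch expects in deployments: principals written with the _HOST wildcard, resolved per service. A minimal sketch (the key choice and realm here are illustrative only):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.security.SecurityUtil;

    class HostPatternConfSketch {
      static Configuration conf() {
        Configuration conf = new Configuration();
        conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY,
            "a/" + SecurityUtil.HOSTNAME_PATTERN + "@HADOOP.ORG");
        // isValidRequestor() should then accept "a/foo.com@HADOOP.ORG"
        // when the NameNode's address resolves to foo.com.
        return conf;
      }
    }
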