From: jing9@apache.org
To: hdfs-commits@hadoop.apache.org
Reply-To: hdfs-dev@hadoop.apache.org
Subject: svn commit: r1551703 - in /hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/org/apache/hadoop/hdfs/ src/main/java/org/apache/hadoop/hdfs/qjournal/server/ src/main/java/org/apache/hadoop/hdfs/server/datanode/ src/main/ja...
Date: Tue, 17 Dec 2013 20:59:29 -0000
Message-Id: <20131217205929.749D923889E1@eris.apache.org>

Author: jing9
Date: Tue Dec 17 20:59:27 2013
New Revision: 1551703

URL: http://svn.apache.org/r1551703
Log:
HDFS-5545. Merge change r1546151 from trunk.
Modified:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeHttpServer.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1551703&r1=1551702&r2=1551703&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Tue Dec 17 20:59:27 2013
@@ -162,6 +162,9 @@ Release 2.4.0 - UNRELEASED
     HDFS-5538. URLConnectionFactory should pick up the SSL related
     configuration by default. (Haohui Mai via jing9)
 
+    HDFS-5545. Allow specifying endpoints for listeners in HttpServer. (Haohui
+    Mai via jing9)
+
   OPTIMIZATIONS
 
     HDFS-5239. Allow FSNamesystem lock fairness to be configurable (daryn)

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java?rev=1551703&r1=1551702&r2=1551703&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java Tue Dec 17 20:59:27 2013
@@ -78,6 +78,7 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.web.SWebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
+import org.apache.hadoop.http.HttpServer;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.net.NetUtils;
@@ -1407,4 +1408,19 @@ public class DFSUtil {
     return (value == null || value.isEmpty()) ?
         defaultKey : DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY;
   }
+
+  public static HttpServer.Builder loadSslConfToHttpServerBuilder(
+      HttpServer.Builder builder, Configuration sslConf) {
+    return builder
+        .needsClientAuth(
+            sslConf.getBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
+                DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT))
+        .keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
+        .keyStore(sslConf.get("ssl.server.keystore.location"),
+            sslConf.get("ssl.server.keystore.password"),
+            sslConf.get("ssl.server.keystore.type", "jks"))
+        .trustStore(sslConf.get("ssl.server.truststore.location"),
+            sslConf.get("ssl.server.truststore.password"),
+            sslConf.get("ssl.server.truststore.type", "jks"));
+  }
 }
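For illustration: the new DFSUtil helper centralizes the keystore/truststore
plumbing that each daemon previously wired up by hand. A minimal sketch of how
a caller might use it, assuming ssl-server.xml is on the classpath; the class
name, endpoint host, and port here are illustrative, not part of the commit:

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSUtil;
    import org.apache.hadoop.http.HttpServer;

    public class SslBuilderSketch {
      public static void main(String[] args) throws Exception {
        // Server-side SSL settings live in their own resource,
        // separate from the daemon configuration.
        Configuration sslConf = new Configuration(false);
        sslConf.addResource("ssl-server.xml");

        // Declare the HTTPS endpoint up front, then let the helper fill
        // in needsClientAuth/keyStore/trustStore from sslConf.
        HttpServer.Builder builder = new HttpServer.Builder()
            .setName("sketch")
            .addEndpoint(URI.create("https://0.0.0.0:50470"))
            .setConf(new Configuration());
        DFSUtil.loadSslConfToHttpServerBuilder(builder, sslConf);

        HttpServer server = builder.build();
        server.start();
      }
    }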
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeHttpServer.java?rev=1551703&r1=1551702&r2=1551703&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeHttpServer.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeHttpServer.java Tue Dec 17 20:59:27 2013
@@ -23,6 +23,8 @@ import static org.apache.hadoop.hdfs.DFS
 
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
 
 import javax.servlet.ServletContext;
 
@@ -69,8 +71,15 @@ public class JournalNodeHttpServer {
         bindAddr.getHostName()));
 
     int tmpInfoPort = bindAddr.getPort();
+    URI httpEndpoint;
+    try {
+      httpEndpoint = new URI("http://" + NetUtils.getHostPortString(bindAddr));
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+
     httpServer = new HttpServer.Builder().setName("journal")
-        .setBindAddress(bindAddr.getHostName()).setPort(tmpInfoPort)
+        .addEndpoint(httpEndpoint)
         .setFindPort(tmpInfoPort == 0).setConf(conf).setACL(
             new AccessControlList(conf.get(DFS_ADMIN, " ")))
         .setSecurityEnabled(UserGroupInformation.isSecurityEnabled())
@@ -85,7 +94,7 @@ public class JournalNodeHttpServer {
     httpServer.start();
 
     // The web-server port can be ephemeral... ensure we have the correct info
-    infoPort = httpServer.getPort();
+    infoPort = httpServer.getConnectorAddress(0).getPort();
     LOG.info("Journal Web-server up at: " + bindAddr + ":" + infoPort);
   }
 
@@ -104,7 +113,7 @@ public class JournalNodeHttpServer {
    * Return the actual address bound to by the running server.
    */
   public InetSocketAddress getAddress() {
-    InetSocketAddress addr = httpServer.getListenerAddress();
+    InetSocketAddress addr = httpServer.getConnectorAddress(0);
     assert addr.getPort() != 0;
     return addr;
   }
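The same conversion recurs in each daemon touched by this commit: build a
"scheme://host:port" endpoint URI with NetUtils.getHostPortString and rethrow
the checked URISyntaxException as an IOException. A self-contained sketch of
the pattern; the helper class and method names are hypothetical:

    import java.io.IOException;
    import java.net.InetSocketAddress;
    import java.net.URI;
    import java.net.URISyntaxException;

    import org.apache.hadoop.net.NetUtils;

    final class EndpointUris {
      // Hypothetical helper mirroring the commit's pattern: an endpoint
      // URI of the form "scheme://host:port" built from a socket address.
      static URI toEndpoint(String scheme, InetSocketAddress addr)
          throws IOException {
        try {
          return new URI(scheme + "://" + NetUtils.getHostPortString(addr));
        } catch (URISyntaxException e) {
          // Surface a malformed host:port as an IOException, as the
          // JournalNode and SecondaryNameNode servers do.
          throw new IOException(e);
        }
      }
    }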
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java?rev=1551703&r1=1551702&r2=1551703&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java Tue Dec 17 20:59:27 2013
@@ -22,6 +22,7 @@ import com.google.common.annotations.Vis
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.protobuf.BlockingService;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -181,6 +182,7 @@ public class DataNode extends Configured
   private volatile boolean heartbeatsDisabledForTests = false;
   private DataStorage storage = null;
   private HttpServer infoServer = null;
+  private int infoPort;
   private int infoSecurePort;
   DataNodeMetrics metrics;
   private InetSocketAddress streamingAddr;
@@ -308,27 +310,33 @@ public class DataNode extends Configured
     String infoHost = infoSocAddr.getHostName();
     int tmpInfoPort = infoSocAddr.getPort();
     HttpServer.Builder builder = new HttpServer.Builder().setName("datanode")
-        .setBindAddress(infoHost).setPort(tmpInfoPort)
+        .addEndpoint(URI.create("http://" + NetUtils.getHostPortString(infoSocAddr)))
         .setFindPort(tmpInfoPort == 0).setConf(conf)
         .setACL(new AccessControlList(conf.get(DFS_ADMIN, " ")));
-    this.infoServer = (secureResources == null) ? builder.build() :
-        builder.setConnector(secureResources.getListener()).build();
     LOG.info("Opened info server at " + infoHost + ":" + tmpInfoPort);
     if (conf.getBoolean(DFS_HTTPS_ENABLE_KEY, false)) {
-      boolean needClientAuth = conf.getBoolean(DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
-          DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT);
       InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf.get(
           DFS_DATANODE_HTTPS_ADDRESS_KEY, infoHost + ":" + 0));
-      Configuration sslConf = new HdfsConfiguration(false);
-      sslConf.addResource(conf.get(DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
-          "ssl-server.xml"));
-      this.infoServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth);
+      builder.addEndpoint(URI.create("https://"
+          + NetUtils.getHostPortString(secInfoSocAddr)));
+      Configuration sslConf = new Configuration(false);
+      sslConf.setBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY, conf
+          .getBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
+              DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT));
+      sslConf.addResource(conf.get(
+          DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
+          DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT));
+      DFSUtil.loadSslConfToHttpServerBuilder(builder, sslConf);
+
       if(LOG.isDebugEnabled()) {
         LOG.debug("Datanode listening for SSL on " + secInfoSocAddr);
       }
       infoSecurePort = secInfoSocAddr.getPort();
     }
+
+    this.infoServer = (secureResources == null) ? builder.build() :
+        builder.setConnector(secureResources.getListener()).build();
+
     this.infoServer.addInternalServlet(null, "/streamFile/*", StreamFile.class);
     this.infoServer.addInternalServlet(null, "/getFileChecksum/*",
         FileChecksumServlets.GetServlet.class);
@@ -344,6 +352,7 @@ public class DataNode extends Configured
           WebHdfsFileSystem.PATH_PREFIX + "/*");
     }
     this.infoServer.start();
+    this.infoPort = infoServer.getConnectorAddress(0).getPort();
   }
 
   private void startPlugins(Configuration conf) {
@@ -2251,7 +2260,7 @@ public class DataNode extends Configured
    * @return the datanode's http port
    */
   public int getInfoPort() {
-    return infoServer.getPort();
+    return infoPort;
   }
 
   /**
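The DataNode now caches the bound port in a field rather than asking the
server each time: with an ephemeral port (port 0 plus findPort) the real port
exists only after start(), so it is read once from connector 0. A sketch of
that idiom, with an illustrative class name and bind address:

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.http.HttpServer;

    public class EphemeralPortSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Port 0 asks the OS for any free port; setFindPort(true) lets
        // the server probe for another port if the requested one is taken.
        HttpServer server = new HttpServer.Builder()
            .setName("sketch")
            .addEndpoint(URI.create("http://0.0.0.0:0"))
            .setFindPort(true)
            .setConf(conf)
            .build();
        server.start();
        // Only now is the actual port known; cache it as the DataNode does.
        int infoPort = server.getConnectorAddress(0).getPort();
        System.out.println("bound to port " + infoPort);
      }
    }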
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=1551703&r1=1551702&r2=1551703&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java Tue Dec 17 20:59:27 2013
@@ -44,6 +44,7 @@ import org.apache.hadoop.ha.ServiceFaile
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Trash;
+
 import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
 import static org.apache.hadoop.util.ExitUtil.terminate;
 import static org.apache.hadoop.util.ToolRunner.confirmPrompt;

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java?rev=1551703&r1=1551702&r2=1551703&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java Tue Dec 17 20:59:27 2013
@@ -21,6 +21,7 @@ import static org.apache.hadoop.hdfs.DFS
 
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.net.URI;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -70,18 +71,38 @@ public class NameNodeHttpServer {
     this.bindAddress = bindAddress;
   }
 
-  public void start() throws IOException {
+  void start() throws IOException {
     final String infoHost = bindAddress.getHostName();
     int infoPort = bindAddress.getPort();
 
-    httpServer = new HttpServer.Builder().setName("hdfs")
-        .setBindAddress(infoHost).setPort(infoPort)
+    HttpServer.Builder builder = new HttpServer.Builder().setName("hdfs")
+        .addEndpoint(URI.create(("http://" + NetUtils.getHostPortString(bindAddress))))
         .setFindPort(infoPort == 0).setConf(conf).setACL(
             new AccessControlList(conf.get(DFS_ADMIN, " ")))
         .setSecurityEnabled(UserGroupInformation.isSecurityEnabled())
         .setUsernameConfKey(
             DFSConfigKeys.DFS_NAMENODE_INTERNAL_SPNEGO_USER_NAME_KEY)
         .setKeytabConfKey(DFSUtil.getSpnegoKeytabKey(conf,
-            DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY)).build();
+            DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY));
+
+    boolean certSSL = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, false);
+    if (certSSL) {
+      httpsAddress = NetUtils.createSocketAddr(conf.get(
+          DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
+          DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT));
+
+      builder.addEndpoint(URI.create("https://"
+          + NetUtils.getHostPortString(httpsAddress)));
+      Configuration sslConf = new Configuration(false);
+      sslConf.setBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY, conf
+          .getBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
+              DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT));
+      sslConf.addResource(conf.get(
+          DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
+          DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT));
+      DFSUtil.loadSslConfToHttpServerBuilder(builder, sslConf);
+    }
+
+    httpServer = builder.build();
+
     if (WebHdfsFileSystem.isEnabled(conf, HttpServer.LOG)) {
       // set user pattern based on configuration file
       UserParam.setUserPattern(conf.get(DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY, DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT));
@@ -91,7 +112,7 @@ public class NameNodeHttpServer {
       final String classname = AuthFilter.class.getName();
       final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
       Map<String, String> params = getAuthFilterParams(conf);
-      httpServer.defineFilter(httpServer.getWebAppContext(), name, classname, params,
+      HttpServer.defineFilter(httpServer.getWebAppContext(), name, classname, params,
           new String[]{pathSpec});
       HttpServer.LOG.info("Added filter '" + name + "' (class=" + classname + ")");
 
@@ -101,34 +122,19 @@ public class NameNodeHttpServer {
           + ";" + Param.class.getPackage().getName(), pathSpec);
     }
 
-    boolean certSSL = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, false);
+    httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, nn);
+    httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
+    setupServlets(httpServer, conf);
+    httpServer.start();
+    httpAddress = httpServer.getConnectorAddress(0);
     if (certSSL) {
-      boolean needClientAuth = conf.getBoolean("dfs.https.need.client.auth", false);
-      httpsAddress = NetUtils.createSocketAddr(conf.get(
-          DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
-          DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT));
-
-      Configuration sslConf = new Configuration(false);
-      sslConf.addResource(conf.get(
-          DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
-          DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT));
-      httpServer.addSslListener(httpsAddress, sslConf, needClientAuth);
+      httpsAddress = httpServer.getConnectorAddress(1);
       // assume same ssl port for all datanodes
       InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf.get(
           DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, infoHost + ":" + 50475));
       httpServer.setAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY, datanodeSslPort
           .getPort());
     }
-    httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, nn);
-    httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
-    setupServlets(httpServer, conf);
-    httpServer.start();
-    httpAddress = new InetSocketAddress(bindAddress.getAddress(),
-        httpServer.getPort());
-    if (certSSL) {
-      httpsAddress = new InetSocketAddress(bindAddress.getAddress(),
-          httpServer.getConnectorPort(1));
-    }
   }
 
   private Map<String, String> getAuthFilterParams(Configuration conf)
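With both endpoints declared on the builder, the connector index reflects
insertion order: the http:// endpoint added first is connector 0 and the
https:// endpoint is connector 1, which is why the NameNode reads httpAddress
from index 0 and httpsAddress from index 1 after start(). A sketch with
hypothetical addresses:

    import java.net.InetSocketAddress;
    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.http.HttpServer;

    public class DualEndpointSketch {
      public static void main(String[] args) throws Exception {
        // SSL material is omitted for brevity; in practice the keystore
        // and truststore must be applied to the builder (for example via
        // DFSUtil.loadSslConfToHttpServerBuilder) before build().
        HttpServer server = new HttpServer.Builder()
            .setName("sketch")
            .addEndpoint(URI.create("http://nn.example.com:50070"))  // connector 0
            .addEndpoint(URI.create("https://nn.example.com:50470")) // connector 1
            .setConf(new Configuration())
            .build();
        server.start();
        InetSocketAddress httpAddr = server.getConnectorAddress(0);
        InetSocketAddress httpsAddr = server.getConnectorAddress(1);
        System.out.println("http=" + httpAddr + " https=" + httpsAddr);
      }
    }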
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java?rev=1551703&r1=1551702&r2=1551703&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java Tue Dec 17 20:59:27 2013
@@ -30,6 +30,7 @@ import java.io.FilenameFilter;
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.URI;
+import java.net.URISyntaxException;
 import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
 import java.util.Collection;
@@ -214,7 +215,7 @@ public class SecondaryNameNode implement
 
   /**
    * Initialize SecondaryNameNode.
-   * @param commandLineOpts 
+   * @param commandLineOpts
    */
   private void initialize(final Configuration conf,
       CommandLineOpts commandLineOpts) throws IOException {
@@ -256,8 +257,15 @@ public class SecondaryNameNode implement
 
     // initialize the webserver for uploading files.
     int tmpInfoPort = infoSocAddr.getPort();
+    URI httpEndpoint;
+    try {
+      httpEndpoint = new URI("http://" + NetUtils.getHostPortString(infoSocAddr));
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+
     infoServer = new HttpServer.Builder().setName("secondary")
-        .setBindAddress(infoBindAddress).setPort(tmpInfoPort)
+        .addEndpoint(httpEndpoint)
         .setFindPort(tmpInfoPort == 0).setConf(conf).setACL(
             new AccessControlList(conf.get(DFS_ADMIN, " ")))
         .setSecurityEnabled(UserGroupInformation.isSecurityEnabled())
@@ -275,7 +283,7 @@ public class SecondaryNameNode implement
     LOG.info("Web server init done");
 
     // The web-server port can be ephemeral... ensure we have the correct info
-    infoPort = infoServer.getPort();
+    infoPort = infoServer.getConnectorAddress(0).getPort();
 
     conf.set(DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, infoBindAddress + ":" + infoPort);
     LOG.info("Secondary Web-server up at: " + infoBindAddress + ":" + infoPort);