Subject: svn commit: r1528311 - in /hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/org/apache/hadoop/hdfs/server/namenode/ src/main/webapps/hdfs/ src/test/java/org/apache/hadoop/hdfs/server/namenode/
Date: Wed, 02 Oct 2013 05:52:55 -0000
To: hdfs-commits@hadoop.apache.org
From: cnauroth@apache.org
Reply-To: hdfs-dev@hadoop.apache.org
Message-Id: <20131002055255.B5FB3238890B@eris.apache.org>

Author: cnauroth
Date: Wed Oct 2 05:52:54 2013
New Revision: 1528311

URL: http://svn.apache.org/r1528311
Log:
HDFS-5279. Merging change r1528310 from branch-2 to branch-2.1-beta

Modified:
    hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java
    hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/corrupt_files.jsp
    hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.jsp
    hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfsnodelist.jsp
    hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeJspHelper.java

Modified: hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1528311&r1=1528310&r2=1528311&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Wed Oct 2 05:52:54 2013
@@ -55,6 +55,9 @@ Release 2.1.2 - UNRELEASED
     HDFS-5255. Distcp job fails with hsftp when https is enabled in insecure
     cluster. (Arpit Agarwal)
 
+    HDFS-5279. Guard against NullPointerException in NameNode JSP pages before
+    initialization of FSNamesystem. (cnauroth)
+
 Release 2.1.1-beta - 2013-09-23
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java?rev=1528311&r1=1528310&r2=1528311&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java Wed Oct 2 05:52:54 2013
@@ -30,6 +30,7 @@ import java.net.URLEncoder;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -640,7 +641,8 @@ class NamenodeJspHelper {
         .getAttribute(JspHelper.CURRENT_CONF);
     // We can't redirect if there isn't a DN to redirect to.
     // Lets instead show a proper error message.
-    if (nn.getNamesystem().getNumLiveDataNodes() < 1) {
+    FSNamesystem fsn = nn.getNamesystem();
+    if (fsn == null || fsn.getNumLiveDataNodes() < 1) {
       throw new IOException("Can't browse the DFS since there are no " +
           "live nodes available to redirect to.");
     }
@@ -676,6 +678,20 @@ class NamenodeJspHelper {
     resp.sendRedirect(redirectLocation);
   }
 
+  /**
+   * Returns a descriptive label for the running NameNode. If the NameNode has
+   * initialized to the point of running its RPC server, then this label consists
+   * of the host and port of the RPC server. Otherwise, the label is a message
+   * stating that the NameNode is still initializing.
+   *
+   * @param nn NameNode to describe
+   * @return String NameNode label
+   */
+  static String getNameNodeLabel(NameNode nn) {
+    return nn.getRpcServer() != null ? nn.getNameNodeAddressHostPortString() :
+        "initializing";
+  }
+
   static class NodeListJsp {
     private int rowNum = 0;
@@ -830,6 +846,9 @@ class NamenodeJspHelper {
         HttpServletRequest request) throws IOException {
       final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
       final FSNamesystem ns = nn.getNamesystem();
+      if (ns == null) {
+        return;
+      }
       final DatanodeManager dm = ns.getBlockManager().getDatanodeManager();
 
       final List live = new ArrayList();
@@ -1007,14 +1026,16 @@ class NamenodeJspHelper {
     final BlockManager blockManager;
 
     XMLBlockInfo(FSNamesystem fsn, Long blockId) {
-      this.blockManager = fsn.getBlockManager();
+      this.blockManager = fsn != null ? fsn.getBlockManager() : null;
       if (blockId == null) {
         this.block = null;
         this.inode = null;
       } else {
         this.block = new Block(blockId);
-        this.inode = ((INode)blockManager.getBlockCollection(block)).asFile();
+        this.inode = blockManager != null ?
+            ((INode)blockManager.getBlockCollection(block)).asFile() :
+            null;
       }
     }
@@ -1088,8 +1109,10 @@ class NamenodeJspHelper {
       }
 
       doc.startTag("replicas");
-      for(final Iterator<DatanodeDescriptor> it = blockManager.datanodeIterator(block);
-          it.hasNext(); ) {
+      for (final Iterator<DatanodeDescriptor> it = blockManager != null ?
+          blockManager.datanodeIterator(block) :
+          Collections.<DatanodeDescriptor>emptyList().iterator();
+          it.hasNext();) {
         doc.startTag("replica");
 
         DatanodeDescriptor dd = it.next();
@@ -1125,7 +1148,7 @@ class NamenodeJspHelper {
 
     XMLCorruptBlockInfo(FSNamesystem fsn, Configuration conf,
                                int numCorruptBlocks, Long startingBlockId) {
-      this.blockManager = fsn.getBlockManager();
+      this.blockManager = fsn != null ? fsn.getBlockManager() : null;
       this.conf = conf;
       this.numCorruptBlocks = numCorruptBlocks;
       this.startingBlockId = startingBlockId;
@@ -1148,16 +1171,19 @@ class NamenodeJspHelper {
       doc.endTag();
 
       doc.startTag("num_missing_blocks");
-      doc.pcdata(""+blockManager.getMissingBlocksCount());
+      doc.pcdata("" + (blockManager != null ?
+          blockManager.getMissingBlocksCount() : 0));
      doc.endTag();
 
      doc.startTag("num_corrupt_replica_blocks");
-      doc.pcdata(""+blockManager.getCorruptReplicaBlocksCount());
+      doc.pcdata("" + (blockManager != null ?
+          blockManager.getCorruptReplicaBlocksCount() : 0));
      doc.endTag();
 
      doc.startTag("corrupt_replica_block_ids");
-      final long[] corruptBlockIds = blockManager.getCorruptReplicaBlockIds(
-          numCorruptBlocks, startingBlockId);
+      final long[] corruptBlockIds = blockManager != null ?
+          blockManager.getCorruptReplicaBlockIds(numCorruptBlocks,
+              startingBlockId) : null;
      if (corruptBlockIds != null) {
        for (Long blockId: corruptBlockIds) {
          doc.startTag("block_id");

Modified: hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/corrupt_files.jsp
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/corrupt_files.jsp?rev=1528311&r1=1528310&r2=1528311&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/corrupt_files.jsp (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/corrupt_files.jsp Wed Oct 2 05:52:54 2013
@@ -25,6 +25,7 @@ import="org.apache.hadoop.fs.Path"
   import="org.apache.hadoop.ha.HAServiceProtocol.HAServiceState"
   import="java.util.Collection"
+  import="java.util.Collections"
   import="java.util.Arrays" %>
 <%!//for java.io.Serializable
   private static final long serialVersionUID = 1L;%>
@@ -34,9 +35,10 @@
   HAServiceState nnHAState = nn.getServiceState();
   boolean isActive = (nnHAState == HAServiceState.ACTIVE);
   String namenodeRole = nn.getRole().toString();
-  String namenodeLabel = nn.getNameNodeAddressHostPortString();
-  Collection corruptFileBlocks =
-    fsn.listCorruptFileBlocks("/", null);
+  String namenodeLabel = NamenodeJspHelper.getNameNodeLabel(nn);
+  Collection corruptFileBlocks = fsn != null ?
+    fsn.listCorruptFileBlocks("/", null) :
+    Collections.emptyList();
   int corruptFileCount = corruptFileBlocks.size();
 %>
@@ -48,7 +50,7 @@
 <%=namenodeRole%> '<%=namenodeLabel%>'
 <%=NamenodeJspHelper.getVersionTable(fsn)%>
-<% if (isActive) { %>
+<% if (isActive && fsn != null) { %>
   Browse the filesystem
 <% } %>

Modified: hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.jsp
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.jsp?rev=1528311&r1=1528310&r2=1528311&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.jsp (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.jsp Wed Oct 2 05:52:54 2013
@@ -34,29 +34,20 @@
   boolean isActive = (nnHAState == HAServiceState.ACTIVE);
   String namenodeRole = nn.getRole().toString();
   String namenodeState = nnHAState.toString();
-  String namenodeLabel = nn.getRpcServer() != null ?
-      nn.getNameNodeAddressHostPortString() : null;
+  String namenodeLabel = NamenodeJspHelper.getNameNodeLabel(nn);
 %>
-<% if (namenodeLabel != null) { %>
 Hadoop <%=namenodeRole%> <%=namenodeLabel%>
-<% } else { %>
-Hadoop <%=namenodeRole%>
-<% } %>
-<% if (namenodeLabel != null) { %>
 <%=namenodeRole%> '<%=namenodeLabel%>' (<%=namenodeState%>)
-<% } else { %>
-<%=namenodeRole%> (<%=namenodeState%>)
-<% } %>
 <%= NamenodeJspHelper.getVersionTable(fsn) %>
-<% if (isActive) { %>
+<% if (isActive && fsn != null) { %>
   Browse the filesystem
 <% } %>
 <%=namenodeRole%> Logs

Modified: hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfsnodelist.jsp
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfsnodelist.jsp?rev=1528311&r1=1528310&r2=1528311&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfsnodelist.jsp (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfsnodelist.jsp Wed Oct 2 05:52:54 2013
@@ -33,7 +33,7 @@ String namenodeRole = nn.getRole().toStr
 FSNamesystem fsn = nn.getNamesystem();
 HAServiceState nnHAState = nn.getServiceState();
 boolean isActive = (nnHAState == HAServiceState.ACTIVE);
-String namenodeLabel = nn.getNameNodeAddressHostPortString();
+String namenodeLabel = NamenodeJspHelper.getNameNodeLabel(nn);
 %>
@@ -46,7 +46,7 @@ String namenodeLabel = nn.getNameNodeAdd
 <%=namenodeRole%> '<%=namenodeLabel%>'
 <%= NamenodeJspHelper.getVersionTable(fsn) %>
-<% if (isActive) { %>
+<% if (isActive && fsn != null) { %>
   Browse the filesystem
 <% } %>
 <%=namenodeRole%> Logs
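
Every hunk above applies the same defensive pattern: before a JSP page touches FSNamesystem or the RPC server, it checks whether that component has finished initializing and substitutes a harmless default (a fixed "initializing" label, an empty collection, or a zero count) instead of dereferencing null. A minimal, self-contained Java sketch of that pattern follows; StartupGuardSketch, NameNodeView, and NamesystemView are illustrative stand-ins, not classes from this patch.

// Illustrative sketch only: stand-in types showing the guard pattern the
// commit applies to the real NameNode JSP helpers.
import java.util.Collections;
import java.util.List;

public class StartupGuardSketch {

  /** Stand-in for the parts of NameNode the JSP helpers rely on. */
  interface NameNodeView {
    Object getRpcServer();          // null until the RPC server is up
    NamesystemView getNamesystem(); // null until FSNamesystem is loaded
  }

  /** Stand-in for the parts of FSNamesystem the JSP pages read. */
  interface NamesystemView {
    int getNumLiveDataNodes();
    List<String> listCorruptFileBlocks();
  }

  /** Mirrors NamenodeJspHelper.getNameNodeLabel: a host:port label once the
   *  RPC server exists, otherwise a static "initializing" label. */
  static String getLabel(NameNodeView nn, String hostPort) {
    return nn.getRpcServer() != null ? hostPort : "initializing";
  }

  /** Mirrors the JSP-side guards: never dereference a null namesystem;
   *  fall back to an empty collection instead. */
  static List<String> corruptFilesOrEmpty(NameNodeView nn) {
    NamesystemView fsn = nn.getNamesystem();
    return fsn != null ? fsn.listCorruptFileBlocks()
                       : Collections.<String>emptyList();
  }

  public static void main(String[] args) {
    // A NameNode that has not finished starting up yet.
    NameNodeView startingUp = new NameNodeView() {
      public Object getRpcServer() { return null; }
      public NamesystemView getNamesystem() { return null; }
    };
    System.out.println(getLabel(startingUp, "nn1.example.com:8020")); // initializing
    System.out.println(corruptFilesOrEmpty(startingUp));              // []
  }
}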
Modified: hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeJspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeJspHelper.java?rev=1528311&r1=1528310&r2=1528311&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeJspHelper.java (original)
+++ hadoop/common/branches/branch-2.1-beta/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeJspHelper.java Wed Oct 2 05:52:54 2013
@@ -25,12 +25,15 @@ import static org.apache.hadoop.hdfs.ser
 import static org.mockito.Mockito.atLeastOnce;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 import java.io.IOException;
 import java.util.List;
 import java.util.regex.Pattern;
 
+import javax.servlet.ServletContext;
 import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
 import javax.servlet.jsp.JspWriter;
 
 import org.apache.hadoop.conf.Configuration;
@@ -45,6 +48,7 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.ArgumentCaptor;
+import org.znerd.xmlenc.XMLOutputter;
 
 public class TestNameNodeJspHelper {
 
@@ -118,6 +122,66 @@ public class TestNameNodeJspHelper {
   }
 
   /**
+   * Tests for non-null, non-empty NameNode label.
+   */
+  @Test
+  public void testGetNameNodeLabel() {
+    String nameNodeLabel = NamenodeJspHelper.getNameNodeLabel(
+        cluster.getNameNode());
+    Assert.assertNotNull(nameNodeLabel);
+    Assert.assertFalse(nameNodeLabel.isEmpty());
+  }
+
+  /**
+   * Tests for non-null, non-empty NameNode label when called before
+   * initialization of the NameNode RPC server.
+   */
+  @Test
+  public void testGetNameNodeLabelNullRpcServer() {
+    NameNode nn = mock(NameNode.class);
+    when(nn.getRpcServer()).thenReturn(null);
+    String nameNodeLabel = NamenodeJspHelper.getNameNodeLabel(nn);
+    Assert.assertNotNull(nameNodeLabel);
+    Assert.assertFalse(nameNodeLabel.isEmpty());
+  }
+
+  /**
+   * Tests that redirectToRandomDataNode does not throw NullPointerException if
+   * it finds a null FSNamesystem.
+   */
+  @Test(expected=IOException.class)
+  public void testRedirectToRandomDataNodeNullNamesystem() throws Exception {
+    NameNode nn = mock(NameNode.class);
+    when(nn.getNamesystem()).thenReturn(null);
+    ServletContext context = mock(ServletContext.class);
+    when(context.getAttribute("name.node")).thenReturn(nn);
+    NamenodeJspHelper.redirectToRandomDataNode(context,
+        mock(HttpServletRequest.class), mock(HttpServletResponse.class));
+  }
+
+  /**
+   * Tests that XMLBlockInfo does not throw NullPointerException if it finds a
+   * null FSNamesystem.
+   */
+  @Test
+  public void testXMLBlockInfoNullNamesystem() throws IOException {
+    XMLOutputter doc = new XMLOutputter(mock(JspWriter.class), "UTF-8");
+    new NamenodeJspHelper.XMLBlockInfo(null, 1L).toXML(doc);
+  }
+
+  /**
+   * Tests that XMLCorruptBlockInfo does not throw NullPointerException if it
+   * finds a null FSNamesystem.
+   */
+  @Test
+  public void testXMLCorruptBlockInfoNullNamesystem() throws IOException {
+    XMLOutputter doc = new XMLOutputter(mock(JspWriter.class), "UTF-8");
+    new NamenodeJspHelper.XMLCorruptBlockInfo(null, mock(Configuration.class),
+        10, 1L).toXML(doc);
+  }
+
+  /**
   * Checks if the list contains any string that partially matches the regex.
   *
   * @param list List containing strings to check
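
The new tests lean on one piece of Mockito plumbing worth noting: NamenodeJspHelper locates the NameNode through the ServletContext attribute "name.node", so stubbing that single attribute (plus the getters that should return null) is enough to simulate a NameNode that has not finished starting. A generic, self-contained sketch of that stubbing pattern follows; the describeService helper, the "example.service" attribute, and the class name are hypothetical stand-ins, not part of the patch.

// Illustrative sketch only: unit-testing a servlet-scoped helper with a
// stubbed ServletContext, in the same spirit as the tests added above.
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import javax.servlet.ServletContext;

import org.junit.Assert;
import org.junit.Test;

public class ServletContextStubSketch {

  /** Hypothetical helper that, like NamenodeJspHelper, pulls its collaborator
   *  out of the ServletContext and must tolerate a half-initialized one. */
  static String describeService(ServletContext context) {
    Object service = context.getAttribute("example.service");
    return service != null ? service.toString() : "initializing";
  }

  @Test
  public void testDescribeServiceBeforeInitialization() {
    // Stub a context whose attribute has not been registered yet.
    ServletContext context = mock(ServletContext.class);
    when(context.getAttribute("example.service")).thenReturn(null);
    Assert.assertEquals("initializing", describeService(context));
  }
}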