Return-Path: X-Original-To: apmail-hadoop-hdfs-commits-archive@minotaur.apache.org Delivered-To: apmail-hadoop-hdfs-commits-archive@minotaur.apache.org Received: from mail.apache.org (hermes.apache.org [140.211.11.3]) by minotaur.apache.org (Postfix) with SMTP id DA0CB100B3 for ; Wed, 5 Mar 2014 21:51:51 +0000 (UTC) Received: (qmail 27926 invoked by uid 500); 5 Mar 2014 21:51:50 -0000 Delivered-To: apmail-hadoop-hdfs-commits-archive@hadoop.apache.org Received: (qmail 27879 invoked by uid 500); 5 Mar 2014 21:51:49 -0000 Mailing-List: contact hdfs-commits-help@hadoop.apache.org; run by ezmlm Precedence: bulk List-Help: List-Unsubscribe: List-Post: List-Id: Reply-To: hdfs-dev@hadoop.apache.org Delivered-To: mailing list hdfs-commits@hadoop.apache.org Received: (qmail 27870 invoked by uid 99); 5 Mar 2014 21:51:49 -0000 Received: from nike.apache.org (HELO nike.apache.org) (192.87.106.230) by apache.org (qpsmtpd/0.29) with ESMTP; Wed, 05 Mar 2014 21:51:49 +0000 X-ASF-Spam-Status: No, hits=-2000.0 required=5.0 tests=ALL_TRUSTED X-Spam-Check-By: apache.org Received: from [140.211.11.4] (HELO eris.apache.org) (140.211.11.4) by apache.org (qpsmtpd/0.29) with ESMTP; Wed, 05 Mar 2014 21:51:46 +0000 Received: from eris.apache.org (localhost [127.0.0.1]) by eris.apache.org (Postfix) with ESMTP id 0995E23888E4; Wed, 5 Mar 2014 21:51:25 +0000 (UTC) Content-Type: text/plain; charset="utf-8" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: svn commit: r1574683 - in /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: CHANGES.txt src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Date: Wed, 05 Mar 2014 21:51:24 -0000 To: hdfs-commits@hadoop.apache.org From: wheat9@apache.org X-Mailer: svnmailer-1.0.9 Message-Id: <20140305215125.0995E23888E4@eris.apache.org> X-Virus-Checked: Checked by ClamAV on apache.org Author: wheat9 Date: Wed Mar 5 21:51:24 2014 New Revision: 1574683 URL: http://svn.apache.org/r1574683 Log: HDFS-5857. 
TestWebHDFS#testNamenodeRestart fails intermittently with NPE. Contributed by Mit Desai. Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1574683&r1=1574682&r2=1574683&view=diff ============================================================================== --- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original) +++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Wed Mar 5 21:51:24 2014 @@ -683,6 +683,9 @@ Release 2.4.0 - UNRELEASED HDFS-6051. HDFS cannot run on Windows since short-circuit shared memory segment changes. (cmccabe) + HDFS-5857. TestWebHDFS#testNamenodeRestart fails intermittently with NPE. + (Mit Desai via wheat9) + BREAKDOWN OF HDFS-5698 SUBTASKS AND RELATED JIRAS HDFS-5717. Save FSImage header in protobuf. 
(Haohui Mai via jing9) Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java?rev=1574683&r1=1574682&r2=1574683&view=diff ============================================================================== --- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java (original) +++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Wed Mar 5 21:51:24 2014 @@ -164,6 +164,15 @@ public class NamenodeWebHdfsMethods { response.setContentType(null); } + private static NamenodeProtocols getRPCServer(NameNode namenode) + throws IOException { + final NamenodeProtocols np = namenode.getRpcServer(); + if (np == null) { + throw new IOException("Namenode is in startup mode"); + } + return np; + } + @VisibleForTesting static DatanodeInfo chooseDatanode(final NameNode namenode, final String path, final HttpOpParam.Op op, final long openOffset, @@ -188,7 +197,7 @@ public class NamenodeWebHdfsMethods { || op == GetOpParam.Op.GETFILECHECKSUM || op == PostOpParam.Op.APPEND) { //choose a datanode containing a replica - final NamenodeProtocols np = namenode.getRpcServer(); + final NamenodeProtocols np = getRPCServer(namenode); final HdfsFileStatus status = np.getFileInfo(path); if (status == null) { throw new FileNotFoundException("File " + path + " not found."); @@ -424,7 +433,7 @@ public class NamenodeWebHdfsMethods { final Configuration conf = (Configuration)context.getAttribute(JspHelper.CURRENT_CONF); final NameNode namenode = (NameNode)context.getAttribute("name.node"); - final NamenodeProtocols np = namenode.getRpcServer(); 
+ final NamenodeProtocols np = getRPCServer(namenode); switch(op.getValue()) { case CREATE: @@ -606,7 +615,7 @@ public class NamenodeWebHdfsMethods { } case CONCAT: { - namenode.getRpcServer().concat(fullpath, concatSrcs.getAbsolutePaths()); + getRPCServer(namenode).concat(fullpath, concatSrcs.getAbsolutePaths()); return Response.ok().build(); } default: @@ -696,7 +705,7 @@ public class NamenodeWebHdfsMethods { final BufferSizeParam bufferSize ) throws IOException, URISyntaxException { final NameNode namenode = (NameNode)context.getAttribute("name.node"); - final NamenodeProtocols np = namenode.getRpcServer(); + final NamenodeProtocols np = getRPCServer(namenode); switch(op.getValue()) { case OPEN: @@ -905,7 +914,7 @@ public class NamenodeWebHdfsMethods { switch(op.getValue()) { case DELETE: { - final boolean b = namenode.getRpcServer().delete(fullpath, recursive.getValue()); + final boolean b = getRPCServer(namenode).delete(fullpath, recursive.getValue()); final String js = JsonUtil.toJsonString("boolean", b); return Response.ok(js).type(MediaType.APPLICATION_JSON).build(); }