hadoop-hdfs-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From dhr...@apache.org
Subject svn commit: r937763 - in /hadoop/hdfs/trunk: CHANGES.txt src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestCorruptFilesJsp.java src/webapps/hdfs/corrupt_files.jsp src/webapps/hdfs/dfshealth.jsp
Date Sun, 25 Apr 2010 08:52:06 GMT
Author: dhruba
Date: Sun Apr 25 08:52:06 2010
New Revision: 937763

URL: http://svn.apache.org/viewvc?rev=937763&view=rev
Log:
HDFS-1031. Enhance the webUi to list a few of the corrupted files in HDFS.
(André Oriani via dhruba)



Added:
    hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestCorruptFilesJsp.java
    hadoop/hdfs/trunk/src/webapps/hdfs/corrupt_files.jsp
Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/webapps/hdfs/dfshealth.jsp

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=937763&r1=937762&r2=937763&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Sun Apr 25 08:52:06 2010
@@ -286,6 +286,9 @@ Trunk (unreleased changes)
 
     HDFS-1101. TestDiskError.testLocalDirs() fails. (cdouglas via jghoman)
 
+    HDFS-1031. Enhance the webUi to list a few of the corrupted files in HDFS.
+    (André Oriani via dhruba)
+
 Release 0.21.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Added: hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestCorruptFilesJsp.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestCorruptFilesJsp.java?rev=937763&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestCorruptFilesJsp.java (added)
+++ hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestCorruptFilesJsp.java Sun Apr 25 08:52:06 2010
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode;
+
+import static org.junit.Assert.assertTrue;
+
+import java.net.URL;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.ChecksumException;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DFSClient;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSTestUtil;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.TestDatanodeBlockScanner;
+import org.apache.hadoop.hdfs.protocol.ClientProtocol;
+import org.junit.Test;
+
+/**
+ * A JUnit test for corrupt_files.jsp: verifies that the NameNode web page
+ * reports a healthy filesystem while no blocks are corrupt, and that after
+ * replicas are corrupted it lists each corrupt file by path together with
+ * a summary count.
+ */
+public class TestCorruptFilesJsp  {
+
+  @Test
+  public void testCorruptFilesJsp() throws Exception {
+    MiniDFSCluster cluster = null;
+    try {
+
+      // length in bytes of every file created below
+      final int FILE_SIZE = 512;
+
+      // four test files; all but the last ("/audio/audio") get corrupted
+      Path[] filepaths = { new Path("/audiobook"), new Path("/audio/audio1"),
+          new Path("/audio/audio2"), new Path("/audio/audio") };
+
+      Configuration conf = new HdfsConfiguration();
+      // datanode scans directories every second, so on-disk corruption
+      // is noticed quickly
+      conf.setInt("dfs.datanode.directoryscan.interval", 1);
+      // datanode sends block reports every 3 seconds, so the namenode
+      // learns about corrupt replicas promptly
+      conf.setInt("dfs.blockreport.intervalMsec", 3 * 1000);
+      cluster = new MiniDFSCluster(conf, 1, true, null);
+      cluster.waitActive();
+
+      FileSystem fs = cluster.getFileSystem();
+
+      // create the files with replication factor 1, so corrupting the
+      // single replica leaves no healthy copy of the block
+      for (Path filepath : filepaths) {
+        DFSTestUtil.createFile(fs, filepath, FILE_SIZE, (short) 1, 0L);
+        DFSTestUtil.waitReplication(fs, filepath, (short) 1);
+      }
+
+      // verify there are no corrupt files yet (via the namenode RPC)
+      ClientProtocol namenode = DFSClient.createNamenode(conf);
+      FileStatus[] badFiles = namenode.getCorruptFiles();
+      assertTrue("There are " + badFiles.length
+          + " corrupt files, but expecting none", badFiles.length == 0);
+
+      // Check if webui agrees with the RPC result: the page must show
+      // the healthy-filesystem message
+      URL url = new URL("http://"
+          + conf.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY)
+          + "/corrupt_files.jsp");
+      String corruptFilesPage = DFSTestUtil.urlGet(url);
+      assertTrue("Corrupt files page is not showing a healthy filesystem",
+          corruptFilesPage.contains("No missing blocks found at the moment."));
+
+      // Now corrupt all the files except for the last one
+      for (int idx = 0; idx < filepaths.length - 1; idx++) {
+        String blockName = DFSTestUtil.getFirstBlock(fs, filepaths[idx])
+            .getBlockName();
+        TestDatanodeBlockScanner.corruptReplica(blockName, 0);
+
+        // read the file so that the corrupt block is reported to NN
+        FSDataInputStream in = fs.open(filepaths[idx]);
+        try {
+          in.readFully(new byte[FILE_SIZE]);
+        } catch (ChecksumException ignored) { // checksum error is expected.
+        }
+        in.close();
+      }
+
+      // verify if all corrupt files were reported to NN
+      badFiles = namenode.getCorruptFiles();
+      assertTrue("Expecting 3 corrupt files, but got " + badFiles.length,
+          badFiles.length == 3);
+
+      // Check if webui agrees: every corrupted path must be listed by
+      // name, and the summary line must show the count
+      url = new URL("http://"
+          + conf.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY)
+          + "/corrupt_files.jsp");
+      corruptFilesPage = DFSTestUtil.urlGet(url);
+      assertTrue("'/audiobook' should be corrupt", corruptFilesPage
+          .contains("/audiobook"));
+      assertTrue("'/audio/audio1' should be corrupt", corruptFilesPage
+          .contains("/audio/audio1"));
+      assertTrue("'/audio/audio2' should be corrupt", corruptFilesPage
+          .contains("/audio/audio2"));
+      assertTrue("Summary message shall report 3 corrupt files",
+          corruptFilesPage.contains("At least 3 corrupt file(s)"));
+
+      // clean up the test files
+      for (Path filepath : filepaths) {
+        fs.delete(filepath, false);
+      }
+    } finally {
+      // always shut the mini-cluster down, even when an assertion fails
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+
+  }
+
+}

Added: hadoop/hdfs/trunk/src/webapps/hdfs/corrupt_files.jsp
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/webapps/hdfs/corrupt_files.jsp?rev=937763&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/webapps/hdfs/corrupt_files.jsp (added)
+++ hadoop/hdfs/trunk/src/webapps/hdfs/corrupt_files.jsp Sun Apr 25 08:52:06 2010
@@ -0,0 +1,78 @@
+
+<%
+  /*
+   * Licensed to the Apache Software Foundation (ASF) under one
+   * or more contributor license agreements.  See the NOTICE file
+   * distributed with this work for additional information
+   * regarding copyright ownership.  The ASF licenses this file
+   * to you under the Apache License, Version 2.0 (the
+   * "License"); you may not use this file except in compliance
+   * with the License.  You may obtain a copy of the License at
+   *
+   *     http://www.apache.org/licenses/LICENSE-2.0
+   *
+   * Unless required by applicable law or agreed to in writing, software
+   * distributed under the License is distributed on an "AS IS" BASIS,
+   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   * See the License for the specific language governing permissions and
+   * limitations under the License.
+   */
+%>
+<%-- Lists the files the NameNode currently reports as corrupt, or a
+     healthy-filesystem message when there are none. Linked from the
+     warning text on dfshealth.jsp. --%>
+<%@ page contentType="text/html; charset=UTF-8"
+	import="org.apache.hadoop.util.ServletUtil"
+	import="org.apache.hadoop.fs.FileStatus"
+	import="org.apache.hadoop.fs.FileUtil"
+	import="org.apache.hadoop.fs.Path"
+	import="java.util.Arrays" %>
+<%!//for java.io.Serializable
+  private static final long serialVersionUID = 1L;%>
+<%
+  // NameNode, FSNamesystem and NamenodeJspHelper are referenced without
+  // page imports -- presumably this JSP compiles into the namenode
+  // package; TODO confirm against the webapp build configuration.
+  NameNode nn = (NameNode) application.getAttribute("name.node");
+  FSNamesystem fsn = nn.getNamesystem();
+  String namenodeRole = nn.getRole().toString();
+  // "host:port" label used in the page title and heading
+  String namenodeLabel = nn.getNameNodeAddress().getHostName() + ":"
+      + nn.getNameNodeAddress().getPort();
+  FileStatus[] corruptFileStatuses = nn.getCorruptFiles();
+  Path[] corruptFilePaths = FileUtil.stat2Paths(corruptFileStatuses);
+  int corruptFileCount = corruptFileStatuses.length;
+%>
+
+<html>
+<link rel="stylesheet" type="text/css" href="/static/hadoop.css">
+<title>Hadoop <%=namenodeRole%> <%=namenodeLabel%></title>
+<body>
+<h1><%=namenodeRole%> '<%=namenodeLabel%>'</h1>
+<%=NamenodeJspHelper.getVersionTable(fsn)%>
+<br>
+<b><a href="/nn_browsedfscontent.jsp">Browse the filesystem</a></b>
+<br>
+<b><a href="/logs/"><%=namenodeRole%> Logs</a></b>
+<br>
+<%-- NOTE(review): the href below is unquoted, unlike the links above;
+     consider href="/dfshealth.jsp" for consistency. --%>
+<b><a href=/dfshealth.jsp> Go back to DFS home</a></b>
+<hr>
+<h3>Reported Corrupt Files</h3>
+<%
+  if (corruptFileCount == 0) {
+%>
+    <%-- TestCorruptFilesJsp matches the next line verbatim; keep in sync. --%>
+    <i>No missing blocks found at the moment.</i> <br>
+    Please run fsck for a thorough health analysis.
+<%
+  } else {
+    // sort the paths so the listing order is deterministic
+    Arrays.sort(corruptFilePaths);
+    for (Path corruptFilePath : corruptFilePaths) {
+      String currentPath = corruptFilePath.toString();
+%>
+      <%=currentPath%><br>
+<%
+    }
+%>
+    <p>
+      <%-- "At least": the namenode may not return every corrupt file --
+           TODO confirm whether getCorruptFiles() truncates its result. --%>
+      <b>Total:</b> At least <%=corruptFileCount%> corrupt file(s)
+    </p>
+<%
+  }
+%>
+
+<%
+  out.println(ServletUtil.htmlFooter());
+%>

Modified: hadoop/hdfs/trunk/src/webapps/hdfs/dfshealth.jsp
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/webapps/hdfs/dfshealth.jsp?rev=937763&r1=937762&r2=937763&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/webapps/hdfs/dfshealth.jsp (original)
+++ hadoop/hdfs/trunk/src/webapps/hdfs/dfshealth.jsp Sun Apr 25 08:52:06 2010
@@ -49,7 +49,9 @@
 <h3>Cluster Summary</h3>
 <b> <%= NamenodeJspHelper.getSafeModeText(fsn)%> </b>
 <b> <%= NamenodeJspHelper.getInodeLimitText(fsn)%> </b>
-<a class="warning"><%= NamenodeJspHelper.getWarningText(fsn)%></a>
+<a class="warning" href="/corrupt_files.jsp" title="List corrupt files">
+  <%= NamenodeJspHelper.getWarningText(fsn)%>
+</a>
 
 <% healthjsp.generateHealthReport(out, nn, request); %>
 <hr>



Mime
View raw message