hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From szets...@apache.org
Subject svn commit: r740064 - in /hadoop/core/trunk: ./ src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/webapps/datanode/ src/webapps/hdfs/
Date Mon, 02 Feb 2009 18:36:03 GMT
Author: szetszwo
Date: Mon Feb  2 18:36:01 2009
New Revision: 740064

URL: http://svn.apache.org/viewvc?rev=740064&view=rev
Log:
HADOOP-5097. Remove static variable JspHelper.fsn.  (szetszwo)

Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
    hadoop/core/trunk/src/webapps/datanode/browseBlock.jsp
    hadoop/core/trunk/src/webapps/datanode/browseDirectory.jsp
    hadoop/core/trunk/src/webapps/datanode/tail.jsp
    hadoop/core/trunk/src/webapps/hdfs/dfshealth.jsp
    hadoop/core/trunk/src/webapps/hdfs/dfsnodelist.jsp

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=740064&r1=740063&r2=740064&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Mon Feb  2 18:36:01 2009
@@ -56,6 +56,9 @@
     or the main Java task in Hadoop's case, kills the entire subtree of
     processes. (Ravi Gummadi via ddas)
 
+    HADOOP-5097. Remove static variable JspHelper.fsn, a static reference to
+    a non-singleton FSNamesystem object.  (szetszwo)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=740064&r1=740063&r2=740064&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Mon Feb  2 18:36:01 2009
@@ -61,7 +61,6 @@
 import java.io.PrintWriter;
 import java.io.DataOutputStream;
 import java.net.InetAddress;
-import java.net.InetSocketAddress;
 import java.util.*;
 import java.util.Map.Entry;
 
@@ -244,8 +243,6 @@
   private int replIndex = 0;
 
   private static FSNamesystem fsNamesystemObject;
-  /** NameNode RPC address */
-  private InetSocketAddress nameNodeAddress = null; // TODO: name-node has this field, it should be removed here
   private SafeModeInfo safeMode;  // safe mode information
   private Host2NodesMap host2DataNodeMap = new Host2NodesMap();
     
@@ -292,7 +289,6 @@
     this.systemStart = now();
     setConfigurationParameters(conf);
 
-    this.nameNodeAddress = nn.getNameNodeAddress();
     this.registerMBean(conf); // register the MBean for the FSNamesystemStutus
     this.dir = new FSDirectory(this, conf);
     StartupOption startOpt = NameNode.getStartupOption(conf);
@@ -3457,16 +3453,6 @@
     return datanodeMap.get(name);
   }
 
-  /**
-   * @deprecated use {@link NameNode#getNameNodeAddress()} instead.
-   */
-  @Deprecated
-  public InetSocketAddress getDFSNameNodeAddress() {
-    return nameNodeAddress;
-  }
-
-  /**
-   */
   public Date getStartTime() {
     return new Date(systemStart); 
   }

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java?rev=740064&r1=740063&r2=740064&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java Mon Feb  2 18:36:01 2009
@@ -34,6 +34,8 @@
  * @see org.apache.hadoop.hdfs.HftpFileSystem
  */
 public class FileDataServlet extends DfsServlet {
+  /** For java.io.Serializable */
+  private static final long serialVersionUID = 1L;
 
   /** Create a redirection URI */
   protected URI createUri(FileStatus i, UnixUserGroupInformation ugi,
@@ -54,26 +56,20 @@
         "/streamFile", "filename=" + i.getPath() + "&ugi=" + ugi, null);
   }
 
-  private static JspHelper jspHelper = null;
-
   /** Select a datanode to service this request.
    * Currently, this looks at no more than the first five blocks of a file,
    * selecting a datanode randomly from the most represented.
    */
-  private static DatanodeID pickSrcDatanode(FileStatus i,
+  private DatanodeID pickSrcDatanode(FileStatus i,
       ClientProtocol nnproxy) throws IOException {
-    // a race condition can happen by initializing a static member this way.
-    // A proper fix should make JspHelper a singleton. Since it doesn't affect 
-    // correctness, we leave it as is for now.
-    if (jspHelper == null)
-      jspHelper = new JspHelper();
     final LocatedBlocks blks = nnproxy.getBlockLocations(
         i.getPath().toUri().getPath(), 0, 1);
     if (i.getLen() == 0 || blks.getLocatedBlocks().size() <= 0) {
       // pick a random datanode
-      return jspHelper.randomNode();
+      NameNode nn = (NameNode)getServletContext().getAttribute("name.node");
+      return nn.getNamesystem().getRandomDatanode();
     }
-    return jspHelper.bestNode(blks.get(0));
+    return JspHelper.bestNode(blks.get(0));
   }
 
   /**

Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java?rev=740064&r1=740063&r2=740064&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java Mon Feb  2 18:36:01 2009
@@ -32,51 +32,39 @@
 import javax.servlet.jsp.JspWriter;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSClient;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.FSConstants.UpgradeAction;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants;
 import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
-import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.*;
+import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.VersionInfo;
 
 public class JspHelper {
   final static public String WEB_UGI_PROPERTY_NAME = "dfs.web.ugi";
 
-  static FSNamesystem fsn = null;
-  public static InetSocketAddress nameNodeAddr;
   public static final Configuration conf = new Configuration();
   public static final UnixUserGroupInformation webUGI
   = UnixUserGroupInformation.createImmutable(
       conf.getStrings(WEB_UGI_PROPERTY_NAME));
 
-  public static final int defaultChunkSizeToView = 
+  private static final int defaultChunkSizeToView = 
     conf.getInt("dfs.default.chunk.view.size", 32 * 1024);
-  static Random rand = new Random();
-
-  public JspHelper() {
-    if (DataNode.getDataNode() != null) {
-      nameNodeAddr = DataNode.getDataNode().getNameNodeAddr();
-    }
-    else {
-      fsn = FSNamesystem.getFSNamesystem();
-      nameNodeAddr = fsn.getDFSNameNodeAddress(); 
-    }      
+  static final Random rand = new Random();
 
+  static {
     UnixUserGroupInformation.saveToConf(conf,
         UnixUserGroupInformation.UGI_PROPERTY_NAME, webUGI);
   }
 
-  public DatanodeID randomNode() throws IOException {
-    return fsn.getRandomDatanode();
-  }
+  /** Private constructor for preventing creating JspHelper object. */
+  private JspHelper() {} 
 
-  public DatanodeInfo bestNode(LocatedBlock blk) throws IOException {
+  public static DatanodeInfo bestNode(LocatedBlock blk) throws IOException {
     TreeSet<DatanodeInfo> deadNodes = new TreeSet<DatanodeInfo>();
     DatanodeInfo chosenNode = null;
     int failures = 0;
@@ -115,7 +103,8 @@
     s.close();
     return chosenNode;
   }
-  public void streamBlockInAscii(InetSocketAddress addr, long blockId, 
+
+  public static void streamBlockInAscii(InetSocketAddress addr, long blockId, 
                                  long genStamp, long blockSize, 
                                  long offsetIntoBlock, long chunkSizeToView, JspWriter out)

     throws IOException {
@@ -155,24 +144,20 @@
     s.close();
     out.print(new String(buf));
   }
-  public void DFSNodesStatus(ArrayList<DatanodeDescriptor> live,
-                             ArrayList<DatanodeDescriptor> dead) {
-    if (fsn != null)
-      fsn.DFSNodesStatus(live, dead);
-  }
-  public void addTableHeader(JspWriter out) throws IOException {
+
+  public static void addTableHeader(JspWriter out) throws IOException {
     out.print("<table border=\"1\""+
               " cellpadding=\"2\" cellspacing=\"2\">");
     out.print("<tbody>");
   }
-  public void addTableRow(JspWriter out, String[] columns) throws IOException {
+  public static void addTableRow(JspWriter out, String[] columns) throws IOException {
     out.print("<tr>");
     for (int i = 0; i < columns.length; i++) {
       out.print("<td style=\"vertical-align: top;\"><B>"+columns[i]+"</B><br></td>");
     }
     out.print("</tr>");
   }
-  public void addTableRow(JspWriter out, String[] columns, int row) throws IOException {
+  public static void addTableRow(JspWriter out, String[] columns, int row) throws IOException {
     out.print("<tr>");
       
     for (int i = 0; i < columns.length; i++) {
@@ -185,17 +170,17 @@
     }
     out.print("</tr>");
   }
-  public void addTableFooter(JspWriter out) throws IOException {
+  public static void addTableFooter(JspWriter out) throws IOException {
     out.print("</tbody></table>");
   }
 
-  public String getSafeModeText() {
+  public static String getSafeModeText(FSNamesystem fsn) {
     if (!fsn.isInSafeMode())
       return "";
     return "Safe mode is ON. <em>" + fsn.getSafeModeTip() + "</em><br>";
   }
 
-  public String getInodeLimitText() {
+  public static String getInodeLimitText(FSNamesystem fsn) {
     long inodes = fsn.dir.totalInodes();
     long blocks = fsn.getBlocksTotal();
     long maxobjects = fsn.getMaxObjects();
@@ -217,7 +202,7 @@
     return str;
   }
 
-  public String getUpgradeStatusText() {
+  public static String getUpgradeStatusText(FSNamesystem fsn) {
     String statusText = "";
     try {
       UpgradeStatusReport status = 
@@ -231,7 +216,7 @@
     return statusText;
   }
 
-  public void sortNodeList(ArrayList<DatanodeDescriptor> nodes,
+  public static void sortNodeList(ArrayList<DatanodeDescriptor> nodes,
                            String field, String order) {
         
     class NodeComapare implements Comparator<DatanodeDescriptor> {
@@ -370,4 +355,20 @@
       file = "..." + file.substring(start, file.length());
     out.print("<title>HDFS:" + file + "</title>");
   }
+
+  /** Convert a String to chunk-size-to-view. */
+  public static int string2ChunkSizeToView(String s) {
+    int n = s == null? 0: Integer.parseInt(s);
+    return n > 0? n: defaultChunkSizeToView;
+  }
+
+  /** Return a table containing version information. */
+  public static String getVersionTable(FSNamesystem fsn) {
+    return "<div id='dfstable'><table>"       
+        + "\n  <tr><td id='col1'>Started:</td><td>" + fsn.getStartTime() + "</td></tr>\n"
+        + "\n  <tr><td id='col1'>Version:</td><td>" + VersionInfo.getVersion() + ", " + VersionInfo.getRevision()
+        + "\n  <tr><td id='col1'>Compiled:</td><td>" + VersionInfo.getDate() + " by " + VersionInfo.getUser() + " from " + VersionInfo.getBranch()
+        + "\n  <tr><td id='col1'>Upgrades:</td><td>" + getUpgradeStatusText(fsn)
+        + "\n</table></div>";
+  }
 }

Modified: hadoop/core/trunk/src/webapps/datanode/browseBlock.jsp
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/webapps/datanode/browseBlock.jsp?rev=740064&r1=740063&r2=740064&view=diff
==============================================================================
--- hadoop/core/trunk/src/webapps/datanode/browseBlock.jsp (original)
+++ hadoop/core/trunk/src/webapps/datanode/browseBlock.jsp Mon Feb  2 18:36:01 2009
@@ -17,12 +17,11 @@
 %>
 
 <%!
-  static JspHelper jspHelper = new JspHelper();
+  static final DataNode datanode = DataNode.getDataNode();
 
   public void generateFileDetails(JspWriter out, HttpServletRequest req) 
     throws IOException {
 
-    int chunkSizeToView = 0;
     long startOffset = 0;
     int datanodePort;
 
@@ -47,10 +46,7 @@
     if (namenodeInfoPortStr != null)
       namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);
 
-    String chunkSizeToViewStr = req.getParameter("chunkSizeToView");
-    if (chunkSizeToViewStr != null && Integer.parseInt(chunkSizeToViewStr) > 0)
-      chunkSizeToView = Integer.parseInt(chunkSizeToViewStr);
-    else chunkSizeToView = jspHelper.defaultChunkSizeToView;
+    final int chunkSizeToView = JspHelper.string2ChunkSizeToView(req.getParameter("chunkSizeToView"));
 
     String startOffsetStr = req.getParameter("startOffset");
     if (startOffsetStr == null || Long.parseLong(startOffsetStr) < 0)
@@ -71,7 +67,7 @@
     } 
     blockSize = Long.parseLong(blockSizeStr);
 
-    DFSClient dfs = new DFSClient(jspHelper.nameNodeAddr, jspHelper.conf);
+    final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(), JspHelper.conf);
     List<LocatedBlock> blocks = 
       dfs.namenode.getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks();
     //Add the various links for looking at the file contents
@@ -87,7 +83,7 @@
     LocatedBlock lastBlk = blocks.get(blocks.size() - 1);
     long blockId = lastBlk.getBlock().getBlockId();
     try {
-      chosenNode = jspHelper.bestNode(lastBlk);
+      chosenNode = JspHelper.bestNode(lastBlk);
     } catch (IOException e) {
       out.print(e.toString());
       dfs.close();
@@ -157,7 +153,7 @@
     }
     out.println("</table>");
     out.print("<hr>");
-    String namenodeHost = jspHelper.nameNodeAddr.getHostName();
+    String namenodeHost = datanode.getNameNodeAddr().getHostName();
     out.print("<br><a href=\"http://" + 
               InetAddress.getByName(namenodeHost).getCanonicalHostName() + ":" +
               namenodeInfoPort + "/dfshealth.jsp\">Go back to DFS home</a>");
@@ -168,7 +164,6 @@
     throws IOException {
     long startOffset = 0;
     int datanodePort = 0; 
-    int chunkSizeToView = 0;
 
     String namenodeInfoPortStr = req.getParameter("namenodeInfoPort");
     int namenodeInfoPort = -1;
@@ -208,10 +203,7 @@
     }
     blockSize = Long.parseLong(blockSizeStr);
     
-    String chunkSizeToViewStr = req.getParameter("chunkSizeToView");
-    if (chunkSizeToViewStr != null && Integer.parseInt(chunkSizeToViewStr) > 0)
-      chunkSizeToView = Integer.parseInt(chunkSizeToViewStr);
-    else chunkSizeToView = jspHelper.defaultChunkSizeToView;
+    final int chunkSizeToView = JspHelper.string2ChunkSizeToView(req.getParameter("chunkSizeToView"));
 
     String startOffsetStr = req.getParameter("startOffset");
     if (startOffsetStr == null || Long.parseLong(startOffsetStr) < 0)
@@ -240,7 +232,7 @@
     out.print("<hr>");
 
     //Determine the prev & next blocks
-    DFSClient dfs = new DFSClient(jspHelper.nameNodeAddr, jspHelper.conf);
+    final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(), JspHelper.conf);
     long nextStartOffset = 0;
     long nextBlockSize = 0;
     String nextBlockIdStr = null;
@@ -261,7 +253,7 @@
             nextGenStamp = Long.toString(nextBlock.getBlock().getGenerationStamp());
             nextStartOffset = 0;
             nextBlockSize = nextBlock.getBlock().getNumBytes();
-            DatanodeInfo d = jspHelper.bestNode(nextBlock);
+            DatanodeInfo d = JspHelper.bestNode(nextBlock);
             String datanodeAddr = d.getName();
             nextDatanodePort = Integer.parseInt(
                                       datanodeAddr.substring(
@@ -315,7 +307,7 @@
             if (prevStartOffset < 0)
               prevStartOffset = 0;
             prevBlockSize = prevBlock.getBlock().getNumBytes();
-            DatanodeInfo d = jspHelper.bestNode(prevBlock);
+            DatanodeInfo d = JspHelper.bestNode(prevBlock);
             String datanodeAddr = d.getName();
             prevDatanodePort = Integer.parseInt(
                                       datanodeAddr.substring(
@@ -353,7 +345,7 @@
     out.print("<hr>");
     out.print("<textarea cols=\"100\" rows=\"25\" wrap=\"virtual\" style=\"width:100%\" READONLY>");
     try {
-    jspHelper.streamBlockInAscii(
+    JspHelper.streamBlockInAscii(
             new InetSocketAddress(req.getServerName(), datanodePort), blockId, 
             genStamp, blockSize, startOffset, chunkSizeToView, out);
     } catch (Exception e){

Modified: hadoop/core/trunk/src/webapps/datanode/browseDirectory.jsp
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/webapps/datanode/browseDirectory.jsp?rev=740064&r1=740063&r2=740064&view=diff
==============================================================================
--- hadoop/core/trunk/src/webapps/datanode/browseDirectory.jsp (original)
+++ hadoop/core/trunk/src/webapps/datanode/browseDirectory.jsp Mon Feb  2 18:36:01 2009
@@ -17,7 +17,7 @@
   import="java.text.DateFormat"
 %>
 <%!
-  static JspHelper jspHelper = new JspHelper();
+  static final DataNode datanode = DataNode.getDataNode();
   
   public void generateDirectoryStructure( JspWriter out, 
                                           HttpServletRequest req,
@@ -34,7 +34,7 @@
     if (namenodeInfoPortStr != null)
       namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);
     
-    DFSClient dfs = new DFSClient(jspHelper.nameNodeAddr, jspHelper.conf);
+    final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(), JspHelper.conf);
     String target = dir;
     final FileStatus targetStatus = dfs.getFileInfo(target);
     if (targetStatus == null) { // not exists
@@ -55,7 +55,7 @@
         if (locations == null || locations.length == 0) {
           out.print("Empty file");
         } else {
-          DatanodeInfo chosenNode = jspHelper.bestNode(firstBlock);
+          DatanodeInfo chosenNode = JspHelper.bestNode(firstBlock);
           String fqdn = InetAddress.getByName(chosenNode.getHost()).
             getCanonicalHostName();
           String datanodeAddr = chosenNode.getName();
@@ -99,9 +99,9 @@
         out.print("Empty directory");
       }
       else {
-        jspHelper.addTableHeader(out);
+        JspHelper.addTableHeader(out);
         int row=0;
-        jspHelper.addTableRow(out, headings, row++);
+        JspHelper.addTableRow(out, headings, row++);
         String cols [] = new String[headings.length];
         for (int i = 0; i < files.length; i++) {
           //Get the location of the first block of the file
@@ -126,12 +126,12 @@
           cols[6] = files[i].getPermission().toString();
           cols[7] = files[i].getOwner();
           cols[8] = files[i].getGroup();
-          jspHelper.addTableRow(out, cols, row++);
+          JspHelper.addTableRow(out, cols, row++);
         }
-        jspHelper.addTableFooter(out);
+        JspHelper.addTableFooter(out);
       }
     } 
-    String namenodeHost = jspHelper.nameNodeAddr.getHostName();
+    String namenodeHost = datanode.getNameNodeAddr().getHostName();
     out.print("<br><a href=\"http://" + 
               InetAddress.getByName(namenodeHost).getCanonicalHostName() + ":" +
               namenodeInfoPort + "/dfshealth.jsp\">Go back to DFS home</a>");

Modified: hadoop/core/trunk/src/webapps/datanode/tail.jsp
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/webapps/datanode/tail.jsp?rev=740064&r1=740063&r2=740064&view=diff
==============================================================================
--- hadoop/core/trunk/src/webapps/datanode/tail.jsp (original)
+++ hadoop/core/trunk/src/webapps/datanode/tail.jsp Mon Feb  2 18:36:01 2009
@@ -18,14 +18,10 @@
 %>
 
 <%!
-  static JspHelper jspHelper = new JspHelper();
+  static final DataNode datanode = DataNode.getDataNode();
 
   public void generateFileChunks(JspWriter out, HttpServletRequest req) 
     throws IOException {
-    long startOffset = 0;
-    
-    int chunkSizeToView = 0;
-
     String referrer = req.getParameter("referrer");
     boolean noLink = false;
     if (referrer == null) {
@@ -43,10 +39,7 @@
     if (namenodeInfoPortStr != null)
       namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);
     
-    String chunkSizeToViewStr = req.getParameter("chunkSizeToView");
-    if (chunkSizeToViewStr != null && Integer.parseInt(chunkSizeToViewStr) > 0)
-      chunkSizeToView = Integer.parseInt(chunkSizeToViewStr);
-    else chunkSizeToView = jspHelper.defaultChunkSizeToView;
+    final int chunkSizeToView = JspHelper.string2ChunkSizeToView(req.getParameter("chunkSizeToView"));
 
     if (!noLink) {
       out.print("<h3>Tail of File: ");
@@ -70,8 +63,7 @@
                 referrer+ "\">");
 
     //fetch the block from the datanode that has the last block for this file
-    DFSClient dfs = new DFSClient(jspHelper.nameNodeAddr, 
-                                         jspHelper.conf);
+    final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(), JspHelper.conf);
     List<LocatedBlock> blocks = 
       dfs.namenode.getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks();
     if (blocks == null || blocks.size() == 0) {
@@ -85,7 +77,7 @@
     long genStamp = lastBlk.getBlock().getGenerationStamp();
     DatanodeInfo chosenNode;
     try {
-      chosenNode = jspHelper.bestNode(lastBlk);
+      chosenNode = JspHelper.bestNode(lastBlk);
     } catch (IOException e) {
       out.print(e.toString());
       dfs.close();
@@ -93,12 +85,10 @@
     }      
     InetSocketAddress addr = NetUtils.createSocketAddr(chosenNode.getName());
     //view the last chunkSizeToView bytes while Tailing
-    if (blockSize >= chunkSizeToView)
-      startOffset = blockSize - chunkSizeToView;
-    else startOffset = 0;
+    final long startOffset = blockSize >= chunkSizeToView? blockSize - chunkSizeToView: 0;
 
     out.print("<textarea cols=\"100\" rows=\"25\" wrap=\"virtual\" style=\"width:100%\" READONLY>");
-    jspHelper.streamBlockInAscii(addr, blockId, genStamp, blockSize, startOffset, chunkSizeToView, out);
+    JspHelper.streamBlockInAscii(addr, blockId, genStamp, blockSize, startOffset, chunkSizeToView, out);
     out.print("</textarea>");
     dfs.close();
   }

Modified: hadoop/core/trunk/src/webapps/hdfs/dfshealth.jsp
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/webapps/hdfs/dfshealth.jsp?rev=740064&r1=740063&r2=740064&view=diff
==============================================================================
--- hadoop/core/trunk/src/webapps/hdfs/dfshealth.jsp (original)
+++ hadoop/core/trunk/src/webapps/hdfs/dfshealth.jsp Mon Feb  2 18:36:01 2009
@@ -17,8 +17,6 @@
   import="java.net.URLEncoder"
 %>
 <%!
-  JspHelper jspHelper = new JspHelper();
-
   int rowNum = 0;
   int colNum = 0;
 
@@ -161,7 +159,7 @@
     FSNamesystem fsn = nn.getNamesystem();
     ArrayList<DatanodeDescriptor> live = new ArrayList<DatanodeDescriptor>();
     ArrayList<DatanodeDescriptor> dead = new ArrayList<DatanodeDescriptor>();
-    jspHelper.DFSNodesStatus(live, dead);
+    fsn.DFSNodesStatus(live, dead);
 
     sorterField = request.getParameter("sorter/field");
     sorterOrder = request.getParameter("sorter/order");
@@ -235,22 +233,15 @@
     
 <body>
 <h1>NameNode '<%=namenodeLabel%>'</h1>
-
-
-<div id="dfstable"> <table>	  
-<tr> <td id="col1"> Started: <td> <%= fsn.getStartTime()%>
-<tr> <td id="col1"> Version: <td> <%= VersionInfo.getVersion()%>, <%= VersionInfo.getRevision()%>
-<tr> <td id="col1"> Compiled: <td> <%= VersionInfo.getDate()%> by <%= VersionInfo.getUser()%> from <%= VersionInfo.getBranch()%>
-<tr> <td id="col1"> Upgrades: <td> <%= jspHelper.getUpgradeStatusText()%>
-</table></div><br>				      
-
+<%= JspHelper.getVersionTable(fsn) %>
+<br />
 <b><a href="/nn_browsedfscontent.jsp">Browse the filesystem</a></b><br>
 <b><a href="/logs/">Namenode Logs</a></b>
 
 <hr>
 <h3>Cluster Summary</h3>
-<b> <%= jspHelper.getSafeModeText()%> </b>
-<b> <%= jspHelper.getInodeLimitText()%> </b>
+<b> <%= JspHelper.getSafeModeText(fsn)%> </b>
+<b> <%= JspHelper.getInodeLimitText(fsn)%> </b>
 <%
     generateDFSHealthReport(out, nn, request); 
 %>

Modified: hadoop/core/trunk/src/webapps/hdfs/dfsnodelist.jsp
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/webapps/hdfs/dfsnodelist.jsp?rev=740064&r1=740063&r2=740064&view=diff
==============================================================================
--- hadoop/core/trunk/src/webapps/hdfs/dfsnodelist.jsp (original)
+++ hadoop/core/trunk/src/webapps/hdfs/dfsnodelist.jsp Mon Feb  2 18:36:01 2009
@@ -16,8 +16,6 @@
 	import="java.net.URLEncoder"
 %>
 <%!
-	JspHelper jspHelper = new JspHelper();
-
 	int rowNum = 0;
 	int colNum = 0;
 
@@ -127,7 +125,7 @@
 throws IOException {
 	ArrayList<DatanodeDescriptor> live = new ArrayList<DatanodeDescriptor>();  
 
 	ArrayList<DatanodeDescriptor> dead = new ArrayList<DatanodeDescriptor>();
-	jspHelper.DFSNodesStatus(live, dead);
+	nn.getNamesystem().DFSNodesStatus(live, dead);
 
 	whatNodes = request.getParameter("whatNodes"); // show only live or only dead nodes
 	sorterField = request.getParameter("sorter/field");
@@ -137,8 +135,8 @@
 	if ( sorterOrder == null )
 		sorterOrder = "ASC";
 
-	jspHelper.sortNodeList(live, sorterField, sorterOrder);
-	jspHelper.sortNodeList(dead, "name", "ASC");
+	JspHelper.sortNodeList(live, sorterField, sorterOrder);
+	JspHelper.sortNodeList(dead, "name", "ASC");
 
 	// Find out common suffix. Should this be before or after the sort?
 	String port_suffix = null;
@@ -203,7 +201,7 @@
 						NodeHeaderStr("pcremaining") + "> Remaining <br>(%) <th " +
 						NodeHeaderStr("blocks") + "> Blocks\n" );
 
-				jspHelper.sortNodeList(live, sorterField, sorterOrder);
+				JspHelper.sortNodeList(live, sorterField, sorterOrder);
 				for ( int i=0; i < live.size(); i++ ) {
 					generateNodeData(out, live.get(i), port_suffix, true, nnHttpPort);
 				}
@@ -218,7 +216,7 @@
 				out.print( "<table border=1 cellspacing=0> <tr id=\"row1\"> " +
 				"<td> Node \n" );
 
-				jspHelper.sortNodeList(dead, "name", "ASC");
+				JspHelper.sortNodeList(dead, "name", "ASC");
 				for ( int i=0; i < dead.size() ; i++ ) {
 					generateNodeData(out, dead.get(i), port_suffix, false, nnHttpPort);
 				}
@@ -243,15 +241,8 @@
   
 <body>
 <h1>NameNode '<%=namenodeLabel%>'</h1>
-
-
-<div id="dfstable"> <table>	  
-<tr> <td id="col1"> Started: <td> <%= fsn.getStartTime()%>
-<tr> <td id="col1"> Version: <td> <%= VersionInfo.getVersion()%>, r<%= VersionInfo.getRevision()%>
-<tr> <td id="col1"> Compiled: <td> <%= VersionInfo.getDate()%> by <%= VersionInfo.getUser()%>
-<tr> <td id="col1"> Upgrades: <td> <%= jspHelper.getUpgradeStatusText()%>
-</table></div><br>				      
-
+<%= JspHelper.getVersionTable(fsn) %>
+<br />
 <b><a href="/nn_browsedfscontent.jsp">Browse the filesystem</a></b><br>
 <b><a href="/logs/">Namenode Logs</a></b><br>
 <b><a href=/dfshealth.jsp> Go back to DFS home</a></b>



Mime
View raw message