hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From szets...@apache.org
Subject svn commit: r776490 [2/2] - in /hadoop/core/trunk: ./ src/hdfs/org/apache/hadoop/hdfs/server/common/ src/hdfs/org/apache/hadoop/hdfs/server/datanode/ src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/webapps/datanode/ src/webapps/hdfs/ src/webapps/s...
Date Tue, 19 May 2009 22:37:21 GMT
Modified: hadoop/core/trunk/src/webapps/hdfs/dfshealth.jsp
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/webapps/hdfs/dfshealth.jsp?rev=776490&r1=776489&r2=776490&view=diff
==============================================================================
--- hadoop/core/trunk/src/webapps/hdfs/dfshealth.jsp (original)
+++ hadoop/core/trunk/src/webapps/hdfs/dfshealth.jsp Tue May 19 22:37:21 2009
@@ -27,223 +27,12 @@
   import="org.apache.hadoop.hdfs.*"
   import="org.apache.hadoop.hdfs.server.namenode.*"
   import="org.apache.hadoop.hdfs.server.datanode.*"
-  import="org.apache.hadoop.hdfs.server.common.Storage"
-  import="org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory"
   import="org.apache.hadoop.hdfs.protocol.*"
   import="org.apache.hadoop.util.*"
-  import="java.text.DateFormat"
-  import="java.lang.Math"
-  import="java.net.URLEncoder"
 %>
-<%!
-  int rowNum = 0;
-  int colNum = 0;
-
-  String rowTxt() { colNum = 0;
-      return "<tr class=\"" + (((rowNum++)%2 == 0)? "rowNormal" : "rowAlt")
-          + "\"> "; }
-  String colTxt() { return "<td id=\"col" + ++colNum + "\"> "; }
-  void counterReset () { colNum = 0; rowNum = 0 ; }
-
-  long diskBytes = 1024 * 1024 * 1024;
-  String diskByteStr = "GB";
-
-  String sorterField = null;
-  String sorterOrder = null;
-
-  String NodeHeaderStr(String name) {
-      String ret = "class=header";
-      String order = "ASC";
-      if ( name.equals( sorterField ) ) {
-          ret += sorterOrder;
-          if ( sorterOrder.equals("ASC") )
-              order = "DSC";
-      }
-      ret += " onClick=\"window.document.location=" +
-          "'/dfshealth.jsp?sorter/field=" + name + "&sorter/order=" +
-          order + "'\" title=\"sort on this column\"";
-      
-      return ret;
-  }
-      
-  public void generateNodeData( JspWriter out, DatanodeDescriptor d,
-                                    String suffix, boolean alive,
-                                    int nnHttpPort )
-    throws IOException {
-      
-    /* Say the datanode is dn1.hadoop.apache.org with ip 192.168.0.5
-       we use:
-       1) d.getHostName():d.getPort() to display.
-           Domain and port are stripped if they are common across the nodes.
-           i.e. "dn1"
-       2) d.getHost():d.Port() for "title".
-          i.e. "192.168.0.5:50010"
-       3) d.getHostName():d.getInfoPort() for url.
-          i.e. "http://dn1.hadoop.apache.org:50075/..."
-          Note that "d.getHost():d.getPort()" is what DFS clients use
-          to interact with datanodes.
-    */
-    // from nn_browsedfscontent.jsp:
-    String url = "http://" + d.getHostName() + ":" + d.getInfoPort() +
-                 "/browseDirectory.jsp?namenodeInfoPort=" +
-                 nnHttpPort + "&dir=" +
-                 URLEncoder.encode("/", "UTF-8");
-     
-    String name = d.getHostName() + ":" + d.getPort();
-    if ( !name.matches( "\\d+\\.\\d+.\\d+\\.\\d+.*" ) ) 
-        name = name.replaceAll( "\\.[^.:]*", "" );    
-    int idx = (suffix != null && name.endsWith( suffix )) ?
-        name.indexOf( suffix ) : -1;
-    
-    out.print( rowTxt() + "<td class=\"name\"><a title=\""
-               + d.getHost() + ":" + d.getPort() +
-               "\" href=\"" + url + "\">" +
-               (( idx > 0 ) ? name.substring(0, idx) : name) + "</a>" +
-               (( alive ) ? "" : "\n") );
-    if ( !alive )
-        return;
-    
-    long c = d.getCapacity();
-    long u = d.getDfsUsed();
-    long nu = d.getNonDfsUsed();
-    long r = d.getRemaining();
-    String percentUsed = StringUtils.limitDecimalTo2(d.getDfsUsedPercent());    
-    String percentRemaining = StringUtils.limitDecimalTo2(d.getRemainingPercent());    
-    
-    String adminState = (d.isDecommissioned() ? "Decommissioned" :
-                         (d.isDecommissionInProgress() ? "Decommission In Progress":
-                          "In Service"));
-    
-    long timestamp = d.getLastUpdate();
-    long currentTime = System.currentTimeMillis();
-    out.print("<td class=\"lastcontact\"> " +
-              ((currentTime - timestamp)/1000) +
-              "<td class=\"adminstate\">" +
-              adminState +
-              "<td align=\"right\" class=\"capacity\">" +
-              StringUtils.limitDecimalTo2(c*1.0/diskBytes) +
-              "<td align=\"right\" class=\"used\">" +
-              StringUtils.limitDecimalTo2(u*1.0/diskBytes) +      
-              "<td align=\"right\" class=\"nondfsused\">" +
-              StringUtils.limitDecimalTo2(nu*1.0/diskBytes) +      
-              "<td align=\"right\" class=\"remaining\">" +
-              StringUtils.limitDecimalTo2(r*1.0/diskBytes) +      
-              "<td align=\"right\" class=\"pcused\">" + percentUsed +
-              "<td class=\"pcused\">" +
-              ServletUtil.percentageGraph( (int)Double.parseDouble(percentUsed) , 100) +
-              "<td align=\"right\" class=\"pcremaining`\">" + percentRemaining +
-              "<td title=" + "\"blocks scheduled : " + d.getBlocksScheduled() + 
-              "\" class=\"blocks\">" + d.numBlocks() + "\n");
-  }
-  
-  
-  public void generateConfReport( JspWriter out,
-		  NameNode nn,
-		  HttpServletRequest request)
-  throws IOException {
-	  FSNamesystem fsn = nn.getNamesystem();
-	  long underReplicatedBlocks = fsn.getUnderReplicatedBlocks();
-	  FSImage fsImage = fsn.getFSImage();
-	  List<Storage.StorageDirectory> removedStorageDirs = fsImage.getRemovedStorageDirs();
-	  String storageDirsSizeStr="", removedStorageDirsSizeStr="", storageDirsStr="", removedStorageDirsStr="", storageDirsDiv="", removedStorageDirsDiv="";
-
-	  //FS Image storage configuration
-	  out.print("<h3> " + nn.getRole() + " Storage: </h3>");
-	  out.print("<div id=\"dfstable\"> <table border=1 cellpadding=10 cellspacing=0 title=\"NameNode Storage\">\n"+
-	  "<thead><tr><td><b>Storage Directory</b></td><td><b>Type</b></td><td><b>State</b></td></tr></thead>");
-	  
-	  StorageDirectory st =null;
-	  for (Iterator<StorageDirectory> it = fsImage.dirIterator(); it.hasNext();) {
-	      st = it.next();
-	      String dir = "" +  st.getRoot();
-		  String type = "" + st.getStorageDirType();
-		  out.print("<tr><td>"+dir+"</td><td>"+type+"</td><td>Active</td></tr>");
-	  }
-	  
-	  long storageDirsSize = removedStorageDirs.size();
-	  for(int i=0; i< storageDirsSize; i++){
-		  st = removedStorageDirs.get(i);
-		  String dir = "" +  st.getRoot();
-		  String type = "" + st.getStorageDirType();
-		  out.print("<tr><td>"+dir+"</td><td>"+type+"</td><td><font color=red>Failed</font></td></tr>");
-	  }
-	  
-	  out.print("</table></div><br>\n");
-  }
-
-
-  public void generateDFSHealthReport(JspWriter out,
-                                      NameNode nn,
-                                      HttpServletRequest request)
-                                      throws IOException {
-    FSNamesystem fsn = nn.getNamesystem();
-    ArrayList<DatanodeDescriptor> live = new ArrayList<DatanodeDescriptor>();
-    ArrayList<DatanodeDescriptor> dead = new ArrayList<DatanodeDescriptor>();
-    fsn.DFSNodesStatus(live, dead);
-
-    sorterField = request.getParameter("sorter/field");
-    sorterOrder = request.getParameter("sorter/order");
-    if ( sorterField == null )
-        sorterField = "name";
-    if ( sorterOrder == null )
-        sorterOrder = "ASC";
-
-    // Find out common suffix. Should this be before or after the sort?
-    String port_suffix = null;
-    if ( live.size() > 0 ) {
-        String name = live.get(0).getName();
-        int idx = name.indexOf(':');
-        if ( idx > 0 ) {
-            port_suffix = name.substring( idx );
-        }
-        
-        for ( int i=1; port_suffix != null && i < live.size(); i++ ) {
-            if ( live.get(i).getName().endsWith( port_suffix ) == false ) {
-                port_suffix = null;
-                break;
-            }
-        }
-    }
-        
-    counterReset();
-    long[] fsnStats = fsn.getStats(); 
-    long total = fsnStats[0];
-    long remaining = fsnStats[2];
-    long used = fsnStats[1];
-    long nonDFS = total - remaining - used;
-	nonDFS = nonDFS < 0 ? 0 : nonDFS; 
-    float percentUsed = total <= 0 
-        ? 0f : ((float)used * 100.0f)/(float)total;
-    float percentRemaining = total <= 0 
-        ? 100f : ((float)remaining * 100.0f)/(float)total;
-
-    out.print( "<div id=\"dfstable\"> <table>\n" +
-	       rowTxt() + colTxt() + "Configured Capacity" + colTxt() + ":" + colTxt() +
-	       StringUtils.byteDesc( total ) +
-	       rowTxt() + colTxt() + "DFS Used" + colTxt() + ":" + colTxt() +
-	       StringUtils.byteDesc( used ) +
-	       rowTxt() + colTxt() + "Non DFS Used" + colTxt() + ":" + colTxt() +
-	       StringUtils.byteDesc( nonDFS ) +
-	       rowTxt() + colTxt() + "DFS Remaining" + colTxt() + ":" + colTxt() +
-	       StringUtils.byteDesc( remaining ) +
-	       rowTxt() + colTxt() + "DFS Used%" + colTxt() + ":" + colTxt() +
-	       StringUtils.limitDecimalTo2(percentUsed) + " %" +
-	       rowTxt() + colTxt() + "DFS Remaining%" + colTxt() + ":" + colTxt() +
-	       StringUtils.limitDecimalTo2(percentRemaining) + " %" +
-	       rowTxt() + colTxt() +
-	       		"<a href=\"dfsnodelist.jsp?whatNodes=LIVE\">Live Nodes</a> " +
-	       		colTxt() + ":" + colTxt() + live.size() +
-	       rowTxt() + colTxt() +
-	       		"<a href=\"dfsnodelist.jsp?whatNodes=DEAD\">Dead Nodes</a> " +
-	       		colTxt() + ":" + colTxt() + dead.size() +
-               "</table></div><br>\n" );
-    
-    if (live.isEmpty() && dead.isEmpty()) {
-        out.print("There are no datanodes in the cluster");
-    }
-  }%>
 
 <%
+  final NamenodeJspHelper.HealthJsp healthjsp  = new NamenodeJspHelper.HealthJsp();
   NameNode nn = (NameNode)application.getAttribute("name.node");
   FSNamesystem fsn = nn.getNamesystem();
   String namenodeRole = nn.getRole().toString();
@@ -257,24 +46,20 @@
     
 <body>
 <h1><%=namenodeRole%> '<%=namenodeLabel%>'</h1>
-<%= JspHelper.getVersionTable(fsn) %>
+<%= NamenodeJspHelper.getVersionTable(fsn) %>
 <br />
 <b><a href="/nn_browsedfscontent.jsp">Browse the filesystem</a></b><br>
 <b><a href="/logs/"><%=namenodeRole%> Logs</a></b>
 
 <hr>
 <h3>Cluster Summary</h3>
-<b> <%= JspHelper.getSafeModeText(fsn)%> </b>
-<b> <%= JspHelper.getInodeLimitText(fsn)%> </b>
-<a class="warning"> <%= JspHelper.getWarningText(fsn)%></a>
+<b> <%= NamenodeJspHelper.getSafeModeText(fsn)%> </b>
+<b> <%= NamenodeJspHelper.getInodeLimitText(fsn)%> </b>
+<a class="warning"><%= NamenodeJspHelper.getWarningText(fsn)%></a>
 
-<%
-    generateDFSHealthReport(out, nn, request); 
-%>
+<% healthjsp.generateHealthReport(out, nn, request); %>
 <hr>
-<%
-	generateConfReport(out, nn, request);
-%>
+<% healthjsp.generateConfReport(out, nn, request); %>
 <%
 out.println(ServletUtil.htmlFooter());
 %>

Modified: hadoop/core/trunk/src/webapps/hdfs/dfsnodelist.jsp
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/webapps/hdfs/dfsnodelist.jsp?rev=776490&r1=776489&r2=776490&view=diff
==============================================================================
--- hadoop/core/trunk/src/webapps/hdfs/dfsnodelist.jsp (original)
+++ hadoop/core/trunk/src/webapps/hdfs/dfsnodelist.jsp Tue May 19 22:37:21 2009
@@ -31,223 +31,10 @@
 	import="org.apache.hadoop.hdfs.protocol.*"
 	import="org.apache.hadoop.util.*"
 	import="java.text.DateFormat"
-	import="java.lang.Math"
-	import="java.net.URLEncoder"
+  import="org.apache.hadoop.util.*"
 %>
-<%!
-	int rowNum = 0;
-	int colNum = 0;
-
-	String rowTxt() { colNum = 0;
-	return "<tr class=\"" + (((rowNum++)%2 == 0)? "rowNormal" : "rowAlt")
-	+ "\"> "; }
-	String colTxt() { return "<td id=\"col" + ++colNum + "\"> "; }
-	void counterReset () { colNum = 0; rowNum = 0 ; }
-
-	long diskBytes = 1024 * 1024 * 1024;
-	String diskByteStr = "GB";
-
-	String sorterField = null;
-	String sorterOrder = null;
-	String whatNodes = "LIVE";
-
-String NodeHeaderStr(String name) {
-	String ret = "class=header";
-	String order = "ASC";
-	if ( name.equals( sorterField ) ) {
-		ret += sorterOrder;
-		if ( sorterOrder.equals("ASC") )
-			order = "DSC";
-	}
-	ret += " onClick=\"window.document.location=" +
-	"'/dfsnodelist.jsp?whatNodes="+whatNodes+"&sorter/field=" + name + "&sorter/order=" +
-	order + "'\" title=\"sort on this column\"";
-
-	return ret;
-}
-
-public void generateNodeData( JspWriter out, DatanodeDescriptor d,
-		String suffix, boolean alive,
-		int nnHttpPort )
-throws IOException {
-
-	/* Say the datanode is dn1.hadoop.apache.org with ip 192.168.0.5
-we use:
-1) d.getHostName():d.getPort() to display.
-Domain and port are stripped if they are common across the nodes.
-i.e. "dn1"
-2) d.getHost():d.Port() for "title".
-i.e. "192.168.0.5:50010"
-3) d.getHostName():d.getInfoPort() for url.
-i.e. "http://dn1.hadoop.apache.org:50075/..."
-Note that "d.getHost():d.getPort()" is what DFS clients use
-to interact with datanodes.
-	 */
-	// from nn_browsedfscontent.jsp:
-	String url = "http://" + d.getHostName() + ":" + d.getInfoPort() +
-	"/browseDirectory.jsp?namenodeInfoPort=" +
-	nnHttpPort + "&dir=" +
-	URLEncoder.encode("/", "UTF-8");
-
-	String name = d.getHostName() + ":" + d.getPort();
-	if ( !name.matches( "\\d+\\.\\d+.\\d+\\.\\d+.*" ) ) 
-		name = name.replaceAll( "\\.[^.:]*", "" );    
-	int idx = (suffix != null && name.endsWith( suffix )) ?
-			name.indexOf( suffix ) : -1;
-
-			out.print( rowTxt() + "<td class=\"name\"><a title=\""
-					+ d.getHost() + ":" + d.getPort() +
-					"\" href=\"" + url + "\">" +
-					(( idx > 0 ) ? name.substring(0, idx) : name) + "</a>" +
-					(( alive ) ? "" : "\n") );
-			if ( !alive )
-				return;
-
-			long c = d.getCapacity();
-			long u = d.getDfsUsed();
-			long nu = d.getNonDfsUsed();
-			long r = d.getRemaining();
-			String percentUsed = StringUtils.limitDecimalTo2(d.getDfsUsedPercent());    
-			String percentRemaining = StringUtils.limitDecimalTo2(d.getRemainingPercent());    
-
-			String adminState = (d.isDecommissioned() ? "Decommissioned" :
-				(d.isDecommissionInProgress() ? "Decommission In Progress":
-				"In Service"));
-
-			long timestamp = d.getLastUpdate();
-			long currentTime = System.currentTimeMillis();
-			out.print("<td class=\"lastcontact\"> " +
-					((currentTime - timestamp)/1000) +
-					"<td class=\"adminstate\">" +
-					adminState +
-					"<td align=\"right\" class=\"capacity\">" +
-					StringUtils.limitDecimalTo2(c*1.0/diskBytes) +
-					"<td align=\"right\" class=\"used\">" +
-					StringUtils.limitDecimalTo2(u*1.0/diskBytes) +      
-					"<td align=\"right\" class=\"nondfsused\">" +
-					StringUtils.limitDecimalTo2(nu*1.0/diskBytes) +      
-					"<td align=\"right\" class=\"remaining\">" +
-					StringUtils.limitDecimalTo2(r*1.0/diskBytes) +      
-					"<td align=\"right\" class=\"pcused\">" + percentUsed +
-					"<td class=\"pcused\">" +
-					ServletUtil.percentageGraph( (int)Double.parseDouble(percentUsed) , 100) +
-					"<td align=\"right\" class=\"pcremaining`\">" + percentRemaining +
-					"<td title=" + "\"blocks scheduled : " + d.getBlocksScheduled() + 
-					"\" class=\"blocks\">" + d.numBlocks() + "\n");
-}
-
-
-
-public void generateDFSNodesList(JspWriter out, 
-		NameNode nn,
-		HttpServletRequest request)
-throws IOException {
-	ArrayList<DatanodeDescriptor> live = new ArrayList<DatanodeDescriptor>();  
 
-	ArrayList<DatanodeDescriptor> dead = new ArrayList<DatanodeDescriptor>();
-	nn.getNamesystem().DFSNodesStatus(live, dead);
-
-	whatNodes = request.getParameter("whatNodes"); // show only live or only dead nodes
-	sorterField = request.getParameter("sorter/field");
-	sorterOrder = request.getParameter("sorter/order");
-	if ( sorterField == null )
-		sorterField = "name";
-	if ( sorterOrder == null )
-		sorterOrder = "ASC";
-
-	JspHelper.sortNodeList(live, sorterField, sorterOrder);
-	JspHelper.sortNodeList(dead, "name", "ASC");
-
-	// Find out common suffix. Should this be before or after the sort?
-	String port_suffix = null;
-	if ( live.size() > 0 ) {
-		String name = live.get(0).getName();
-		int idx = name.indexOf(':');
-		if ( idx > 0 ) {
-			port_suffix = name.substring( idx );
-		}
-
-		for ( int i=1; port_suffix != null && i < live.size(); i++ ) {
-			if ( live.get(i).getName().endsWith( port_suffix ) == false ) {
-				port_suffix = null;
-				break;
-			}
-		}
-	}
-
-	counterReset();
-
-	try {
-		Thread.sleep(1000);
-	} catch (InterruptedException e) {}
-
-	if (live.isEmpty() && dead.isEmpty()) {
-		out.print("There are no datanodes in the cluster");
-	}
-	else {
-
-		int nnHttpPort = nn.getHttpAddress().getPort();
-		out.print( "<div id=\"dfsnodetable\"> ");
-		if(whatNodes.equals("LIVE")) {
-
-			out.print( 
-					"<a name=\"LiveNodes\" id=\"title\">" +
-					"Live Datanodes : " + live.size() + "</a>" +
-			"<br><br>\n<table border=1 cellspacing=0>\n" );
-
-			counterReset();
-
-			if ( live.size() > 0 ) {
-
-				if ( live.get(0).getCapacity() > 1024 * diskBytes ) {
-					diskBytes *= 1024;
-					diskByteStr = "TB";
-				}
-
-				out.print( "<tr class=\"headerRow\"> <th " +
-						NodeHeaderStr("name") + "> Node <th " +
-						NodeHeaderStr("lastcontact") + "> Last <br>Contact <th " +
-						NodeHeaderStr("adminstate") + "> Admin State <th " +
-						NodeHeaderStr("capacity") + "> Configured <br>Capacity (" + 
-						diskByteStr + ") <th " + 
-						NodeHeaderStr("used") + "> Used <br>(" + 
-						diskByteStr + ") <th " + 
-						NodeHeaderStr("nondfsused") + "> Non DFS <br>Used (" + 
-						diskByteStr + ") <th " + 
-						NodeHeaderStr("remaining") + "> Remaining <br>(" + 
-						diskByteStr + ") <th " + 
-						NodeHeaderStr("pcused") + "> Used <br>(%) <th " + 
-						NodeHeaderStr("pcused") + "> Used <br>(%) <th " +
-						NodeHeaderStr("pcremaining") + "> Remaining <br>(%) <th " +
-						NodeHeaderStr("blocks") + "> Blocks\n" );
-
-				JspHelper.sortNodeList(live, sorterField, sorterOrder);
-				for ( int i=0; i < live.size(); i++ ) {
-					generateNodeData(out, live.get(i), port_suffix, true, nnHttpPort);
-				}
-			}
-			out.print("</table>\n");
-		} else {
-
-			out.print("<br> <a name=\"DeadNodes\" id=\"title\"> " +
-					" Dead Datanodes : " +dead.size() + "</a><br><br>\n");
-
-			if ( dead.size() > 0 ) {
-				out.print( "<table border=1 cellspacing=0> <tr id=\"row1\"> " +
-				"<td> Node \n" );
-
-				JspHelper.sortNodeList(dead, "name", "ASC");
-				for ( int i=0; i < dead.size() ; i++ ) {
-					generateNodeData(out, dead.get(i), port_suffix, false, nnHttpPort);
-				}
-
-				out.print("</table>\n");
-			}
-		}
-		out.print("</div>");
-	}
-}%>
-
 <%
+final NamenodeJspHelper.NodeListJsp nodelistjsp = new NamenodeJspHelper.NodeListJsp();
 NameNode nn = (NameNode)application.getAttribute("name.node");
 String namenodeRole = nn.getRole().toString();
 FSNamesystem fsn = nn.getNamesystem();
@@ -261,15 +48,13 @@
   
 <body>
 <h1><%=namenodeRole%> '<%=namenodeLabel%>'</h1>
-<%= JspHelper.getVersionTable(fsn) %>
+<%= NamenodeJspHelper.getVersionTable(fsn) %>
 <br />
 <b><a href="/nn_browsedfscontent.jsp">Browse the filesystem</a></b><br>
 <b><a href="/logs/"><%=namenodeRole%> Logs</a></b><br>
 <b><a href=/dfshealth.jsp> Go back to DFS home</a></b>
 <hr>
-<%
-	generateDFSNodesList(out, nn, request); 
-%>
+<% nodelistjsp.generateNodesList(out, nn, request); %>
 
 <%
 out.println(ServletUtil.htmlFooter());

Modified: hadoop/core/trunk/src/webapps/hdfs/nn_browsedfscontent.jsp
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/webapps/hdfs/nn_browsedfscontent.jsp?rev=776490&r1=776489&r2=776490&view=diff
==============================================================================
--- hadoop/core/trunk/src/webapps/hdfs/nn_browsedfscontent.jsp (original)
+++ hadoop/core/trunk/src/webapps/hdfs/nn_browsedfscontent.jsp Tue May 19 22:37:21 2009
@@ -32,32 +32,6 @@
   import="java.net.InetAddress"
   import="java.net.URLEncoder"
 %>
-<%!
-  public void redirectToRandomDataNode(
-                            NameNode nn, 
-                            HttpServletResponse resp) throws IOException {
-    FSNamesystem fsn = nn.getNamesystem();
-    String datanode = fsn.randomDataNode();
-    String redirectLocation;
-    String nodeToRedirect;
-    int redirectPort;
-    if (datanode != null) {
-      redirectPort = Integer.parseInt(datanode.substring(datanode.indexOf(':') + 1));
-      nodeToRedirect = datanode.substring(0, datanode.indexOf(':'));
-    }
-    else {
-      nodeToRedirect = nn.getHttpAddress().getHostName();
-      redirectPort = nn.getHttpAddress().getPort();
-    }
-    String fqdn = InetAddress.getByName(nodeToRedirect).getCanonicalHostName();
-    redirectLocation = "http://" + fqdn + ":" + redirectPort + 
-                       "/browseDirectory.jsp?namenodeInfoPort=" + 
-                       nn.getHttpAddress().getPort() +
-                       "&dir=" + URLEncoder.encode("/", "UTF-8");
-    resp.sendRedirect(redirectLocation);
-  }
-%>
-
 <html>
 
 <title></title>
@@ -65,7 +39,7 @@
 <body>
 <% 
   NameNode nn = (NameNode)application.getAttribute("name.node");
-  redirectToRandomDataNode(nn, response); 
+  NamenodeJspHelper.redirectToRandomDataNode(nn, response); 
 %>
 <hr>
 

Modified: hadoop/core/trunk/src/webapps/secondary/status.jsp
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/webapps/secondary/status.jsp?rev=776490&r1=776489&r2=776490&view=diff
==============================================================================
--- hadoop/core/trunk/src/webapps/secondary/status.jsp (original)
+++ hadoop/core/trunk/src/webapps/secondary/status.jsp Tue May 19 22:37:21 2009
@@ -19,7 +19,8 @@
 %>
 <%@ page
   contentType="text/html; charset=UTF-8"
-  import="org.apache.hadoop.util.*"
+  import="org.apache.hadoop.hdfs.server.common.JspHelper"
+  import="org.apache.hadoop.util.ServletUtil"
 %>
 
 <html>



Mime
View raw message