From: szetszwo@apache.org
To: core-commits@hadoop.apache.org
Reply-To: core-dev@hadoop.apache.org
Subject: svn commit: r776490 [1/2] - in /hadoop/core/trunk: ./ src/hdfs/org/apache/hadoop/hdfs/server/common/ src/hdfs/org/apache/hadoop/hdfs/server/datanode/ src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/webapps/datanode/ src/webapps/hdfs/ src/webapps/s...
Date: Tue, 19 May 2009 22:37:21 -0000
Message-Id: <20090519223722.46B9B23888AD@eris.apache.org>

Author: szetszwo
Date: Tue May 19 22:37:21 2009
New Revision: 776490

URL: http://svn.apache.org/viewvc?rev=776490&view=rev
Log:
HADOOP-5857. Move normal java methods from hdfs .jsp files to .java files.
(szetszwo)
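The change applies one pattern throughout: Java logic that previously lived in JSP declaration blocks (<%! ... %>) moves into plain .java helper classes that take the JspWriter and the request as arguments, so each page shrinks to a one-line scriptlet. A minimal sketch of the shape, with hypothetical names (ExampleJspHelper, generateReport) that are not part of this commit:

    package org.apache.hadoop.hdfs.server.example;   // illustrative package

    import java.io.IOException;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.jsp.JspWriter;

    class ExampleJspHelper {
      // Formerly a <%! ... %> declaration inside the .jsp; as a plain class
      // it is compiled with the server code and can be unit-tested directly.
      static void generateReport(JspWriter out, HttpServletRequest req)
          throws IOException {
        final String filename = req.getParameter("filename");
        out.print("Requested file: " + (filename == null ? "(none)" : filename));
      }
    }

The page then reduces to <% ExampleJspHelper.generateReport(out, request); %>, which is exactly the shape browseBlock.jsp, browseDirectory.jsp and tail.jsp take in the diffs below.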
Added:
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/common/JspHelper.java
      - copied, changed from r776489, hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java
Removed:
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
    hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
    hadoop/core/trunk/src/webapps/datanode/browseBlock.jsp
    hadoop/core/trunk/src/webapps/datanode/browseDirectory.jsp
    hadoop/core/trunk/src/webapps/datanode/tail.jsp
    hadoop/core/trunk/src/webapps/hdfs/dfshealth.jsp
    hadoop/core/trunk/src/webapps/hdfs/dfsnodelist.jsp
    hadoop/core/trunk/src/webapps/hdfs/nn_browsedfscontent.jsp
    hadoop/core/trunk/src/webapps/secondary/status.jsp

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=776490&r1=776489&r2=776490&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Tue May 19 22:37:21 2009
@@ -370,6 +370,9 @@
     HADOOP-5866. Move DeprecatedUTF8 from o.a.h.io to o.a.h.hdfs since it may
     not be used outside hdfs. (Raghu Angadi)
 
+    HADOOP-5857. Move normal java methods from hdfs .jsp files to .java files.
+    (szetszwo)
+
   OPTIMIZATIONS
 
     HADOOP-5595. NameNode does not need to run a replicator to choose a

Copied: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/common/JspHelper.java (from r776489, hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java)
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/common/JspHelper.java?p2=hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/common/JspHelper.java&p1=hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java&r1=776489&r2=776490&rev=776490&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/common/JspHelper.java Tue May 19 22:37:21 2009
@@ -16,12 +16,11 @@
  * limitations under the License.
  */
-package org.apache.hadoop.hdfs.server.namenode;
+package org.apache.hadoop.hdfs.server.common;
 
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.net.InetSocketAddress;
-import java.net.MalformedURLException;
 import java.net.Socket;
 import java.net.URL;
 import java.net.URLEncoder;
@@ -39,13 +38,10 @@
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
-import org.apache.hadoop.hdfs.protocol.FSConstants.UpgradeAction;
-import org.apache.hadoop.hdfs.server.common.HdfsConstants;
-import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
+import org.apache.hadoop.hdfs.server.namenode.DatanodeDescriptor;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessToken;
 import org.apache.hadoop.security.UnixUserGroupInformation;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.VersionInfo;
 
 public class JspHelper {
@@ -178,59 +174,6 @@
     out.print("");
   }
 
-  public static String getSafeModeText(FSNamesystem fsn) {
-    if (!fsn.isInSafeMode())
-      return "";
-    return "Safe mode is ON. " + fsn.getSafeModeTip() + "
"; - } - - public static String getWarningText(FSNamesystem fsn) { - // Ideally this should be displayed in RED - long missingBlocks = fsn.getMissingBlocksCount(); - if (missingBlocks > 0) { - return "
WARNING :" + - " There are about " + missingBlocks + - " missing blocks. Please check the log or run fsck.

"; - } - return ""; - } - - public static String getInodeLimitText(FSNamesystem fsn) { - long inodes = fsn.dir.totalInodes(); - long blocks = fsn.getBlocksTotal(); - long maxobjects = fsn.getMaxObjects(); - long totalMemory = Runtime.getRuntime().totalMemory(); - long maxMemory = Runtime.getRuntime().maxMemory(); - - long used = (totalMemory * 100)/maxMemory; - - String str = inodes + " files and directories, " + - blocks + " blocks = " + - (inodes + blocks) + " total"; - if (maxobjects != 0) { - long pct = ((inodes + blocks) * 100)/maxobjects; - str += " / " + maxobjects + " (" + pct + "%)"; - } - str += ". Heap Size is " + StringUtils.byteDesc(totalMemory) + " / " + - StringUtils.byteDesc(maxMemory) + - " (" + used + "%)
"; - return str; - } - - public static String getUpgradeStatusText(FSNamesystem fsn) { - String statusText = ""; - try { - UpgradeStatusReport status = - fsn.distributedUpgradeProgress(UpgradeAction.GET_STATUS); - statusText = (status == null ? - "There are no upgrades in progress." : - status.getStatusText(false)); - } catch(IOException e) { - statusText = "Upgrade status unknown."; - } - return statusText; - } - public static void sortNodeList(ArrayList nodes, String field, String order) { @@ -378,16 +321,6 @@ } /** Return a table containing version information. */ - public static String getVersionTable(FSNamesystem fsn) { - return "
" - + "\n \n" - + "\n
Started:" + fsn.getStartTime() + "
Version:" + VersionInfo.getVersion() + ", " + VersionInfo.getRevision() - + "\n
Compiled:" + VersionInfo.getDate() + " by " + VersionInfo.getUser() + " from " + VersionInfo.getBranch() - + "\n
Upgrades:" + getUpgradeStatusText(fsn) - + "\n
"; - } - - /** Return a table containing version information. */ public static String getVersionTable() { return "
" + "\n "; + } + + private String colTxt() { + return " "; + } + + private void counterReset() { + rowNum = 0; + } + + private String nodeHeaderStr(String name) { + String ret = "class=header"; + String order = "ASC"; + if (name.equals(sorterField)) { + ret += sorterOrder; + if (sorterOrder.equals("ASC")) + order = "DSC"; + } + ret += " onClick=\"window.document.location=" + + "'/dfsnodelist.jsp?whatNodes=" + whatNodes + "&sorter/field=" + + name + "&sorter/order=" + order + + "'\" title=\"sort on this column\""; + + return ret; + } + + void generateNodeData(JspWriter out, DatanodeDescriptor d, + String suffix, boolean alive, int nnHttpPort) throws IOException { + /* + * Say the datanode is dn1.hadoop.apache.org with ip 192.168.0.5 we use: + * 1) d.getHostName():d.getPort() to display. Domain and port are stripped + * if they are common across the nodes. i.e. "dn1" + * 2) d.getHost():d.Port() for "title". i.e. "192.168.0.5:50010" + * 3) d.getHostName():d.getInfoPort() for url. + * i.e. "http://dn1.hadoop.apache.org:50075/..." + * Note that "d.getHost():d.getPort()" is what DFS clients use to + * interact with datanodes. + */ + + // from nn_browsedfscontent.jsp: + String url = "http://" + d.getHostName() + ":" + d.getInfoPort() + + "/browseDirectory.jsp?namenodeInfoPort=" + nnHttpPort + "&dir=" + + URLEncoder.encode("/", "UTF-8"); + + String name = d.getHostName() + ":" + d.getPort(); + if (!name.matches("\\d+\\.\\d+.\\d+\\.\\d+.*")) + name = name.replaceAll("\\.[^.:]*", ""); + int idx = (suffix != null && name.endsWith(suffix)) ? name + .indexOf(suffix) : -1; + + out.print(rowTxt() + "
Version:" + VersionInfo.getVersion() + ", " + VersionInfo.getRevision() Added: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java?rev=776490&view=auto ============================================================================== --- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java (added) +++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java Tue May 19 22:37:21 2009 @@ -0,0 +1,580 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hdfs.server.datanode; + +import java.io.File; +import java.io.IOException; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.URLEncoder; +import java.util.Date; +import java.util.List; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.servlet.jsp.JspWriter; + +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FsShell; +import org.apache.hadoop.hdfs.DFSClient; +import org.apache.hadoop.hdfs.protocol.DatanodeInfo; +import org.apache.hadoop.hdfs.protocol.LocatedBlock; +import org.apache.hadoop.hdfs.server.common.JspHelper; +import org.apache.hadoop.net.NetUtils; +import org.apache.hadoop.security.AccessToken; +import org.apache.hadoop.security.AccessTokenHandler; +import org.apache.hadoop.util.StringUtils; + +class DatanodeJspHelper { + private static final DataNode datanode = DataNode.getDataNode(); + + static void generateDirectoryStructure(JspWriter out, HttpServletRequest req, + HttpServletResponse resp) throws IOException { + final String dir = JspHelper.validatePath(req.getParameter("dir")); + if (dir == null) { + out.print("Invalid input"); + return; + } + + String namenodeInfoPortStr = req.getParameter("namenodeInfoPort"); + int namenodeInfoPort = -1; + if (namenodeInfoPortStr != null) + namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr); + + final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(), + JspHelper.conf); + String target = dir; + final FileStatus targetStatus = dfs.getFileInfo(target); + if (targetStatus == null) { // not exists + out.print("

File or directory : " + target + " does not exist

"); + JspHelper.printGotoForm(out, namenodeInfoPort, target); + } else { + if (!targetStatus.isDir()) { // a file + List blocks = dfs.namenode.getBlockLocations(dir, 0, 1) + .getLocatedBlocks(); + + LocatedBlock firstBlock = null; + DatanodeInfo[] locations = null; + if (blocks.size() > 0) { + firstBlock = blocks.get(0); + locations = firstBlock.getLocations(); + } + if (locations == null || locations.length == 0) { + out.print("Empty file"); + } else { + DatanodeInfo chosenNode = JspHelper.bestNode(firstBlock); + String fqdn = InetAddress.getByName(chosenNode.getHost()) + .getCanonicalHostName(); + String datanodeAddr = chosenNode.getName(); + int datanodePort = Integer.parseInt(datanodeAddr.substring( + datanodeAddr.indexOf(':') + 1, datanodeAddr.length())); + String redirectLocation = "http://" + fqdn + ":" + + chosenNode.getInfoPort() + "/browseBlock.jsp?blockId=" + + firstBlock.getBlock().getBlockId() + "&blockSize=" + + firstBlock.getBlock().getNumBytes() + "&genstamp=" + + firstBlock.getBlock().getGenerationStamp() + "&filename=" + + URLEncoder.encode(dir, "UTF-8") + "&datanodePort=" + + datanodePort + "&namenodeInfoPort=" + namenodeInfoPort; + resp.sendRedirect(redirectLocation); + } + return; + } + // directory + FileStatus[] files = dfs.listPaths(target); + // generate a table and dump the info + String[] headings = { "Name", "Type", "Size", "Replication", + "Block Size", "Modification Time", "Permission", "Owner", "Group" }; + out.print("

Contents of directory "); + JspHelper.printPathWithLinks(dir, out, namenodeInfoPort); + out.print("


"); + JspHelper.printGotoForm(out, namenodeInfoPort, dir); + out.print("
"); + + File f = new File(dir); + String parent; + if ((parent = f.getParent()) != null) + out.print("Go to parent directory
"); + + if (files == null || files.length == 0) { + out.print("Empty directory"); + } else { + JspHelper.addTableHeader(out); + int row = 0; + JspHelper.addTableRow(out, headings, row++); + String cols[] = new String[headings.length]; + for (int i = 0; i < files.length; i++) { + // Get the location of the first block of the file + if (files[i].getPath().toString().endsWith(".crc")) + continue; + if (!files[i].isDir()) { + cols[1] = "file"; + cols[2] = StringUtils.byteDesc(files[i].getLen()); + cols[3] = Short.toString(files[i].getReplication()); + cols[4] = StringUtils.byteDesc(files[i].getBlockSize()); + } else { + cols[1] = "dir"; + cols[2] = ""; + cols[3] = ""; + cols[4] = ""; + } + String datanodeUrl = req.getRequestURL() + "?dir=" + + URLEncoder.encode(files[i].getPath().toString(), "UTF-8") + + "&namenodeInfoPort=" + namenodeInfoPort; + cols[0] = "" + + files[i].getPath().getName() + ""; + cols[5] = FsShell.dateForm.format(new Date((files[i] + .getModificationTime()))); + cols[6] = files[i].getPermission().toString(); + cols[7] = files[i].getOwner(); + cols[8] = files[i].getGroup(); + JspHelper.addTableRow(out, cols, row++); + } + JspHelper.addTableFooter(out); + } + } + String namenodeHost = datanode.getNameNodeAddr().getHostName(); + out.print("
Go back to DFS home"); + dfs.close(); + } + + static void generateFileDetails(JspWriter out, HttpServletRequest req) + throws IOException { + + long startOffset = 0; + int datanodePort; + + final Long blockId = JspHelper.validateLong(req.getParameter("blockId")); + if (blockId == null) { + out.print("Invalid input (blockId absent)"); + return; + } + + String datanodePortStr = req.getParameter("datanodePort"); + if (datanodePortStr == null) { + out.print("Invalid input (datanodePort absent)"); + return; + } + datanodePort = Integer.parseInt(datanodePortStr); + + String namenodeInfoPortStr = req.getParameter("namenodeInfoPort"); + int namenodeInfoPort = -1; + if (namenodeInfoPortStr != null) + namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr); + + final int chunkSizeToView = JspHelper.string2ChunkSizeToView( + req.getParameter("chunkSizeToView")); + + String startOffsetStr = req.getParameter("startOffset"); + if (startOffsetStr == null || Long.parseLong(startOffsetStr) < 0) + startOffset = 0; + else + startOffset = Long.parseLong(startOffsetStr); + + final String filename=JspHelper.validatePath(req.getParameter("filename")); + if (filename == null) { + out.print("Invalid input"); + return; + } + + String blockSizeStr = req.getParameter("blockSize"); + long blockSize = 0; + if (blockSizeStr == null || blockSizeStr.length() == 0) { + out.print("Invalid input"); + return; + } + blockSize = Long.parseLong(blockSizeStr); + + final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(), + JspHelper.conf); + List blocks = dfs.namenode.getBlockLocations(filename, 0, + Long.MAX_VALUE).getLocatedBlocks(); + // Add the various links for looking at the file contents + // URL for downloading the full file + String downloadUrl = "http://" + req.getServerName() + ":" + + req.getServerPort() + "/streamFile?" + "filename=" + + URLEncoder.encode(filename, "UTF-8"); + out.print(""); + out.print("Download this file
"); + + DatanodeInfo chosenNode; + // URL for TAIL + LocatedBlock lastBlk = blocks.get(blocks.size() - 1); + try { + chosenNode = JspHelper.bestNode(lastBlk); + } catch (IOException e) { + out.print(e.toString()); + dfs.close(); + return; + } + String fqdn = InetAddress.getByName(chosenNode.getHost()) + .getCanonicalHostName(); + String tailUrl = "http://" + fqdn + ":" + chosenNode.getInfoPort() + + "/tail.jsp?filename=" + URLEncoder.encode(filename, "UTF-8") + + "&namenodeInfoPort=" + namenodeInfoPort + + "&chunkSizeToView=" + chunkSizeToView + + "&referrer=" + URLEncoder.encode( + req.getRequestURL() + "?" + req.getQueryString(), "UTF-8"); + out.print("Tail this file
"); + + out.print("
"); + out.print("Chunk size to view (in bytes, up to file's DFS block size): "); + out.print(""); + out.print(""); + out.print(""); + out.print(""); + out.print(""); + out.print(""); + out.print(""); + out.print("  "); + out.print("
"); + out.print("
"); + out.print(""); + out.print("Total number of blocks: " + blocks.size() + "
"); + // generate a table and dump the info + out.println("\n"); + for (LocatedBlock cur : blocks) { + out.print(""); + final String blockidstring = Long.toString(cur.getBlock().getBlockId()); + blockSize = cur.getBlock().getNumBytes(); + out.print(""); + DatanodeInfo[] locs = cur.getLocations(); + for (int j = 0; j < locs.length; j++) { + String datanodeAddr = locs[j].getName(); + datanodePort = Integer.parseInt(datanodeAddr.substring(datanodeAddr + .indexOf(':') + 1, datanodeAddr.length())); + fqdn = InetAddress.getByName(locs[j].getHost()).getCanonicalHostName(); + String blockUrl = "http://" + fqdn + ":" + locs[j].getInfoPort() + + "/browseBlock.jsp?blockId=" + blockidstring + + "&blockSize=" + blockSize + + "&filename=" + URLEncoder.encode(filename, "UTF-8") + + "&datanodePort=" + datanodePort + + "&genstamp=" + cur.getBlock().getGenerationStamp() + + "&namenodeInfoPort=" + namenodeInfoPort + + "&chunkSizeToView=" + chunkSizeToView; + out.print(""); + } + out.println(""); + } + out.println("
" + blockidstring + ": " + + datanodeAddr + "
"); + out.print("
"); + String namenodeHost = datanode.getNameNodeAddr().getHostName(); + out.print("
Go back to DFS home"); + dfs.close(); + } + + static void generateFileChunks(JspWriter out, HttpServletRequest req) + throws IOException { + long startOffset = 0; + int datanodePort = 0; + + String namenodeInfoPortStr = req.getParameter("namenodeInfoPort"); + int namenodeInfoPort = -1; + if (namenodeInfoPortStr != null) + namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr); + + final String filename = JspHelper + .validatePath(req.getParameter("filename")); + if (filename == null) { + out.print("Invalid input (filename absent)"); + return; + } + + final Long blockId = JspHelper.validateLong(req.getParameter("blockId")); + if (blockId == null) { + out.print("Invalid input (blockId absent)"); + return; + } + + final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(), + JspHelper.conf); + + AccessToken accessToken = AccessToken.DUMMY_TOKEN; + if (JspHelper.conf.getBoolean( + AccessTokenHandler.STRING_ENABLE_ACCESS_TOKEN, false)) { + List blks = dfs.namenode.getBlockLocations(filename, 0, + Long.MAX_VALUE).getLocatedBlocks(); + if (blks == null || blks.size() == 0) { + out.print("Can't locate file blocks"); + dfs.close(); + return; + } + for (int i = 0; i < blks.size(); i++) { + if (blks.get(i).getBlock().getBlockId() == blockId) { + accessToken = blks.get(i).getAccessToken(); + break; + } + } + } + + final Long genStamp = JspHelper.validateLong(req.getParameter("genstamp")); + if (genStamp == null) { + out.print("Invalid input (genstamp absent)"); + return; + } + + String blockSizeStr; + long blockSize = 0; + blockSizeStr = req.getParameter("blockSize"); + if (blockSizeStr == null) { + out.print("Invalid input (blockSize absent)"); + return; + } + blockSize = Long.parseLong(blockSizeStr); + + final int chunkSizeToView = JspHelper.string2ChunkSizeToView(req + .getParameter("chunkSizeToView")); + + String startOffsetStr = req.getParameter("startOffset"); + if (startOffsetStr == null || Long.parseLong(startOffsetStr) < 0) + startOffset = 0; + else + startOffset = Long.parseLong(startOffsetStr); + + String datanodePortStr = req.getParameter("datanodePort"); + if (datanodePortStr == null) { + out.print("Invalid input (datanodePort absent)"); + return; + } + datanodePort = Integer.parseInt(datanodePortStr); + out.print("

File: "); + JspHelper.printPathWithLinks(filename, out, namenodeInfoPort); + out.print("


"); + String parent = new File(filename).getParent(); + JspHelper.printGotoForm(out, namenodeInfoPort, parent); + out.print("
"); + out.print("Go back to dir listing
"); + out.print("Advanced view/download options
"); + out.print("
"); + + // Determine the prev & next blocks + long nextStartOffset = 0; + long nextBlockSize = 0; + String nextBlockIdStr = null; + String nextGenStamp = null; + String nextHost = req.getServerName(); + int nextPort = req.getServerPort(); + int nextDatanodePort = datanodePort; + // determine data for the next link + if (startOffset + chunkSizeToView >= blockSize) { + // we have to go to the next block from this point onwards + List blocks = dfs.namenode.getBlockLocations(filename, 0, + Long.MAX_VALUE).getLocatedBlocks(); + for (int i = 0; i < blocks.size(); i++) { + if (blocks.get(i).getBlock().getBlockId() == blockId) { + if (i != blocks.size() - 1) { + LocatedBlock nextBlock = blocks.get(i + 1); + nextBlockIdStr = Long.toString(nextBlock.getBlock().getBlockId()); + nextGenStamp = Long.toString(nextBlock.getBlock() + .getGenerationStamp()); + nextStartOffset = 0; + nextBlockSize = nextBlock.getBlock().getNumBytes(); + DatanodeInfo d = JspHelper.bestNode(nextBlock); + String datanodeAddr = d.getName(); + nextDatanodePort = Integer.parseInt(datanodeAddr.substring( + datanodeAddr.indexOf(':') + 1, datanodeAddr.length())); + nextHost = InetAddress.getByName(d.getHost()) + .getCanonicalHostName(); + nextPort = d.getInfoPort(); + } + } + } + } else { + // we are in the same block + nextBlockIdStr = blockId.toString(); + nextStartOffset = startOffset + chunkSizeToView; + nextBlockSize = blockSize; + nextGenStamp = genStamp.toString(); + } + String nextUrl = null; + if (nextBlockIdStr != null) { + nextUrl = "http://" + nextHost + ":" + nextPort + + "/browseBlock.jsp?blockId=" + nextBlockIdStr + + "&blockSize=" + nextBlockSize + + "&startOffset=" + nextStartOffset + + "&genstamp=" + nextGenStamp + + "&filename=" + URLEncoder.encode(filename, "UTF-8") + + "&chunkSizeToView=" + chunkSizeToView + + "&datanodePort=" + nextDatanodePort + + "&namenodeInfoPort=" + namenodeInfoPort; + out.print("View Next chunk  "); + } + // determine data for the prev link + String prevBlockIdStr = null; + String prevGenStamp = null; + long prevStartOffset = 0; + long prevBlockSize = 0; + String prevHost = req.getServerName(); + int prevPort = req.getServerPort(); + int prevDatanodePort = datanodePort; + if (startOffset == 0) { + List blocks = dfs.namenode.getBlockLocations(filename, 0, + Long.MAX_VALUE).getLocatedBlocks(); + for (int i = 0; i < blocks.size(); i++) { + if (blocks.get(i).getBlock().getBlockId() == blockId) { + if (i != 0) { + LocatedBlock prevBlock = blocks.get(i - 1); + prevBlockIdStr = Long.toString(prevBlock.getBlock().getBlockId()); + prevGenStamp = Long.toString(prevBlock.getBlock() + .getGenerationStamp()); + prevStartOffset = prevBlock.getBlock().getNumBytes() + - chunkSizeToView; + if (prevStartOffset < 0) + prevStartOffset = 0; + prevBlockSize = prevBlock.getBlock().getNumBytes(); + DatanodeInfo d = JspHelper.bestNode(prevBlock); + String datanodeAddr = d.getName(); + prevDatanodePort = Integer.parseInt(datanodeAddr.substring( + datanodeAddr.indexOf(':') + 1, datanodeAddr.length())); + prevHost = InetAddress.getByName(d.getHost()) + .getCanonicalHostName(); + prevPort = d.getInfoPort(); + } + } + } + } else { + // we are in the same block + prevBlockIdStr = blockId.toString(); + prevStartOffset = startOffset - chunkSizeToView; + if (prevStartOffset < 0) + prevStartOffset = 0; + prevBlockSize = blockSize; + prevGenStamp = genStamp.toString(); + } + + String prevUrl = null; + if (prevBlockIdStr != null) { + prevUrl = "http://" + prevHost + ":" + prevPort + + "/browseBlock.jsp?blockId=" + 
prevBlockIdStr + + "&blockSize=" + prevBlockSize + + "&startOffset=" + prevStartOffset + + "&filename=" + URLEncoder.encode(filename, "UTF-8") + + "&chunkSizeToView=" + chunkSizeToView + + "&genstamp=" + prevGenStamp + + "&datanodePort=" + prevDatanodePort + + "&namenodeInfoPort=" + namenodeInfoPort; + out.print("View Prev chunk  "); + } + out.print("
"); + out.print(""); + dfs.close(); + } + + static void generateFileChunksForTail(JspWriter out, HttpServletRequest req) + throws IOException { + final String referrer = JspHelper.validateURL(req.getParameter("referrer")); + boolean noLink = false; + if (referrer == null) { + noLink = true; + } + + final String filename = JspHelper + .validatePath(req.getParameter("filename")); + if (filename == null) { + out.print("Invalid input (file name absent)"); + return; + } + + String namenodeInfoPortStr = req.getParameter("namenodeInfoPort"); + int namenodeInfoPort = -1; + if (namenodeInfoPortStr != null) + namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr); + + final int chunkSizeToView = JspHelper.string2ChunkSizeToView(req + .getParameter("chunkSizeToView")); + + if (!noLink) { + out.print("

Tail of File: "); + JspHelper.printPathWithLinks(filename, out, namenodeInfoPort); + out.print("


"); + out.print("Go Back to File View
"); + } else { + out.print("

" + filename + "

"); + } + out.print("Chunk size to view (in bytes, up to file's DFS block size): "); + out.print(""); + out.print("  
"); + out.print(""); + out.print(""); + if (!noLink) + out.print(""); + + // fetch the block from the datanode that has the last block for this file + final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(), + JspHelper.conf); + List blocks = dfs.namenode.getBlockLocations(filename, 0, + Long.MAX_VALUE).getLocatedBlocks(); + if (blocks == null || blocks.size() == 0) { + out.print("No datanodes contain blocks of file " + filename); + dfs.close(); + return; + } + LocatedBlock lastBlk = blocks.get(blocks.size() - 1); + long blockSize = lastBlk.getBlock().getNumBytes(); + long blockId = lastBlk.getBlock().getBlockId(); + AccessToken accessToken = lastBlk.getAccessToken(); + long genStamp = lastBlk.getBlock().getGenerationStamp(); + DatanodeInfo chosenNode; + try { + chosenNode = JspHelper.bestNode(lastBlk); + } catch (IOException e) { + out.print(e.toString()); + dfs.close(); + return; + } + InetSocketAddress addr = NetUtils.createSocketAddr(chosenNode.getName()); + // view the last chunkSizeToView bytes while Tailing + final long startOffset = blockSize >= chunkSizeToView ? blockSize + - chunkSizeToView : 0; + + out.print(""); + dfs.close(); + } +} \ No newline at end of file Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java?rev=776490&r1=776489&r2=776490&view=diff ============================================================================== --- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java (original) +++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java Tue May 19 22:37:21 2009 @@ -34,6 +34,7 @@ import org.apache.hadoop.hdfs.protocol.ClientProtocol; import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; +import org.apache.hadoop.hdfs.server.common.JspHelper; import org.apache.hadoop.security.UnixUserGroupInformation; import org.apache.hadoop.security.UserGroupInformation; Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java?rev=776490&r1=776489&r2=776490&view=diff ============================================================================== --- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java (original) +++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java Tue May 19 22:37:21 2009 @@ -28,6 +28,7 @@ import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; +import org.apache.hadoop.hdfs.server.common.JspHelper; import org.apache.hadoop.security.UnixUserGroupInformation; /** Redirect queries about the hosted filesystem to an appropriate datanode. 
Added: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java?rev=776490&view=auto ============================================================================== --- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java (added) +++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java Tue May 19 22:37:21 2009 @@ -0,0 +1,453 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hdfs.server.namenode; + +import java.io.IOException; +import java.net.InetAddress; +import java.net.URLEncoder; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.servlet.jsp.JspWriter; + +import org.apache.hadoop.hdfs.protocol.FSConstants.UpgradeAction; +import org.apache.hadoop.hdfs.server.common.JspHelper; +import org.apache.hadoop.hdfs.server.common.Storage; +import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport; +import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory; +import org.apache.hadoop.util.ServletUtil; +import org.apache.hadoop.util.StringUtils; +import org.apache.hadoop.util.VersionInfo; + +class NamenodeJspHelper { + static String getSafeModeText(FSNamesystem fsn) { + if (!fsn.isInSafeMode()) + return ""; + return "Safe mode is ON. " + fsn.getSafeModeTip() + "
"; + } + + static String getInodeLimitText(FSNamesystem fsn) { + long inodes = fsn.dir.totalInodes(); + long blocks = fsn.getBlocksTotal(); + long maxobjects = fsn.getMaxObjects(); + long totalMemory = Runtime.getRuntime().totalMemory(); + long maxMemory = Runtime.getRuntime().maxMemory(); + + long used = (totalMemory * 100) / maxMemory; + + String str = inodes + " files and directories, " + blocks + " blocks = " + + (inodes + blocks) + " total"; + if (maxobjects != 0) { + long pct = ((inodes + blocks) * 100) / maxobjects; + str += " / " + maxobjects + " (" + pct + "%)"; + } + str += ". Heap Size is " + StringUtils.byteDesc(totalMemory) + " / " + + StringUtils.byteDesc(maxMemory) + " (" + used + "%)
"; + return str; + } + + static String getUpgradeStatusText(FSNamesystem fsn) { + String statusText = ""; + try { + UpgradeStatusReport status = fsn + .distributedUpgradeProgress(UpgradeAction.GET_STATUS); + statusText = (status == null ? "There are no upgrades in progress." + : status.getStatusText(false)); + } catch (IOException e) { + statusText = "Upgrade status unknown."; + } + return statusText; + } + + /** Return a table containing version information. */ + static String getVersionTable(FSNamesystem fsn) { + return "
" + + "\n \n" + "\n
Started:" + fsn.getStartTime() + + "
Version:" + + VersionInfo.getVersion() + ", " + VersionInfo.getRevision() + + "\n
Compiled:" + VersionInfo.getDate() + + " by " + VersionInfo.getUser() + " from " + VersionInfo.getBranch() + + "\n
Upgrades:" + + getUpgradeStatusText(fsn) + "\n
"; + } + + static String getWarningText(FSNamesystem fsn) { + // Ideally this should be displayed in RED + long missingBlocks = fsn.getMissingBlocksCount(); + if (missingBlocks > 0) { + return "
WARNING :" + " There are about " + missingBlocks + + " missing blocks. Please check the log or run fsck.

"; + } + return ""; + } + + static class HealthJsp { + private int rowNum = 0; + private int colNum = 0; + private String sorterField = null; + private String sorterOrder = null; + + private String rowTxt() { + colNum = 0; + return "
"; + } + + private void counterReset() { + colNum = 0; + rowNum = 0; + } + + void generateConfReport(JspWriter out, NameNode nn, + HttpServletRequest request) throws IOException { + FSNamesystem fsn = nn.getNamesystem(); + FSImage fsImage = fsn.getFSImage(); + List removedStorageDirs = fsImage + .getRemovedStorageDirs(); + + // FS Image storage configuration + out.print("

" + nn.getRole() + " Storage:

"); + out.print("
\n" + + ""); + + StorageDirectory st = null; + for (Iterator it = fsImage.dirIterator(); it.hasNext();) { + st = it.next(); + String dir = "" + st.getRoot(); + String type = "" + st.getStorageDirType(); + out.print(""); + } + + long storageDirsSize = removedStorageDirs.size(); + for (int i = 0; i < storageDirsSize; i++) { + st = removedStorageDirs.get(i); + String dir = "" + st.getRoot(); + String type = "" + st.getStorageDirType(); + out.print(""); + } + + out.print("
Storage DirectoryTypeState
" + dir + "" + type + + "Active
" + dir + "" + type + + "Failed

\n"); + } + + void generateHealthReport(JspWriter out, NameNode nn, + HttpServletRequest request) throws IOException { + FSNamesystem fsn = nn.getNamesystem(); + ArrayList live = new ArrayList(); + ArrayList dead = new ArrayList(); + fsn.DFSNodesStatus(live, dead); + + sorterField = request.getParameter("sorter/field"); + sorterOrder = request.getParameter("sorter/order"); + if (sorterField == null) + sorterField = "name"; + if (sorterOrder == null) + sorterOrder = "ASC"; + + // Find out common suffix. Should this be before or after the sort? + String port_suffix = null; + if (live.size() > 0) { + String name = live.get(0).getName(); + int idx = name.indexOf(':'); + if (idx > 0) { + port_suffix = name.substring(idx); + } + + for (int i = 1; port_suffix != null && i < live.size(); i++) { + if (live.get(i).getName().endsWith(port_suffix) == false) { + port_suffix = null; + break; + } + } + } + + counterReset(); + long[] fsnStats = fsn.getStats(); + long total = fsnStats[0]; + long remaining = fsnStats[2]; + long used = fsnStats[1]; + long nonDFS = total - remaining - used; + nonDFS = nonDFS < 0 ? 0 : nonDFS; + float percentUsed = total <= 0 ? 0f : ((float) used * 100.0f) + / (float) total; + float percentRemaining = total <= 0 ? 100f : ((float) remaining * 100.0f) + / (float) total; + + out.print("
\n" + rowTxt() + colTxt() + + "Configured Capacity" + colTxt() + ":" + colTxt() + + StringUtils.byteDesc(total) + rowTxt() + colTxt() + "DFS Used" + + colTxt() + ":" + colTxt() + StringUtils.byteDesc(used) + rowTxt() + + colTxt() + "Non DFS Used" + colTxt() + ":" + colTxt() + + StringUtils.byteDesc(nonDFS) + rowTxt() + colTxt() + + "DFS Remaining" + colTxt() + ":" + colTxt() + + StringUtils.byteDesc(remaining) + rowTxt() + colTxt() + "DFS Used%" + + colTxt() + ":" + colTxt() + + StringUtils.limitDecimalTo2(percentUsed) + " %" + rowTxt() + + colTxt() + "DFS Remaining%" + colTxt() + ":" + colTxt() + + StringUtils.limitDecimalTo2(percentRemaining) + " %" + rowTxt() + + colTxt() + + "Live Nodes " + + colTxt() + ":" + colTxt() + live.size() + rowTxt() + colTxt() + + "Dead Nodes " + + colTxt() + ":" + colTxt() + dead.size() + "

\n"); + + if (live.isEmpty() && dead.isEmpty()) { + out.print("There are no datanodes in the cluster"); + } + } + } + + static void redirectToRandomDataNode(NameNode nn, HttpServletResponse resp) + throws IOException { + FSNamesystem fsn = nn.getNamesystem(); + String datanode = fsn.randomDataNode(); + String redirectLocation; + String nodeToRedirect; + int redirectPort; + if (datanode != null) { + redirectPort = Integer.parseInt(datanode + .substring(datanode.indexOf(':') + 1)); + nodeToRedirect = datanode.substring(0, datanode.indexOf(':')); + } else { + nodeToRedirect = nn.getHttpAddress().getHostName(); + redirectPort = nn.getHttpAddress().getPort(); + } + String fqdn = InetAddress.getByName(nodeToRedirect).getCanonicalHostName(); + redirectLocation = "http://" + fqdn + ":" + redirectPort + + "/browseDirectory.jsp?namenodeInfoPort=" + + nn.getHttpAddress().getPort() + "&dir=" + + URLEncoder.encode("/", "UTF-8"); + resp.sendRedirect(redirectLocation); + } + + static class NodeListJsp { + private int rowNum = 0; + + private long diskBytes = 1024 * 1024 * 1024; + private String diskByteStr = "GB"; + + private String sorterField = null; + private String sorterOrder = null; + + private String whatNodes = "LIVE"; + + private String rowTxt() { + return "
" + + ((idx > 0) ? name.substring(0, idx) : name) + "" + + ((alive) ? "" : "\n")); + if (!alive) + return; + + long c = d.getCapacity(); + long u = d.getDfsUsed(); + long nu = d.getNonDfsUsed(); + long r = d.getRemaining(); + String percentUsed = StringUtils.limitDecimalTo2(d.getDfsUsedPercent()); + String percentRemaining = StringUtils.limitDecimalTo2(d + .getRemainingPercent()); + + String adminState = (d.isDecommissioned() ? "Decommissioned" : (d + .isDecommissionInProgress() ? "Decommission In Progress" + : "In Service")); + + long timestamp = d.getLastUpdate(); + long currentTime = System.currentTimeMillis(); + out.print(" " + + ((currentTime - timestamp) / 1000) + + "" + + adminState + + "" + + StringUtils.limitDecimalTo2(c * 1.0 / diskBytes) + + "" + + StringUtils.limitDecimalTo2(u * 1.0 / diskBytes) + + "" + + StringUtils.limitDecimalTo2(nu * 1.0 / diskBytes) + + "" + + StringUtils.limitDecimalTo2(r * 1.0 / diskBytes) + + "" + + percentUsed + + "" + + ServletUtil.percentageGraph((int) Double.parseDouble(percentUsed), + 100) + "" + + percentRemaining + "" + d.numBlocks() + + "\n"); + } + + void generateNodesList(JspWriter out, NameNode nn, + HttpServletRequest request) throws IOException { + ArrayList live = new ArrayList(); + ArrayList dead = new ArrayList(); + nn.getNamesystem().DFSNodesStatus(live, dead); + + whatNodes = request.getParameter("whatNodes"); // show only live or only + // dead nodes + sorterField = request.getParameter("sorter/field"); + sorterOrder = request.getParameter("sorter/order"); + if (sorterField == null) + sorterField = "name"; + if (sorterOrder == null) + sorterOrder = "ASC"; + + JspHelper.sortNodeList(live, sorterField, sorterOrder); + JspHelper.sortNodeList(dead, "name", "ASC"); + + // Find out common suffix. Should this be before or after the sort? + String port_suffix = null; + if (live.size() > 0) { + String name = live.get(0).getName(); + int idx = name.indexOf(':'); + if (idx > 0) { + port_suffix = name.substring(idx); + } + + for (int i = 1; port_suffix != null && i < live.size(); i++) { + if (live.get(i).getName().endsWith(port_suffix) == false) { + port_suffix = null; + break; + } + } + } + + counterReset(); + + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + } + + if (live.isEmpty() && dead.isEmpty()) { + out.print("There are no datanodes in the cluster"); + } else { + + int nnHttpPort = nn.getHttpAddress().getPort(); + out.print("
"); + if (whatNodes.equals("LIVE")) { + + out.print("" + "Live Datanodes : " + + live.size() + "" + + "

\n\n"); + + counterReset(); + + if (live.size() > 0) { + if (live.get(0).getCapacity() > 1024 * diskBytes) { + diskBytes *= 1024; + diskByteStr = "TB"; + } + + out.print("
Node Last
Contact
Admin State Configured
Capacity (" + diskByteStr + ")
Used
(" + diskByteStr + + ")
Non DFS
Used (" + diskByteStr + ")
Remaining
(" + + diskByteStr + ")
Used
(%)
Used
(%)
Remaining
(%)
Blocks\n"); + + JspHelper.sortNodeList(live, sorterField, sorterOrder); + for (int i = 0; i < live.size(); i++) { + generateNodeData(out, live.get(i), port_suffix, true, nnHttpPort); + } + } + out.print("
\n"); + } else { + + out.print("
" + + " Dead Datanodes : " + dead.size() + "

\n"); + + if (dead.size() > 0) { + out.print(" " + + "
Node \n"); + + JspHelper.sortNodeList(dead, "name", "ASC"); + for (int i = 0; i < dead.size(); i++) { + generateNodeData(out, dead.get(i), port_suffix, false, nnHttpPort); + } + + out.print("
\n"); + } + } + out.print("
"); + } + } + } +} \ No newline at end of file Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java?rev=776490&r1=776489&r2=776490&view=diff ============================================================================== --- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java (original) +++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java Tue May 19 22:37:21 2009 @@ -29,6 +29,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSInputStream; import org.apache.hadoop.hdfs.DFSClient; +import org.apache.hadoop.hdfs.server.common.JspHelper; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.security.UnixUserGroupInformation; Modified: hadoop/core/trunk/src/webapps/datanode/browseBlock.jsp URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/webapps/datanode/browseBlock.jsp?rev=776490&r1=776489&r2=776490&view=diff ============================================================================== --- hadoop/core/trunk/src/webapps/datanode/browseBlock.jsp (original) +++ hadoop/core/trunk/src/webapps/datanode/browseBlock.jsp Tue May 19 22:37:21 2009 @@ -26,375 +26,18 @@ import="java.net.*" import="org.apache.hadoop.hdfs.*" - import="org.apache.hadoop.hdfs.server.namenode.*" import="org.apache.hadoop.hdfs.protocol.*" - import="org.apache.hadoop.security.AccessToken" - import="org.apache.hadoop.security.AccessTokenHandler" + import="org.apache.hadoop.hdfs.server.common.JspHelper" import="org.apache.hadoop.util.*" %> - -<%! - static final DataNode datanode = DataNode.getDataNode(); - - public void generateFileDetails(JspWriter out, HttpServletRequest req) - throws IOException { - - long startOffset = 0; - int datanodePort; - - final Long blockId = JspHelper.validateLong(req.getParameter("blockId")); - if (blockId == null) { - out.print("Invalid input (blockId absent)"); - return; - } - - String datanodePortStr = req.getParameter("datanodePort"); - if (datanodePortStr == null) { - out.print("Invalid input (datanodePort absent)"); - return; - } - datanodePort = Integer.parseInt(datanodePortStr); - - String namenodeInfoPortStr = req.getParameter("namenodeInfoPort"); - int namenodeInfoPort = -1; - if (namenodeInfoPortStr != null) - namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr); - - final int chunkSizeToView = JspHelper.string2ChunkSizeToView(req.getParameter("chunkSizeToView")); - - String startOffsetStr = req.getParameter("startOffset"); - if (startOffsetStr == null || Long.parseLong(startOffsetStr) < 0) - startOffset = 0; - else startOffset = Long.parseLong(startOffsetStr); - - final String filename = JspHelper.validatePath( - req.getParameter("filename")); - if (filename == null) { - out.print("Invalid input"); - return; - } - - String blockSizeStr = req.getParameter("blockSize"); - long blockSize = 0; - if (blockSizeStr == null || blockSizeStr.length() == 0) { - out.print("Invalid input"); - return; - } - blockSize = Long.parseLong(blockSizeStr); - - final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(), JspHelper.conf); - List blocks = - dfs.namenode.getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks(); - //Add the various links for looking at the file contents - //URL for downloading the full file - String downloadUrl = "http://" + req.getServerName() + ":" + - + req.getServerPort() + "/streamFile?" 
+ "filename=" + - URLEncoder.encode(filename, "UTF-8"); - out.print(""); - out.print("Download this file
"); - - DatanodeInfo chosenNode; - //URL for TAIL - LocatedBlock lastBlk = blocks.get(blocks.size() - 1); - try { - chosenNode = JspHelper.bestNode(lastBlk); - } catch (IOException e) { - out.print(e.toString()); - dfs.close(); - return; - } - String fqdn = - InetAddress.getByName(chosenNode.getHost()).getCanonicalHostName(); - String tailUrl = "http://" + fqdn + ":" + - chosenNode.getInfoPort() + - "/tail.jsp?filename=" + URLEncoder.encode(filename, "UTF-8") + - "&namenodeInfoPort=" + namenodeInfoPort + - "&chunkSizeToView=" + chunkSizeToView + - "&referrer=" + - URLEncoder.encode(req.getRequestURL() + "?" + req.getQueryString(), - "UTF-8"); - out.print("Tail this file
"); - - out.print("
"); - out.print("Chunk size to view (in bytes, up to file's DFS block size): "); - out.print(""); - out.print(""); - out.print(""); - out.print(""); - out.print(""); - out.print(""); - out.print(""); - out.print("  "); - out.print("
"); - out.print("
"); - out.print(""); - out.print("Total number of blocks: "+blocks.size()+"
"); - //generate a table and dump the info - out.println("\n"); - for (LocatedBlock cur : blocks) { - out.print(""); - final String blockidstring = Long.toString(cur.getBlock().getBlockId()); - blockSize = cur.getBlock().getNumBytes(); - out.print(""); - DatanodeInfo[] locs = cur.getLocations(); - for(int j=0; j " - + ""); - } - out.println(""); - } - out.println("
"+blockidstring+":" + datanodeAddr + "
"); - out.print("
"); - String namenodeHost = datanode.getNameNodeAddr().getHostName(); - out.print("
Go back to DFS home"); - dfs.close(); - } - - public void generateFileChunks(JspWriter out, HttpServletRequest req) - throws IOException { - long startOffset = 0; - int datanodePort = 0; - - String namenodeInfoPortStr = req.getParameter("namenodeInfoPort"); - int namenodeInfoPort = -1; - if (namenodeInfoPortStr != null) - namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr); - - final String filename = JspHelper.validatePath( - req.getParameter("filename")); - if (filename == null) { - out.print("Invalid input (filename absent)"); - return; - } - - final Long blockId = JspHelper.validateLong(req.getParameter("blockId")); - if (blockId == null) { - out.print("Invalid input (blockId absent)"); - return; - } - - final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(), JspHelper.conf); - - AccessToken accessToken = AccessToken.DUMMY_TOKEN; - if (JspHelper.conf - .getBoolean(AccessTokenHandler.STRING_ENABLE_ACCESS_TOKEN, false)) { - List blks = dfs.namenode.getBlockLocations(filename, 0, - Long.MAX_VALUE).getLocatedBlocks(); - if (blks == null || blks.size() == 0) { - out.print("Can't locate file blocks"); - dfs.close(); - return; - } - for (int i = 0; i < blks.size(); i++) { - if (blks.get(i).getBlock().getBlockId() == blockId) { - accessToken = blks.get(i).getAccessToken(); - break; - } - } - } - - final Long genStamp = JspHelper.validateLong(req.getParameter("genstamp")); - if (genStamp == null) { - out.print("Invalid input (genstamp absent)"); - return; - } - - String blockSizeStr; - long blockSize = 0; - blockSizeStr = req.getParameter("blockSize"); - if (blockSizeStr == null) { - out.print("Invalid input (blockSize absent)"); - return; - } - blockSize = Long.parseLong(blockSizeStr); - - final int chunkSizeToView = JspHelper.string2ChunkSizeToView(req.getParameter("chunkSizeToView")); - - String startOffsetStr = req.getParameter("startOffset"); - if (startOffsetStr == null || Long.parseLong(startOffsetStr) < 0) - startOffset = 0; - else startOffset = Long.parseLong(startOffsetStr); - - String datanodePortStr = req.getParameter("datanodePort"); - if (datanodePortStr == null) { - out.print("Invalid input (datanodePort absent)"); - return; - } - datanodePort = Integer.parseInt(datanodePortStr); - out.print("

File: "); - JspHelper.printPathWithLinks(filename, out, namenodeInfoPort); - out.print("


"); - String parent = new File(filename).getParent(); - JspHelper.printGotoForm(out, namenodeInfoPort, parent); - out.print("
"); - out.print("Go back to dir listing
"); - out.print("Advanced view/download options
"); - out.print("
"); - - //Determine the prev & next blocks - long nextStartOffset = 0; - long nextBlockSize = 0; - String nextBlockIdStr = null; - String nextGenStamp = null; - String nextHost = req.getServerName(); - int nextPort = req.getServerPort(); - int nextDatanodePort = datanodePort; - //determine data for the next link - if (startOffset + chunkSizeToView >= blockSize) { - //we have to go to the next block from this point onwards - List blocks = - dfs.namenode.getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks(); - for (int i = 0; i < blocks.size(); i++) { - if (blocks.get(i).getBlock().getBlockId() == blockId) { - if (i != blocks.size() - 1) { - LocatedBlock nextBlock = blocks.get(i+1); - nextBlockIdStr = Long.toString(nextBlock.getBlock().getBlockId()); - nextGenStamp = Long.toString(nextBlock.getBlock().getGenerationStamp()); - nextStartOffset = 0; - nextBlockSize = nextBlock.getBlock().getNumBytes(); - DatanodeInfo d = JspHelper.bestNode(nextBlock); - String datanodeAddr = d.getName(); - nextDatanodePort = Integer.parseInt( - datanodeAddr.substring( - datanodeAddr.indexOf(':') + 1, - datanodeAddr.length())); - nextHost = InetAddress.getByName(d.getHost()).getCanonicalHostName(); - nextPort = d.getInfoPort(); - } - } - } - } - else { - //we are in the same block - nextBlockIdStr = blockId.toString(); - nextStartOffset = startOffset + chunkSizeToView; - nextBlockSize = blockSize; - nextGenStamp = genStamp.toString(); - } - String nextUrl = null; - if (nextBlockIdStr != null) { - nextUrl = "http://" + nextHost + ":" + - nextPort + - "/browseBlock.jsp?blockId=" + nextBlockIdStr + - "&blockSize=" + nextBlockSize + "&startOffset=" + - nextStartOffset + - "&genstamp=" + nextGenStamp + - "&filename=" + URLEncoder.encode(filename, "UTF-8") + - "&chunkSizeToView=" + chunkSizeToView + - "&datanodePort=" + nextDatanodePort + - "&namenodeInfoPort=" + namenodeInfoPort; - out.print("View Next chunk  "); - } - //determine data for the prev link - String prevBlockIdStr = null; - String prevGenStamp = null; - long prevStartOffset = 0; - long prevBlockSize = 0; - String prevHost = req.getServerName(); - int prevPort = req.getServerPort(); - int prevDatanodePort = datanodePort; - if (startOffset == 0) { - List blocks = - dfs.namenode.getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks(); - for (int i = 0; i < blocks.size(); i++) { - if (blocks.get(i).getBlock().getBlockId() == blockId) { - if (i != 0) { - LocatedBlock prevBlock = blocks.get(i-1); - prevBlockIdStr = Long.toString(prevBlock.getBlock().getBlockId()); - prevGenStamp = Long.toString(prevBlock.getBlock().getGenerationStamp()); - prevStartOffset = prevBlock.getBlock().getNumBytes() - chunkSizeToView; - if (prevStartOffset < 0) - prevStartOffset = 0; - prevBlockSize = prevBlock.getBlock().getNumBytes(); - DatanodeInfo d = JspHelper.bestNode(prevBlock); - String datanodeAddr = d.getName(); - prevDatanodePort = Integer.parseInt( - datanodeAddr.substring( - datanodeAddr.indexOf(':') + 1, - datanodeAddr.length())); - prevHost = InetAddress.getByName(d.getHost()).getCanonicalHostName(); - prevPort = d.getInfoPort(); - } - } - } - } - else { - //we are in the same block - prevBlockIdStr = blockId.toString(); - prevStartOffset = startOffset - chunkSizeToView; - if (prevStartOffset < 0) prevStartOffset = 0; - prevBlockSize = blockSize; - prevGenStamp = genStamp.toString(); - } - - String prevUrl = null; - if (prevBlockIdStr != null) { - prevUrl = "http://" + prevHost + ":" + - prevPort + - "/browseBlock.jsp?blockId=" + prevBlockIdStr + - 
"&blockSize=" + prevBlockSize + "&startOffset=" + - prevStartOffset + - "&filename=" + URLEncoder.encode(filename, "UTF-8") + - "&chunkSizeToView=" + chunkSizeToView + - "&genstamp=" + prevGenStamp + - "&datanodePort=" + prevDatanodePort + - "&namenodeInfoPort=" + namenodeInfoPort; - out.print("View Prev chunk  "); - } - out.print("
"); - out.print(""); - dfs.close(); - } - -%> <%JspHelper.createTitle(out, request, request.getParameter("filename")); %> -<% - generateFileChunks(out,request); -%> +<% DatanodeJspHelper.generateFileChunks(out,request); %>
-<% - generateFileDetails(out,request); -%> +<% DatanodeJspHelper.generateFileDetails(out,request); %>

Local logs

Log directory Modified: hadoop/core/trunk/src/webapps/datanode/browseDirectory.jsp URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/webapps/datanode/browseDirectory.jsp?rev=776490&r1=776489&r2=776490&view=diff ============================================================================== --- hadoop/core/trunk/src/webapps/datanode/browseDirectory.jsp (original) +++ hadoop/core/trunk/src/webapps/datanode/browseDirectory.jsp Tue May 19 22:37:21 2009 @@ -27,133 +27,10 @@ import="org.apache.hadoop.fs.*" import="org.apache.hadoop.hdfs.*" - import="org.apache.hadoop.hdfs.server.namenode.*" import="org.apache.hadoop.hdfs.protocol.*" + import="org.apache.hadoop.hdfs.server.common.JspHelper" import="org.apache.hadoop.util.*" %> -<%! - static final DataNode datanode = DataNode.getDataNode(); - - public void generateDirectoryStructure( JspWriter out, - HttpServletRequest req, - HttpServletResponse resp) - throws IOException { - final String dir = JspHelper.validatePath(req.getParameter("dir")); - if (dir == null) { - out.print("Invalid input"); - return; - } - - String namenodeInfoPortStr = req.getParameter("namenodeInfoPort"); - int namenodeInfoPort = -1; - if (namenodeInfoPortStr != null) - namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr); - - final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(), JspHelper.conf); - String target = dir; - final FileStatus targetStatus = dfs.getFileInfo(target); - if (targetStatus == null) { // not exists - out.print("

File or directory : " + target + " does not exist

"); - JspHelper.printGotoForm(out, namenodeInfoPort, target); - } - else { - if( !targetStatus.isDir() ) { // a file - List blocks = - dfs.namenode.getBlockLocations(dir, 0, 1).getLocatedBlocks(); - - LocatedBlock firstBlock = null; - DatanodeInfo [] locations = null; - if (blocks.size() > 0) { - firstBlock = blocks.get(0); - locations = firstBlock.getLocations(); - } - if (locations == null || locations.length == 0) { - out.print("Empty file"); - } else { - DatanodeInfo chosenNode = JspHelper.bestNode(firstBlock); - String fqdn = InetAddress.getByName(chosenNode.getHost()). - getCanonicalHostName(); - String datanodeAddr = chosenNode.getName(); - int datanodePort = Integer.parseInt( - datanodeAddr.substring( - datanodeAddr.indexOf(':') + 1, - datanodeAddr.length())); - String redirectLocation = "http://"+fqdn+":" + - chosenNode.getInfoPort() + - "/browseBlock.jsp?blockId=" + - firstBlock.getBlock().getBlockId() + - "&blockSize=" + firstBlock.getBlock().getNumBytes() + - "&genstamp=" + firstBlock.getBlock().getGenerationStamp() + - "&filename=" + URLEncoder.encode(dir, "UTF-8") + - "&datanodePort=" + datanodePort + - "&namenodeInfoPort=" + namenodeInfoPort; - resp.sendRedirect(redirectLocation); - } - return; - } - // directory - FileStatus[] files = dfs.listPaths(target); - //generate a table and dump the info - String [] headings = { "Name", "Type", "Size", "Replication", - "Block Size", "Modification Time", - "Permission", "Owner", "Group" }; - out.print("

Contents of directory "); - JspHelper.printPathWithLinks(dir, out, namenodeInfoPort); - out.print("


"); - JspHelper.printGotoForm(out, namenodeInfoPort, dir); - out.print("
"); - - File f = new File(dir); - String parent; - if ((parent = f.getParent()) != null) - out.print("Go to parent directory
"); - - if (files == null || files.length == 0) { - out.print("Empty directory"); - } - else { - JspHelper.addTableHeader(out); - int row=0; - JspHelper.addTableRow(out, headings, row++); - String cols [] = new String[headings.length]; - for (int i = 0; i < files.length; i++) { - //Get the location of the first block of the file - if (files[i].getPath().toString().endsWith(".crc")) continue; - if (!files[i].isDir()) { - cols[1] = "file"; - cols[2] = StringUtils.byteDesc(files[i].getLen()); - cols[3] = Short.toString(files[i].getReplication()); - cols[4] = StringUtils.byteDesc(files[i].getBlockSize()); - } - else { - cols[1] = "dir"; - cols[2] = ""; - cols[3] = ""; - cols[4] = ""; - } - String datanodeUrl = req.getRequestURL()+"?dir="+ - URLEncoder.encode(files[i].getPath().toString(), "UTF-8") + - "&namenodeInfoPort=" + namenodeInfoPort; - cols[0] = ""+files[i].getPath().getName()+""; - cols[5] = FsShell.dateForm.format(new Date((files[i].getModificationTime()))); - cols[6] = files[i].getPermission().toString(); - cols[7] = files[i].getOwner(); - cols[8] = files[i].getGroup(); - JspHelper.addTableRow(out, cols, row++); - } - JspHelper.addTableFooter(out); - } - } - String namenodeHost = datanode.getNameNodeAddr().getHostName(); - out.print("
Go back to DFS home"); - dfs.close(); - } - -%> @@ -171,7 +48,7 @@ <% try { - generateDirectoryStructure(out,request,response); + DatanodeJspHelper.generateDirectoryStructure(out,request,response); } catch(IOException ioe) { String msg = ioe.getLocalizedMessage(); Modified: hadoop/core/trunk/src/webapps/datanode/tail.jsp URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/webapps/datanode/tail.jsp?rev=776490&r1=776489&r2=776490&view=diff ============================================================================== --- hadoop/core/trunk/src/webapps/datanode/tail.jsp (original) +++ hadoop/core/trunk/src/webapps/datanode/tail.jsp Tue May 19 22:37:21 2009 @@ -30,100 +30,15 @@ import="org.apache.hadoop.hdfs.protocol.*" import="org.apache.hadoop.security.AccessToken" import="org.apache.hadoop.util.*" - import="org.apache.hadoop.net.NetUtils" + import="org.apache.hadoop.hdfs.server.common.JspHelper" %> - -<%! - static final DataNode datanode = DataNode.getDataNode(); - - public void generateFileChunks(JspWriter out, HttpServletRequest req) - throws IOException { - final String referrer = JspHelper.validateURL(req.getParameter("referrer")); - boolean noLink = false; - if (referrer == null) { - noLink = true; - } - - final String filename = JspHelper.validatePath( - req.getParameter("filename")); - if (filename == null) { - out.print("Invalid input (file name absent)"); - return; - } - - String namenodeInfoPortStr = req.getParameter("namenodeInfoPort"); - int namenodeInfoPort = -1; - if (namenodeInfoPortStr != null) - namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr); - - final int chunkSizeToView = JspHelper.string2ChunkSizeToView(req.getParameter("chunkSizeToView")); - - if (!noLink) { - out.print("

Tail of File: "); - JspHelper.printPathWithLinks(filename, out, namenodeInfoPort); - out.print("


"); - out.print("Go Back to File View
"); - } - else { - out.print("

" + filename + "

"); - } - out.print("Chunk size to view (in bytes, up to file's DFS block size): "); - out.print(""); - out.print("  
"); - out.print(""); - out.print(""); - if (!noLink) - out.print(""); - - //fetch the block from the datanode that has the last block for this file - final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(), JspHelper.conf); - List blocks = - dfs.namenode.getBlockLocations(filename, 0, Long.MAX_VALUE).getLocatedBlocks(); - if (blocks == null || blocks.size() == 0) { - out.print("No datanodes contain blocks of file "+filename); - dfs.close(); - return; - } - LocatedBlock lastBlk = blocks.get(blocks.size() - 1); - long blockSize = lastBlk.getBlock().getNumBytes(); - long blockId = lastBlk.getBlock().getBlockId(); - AccessToken accessToken = lastBlk.getAccessToken(); - long genStamp = lastBlk.getBlock().getGenerationStamp(); - DatanodeInfo chosenNode; - try { - chosenNode = JspHelper.bestNode(lastBlk); - } catch (IOException e) { - out.print(e.toString()); - dfs.close(); - return; - } - InetSocketAddress addr = NetUtils.createSocketAddr(chosenNode.getName()); - //view the last chunkSizeToView bytes while Tailing - final long startOffset = blockSize >= chunkSizeToView? blockSize - chunkSizeToView: 0; - - out.print(""); - dfs.close(); - } - -%> - - - <%JspHelper.createTitle(out, request, request.getParameter("filename")); %>
-<% - generateFileChunks(out,request); -%> +<% DatanodeJspHelper.generateFileChunksForTail(out,request); %>
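After this commit the datanode pages are little more than delegation shims. Since DatanodeJspHelper is declared package-private, the pages must be compiled into the same org.apache.hadoop.hdfs.server.datanode package for the unqualified calls above to resolve. Roughly what tail.jsp reduces to — a sketch with page directives abbreviated, not the committed text:

    <%@ page contentType="text/html; charset=UTF-8"
      import="org.apache.hadoop.hdfs.server.common.JspHelper"
    %>
    <html>
    <head><% JspHelper.createTitle(out, request, request.getParameter("filename")); %></head>
    <body>
    <% DatanodeJspHelper.generateFileChunksForTail(out, request); %>
    </body>
    </html>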