Subject: svn commit: r1530126 - in /hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/org/apache/hadoop/hdfs/server/common/ src/main/java/org/apache/hadoop/hdfs/server/datanode/ src/main/java/org/apache/hadoop/hdfs/server/namen...
Date: Tue, 08 Oct 2013 02:01:35 -0000
From: acmurthy@apache.org
To: hdfs-commits@hadoop.apache.org

Author: acmurthy
Date: Tue Oct 8 02:01:34 2013
New Revision: 1530126

URL: http://svn.apache.org/r1530126
Log:
HDFS-5307. Merging change r1530033 from branch-2

Modified:
    hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
    hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
    hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ClusterJspHelper.java
    hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java
    hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDatanodeJsp.java

Modified: hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1530126&r1=1530125&r2=1530126&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Tue Oct 8 02:01:34 2013
@@ -1,3 +1,4 @@
+
 Hadoop HDFS Change Log
 
 Release 2.2.0 - 2013-10-13
@@ -64,6 +65,9 @@ Release 2.2.0 - 2013-10-13
     HDFS-5259. Support client which combines appended data with old data
     before sending it to the NFS server. (brandonli)
 
+    HDFS-5307. Support both HTTP and HTTPS in jsp pages (Haohui Mai via
+    brandonli)
+
     HDFS-5291. Standby namenode after transition to active goes into safemode.
     (jing9)

Modified: hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java?rev=1530126&r1=1530125&r2=1530126&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java (original)
+++ hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java Tue Oct 8 02:01:34 2013
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hdfs.server.common;
 
 import com.google.common.base.Charsets;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -55,6 +56,7 @@ import org.apache.hadoop.util.VersionInf
 import javax.servlet.ServletContext;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.jsp.JspWriter;
+
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
@@ -101,7 +103,7 @@ public class JspHelper {
       return super.hashCode();
     }
   }
-  
+
   // compare two records based on their frequency
   private static class NodeRecordComparator implements Comparator<NodeRecord> {
 
@@ -115,6 +117,27 @@ public class JspHelper {
       return 0;
     }
   }
+
+  /**
+   * A helper class that generates the correct URL for different schemes.
+   *
+   */
+  public static final class Url {
+    public static String authority(String scheme, DatanodeID d) {
+      if (scheme.equals("http")) {
+        return d.getInfoAddr();
+      } else if (scheme.equals("https")) {
+        return d.getInfoSecureAddr();
+      } else {
+        throw new IllegalArgumentException("Unknown scheme:" + scheme);
+      }
+    }
+
+    public static String url(String scheme, DatanodeID d) {
+      return scheme + "://" + authority(scheme, d);
+    }
+  }
+
   public static DatanodeInfo bestNode(LocatedBlocks blks, Configuration conf)
       throws IOException {
     HashMap<DatanodeInfo, NodeRecord> map =
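The new JspHelper.Url class dispatches purely on the request scheme: "http" selects the datanode's plain info address and "https" its secure info address, so a JSP page can emit links that match however the page itself was served. A minimal standalone sketch of that dispatch, not part of this commit: the host:port values below are hypothetical stand-ins for DatanodeID.getInfoAddr() and DatanodeID.getInfoSecureAddr().

    // Standalone sketch of the scheme dispatch in JspHelper.Url above.
    // The addresses are hypothetical stand-ins for the DatanodeID getters.
    public class UrlSketch {
      static String authority(String scheme, String infoAddr, String infoSecureAddr) {
        if (scheme.equals("http")) {
          return infoAddr;        // plain info server, host:port
        } else if (scheme.equals("https")) {
          return infoSecureAddr;  // secure info server, host:port
        } else {
          throw new IllegalArgumentException("Unknown scheme:" + scheme);
        }
      }

      static String url(String scheme, String infoAddr, String infoSecureAddr) {
        return scheme + "://" + authority(scheme, infoAddr, infoSecureAddr);
      }

      public static void main(String[] args) {
        // A JSP handler would pass request.getScheme() here, so generated
        // links keep the scheme the page was served over.
        System.out.println(url("http", "dn1.example.com:50075", "dn1.example.com:50475"));
        System.out.println(url("https", "dn1.example.com:50075", "dn1.example.com:50475"));
      }
    }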
Modified: hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java?rev=1530126&r1=1530125&r2=1530126&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java (original)
+++ hadoop/common/branches/branch-2.2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java Tue Oct 8 02:01:34 2013
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.server.datanode;
 
 import java.io.File;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.URL;
@@ -34,6 +35,7 @@ import javax.servlet.jsp.JspWriter;
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.UnresolvedLinkException;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -44,15 +46,21 @@ import org.apache.hadoop.hdfs.security.t
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.http.HtmlQuoting;
-import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.ServletUtil;
 import org.apache.hadoop.util.StringUtils;
 
+import com.google.common.base.Predicate;
+import com.google.common.collect.Iterables;
+
 @InterfaceAudience.Private
 public class DatanodeJspHelper {
+  private static final int PREV_BLOCK = -1;
+  private static final int NEXT_BLOCK = 1;
+
   private static DFSClient getDFSClient(final UserGroupInformation user,
                                         final String addr,
                                         final Configuration conf
@@ -135,10 +143,10 @@ public class DatanodeJspHelper {
         out.print("Empty file");
       } else {
         DatanodeInfo chosenNode = JspHelper.bestNode(firstBlock, conf);
-        String fqdn = canonicalize(chosenNode.getIpAddr());
         int datanodePort = chosenNode.getXferPort();
-        String redirectLocation = HttpConfig.getSchemePrefix() + fqdn + ":"
-            + chosenNode.getInfoPort() + "/browseBlock.jsp?blockId="
+        String redirectLocation = JspHelper.Url.url(req.getScheme(),
+            chosenNode)
+            + "/browseBlock.jsp?blockId="
             + firstBlock.getBlock().getBlockId() + "&blockSize="
             + firstBlock.getBlock().getNumBytes() + "&genstamp="
             + firstBlock.getBlock().getGenerationStamp() + "&filename="
@@ -310,8 +318,8 @@ public class DatanodeJspHelper {
       dfs.close();
       return;
     }
-    String fqdn = canonicalize(chosenNode.getIpAddr());
-    String tailUrl = "///" + fqdn + ":" + chosenNode.getInfoPort()
+    String tailUrl = "///" + JspHelper.Url.authority(req.getScheme(), chosenNode)
         + "/tail.jsp?filename=" + URLEncoder.encode(filename, "UTF-8")
         + "&namenodeInfoPort=" + namenodeInfoPort
         + "&chunkSizeToView=" + chunkSizeToView
@@ -359,8 +367,7 @@ public class DatanodeJspHelper {
       for (int j = 0; j < locs.length; j++) {
         String datanodeAddr = locs[j].getXferAddr();
         datanodePort = locs[j].getXferPort();
-        fqdn = canonicalize(locs[j].getIpAddr());
-        String blockUrl = "///" + fqdn + ":" + locs[j].getInfoPort()
+        String blockUrl = "///" + JspHelper.Url.authority(req.getScheme(), locs[j])
             + "/browseBlock.jsp?blockId=" + blockidstring
             + "&blockSize=" + blockSize
             + "&filename=" + URLEncoder.encode(filename, "UTF-8")
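Each rewritten link couples the scheme-aware authority with the existing query parameters (filename, ports, chunk size, delegation token), URL-encoding the filename. A standalone sketch of that assembly, not part of this commit: the authority and values are hypothetical, and only the parameter names are taken from the hunks above.

    import java.io.UnsupportedEncodingException;
    import java.net.URLEncoder;

    // Sketch of how the JSP helpers above assemble a link: scheme-derived
    // authority plus URL-encoded query parameters. All values hypothetical.
    public class TailUrlSketch {
      public static void main(String[] args) throws UnsupportedEncodingException {
        String scheme = "https";                    // request.getScheme()
        String authority = "dn1.example.com:50475"; // JspHelper.Url.authority(...)
        String filename = "/user/alice/logs/app 01.log";

        String tailUrl = scheme + "://" + authority
            + "/tail.jsp?filename=" + URLEncoder.encode(filename, "UTF-8")
            + "&namenodeInfoPort=" + 50070
            + "&chunkSizeToView=" + 32768;

        // The space in the path survives as %20, keeping the query intact.
        System.out.println(tailUrl);
      }
    }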
"); out.print("
"); - // Determine the prev & next blocks - long nextStartOffset = 0; - long nextBlockSize = 0; - String nextBlockIdStr = null; - String nextGenStamp = null; - String nextHost = req.getServerName(); - int nextPort = req.getServerPort(); - int nextDatanodePort = datanodePort; - // determine data for the next link - if (startOffset + chunkSizeToView >= blockSize) { - // we have to go to the next block from this point onwards - List blocks = dfs.getNamenode().getBlockLocations(filename, 0, - Long.MAX_VALUE).getLocatedBlocks(); - for (int i = 0; i < blocks.size(); i++) { - if (blocks.get(i).getBlock().getBlockId() == blockId) { - if (i != blocks.size() - 1) { - LocatedBlock nextBlock = blocks.get(i + 1); - nextBlockIdStr = Long.toString(nextBlock.getBlock().getBlockId()); - nextGenStamp = Long.toString(nextBlock.getBlock() - .getGenerationStamp()); - nextStartOffset = 0; - nextBlockSize = nextBlock.getBlock().getNumBytes(); - DatanodeInfo d = JspHelper.bestNode(nextBlock, conf); - nextDatanodePort = d.getXferPort(); - nextHost = d.getIpAddr(); - nextPort = d.getInfoPort(); - } - } - } - } else { - // we are in the same block - nextBlockIdStr = blockId.toString(); - nextStartOffset = startOffset + chunkSizeToView; - nextBlockSize = blockSize; - nextGenStamp = genStamp.toString(); - } - String nextUrl = null; - if (nextBlockIdStr != null) { - nextUrl = "///" + canonicalize(nextHost) + ":" + nextPort - + "/browseBlock.jsp?blockId=" + nextBlockIdStr - + "&blockSize=" + nextBlockSize - + "&startOffset=" + nextStartOffset - + "&genstamp=" + nextGenStamp - + "&filename=" + URLEncoder.encode(filename, "UTF-8") - + "&chunkSizeToView=" + chunkSizeToView - + "&datanodePort=" + nextDatanodePort - + "&namenodeInfoPort=" + namenodeInfoPort - + JspHelper.getDelegationTokenUrlParam(tokenString) - + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr); + String authority = req.getServerName() + ":" + req.getServerPort(); + String nextUrl = generateLinksForAdjacentBlock(NEXT_BLOCK, authority, + datanodePort, startOffset, chunkSizeToView, blockSize, blockId, + genStamp, dfs, filename, conf, req.getScheme(), tokenString, + namenodeInfoPort, nnAddr); + if (nextUrl != null) { out.print("View Next chunk  "); } - // determine data for the prev link - String prevBlockIdStr = null; - String prevGenStamp = null; - long prevStartOffset = 0; - long prevBlockSize = 0; - String prevHost = req.getServerName(); - int prevPort = req.getServerPort(); - int prevDatanodePort = datanodePort; - if (startOffset == 0) { - List blocks = dfs.getNamenode().getBlockLocations(filename, 0, - Long.MAX_VALUE).getLocatedBlocks(); - for (int i = 0; i < blocks.size(); i++) { - if (blocks.get(i).getBlock().getBlockId() == blockId) { - if (i != 0) { - LocatedBlock prevBlock = blocks.get(i - 1); - prevBlockIdStr = Long.toString(prevBlock.getBlock().getBlockId()); - prevGenStamp = Long.toString(prevBlock.getBlock() - .getGenerationStamp()); - prevStartOffset = prevBlock.getBlock().getNumBytes() - - chunkSizeToView; - if (prevStartOffset < 0) - prevStartOffset = 0; - prevBlockSize = prevBlock.getBlock().getNumBytes(); - DatanodeInfo d = JspHelper.bestNode(prevBlock, conf); - prevDatanodePort = d.getXferPort(); - prevHost = d.getIpAddr(); - prevPort = d.getInfoPort(); - } - } - } - } else { - // we are in the same block - prevBlockIdStr = blockId.toString(); - prevStartOffset = startOffset - chunkSizeToView; - if (prevStartOffset < 0) - prevStartOffset = 0; - prevBlockSize = blockSize; - prevGenStamp = genStamp.toString(); - } - String prevUrl = 
-    String prevUrl = null;
-    if (prevBlockIdStr != null) {
-      prevUrl = "///" + canonicalize(prevHost) + ":" + prevPort
-          + "/browseBlock.jsp?blockId=" + prevBlockIdStr
-          + "&blockSize=" + prevBlockSize
-          + "&startOffset=" + prevStartOffset
-          + "&filename=" + URLEncoder.encode(filename, "UTF-8")
-          + "&chunkSizeToView=" + chunkSizeToView
-          + "&genstamp=" + prevGenStamp
-          + "&datanodePort=" + prevDatanodePort
-          + "&namenodeInfoPort=" + namenodeInfoPort
-          + JspHelper.getDelegationTokenUrlParam(tokenString)
-          + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr);
+    String prevUrl = generateLinksForAdjacentBlock(PREV_BLOCK, authority,
+        datanodePort, startOffset, chunkSizeToView, blockSize, blockId,
+        genStamp, dfs, filename, conf, req.getScheme(), tokenString,
+        namenodeInfoPort, nnAddr);
+    if (prevUrl != null) {
       out.print("<a href=\"" + prevUrl + "\">View Prev chunk</a>&nbsp;&nbsp;");
     }
+
     out.print("<hr>");
     out.print("
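The excerpt ends before the body of generateLinksForAdjacentBlock, the new helper that replaces the two near-duplicate prev/next blocks deleted above. As a rough, hypothetical sketch of the idea only (heavily simplified: the real helper resolves blocks and datanodes through DFSClient and threads the token and port parameters into the URL), a direction parameter lets one method compute the adjacent chunk position for both links:

    import java.util.List;

    // Hypothetical, simplified illustration of a direction-parameterized
    // helper in the spirit of generateLinksForAdjacentBlock(PREV_BLOCK or
    // NEXT_BLOCK, ...). Blocks are modeled as a list of sizes; the result
    // encodes "blockIndex:startOffset", or null at the file boundary.
    public class AdjacentChunkSketch {
      static final int PREV_BLOCK = -1;
      static final int NEXT_BLOCK = 1;

      static String adjacentChunk(int direction, List<Long> blockSizes,
          int blockIndex, long startOffset, long chunkSizeToView) {
        long blockSize = blockSizes.get(blockIndex);
        if (direction == NEXT_BLOCK) {
          if (startOffset + chunkSizeToView < blockSize) {
            // Still inside the current block: just advance the offset.
            return blockIndex + ":" + (startOffset + chunkSizeToView);
          }
          if (blockIndex == blockSizes.size() - 1) {
            return null;                    // already at the last block
          }
          return (blockIndex + 1) + ":0";   // first chunk of the next block
        } else if (direction == PREV_BLOCK) {
          if (startOffset > 0) {
            return blockIndex + ":" + Math.max(0, startOffset - chunkSizeToView);
          }
          if (blockIndex == 0) {
            return null;                    // already at the first block
          }
          long prevSize = blockSizes.get(blockIndex - 1);
          return (blockIndex - 1) + ":" + Math.max(0, prevSize - chunkSizeToView);
        }
        throw new IllegalArgumentException("Unknown direction: " + direction);
      }

      public static void main(String[] args) {
        List<Long> sizes = List.of(128L, 128L, 64L);
        System.out.println(adjacentChunk(NEXT_BLOCK, sizes, 0, 96, 32)); // 1:0
        System.out.println(adjacentChunk(PREV_BLOCK, sizes, 1, 0, 32));  // 0:96
        System.out.println(adjacentChunk(PREV_BLOCK, sizes, 0, 0, 32));  // null
      }
    }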