To: hdfs-commits@hadoop.apache.org
From: jitendra@apache.org
Reply-To: hdfs-dev@hadoop.apache.org
Mailing-List: contact hdfs-commits-help@hadoop.apache.org; run by ezmlm
Subject: svn commit: r1241766 - in /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/org/apache/hadoop/hdfs/ src/main/java/org/apache/hadoop/hdfs/server/common/ src/main/java/org/apache/hadoop/hdfs/server/datanode/ src/main/java/org/apache/...
Date: Wed, 08 Feb 2012 03:55:36 -0000
Message-Id: <20120208035536.CFD3A23888E7@eris.apache.org>
X-Mailer: svnmailer-1.0.8-patched
Content-Type: text/plain; charset="utf-8"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit

Author: jitendra
Date: Wed Feb 8 03:55:36 2012
New Revision: 1241766

URL: http://svn.apache.org/viewvc?rev=1241766&view=rev
Log:
HDFS-2786. Fix host-based token incompatibilities in DFSUtil. Contributed by Kihwal Lee.

Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1241766&r1=1241765&r2=1241766&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Wed Feb 8 03:55:36 2012
@@ -123,6 +123,9 @@ Trunk (unreleased changes)
    HDFS-2895. Remove Writable wire protocol types and translators to complete transition to protocol buffers. (suresh)

+    HDFS-2786. Fix host-based token incompatibilities in DFSUtil. (Kihwal Lee
+    via jitendra)
+
  OPTIMIZATIONS

    HDFS-2477. Optimize computing the diff between a block report and the
    namenode state. (Tomasz Nykiel via hairong)

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java?rev=1241766&r1=1241765&r2=1241766&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java Wed Feb 8 03:55:36 2012
@@ -600,19 +600,6 @@ public class DFSUtil {
  }

  /**
-   * @param address address of format host:port
-   * @return InetSocketAddress for the address
-   */
-  public static InetSocketAddress getSocketAddress(String address) {
-    int colon = address.indexOf(":");
-    if (colon < 0) {
-      return new InetSocketAddress(address, 0);
-    }
-    return new InetSocketAddress(address.substring(0, colon),
-        Integer.parseInt(address.substring(colon + 1)));
-  }
-
-  /**
   * Round bytes to GiB (gibibyte)
   * @param bytes number of bytes
   * @return number of GiB
@@ -767,4 +754,4 @@ public class DFSUtil {
    RPC.setProtocolEngine(conf, protocol, ProtobufRpcEngine.class);
    server.addProtocol(RpcKind.RPC_PROTOCOL_BUFFER, protocol, service);
  }
-}
\ No newline at end of file
+}

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java?rev=1241766&r1=1241765&r2=1241766&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java Wed Feb 8 03:55:36 2012
@@ -498,7 +498,7 @@ public class JspHelper {
    String namenodeAddressInUrl = request.getParameter(NAMENODE_ADDRESS);
    InetSocketAddress namenodeAddress = null;
    if (namenodeAddressInUrl != null) {
-      namenodeAddress = DFSUtil.getSocketAddress(namenodeAddressInUrl);
+      namenodeAddress = NetUtils.createSocketAddr(namenodeAddressInUrl);
    } else if (context != null) {
      namenodeAddress = NameNodeHttpServer.getNameNodeAddressFromContext(
          context);
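For context: the removed DFSUtil.getSocketAddress() split the string at the first ':' and fell back to port 0, while NetUtils.createSocketAddr() in hadoop-common accepts a caller-supplied default port and is the shared address parser, which is presumably what keeps host resolution consistent for the host-based token handling named in the log message. A minimal illustration of the replacement call, not part of the commit; the class name and the host nn.example.com are hypothetical:

    import java.net.InetSocketAddress;
    import org.apache.hadoop.net.NetUtils;

    public class CreateSocketAddrSketch {
      public static void main(String[] args) {
        // "host:port" form, as carried in the NAMENODE_ADDRESS request parameter.
        InetSocketAddress withPort = NetUtils.createSocketAddr("nn.example.com:8020");
        // Host-only form; the caller chooses the default port instead of the
        // hard-coded 0 used by the removed DFSUtil.getSocketAddress().
        InetSocketAddress hostOnly = NetUtils.createSocketAddr("nn.example.com", 8020);
        System.out.println(withPort + " " + hostOnly);
      }
    }

The two-argument form is the one the new canonicalize() helper in DatanodeJspHelper (below) builds on.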
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java?rev=1241766&r1=1241765&r2=1241766&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java Wed Feb 8 03:55:36 2012
@@ -53,18 +53,30 @@ import org.apache.hadoop.util.StringUtil
 @InterfaceAudience.Private
 public class DatanodeJspHelper {
   private static DFSClient getDFSClient(final UserGroupInformation user,
-                                        final InetSocketAddress addr,
+                                        final String addr,
                                         final Configuration conf
                                         ) throws IOException,
                                         InterruptedException {
     return
       user.doAs(new PrivilegedExceptionAction<DFSClient>() {
         public DFSClient run() throws IOException {
-          return new DFSClient(addr, conf);
+          return new DFSClient(NetUtils.createSocketAddr(addr), conf);
         }
       });
   }

+  /**
+   * Internal convenience method for canonicalizing host name.
+   * @param addr name:port or name
+   * @return canonicalized host name
+   */
+  private static String canonicalize(String addr) {
+    // default port 1 is supplied to allow addr without port.
+    // the port will be ignored.
+    return NetUtils.createSocketAddr(addr, 1).getAddress()
+        .getCanonicalHostName();
+  }
+
   private static final SimpleDateFormat lsDateFormat =
     new SimpleDateFormat("yyyy-MM-dd HH:mm");
@@ -102,8 +114,7 @@ public class DatanodeJspHelper {
       return;
     }

-    InetSocketAddress namenodeAddress = DFSUtil.getSocketAddress(nnAddr);
-    DFSClient dfs = getDFSClient(ugi, namenodeAddress, conf);
+    DFSClient dfs = getDFSClient(ugi, nnAddr, conf);
     String target = dir;
     final HdfsFileStatus targetStatus = dfs.getFileInfo(target);
     if (targetStatus == null) { // not exists
@@ -125,8 +136,7 @@ public class DatanodeJspHelper {
         out.print("Empty file");
       } else {
         DatanodeInfo chosenNode = JspHelper.bestNode(firstBlock, conf);
-        String fqdn = InetAddress.getByName(chosenNode.getHost())
-            .getCanonicalHostName();
+        String fqdn = canonicalize(chosenNode.getHost());
         String datanodeAddr = chosenNode.getName();
         int datanodePort = Integer.parseInt(datanodeAddr.substring(
             datanodeAddr.indexOf(':') + 1, datanodeAddr.length()));
@@ -210,9 +220,8 @@ public class DatanodeJspHelper {
         JspHelper.addTableFooter(out);
       }
     }
-    String namenodeHost = namenodeAddress.getHostName();
     out.print("
         Go back to DFS home");
     dfs.close();
   }
@@ -282,8 +291,7 @@ public class DatanodeJspHelper {
     }
     long blockSize = Long.parseLong(blockSizeStr);

-    final InetSocketAddress namenodeAddress = DFSUtil.getSocketAddress(nnAddr);
-    final DFSClient dfs = getDFSClient(ugi, namenodeAddress, conf);
+    final DFSClient dfs = getDFSClient(ugi, nnAddr, conf);
     List<LocatedBlock> blocks = dfs.getNamenode().getBlockLocations(filename, 0,
         Long.MAX_VALUE).getLocatedBlocks();
     // Add the various links for looking at the file contents
@@ -305,8 +313,7 @@ public class DatanodeJspHelper {
       dfs.close();
       return;
     }
-    String fqdn = InetAddress.getByName(chosenNode.getHost())
-        .getCanonicalHostName();
+    String fqdn = canonicalize(chosenNode.getHost());
     String tailUrl = "http://" + fqdn + ":" + chosenNode.getInfoPort()
         + "/tail.jsp?filename=" + URLEncoder.encode(filename, "UTF-8")
         + "&namenodeInfoPort=" + namenodeInfoPort
@@ -345,9 +352,7 @@ public class DatanodeJspHelper {
     // generate a table and dump the info
     out.println("\n");

-    String namenodeHost = namenodeAddress.getHostName();
-    String namenodeHostName = InetAddress.getByName(namenodeHost).getCanonicalHostName();
-
+    String nnCanonicalName = canonicalize(nnAddr);
     for (LocatedBlock cur : blocks) {
       out.print("");
       final String blockidstring = Long.toString(cur.getBlock().getBlockId());
@@ -358,7 +363,7 @@ public class DatanodeJspHelper {
         String datanodeAddr = locs[j].getName();
         datanodePort = Integer.parseInt(datanodeAddr.substring(datanodeAddr
             .indexOf(':') + 1, datanodeAddr.length()));
-        fqdn = InetAddress.getByName(locs[j].getHost()).getCanonicalHostName();
+        fqdn = canonicalize(locs[j].getHost());
         String blockUrl = "http://" + fqdn + ":" + locs[j].getInfoPort()
             + "/browseBlock.jsp?blockId=" + blockidstring
             + "&blockSize=" + blockSize
@@ -370,7 +375,7 @@ public class DatanodeJspHelper {
             + JspHelper.getDelegationTokenUrlParam(tokenString)
             + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr);

-        String blockInfoUrl = "http://" + namenodeHostName + ":"
+        String blockInfoUrl = "http://" + nnCanonicalName + ":"
             + namenodeInfoPort
             + "/block_info_xml.jsp?blockId=" + blockidstring;
         out.print("
             "
@@ -382,7 +387,7 @@ public class DatanodeJspHelper {
     out.println("
         ");
     out.print("
         ");
     out.print("
         Go back to DFS home");
     dfs.close();
   }
@@ -419,8 +424,7 @@ public class DatanodeJspHelper {
       return;
     }

-    final DFSClient dfs = getDFSClient(ugi,
-        DFSUtil.getSocketAddress(nnAddr), conf);
+    final DFSClient dfs = getDFSClient(ugi, nnAddr, conf);
     String bpid = null;
     Token<BlockTokenIdentifier> blockToken = BlockTokenSecretManager.DUMMY_TOKEN;
@@ -518,8 +522,7 @@ public class DatanodeJspHelper {
           String datanodeAddr = d.getName();
           nextDatanodePort = Integer.parseInt(datanodeAddr.substring(
               datanodeAddr.indexOf(':') + 1, datanodeAddr.length()));
-          nextHost = InetAddress.getByName(d.getHost())
-              .getCanonicalHostName();
+          nextHost = d.getHost();
           nextPort = d.getInfoPort();
         }
       }
@@ -533,7 +536,7 @@ public class DatanodeJspHelper {
     }
     String nextUrl = null;
     if (nextBlockIdStr != null) {
-      nextUrl = "http://" + nextHost + ":" + nextPort
+      nextUrl = "http://" + canonicalize(nextHost) + ":" + nextPort
           + "/browseBlock.jsp?blockId=" + nextBlockIdStr
           + "&blockSize=" + nextBlockSize
           + "&startOffset=" + nextStartOffset
@@ -573,8 +576,7 @@ public class DatanodeJspHelper {
           String datanodeAddr = d.getName();
           prevDatanodePort = Integer.parseInt(datanodeAddr.substring(
               datanodeAddr.indexOf(':') + 1, datanodeAddr.length()));
-          prevHost = InetAddress.getByName(d.getHost())
-              .getCanonicalHostName();
+          prevHost = d.getHost();
           prevPort = d.getInfoPort();
         }
       }
@@ -591,7 +593,7 @@ public class DatanodeJspHelper {

     String prevUrl = null;
     if (prevBlockIdStr != null) {
-      prevUrl = "http://" + prevHost + ":" + prevPort
+      prevUrl = "http://" + canonicalize(prevHost) + ":" + prevPort
           + "/browseBlock.jsp?blockId=" + prevBlockIdStr
           + "&blockSize=" + prevBlockSize
           + "&startOffset=" + prevStartOffset
@@ -669,8 +671,7 @@ public class DatanodeJspHelper {
         + "\">");

     // fetch the block from the datanode that has the last block for this file
-    final DFSClient dfs = getDFSClient(ugi, DFSUtil.getSocketAddress(nnAddr),
-        conf);
+    final DFSClient dfs = getDFSClient(ugi, nnAddr, conf);
     List<LocatedBlock> blocks = dfs.getNamenode().getBlockLocations(filename, 0,
         Long.MAX_VALUE).getLocatedBlocks();
     if (blocks == null || blocks.size() == 0) {
@@ -710,6 +711,6 @@ public class DatanodeJspHelper {
       final DataNode datanode, final Configuration conf,
       final UserGroupInformation ugi) throws IOException, InterruptedException {
     final String nnAddr = request.getParameter(JspHelper.NAMENODE_ADDRESS);
-    return getDFSClient(ugi, DFSUtil.getSocketAddress(nnAddr), conf);
+    return getDFSClient(ugi, nnAddr, conf);
   }
 }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java?rev=1241766&r1=1241765&r2=1241766&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java Wed Feb 8 03:55:36 2012
@@ -1127,7 +1127,7 @@ public class DFSAdmin extends FsShell {

   private ClientDatanodeProtocol getDataNodeProxy(String datanode)
       throws IOException {
-    InetSocketAddress datanodeAddr = DFSUtil.getSocketAddress(datanode);
+    InetSocketAddress datanodeAddr = NetUtils.createSocketAddr(datanode);
    // Get the current configuration
    Configuration conf = getConf();
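Taken together, the DatanodeJspHelper changes pass the NameNode address around as a plain name:port string and canonicalize it only where a fully qualified host name is needed to build a link; the next/prev datanode hosts are likewise kept raw (d.getHost()) and canonicalized at URL construction time. A minimal sketch of how the new canonicalize() helper behaves, not part of the commit; the class name is hypothetical and nn.example.com must be resolvable for getCanonicalHostName() to succeed:

    import org.apache.hadoop.net.NetUtils;

    public class CanonicalizeSketch {
      // Mirrors the private canonicalize() helper added to DatanodeJspHelper:
      // the dummy default port 1 lets "name" and "name:port" share one code path,
      // and the port is ignored afterwards.
      static String canonicalize(String addr) {
        return NetUtils.createSocketAddr(addr, 1).getAddress()
            .getCanonicalHostName();
      }

      public static void main(String[] args) {
        // Both forms yield the same canonical host name; an unresolvable name
        // would make getAddress() return null and this would throw an NPE.
        System.out.println(canonicalize("nn.example.com"));
        System.out.println(canonicalize("nn.example.com:8020"));
      }
    }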