hadoop-hdfs-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From omal...@apache.org
Subject svn commit: r916873 - in /hadoop/hdfs/trunk: ./ src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ src/java/org/apache/hadoop/hdfs/server/common/ src/java/org/apache/hadoop/hdfs/server/datanode/ src/java/org/apache/hadoop/hdfs/server/namenode/...
Date Fri, 26 Feb 2010 23:58:10 GMT
Author: omalley
Date: Fri Feb 26 23:58:09 2010
New Revision: 916873

URL: http://svn.apache.org/viewvc?rev=916873&view=rev
Log:
HDFS-991. Allow authentication to the web ui via a delegation token. 
(omalley)

Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java
    hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FsckServlet.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
    hadoop/hdfs/trunk/src/test/findbugsExcludeFile.xml
    hadoop/hdfs/trunk/src/webapps/datanode/browseBlock.jsp
    hadoop/hdfs/trunk/src/webapps/datanode/browseDirectory.jsp
    hadoop/hdfs/trunk/src/webapps/datanode/tail.jsp
    hadoop/hdfs/trunk/src/webapps/hdfs/nn_browsedfscontent.jsp

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Fri Feb 26 23:58:09 2010
@@ -77,6 +77,9 @@
 
     HADOOP-6579. Upgrade the commons-codec library to 1.4. (omalley)
 
+    HDFS-991. Allow authentication to the web ui via a delegation token. 
+    (omalley)
+
   OPTIMIZATIONS
 
     HDFS-946. NameNode should not return full path name when listing a

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyFileDataServlet.java Fri Feb 26 23:58:09 2010
@@ -25,6 +25,7 @@
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.server.namenode.FileDataServlet;
@@ -58,7 +59,8 @@
 
   /** {@inheritDoc} */
   @Override
-  protected UserGroupInformation getUGI(HttpServletRequest request) {
+  protected UserGroupInformation getUGI(HttpServletRequest request,
+                                        Configuration conf) {
     String userID = (String) request
         .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
     return UserGroupInformation.createRemoteUser(userID);

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyListPathsServlet.java Fri Feb 26 23:58:09 2010
@@ -42,7 +42,8 @@
 
   /** {@inheritDoc} */
   @Override
-  protected UserGroupInformation getUGI(HttpServletRequest request) {
+  protected UserGroupInformation getUGI(HttpServletRequest request,
+                                        Configuration conf) {
     String userID = (String) request
         .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
     return UserGroupInformation.createRemoteUser(userID);

Modified: hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java (original)
+++ hadoop/hdfs/trunk/src/contrib/hdfsproxy/src/java/org/apache/hadoop/hdfsproxy/ProxyStreamFile.java Fri Feb 26 23:58:09 2010
@@ -50,13 +50,13 @@
   protected DFSClient getDFSClient(HttpServletRequest request)
       throws IOException, InterruptedException {
     ServletContext context = getServletContext();
-    final Configuration conf = new HdfsConfiguration((Configuration) context
-        .getAttribute("name.conf"));
+    final Configuration conf = 
+      (Configuration) context.getAttribute("name.conf");
     final InetSocketAddress nameNodeAddr = (InetSocketAddress) context
         .getAttribute("name.node.address");
     
-    DFSClient client = 
-              getUGI(request).doAs(new PrivilegedExceptionAction<DFSClient>() {
+    DFSClient client = getUGI(request, conf).doAs
+      ( new PrivilegedExceptionAction<DFSClient>() {
       @Override
       public DFSClient run() throws IOException {
         return new DFSClient(nameNodeAddr, conf);
@@ -68,7 +68,8 @@
 
   /** {@inheritDoc} */
   @Override
-  protected UserGroupInformation getUGI(HttpServletRequest request) {
+  protected UserGroupInformation getUGI(HttpServletRequest request,
+                                        Configuration conf) {
     String userID = (String) request
         .getAttribute("org.apache.hadoop.hdfsproxy.authorized.userID");
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/JspHelper.java?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/JspHelper.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/common/JspHelper.java Fri Feb 26 23:58:09 2010
@@ -33,24 +33,33 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.jsp.JspWriter;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.BlockReader;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.security.BlockAccessToken;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.hdfs.server.datanode.DatanodeJspHelper;
 import org.apache.hadoop.hdfs.server.namenode.DatanodeDescriptor;
+import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.VersionInfo;
 
+@InterfaceAudience.Private
 public class JspHelper {
   final static public String WEB_UGI_PROPERTY_NAME = "dfs.web.ugi";
+  public static final String DELEGATION_PARAMETER_NAME = "delegation";
+  public static final String SET_DELEGATION = "&" + DELEGATION_PARAMETER_NAME +
+                                              "=";
+  private static final Log LOG = LogFactory.getLog(JspHelper.class);
 
-  public static final Configuration conf = new HdfsConfiguration();
-  
-  private static final int defaultChunkSizeToView = 
-    conf.getInt("dfs.default.chunk.view.size", 32 * 1024);
   static final Random rand = new Random();
 
   /** Private constructor for preventing creating JspHelper object. */
@@ -97,8 +106,11 @@
   }
 
   public static void streamBlockInAscii(InetSocketAddress addr, long blockId, 
-                                 BlockAccessToken accessToken, long genStamp, long blockSize, 
-                                 long offsetIntoBlock, long chunkSizeToView, JspWriter out) 
+                                 BlockAccessToken accessToken, long genStamp, 
+                                 long blockSize, 
+                                 long offsetIntoBlock, long chunkSizeToView, 
+                                 JspWriter out,
+                                 Configuration conf) 
     throws IOException {
     if (chunkSizeToView == 0) return;
     Socket s = new Socket();
@@ -261,7 +273,10 @@
     Collections.sort(nodes, new NodeComapare(field, order));
   }
 
-  public static void printPathWithLinks(String dir, JspWriter out, int namenodeInfoPort ) throws IOException {
+  public static void printPathWithLinks(String dir, JspWriter out, 
+                                        int namenodeInfoPort,
+                                        String tokenString
+                                        ) throws IOException {
     try {
       String[] parts = dir.split(Path.SEPARATOR);
       StringBuilder tempPath = new StringBuilder(dir.length());
@@ -273,7 +288,8 @@
         if (!parts[i].equals("")) {
           tempPath.append(parts[i]);
           out.print("<a href=\"browseDirectory.jsp" + "?dir="
-              + tempPath.toString() + "&namenodeInfoPort=" + namenodeInfoPort);
+              + tempPath.toString() + "&namenodeInfoPort=" + namenodeInfoPort
+              + SET_DELEGATION + tokenString);
           out.print("\">" + parts[i] + "</a>" + Path.SEPARATOR);
           tempPath.append(Path.SEPARATOR);
         }
@@ -287,18 +303,24 @@
     }
   }
 
-  public static void printGotoForm(JspWriter out, int namenodeInfoPort, String file) throws IOException {
+  public static void printGotoForm(JspWriter out,
+                                   int namenodeInfoPort,
+                                   String tokenString,
+                                   String file) throws IOException {
     out.print("<form action=\"browseDirectory.jsp\" method=\"get\" name=\"goto\">");
     out.print("Goto : ");
     out.print("<input name=\"dir\" type=\"text\" width=\"50\" id\"dir\" value=\""+ file+"\">");
     out.print("<input name=\"go\" type=\"submit\" value=\"go\">");
     out.print("<input name=\"namenodeInfoPort\" type=\"hidden\" "
         + "value=\"" + namenodeInfoPort  + "\">");
+    out.print("<input name=\"" + DELEGATION_PARAMETER_NAME +
+              "\" type=\"hidden\" value=\"" + tokenString + "\">");
     out.print("</form>");
   }
   
   public static void createTitle(JspWriter out, 
-      HttpServletRequest req, String  file) throws IOException{
+                                 HttpServletRequest req, 
+                                 String  file) throws IOException{
     if(file == null) file = "";
     int start = Math.max(0,file.length() - 100);
     if(start != 0)
@@ -307,9 +329,9 @@
   }
 
   /** Convert a String to chunk-size-to-view. */
-  public static int string2ChunkSizeToView(String s) {
+  public static int string2ChunkSizeToView(String s, int defaultValue) {
     int n = s == null? 0: Integer.parseInt(s);
-    return n > 0? n: defaultChunkSizeToView;
+    return n > 0? n: defaultValue;
   }
 
   /** Return a table containing version information. */
@@ -351,4 +373,62 @@
       return null;
     }
   }
+  
+  /**
+   * If security is turned off, what is the default web user?
+   * @param conf the configuration to look in
+   * @return the remote user that was configured
+   */
+  public static UserGroupInformation getDefaultWebUser(Configuration conf
+                                                       ) throws IOException {
+    String[] strings = conf.getStrings(JspHelper.WEB_UGI_PROPERTY_NAME);
+    if (strings == null || strings.length == 0) {
+      throw new IOException("Cannot determine UGI from request or conf");
+    }
+    return UserGroupInformation.createRemoteUser(strings[0]);
+  }
+
+  /**
+   * Get {@link UserGroupInformation} and possibly the delegation token out of
+   * the request.
+   * @param request the http request
+   * @return a new user from the request
+   * @throws AccessControlException if the request has no token
+   */
+  public static UserGroupInformation getUGI(HttpServletRequest request,
+                                            Configuration conf
+                                           ) throws IOException {
+    UserGroupInformation ugi = null;
+    if(UserGroupInformation.isSecurityEnabled()) {
+      String user = request.getRemoteUser();
+      String tokenString = request.getParameter(DELEGATION_PARAMETER_NAME);
+      if (tokenString != null) {
+        Token<DelegationTokenIdentifier> token = 
+          new Token<DelegationTokenIdentifier>();
+        token.decodeFromUrlString(tokenString);
+        ugi = UserGroupInformation.createRemoteUser(user);
+        ugi.addToken(token);        
+      } else {
+        if(user == null) {
+          throw new IOException("Security enabled but user not " +
+                                "authenticated by filter");
+        }
+        ugi = UserGroupInformation.createRemoteUser(user);
+      }
+    } else { // Security's not on, pull from url
+      String user = request.getParameter("ugi");
+      
+      if(user == null) { // not specified in request
+        ugi = getDefaultWebUser(conf);
+      } else {
+        ugi = UserGroupInformation.createRemoteUser(user);
+      }
+    }
+    
+    if(LOG.isDebugEnabled())
+      LOG.debug("getUGI is returning: " + ugi.getShortUserName());
+    return ugi;
+  }
+
+
 }
\ No newline at end of file

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java Fri Feb 26 23:58:09 2010
@@ -387,6 +387,7 @@
     this.infoServer.addInternalServlet(null, "/getFileChecksum/*",
         FileChecksumServlets.GetServlet.class);
     this.infoServer.setAttribute("datanode.blockScanner", blockScanner);
+    this.infoServer.setAttribute("datanode.conf", conf);
     this.infoServer.addServlet(null, "/blockScannerReport", 
                                DataBlockScanner.Servlet.class);
     this.infoServer.start();

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java Fri Feb 26 23:58:09 2010
@@ -22,6 +22,7 @@
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URLEncoder;
+import java.security.PrivilegedExceptionAction;
 import java.util.Date;
 import java.util.List;
 
@@ -29,6 +30,9 @@
 import javax.servlet.http.HttpServletResponse;
 import javax.servlet.jsp.JspWriter;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -38,31 +42,59 @@
 import org.apache.hadoop.hdfs.security.BlockAccessToken;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 
-class DatanodeJspHelper {
+@InterfaceAudience.Private
+public class DatanodeJspHelper {
   private static final DataNode datanode = DataNode.getDataNode();
 
-  static void generateDirectoryStructure(JspWriter out, HttpServletRequest req,
-      HttpServletResponse resp) throws IOException {
+  private static DFSClient getDFSClient(final UserGroupInformation user,
+                                        final InetSocketAddress addr,
+                                        final Configuration conf
+                                        ) throws IOException,
+                                                 InterruptedException {
+    return
+      user.doAs(new PrivilegedExceptionAction<DFSClient>() {
+        public DFSClient run() throws IOException {
+          return new DFSClient(addr, conf);
+        }
+      });
+  }
+
+  /**
+   * Get the default chunk size.
+   * @param conf the configuration
+   * @return the number of bytes to chunk in
+   */
+  private static int getDefaultChunkSize(Configuration conf) {
+    return conf.getInt("dfs.default.chunk.view.size", 32 * 1024);
+  }
+
+  static void generateDirectoryStructure(JspWriter out, 
+                                         HttpServletRequest req,
+                                         HttpServletResponse resp,
+                                         Configuration conf
+                                         ) throws IOException,
+                                                  InterruptedException {
     final String dir = JspHelper.validatePath(req.getParameter("dir"));
     if (dir == null) {
       out.print("Invalid input");
       return;
     }
-
+    String tokenString = req.getParameter(JspHelper.DELEGATION_PARAMETER_NAME);
+    UserGroupInformation ugi = JspHelper.getUGI(req, conf);
     String namenodeInfoPortStr = req.getParameter("namenodeInfoPort");
     int namenodeInfoPort = -1;
     if (namenodeInfoPortStr != null)
       namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);
 
-    final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(),
-        JspHelper.conf);
+    DFSClient dfs = getDFSClient(ugi, datanode.getNameNodeAddr(), conf);
     String target = dir;
     final HdfsFileStatus targetStatus = dfs.getFileInfo(target);
     if (targetStatus == null) { // not exists
       out.print("<h3>File or directory : " + target + " does not exist</h3>");
-      JspHelper.printGotoForm(out, namenodeInfoPort, target);
+      JspHelper.printGotoForm(out, namenodeInfoPort, tokenString, target);
     } else {
       if (!targetStatus.isDir()) { // a file
         List<LocatedBlock> blocks = dfs.getNamenode().getBlockLocations(dir, 0, 1)
@@ -89,7 +121,8 @@
               + firstBlock.getBlock().getNumBytes() + "&genstamp="
               + firstBlock.getBlock().getGenerationStamp() + "&filename="
               + URLEncoder.encode(dir, "UTF-8") + "&datanodePort="
-              + datanodePort + "&namenodeInfoPort=" + namenodeInfoPort;
+              + datanodePort + "&namenodeInfoPort=" + namenodeInfoPort
+              + JspHelper.SET_DELEGATION + tokenString;
           resp.sendRedirect(redirectLocation);
         }
         return;
@@ -100,9 +133,9 @@
       String[] headings = { "Name", "Type", "Size", "Replication",
           "Block Size", "Modification Time", "Permission", "Owner", "Group" };
       out.print("<h3>Contents of directory ");
-      JspHelper.printPathWithLinks(dir, out, namenodeInfoPort);
+      JspHelper.printPathWithLinks(dir, out, namenodeInfoPort, tokenString);
       out.print("</h3><hr>");
-      JspHelper.printGotoForm(out, namenodeInfoPort, dir);
+      JspHelper.printGotoForm(out, namenodeInfoPort, tokenString, dir);
       out.print("<hr>");
 
       File f = new File(dir);
@@ -110,6 +143,7 @@
       if ((parent = f.getParent()) != null)
         out.print("<a href=\"" + req.getRequestURL() + "?dir=" + parent
             + "&namenodeInfoPort=" + namenodeInfoPort
+            + JspHelper.SET_DELEGATION + tokenString
             + "\">Go to parent directory</a><br>");
 
       if (files == null || files.length == 0) {
@@ -137,7 +171,8 @@
           }
           String datanodeUrl = req.getRequestURL() + "?dir="
               + URLEncoder.encode(files[i].getFullName(target), "UTF-8")
-              + "&namenodeInfoPort=" + namenodeInfoPort;
+              + "&namenodeInfoPort=" + namenodeInfoPort
+              + JspHelper.SET_DELEGATION + tokenString;
           cols[0] = "<a href=\"" + datanodeUrl + "\">"
               + localFileName + "</a>";
           cols[5] = FsShell.dateForm.format(new Date((files[i]
@@ -157,8 +192,11 @@
     dfs.close();
   }
 
-  static void generateFileDetails(JspWriter out, HttpServletRequest req)
-      throws IOException {
+  static void generateFileDetails(JspWriter out, 
+                                  HttpServletRequest req,
+                                  Configuration conf
+                                  ) throws IOException,
+                                           InterruptedException {
 
     long startOffset = 0;
     int datanodePort;
@@ -168,6 +206,8 @@
       out.print("Invalid input (blockId absent)");
       return;
     }
+    String tokenString = req.getParameter(JspHelper.DELEGATION_PARAMETER_NAME);
+    UserGroupInformation ugi = JspHelper.getUGI(req, conf);
 
     String datanodePortStr = req.getParameter("datanodePort");
     if (datanodePortStr == null) {
@@ -182,7 +222,7 @@
       namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);
 
     final int chunkSizeToView = JspHelper.string2ChunkSizeToView(
-        req.getParameter("chunkSizeToView"));
+        req.getParameter("chunkSizeToView"), getDefaultChunkSize(conf));
 
     String startOffsetStr = req.getParameter("startOffset");
     if (startOffsetStr == null || Long.parseLong(startOffsetStr) < 0)
@@ -204,15 +244,15 @@
     }
     blockSize = Long.parseLong(blockSizeStr);
 
-    final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(),
-        JspHelper.conf);
+    final DFSClient dfs = getDFSClient(ugi, datanode.getNameNodeAddr(), conf);
     List<LocatedBlock> blocks = dfs.getNamenode().getBlockLocations(filename, 0,
         Long.MAX_VALUE).getLocatedBlocks();
     // Add the various links for looking at the file contents
     // URL for downloading the full file
     String downloadUrl = "http://" + req.getServerName() + ":"
         + req.getServerPort() + "/streamFile?" + "filename="
-        + URLEncoder.encode(filename, "UTF-8");
+        + URLEncoder.encode(filename, "UTF-8")
+        + JspHelper.SET_DELEGATION + tokenString;
     out.print("<a name=\"viewOptions\"></a>");
     out.print("<a href=\"" + downloadUrl + "\">Download this file</a><br>");
 
@@ -232,6 +272,7 @@
         + "/tail.jsp?filename=" + URLEncoder.encode(filename, "UTF-8")
         + "&namenodeInfoPort=" + namenodeInfoPort
         + "&chunkSizeToView=" + chunkSizeToView
+        + JspHelper.SET_DELEGATION + tokenString
         + "&referrer=" + URLEncoder.encode(
             req.getRequestURL() + "?" + req.getQueryString(), "UTF-8");
     out.print("<a href=\"" + tailUrl + "\">Tail this file</a><br>");
@@ -281,7 +322,8 @@
             + "&datanodePort=" + datanodePort
             + "&genstamp=" + cur.getBlock().getGenerationStamp()
             + "&namenodeInfoPort=" + namenodeInfoPort
-            + "&chunkSizeToView=" + chunkSizeToView;
+            + "&chunkSizeToView=" + chunkSizeToView
+            + JspHelper.SET_DELEGATION + tokenString;
 
         String blockInfoUrl = "http://" + namenodeHostName + ":"
             + namenodeInfoPort
@@ -300,12 +342,16 @@
     dfs.close();
   }
 
-  static void generateFileChunks(JspWriter out, HttpServletRequest req)
-      throws IOException {
+  static void generateFileChunks(JspWriter out, HttpServletRequest req,
+                                 Configuration conf
+                                 ) throws IOException,
+                                          InterruptedException {
     long startOffset = 0;
     int datanodePort = 0;
 
     String namenodeInfoPortStr = req.getParameter("namenodeInfoPort");
+    String tokenString = req.getParameter(JspHelper.DELEGATION_PARAMETER_NAME);
+    UserGroupInformation ugi = JspHelper.getUGI(req, conf);
     int namenodeInfoPort = -1;
     if (namenodeInfoPortStr != null)
       namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);
@@ -323,11 +369,10 @@
       return;
     }
 
-    final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(),
-        JspHelper.conf);
+    final DFSClient dfs = getDFSClient(ugi, datanode.getNameNodeAddr(), conf);
 
     BlockAccessToken accessToken = BlockAccessToken.DUMMY_TOKEN;
-    if (JspHelper.conf.getBoolean(
+    if (conf.getBoolean(
         DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, 
         DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_DEFAULT)) {
       List<LocatedBlock> blks = dfs.getNamenode().getBlockLocations(filename, 0,
@@ -361,7 +406,7 @@
     blockSize = Long.parseLong(blockSizeStr);
 
     final int chunkSizeToView = JspHelper.string2ChunkSizeToView(req
-        .getParameter("chunkSizeToView"));
+        .getParameter("chunkSizeToView"), getDefaultChunkSize(conf));
 
     String startOffsetStr = req.getParameter("startOffset");
     if (startOffsetStr == null || Long.parseLong(startOffsetStr) < 0)
@@ -376,15 +421,17 @@
     }
     datanodePort = Integer.parseInt(datanodePortStr);
     out.print("<h3>File: ");
-    JspHelper.printPathWithLinks(filename, out, namenodeInfoPort);
+    JspHelper.printPathWithLinks(filename, out, namenodeInfoPort,
+                                 tokenString);
     out.print("</h3><hr>");
     String parent = new File(filename).getParent();
-    JspHelper.printGotoForm(out, namenodeInfoPort, parent);
+    JspHelper.printGotoForm(out, namenodeInfoPort, tokenString, parent);
     out.print("<hr>");
     out.print("<a href=\"http://"
         + req.getServerName() + ":" + req.getServerPort()
         + "/browseDirectory.jsp?dir=" + URLEncoder.encode(parent, "UTF-8")
         + "&namenodeInfoPort=" + namenodeInfoPort
+        + JspHelper.SET_DELEGATION + tokenString
         + "\"><i>Go back to dir listing</i></a><br>");
     out.print("<a href=\"#viewOptions\">Advanced view/download options</a><br>");
     out.print("<hr>");
@@ -438,7 +485,8 @@
           + "&filename=" + URLEncoder.encode(filename, "UTF-8")
           + "&chunkSizeToView=" + chunkSizeToView
           + "&datanodePort=" + nextDatanodePort
-          + "&namenodeInfoPort=" + namenodeInfoPort;
+          + "&namenodeInfoPort=" + namenodeInfoPort
+          + JspHelper.SET_DELEGATION + tokenString;
       out.print("<a href=\"" + nextUrl + "\">View Next chunk</a>&nbsp;&nbsp;");
     }
     // determine data for the prev link
@@ -494,7 +542,8 @@
           + "&chunkSizeToView=" + chunkSizeToView
           + "&genstamp=" + prevGenStamp
           + "&datanodePort=" + prevDatanodePort
-          + "&namenodeInfoPort=" + namenodeInfoPort;
+          + "&namenodeInfoPort=" + namenodeInfoPort
+          + JspHelper.SET_DELEGATION + tokenString;
       out.print("<a href=\"" + prevUrl + "\">View Prev chunk</a>&nbsp;&nbsp;");
     }
     out.print("<hr>");
@@ -502,7 +551,7 @@
     try {
       JspHelper.streamBlockInAscii(new InetSocketAddress(req.getServerName(),
           datanodePort), blockId, accessToken, genStamp, blockSize,
-          startOffset, chunkSizeToView, out);
+          startOffset, chunkSizeToView, out, conf);
     } catch (Exception e) {
       out.print(e);
     }
@@ -510,8 +559,10 @@
     dfs.close();
   }
 
-  static void generateFileChunksForTail(JspWriter out, HttpServletRequest req)
-      throws IOException {
+  static void generateFileChunksForTail(JspWriter out, HttpServletRequest req,
+                                        Configuration conf
+                                        ) throws IOException,
+                                                 InterruptedException {
     final String referrer = JspHelper.validateURL(req.getParameter("referrer"));
     boolean noLink = false;
     if (referrer == null) {
@@ -524,6 +575,8 @@
       out.print("Invalid input (file name absent)");
       return;
     }
+    String tokenString = req.getParameter(JspHelper.DELEGATION_PARAMETER_NAME);
+    UserGroupInformation ugi = JspHelper.getUGI(req, conf);
 
     String namenodeInfoPortStr = req.getParameter("namenodeInfoPort");
     int namenodeInfoPort = -1;
@@ -531,11 +584,12 @@
       namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);
 
     final int chunkSizeToView = JspHelper.string2ChunkSizeToView(req
-        .getParameter("chunkSizeToView"));
+        .getParameter("chunkSizeToView"), getDefaultChunkSize(conf));
 
     if (!noLink) {
       out.print("<h3>Tail of File: ");
-      JspHelper.printPathWithLinks(filename, out, namenodeInfoPort);
+      JspHelper.printPathWithLinks(filename, out, namenodeInfoPort, 
+                                   tokenString);
       out.print("</h3><hr>");
       out.print("<a href=\"" + referrer + "\">Go Back to File View</a><hr>");
     } else {
@@ -554,8 +608,7 @@
           + "\">");
 
     // fetch the block from the datanode that has the last block for this file
-    final DFSClient dfs = new DFSClient(datanode.getNameNodeAddr(),
-        JspHelper.conf);
+    final DFSClient dfs = getDFSClient(ugi, datanode.getNameNodeAddr(), conf);
     List<LocatedBlock> blocks = dfs.getNamenode().getBlockLocations(filename, 0,
         Long.MAX_VALUE).getLocatedBlocks();
     if (blocks == null || blocks.size() == 0) {
@@ -583,7 +636,7 @@
 
     out.print("<textarea cols=\"100\" rows=\"25\" wrap=\"virtual\" style=\"width:100%\" READONLY>");
     JspHelper.streamBlockInAscii(addr, blockId, accessToken, genStamp,
-        blockSize, startOffset, chunkSizeToView, out);
+        blockSize, startOffset, chunkSizeToView, out, conf);
     out.print("</textarea>");
     dfs.close();
   }

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java Fri Feb 26 23:58:09 2010
@@ -25,8 +25,10 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
+import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.znerd.xmlenc.XMLOutputter;
@@ -39,7 +41,9 @@
   /** {@inheritDoc} */
   public void doGet(final HttpServletRequest request,
       final HttpServletResponse response) throws ServletException, IOException {
-    final UserGroupInformation ugi = getUGI(request);
+    final Configuration conf = 
+      (Configuration) getServletContext().getAttribute("name.conf");
+    final UserGroupInformation ugi = getUGI(request, conf);
     try {
       ugi.doAs(new PrivilegedExceptionAction<Object>() {
         @Override

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/DfsServlet.java Fri Feb 26 23:58:09 2010
@@ -47,35 +47,6 @@
 
   static final Log LOG = LogFactory.getLog(DfsServlet.class.getCanonicalName());
 
-  /** Get {@link UserGroupInformation} from request 
-   * @throws IOException */
-  protected UserGroupInformation getUGI(HttpServletRequest request) 
-        throws IOException {
-    UserGroupInformation u = null;
-    if(UserGroupInformation.isSecurityEnabled()) {
-      String user = request.getRemoteUser();
-      if(user != null)
-        throw new IOException("Security enabled but user not " +
-        		"authenticated by filter");
-      
-      u = UserGroupInformation.createRemoteUser(user);
-    } else { // Security's not on, pull from url
-      String ugi = request.getParameter("ugi");
-      
-      if(ugi == null) // not specified in request
-        ugi = new Configuration().get(JspHelper.WEB_UGI_PROPERTY_NAME);
-      
-      if(ugi == null) // not specified in conf either
-        throw new IOException("Cannot determine UGI from request or conf");
-      
-      u = UserGroupInformation.createRemoteUser(ugi);
-    }
-    
-    if(LOG.isDebugEnabled())
-      LOG.debug("getUGI is returning: " + u.getShortUserName());
-    return u;
-  }
-
   /**
    * Create a {@link NameNode} proxy from the current {@link ServletContext}. 
    */
@@ -88,8 +59,12 @@
   }
 
   /** Create a URI for redirecting request */
-  protected URI createRedirectUri(String servletpath, UserGroupInformation ugi,
-      DatanodeID host, HttpServletRequest request) throws URISyntaxException {
+  protected URI createRedirectUri(String servletpath, 
+                                  UserGroupInformation ugi,
+                                  DatanodeID host, 
+                                  HttpServletRequest request,
+                                  NameNode nn
+                                  ) throws IOException, URISyntaxException {
     final String hostname = host instanceof DatanodeInfo?
         ((DatanodeInfo)host).getHostName(): host.getHost();
     final String scheme = request.getScheme();
@@ -97,8 +72,18 @@
         (Integer)getServletContext().getAttribute("datanode.https.port")
         : host.getInfoPort();
     final String filename = request.getPathInfo();
+    StringBuilder params = new StringBuilder();
+    params.append("filename=");
+    params.append(filename);
+    if (UserGroupInformation.isSecurityEnabled()) {
+      params.append(JspHelper.SET_DELEGATION);
+      params.append(ugi.getTokens().iterator().next().encodeToUrlString());
+    } else {
+      params.append("&ugi=");
+      params.append(ugi.getShortUserName());
+    }
     return new URI(scheme, null, hostname, port, servletpath,
-        "filename=" + filename + "&ugi=" + ugi.getShortUserName(), null);
+                   params.toString(), null);
   }
 
   /** Get filename from the request */
@@ -110,4 +95,9 @@
     }
     return filename;
   }
+  
+  protected UserGroupInformation getUGI(HttpServletRequest request,
+                                        Configuration conf) throws IOException {
+    return JspHelper.getUGI(request, conf);
+  }
 }

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java Fri Feb 26 23:58:09 2010
@@ -35,6 +35,7 @@
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants;
+import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -53,12 +54,15 @@
     /** {@inheritDoc} */
     public void doGet(HttpServletRequest request, HttpServletResponse response
         ) throws ServletException, IOException {
-      final UserGroupInformation ugi = getUGI(request);
       final ServletContext context = getServletContext();
+      final Configuration conf = 
+        (Configuration) context.getAttribute("name.conf");
+      final UserGroupInformation ugi = getUGI(request, conf);
       final NameNode namenode = (NameNode)context.getAttribute("name.node");
       final DatanodeID datanode = namenode.getNamesystem().getRandomDatanode();
       try {
-        final URI uri = createRedirectUri("/getFileChecksum", ugi, datanode, request); 
+        final URI uri = createRedirectUri("/getFileChecksum", ugi, datanode, 
+                                          request, namenode); 
         response.sendRedirect(uri.toURL().toString());
       } catch(URISyntaxException e) {
         throw new ServletException(e); 
@@ -87,7 +91,8 @@
       final SocketFactory socketFactory = NetUtils.getSocketFactory(conf, ClientProtocol.class);
       
       try {
-        ClientProtocol nnproxy = getUGI(request).doAs(new PrivilegedExceptionAction<ClientProtocol>() {
+        ClientProtocol nnproxy = getUGI(request, conf).doAs
+        (new PrivilegedExceptionAction<ClientProtocol>() {
           @Override
           public ClientProtocol run() throws IOException {
             return DFSClient.createNamenode(conf);

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java Fri Feb 26 23:58:09 2010
@@ -25,6 +25,8 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
@@ -86,7 +88,9 @@
    */
   public void doGet(HttpServletRequest request, HttpServletResponse response)
       throws IOException {
-    final UserGroupInformation ugi = getUGI(request);
+    final Configuration conf = 
+      (Configuration) getServletContext().getAttribute("name.conf");
+    final UserGroupInformation ugi = getUGI(request, conf);
 
     try {
       final ClientProtocol nnproxy = ugi

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FsckServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FsckServlet.java?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FsckServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/FsckServlet.java Fri Feb 26 23:58:09 2010
@@ -29,6 +29,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.protocol.FSConstants.DatanodeReportType;
+import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.security.UserGroupInformation;
 
 /**
@@ -44,15 +45,15 @@
     @SuppressWarnings("unchecked")
     final Map<String,String[]> pmap = request.getParameterMap();
     final PrintWriter out = response.getWriter();
+    final Configuration conf = 
+      (Configuration) getServletContext().getAttribute("name.conf");
 
-    final UserGroupInformation ugi = getUGI(request);
+    final UserGroupInformation ugi = getUGI(request, conf);
     try {
       ugi.doAs(new PrivilegedExceptionAction<Object>() {
         @Override
         public Object run() throws Exception {
           final ServletContext context = getServletContext();
-          final Configuration conf = 
-            new HdfsConfiguration((Configuration)context.getAttribute("name.conf"));
           
           NameNode nn = (NameNode) context.getAttribute("name.node");
           

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java Fri Feb 26 23:58:09 2010
@@ -17,11 +17,13 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.HftpFileSystem;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
+import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.util.VersionInfo;
 
@@ -135,9 +137,11 @@
       final boolean recur = "yes".equals(root.get("recursive"));
       final Pattern filter = Pattern.compile(root.get("filter"));
       final Pattern exclude = Pattern.compile(root.get("exclude"));
+      final Configuration conf = 
+        (Configuration) getServletContext().getAttribute("name.conf");
       
-      ClientProtocol nnproxy = 
-        getUGI(request).doAs(new PrivilegedExceptionAction<ClientProtocol>() {
+      ClientProtocol nnproxy = getUGI(request, conf).doAs
+        (new PrivilegedExceptionAction<ClientProtocol>() {
         @Override
         public ClientProtocol run() throws IOException {
           return createNameNodeProxy();

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java Fri Feb 26 23:58:09 2010
@@ -23,6 +23,7 @@
 import java.lang.management.MemoryUsage;
 import java.net.InetAddress;
 import java.net.URLEncoder;
+import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
@@ -35,10 +36,16 @@
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.FSConstants.UpgradeAction;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
 import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
+import org.apache.hadoop.hdfs.server.datanode.DatanodeJspHelper;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.ServletUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.VersionInfo;
@@ -241,9 +248,32 @@
     }
   }
 
-  static void redirectToRandomDataNode(NameNode nn, HttpServletResponse resp)
-      throws IOException {
+  static String getDelegationToken(final NameNode nn, final String user
+                                   ) throws IOException, InterruptedException {
+    if (user == null) {
+      return null;
+    }
+    UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
+    Token<DelegationTokenIdentifier> token =
+      ugi.doAs(
+               new PrivilegedExceptionAction<Token<DelegationTokenIdentifier>>() {
+                 public Token<DelegationTokenIdentifier> run() throws IOException {
+                   return nn.getDelegationToken(new Text(user));
+                 }
+               });
+    return token.encodeToUrlString();
+  }
+
+  static void redirectToRandomDataNode(final NameNode nn, 
+                                       HttpServletRequest request,
+                                       HttpServletResponse resp,
+                                       Configuration conf
+                                       ) throws IOException,
+                                                InterruptedException {
     final DatanodeID datanode = nn.getNamesystem().getRandomDatanode();
+    final String user = request.getRemoteUser();
+    String tokenString = getDelegationToken(nn, user);
+    // if the user is defined, get a delegation token and stringify it
     final String redirectLocation;
     final String nodeToRedirect;
     int redirectPort;
@@ -257,8 +287,9 @@
     String fqdn = InetAddress.getByName(nodeToRedirect).getCanonicalHostName();
     redirectLocation = "http://" + fqdn + ":" + redirectPort
         + "/browseDirectory.jsp?namenodeInfoPort="
-        + nn.getHttpAddress().getPort() + "&dir="
-        + URLEncoder.encode("/", "UTF-8");
+        + nn.getHttpAddress().getPort() + "&dir=/"
+        + (tokenString == null ? "" :
+           JspHelper.SET_DELEGATION + tokenString);
     resp.sendRedirect(redirectLocation);
   }
 

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/StreamFile.java Fri Feb 26 23:58:09 2010
@@ -33,6 +33,7 @@
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.mortbay.jetty.InclusiveByteRange;
 
 public class StreamFile extends DfsServlet {
@@ -41,7 +42,6 @@
 
   static InetSocketAddress nameNodeAddr;
   static DataNode datanode = null;
-  private static final Configuration masterConf = new HdfsConfiguration();
   static {
     if ((datanode = DataNode.getDataNode()) != null) {
       nameNodeAddr = datanode.getNameNodeAddr();
@@ -51,10 +51,11 @@
   /** getting a client for connecting to dfs */
   protected DFSClient getDFSClient(HttpServletRequest request)
       throws IOException, InterruptedException {
-    final Configuration conf = new HdfsConfiguration(masterConf);
+    final Configuration conf =
+      (Configuration) getServletContext().getAttribute("name.conf");
     
-    DFSClient client = 
-      getUGI(request).doAs(new PrivilegedExceptionAction<DFSClient>() {
+    UserGroupInformation ugi = getUGI(request, conf);
+    DFSClient client = ugi.doAs(new PrivilegedExceptionAction<DFSClient>() {
       @Override
       public DFSClient run() throws IOException {
         return new DFSClient(nameNodeAddr, conf);

Modified: hadoop/hdfs/trunk/src/test/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/findbugsExcludeFile.xml?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/findbugsExcludeFile.xml (original)
+++ hadoop/hdfs/trunk/src/test/findbugsExcludeFile.xml Fri Feb 26 23:58:09 2010
@@ -30,13 +30,12 @@
      </Match>
      <!-- 
        Ignore Cross Scripting Vulnerabilities
+       We have an input quoting filter that protects us.
      -->
      <Match>
-       <Package name="~org.apache.hadoop.mapred.*" />
        <Bug code="XSS" />
      </Match>
      <Match>
-       <Class name="org.apache.hadoop.mapred.taskdetails_jsp" />
        <Bug code="HRS" />
      </Match>
      <!--

Modified: hadoop/hdfs/trunk/src/webapps/datanode/browseBlock.jsp
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/webapps/datanode/browseBlock.jsp?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/webapps/datanode/browseBlock.jsp (original)
+++ hadoop/hdfs/trunk/src/webapps/datanode/browseBlock.jsp Fri Feb 26 23:58:09 2010
@@ -21,6 +21,7 @@
   contentType="text/html; charset=UTF-8"
   import="org.apache.hadoop.hdfs.server.common.JspHelper"
   import="org.apache.hadoop.util.ServletUtil"
+  import="org.apache.hadoop.conf.Configuration"
 %>
 <%!
   //for java.io.Serializable
@@ -32,9 +33,13 @@
 <%JspHelper.createTitle(out, request, request.getParameter("filename")); %>
 </head>
 <body onload="document.goto.dir.focus()">
-<% DatanodeJspHelper.generateFileChunks(out,request); %>
+<% 
+  Configuration conf = 
+     (Configuration) application.getAttribute("datanode.conf");
+  DatanodeJspHelper.generateFileChunks(out, request, conf); 
+%>
 <hr>
-<% DatanodeJspHelper.generateFileDetails(out,request); %>
+<% DatanodeJspHelper.generateFileDetails(out, request, conf); %>
 
 <h2>Local logs</h2>
 <a href="/logs/">Log</a> directory

Modified: hadoop/hdfs/trunk/src/webapps/datanode/browseDirectory.jsp
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/webapps/datanode/browseDirectory.jsp?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/webapps/datanode/browseDirectory.jsp (original)
+++ hadoop/hdfs/trunk/src/webapps/datanode/browseDirectory.jsp Fri Feb 26 23:58:09 2010
@@ -23,6 +23,7 @@
 
   import="org.apache.hadoop.hdfs.server.common.JspHelper"
   import="org.apache.hadoop.util.ServletUtil"
+  import="org.apache.hadoop.conf.Configuration"
 %>
 <%!
   //for java.io.Serializable
@@ -45,7 +46,9 @@
 <body onload="document.goto.dir.focus()">
 <% 
   try {
-    DatanodeJspHelper.generateDirectoryStructure(out,request,response);
+    Configuration conf = 
+      (Configuration) application.getAttribute("datanode.conf");
+    DatanodeJspHelper.generateDirectoryStructure(out,request,response, conf);
   }
   catch(IOException ioe) {
     String msg = ioe.getLocalizedMessage();

Modified: hadoop/hdfs/trunk/src/webapps/datanode/tail.jsp
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/webapps/datanode/tail.jsp?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/webapps/datanode/tail.jsp (original)
+++ hadoop/hdfs/trunk/src/webapps/datanode/tail.jsp Fri Feb 26 23:58:09 2010
@@ -21,6 +21,7 @@
   contentType="text/html; charset=UTF-8"
   import="org.apache.hadoop.hdfs.server.common.JspHelper"
   import="org.apache.hadoop.util.ServletUtil"
+  import="org.apache.hadoop.conf.Configuration"
 %>
 <%!
   //for java.io.Serializable
@@ -32,7 +33,11 @@
 </head>
 <body>
 <form action="/tail.jsp" method="GET">
-<% DatanodeJspHelper.generateFileChunksForTail(out,request); %>
+<% 
+   Configuration conf = 
+     (Configuration) application.getAttribute("datanode.conf");
+   DatanodeJspHelper.generateFileChunksForTail(out,request, conf); 
+%>
 </form>
 <hr>
 

Modified: hadoop/hdfs/trunk/src/webapps/hdfs/nn_browsedfscontent.jsp
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/webapps/hdfs/nn_browsedfscontent.jsp?rev=916873&r1=916872&r2=916873&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/webapps/hdfs/nn_browsedfscontent.jsp (original)
+++ hadoop/hdfs/trunk/src/webapps/hdfs/nn_browsedfscontent.jsp Fri Feb 26 23:58:09 2010
@@ -19,6 +19,7 @@
 %>
 <%@ page
   contentType="text/html; charset=UTF-8"
+  import="org.apache.hadoop.conf.Configuration"
   import="org.apache.hadoop.util.ServletUtil"
 %>
 <%!
@@ -32,7 +33,9 @@
 <body>
 <% 
   NameNode nn = (NameNode)application.getAttribute("name.node");
-  NamenodeJspHelper.redirectToRandomDataNode(nn, response); 
+  Configuration conf = 
+     (Configuration) application.getAttribute("name.conf");
+  NamenodeJspHelper.redirectToRandomDataNode(nn, request, response, conf); 
 %>
 <hr>
 



Mime
View raw message