hadoop-common-commits mailing list archives

From: dhr...@apache.org
Subject: svn commit: r615364 - in /hadoop/core/trunk: ./ src/java/org/apache/hadoop/dfs/ src/java/org/apache/hadoop/fs/permission/ src/java/org/apache/hadoop/security/
Date: Fri, 25 Jan 2008 21:57:12 GMT
Author: dhruba
Date: Fri Jan 25 13:57:10 2008
New Revision: 615364

URL: http://svn.apache.org/viewvc?rev=615364&view=rev
Log:
HADOOP-2652. Fix permission issues for HftpFileSystem. This is an
incompatible change since distcp may not be able to copy files
from cluster A (compiled with this patch) to cluster B (compiled
with previous versions). (Tsz Wo (Nicholas), SZE via dhruba)

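Note on the incompatibility: the patched HftpFileSystem appends a "ugi" query parameter to every request it sends to the web servlets, and the patched servlets read it back to enforce permissions. A minimal sketch of the two URI shapes, with a hypothetical host, port, path and user string:

import java.net.URI;
import java.net.URISyntaxException;

public class HftpUgiUriSketch {
  public static void main(String[] args) throws URISyntaxException {
    // Hypothetical values for illustration only.
    String host = "namenode.example.com";
    int infoPort = 50070;
    String path = "/user/alice/part-00000";
    String ugi = "alice,users";            // user name followed by group names

    // Before this patch: no caller identity in the request.
    URI before = new URI("http", null, host, infoPort,
        "/data" + path, null, null);

    // After this patch: the caller's ugi rides along as a query parameter,
    // so the servlet can check permissions on the caller's behalf.
    URI after = new URI("http", null, host, infoPort,
        "/data" + path, "ugi=" + ugi, null);

    System.out.println(before);
    System.out.println(after);
  }
}

An unpatched server has no servlet parameter to read, and an unpatched client never sends one, which is why distcp across mixed clusters may fail.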

Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/java/org/apache/hadoop/dfs/FileDataServlet.java
    hadoop/core/trunk/src/java/org/apache/hadoop/dfs/HftpFileSystem.java
    hadoop/core/trunk/src/java/org/apache/hadoop/dfs/JspHelper.java
    hadoop/core/trunk/src/java/org/apache/hadoop/dfs/ListPathsServlet.java
    hadoop/core/trunk/src/java/org/apache/hadoop/dfs/StreamFile.java
    hadoop/core/trunk/src/java/org/apache/hadoop/fs/permission/FsPermission.java
    hadoop/core/trunk/src/java/org/apache/hadoop/security/UnixUserGroupInformation.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=615364&r1=615363&r2=615364&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Fri Jan 25 13:57:10 2008
@@ -60,6 +60,11 @@
     writes to a HDFS file. Changed Data Transfer Version from 7 to 8.
     (dhruba)
 
+    HADOOP-2652. Fix permission issues for HftpFileSystem. This is an 
+    incompatible change since distcp may not be able to copy files 
+    from cluster A (compiled with this patch) to cluster B (compiled 
+    with previous versions). (Tsz Wo (Nicholas), SZE via dhruba)
+
   NEW FEATURES
 
     HADOOP-1857.  Ability to run a script when a task fails to capture stack

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/dfs/FileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/dfs/FileDataServlet.java?rev=615364&r1=615363&r2=615364&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/dfs/FileDataServlet.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/dfs/FileDataServlet.java Fri Jan 25 13:57:10 2008
@@ -25,21 +25,20 @@
 import java.util.Map;
 import java.util.Random;
 
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.hadoop.security.UnixUserGroupInformation;
+
 /** Redirect queries about the hosted filesystem to an appropriate datanode.
  * @see org.apache.hadoop.dfs.HftpFileSystem
  */
-public class FileDataServlet extends HttpServlet {
-
-  static URI getUri(DFSFileInfo i, NameNode nn)
-      throws IOException, URISyntaxException {
-    final DatanodeInfo host = pickSrcDatanode(i, nn);
+public class FileDataServlet extends DfsServlet {
+  private static URI createUri(DFSFileInfo i, UnixUserGroupInformation ugi,
+      ClientProtocol nnproxy) throws IOException, URISyntaxException {
+    final DatanodeInfo host = pickSrcDatanode(i, nnproxy);
     return new URI("http", null, host.getHostName(), host.getInfoPort(),
-          "/streamFile", "filename=" + i.getPath(), null);
+          "/streamFile", "filename=" + i.getPath() + "&ugi=" + ugi, null);
   }
 
   private final static int BLOCK_SAMPLE = 5;
@@ -48,14 +47,14 @@
    * Currently, this looks at no more than the first five blocks of a file,
    * selecting a datanode randomly from the most represented.
    */
-  protected static DatanodeInfo pickSrcDatanode(DFSFileInfo i, NameNode nn)
-      throws IOException {
+  private static DatanodeInfo pickSrcDatanode(DFSFileInfo i,
+      ClientProtocol nnproxy) throws IOException {
     long sample;
     if (i.getLen() == 0) sample = 1;
     else sample = i.getLen() / i.getBlockSize() > BLOCK_SAMPLE
         ? i.getBlockSize() * BLOCK_SAMPLE - 1
         : i.getLen();
-    final LocatedBlocks blks = nn.getBlockLocations(
+    final LocatedBlocks blks = nnproxy.getBlockLocations(
         i.getPath().toUri().getPath(), 0, sample);
     HashMap<DatanodeInfo, Integer> count = new HashMap<DatanodeInfo, Integer>();
     for (LocatedBlock b : blks.getLocatedBlocks()) {
@@ -89,15 +88,16 @@
    * }
    */
   public void doGet(HttpServletRequest request, HttpServletResponse response)
-    throws ServletException, IOException {
+    throws IOException {
+    final UnixUserGroupInformation ugi = getUGI(request);
+    final ClientProtocol nnproxy = createNameNodeProxy(ugi);
 
     try {
       final String path = request.getPathInfo() != null
         ? request.getPathInfo() : "/";
-      final NameNode nn = (NameNode)getServletContext().getAttribute("name.node");
-      DFSFileInfo info = nn.getFileInfo(path);
+      DFSFileInfo info = nnproxy.getFileInfo(path);
       if (!info.isDir()) {
-        response.sendRedirect(getUri(info, nn).toURL().toString());
+        response.sendRedirect(createUri(info, ugi, nnproxy).toURL().toString());
       } else {
         response.sendError(400, "cat: " + path + ": is a directory");
       }

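The refactored pickSrcDatanode() keeps its sampling behaviour: it looks at no more than the first five blocks and picks a datanode from the most represented. A standalone sketch of just the sample-length arithmetic, with hypothetical block and file sizes:

public class SampleWindowSketch {
  private static final int BLOCK_SAMPLE = 5;

  /** Mirrors the sample-size computation above (values below are hypothetical). */
  static long sampleLength(long fileLen, long blockSize) {
    if (fileLen == 0) return 1;
    return fileLen / blockSize > BLOCK_SAMPLE
        ? blockSize * BLOCK_SAMPLE - 1   // cap at just under five full blocks
        : fileLen;                       // small file: consider all of it
  }

  public static void main(String[] args) {
    long blockSize = 64L * 1024 * 1024;                               // 64 MB blocks
    System.out.println(sampleLength(0, blockSize));                   // 1
    System.out.println(sampleLength(10L * 1024 * 1024, blockSize));   // whole small file
    System.out.println(sampleLength(1024L * 1024 * 1024, blockSize)); // capped window
  }
}

Capping the window keeps the getBlockLocations() call cheap for very large files while still covering small files entirely.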
Modified: hadoop/core/trunk/src/java/org/apache/hadoop/dfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/dfs/HftpFileSystem.java?rev=615364&r1=615363&r2=615364&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/dfs/HftpFileSystem.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/dfs/HftpFileSystem.java Fri Jan 25 13:57:10 2008
@@ -30,6 +30,7 @@
 import java.text.SimpleDateFormat;
 
 import java.util.ArrayList;
+import javax.security.auth.login.LoginException;
 
 import org.xml.sax.Attributes;
 import org.xml.sax.InputSource;
@@ -47,7 +48,9 @@
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.security.*;
 import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.StringUtils;
 
 /** An implementation of a protocol for accessing filesystems over HTTP.
  * The following implementation provides a limited, read-only interface
@@ -63,10 +66,17 @@
   private String fshostname = "";
   private int fsport = -1;
   protected static final SimpleDateFormat df = ListPathsServlet.df;
+  private UserGroupInformation ugi; 
 
   @Override
   public void initialize(URI name, Configuration conf) throws IOException {
     setConf(conf);
+    try {
+      this.ugi = UnixUserGroupInformation.login(conf);
+    } catch (LoginException le) {
+      throw new IOException(StringUtils.stringifyException(le));
+    } 
+
     this.fshostname = name.getHost();
     this.fsport = name.getPort();
     if(fsport >= 0)
@@ -89,7 +99,7 @@
     HttpURLConnection connection = null;
     try {
       final URL url = new URI("http", null, fshostname, fsport,
-          "/data" + f.toUri().getPath(), null, null).toURL();
+          "/data" + f.toUri().getPath(), "ugi=" + ugi, null).toURL();
       connection = (HttpURLConnection)url.openConnection();
       connection.setRequestMethod("GET");
       connection.connect();
@@ -160,7 +170,8 @@
         XMLReader xr = XMLReaderFactory.createXMLReader();
         xr.setContentHandler(this);
         final URL url = new URI("http", null, fshostname, fsport,
-            "/listPaths" + path, recur ? "recursive=yes" : null , null).toURL();
+            "/listPaths" + path, "ugi=" + ugi + (recur? "&recursive=yes" : ""),
+            null).toURL();
         HttpURLConnection connection = (HttpURLConnection)url.openConnection();
         connection.setRequestMethod("GET");
         connection.connect();

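Both HTTP paths used by HftpFileSystem now carry the ugi; for /listPaths the recursive flag is folded into the same query string. A sketch of the resulting URL, with a hypothetical host, port and ugi string:

import java.net.URI;
import java.net.URISyntaxException;

public class ListPathsUriSketch {
  public static void main(String[] args) throws URISyntaxException {
    String fshostname = "namenode.example.com";   // hypothetical
    int fsport = 50070;                           // hypothetical
    String ugi = "alice,users";                   // hypothetical
    String path = "/user/alice";
    boolean recur = true;

    // The ugi parameter is always present; "recursive=yes" is appended only
    // when a recursive listing was requested, matching the patched code.
    URI url = new URI("http", null, fshostname, fsport,
        "/listPaths" + path,
        "ugi=" + ugi + (recur ? "&recursive=yes" : ""), null);
    System.out.println(url);
  }
}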
Modified: hadoop/core/trunk/src/java/org/apache/hadoop/dfs/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/dfs/JspHelper.java?rev=615364&r1=615363&r2=615364&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/dfs/JspHelper.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/dfs/JspHelper.java Fri Jan 25 13:57:10 2008
@@ -44,6 +44,9 @@
   static FSNamesystem fsn = null;
   static InetSocketAddress nameNodeAddr;
   public static Configuration conf = new Configuration();
+  public static final UnixUserGroupInformation webUGI
+  = UnixUserGroupInformation.createImmutable(
+      conf.getStrings(WEB_UGI_PROPERTY_NAME));
 
   static int defaultChunkSizeToView = 
     conf.getInt("dfs.default.chunk.view.size", 32 * 1024);
@@ -60,8 +63,7 @@
     }      
 
     UnixUserGroupInformation.saveToConf(conf,
-      UnixUserGroupInformation.UGI_PROPERTY_NAME,
-      new UnixUserGroupInformation(conf.getStrings(WEB_UGI_PROPERTY_NAME)));
+        UnixUserGroupInformation.UGI_PROPERTY_NAME, webUGI);
   }
   public DatanodeInfo bestNode(LocatedBlock blk) throws IOException {
     TreeSet<DatanodeInfo> deadNodes = new TreeSet<DatanodeInfo>();

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/dfs/ListPathsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/dfs/ListPathsServlet.java?rev=615364&r1=615363&r2=615364&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/dfs/ListPathsServlet.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/dfs/ListPathsServlet.java Fri Jan 25 13:57:10 2008
@@ -17,13 +17,14 @@
  */
 package org.apache.hadoop.dfs;
 
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.UnixUserGroupInformation;
 import org.apache.hadoop.util.VersionInfo;
 
 import org.znerd.xmlenc.*;
 
 import java.io.IOException;
 import java.io.PrintWriter;
-import java.net.URISyntaxException;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.HashMap;
@@ -33,7 +34,6 @@
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
 import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
@@ -41,8 +41,7 @@
  * Obtain meta-information about a filesystem.
  * @see org.apache.hadoop.dfs.HftpFileSystem
  */
-public class ListPathsServlet extends HttpServlet {
-
+public class ListPathsServlet extends DfsServlet {
   static final SimpleDateFormat df =
     new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ");
   static {
@@ -51,11 +50,10 @@
 
   /**
    * Write a node to output.
-   * Dir: path, modification
-   * File: path, size, replication, blocksize, and modification
+   * Node information includes path, modification, permission, owner and group.
+   * For files, it also includes size, replication and block-size. 
    */
-  protected void writeItem(DFSFileInfo i, XMLOutputter doc, NameNode nn)
-      throws IOException, URISyntaxException {
+  static void writeInfo(DFSFileInfo i, XMLOutputter doc) throws IOException {
     doc.startTag(i.isDir() ? "directory" : "file");
     doc.attribute("path", i.getPath().toUri().getPath());
     doc.attribute("modified", df.format(new Date(i.getModificationTime())));
@@ -64,7 +62,7 @@
       doc.attribute("replication", String.valueOf(i.getReplication()));
       doc.attribute("blocksize", String.valueOf(i.getBlockSize()));
     }
-    doc.attribute("permission", i.getPermission().toString());
+    doc.attribute("permission", (i.isDir()? "d": "-") + i.getPermission());
     doc.attribute("owner", i.getOwner());
     doc.attribute("group", i.getGroup());
     doc.endTag();
@@ -118,7 +116,7 @@
    */
   public void doGet(HttpServletRequest request, HttpServletResponse response)
     throws ServletException, IOException {
-
+    final UnixUserGroupInformation ugi = getUGI(request);
     final PrintWriter out = response.getWriter();
     final XMLOutputter doc = new XMLOutputter(out, "UTF-8");
     try {
@@ -127,35 +125,37 @@
       final boolean recur = "yes".equals(root.get("recursive"));
       final Pattern filter = Pattern.compile(root.get("filter"));
       final Pattern exclude = Pattern.compile(root.get("exclude"));
-      final NameNode nn = (NameNode)getServletContext().getAttribute("name.node");
+      ClientProtocol nnproxy = createNameNodeProxy(ugi);
+
       doc.declaration();
       doc.startTag("listing");
       for (Map.Entry<String,String> m : root.entrySet()) {
         doc.attribute(m.getKey(), m.getValue());
       }
 
-      DFSFileInfo base = nn.getFileInfo(path);
+      DFSFileInfo base = nnproxy.getFileInfo(path);
       if (base.isDir()) {
-        writeItem(base, doc, nn);
+        writeInfo(base, doc);
       }
 
       Stack<String> pathstack = new Stack<String>();
       pathstack.push(path);
       while (!pathstack.empty()) {
-        for (DFSFileInfo i : nn.getListing(pathstack.pop())) {
-          if (exclude.matcher(i.getName()).matches()
-              || !filter.matcher(i.getName()).matches()) {
-            continue;
+        String p = pathstack.pop();
+        try {
+          for (DFSFileInfo i : nnproxy.getListing(p)) {
+            if (exclude.matcher(i.getName()).matches()
+                || !filter.matcher(i.getName()).matches()) {
+              continue;
+            }
+            if (recur && i.isDir()) {
+              pathstack.push(i.getPath().toUri().getPath());
+            }
+            writeInfo(i, doc);
           }
-          if (recur && i.isDir()) {
-            pathstack.push(i.getPath().toUri().getPath());
-          }
-          writeItem(i, doc, nn);
         }
+        catch(RemoteException re) {writeRemoteException(p, re, doc);}
       }
-
-    } catch (URISyntaxException e) {
-      out.println(e.toString());
     } catch (PatternSyntaxException e) {
       out.println(e.toString());
     } finally {

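As the new writeInfo() javadoc says, every listed node now reports permission, owner and group, with the permission string prefixed by "d" for directories and "-" for files, ls-style. A small sketch of that composition (the sample mode strings are made up, and FsPermission.toString() is assumed to print the nine rwx characters, as the servlet already relied on before this patch):

import org.apache.hadoop.fs.permission.FsPermission;

public class PermissionAttributeSketch {
  public static void main(String[] args) {
    // Hypothetical modes; in the servlet these come from DFSFileInfo.getPermission().
    FsPermission filePerm = FsPermission.valueOf("-rw-r--r--");
    FsPermission dirPerm  = FsPermission.valueOf("-rwxr-xr-x");

    // Same composition as writeInfo(): type marker plus the mode string.
    boolean isDir = false;
    System.out.println((isDir ? "d" : "-") + filePerm);   // expected: -rw-r--r--
    isDir = true;
    System.out.println((isDir ? "d" : "-") + dirPerm);    // expected: drwxr-xr-x
  }
}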
Modified: hadoop/core/trunk/src/java/org/apache/hadoop/dfs/StreamFile.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/dfs/StreamFile.java?rev=615364&r1=615363&r2=615364&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/dfs/StreamFile.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/dfs/StreamFile.java Fri Jan 25 13:57:10 2008
@@ -20,17 +20,15 @@
 import javax.servlet.*;
 import javax.servlet.http.*;
 import java.io.*;
-import java.util.*;
 import java.net.*;
 import org.apache.hadoop.fs.*;
+import org.apache.hadoop.security.UnixUserGroupInformation;
 import org.apache.hadoop.conf.*;
 
-public class StreamFile extends HttpServlet {
-
+public class StreamFile extends DfsServlet {
   static InetSocketAddress nameNodeAddr;
   static DataNode datanode = null;
-  static Configuration conf = new Configuration();
-  Random rand = new Random();
+  private static final Configuration masterConf = new Configuration();
   static {
     if ((datanode = DataNode.getDataNode()) != null) {
       nameNodeAddr = datanode.getNameNodeAddr();
@@ -38,6 +36,10 @@
   }
   public void doGet(HttpServletRequest request, HttpServletResponse response)
     throws ServletException, IOException {
+    Configuration conf = new Configuration(masterConf);
+    UnixUserGroupInformation.saveToConf(conf,
+        UnixUserGroupInformation.UGI_PROPERTY_NAME, getUGI(request));
+
     String filename = request.getParameter("filename");
     if (filename == null || filename.length() == 0) {
       response.setContentType("text/plain");

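StreamFile now clones the shared configuration per request and stores the caller's ugi in the copy, so concurrent requests from different users do not overwrite each other's identity. A hedged sketch of that pattern (the user and group names are hypothetical):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UnixUserGroupInformation;

public class PerRequestConfSketch {
  public static void main(String[] args) {
    Configuration masterConf = new Configuration();

    // Hypothetical caller identity; in the servlet this comes from getUGI(request).
    UnixUserGroupInformation callerUgi =
        UnixUserGroupInformation.createImmutable(new String[]{"alice", "users"});

    // Clone the shared configuration so each request carries its own ugi,
    // as the patched doGet() does.
    Configuration conf = new Configuration(masterConf);
    UnixUserGroupInformation.saveToConf(conf,
        UnixUserGroupInformation.UGI_PROPERTY_NAME, callerUgi);

    System.out.println("per-request ugi: " + callerUgi);
  }
}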
Modified: hadoop/core/trunk/src/java/org/apache/hadoop/fs/permission/FsPermission.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/fs/permission/FsPermission.java?rev=615364&r1=615363&r2=615364&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/fs/permission/FsPermission.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/fs/permission/FsPermission.java Fri Jan 25 13:57:10 2008
@@ -179,7 +179,10 @@
    * @param unixSymbolicPermission e.g. "-rw-rw-rw-"
    */
   public static FsPermission valueOf(String unixSymbolicPermission) {
-    if (unixSymbolicPermission.length() != 10) {
+    if (unixSymbolicPermission == null) {
+      return null;
+    }
+    else if (unixSymbolicPermission.length() != 10) {
       throw new IllegalArgumentException("length != 10(unixSymbolicPermission="
           + unixSymbolicPermission + ")");
     }

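The null guard above lets callers pass an absent permission attribute straight through: valueOf(null) now returns null instead of throwing a NullPointerException. A minimal illustration (the non-null mode string is hypothetical):

import org.apache.hadoop.fs.permission.FsPermission;

public class ValueOfNullSketch {
  public static void main(String[] args) {
    // A listing from an older server may simply omit the permission attribute.
    String missing = null;
    String present = "-rw-r--r--";   // hypothetical symbolic mode from a patched server

    System.out.println(FsPermission.valueOf(missing));   // null, no NullPointerException
    System.out.println(FsPermission.valueOf(present));   // parsed FsPermission
  }
}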
Modified: hadoop/core/trunk/src/java/org/apache/hadoop/security/UnixUserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/security/UnixUserGroupInformation.java?rev=615364&r1=615363&r2=615364&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/security/UnixUserGroupInformation.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/security/UnixUserGroupInformation.java Fri Jan 25 13:57:10 2008
@@ -39,6 +39,15 @@
   final static private HashMap<String, UnixUserGroupInformation> user2UGIMap =
     new HashMap<String, UnixUserGroupInformation>();
 
+  /** Create an immutable {@link UnixUserGroupInformation} object. */
+  public static UnixUserGroupInformation createImmutable(String[] ugi) {
+    return new UnixUserGroupInformation(ugi) {
+      public void readFields(DataInput in) throws IOException {
+        throw new UnsupportedOperationException();
+      }
+    };
+  }
+
   private String userName;
   private String[] groupNames;
 

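createImmutable() returns an anonymous subclass whose readFields() is disabled, so a shared constant such as the webUGI added to JspHelper above cannot be mutated by deserialization. A short sketch of what that guarantees (user and group names are hypothetical):

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

import org.apache.hadoop.security.UnixUserGroupInformation;

public class ImmutableUgiSketch {
  public static void main(String[] args) throws IOException {
    // Hypothetical ugi strings: first entry is the user, the rest are groups.
    UnixUserGroupInformation webUgi =
        UnixUserGroupInformation.createImmutable(new String[]{"webuser", "webgroup"});

    try {
      // The anonymous subclass returned above rejects deserialization,
      // so the shared constant keeps the identity it was created with.
      webUgi.readFields(new DataInputStream(new ByteArrayInputStream(new byte[0])));
    } catch (UnsupportedOperationException expected) {
      System.out.println("immutable ugi cannot be re-read: " + expected);
    }
  }
}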

