hadoop-common-commits mailing list archives

From: omal...@apache.org
Subject: svn commit: r1077206 [2/2] - in /hadoop/common/branches/branch-0.20-security-patches: ./ src/hdfs/org/apache/hadoop/hdfs/ src/hdfs/org/apache/hadoop/hdfs/protocol/ src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/test/org/apache/hadoop/hdfs/ src/we...
Date: Fri, 04 Mar 2011 03:51:50 GMT
Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java?rev=1077206&r1=1077205&r2=1077206&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java Fri Mar  4 03:51:49 2011
@@ -24,10 +24,11 @@ import java.security.PrivilegedException
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.security.UserGroupInformation;
 
@@ -37,11 +38,11 @@ import org.apache.hadoop.security.UserGr
 public class FileDataServlet extends DfsServlet {
 
   /** Create a redirection URI */
-  protected URI createUri(FileStatus i, UserGroupInformation ugi,
+  protected URI createUri(String parent, HdfsFileStatus i, UserGroupInformation ugi,
       ClientProtocol nnproxy, HttpServletRequest request)
       throws IOException, URISyntaxException {
     String scheme = request.getScheme();
-    final DatanodeID host = pickSrcDatanode(i, nnproxy);
+    final DatanodeID host = pickSrcDatanode(parent, i, nnproxy);
     final String hostname;
     if (host instanceof DatanodeInfo) {
       hostname = ((DatanodeInfo)host).getHostName();
@@ -52,7 +53,7 @@ public class FileDataServlet extends Dfs
         "https".equals(scheme)
           ? (Integer)getServletContext().getAttribute("datanode.https.port")
           : host.getInfoPort(),
-        "/streamFile", "filename=" + i.getPath() + 
+        "/streamFile", "filename=" + i.getFullName(parent) + 
         "&ugi=" + ugi.getShortUserName(), null);
   }
 
@@ -62,7 +63,7 @@ public class FileDataServlet extends Dfs
    * Currently, this looks at no more than the first five blocks of a file,
    * selecting a datanode randomly from the most represented.
    */
-  private static DatanodeID pickSrcDatanode(FileStatus i,
+  private static DatanodeID pickSrcDatanode(String parent, HdfsFileStatus i,
       ClientProtocol nnproxy) throws IOException {
     // a race condition can happen by initializing a static member this way.
     // A proper fix should make JspHelper a singleton. Since it doesn't affect 
@@ -70,7 +71,7 @@ public class FileDataServlet extends Dfs
     if (jspHelper == null)
       jspHelper = new JspHelper();
     final LocatedBlocks blks = nnproxy.getBlockLocations(
-        i.getPath().toUri().getPath(), 0, 1);
+        i.getFullPath(new Path(parent)).toUri().getPath(), 0, 1);
     if (i.getLen() == 0 || blks.getLocatedBlocks().size() <= 0) {
       // pick a random datanode
       return jspHelper.randomNode();
@@ -101,9 +102,9 @@ public class FileDataServlet extends Dfs
       final String path = request.getPathInfo() != null ? 
                                                     request.getPathInfo() : "/";
       
-      FileStatus info = nnproxy.getFileInfo(path);
+      HdfsFileStatus info = nnproxy.getFileInfo(path);
       if ((info != null) && !info.isDir()) {
-        response.sendRedirect(createUri(info, ugi, nnproxy,
+        response.sendRedirect(createUri(path, info, ugi, nnproxy,
               request).toURL().toString());
       } else if (info == null){
         response.sendError(400, "cat: File not found " + path);

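The servlet can no longer read an absolute path off the status object: HdfsFileStatus carries only the last path component, so the redirect URI is now built from the listing parent via getFullName(parent). A minimal sketch of the assumed contract (the helper name and corner cases here are illustrative, not the committed code):

    // Sketch: join the parent directory with the entry's local name.
    // An empty local name is assumed to denote the parent itself (e.g. "/").
    static String fullName(String parent, String localName) {
      if (localName.length() == 0) {
        return parent;
      }
      StringBuilder b = new StringBuilder(parent);
      if (!parent.endsWith("/")) {
        b.append('/');          // avoid doubling the path separator
      }
      return b.append(localName).toString();
    }
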
Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INode.java?rev=1077206&r1=1077205&r2=1077206&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INode.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INode.java Fri Mar  4 03:51:49 2011
@@ -17,13 +17,13 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
-import java.io.UnsupportedEncodingException;
 import java.util.Arrays;
 import java.util.List;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.permission.*;
+import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
@@ -221,7 +221,7 @@ abstract class INode implements Comparab
    * @return local file name
    */
   String getLocalName() {
-    return bytes2String(name);
+    return DFSUtil.bytes2String(name);
   }
 
   /**
@@ -236,7 +236,7 @@ abstract class INode implements Comparab
    * Set local file name
    */
   void setLocalName(String name) {
-    this.name = string2Bytes(name);
+    this.name = DFSUtil.string2Bytes(name);
   }
 
   /**
@@ -324,7 +324,7 @@ abstract class INode implements Comparab
     }
     byte[][] bytes = new byte[strings.length][];
     for (int i = 0; i < strings.length; i++)
-      bytes[i] = string2Bytes(strings[i]);
+      bytes[i] = DFSUtil.string2Bytes(strings[i]);
     return bytes;
   }
 
@@ -393,30 +393,6 @@ abstract class INode implements Comparab
     }
     return len1 - len2;
   }
-
-  /**
-   * Converts a byte array to a string using UTF8 encoding.
-   */
-  static String bytes2String(byte[] bytes) {
-    try {
-      return new String(bytes, "UTF8");
-    } catch(UnsupportedEncodingException e) {
-      assert false : "UTF8 encoding is not supported ";
-    }
-    return null;
-  }
-
-  /**
-   * Converts a string to a byte array using UTF8 encoding.
-   */
-  static byte[] string2Bytes(String str) {
-    try {
-      return str.getBytes("UTF8");
-    } catch(UnsupportedEncodingException e) {
-      assert false : "UTF8 encoding is not supported ";
-    }
-    return null;
-  }
   
   
   LocatedBlocks createLocatedBlocks(List<LocatedBlock> blocks) {

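The UTF-8 helpers deleted above are not gone: INode now delegates to DFSUtil, which presumably hosts the same bodies so that code outside the namenode package can share them. Mirroring the removed methods:

    import java.io.UnsupportedEncodingException;

    // Assumed DFSUtil equivalents of the methods removed from INode above.
    static String bytes2String(byte[] bytes) {
      try {
        return new String(bytes, "UTF8");
      } catch (UnsupportedEncodingException e) {
        assert false : "UTF8 encoding is not supported ";
      }
      return null;
    }

    static byte[] string2Bytes(String str) {
      try {
        return str.getBytes("UTF8");
      } catch (UnsupportedEncodingException e) {
        assert false : "UTF8 encoding is not supported ";
      }
      return null;
    }
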
Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java?rev=1077206&r1=1077205&r2=1077206&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java Fri Mar  4 03:51:49 2011
@@ -25,6 +25,7 @@ import java.util.List;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.permission.PermissionStatus;
+import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.Block;
 
 /**
@@ -95,7 +96,7 @@ class INodeDirectory extends INode {
   }
   
   INode getChild(String name) {
-    return getChildINode(string2Bytes(name));
+    return getChildINode(DFSUtil.string2Bytes(name));
   }
 
   private INode getChildINode(byte[] name) {

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java?rev=1077206&r1=1077205&r2=1077206&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java Fri Mar  4 03:51:49 2011
@@ -17,9 +17,10 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
-import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.HftpFileSystem;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
+import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.util.VersionInfo;
 
@@ -59,10 +60,10 @@ public class ListPathsServlet extends Df
    * Node information includes path, modification, permission, owner and group.
    * For files, it also includes size, replication and block-size. 
    */
-  static void writeInfo(FileStatus i, XMLOutputter doc) throws IOException {
+  static void writeInfo(String parent, HdfsFileStatus i, XMLOutputter doc) throws IOException {
     final SimpleDateFormat ldf = df.get();
     doc.startTag(i.isDir() ? "directory" : "file");
-    doc.attribute("path", i.getPath().toUri().getPath());
+    doc.attribute("path", i.getFullPath(new Path(parent)).toUri().getPath());
     doc.attribute("modified", ldf.format(new Date(i.getModificationTime())));
     doc.attribute("accesstime", ldf.format(new Date(i.getAccessTime())));
     if (!i.isDir()) {
@@ -148,9 +149,9 @@ public class ListPathsServlet extends Df
         doc.attribute(m.getKey(), m.getValue());
       }
 
-      FileStatus base = nnproxy.getFileInfo(path);
+      HdfsFileStatus base = nnproxy.getFileInfo(path);
       if ((base != null) && base.isDir()) {
-        writeInfo(base, doc);
+        writeInfo(path, base, doc);
       }
 
       Stack<String> pathstack = new Stack<String>();
@@ -158,20 +159,21 @@ public class ListPathsServlet extends Df
       while (!pathstack.empty()) {
         String p = pathstack.pop();
         try {
-          FileStatus[] listing = nnproxy.getListing(p);
+          HdfsFileStatus[] listing = nnproxy.getListing(p);
           if (listing == null) {
             LOG.warn("ListPathsServlet - Path " + p + " does not exist");
             continue;
           }
-          for (FileStatus i : listing) {
-            if (exclude.matcher(i.getPath().getName()).matches()
-                || !filter.matcher(i.getPath().getName()).matches()) {
+          for (HdfsFileStatus i : listing) {
+            String localName = i.getLocalName();
+            if (exclude.matcher(localName).matches()
+                || !filter.matcher(localName).matches()) {
               continue;
             }
             if (recur && i.isDir()) {
-              pathstack.push(i.getPath().toUri().getPath());
+              pathstack.push(new Path(p, localName).toUri().getPath());
             }
-            writeInfo(i, doc);
+            writeInfo(p, i, doc);
           }
         }
         catch(RemoteException re) {re.writeXml(p, doc);}

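Since getListing now returns entries that know only their local names, every caller has to rebuild absolute paths against the directory it queried, as the servlet does above with new Path(p, localName). The same pattern in isolation (the helper name is hypothetical):

    // Illustrative helper: list a directory and recover absolute child paths,
    // given that listing entries now carry only their last path component.
    static Path[] fullPaths(ClientProtocol nnproxy, String p) throws IOException {
      HdfsFileStatus[] listing = nnproxy.getListing(p);
      Path[] full = new Path[listing.length];
      for (int i = 0; i < listing.length; i++) {
        // new Path(parent, child) joins the directory and the local name;
        // full[i].toUri().getPath() yields what i.getPath() used to return.
        full[i] = new Path(p, listing[i].getLocalName());
      }
      return full;
    }
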
Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=1077206&r1=1077205&r2=1077206&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java Fri Mar  4 03:51:49 2011
@@ -20,7 +20,6 @@ package org.apache.hadoop.hdfs.server.na
 import org.apache.commons.logging.*;
 
 import org.apache.hadoop.fs.ContentSummary;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.fs.FileSystem;
@@ -584,8 +583,8 @@ public class NameNode implements ClientP
 
   /**
    */
-  public FileStatus[] getListing(String src) throws IOException {
-    FileStatus[] files = namesystem.getListing(src);
+  public HdfsFileStatus[] getListing(String src) throws IOException {
+    HdfsFileStatus[] files = namesystem.getListing(src);
     if (files != null) {
       myMetrics.numGetListingOps.inc();
     }
@@ -599,7 +598,7 @@ public class NameNode implements ClientP
    * @return object containing information regarding the file
    *         or null if file not found
    */
-  public FileStatus getFileInfo(String src)  throws IOException {
+  public HdfsFileStatus getFileInfo(String src)  throws IOException {
     myMetrics.numFileInfoOps.inc();
     return namesystem.getFileInfo(src);
   }

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java?rev=1077206&r1=1077205&r2=1077206&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java Fri Mar  4 03:51:49 2011
@@ -32,11 +32,11 @@ import java.util.TreeSet;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants;
@@ -140,10 +140,10 @@ public class NamenodeFsck {
     try {
       Result res = new Result(conf);
 
-      final FileStatus[] files = namenode.getListing(path);
+      final HdfsFileStatus[] files = namenode.getListing(path);
       if (files != null) {
         for (int i = 0; i < files.length; i++) {
-          check(files[i], res);
+          check(path, files[i], res);
         }
         out.println(res);
         out.println(" Number of data-nodes:\t\t" + totalDatanodes);
@@ -170,12 +170,12 @@ public class NamenodeFsck {
     }
   }
   
-  private void check(FileStatus file, Result res) throws IOException {
-    String path = file.getPath().toString();
+  private void check(String parent, HdfsFileStatus file, Result res) throws IOException {
+    String path = file.getFullName(parent);
     boolean isOpen = false;
 
     if (file.isDir()) {
-      final FileStatus[] files = namenode.getListing(path);
+      final HdfsFileStatus[] files = namenode.getListing(path);
       if (files == null) {
         return;
       }
@@ -184,7 +184,7 @@ public class NamenodeFsck {
       }
       res.totalDirs++;
       for (int i = 0; i < files.length; i++) {
-        check(files[i], res);
+        check(path, files[i], res);
       }
       return;
     }
@@ -303,7 +303,7 @@ public class NamenodeFsck {
         break;
       case FIXING_MOVE:
         if (!isOpen)
-          lostFoundMove(file, blocks);
+          lostFoundMove(parent, file, blocks);
         break;
       case FIXING_DELETE:
         if (!isOpen)
@@ -322,7 +322,7 @@ public class NamenodeFsck {
     }
   }
   
-  private void lostFoundMove(FileStatus file, LocatedBlocks blocks)
+  private void lostFoundMove(String parent, HdfsFileStatus file, LocatedBlocks blocks)
     throws IOException {
     final DFSClient dfs = new DFSClient(NameNode.getAddress(conf), conf);
     try {
@@ -332,8 +332,9 @@ public class NamenodeFsck {
     if (!lfInitedOk) {
       return;
     }
-    String target = lostFound + file.getPath();
-    String errmsg = "Failed to move " + file.getPath() + " to /lost+found";
+    String fullName = file.getFullName(parent);
+    String target = lostFound + fullName;
+    String errmsg = "Failed to move " + fullName + " to /lost+found";
     try {
       if (!namenode.mkdirs(target, file.getPermission())) {
         LOG.warn(errmsg);
@@ -377,8 +378,8 @@ public class NamenodeFsck {
         }
       }
       if (fos != null) fos.close();
-      LOG.warn("\n - moved corrupted file " + file.getPath() + " to /lost+found");
-      dfs.delete(file.getPath().toString(), true);
+      LOG.warn("\n - moved corrupted file " + fullName + " to /lost+found");
+      dfs.delete(fullName, true);
     }  catch (Exception e) {
       e.printStackTrace();
       LOG.warn(errmsg + ": " + e.getMessage());

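The fsck changes follow the same discipline: check() threads the parent path through the recursion, and each level's full name becomes the parent for its children. Roughly, a simplified sketch of that shape (not the committed method, which also accumulates results and handles files):

    // Walk the namespace, deriving each entry's absolute path from its parent.
    private void walk(String parent, HdfsFileStatus stat) throws IOException {
      String path = stat.getFullName(parent);   // absolute path of this entry
      if (stat.isDir()) {
        HdfsFileStatus[] children = namenode.getListing(path);
        if (children == null) {
          return;                               // directory vanished mid-walk
        }
        for (HdfsFileStatus child : children) {
          walk(path, child);                    // this path parents the children
        }
      }
    }
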
Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestDFSClientRetries.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestDFSClientRetries.java?rev=1077206&r1=1077205&r2=1077206&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestDFSClientRetries.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestDFSClientRetries.java Fri Mar  4 03:51:49 2011
@@ -189,7 +189,7 @@ public class TestDFSClientRetries extend
 
     public boolean mkdirs(String src, FsPermission masked) throws IOException { return false; }
 
-    public FileStatus[] getListing(String src) throws IOException { return null; }
+    public HdfsFileStatus[] getListing(String src) throws IOException { return null; }
 
     public void renewLease(String clientName) throws IOException {}
 
@@ -213,7 +213,7 @@ public class TestDFSClientRetries extend
 
     public void metaSave(String filename) throws IOException {}
 
-    public FileStatus getFileInfo(String src) throws IOException { return null; }
+    public HdfsFileStatus getFileInfo(String src) throws IOException { return null; }
 
     public ContentSummary getContentSummary(String path) throws IOException { return null; }
 

Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java?rev=1077206&r1=1077205&r2=1077206&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java Fri Mar  4 03:51:49 2011
@@ -28,9 +28,9 @@ import java.util.zip.CRC32;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSInputStream;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
+import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -130,11 +130,11 @@ public class TestDFSUpgradeFromImage ext
   private void verifyDir(DFSClient client, String dir) 
                                            throws IOException {
     
-    FileStatus[] fileArr = client.listPaths(dir);
+    HdfsFileStatus[] fileArr = client.listPaths(dir);
     TreeMap<String, Boolean> fileMap = new TreeMap<String, Boolean>();
     
-    for(FileStatus file : fileArr) {
-      String path = file.getPath().toString();
+    for(HdfsFileStatus file : fileArr) {
+      String path = file.getFullName(dir);
       fileMap.put(path, Boolean.valueOf(file.isDir()));
     }
     

Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestFileStatus.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestFileStatus.java?rev=1077206&r1=1077205&r2=1077206&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestFileStatus.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestFileStatus.java Fri Mar  4 03:51:49 2011
@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 
 /**
@@ -81,7 +82,7 @@ public class TestFileStatus extends Test
                  fs.getFileStatus(path).isDir() == true);
       
       // make sure getFileInfo returns null for files which do not exist
-      FileStatus fileInfo = dfsClient.getFileInfo("/noSuchFile");
+      HdfsFileStatus fileInfo = dfsClient.getFileInfo("/noSuchFile");
       assertTrue(fileInfo == null);
 
       // create a file in home directory
@@ -91,21 +92,48 @@ public class TestFileStatus extends Test
       System.out.println("Created file filestatus.dat with one "
                          + " replicas.");
       checkFile(fs, file1, 1);
-      assertTrue(file1 + " should be a file", 
-                  fs.getFileStatus(file1).isDir() == false);
-      assertTrue(fs.getFileStatus(file1).getBlockSize() == blockSize);
-      assertTrue(fs.getFileStatus(file1).getReplication() == 1);
-      assertTrue(fs.getFileStatus(file1).getLen() == fileSize);
       System.out.println("Path : \"" + file1 + "\"");
 
+      // test getFileStatus on a file
+      FileStatus status = fs.getFileStatus(file1);
+      assertTrue(file1 + " should be a file", 
+          status.isDir() == false);
+      assertTrue(status.getBlockSize() == blockSize);
+      assertTrue(status.getReplication() == 1);
+      assertTrue(status.getLen() == fileSize);
+      assertEquals(fs.makeQualified(file1), 
+          status.getPath().toString());
+
+      // test listStatus on a file
+      FileStatus[] stats = fs.listStatus(file1);
+      assertEquals(1, stats.length);
+      status = stats[0];
+      assertTrue(file1 + " should be a file", 
+          status.isDir() == false);
+      assertTrue(status.getBlockSize() == blockSize);
+      assertTrue(status.getReplication() == 1);
+      assertTrue(status.getLen() == fileSize);
+      assertEquals(fs.makeQualified(file1).toString(), 
+          status.getPath().toString());
+
       // create an empty directory
       //
       Path parentDir = new Path("/test");
       Path dir = new Path("/test/mkdirs");
       assertTrue(fs.mkdirs(dir));
       assertTrue(fs.exists(dir));
-      assertTrue(dir + " should be a directory", 
-                 fs.getFileStatus(path).isDir() == true);
+      System.out.println("Dir : \"" + dir + "\"");
+
+      // test getFileStatus on an empty directory
+      status = fs.getFileStatus(dir);
+      assertTrue(dir + " should be a directory", status.isDir());
+      assertTrue(dir + " should be zero size ", status.getLen() == 0);
+      assertEquals(fs.makeQualified(dir).toString(), 
+          status.getPath().toString());
+
+      // test listStatus on an empty directory
+      stats = fs.listStatus(dir);
+      assertEquals(dir + " should be empty", 0, stats.length);
       assertTrue(dir + " should be zero size ",
                  fs.getContentSummary(dir).getLength() == 0);
       assertTrue(dir + " should be zero size ",
@@ -114,7 +142,7 @@ public class TestFileStatus extends Test
 
       // create another file that is smaller than a block.
       //
-      Path file2 = new Path("/test/mkdirs/filestatus2.dat");
+      Path file2 = new Path(dir, "filestatus2.dat");
       writeFile(fs, file2, 1, blockSize/4, blockSize);
       System.out.println("Created file filestatus2.dat with one "
                          + " replicas.");
@@ -122,11 +150,14 @@ public class TestFileStatus extends Test
       System.out.println("Path : \"" + file2 + "\"");
 
       // verify file attributes
-      assertTrue(fs.getFileStatus(file2).getBlockSize() == blockSize);
-      assertTrue(fs.getFileStatus(file2).getReplication() == 1);
+      status = fs.getFileStatus(file2);
+      assertTrue(status.getBlockSize() == blockSize);
+      assertTrue(status.getReplication() == 1);
+      assertEquals(fs.makeQualified(file2).toString(), 
+          status.getPath().toString());
 
       // create another file in the same directory
-      Path file3 = new Path("/test/mkdirs/filestatus3.dat");
+      Path file3 = new Path(dir, "filestatus3.dat");
       writeFile(fs, file3, 1, blockSize/4, blockSize);
       System.out.println("Created file filestatus3.dat with one "
                          + " replicas.");
@@ -136,7 +167,17 @@ public class TestFileStatus extends Test
       // of the two files
       assertTrue(dir + " size should be " + (blockSize/2), 
                  blockSize/2 == fs.getContentSummary(dir).getLength());
-    } finally {
+       
+       // test listStatus on a non-empty directory
+       stats = fs.listStatus(dir);
+       assertEquals(dir + " should have two entries", 2, stats.length);
+       String qualifiedFile2 = fs.makeQualified(file2).toString();
+       String qualifiedFile3 = fs.makeQualified(file3).toString();
+       for(FileStatus stat:stats) {
+         String statusFullName = stat.getPath().toString();
+         assertTrue(qualifiedFile2.equals(statusFullName)
+           || qualifiedFile3.toString().equals(statusFullName));
+       }    } finally {
       fs.close();
       cluster.shutdown();
     }

Modified: hadoop/common/branches/branch-0.20-security-patches/src/webapps/datanode/browseDirectory.jsp
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/webapps/datanode/browseDirectory.jsp?rev=1077206&r1=1077205&r2=1077206&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/webapps/datanode/browseDirectory.jsp (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/webapps/datanode/browseDirectory.jsp Fri Mar  4 03:51:49 2011
@@ -76,7 +76,7 @@
         return;
       }
       // directory
-      FileStatus[] files = dfs.listPaths(target);
+      HdfsFileStatus[] files = dfs.listPaths(target);
       //generate a table and dump the info
       String [] headings = { "Name", "Type", "Size", "Replication", 
                               "Block Size", "Modification Time",
@@ -104,7 +104,8 @@
         String cols [] = new String[headings.length];
         for (int i = 0; i < files.length; i++) {
           //Get the location of the first block of the file
-          if (files[i].getPath().toString().endsWith(".crc")) continue;
+          String localname = files[i].getLocalName();
+          if (localname.endsWith(".crc")) continue;
           if (!files[i].isDir()) {
             cols[1] = "file";
             cols[2] = StringUtils.byteDesc(files[i].getLen());
@@ -118,9 +119,9 @@
             cols[4] = "";
           }
           String datanodeUrl = req.getRequestURL()+"?dir="+
-              URLEncoder.encode(files[i].getPath().toString(), "UTF-8") + 
+              URLEncoder.encode(files[i].getFullName(target).toString(), "UTF-8") + 
               "&namenodeInfoPort=" + namenodeInfoPort;
-          cols[0] = "<a href=\""+datanodeUrl+"\">"+files[i].getPath().getName()+"</a>";
+          cols[0] = "<a href=\""+datanodeUrl+"\">"+localname+"</a>";
           cols[5] = FsShell.dateForm.format(new Date((files[i].getModificationTime())));
           cols[6] = files[i].getPermission().toString();
           cols[7] = files[i].getOwner();


