hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cutt...@apache.org
Subject svn commit: r494137 - in /lucene/hadoop/trunk: CHANGES.txt src/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java src/java/org/apache/hadoop/fs/s3/S3FileSystem.java src/test/org/apache/hadoop/fs/s3/S3FileSystemBaseTest.java
Date Mon, 08 Jan 2007 18:21:22 GMT
Author: cutting
Date: Mon Jan  8 10:21:21 2007
New Revision: 494137

URL: http://svn.apache.org/viewvc?view=rev&rev=494137
Log:
HADOOP-857.  Fix S3 FileSystem implementation to permit its use for MapReduce input and output.
 Contributed by Tom White.

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/S3FileSystem.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/s3/S3FileSystemBaseTest.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=494137&r1=494136&r2=494137
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Mon Jan  8 10:21:21 2007
@@ -1,6 +1,12 @@
 Hadoop Change Log
 
 
+Trunk (unreleased changes)
+
+ 1. HADOOP-857.  Fix S3 FileSystem implementation to permit its use
+    for MapReduce input and output.  (Tom White via cutting)
+
+
 Release 0.10.0 - 2007-01-05
 
  1. HADOOP-763. Change DFS namenode benchmark to not use MapReduce.

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java?view=diff&rev=494137&r1=494136&r2=494137
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java Mon Jan  8 10:21:21 2007
@@ -216,7 +216,7 @@
     if (!path.isAbsolute()) {
       throw new IllegalArgumentException("Path must be absolute: " + path);
     }
-    return urlEncode(path.toString());
+    return urlEncode(path.toUri().getPath());
   }
 
   private Path keyToPath(String key) {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/S3FileSystem.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/S3FileSystem.java?view=diff&rev=494137&r1=494136&r2=494137
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/S3FileSystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/S3FileSystem.java Mon Jan  8 10:21:21 2007
@@ -83,12 +83,12 @@
     Path absolutePath = makeAbsolute(path);
     INode inode = store.getINode(absolutePath);
     if (inode == null) {
-      store.storeINode(path, INode.DIRECTORY_INODE);
+      store.storeINode(absolutePath, INode.DIRECTORY_INODE);
     } else if (inode.isFile()) {
       throw new IOException(String.format(
-          "Can't make directory for path %s since it is a file.", path));
+          "Can't make directory for path %s since it is a file.", absolutePath));
     }
-    Path parent = path.getParent();
+    Path parent = absolutePath.getParent();
     return (parent == null || mkdirs(parent));
   }
 
@@ -123,13 +123,14 @@
 
   @Override
   public Path[] listPathsRaw(Path path) throws IOException {
-    INode inode = store.getINode(makeAbsolute(path));
+    Path absolutePath = makeAbsolute(path);
+    INode inode = store.getINode(absolutePath);
     if (inode == null) {
       return null;
     } else if (inode.isFile()) {
-      return new Path[] { path };
+      return new Path[] { absolutePath };
     } else { // directory
-      Set<Path> paths = store.listSubPaths(path);
+      Set<Path> paths = store.listSubPaths(absolutePath);
       return paths.toArray(new Path[0]);
     }
   }
@@ -146,10 +147,6 @@
       short replication, long blockSize, Progressable progress)
       throws IOException {
 
-    if (!isDirectory(file.getParent())) {
-      throw new IOException("Cannot create file " + file
-          + " since parent directory does not exist.");
-    }
     INode inode = store.getINode(makeAbsolute(file));
     if (inode != null) {
       if (overwrite) {
@@ -157,6 +154,13 @@
       } else {
         throw new IOException("File already exists: " + file);
       }
+    } else {
+      Path parent = file.getParent();
+      if (parent != null) {
+        if (!mkdirs(parent)) {
+          throw new IOException("Mkdirs failed to create " + parent.toString());
+        }
+      }      
     }
     return new S3OutputStream(getConf(), store, makeAbsolute(file),
         blockSize, progress);

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/s3/S3FileSystemBaseTest.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/s3/S3FileSystemBaseTest.java?view=diff&rev=494137&r1=494136&r2=494137
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/s3/S3FileSystemBaseTest.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/s3/S3FileSystemBaseTest.java Mon Jan  8 10:21:21 2007
@@ -189,15 +189,17 @@
     
   }
 
-  public void testWriteInNonExistentDirectory() {
+  public void testWriteInNonExistentDirectory() throws IOException {
     Path path = new Path("/test/hadoop/file");    
-    try {
-      s3FileSystem.createRaw(path, false, (short) 1, 128);
-      fail("Should throw IOException.");
-    } catch (IOException e) {
-      // Expected
-    }
+    FSOutputStream out = s3FileSystem.createRaw(path, false, (short) 1, BLOCK_SIZE);
+    out.write(data, 0, BLOCK_SIZE);
+    out.close();
+    
+    assertTrue("Exists", s3FileSystem.exists(path));
+    assertEquals("Length", BLOCK_SIZE, s3FileSystem.getLength(path));
+    assertTrue("Parent exists", s3FileSystem.exists(path.getParent()));
   }
+
 
   public void testRename() throws Exception {
     int len = BLOCK_SIZE;



Mime
View raw message