hadoop-common-commits mailing list archives

From cmcc...@apache.org
Subject svn commit: r1539253 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/java/org/apache/hadoop/fs/s3/ src/main/java/org/apache/hadoop/fs/s3native/
Date Wed, 06 Nov 2013 07:57:02 GMT
Author: cmccabe
Date: Wed Nov  6 07:57:01 2013
New Revision: 1539253

URL: http://svn.apache.org/r1539253
Log:
HADOOP-9623. Update jets3t dependency to 0.9.0 (Amandeep Khurana via Colin Patrick McCabe)

Modified:
    hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
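
Note: the jets3t version number itself is declared in the project POM, which is not part of the file list above. Assuming the usual jets3t Maven coordinates (net.java.dev.jets3t:jets3t), the dependency declaration after this upgrade would look roughly like:

    <dependency>
      <groupId>net.java.dev.jets3t</groupId>
      <artifactId>jets3t</artifactId>
      <version>0.9.0</version>
    </dependency>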

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1539253&r1=1539252&r2=1539253&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Nov  6 07:57:01 2013
@@ -105,6 +105,9 @@ Trunk (Unreleased)
 
     HADOOP-9833 move slf4j to version 1.7.5 (Kousuke Saruta via stevel)
 
+    HADOOP-9623 Update jets3t dependency to 0.9.0.  (Amandeep Khurana via Colin
+    Patrick McCabe)
+
   BUG FIXES
 
     HADOOP-9451. Fault single-layer config if node group topology is enabled.

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java?rev=1539253&r1=1539252&r2=1539253&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java Wed Nov  6 07:57:01 2013
@@ -42,6 +42,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.s3.INode.FileType;
 import org.jets3t.service.S3Service;
 import org.jets3t.service.S3ServiceException;
+import org.jets3t.service.ServiceException;
 import org.jets3t.service.impl.rest.httpclient.RestS3Service;
 import org.jets3t.service.model.S3Bucket;
 import org.jets3t.service.model.S3Object;
@@ -60,8 +61,8 @@ class Jets3tFileSystemStore implements F
   private static final String FILE_SYSTEM_VERSION_NAME = "fs-version";
   private static final String FILE_SYSTEM_VERSION_VALUE = "1";
   
-  private static final Map<String, String> METADATA =
-    new HashMap<String, String>();
+  private static final Map<String, Object> METADATA =
+    new HashMap<String, Object>();
   
   static {
     METADATA.put(FILE_SYSTEM_NAME, FILE_SYSTEM_VALUE);
@@ -165,7 +166,7 @@ class Jets3tFileSystemStore implements F
       throws IOException {
     
     try {
-      S3Object object = s3Service.getObject(bucket, key);
+      S3Object object = s3Service.getObject(bucket.getName(), key);
       if (checkMetadata) {
         checkMetadata(object);
       }
@@ -178,6 +179,9 @@ class Jets3tFileSystemStore implements F
         throw (IOException) e.getCause();
       }
       throw new S3Exception(e);
+    } catch (ServiceException e) {
+      handleServiceException(e);
+      return null;
     }
   }
 
@@ -194,6 +198,9 @@ class Jets3tFileSystemStore implements F
         throw (IOException) e.getCause();
       }
       throw new S3Exception(e);
+    } catch (ServiceException e) {
+      handleServiceException(e);
+      return null;
     }
   }
 
@@ -276,7 +283,7 @@ class Jets3tFileSystemStore implements F
       if (!prefix.endsWith(PATH_DELIMITER)) {
         prefix += PATH_DELIMITER;
       }
-      S3Object[] objects = s3Service.listObjects(bucket, prefix, PATH_DELIMITER);
+      S3Object[] objects = s3Service.listObjects(bucket.getName(), prefix, PATH_DELIMITER);
       Set<Path> prefixes = new TreeSet<Path>();
       for (int i = 0; i < objects.length; i++) {
         prefixes.add(keyToPath(objects[i].getKey()));
@@ -298,7 +305,7 @@ class Jets3tFileSystemStore implements F
       if (!prefix.endsWith(PATH_DELIMITER)) {
         prefix += PATH_DELIMITER;
       }
-      S3Object[] objects = s3Service.listObjects(bucket, prefix, null);
+      S3Object[] objects = s3Service.listObjects(bucket.getName(), prefix, null);
       Set<Path> prefixes = new TreeSet<Path>();
       for (int i = 0; i < objects.length; i++) {
         prefixes.add(keyToPath(objects[i].getKey()));
@@ -385,7 +392,7 @@ class Jets3tFileSystemStore implements F
   @Override
   public void purge() throws IOException {
     try {
-      S3Object[] objects = s3Service.listObjects(bucket);
+      S3Object[] objects = s3Service.listObjects(bucket.getName());
       for (int i = 0; i < objects.length; i++) {
         s3Service.deleteObject(bucket, objects[i].getKey());
       }
@@ -402,7 +409,7 @@ class Jets3tFileSystemStore implements F
     StringBuilder sb = new StringBuilder("S3 Filesystem, ");
     sb.append(bucket.getName()).append("\n");
     try {
-      S3Object[] objects = s3Service.listObjects(bucket, PATH_DELIMITER, null);
+      S3Object[] objects = s3Service.listObjects(bucket.getName(), PATH_DELIMITER, null);
       for (int i = 0; i < objects.length; i++) {
         Path path = keyToPath(objects[i].getKey());
         sb.append(path).append("\n");
@@ -424,4 +431,15 @@ class Jets3tFileSystemStore implements F
     System.out.println(sb);
   }
 
+  private void handleServiceException(ServiceException e) throws IOException {
+      if (e.getCause() instanceof IOException) {
+        throw (IOException) e.getCause();
+      }
+      else {
+        if(LOG.isDebugEnabled()) {
+          LOG.debug("Got ServiceException with Error code: " + e.getErrorCode() + ";and Error
message: " + e.getErrorMessage());
+        }
+      }
+    }
+
 }
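
The pattern above recurs throughout this change: jets3t 0.9.0 favors bucket-name strings over S3Bucket arguments, and those calls surface the broader org.jets3t.service.ServiceException, which the new handleServiceException() helper unwraps. A minimal, self-contained sketch of that call pattern (bucket name, key, and credentials below are hypothetical placeholders, not values from this commit):

    import java.io.IOException;
    import java.io.InputStream;

    import org.jets3t.service.S3Service;
    import org.jets3t.service.ServiceException;
    import org.jets3t.service.impl.rest.httpclient.RestS3Service;
    import org.jets3t.service.model.S3Object;
    import org.jets3t.service.security.AWSCredentials;

    public class Jets3tGetSketch {
      public static void main(String[] args) throws Exception {
        // Hypothetical credentials; jets3t 0.9.0 is assumed, as in this change.
        S3Service s3Service = new RestS3Service(
            new AWSCredentials("ACCESS_KEY", "SECRET_KEY"));
        try {
          // 0.9.0 style: pass the bucket name, not an S3Bucket instance.
          S3Object object = s3Service.getObject("example-bucket", "some/key");
          InputStream in = object.getDataInputStream();
          in.close();
        } catch (ServiceException e) {
          // S3ServiceException extends ServiceException in 0.9.0; unwrap an
          // IOException cause, mirroring handleServiceException() above.
          if (e.getCause() instanceof IOException) {
            throw (IOException) e.getCause();
          }
          throw new IOException(e);
        }
      }
    }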

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java?rev=1539253&r1=1539252&r2=1539253&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3/MigrationTool.java Wed Nov  6 07:57:01 2013
@@ -34,6 +34,7 @@ import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.jets3t.service.S3Service;
 import org.jets3t.service.S3ServiceException;
+import org.jets3t.service.ServiceException;
 import org.jets3t.service.impl.rest.httpclient.RestS3Service;
 import org.jets3t.service.model.S3Bucket;
 import org.jets3t.service.model.S3Object;
@@ -177,7 +178,7 @@ public class MigrationTool extends Confi
   
   private S3Object get(String key) {
     try {
-      return s3Service.getObject(bucket, key);
+      return s3Service.getObject(bucket.getName(), key);
     } catch (S3ServiceException e) {
       if ("NoSuchKey".equals(e.getS3ErrorCode())) {
         return null;
@@ -200,7 +201,7 @@ public class MigrationTool extends Confi
     public Set<Path> listAllPaths() throws IOException {
       try {
         String prefix = urlEncode(Path.SEPARATOR);
-        S3Object[] objects = s3Service.listObjects(bucket, prefix, null);
+        S3Object[] objects = s3Service.listObjects(bucket.getName(), prefix, null);
         Set<Path> prefixes = new TreeSet<Path>();
         for (int i = 0; i < objects.length; i++) {
           prefixes.add(keyToPath(objects[i].getKey()));
@@ -237,7 +238,7 @@ public class MigrationTool extends Confi
 
     private InputStream get(String key) throws IOException {
       try {
-        S3Object object = s3Service.getObject(bucket, key);
+        S3Object object = s3Service.getObject(bucket.getName(), key);
         return object.getDataInputStream();
       } catch (S3ServiceException e) {
         if ("NoSuchKey".equals(e.getS3ErrorCode())) {
@@ -247,6 +248,8 @@ public class MigrationTool extends Confi
           throw (IOException) e.getCause();
         }
         throw new S3Exception(e);
+      } catch (ServiceException e) {
+        return null;
       }
     }
     

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java?rev=1539253&r1=1539252&r2=1539253&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/Jets3tNativeFileSystemStore.java Wed Nov  6 07:57:01 2013
@@ -29,17 +29,21 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.net.URI;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.s3.S3Credentials;
 import org.apache.hadoop.fs.s3.S3Exception;
-import org.jets3t.service.S3ObjectsChunk;
 import org.jets3t.service.S3Service;
 import org.jets3t.service.S3ServiceException;
+import org.jets3t.service.ServiceException;
+import org.jets3t.service.StorageObjectsChunk;
 import org.jets3t.service.impl.rest.httpclient.RestS3Service;
 import org.jets3t.service.model.S3Bucket;
 import org.jets3t.service.model.S3Object;
+import org.jets3t.service.model.StorageObject;
 import org.jets3t.service.security.AWSCredentials;
 
 @InterfaceAudience.Private
@@ -48,7 +52,9 @@ class Jets3tNativeFileSystemStore implem
   
   private S3Service s3Service;
   private S3Bucket bucket;
-  
+  public static final Log LOG =
+      LogFactory.getLog(Jets3tNativeFileSystemStore.class);
+
   @Override
   public void initialize(URI uri, Configuration conf) throws IOException {
     S3Credentials s3Credentials = new S3Credentials();
@@ -59,7 +65,7 @@ class Jets3tNativeFileSystemStore implem
             s3Credentials.getSecretAccessKey());
       this.s3Service = new RestS3Service(awsCredentials);
     } catch (S3ServiceException e) {
-      handleServiceException(e);
+      handleS3ServiceException(e);
     }
     bucket = new S3Bucket(uri.getHost());
   }
@@ -80,7 +86,7 @@ class Jets3tNativeFileSystemStore implem
       }
       s3Service.putObject(bucket, object);
     } catch (S3ServiceException e) {
-      handleServiceException(e);
+      handleS3ServiceException(e);
     } finally {
       if (in != null) {
         try {
@@ -101,53 +107,85 @@ class Jets3tNativeFileSystemStore implem
       object.setContentLength(0);
       s3Service.putObject(bucket, object);
     } catch (S3ServiceException e) {
-      handleServiceException(e);
+      handleS3ServiceException(e);
     }
   }
   
   @Override
   public FileMetadata retrieveMetadata(String key) throws IOException {
     try {
-      S3Object object = s3Service.getObjectDetails(bucket, key);
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Getting metadata for key: " + key + " from bucket:" + bucket.getName());
+      }
+      S3Object object = s3Service.getObject(bucket.getName(), key);
       return new FileMetadata(key, object.getContentLength(),
           object.getLastModifiedDate().getTime());
     } catch (S3ServiceException e) {
       // Following is brittle. Is there a better way?
-      if (e.getMessage().contains("ResponseCode=404")) {
-        return null;
+      if (e.getS3ErrorCode().matches("NoSuchKey")) {
+        return null; //return null if key not found
       }
-      handleServiceException(e);
+      handleS3ServiceException(e);
       return null; //never returned - keep compiler happy
     }
   }
-  
+
+  /**
+   * @param key
+   * The key is the object name that is being retrieved from the S3 bucket
+   * @return
+   * This method returns null if the key is not found
+   * @throws IOException
+   */
+
   @Override
   public InputStream retrieve(String key) throws IOException {
     try {
-      S3Object object = s3Service.getObject(bucket, key);
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Getting key: " + key + " from bucket:" + bucket.getName());
+      }
+      S3Object object = s3Service.getObject(bucket.getName(), key);
       return object.getDataInputStream();
     } catch (S3ServiceException e) {
-      handleServiceException(key, e);
+      handleS3ServiceException(key, e);
       return null; //never returned - keep compiler happy
+    } catch (ServiceException e) {
+      handleServiceException(e);
+      return null; //return null if key not found
     }
   }
-  
+
+  /**
+   *
+   * @param key
+   * The key is the object name that is being retrieved from the S3 bucket
+   * @return
+   * This method returns null if the key is not found
+   * @throws IOException
+   */
+
   @Override
   public InputStream retrieve(String key, long byteRangeStart)
-    throws IOException {
+          throws IOException {
     try {
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Getting key: " + key + " from bucket:" + bucket.getName() + " with byteRangeStart:
" + byteRangeStart);
+      }
       S3Object object = s3Service.getObject(bucket, key, null, null, null,
                                             null, byteRangeStart, null);
       return object.getDataInputStream();
     } catch (S3ServiceException e) {
-      handleServiceException(key, e);
+      handleS3ServiceException(key, e);
       return null; //never returned - keep compiler happy
+    } catch (ServiceException e) {
+      handleServiceException(e);
+      return null; //return null if key not found
     }
   }
 
   @Override
   public PartialListing list(String prefix, int maxListingLength)
-    throws IOException {
+          throws IOException {
     return list(prefix, maxListingLength, null, false);
   }
   
@@ -158,6 +196,13 @@ class Jets3tNativeFileSystemStore implem
     return list(prefix, recurse ? null : PATH_DELIMITER, maxListingLength, priorLastKey);
   }
 
+  /**
+   *
+   * @return
+   * This method returns null if the list could not be populated
+   * due to S3 giving ServiceException
+   * @throws IOException
+   */
 
   private PartialListing list(String prefix, String delimiter,
       int maxListingLength, String priorLastKey) throws IOException {
@@ -165,52 +210,63 @@ class Jets3tNativeFileSystemStore implem
       if (prefix.length() > 0 && !prefix.endsWith(PATH_DELIMITER)) {
         prefix += PATH_DELIMITER;
       }
-      S3ObjectsChunk chunk = s3Service.listObjectsChunked(bucket.getName(),
+      StorageObjectsChunk chunk = s3Service.listObjectsChunked(bucket.getName(),
           prefix, delimiter, maxListingLength, priorLastKey);
       
       FileMetadata[] fileMetadata =
         new FileMetadata[chunk.getObjects().length];
       for (int i = 0; i < fileMetadata.length; i++) {
-        S3Object object = chunk.getObjects()[i];
+        StorageObject object = chunk.getObjects()[i];
         fileMetadata[i] = new FileMetadata(object.getKey(),
             object.getContentLength(), object.getLastModifiedDate().getTime());
       }
       return new PartialListing(chunk.getPriorLastKey(), fileMetadata,
           chunk.getCommonPrefixes());
     } catch (S3ServiceException e) {
-      handleServiceException(e);
+      handleS3ServiceException(e);
       return null; //never returned - keep compiler happy
+    } catch (ServiceException e) {
+      handleServiceException(e);
+      return null; //return null if list could not be populated
     }
   }
 
   @Override
   public void delete(String key) throws IOException {
     try {
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Deleting key:" + key + "from bucket" + bucket.getName());
+      }
       s3Service.deleteObject(bucket, key);
     } catch (S3ServiceException e) {
-      handleServiceException(key, e);
+      handleS3ServiceException(key, e);
     }
   }
   
   @Override
   public void copy(String srcKey, String dstKey) throws IOException {
     try {
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Copying srcKey: " + srcKey + "to dstKey: " + dstKey + "in bucket: " +
bucket.getName());
+      }
       s3Service.copyObject(bucket.getName(), srcKey, bucket.getName(),
           new S3Object(dstKey), false);
     } catch (S3ServiceException e) {
-      handleServiceException(srcKey, e);
+      handleS3ServiceException(srcKey, e);
+    } catch (ServiceException e) {
+      handleServiceException(e);
     }
   }
 
   @Override
   public void purge(String prefix) throws IOException {
     try {
-      S3Object[] objects = s3Service.listObjects(bucket, prefix, null);
+      S3Object[] objects = s3Service.listObjects(bucket.getName(), prefix, null);
       for (S3Object object : objects) {
         s3Service.deleteObject(bucket, object.getKey());
       }
     } catch (S3ServiceException e) {
-      handleServiceException(e);
+      handleS3ServiceException(e);
     }
   }
 
@@ -219,30 +275,44 @@ class Jets3tNativeFileSystemStore implem
     StringBuilder sb = new StringBuilder("S3 Native Filesystem, ");
     sb.append(bucket.getName()).append("\n");
     try {
-      S3Object[] objects = s3Service.listObjects(bucket);
+      S3Object[] objects = s3Service.listObjects(bucket.getName());
       for (S3Object object : objects) {
         sb.append(object.getKey()).append("\n");
       }
     } catch (S3ServiceException e) {
-      handleServiceException(e);
+      handleS3ServiceException(e);
     }
     System.out.println(sb);
   }
 
-  private void handleServiceException(String key, S3ServiceException e) throws IOException {
+  private void handleS3ServiceException(String key, S3ServiceException e) throws IOException {
     if ("NoSuchKey".equals(e.getS3ErrorCode())) {
       throw new FileNotFoundException("Key '" + key + "' does not exist in S3");
     } else {
-      handleServiceException(e);
+      handleS3ServiceException(e);
     }
   }
 
-  private void handleServiceException(S3ServiceException e) throws IOException {
+  private void handleS3ServiceException(S3ServiceException e) throws IOException {
     if (e.getCause() instanceof IOException) {
       throw (IOException) e.getCause();
     }
     else {
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("S3 Error code: " + e.getS3ErrorCode() + "; S3 Error message: " + e.getS3ErrorMessage());
+      }
       throw new S3Exception(e);
     }
   }
+
+  private void handleServiceException(ServiceException e) throws IOException {
+    if (e.getCause() instanceof IOException) {
+      throw (IOException) e.getCause();
+    }
+    else {
+      if(LOG.isDebugEnabled()) {
+        LOG.debug("Got ServiceException with Error code: " + e.getErrorCode() + ";and Error
message: " + e.getErrorMessage());
+      }
+    }
+  }
 }
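
The listing path likewise moves from S3ObjectsChunk and S3Object to the storage-generic StorageObjectsChunk and StorageObject types. A minimal sketch of the chunked-listing call used in list() above, with a hypothetical bucket name, prefix, and credentials:

    import org.jets3t.service.S3Service;
    import org.jets3t.service.StorageObjectsChunk;
    import org.jets3t.service.impl.rest.httpclient.RestS3Service;
    import org.jets3t.service.model.StorageObject;
    import org.jets3t.service.security.AWSCredentials;

    public class Jets3tListSketch {
      public static void main(String[] args) throws Exception {
        // Hypothetical credentials and bucket name.
        S3Service s3Service = new RestS3Service(
            new AWSCredentials("ACCESS_KEY", "SECRET_KEY"));
        // jets3t 0.9.0 returns the storage-generic chunk type instead of S3ObjectsChunk.
        StorageObjectsChunk chunk = s3Service.listObjectsChunked(
            "example-bucket",   // bucket name
            "user/",            // key prefix
            "/",                // delimiter; null would list recursively
            1000,               // maximum number of objects per chunk
            null);              // prior last key; null starts from the beginning
        for (StorageObject object : chunk.getObjects()) {
          System.out.println(object.getKey() + "\t" + object.getContentLength());
        }
      }
    }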

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java?rev=1539253&r1=1539252&r2=1539253&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java Wed Nov  6 07:57:01 2013
@@ -273,7 +273,7 @@ public class NativeS3FileSystem extends 
     setConf(conf);
     this.uri = URI.create(uri.getScheme() + "://" + uri.getAuthority());
     this.workingDir =
-      new Path("/user", System.getProperty("user.name")).makeQualified(this);
+      new Path("/user", System.getProperty("user.name")).makeQualified(this.uri, this.getWorkingDirectory());
   }
   
   private static NativeFileSystemStore createDefaultStore(Configuration conf) {
@@ -511,11 +511,11 @@ public class NativeS3FileSystem extends 
   
   private FileStatus newFile(FileMetadata meta, Path path) {
     return new FileStatus(meta.getLength(), false, 1, getDefaultBlockSize(),
-        meta.getLastModified(), path.makeQualified(this));
+        meta.getLastModified(), path.makeQualified(this.getUri(), this.getWorkingDirectory()));
   }
   
   private FileStatus newDirectory(Path path) {
-    return new FileStatus(0, true, 1, 0, 0, path.makeQualified(this));
+    return new FileStatus(0, true, 1, 0, 0, path.makeQualified(this.getUri(), this.getWorkingDirectory()));
   }
 
   @Override
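
Here the deprecated Path.makeQualified(FileSystem) calls are replaced with the (URI, Path) overload, using the filesystem's URI and working directory. A minimal sketch with hypothetical values:

    import java.net.URI;

    import org.apache.hadoop.fs.Path;

    public class MakeQualifiedSketch {
      public static void main(String[] args) {
        URI fsUri = URI.create("s3n://example-bucket");  // filesystem URI (hypothetical)
        Path workingDir = new Path("/user/alice");       // working directory (hypothetical)
        // Relative paths are resolved against the working directory and pick up
        // the filesystem's scheme and authority.
        Path qualified = new Path("data/part-00000").makeQualified(fsUri, workingDir);
        System.out.println(qualified);  // s3n://example-bucket/user/alice/data/part-00000
      }
    }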


