hadoop-common-commits mailing list archives

From s..@apache.org
Subject svn commit: r648417 - in /hadoop/core/trunk: ./ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/fs/permission/ src/java/org/apache/hadoop/ipc/ src/test/org/apache/hadoop/dfs/
Date Tue, 15 Apr 2008 21:29:06 GMT
Author: shv
Date: Tue Apr 15 14:29:03 2008
New Revision: 648417

URL: http://svn.apache.org/viewvc?rev=648417&view=rev
Log:
HADOOP-3225. Unwrapping methods of RemoteException should initialize detailMessage field.
Contributed by Mahadev Konar, Konstantin Shvachko, Chris Douglas.
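
In short, the unwrapping path now instantiates the wrapped exception through its
String constructor so that the server-side message (Throwable's detailMessage)
survives the unwrap. A minimal standalone sketch of that strategy, not part of
the patch (FileNotFoundException and the message text are illustrative stand-ins):

  import java.io.FileNotFoundException;
  import java.io.IOException;
  import java.lang.reflect.Constructor;

  public class UnwrapSketch {
    // Reflectively build an IOException subclass from a message, mirroring
    // the new RemoteException#instantiateException logic.
    static IOException instantiate(Class<? extends IOException> cls, String msg)
        throws Exception {
      Constructor<? extends IOException> cn = cls.getConstructor(String.class);
      cn.setAccessible(true);
      return cn.newInstance(msg);
    }

    public static void main(String[] args) throws Exception {
      IOException ex = instantiate(FileNotFoundException.class, "/foo: no such file");
      System.out.println(ex.getMessage());   // prints the message instead of null
    }
  }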

Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/java/org/apache/hadoop/fs/FsShell.java
    hadoop/core/trunk/src/java/org/apache/hadoop/fs/permission/AccessControlException.java
    hadoop/core/trunk/src/java/org/apache/hadoop/ipc/RemoteException.java
    hadoop/core/trunk/src/test/org/apache/hadoop/dfs/TestCheckpoint.java
    hadoop/core/trunk/src/test/org/apache/hadoop/dfs/TestDFSShell.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=648417&r1=648416&r2=648417&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Tue Apr 15 14:29:03 2008
@@ -595,6 +595,9 @@
     HADOOP-3229. Report progress when collecting records from the mapper and
     the combiner. (Doug Cutting via cdouglas)
 
+    HADOOP-3225. Unwrapping methods of RemoteException should initialize
+    detailMessage field. (Mahadev Konar, shv, cdouglas)
+
 Release 0.16.3 - 2008-04-16
 
   BUG FIXES

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/fs/FsShell.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/fs/FsShell.java?rev=648417&r1=648416&r2=648417&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/fs/FsShell.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/fs/FsShell.java Tue Apr 15 14:29:03 2008
@@ -915,8 +915,7 @@
         try {
           String[] content;
           content = e.getLocalizedMessage().split("\n");
-          System.err.println(cmd.substring(1) + ": " +
-                             content[0]);
+          System.err.println(cmd.substring(1) + ": " + content[0]);
         } catch (Exception ex) {
           System.err.println(cmd.substring(1) + ": " +
                              ex.getLocalizedMessage());
@@ -1563,8 +1562,12 @@
         // IO exception encountered locally.
         //
         exitCode = -1;
+        String content = e.getLocalizedMessage();
+        if (content != null) {
+          content = content.split("\n")[0];
+        }
         System.err.println(cmd.substring(1) + ": " +
-                           e.getLocalizedMessage());
+                          content);
       }
     }
     return exitCode;
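
The second FsShell hunk trims the error output to the first line of the exception
message while tolerating exceptions that carry no message at all (calling
split("\n") on a null String would throw a NullPointerException). A hedged
illustration of that guard, outside the patch and with a made-up message:

  import java.io.IOException;

  public class FirstLineSketch {
    public static void main(String[] args) {
      IOException ioe = new IOException("Permission denied.\n...stack detail...");
      String msg = ioe.getLocalizedMessage();   // may be null for some exceptions
      if (msg != null) {
        msg = msg.split("\n")[0];               // keep only the first line
      }
      System.err.println("ls: " + msg);         // prints "ls: Permission denied."
    }
  }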

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/fs/permission/AccessControlException.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/fs/permission/AccessControlException.java?rev=648417&r1=648416&r2=648417&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/fs/permission/AccessControlException.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/fs/permission/AccessControlException.java Tue Apr 15 14:29:03 2008
@@ -28,7 +28,9 @@
    * Default constructor is needed for unwrapping from 
    * {@link org.apache.hadoop.ipc.RemoteException}.
    */
-  public AccessControlException() {}
+  public AccessControlException() {
+    super("Permission denied.");
+  }
 
   /**
    * Constructs an {@link AccessControlException}
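
With the default constructor now delegating to super("Permission denied."), an
AccessControlException rebuilt through a no-arg path carries a readable message
instead of null. A tiny hedged check, not from the patch:

  import org.apache.hadoop.fs.permission.AccessControlException;

  public class DefaultMessageSketch {
    public static void main(String[] args) {
      // After this change the message is "Permission denied." rather than null.
      System.out.println(new AccessControlException().getMessage());
    }
  }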

Modified: hadoop/core/trunk/src/java/org/apache/hadoop/ipc/RemoteException.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/java/org/apache/hadoop/ipc/RemoteException.java?rev=648417&r1=648416&r2=648417&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/ipc/RemoteException.java (original)
+++ hadoop/core/trunk/src/java/org/apache/hadoop/ipc/RemoteException.java Tue Apr 15 14:29:03 2008
@@ -19,6 +19,7 @@
 package org.apache.hadoop.ipc;
 
 import java.io.IOException;
+import java.lang.reflect.Constructor;
 
 public class RemoteException extends IOException {
   private String className;
@@ -36,50 +37,53 @@
    * If this remote exception wraps up one of the lookupTypes
    * then return this exception.
    * <p>
-   * Unwraps any IOException that has a default constructor.
+   * Unwraps any IOException.
    * 
    * @param lookupTypes the desired exception class.
    * @return IOException, which is either the lookupClass exception or this.
    */
-  public IOException unwrapRemoteException(Class... lookupTypes) {
+  public IOException unwrapRemoteException(Class<?>... lookupTypes) {
     if(lookupTypes == null)
       return this;
-    for(Class lookupClass : lookupTypes) {
-      if(!IOException.class.isAssignableFrom(lookupClass))
+    for(Class<?> lookupClass : lookupTypes) {
+      if(!lookupClass.getName().equals(getClassName()))
         continue;
-      if(lookupClass.getName().equals(getClassName())) {
-        try {
-          IOException ex = (IOException)lookupClass.newInstance();
-          ex.initCause(this);
-          return ex;
-        } catch(Exception e) {
-          // cannot instantiate lookupClass, just return this
-          return this;
-        }
-      } 
+      try {
+        return instantiateException(lookupClass.asSubclass(IOException.class));
+      } catch(Exception e) {
+        // cannot instantiate lookupClass, just return this
+        return this;
+      }
     }
+    // wrapped up exception is not in lookupTypes, just return this
     return this;
   }
 
   /**
-   * If this remote exception wraps an IOException that has a default
-   * contructor then instantiate and return the original exception.
-   * Otherwise return this.
+   * Instantiate and return the exception wrapped up by this remote exception.
+   * 
+   * <p> This unwraps any <code>Throwable</code> that has a constructor taking
+   * a <code>String</code> as a parameter.
+   * Otherwise it returns this.
    * 
-   * @return IOException
+   * @return <code>Throwable
    */
   public IOException unwrapRemoteException() {
-    IOException ex;
     try {
-      Class realClass = Class.forName(getClassName());
-      if(!IOException.class.isAssignableFrom(realClass))
-        return this;
-      ex = (IOException)realClass.newInstance();
-      ex.initCause(this);
-      return ex;
+      Class<?> realClass = Class.forName(getClassName());
+      return instantiateException(realClass.asSubclass(IOException.class));
     } catch(Exception e) {
-      // cannot instantiate the original exception, just throw this
+      // cannot instantiate the original exception, just return this
     }
     return this;
+  }
+
+  private IOException instantiateException(Class<? extends IOException> cls)
+      throws Exception {
+    Constructor<? extends IOException> cn = cls.getConstructor(String.class);
+    cn.setAccessible(true);
+    IOException ex = cn.newInstance(this.getMessage());
+    ex.initCause(this);
+    return ex;
   }
 }
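
A hedged end-to-end sketch of how a client sees the difference (the
RemoteException(className, message) constructor is assumed from the existing
class; the "Permission denied." text comes from the AccessControlException
change above):

  import java.io.IOException;
  import org.apache.hadoop.fs.permission.AccessControlException;
  import org.apache.hadoop.ipc.RemoteException;

  public class UnwrapDemo {
    public static void main(String[] args) {
      RemoteException re = new RemoteException(
          AccessControlException.class.getName(), "Permission denied.");
      IOException unwrapped = re.unwrapRemoteException(AccessControlException.class);
      // Before this patch the unwrapped exception's message was null;
      // now it carries the server-side text.
      System.err.println(unwrapped.getLocalizedMessage());
    }
  }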

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/dfs/TestCheckpoint.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/dfs/TestCheckpoint.java?rev=648417&r1=648416&r2=648417&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/dfs/TestCheckpoint.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/dfs/TestCheckpoint.java Tue Apr 15 14:29:03 2008
@@ -485,6 +485,7 @@
     nn.stop();
 
     // recover failed checkpoint
+    conf.set("dfs.http.address", "0.0.0.0:0");  
     conf.set("dfs.name.dir", primaryDirs);
     args = new String[]{};
     nn = NameNode.createNameNode(args, conf);
@@ -493,6 +494,7 @@
       Storage.rename(new File(dir, "current"), 
                      new File(dir, "lastcheckpoint.tmp"));
     }
+    conf.set("dfs.secondary.http.address", "0.0.0.0:0");
     secondary = new SecondaryNameNode(conf);
     secondary.shutdown();
     for(File dir : secondaryDirs) {
@@ -505,6 +507,7 @@
       Storage.rename(new File(dir, "previous.checkpoint"), 
                      new File(dir, "lastcheckpoint.tmp"));
     }
+    conf.set("dfs.secondary.http.address", "0.0.0.0:0");
     secondary = new SecondaryNameNode(conf);
     secondary.shutdown();
     for(File dir : secondaryDirs) {
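
The added dfs.http.address / dfs.secondary.http.address settings bind the
NameNode and SecondaryNameNode web servers to port 0, letting the OS pick a
free ephemeral port each time, so repeated restarts within the same test JVM
do not collide on the default ports. A minimal sketch of the pattern,
illustrative only and not part of the patch:

  import org.apache.hadoop.conf.Configuration;

  public class EphemeralPortSketch {
    public static void main(String[] args) {
      Configuration conf = new Configuration();
      conf.set("dfs.http.address", "0.0.0.0:0");            // NameNode web UI
      conf.set("dfs.secondary.http.address", "0.0.0.0:0");  // secondary NameNode web UI
    }
  }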

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/dfs/TestDFSShell.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/dfs/TestDFSShell.java?rev=648417&r1=648416&r2=648417&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/dfs/TestDFSShell.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/dfs/TestDFSShell.java Tue Apr 15 14:29:03 2008
@@ -25,7 +25,9 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.shell.*;
+import org.apache.hadoop.security.UnixUserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -1095,6 +1097,44 @@
     String run(int exitcode, String... options) throws IOException;
   }
 
+  public void testRemoteException() throws Exception {
+    UnixUserGroupInformation tmpUGI = new UnixUserGroupInformation("tmpname",
+        new String[] {
+        "mygroup"});
+    MiniDFSCluster dfs = null;
+    PrintStream bak = null;
+    try {
+      Configuration conf = new Configuration();
+      dfs = new MiniDFSCluster(conf, 2, true, null);
+      FileSystem fs = dfs.getFileSystem();
+      Path p = new Path("/foo");
+      fs.mkdirs(p);
+      fs.setPermission(p, new FsPermission((short)0700));
+      UnixUserGroupInformation.saveToConf(conf,
+          UnixUserGroupInformation.UGI_PROPERTY_NAME, tmpUGI);
+      FsShell fshell = new FsShell(conf);
+      bak = System.err;
+      ByteArrayOutputStream out = new ByteArrayOutputStream();
+      PrintStream tmp = new PrintStream(out);
+      System.setErr(tmp);
+      String[] args = new String[2];
+      args[0] = "-ls";
+      args[1] = "/foo";
+      int ret = ToolRunner.run(fshell, args);
+      assertTrue("returned should be -1", (ret == -1));
+      String str = out.toString();
+      assertTrue("permission denied printed", str.indexOf("Permission denied") != -1);
+      out.reset();
+    } finally {
+      if (bak != null) {
+        System.setErr(bak);
+      }
+      if (dfs != null) {
+        dfs.shutdown();
+      }
+    }
+  }
+  
   public void testGet() throws IOException {
     DFSTestUtil.setLogLevel2All(FSInputChecker.LOG);
     final Configuration conf = new Configuration();


