hadoop-hdfs-commits mailing list archives

From cmcc...@apache.org
Subject svn commit: r1586790 - in /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: CHANGES.txt src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
Date Fri, 11 Apr 2014 22:42:49 GMT
Author: cmccabe
Date: Fri Apr 11 22:42:48 2014
New Revision: 1586790

URL: http://svn.apache.org/r1586790
Log:
HDFS-6232. OfflineEditsViewer throws a NPE on edits containing ACL modifications (ajisakaa
via cmccabe)
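
The NPE comes from ACL entries that have no name: entries for the unnamed user, group, and other classes are built without setName(), so AclEntry#getName() returns null, and writing that null name into the XML stream is what failed when OfflineEditsViewer dumped an ACL-bearing edit. A minimal sketch (illustrative only, not part of this commit; the class name is made up) of such an entry, using the same builder calls the test change below uses:

import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclEntryScope;
import org.apache.hadoop.fs.permission.AclEntryType;
import org.apache.hadoop.fs.permission.FsAction;

public class UnnamedAclEntryExample {
  public static void main(String[] args) {
    // Unnamed user-class entry: no setName() call, so getName() is null.
    AclEntry unnamed = new AclEntry.Builder()
        .setScope(AclEntryScope.ACCESS)
        .setType(AclEntryType.USER)
        .setPermission(FsAction.READ_WRITE)
        .build();
    // Prints "name = null"; serializing this value without a null check
    // is the failure mode this change guards against.
    System.out.println("name = " + unnamed.getName());
  }
}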

Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1586790&r1=1586789&r2=1586790&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Fri Apr 11 22:42:48 2014
@@ -384,6 +384,9 @@ Release 2.4.1 - UNRELEASED
     HDFS-6229. Race condition in failover can cause RetryCache fail to work.
     (jing9)
 
+    HDFS-6232. OfflineEditsViewer throws a NPE on edits containing ACL
+    modifications (ajisakaa via cmccabe)
+
 Release 2.4.0 - 2014-04-07 
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java?rev=1586790&r1=1586789&r2=1586790&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java Fri Apr 11 22:42:48 2014
@@ -4082,7 +4082,9 @@ public abstract class FSEditLogOp {
       contentHandler.startElement("", "", "ENTRY", new AttributesImpl());
       XMLUtils.addSaxString(contentHandler, "SCOPE", e.getScope().name());
       XMLUtils.addSaxString(contentHandler, "TYPE", e.getType().name());
-      XMLUtils.addSaxString(contentHandler, "NAME", e.getName());
+      if (e.getName() != null) {
+        XMLUtils.addSaxString(contentHandler, "NAME", e.getName());
+      }
       fsActionToXml(contentHandler, e.getPermission());
       contentHandler.endElement("", "", "ENTRY");
     }
@@ -4098,7 +4100,7 @@ public abstract class FSEditLogOp {
       AclEntry e = new AclEntry.Builder()
         .setScope(AclEntryScope.valueOf(s.getValue("SCOPE")))
         .setType(AclEntryType.valueOf(s.getValue("TYPE")))
-        .setName(s.getValue("NAME"))
+        .setName(s.getValueOrNull("NAME"))
         .setPermission(fsActionFromXml(s)).build();
       aclEntries.add(e);
     }
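
On the parse side the optional NAME element is now read with getValueOrNull instead of getValue. The XMLUtils.Stanza class itself is not part of this diff, so the following is only a rough sketch of the assumed contract: getValue fails when the child element is missing, while getValueOrNull returns null, which the AclEntry builder accepts for unnamed entries.

import java.util.Map;

// Rough stand-in for the assumed lookup behavior; the real class lives in
// org.apache.hadoop.hdfs.util.XMLUtils and may differ in detail.
class StanzaSketch {
  private final Map<String, String> children; // child element name -> text value

  StanzaSketch(Map<String, String> children) {
    this.children = children;
  }

  // Required element: missing means the stanza is malformed.
  String getValue(String name) {
    String value = children.get(name);
    if (value == null) {
      throw new IllegalStateException("no entry found for " + name);
    }
    return value;
  }

  // Optional element: missing simply yields null (as for NAME above).
  String getValueOrNull(String name) {
    return children.get(name);
  }
}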

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java?rev=1586790&r1=1586789&r2=1586790&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java Fri Apr 11 22:42:48 2014
@@ -32,6 +32,9 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystem.Statistics;
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclEntryScope;
+import org.apache.hadoop.fs.permission.AclEntryType;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.MiniDFSCluster.NameNodeInfo;
 import org.apache.hadoop.hdfs.client.HdfsDataInputStream;
@@ -57,7 +60,6 @@ import org.apache.hadoop.hdfs.server.nam
         .ConfiguredFailoverProxyProvider;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
-import org.apache.hadoop.hdfs.web.TestWebHDFSForHA;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.nativeio.NativeIO;
 import org.apache.hadoop.net.NetUtils;
@@ -1104,7 +1106,33 @@ public class DFSTestUtil {
     // OP_REMOVE_CACHE_POOL
     filesystem.removeCachePool("pool1");
     // OP_SET_ACL
-    filesystem.setAcl(pathConcatTarget, Lists.<AclEntry> newArrayList());
+    List<AclEntry> aclEntryList = Lists.newArrayList();
+    aclEntryList.add(
+        new AclEntry.Builder()
+            .setPermission(FsAction.READ_WRITE)
+            .setScope(AclEntryScope.ACCESS)
+            .setType(AclEntryType.USER)
+            .build());
+    aclEntryList.add(
+        new AclEntry.Builder()
+            .setName("user")
+            .setPermission(FsAction.READ_WRITE)
+            .setScope(AclEntryScope.ACCESS)
+            .setType(AclEntryType.USER)
+            .build());
+    aclEntryList.add(
+        new AclEntry.Builder()
+            .setPermission(FsAction.WRITE)
+            .setScope(AclEntryScope.ACCESS)
+            .setType(AclEntryType.GROUP)
+            .build());
+    aclEntryList.add(
+        new AclEntry.Builder()
+            .setPermission(FsAction.NONE)
+            .setScope(AclEntryScope.ACCESS)
+            .setType(AclEntryType.OTHER)
+            .build());
+    filesystem.setAcl(pathConcatTarget, aclEntryList);
   }
 
   public static void abortStream(DFSOutputStream out) throws IOException {
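
With the FSEditLogOp guard in place, the broader test data above exercises both shapes of ACL entry in the dumped edits: only the named USER entry carries a NAME field. A hedged illustration follows (not part of the commit; the class name and plain string output are made up, while the ENTRY/SCOPE/TYPE/NAME element names come from the serializer change above):

import java.util.List;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclEntryScope;
import org.apache.hadoop.fs.permission.AclEntryType;
import org.apache.hadoop.fs.permission.FsAction;
import com.google.common.collect.Lists;

public class AclEntryXmlShape {
  public static void main(String[] args) {
    // Two of the four entries built in the test above: one unnamed, one named.
    List<AclEntry> entries = Lists.newArrayList(
        new AclEntry.Builder().setScope(AclEntryScope.ACCESS)
            .setType(AclEntryType.USER).setPermission(FsAction.READ_WRITE).build(),
        new AclEntry.Builder().setScope(AclEntryScope.ACCESS)
            .setType(AclEntryType.USER).setName("user")
            .setPermission(FsAction.READ_WRITE).build());
    for (AclEntry e : entries) {
      StringBuilder sb = new StringBuilder("<ENTRY>");
      sb.append("<SCOPE>").append(e.getScope().name()).append("</SCOPE>");
      sb.append("<TYPE>").append(e.getType().name()).append("</TYPE>");
      if (e.getName() != null) { // the null check this commit adds
        sb.append("<NAME>").append(e.getName()).append("</NAME>");
      }
      // The real serializer also writes the permission via fsActionToXml,
      // omitted here to keep the sketch short.
      sb.append("</ENTRY>");
      System.out.println(sb);
    }
  }
}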


