hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From hair...@apache.org
Subject svn commit: r725801 - in /hadoop/core/branches/branch-0.18: CHANGES.txt src/test/org/apache/hadoop/dfs/TestDiskError.java
Date Thu, 11 Dec 2008 20:19:22 GMT
Author: hairong
Date: Thu Dec 11 12:19:21 2008
New Revision: 725801

URL: http://svn.apache.org/viewvc?rev=725801&view=rev
Log:
HADOOP-4824. Should not use File.setWritable() in 0.18. Contributed by Hairong Kuang.

Modified:
    hadoop/core/branches/branch-0.18/CHANGES.txt
    hadoop/core/branches/branch-0.18/src/test/org/apache/hadoop/dfs/TestDiskError.java

Modified: hadoop/core/branches/branch-0.18/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.18/CHANGES.txt?rev=725801&r1=725800&r2=725801&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.18/CHANGES.txt (original)
+++ hadoop/core/branches/branch-0.18/CHANGES.txt Thu Dec 11 12:19:21 2008
@@ -97,6 +97,8 @@
     HADOOP-4823. Use SortedMap instead of NavigableMap in 0.18 due to the
     Java 5 requirement.  (szetszwo)
 
+    HADOOP-4824. Should not use File.setWritable() in 0.18. (hairong)
+
 Release 0.18.2 - 2008-11-03
 
   BUG FIXES

Modified: hadoop/core/branches/branch-0.18/src/test/org/apache/hadoop/dfs/TestDiskError.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.18/src/test/org/apache/hadoop/dfs/TestDiskError.java?rev=725801&r1=725800&r2=725801&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.18/src/test/org/apache/hadoop/dfs/TestDiskError.java (original)
+++ hadoop/core/branches/branch-0.18/src/test/org/apache/hadoop/dfs/TestDiskError.java Thu Dec 11 12:19:21 2008
@@ -25,6 +25,8 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.dfs.DFSTestUtil;
 import org.apache.hadoop.dfs.FSConstants;
 import org.apache.hadoop.dfs.MiniDFSCluster;
@@ -43,15 +45,21 @@
     cluster.waitActive();
     FileSystem fs = cluster.getFileSystem();
     final int dnIndex = 0;
-    File dataDir = new File(
+
+    FileSystem localFs = FileSystem.getLocal(conf);
+    Path dataDir = new Path(
       System.getProperty("test.build.data", "build/test/data"), "dfs");
-    dataDir = new File(dataDir, "data");
-    File dir1 = new File(new File(dataDir, "data"+(2*dnIndex+1)), "tmp");
-    File dir2 = new File(new File(dataDir, "data"+(2*dnIndex+2)), "tmp");
+    dataDir = new Path(dataDir, "data");
+    Path dir1 = new Path(new Path(dataDir, "data"+(2*dnIndex+1)), "tmp");
+    Path dir2 = new Path(new Path(dataDir, "data"+(2*dnIndex+2)), "tmp");
+    FsPermission oldPerm1 = localFs.getFileStatus(dir1).getPermission();
+    FsPermission oldPerm2 = localFs.getFileStatus(dir2).getPermission();
     try {
       // make the data directory of the first datanode to be readonly
-      assertTrue(dir1.setReadOnly());
-      assertTrue(dir2.setReadOnly());
+      final FsPermission readPermission =
+        new FsPermission(FsAction.READ, FsAction.READ, FsAction.READ);
+      localFs.setPermission(dir1, readPermission);
+      localFs.setPermission(dir2, readPermission);
 
       // create files and make sure that first datanode will be down
       DataNode dn = cluster.getDataNodes().get(dnIndex);
@@ -63,8 +71,8 @@
       }
     } finally {
       // restore its old permission
-      dir1.setWritable(true);
-      dir2.setWritable(true);
+      localFs.setPermission(dir1, oldPerm1);
+      localFs.setPermission(dir2, oldPerm2);
       cluster.shutdown();
     }
   }



Mime
View raw message