hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From maha...@apache.org
Subject svn commit: r722572 - in /hadoop/core/branches/branch-0.18: CHANGES.txt src/contrib/fuse-dfs/src/fuse_dfs.c src/contrib/fuse-dfs/src/test/TestFuseDFS.java
Date Tue, 02 Dec 2008 19:08:10 GMT
Author: mahadev
Date: Tue Dec  2 11:08:10 2008
New Revision: 722572

URL: http://svn.apache.org/viewvc?rev=722572&view=rev
Log:
HADOOP-4635. Fix a memory leak in fuse dfs. (pete wyckoff via mahadev)

Modified:
    hadoop/core/branches/branch-0.18/CHANGES.txt
    hadoop/core/branches/branch-0.18/src/contrib/fuse-dfs/src/fuse_dfs.c
    hadoop/core/branches/branch-0.18/src/contrib/fuse-dfs/src/test/TestFuseDFS.java

Modified: hadoop/core/branches/branch-0.18/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.18/CHANGES.txt?rev=722572&r1=722571&r2=722572&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.18/CHANGES.txt (original)
+++ hadoop/core/branches/branch-0.18/CHANGES.txt Tue Dec  2 11:08:10 2008
@@ -54,6 +54,8 @@
 
     HADOOP-4713. Fix librecordio to handle records larger than 64k. (Christian
     Kunz via cdouglas)
+   
+   HADOOP-4635. Fix a memory leak in fuse dfs. (pete wyckoff via mahadev)
 
 Release 0.18.2 - 2008-11-03
 

Modified: hadoop/core/branches/branch-0.18/src/contrib/fuse-dfs/src/fuse_dfs.c
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.18/src/contrib/fuse-dfs/src/fuse_dfs.c?rev=722572&r1=722571&r2=722572&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.18/src/contrib/fuse-dfs/src/fuse_dfs.c (original)
+++ hadoop/core/branches/branch-0.18/src/contrib/fuse-dfs/src/fuse_dfs.c Tue Dec  2 11:08:10 2008
@@ -618,6 +618,8 @@
   assert(user != NULL);
   groupnames[i] = user;
 
+  // increment num_groups to include the user being added to the group list
+  *num_groups = *num_groups + 1;
 #else
 
   int i = 0;
@@ -1000,7 +1002,7 @@
   st->f_blocks  =  cap/bsize;
 
   st->f_bfree   =  (cap-used)/bsize;
-  st->f_bavail  =  cap/bsize;
+  st->f_bavail  =  (cap-used)/bsize;
 
   st->f_files   =  1000;
   st->f_ffree   =  500;

Modified: hadoop/core/branches/branch-0.18/src/contrib/fuse-dfs/src/test/TestFuseDFS.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.18/src/contrib/fuse-dfs/src/test/TestFuseDFS.java?rev=722572&r1=722571&r2=722572&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.18/src/contrib/fuse-dfs/src/test/TestFuseDFS.java (original)
+++ hadoop/core/branches/branch-0.18/src/contrib/fuse-dfs/src/test/TestFuseDFS.java Tue Dec  2 11:08:10 2008
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-import org.apache.hadoop.dfs.*;
+import org.apache.hadoop.hdfs.*;
 import junit.framework.TestCase;
 import java.io.*;
 import org.apache.hadoop.conf.Configuration;
@@ -58,7 +58,7 @@
     System.err.println("LD_LIBRARY_PATH=" + lp);
     String cmd[] =  {  fuse_cmd, "dfs://" + dfs.getHost() + ":" + String.valueOf(dfs.getPort()),
                        mountpoint, "-obig_writes", "-odebug", "-oentry_timeout=1",  "-oattribute_timeout=1", "-ousetrash", "rw", "-oinitchecks",
-                       "-ordbuffer=5000"};
+                       "-ordbuffer=32768"};
     final String [] envp = {
       "CLASSPATH="+  cp,
       "LD_LIBRARY_PATH=" + lp,
@@ -271,23 +271,9 @@
 
       DistributedFileSystem.DiskStatus d = fileSys.getDiskStatus();
 
-      System.err.println("DEBUG:f.total=" + f.getTotalSpace());
-      System.err.println("DEBUG:d.capacity=" + d.getCapacity());
-
-      System.err.println("DEBUG:f.usable=" + f.getUsableSpace());
-
-      System.err.println("DEBUG:f.free=" + f.getFreeSpace());
-      System.err.println("DEBUG:d.remaining = " + d.getRemaining());
-
-      System.err.println("DEBUG:d.used = " + d.getDfsUsed());
-      System.err.println("DEBUG:f.total - f.free = " + (f.getTotalSpace() - f.getFreeSpace()));
-
       long fileUsedBlocks =  (f.getTotalSpace() - f.getFreeSpace())/(64 * 1024 * 1024);
       long dfsUsedBlocks = (long)Math.ceil((double)d.getDfsUsed()/(64 * 1024 * 1024));
-      System.err.println("DEBUG: fileUsedBlocks = " + fileUsedBlocks);
-      System.err.println("DEBUG: dfsUsedBlocks =  " + dfsUsedBlocks);
 
-      assertTrue(f.getTotalSpace() == f.getUsableSpace());
       assertTrue(fileUsedBlocks == dfsUsedBlocks);
       assertTrue(d.getCapacity() == f.getTotalSpace());
 



Mime
View raw message