You are viewing a plain text version of this content; the canonical hyperlink was lost in the plain-text conversion.
Posted to common-commits@hadoop.apache.org by ma...@apache.org on 2008/12/02 20:05:52 UTC
svn commit: r722571 - in /hadoop/core/branches/branch-0.19: CHANGES.txt
src/contrib/fuse-dfs/src/fuse_dfs.c
src/contrib/fuse-dfs/src/test/TestFuseDFS.java
Author: mahadev
Date: Tue Dec 2 11:05:52 2008
New Revision: 722571
URL: http://svn.apache.org/viewvc?rev=722571&view=rev
Log:
HADOOP-4635. Fix a memory leak in fuse dfs. (pete wyckoff via mahadev)
Modified:
hadoop/core/branches/branch-0.19/CHANGES.txt
hadoop/core/branches/branch-0.19/src/contrib/fuse-dfs/src/fuse_dfs.c
hadoop/core/branches/branch-0.19/src/contrib/fuse-dfs/src/test/TestFuseDFS.java
Modified: hadoop/core/branches/branch-0.19/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/CHANGES.txt?rev=722571&r1=722570&r2=722571&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/CHANGES.txt (original)
+++ hadoop/core/branches/branch-0.19/CHANGES.txt Tue Dec 2 11:05:52 2008
@@ -1039,6 +1039,8 @@
HADOOP-4713. Fix librecordio to handle records larger than 64k. (Christian
Kunz via cdouglas)
+
+ HADOOP-4635. Fix a memory leak in fuse dfs. (pete wyckoff via mahadev)
Release 0.18.2 - 2008-11-03
Modified: hadoop/core/branches/branch-0.19/src/contrib/fuse-dfs/src/fuse_dfs.c
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/fuse-dfs/src/fuse_dfs.c?rev=722571&r1=722570&r2=722571&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/fuse-dfs/src/fuse_dfs.c (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/fuse-dfs/src/fuse_dfs.c Tue Dec 2 11:05:52 2008
@@ -618,6 +618,8 @@
assert(user != NULL);
groupnames[i] = user;
+ // increment num_groups to include the user being added to the group list
+ *num_groups = *num_groups + 1;
#else
int i = 0;
@@ -1000,7 +1002,7 @@
st->f_blocks = cap/bsize;
st->f_bfree = (cap-used)/bsize;
- st->f_bavail = cap/bsize;
+ st->f_bavail = (cap-used)/bsize;
st->f_files = 1000;
st->f_ffree = 500;
Modified: hadoop/core/branches/branch-0.19/src/contrib/fuse-dfs/src/test/TestFuseDFS.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/fuse-dfs/src/test/TestFuseDFS.java?rev=722571&r1=722570&r2=722571&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/fuse-dfs/src/test/TestFuseDFS.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/fuse-dfs/src/test/TestFuseDFS.java Tue Dec 2 11:05:52 2008
@@ -271,23 +271,9 @@
DistributedFileSystem.DiskStatus d = fileSys.getDiskStatus();
- System.err.println("DEBUG:f.total=" + f.getTotalSpace());
- System.err.println("DEBUG:d.capacity=" + d.getCapacity());
-
- System.err.println("DEBUG:f.usable=" + f.getUsableSpace());
-
- System.err.println("DEBUG:f.free=" + f.getFreeSpace());
- System.err.println("DEBUG:d.remaining = " + d.getRemaining());
-
- System.err.println("DEBUG:d.used = " + d.getDfsUsed());
- System.err.println("DEBUG:f.total - f.free = " + (f.getTotalSpace() - f.getFreeSpace()));
-
long fileUsedBlocks = (f.getTotalSpace() - f.getFreeSpace())/(64 * 1024 * 1024);
long dfsUsedBlocks = (long)Math.ceil((double)d.getDfsUsed()/(64 * 1024 * 1024));
- System.err.println("DEBUG: fileUsedBlocks = " + fileUsedBlocks);
- System.err.println("DEBUG: dfsUsedBlocks = " + dfsUsedBlocks);
- assertTrue(f.getTotalSpace() == f.getUsableSpace());
assertTrue(fileUsedBlocks == dfsUsedBlocks);
assertTrue(d.getCapacity() == f.getTotalSpace());