hadoop-common-commits mailing list archives

From tomwh...@apache.org
Subject svn commit: r1076314 - in /hadoop/common/trunk: CHANGES.txt src/java/org/apache/hadoop/io/compress/DefaultCodec.java
Date Wed, 02 Mar 2011 18:01:12 GMT
Author: tomwhite
Date: Wed Mar  2 18:01:11 2011
New Revision: 1076314

URL: http://svn.apache.org/viewvc?rev=1076314&view=rev
Log:
HADOOP-6754. DefaultCodec.createOutputStream() leaks memory. Contributed by Aaron Kimball.
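
Illustrative sketch (not part of this commit) of the pattern the log message describes: calling createOutputStream(OutputStream) repeatedly creates a fresh Compressor on every call, and when the native zlib library is in use its direct buffers sit outside the Java heap and are not reclaimed promptly, even though the stream is closed. The class name, loop bound, and payload size below are hypothetical.

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.DefaultCodec;

public class LeakyPattern {
  public static void main(String[] args) throws IOException {
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(new Configuration());
    byte[] payload = new byte[64 * 1024];

    // Each iteration allocates a new Compressor internally; with native
    // zlib, its direct buffers are untracked by the Java heap and can
    // accumulate even though the stream is closed every time.
    for (int i = 0; i < 100000; i++) {
      CompressionOutputStream out =
          codec.createOutputStream(new ByteArrayOutputStream());
      out.write(payload);
      out.finish();
      out.close();
    }
  }
}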

Modified:
    hadoop/common/trunk/CHANGES.txt
    hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/DefaultCodec.java

Modified: hadoop/common/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=1076314&r1=1076313&r2=1076314&view=diff
==============================================================================
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Wed Mar  2 18:01:11 2011
@@ -88,6 +88,9 @@ Trunk (unreleased changes)
     HADOOP-7153. MapWritable violates contract of Map interface for equals()
     and hashCode(). (Nicholas Telford via todd)
 
+    HADOOP-6754. DefaultCodec.createOutputStream() leaks memory.
+    (Aaron Kimball via tomwhite)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/DefaultCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/DefaultCodec.java?rev=1076314&r1=1076313&r2=1076314&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/DefaultCodec.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/compress/DefaultCodec.java Wed Mar  2 18:01:11 2011
@@ -19,18 +19,21 @@
 package org.apache.hadoop.io.compress;
 
 import java.io.IOException;
-import java.io.OutputStream;
 import java.io.InputStream;
+import java.io.OutputStream;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.compress.zlib.*;
+import org.apache.hadoop.io.compress.zlib.ZlibFactory;
 
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class DefaultCodec implements Configurable, CompressionCodec {
+  private static final Log LOG = LogFactory.getLog(DefaultCodec.class);
   
   Configuration conf;
 
@@ -44,6 +47,12 @@ public class DefaultCodec implements Con
   
   public CompressionOutputStream createOutputStream(OutputStream out) 
   throws IOException {
+    // This may leak memory if called in a loop. The createCompressor() call
+    // may cause allocation of an untracked direct-backed buffer if native
+    // libs are being used (even if you close the stream).  A Compressor
+    // object should be reused between successive calls.
+    LOG.warn("DefaultCodec.createOutputStream() may leak memory. "
+        + "Create a compressor first.");
     return new CompressorStream(out, createCompressor(), 
                                 conf.getInt("io.file.buffer.size", 4*1024));
   }



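The new comment recommends reusing a Compressor between calls rather than letting createOutputStream(OutputStream) allocate one each time. A minimal sketch of one way to do that, using the stock org.apache.hadoop.io.compress.CodecPool helper together with createOutputStream(OutputStream, Compressor); the example class and method names (PooledCompressionExample, writeCompressed) are hypothetical and not part of the commit.

import java.io.IOException;
import java.io.OutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.DefaultCodec;

public class PooledCompressionExample {
  public static void writeCompressed(OutputStream rawOut, byte[] data)
      throws IOException {
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(new Configuration());

    // Borrow a Compressor from the pool instead of letting
    // createOutputStream(OutputStream) allocate a fresh one per call.
    Compressor compressor = CodecPool.getCompressor(codec);
    try {
      CompressionOutputStream out =
          codec.createOutputStream(rawOut, compressor);
      out.write(data);
      out.finish();
      out.close();
    } finally {
      // Returning the compressor lets its native direct buffers be reused
      // by subsequent callers instead of being re-allocated.
      CodecPool.returnCompressor(compressor);
    }
  }
}

Callers that compress many streams in a loop would borrow the compressor once outside the loop (or per task) and return it when done, which is the reuse the warning added in this commit points to.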