hadoop-common-commits mailing list archives

From: cdoug...@apache.org
Subject: svn commit: r755345 - in /hadoop/core/trunk: CHANGES.txt src/core/org/apache/hadoop/io/compress/GzipCodec.java src/test/org/apache/hadoop/io/compress/TestCodec.java
Date: Tue, 17 Mar 2009 18:40:27 GMT
Author: cdouglas
Date: Tue Mar 17 18:40:26 2009
New Revision: 755345

URL: http://svn.apache.org/viewvc?rev=755345&view=rev
Log:
HADOOP-5281. Prevent sharing incompatible ZlibCompressor instances between GzipCodec and DefaultCodec.
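
For context, a minimal sketch of the failure mode this change guards against. It mirrors the new test added to TestCodec.java below; the wrapper class name (GzipPoolSketch) is made up for illustration and is not part of the patch.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.CodecPool;
    import org.apache.hadoop.io.compress.Compressor;
    import org.apache.hadoop.io.compress.DefaultCodec;
    import org.apache.hadoop.io.compress.GzipCodec;
    import org.apache.hadoop.util.ReflectionUtils;

    public class GzipPoolSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.setBoolean("hadoop.native.lib", true);

        GzipCodec gzip = ReflectionUtils.newInstance(GzipCodec.class, conf);
        DefaultCodec deflate = ReflectionUtils.newInstance(DefaultCodec.class, conf);

        // Borrow a compressor on behalf of DefaultCodec (raw zlib, no gzip
        // header) and return it to the pool.
        Compressor raw = CodecPool.getCompressor(deflate);
        CodecPool.returnCompressor(raw);

        // CodecPool hands out idle compressors by type. Before this change,
        // GzipCodec and DefaultCodec both reported ZlibFactory's shared
        // ZlibCompressor type, so GzipCodec could receive the raw-zlib
        // instance here and produce .gz output with no gzip header. With the
        // patch, GzipCodec reports its own GzipZlibCompressor type instead.
        Compressor forGzip = CodecPool.getCompressor(gzip);
        System.out.println("distinct instance: " + (forGzip != raw));
      }
    }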

Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/GzipCodec.java
    hadoop/core/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=755345&r1=755344&r2=755345&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Tue Mar 17 18:40:26 2009
@@ -1016,6 +1016,9 @@
     HADOOP-5483. Fixes a problem in the Directory Cleanup Thread due to which
     TestMiniMRWithDFS sometimes used to fail. (ddas) 
 
+    HADOOP-5281. Prevent sharing incompatible ZlibCompressor instances between
+    GzipCodec and DefaultCodec. (cdouglas)
+
 Release 0.19.2 - Unreleased
 
   BUG FIXES

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/GzipCodec.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/GzipCodec.java?rev=755345&r1=755344&r2=755345&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/GzipCodec.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/GzipCodec.java Tue Mar 17 18:40:26 2009
@@ -153,16 +153,15 @@
   }
 
   public Compressor createCompressor() {
-    return (ZlibFactory.isNativeZlibLoaded(conf)) ?
-               new ZlibCompressor(ZlibCompressor.CompressionLevel.DEFAULT_COMPRESSION,
-                                  ZlibCompressor.CompressionStrategy.DEFAULT_STRATEGY,
-                                  ZlibCompressor.CompressionHeader.GZIP_FORMAT,
-                                  64*1024) :
-               null;
+    return (ZlibFactory.isNativeZlibLoaded(conf))
+      ? new GzipZlibCompressor()
+      : null;
   }
 
   public Class<? extends Compressor> getCompressorType() {
-    return ZlibFactory.getZlibCompressorType(conf);
+    return ZlibFactory.isNativeZlibLoaded(conf)
+      ? GzipZlibCompressor.class
+      : BuiltInZlibDeflater.class;
   }
 
   public CompressionInputStream createInputStream(InputStream in) 
@@ -185,18 +184,33 @@
   }
 
   public Decompressor createDecompressor() {
-    return (ZlibFactory.isNativeZlibLoaded(conf)) ?
-               new ZlibDecompressor(ZlibDecompressor.CompressionHeader.AUTODETECT_GZIP_ZLIB,
-                                    64*1024) :
-               null;                               
+    return (ZlibFactory.isNativeZlibLoaded(conf))
+      ? new GzipZlibDecompressor()
+      : null;
   }
 
   public Class<? extends Decompressor> getDecompressorType() {
-    return ZlibFactory.getZlibDecompressorType(conf);
+    return ZlibFactory.isNativeZlibLoaded(conf)
+      ? GzipZlibDecompressor.class
+      : BuiltInZlibInflater.class;
   }
 
   public String getDefaultExtension() {
     return ".gz";
   }
 
+  static final class GzipZlibCompressor extends ZlibCompressor {
+    public GzipZlibCompressor() {
+      super(ZlibCompressor.CompressionLevel.DEFAULT_COMPRESSION,
+          ZlibCompressor.CompressionStrategy.DEFAULT_STRATEGY,
+          ZlibCompressor.CompressionHeader.GZIP_FORMAT, 64*1024);
+    }
+  }
+
+  static final class GzipZlibDecompressor extends ZlibDecompressor {
+    public GzipZlibDecompressor() {
+      super(ZlibDecompressor.CompressionHeader.AUTODETECT_GZIP_ZLIB, 64*1024);
+    }
+  }
+
 }
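
The two nested classes add no behavior: GzipZlibCompressor and GzipZlibDecompressor are just ZlibCompressor/ZlibDecompressor preconfigured with the same GZIP_FORMAT and AUTODETECT_GZIP_ZLIB settings the codec previously built inline. Their purpose is to give GzipCodec's native compressor and decompressor a distinct runtime type, which is what CodecPool uses to index idle instances, so pooled objects can no longer migrate between GzipCodec and DefaultCodec. When native zlib is not loaded, getCompressorType and getDecompressorType now report the pure-Java BuiltInZlibDeflater/BuiltInZlibInflater directly instead of delegating to ZlibFactory.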

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java?rev=755345&r1=755344&r2=755345&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/io/compress/TestCodec.java Tue Mar 17 18:40:26 2009
@@ -41,6 +41,7 @@
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.compress.CompressionOutputStream;
+import org.apache.hadoop.io.compress.zlib.ZlibFactory;
 
 public class TestCodec extends TestCase {
 
@@ -129,6 +130,21 @@
     LOG.info("SUCCESS! Completed checking " + count + " records");
   }
 
+  public void testCodecPoolGzipReuse() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setBoolean("hadoop.native.lib", true);
+    if (!ZlibFactory.isNativeZlibLoaded(conf)) {
+      LOG.warn("testCodecPoolGzipReuse skipped: native libs not loaded");
+      return;
+    }
+    GzipCodec gzc = ReflectionUtils.newInstance(GzipCodec.class, conf);
+    DefaultCodec dfc = ReflectionUtils.newInstance(DefaultCodec.class, conf);
+    Compressor c1 = CodecPool.getCompressor(gzc);
+    Compressor c2 = CodecPool.getCompressor(dfc);
+    CodecPool.returnCompressor(c1);
+    CodecPool.returnCompressor(c2);
+    assertTrue("Got mismatched ZlibCompressor", c2 != CodecPool.getCompressor(gzc));
+  }
 
   public void testSequenceFileDefaultCodec() throws IOException, ClassNotFoundException,
       InstantiationException, IllegalAccessException {
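
The new testCodecPoolGzipReuse case deliberately skips itself (with a warning) when native zlib is unavailable, since the pooling problem only arises with the native ZlibCompressor. On a checkout with the native libraries built, it should be runnable through the usual ant test target; assuming the build.xml of this era, something like:

    ant test -Dtestcase=TestCodec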


