hadoop-common-commits mailing list archives

From: bo...@apache.org
Subject: svn commit: r1360264 - in /hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java src/test/java/org/apache/hadoop/io/compress/TestCodec.java
Date: Wed, 11 Jul 2012 16:19:18 GMT
Author: bobby
Date: Wed Jul 11 16:19:17 2012
New Revision: 1360264

URL: http://svn.apache.org/viewvc?rev=1360264&view=rev
Log:
svn merge -c 1359866 FIXES: HADOOP-8423. MapFile.Reader.get() crashes jvm or throws EOFException
on Snappy or LZO block-compressed data. Contributed by Todd Lipcon.
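
As context for the change (not part of the commit itself), the failure mode can be reproduced roughly as in the sketch below, which mirrors the regression test added to TestCodec.java in this commit; the class name, output path, record count, and lookup keys are illustrative, and the native Snappy library must be loaded for the block codec to be exercised.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.SnappyCodec;

public class MapFileSnappyRepro {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path path = new Path("/tmp/snappy-mapfile-repro");  // illustrative path

    // Write a small MapFile with BLOCK compression and the Snappy codec.
    MapFile.Writer writer = new MapFile.Writer(conf, path,
        MapFile.Writer.keyClass(Text.class),
        MapFile.Writer.valueClass(Text.class),
        MapFile.Writer.compression(CompressionType.BLOCK, new SnappyCodec()));
    Text key = new Text();
    for (int i = 0; i < 100; i++) {
      key.set(String.format("%03d", i));
      writer.append(key, key);
    }
    writer.close();

    // Random lookups seek within the block-compressed data; without the
    // resetState() change below this could throw EOFException or crash
    // the JVM in native code.
    MapFile.Reader reader = new MapFile.Reader(path, conf);
    System.out.println(reader.get(new Text("002"), new Text()));
    System.out.println(reader.get(new Text("004"), new Text()));
    reader.close();
  }
}

The read path seeks inside the block-compressed SequenceFile backing the MapFile, and the BlockDecompressorStream change below clears originalBlockSize and noUncompressedBytes in resetState() so that a read after a seek does not reuse stale per-block bookkeeping; that stale state appears to be what produced the EOFException or native crash described in HADOOP-8423.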

Modified:
    hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
    hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
    hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java

Modified: hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1360264&r1=1360263&r2=1360264&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/CHANGES.txt Wed Jul 11 16:19:17 2012
@@ -105,6 +105,10 @@ Release 0.23.3 - UNRELEASED
     HADOOP-8573. Configuration tries to read from an inputstream resource 
     multiple times (Robert Evans via tgraves)
 
+    HADOOP-8423. MapFile.Reader.get() crashes jvm or throws
+    EOFException on Snappy or LZO block-compressed data
+    (todd via harsh)
+
 Release 0.23.2 - UNRELEASED 
 
   NEW FEATURES

Modified: hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java?rev=1360264&r1=1360263&r2=1360264&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java Wed Jul 11 16:19:17 2012
@@ -127,6 +127,8 @@ public class BlockDecompressorStream ext
   }
 
   public void resetState() throws IOException {
+    originalBlockSize = 0;
+    noUncompressedBytes = 0;
     super.resetState();
   }
 

Modified: hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java?rev=1360264&r1=1360263&r2=1360264&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java Wed Jul 11 16:19:17 2012
@@ -46,6 +46,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.MapFile;
 import org.apache.hadoop.io.RandomDatum;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
@@ -68,6 +69,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
 import org.junit.Assert;
+import org.junit.Assume;
 import org.junit.Test;
 import static org.junit.Assert.*;
 
@@ -514,6 +516,50 @@ public class TestCodec {
     LOG.info("SUCCESS! Completed SequenceFileCodecTest with codec \"" + codecClass + "\"");
   }
   
+  /**
+   * Regression test for HADOOP-8423: seeking in a block-compressed
+   * stream would not properly reset the block decompressor state.
+   */
+  @Test
+  public void testSnappyMapFile() throws Exception {
+    Assume.assumeTrue(SnappyCodec.isNativeCodeLoaded());
+    codecTestMapFile(SnappyCodec.class, CompressionType.BLOCK, 100);
+  }
+  
+  private void codecTestMapFile(Class<? extends CompressionCodec> clazz,
+      CompressionType type, int records) throws Exception {
+    
+    FileSystem fs = FileSystem.get(conf);
+    LOG.info("Creating MapFiles with " + records  + 
+            " records using codec " + clazz.getSimpleName());
+    Path path = new Path(new Path(
+        System.getProperty("test.build.data", "/tmp")),
+      clazz.getSimpleName() + "-" + type + "-" + records);
+
+    LOG.info("Writing " + path);
+    createMapFile(conf, fs, path, clazz.newInstance(), type, records);
+    MapFile.Reader reader = new MapFile.Reader(path, conf);
+    Text key1 = new Text("002");
+    assertNotNull(reader.get(key1, new Text()));
+    Text key2 = new Text("004");
+    assertNotNull(reader.get(key2, new Text()));
+  }
+  
+  private static void createMapFile(Configuration conf, FileSystem fs, Path path, 
+      CompressionCodec codec, CompressionType type, int records) throws IOException {
+    MapFile.Writer writer = 
+        new MapFile.Writer(conf, path,
+            MapFile.Writer.keyClass(Text.class),
+            MapFile.Writer.valueClass(Text.class),
+            MapFile.Writer.compression(type, codec));
+    Text key = new Text();
+    for (int j = 0; j < records; j++) {
+        key.set(String.format("%03d", j));
+        writer.append(key, key);
+    }
+    writer.close();
+  }
+
   public static void main(String[] args) throws IOException {
     int count = 10000;
     String codecClass = "org.apache.hadoop.io.compress.DefaultCodec";


