hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cutt...@apache.org
Subject svn commit: r495049 [4/4] - in /lucene/hadoop/branches/branch-0.10: ./ bin/ docs/ src/docs/src/documentation/content/xdocs/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/fs/s3/ src/java/org/apache/hadoop/io/compress/ src/java/org/apache/had...
Date Wed, 10 Jan 2007 23:59:26 GMT
Modified: lucene/hadoop/branches/branch-0.10/src/native/configure.ac
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.10/src/native/configure.ac?view=diff&rev=495049&r1=495048&r2=495049
==============================================================================
--- lucene/hadoop/branches/branch-0.10/src/native/configure.ac (original)
+++ lucene/hadoop/branches/branch-0.10/src/native/configure.ac Wed Jan 10 15:59:23 2007
@@ -64,6 +64,9 @@
 dnl Check for '-lz'
 AC_CHECK_LIB([z], [deflate])
 
+dnl Check for '-llzo2'
+AC_CHECK_LIB([lzo2], [lzo_init])
+
 # Checks for header files.
 dnl Check for Ansi C headers
 AC_HEADER_STDC
@@ -89,6 +92,9 @@
 dnl Check for zlib headers
 AC_CHECK_HEADERS([zlib.h zconf.h], AC_COMPUTE_NEEDED_DSO(z,HADOOP_ZLIB_LIBRARY), AC_MSG_ERROR(Zlib headers were not found... native-hadoop library needs zlib to build. Please install the requisite zlib development package.))
 
+dnl Check for lzo headers
+AC_CHECK_HEADERS([lzo/lzo1.h lzo/lzo1a.h lzo/lzo1b.h lzo/lzo1c.h lzo/lzo1f.h lzo/lzo1x.h lzo/lzo1y.h lzo/lzo1z.h lzo/lzo2a.h lzo/lzo_asm.h], AC_COMPUTE_NEEDED_DSO(lzo2,HADOOP_LZO_LIBRARY), AC_MSG_ERROR(lzo headers were not found... native-hadoop library needs lzo to build. Please install the requisite lzo development package.))
+
 # Checks for typedefs, structures, and compiler characteristics.
 AC_C_CONST
 
@@ -97,6 +103,7 @@
 
 AC_CONFIG_FILES([Makefile
                  src/org/apache/hadoop/io/compress/zlib/Makefile
+                 src/org/apache/hadoop/io/compress/lzo/Makefile
                  lib/Makefile])
 AC_OUTPUT
 

Modified: lucene/hadoop/branches/branch-0.10/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.10/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c?view=diff&rev=495049&r1=495048&r2=495049
==============================================================================
--- lucene/hadoop/branches/branch-0.10/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c (original)
+++ lucene/hadoop/branches/branch-0.10/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c Wed Jan 10 15:59:23 2007
@@ -75,11 +75,11 @@
 
 	// Locate the requisite symbols from libz.so
 	dlerror();                                 // Clear any existing error
-	LOAD_ZLIB_SYMBOL(dlsym_deflateInit2_, env, libz, "deflateInit2_");
-	LOAD_ZLIB_SYMBOL(dlsym_deflate, env, libz, "deflate");
-	LOAD_ZLIB_SYMBOL(dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
-	LOAD_ZLIB_SYMBOL(dlsym_deflateReset, env, libz, "deflateReset");
-	LOAD_ZLIB_SYMBOL(dlsym_deflateEnd, env, libz, "deflateEnd");
+	LOAD_DYNAMIC_SYMBOL(dlsym_deflateInit2_, env, libz, "deflateInit2_");
+	LOAD_DYNAMIC_SYMBOL(dlsym_deflate, env, libz, "deflate");
+	LOAD_DYNAMIC_SYMBOL(dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
+	LOAD_DYNAMIC_SYMBOL(dlsym_deflateReset, env, libz, "deflateReset");
+	LOAD_DYNAMIC_SYMBOL(dlsym_deflateEnd, env, libz, "deflateEnd");
 
 	// Initialize the requisite fieldIds
     ZlibCompressor_stream = (*env)->GetFieldID(env, class, "stream", "J");

Modified: lucene/hadoop/branches/branch-0.10/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.10/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c?view=diff&rev=495049&r1=495048&r2=495049
==============================================================================
--- lucene/hadoop/branches/branch-0.10/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c (original)
+++ lucene/hadoop/branches/branch-0.10/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c Wed Jan 10 15:59:23 2007
@@ -75,11 +75,11 @@
 
 	// Locate the requisite symbols from libz.so
 	dlerror();                                 // Clear any existing error
-	LOAD_ZLIB_SYMBOL(dlsym_inflateInit2_, env, libz, "inflateInit2_");
-	LOAD_ZLIB_SYMBOL(dlsym_inflate, env, libz, "inflate");
-	LOAD_ZLIB_SYMBOL(dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
-	LOAD_ZLIB_SYMBOL(dlsym_inflateReset, env, libz, "inflateReset");
-	LOAD_ZLIB_SYMBOL(dlsym_inflateEnd, env, libz, "inflateEnd");
+	LOAD_DYNAMIC_SYMBOL(dlsym_inflateInit2_, env, libz, "inflateInit2_");
+	LOAD_DYNAMIC_SYMBOL(dlsym_inflate, env, libz, "inflate");
+	LOAD_DYNAMIC_SYMBOL(dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
+	LOAD_DYNAMIC_SYMBOL(dlsym_inflateReset, env, libz, "inflateReset");
+	LOAD_DYNAMIC_SYMBOL(dlsym_inflateEnd, env, libz, "inflateEnd");
 
 	// Initialize the requisite fieldIds
     ZlibDecompressor_stream = (*env)->GetFieldID(env, class, "stream", "J");

Modified: lucene/hadoop/branches/branch-0.10/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.10/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h?view=diff&rev=495049&r1=495048&r2=495049
==============================================================================
--- lucene/hadoop/branches/branch-0.10/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h (original)
+++ lucene/hadoop/branches/branch-0.10/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h Wed Jan 10 15:59:23 2007
@@ -55,12 +55,6 @@
 
 #include "org_apache_hadoop.h"
 
-/* A helper macro to dlsym the requisite zlib symbol. */
-#define LOAD_ZLIB_SYMBOL(func_ptr, env, handle, symbol) \
-  if ((func_ptr = do_dlsym(env, handle, symbol)) == NULL) { \
-  	return; \
-  }
-
 /* A helper macro to convert the java 'stream-handle' to a z_stream pointer. */
 #define ZSTREAM(stream) ((z_stream*)((ptrdiff_t)(stream)))
 

Modified: lucene/hadoop/branches/branch-0.10/src/native/src/org_apache_hadoop.h
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.10/src/native/src/org_apache_hadoop.h?view=diff&rev=495049&r1=495048&r2=495049
==============================================================================
--- lucene/hadoop/branches/branch-0.10/src/native/src/org_apache_hadoop.h (original)
+++ lucene/hadoop/branches/branch-0.10/src/native/src/org_apache_hadoop.h Wed Jan 10 15:59:23 2007
@@ -73,6 +73,13 @@
   return func_ptr;
 }
 
+/* A helper macro to dlsym the requisite dynamic symbol and bail-out on error. */
+#define LOAD_DYNAMIC_SYMBOL(func_ptr, env, handle, symbol) \
+  if ((func_ptr = do_dlsym(env, handle, symbol)) == NULL) { \
+    return; \
+  }
+
+
 #endif
 
 //vim: sw=2: ts=2: et

Modified: lucene/hadoop/branches/branch-0.10/src/test/org/apache/hadoop/fs/s3/S3FileSystemBaseTest.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.10/src/test/org/apache/hadoop/fs/s3/S3FileSystemBaseTest.java?view=diff&rev=495049&r1=495048&r2=495049
==============================================================================
--- lucene/hadoop/branches/branch-0.10/src/test/org/apache/hadoop/fs/s3/S3FileSystemBaseTest.java (original)
+++ lucene/hadoop/branches/branch-0.10/src/test/org/apache/hadoop/fs/s3/S3FileSystemBaseTest.java Wed Jan 10 15:59:23 2007
@@ -166,9 +166,7 @@
     
     s3FileSystem.mkdirs(path.getParent());
 
-    FSOutputStream out = s3FileSystem.createRaw(path, false, (short) 1, BLOCK_SIZE);
-    out.write(data, 0, BLOCK_SIZE);
-    out.close();
+    createEmptyFile(path);
     
     assertTrue("Exists", s3FileSystem.exists(path));
     assertEquals("Length", BLOCK_SIZE, s3FileSystem.getLength(path));
@@ -180,7 +178,7 @@
       // Expected
     }
     
-    out = s3FileSystem.createRaw(path, true, (short) 1, BLOCK_SIZE);
+    FSOutputStream out = s3FileSystem.createRaw(path, true, (short) 1, BLOCK_SIZE);
     out.write(data, 0, BLOCK_SIZE / 2);
     out.close();
     
@@ -189,14 +187,46 @@
     
   }
 
-  public void testWriteInNonExistentDirectory() {
+  public void testWriteInNonExistentDirectory() throws IOException {
     Path path = new Path("/test/hadoop/file");    
-    try {
-      s3FileSystem.createRaw(path, false, (short) 1, 128);
-      fail("Should throw IOException.");
-    } catch (IOException e) {
-      // Expected
-    }
+    createEmptyFile(path);
+    
+    assertTrue("Exists", s3FileSystem.exists(path));
+    assertEquals("Length", BLOCK_SIZE, s3FileSystem.getLength(path));
+    assertTrue("Parent exists", s3FileSystem.exists(path.getParent()));
+  }
+
+  public void testDeleteNonExistentFile() throws IOException {
+    Path path = new Path("/test/hadoop/file");    
+    assertFalse("Doesn't exist", s3FileSystem.exists(path));
+    assertFalse("No deletion", s3FileSystem.delete(path));
+  }
+
+  public void testDeleteDirectory() throws IOException {
+    Path subdir = new Path("/test/hadoop");
+    Path dir = subdir.getParent();
+    Path root = dir.getParent();
+    s3FileSystem.mkdirs(subdir);
+    Path file1 = new Path(dir, "file1");
+    Path file2 = new Path(subdir, "file2");
+    
+    createEmptyFile(file1);
+    createEmptyFile(file2);
+    
+    assertTrue("root exists", s3FileSystem.exists(root));
+    assertTrue("dir exists", s3FileSystem.exists(dir));
+    assertTrue("file1 exists", s3FileSystem.exists(file1));
+    assertTrue("subdir exists", s3FileSystem.exists(subdir));
+    assertTrue("file2 exists", s3FileSystem.exists(file2));
+    
+    assertTrue("Delete", s3FileSystem.delete(dir));
+
+    assertTrue("root exists", s3FileSystem.exists(root));
+    assertFalse("dir exists", s3FileSystem.exists(dir));
+    assertFalse("file1 exists", s3FileSystem.exists(file1));
+    assertFalse("subdir exists", s3FileSystem.exists(subdir));
+    assertFalse("file2 exists", s3FileSystem.exists(file2));
+    
   }
 
   public void testRename() throws Exception {
@@ -206,9 +236,7 @@
     
     s3FileSystem.mkdirs(path.getParent());
 
-    FSOutputStream out = s3FileSystem.createRaw(path, false, (short) 1, BLOCK_SIZE);
-    out.write(data, 0, len);
-    out.close();
+    createEmptyFile(path);
 
     assertTrue("Exists", s3FileSystem.exists(path));
 
@@ -228,5 +256,10 @@
     }
   }
 
+  private void createEmptyFile(Path path) throws IOException {
+    FSOutputStream out = s3FileSystem.createRaw(path, false, (short) 1, BLOCK_SIZE);
+    out.write(data, 0, BLOCK_SIZE);
+    out.close();
+  }
 
 }

Modified: lucene/hadoop/branches/branch-0.10/src/test/org/apache/hadoop/io/TestSequenceFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.10/src/test/org/apache/hadoop/io/TestSequenceFile.java?view=diff&rev=495049&r1=495048&r2=495049
==============================================================================
--- lucene/hadoop/branches/branch-0.10/src/test/org/apache/hadoop/io/TestSequenceFile.java (original)
+++ lucene/hadoop/branches/branch-0.10/src/test/org/apache/hadoop/io/TestSequenceFile.java Wed Jan 10 15:59:23 2007
@@ -28,6 +28,7 @@
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
+import org.apache.hadoop.io.compress.LzoCodec;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.conf.*;
 
@@ -41,8 +42,26 @@
   public TestSequenceFile(String name) { super(name); }
 
   /** Unit tests for SequenceFile. */
-  public void testSequenceFile() throws Exception {
+  public void testZlibSequenceFile() throws Exception {
+    LOG.info("Testing SequenceFile with DefaultCodec");
     compressedSeqFileTest(new DefaultCodec());
+    LOG.info("Successfully tested SequenceFile with DefaultCodec");
+  }
+  
+  public void testLzoSequenceFile() throws Exception {
+    if (LzoCodec.isNativeLzoLoaded()) {
+      LOG.info("Testing SequenceFile with LzoCodec");
+      CompressionCodec lzoCodec = null;
+      try {
+        lzoCodec = (CompressionCodec) ReflectionUtils.newInstance(
+                conf.getClassByName(LzoCodec.class.getName()), conf);
+      } catch (ClassNotFoundException cnfe) {
+        throw new IOException("Cannot find LzoCodec!");
+      }
+
+      compressedSeqFileTest(lzoCodec);
+      LOG.info("Successfully tested SequenceFile with LzoCodec");
+    }
   }
   
   public void compressedSeqFileTest(CompressionCodec codec) throws Exception {
@@ -60,8 +79,6 @@
 
     FileSystem fs = new LocalFileSystem(conf);
     try {
-        //LOG.setLevel(Level.FINE);
-
         // SequenceFile.Writer
         writeTest(fs, count, seed, file, CompressionType.NONE, null);
         readTest(fs, count, seed, file);

Modified: lucene/hadoop/branches/branch-0.10/src/test/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.10/src/test/org/apache/hadoop/io/compress/TestCodec.java?view=diff&rev=495049&r1=495048&r2=495049
==============================================================================
--- lucene/hadoop/branches/branch-0.10/src/test/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ lucene/hadoop/branches/branch-0.10/src/test/org/apache/hadoop/io/compress/TestCodec.java Wed Jan 10 15:59:23 2007
@@ -51,6 +51,12 @@
     codecTest(seed, count, "org.apache.hadoop.io.compress.GzipCodec");
   }
   
+  public void testLzoCodec() throws IOException {
+    if (LzoCodec.isNativeLzoLoaded()) {
+      codecTest(seed, count, "org.apache.hadoop.io.compress.LzoCodec");
+    }
+  }
+  
   private static void codecTest(int seed, int count, String codecClass) 
   throws IOException {
     

Modified: lucene/hadoop/branches/branch-0.10/src/test/org/apache/hadoop/mapred/TestMapRed.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.10/src/test/org/apache/hadoop/mapred/TestMapRed.java?view=diff&rev=495049&r1=495048&r2=495049
==============================================================================
--- lucene/hadoop/branches/branch-0.10/src/test/org/apache/hadoop/mapred/TestMapRed.java (original)
+++ lucene/hadoop/branches/branch-0.10/src/test/org/apache/hadoop/mapred/TestMapRed.java Wed Jan 10 15:59:23 2007
@@ -455,7 +455,7 @@
         checkJob.setOutputPath(intermediateOuts);
         checkJob.setOutputKeyClass(IntWritable.class);
         checkJob.setOutputValueClass(IntWritable.class);
-        checkJob.setOutputFormat(SequenceFileOutputFormat.class);
+        checkJob.setOutputFormat(MapFileOutputFormat.class);
         checkJob.setReducerClass(RandomCheckReducer.class);
         checkJob.setNumReduceTasks(intermediateReduces);
 



Mime
View raw message