hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From aajis...@apache.org
Subject hadoop git commit: HADOOP-11627. Remove io.native.lib.available. Contributed by Brahma Reddy Battula.
Date Thu, 23 Apr 2015 23:10:47 GMT
Repository: hadoop
Updated Branches:
  refs/heads/trunk ef4e9963b -> ac281e3fc


HADOOP-11627. Remove io.native.lib.available. Contributed by Brahma Reddy Battula.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ac281e3f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ac281e3f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ac281e3f

Branch: refs/heads/trunk
Commit: ac281e3fc8681e9b421cb5fb442851293766e949
Parents: ef4e996
Author: Akira Ajisaka <aajisaka@apache.org>
Authored: Fri Apr 24 08:08:55 2015 +0900
Committer: Akira Ajisaka <aajisaka@apache.org>
Committed: Fri Apr 24 08:08:55 2015 +0900

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 ++
 .../org/apache/hadoop/conf/Configuration.java   |  2 -
 .../fs/CommonConfigurationKeysPublic.java       |  5 --
 .../hadoop/io/compress/bzip2/Bzip2Factory.java  | 13 +----
 .../hadoop/io/compress/zlib/ZlibFactory.java    | 25 +++++++--
 .../apache/hadoop/util/NativeCodeLoader.java    | 26 ---------
 .../src/main/resources/core-default.xml         |  9 ----
 .../src/site/markdown/DeprecatedProperties.md   |  1 +
 .../apache/hadoop/io/compress/TestCodec.java    | 56 +++++++++++---------
 .../zlib/TestZlibCompressorDecompressor.java    |  2 -
 .../file/tfile/TestTFileSeqFileComparison.java  |  2 -
 .../mapred/TestConcatenatedCompressedInput.java | 47 ++++++++--------
 12 files changed, 81 insertions(+), 110 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ac281e3f/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 777828e..22ef212 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -23,6 +23,9 @@ Trunk (Unreleased)
 
     HADOOP-11731. Rework the changelog and releasenotes (aw)
 
+    HADOOP-11627. Remove io.native.lib.available.
+    (Brahma Reddy Battula via aajisaka)
+
   NEW FEATURES
 
     HADOOP-6590. Add a username check for hadoop sub-commands (John Smith via

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ac281e3f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 7c25e6c..54e07c6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -447,8 +447,6 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
         CommonConfigurationKeys.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY),
       new DeprecationDelta("dfs.df.interval", 
         CommonConfigurationKeys.FS_DF_INTERVAL_KEY),
-      new DeprecationDelta("hadoop.native.lib", 
-        CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY),
       new DeprecationDelta("fs.default.name", 
         CommonConfigurationKeys.FS_DEFAULT_NAME_KEY),
       new DeprecationDelta("dfs.umaskmode",

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ac281e3f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index 87c2aba..90c6934 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -38,11 +38,6 @@ public class CommonConfigurationKeysPublic {
   
   // The Keys
   /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
-  public static final String  IO_NATIVE_LIB_AVAILABLE_KEY =
-    "io.native.lib.available";
-  /** Default value for IO_NATIVE_LIB_AVAILABLE_KEY */
-  public static final boolean IO_NATIVE_LIB_AVAILABLE_DEFAULT = true;
-  /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
   public static final String  NET_TOPOLOGY_SCRIPT_NUMBER_ARGS_KEY =
     "net.topology.script.number.args";
   /** Default value for NET_TOPOLOGY_SCRIPT_NUMBER_ARGS_KEY */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ac281e3f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java
index c82ac99..32fe910 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java
@@ -21,15 +21,9 @@ package org.apache.hadoop.io.compress.bzip2;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.util.NativeCodeLoader;
-
 import org.apache.hadoop.io.compress.Compressor;
 import org.apache.hadoop.io.compress.Decompressor;
-import org.apache.hadoop.io.compress.bzip2.Bzip2Compressor;
-import org.apache.hadoop.io.compress.bzip2.Bzip2Decompressor;
-import org.apache.hadoop.io.compress.bzip2.BZip2DummyCompressor;
-import org.apache.hadoop.io.compress.bzip2.BZip2DummyDecompressor;
+import org.apache.hadoop.util.NativeCodeLoader;
 
 /**
  * A collection of factories to create the right 
@@ -58,10 +52,7 @@ public class Bzip2Factory {
       bzip2LibraryName = libname;
       if (libname.equals("java-builtin")) {
         LOG.info("Using pure-Java version of bzip2 library");
-      } else if (conf.getBoolean(
-                CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, 
-                CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_DEFAULT) &&
-          NativeCodeLoader.isNativeCodeLoaded()) {
+      } else if (NativeCodeLoader.isNativeCodeLoaded()) {
         try {
           // Initialize the native library.
           Bzip2Compressor.initSymbols(libname);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ac281e3f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
index 4112d27..9d8e1d9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
@@ -27,7 +27,8 @@ import org.apache.hadoop.io.compress.DirectDecompressor;
 import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
 import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
 import org.apache.hadoop.util.NativeCodeLoader;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
+
+import com.google.common.annotations.VisibleForTesting;
 
 /**
  * A collection of factories to create the right 
@@ -41,6 +42,15 @@ public class ZlibFactory {
   private static boolean nativeZlibLoaded = false;
   
   static {
+    loadNativeZLib();
+  }
+
+  /**
+   * Load native library and set the flag whether to use native library. The
+   * method is also used for reset the flag modified by setNativeZlibLoaded
+   */
+  @VisibleForTesting
+  public static void loadNativeZLib() {
     if (NativeCodeLoader.isNativeCodeLoaded()) {
       nativeZlibLoaded = ZlibCompressor.isNativeZlibLoaded() &&
         ZlibDecompressor.isNativeZlibLoaded();
@@ -54,6 +64,15 @@ public class ZlibFactory {
   }
   
   /**
+   * Set the flag whether to use native library. Used for testing non-native
+   * libraries
+   *
+   */
+  @VisibleForTesting
+  public static void setNativeZlibLoaded(final boolean isLoaded) {
+    ZlibFactory.nativeZlibLoaded = isLoaded;
+  }
+  /**
    * Check if native-zlib code is loaded & initialized correctly and 
    * can be loaded for this job.
    * 
@@ -62,9 +81,7 @@ public class ZlibFactory {
    *         and can be loaded for this job, else <code>false</code>
    */
   public static boolean isNativeZlibLoaded(Configuration conf) {
-    return nativeZlibLoaded && conf.getBoolean(
-                          CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, 
-                          CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_DEFAULT);
+    return nativeZlibLoaded;
   }
 
   public static String getLibraryName() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ac281e3f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
index 533fc07..79d4c0c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java
@@ -22,8 +22,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
 
 /**
  * A helper to load the native hadoop code i.e. libhadoop.so.
@@ -86,28 +84,4 @@ public class NativeCodeLoader {
 
   public static native String getLibraryName();
 
-  /**
-   * Return if native hadoop libraries, if present, can be used for this job.
-   * @param conf configuration
-   * 
-   * @return <code>true</code> if native hadoop libraries, if present, can be
-   *         used for this job; <code>false</code> otherwise.
-   */
-  public boolean getLoadNativeLibraries(Configuration conf) {
-    return conf.getBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, 
-                           CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_DEFAULT);
-  }
-  
-  /**
-   * Set if native hadoop libraries, if present, can be used for this job.
-   * 
-   * @param conf configuration
-   * @param loadNativeLibraries can native hadoop libraries be loaded
-   */
-  public void setLoadNativeLibraries(Configuration conf, 
-                                     boolean loadNativeLibraries) {
-    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
-                    loadNativeLibraries);
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ac281e3f/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index 46eae0a..34284d1 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -39,15 +39,6 @@
 </property>
 
 <property>
-  <name>io.native.lib.available</name>
-  <value>true</value>
-  <description>Controls whether to use native libraries for bz2 and zlib
-    compression codecs or not. The property does not control any other native
-    libraries.
-  </description>
-</property>
-
-<property>
   <name>hadoop.http.filter.initializers</name>
   <value>org.apache.hadoop.http.lib.StaticUserWebFilter</value>
   <description>A comma separated list of class names. Each class in the list 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ac281e3f/hadoop-common-project/hadoop-common/src/site/markdown/DeprecatedProperties.md
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/DeprecatedProperties.md
b/hadoop-common-project/hadoop-common/src/site/markdown/DeprecatedProperties.md
index e9b82ad..f964735 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/DeprecatedProperties.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/DeprecatedProperties.md
@@ -283,5 +283,6 @@ The following table lists additional changes to some configuration properties:
 |:---- |:---- |
 | mapred.create.symlink | NONE - symlinking is always on |
 | mapreduce.job.cache.symlink.create | NONE - symlinking is always on |
+| io.native.lib.available | NONE - Always use native libraries if available. |
 
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ac281e3f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
index 5d4af91..1e3809e 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
@@ -17,6 +17,14 @@
  */
 package org.apache.hadoop.io.compress;
 
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.junit.Assume.assumeTrue;
+
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.BufferedReader;
@@ -40,6 +48,9 @@ import java.util.Random;
 import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;
 
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FileStatus;
@@ -51,9 +62,10 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.MapFile;
 import org.apache.hadoop.io.RandomDatum;
 import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.io.compress.bzip2.Bzip2Factory;
 import org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor;
 import org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater;
 import org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater;
@@ -61,20 +73,13 @@ import org.apache.hadoop.io.compress.zlib.ZlibCompressor;
 import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
 import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
 import org.apache.hadoop.io.compress.zlib.ZlibFactory;
-import org.apache.hadoop.io.compress.bzip2.Bzip2Factory;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.ReflectionUtils;
-
-import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
+import org.junit.After;
 import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.Test;
-import static org.junit.Assert.*;
-import static org.junit.Assume.*;
 
 public class TestCodec {
 
@@ -84,6 +89,10 @@ public class TestCodec {
   private int count = 10000;
   private int seed = new Random().nextInt();
 
+  @After
+  public void after() {
+    ZlibFactory.loadNativeZLib();
+  }
   @Test
   public void testDefaultCodec() throws IOException {
     codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.DefaultCodec");
@@ -364,7 +373,6 @@ public class TestCodec {
   @Test
   public void testCodecPoolGzipReuse() throws Exception {
     Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
     assumeTrue(ZlibFactory.isNativeZlibLoaded(conf));
     GzipCodec gzc = ReflectionUtils.newInstance(GzipCodec.class, conf);
     DefaultCodec dfc = ReflectionUtils.newInstance(DefaultCodec.class, conf);
@@ -446,7 +454,6 @@ public class TestCodec {
   @Test
   public void testCodecInitWithCompressionLevel() throws Exception {
     Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
     if (ZlibFactory.isNativeZlibLoaded(conf)) {
       LOG.info("testCodecInitWithCompressionLevel with native");
       codecTestWithNOCompression(conf,
@@ -458,7 +465,8 @@ public class TestCodec {
                + ": native libs not loaded");
     }
     conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
+    // don't use native libs
+    ZlibFactory.setNativeZlibLoaded(false);
     codecTestWithNOCompression( conf,
                          "org.apache.hadoop.io.compress.DefaultCodec");
   }
@@ -466,14 +474,14 @@ public class TestCodec {
   @Test
   public void testCodecPoolCompressorReinit() throws Exception {
     Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
     if (ZlibFactory.isNativeZlibLoaded(conf)) {
       GzipCodec gzc = ReflectionUtils.newInstance(GzipCodec.class, conf);
       gzipReinitTest(conf, gzc);
     } else {
       LOG.warn("testCodecPoolCompressorReinit skipped: native libs not loaded");
     }
-    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
+    // don't use native libs
+    ZlibFactory.setNativeZlibLoaded(false);
     DefaultCodec dfc = ReflectionUtils.newInstance(DefaultCodec.class, conf);
     gzipReinitTest(conf, dfc);
   }
@@ -660,7 +668,8 @@ public class TestCodec {
     gzbuf.reset(dflbuf.getData(), dflbuf.getLength());
 
     Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
+    // don't use native libs
+    ZlibFactory.setNativeZlibLoaded(false);
     CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);
     Decompressor decom = codec.createDecompressor();
     assertNotNull(decom);
@@ -713,14 +722,14 @@ public class TestCodec {
   @Test
   public void testBuiltInGzipConcat() throws IOException {
     Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
+    // don't use native libs
+    ZlibFactory.setNativeZlibLoaded(false);
     GzipConcatTest(conf, BuiltInGzipDecompressor.class);
   }
 
   @Test
   public void testNativeGzipConcat() throws IOException {
     Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
     assumeTrue(ZlibFactory.isNativeZlibLoaded(conf));
     GzipConcatTest(conf, GzipCodec.GzipZlibDecompressor.class);
   }
@@ -732,10 +741,7 @@ public class TestCodec {
 
     // Don't use native libs for this test.
     Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
-    assertFalse("ZlibFactory is using native libs against request",
-        ZlibFactory.isNativeZlibLoaded(conf));
-
+    ZlibFactory.setNativeZlibLoaded(false);
     // Ensure that the CodecPool has a BuiltInZlibInflater in it.
     Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
     assertNotNull("zlibDecompressor is null!", zlibDecompressor);
@@ -784,7 +790,7 @@ public class TestCodec {
 
     // Don't use native libs for this test.
     Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
+    ZlibFactory.setNativeZlibLoaded(false);
     assertFalse("ZlibFactory is using native libs against request",
         ZlibFactory.isNativeZlibLoaded(conf));
 
@@ -833,7 +839,6 @@ public class TestCodec {
 
     // Use native libs per the parameter
     Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, useNative);
     if (useNative) {
       assumeTrue(ZlibFactory.isNativeZlibLoaded(conf));
     } else {
@@ -887,6 +892,8 @@ public class TestCodec {
 
   @Test
   public void testGzipCodecWriteJava() throws IOException {
+    // don't use native libs
+    ZlibFactory.setNativeZlibLoaded(false);
     testGzipCodecWrite(false);
   }
 
@@ -901,8 +908,7 @@ public class TestCodec {
 
     // Don't use native libs for this test.
     Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
-                    false);
+    ZlibFactory.setNativeZlibLoaded(false);
     assertFalse("ZlibFactory is using native libs against request",
                 ZlibFactory.isNativeZlibLoaded(conf));
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ac281e3f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zlib/TestZlibCompressorDecompressor.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zlib/TestZlibCompressorDecompressor.java
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zlib/TestZlibCompressorDecompressor.java
index e751125..6a4fc60 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zlib/TestZlibCompressorDecompressor.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zlib/TestZlibCompressorDecompressor.java
@@ -100,7 +100,6 @@ public class TestZlibCompressorDecompressor {
   @Test
   public void testZlibCompressorDecompressorWithConfiguration() {
     Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
     if (ZlibFactory.isNativeZlibLoaded(conf)) {
       byte[] rawData;
       int tryNumber = 5;
@@ -214,7 +213,6 @@ public class TestZlibCompressorDecompressor {
   @Test
   public void testZlibCompressorDecompressorSetDictionary() {
     Configuration conf = new Configuration();
-    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
     if (ZlibFactory.isNativeZlibLoaded(conf)) {
       Compressor zlibCompressor = ZlibFactory.getZlibCompressor(conf);
       Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ac281e3f/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
index 3502198..e89d0e4 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
@@ -247,8 +247,6 @@ public class TestTFileSeqFileComparison extends TestCase {
     public SeqFileAppendable(FileSystem fs, Path path, int osBufferSize,
         String compress, int minBlkSize) throws IOException {
       Configuration conf = new Configuration();
-      conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
-                      true);
 
       CompressionCodec codec = null;
       if ("lzo".equals(compress)) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ac281e3f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
index 99221cb..22a05c5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
@@ -18,30 +18,34 @@
 
 package org.apache.hadoop.mapred;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 import java.io.ByteArrayInputStream;
 import java.io.FileInputStream;
 import java.io.IOException;
-import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.zip.Inflater;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.compress.*;
+import org.apache.hadoop.io.compress.BZip2Codec;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.CompressionInputStream;
+import org.apache.hadoop.io.compress.GzipCodec;
+import org.apache.hadoop.io.compress.zlib.ZlibFactory;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.ReflectionUtils;
-
+import org.junit.After;
 import org.junit.Ignore;
 import org.junit.Test;
-import static org.junit.Assert.*;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 @Ignore
 public class TestConcatenatedCompressedInput {
   private static final Log LOG =
@@ -76,6 +80,10 @@ public class TestConcatenatedCompressedInput {
     }
   }
 
+  @After
+  public void after() {
+    ZlibFactory.loadNativeZLib();
+  }
   private static Path workDir =
     new Path(new Path(System.getProperty("test.build.data", "/tmp")),
              "TestConcatenatedCompressedInput").makeQualified(localFs);
@@ -302,12 +310,12 @@ public class TestConcatenatedCompressedInput {
   @Test
   public void testBuiltInGzipDecompressor() throws IOException {
     JobConf jobConf = new JobConf(defaultConf);
-    jobConf.setBoolean("io.native.lib.available", false);
 
     CompressionCodec gzip = new GzipCodec();
     ReflectionUtils.setConf(gzip, jobConf);
     localFs.delete(workDir, true);
-
+    // Don't use native libs for this test
+    ZlibFactory.setNativeZlibLoaded(false);
     assertEquals("[non-native (Java) codec]",
       org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.class,
       gzip.getDecompressorType());
@@ -351,9 +359,7 @@ public class TestConcatenatedCompressedInput {
     assertEquals("total uncompressed lines in concatenated test file",
                  84, lineNum);
 
-    // test BuiltInGzipDecompressor with lots of different input-buffer sizes
-    doMultipleGzipBufferSizes(jobConf, false);
-
+    ZlibFactory.loadNativeZLib();
     // test GzipZlibDecompressor (native), just to be sure
     // (FIXME?  could move this call to testGzip(), but would need filename
     // setup above) (alternatively, maybe just nuke testGzip() and extend this?)
@@ -370,7 +376,6 @@ public class TestConcatenatedCompressedInput {
       (useNative? "GzipZlibDecompressor" : "BuiltInGzipDecompressor") +
       COLOR_NORMAL);
 
-    jConf.setBoolean("io.native.lib.available", useNative);
 
     int bufferSize;
 
@@ -575,23 +580,17 @@ public class TestConcatenatedCompressedInput {
  */
 
     // test CBZip2InputStream with lots of different input-buffer sizes
-    doMultipleBzip2BufferSizes(jobConf, false);
-
-    // no native version of bzip2 codec (yet?)
-    //doMultipleBzip2BufferSizes(jobConf, true);
+    doMultipleBzip2BufferSizes(jobConf);
   }
 
-  // this tests either the native or the non-native gzip decoder with more than
+  // this tests native bzip2 decoder with more than
   // three dozen input-buffer sizes in order to try to catch any parser/state-
   // machine errors at buffer boundaries
-  private static void doMultipleBzip2BufferSizes(JobConf jConf,
-                                                boolean useNative)
+  private static void doMultipleBzip2BufferSizes(JobConf jConf)
   throws IOException {
     System.out.println(COLOR_MAGENTA + "doMultipleBzip2BufferSizes() using " +
       "default bzip2 decompressor" + COLOR_NORMAL);
 
-    jConf.setBoolean("io.native.lib.available", useNative);
-
     int bufferSize;
 
     // ideally would add some offsets/shifts in here (e.g., via extra header


Mime
View raw message