Subject: svn commit: r1380310 - in /hadoop/common/trunk/hadoop-mapreduce-project: ./ hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/
Date: Mon, 03 Sep 2012 18:54:28 -0000
To: mapreduce-commits@hadoop.apache.org
From: shv@apache.org

Author: shv
Date: Mon Sep 3 18:54:27 2012
New Revision: 1380310

URL: http://svn.apache.org/viewvc?rev=1380310&view=rev
Log:
MAPREDUCE-2786. Add compression option for TestDFSIO. Contributed by Plamen Jeliazkov.

Modified:
    hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/IOMapperBase.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java

Modified: hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt?rev=1380310&r1=1380309&r2=1380310&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt Mon Sep 3 18:54:27 2012
@@ -147,6 +147,9 @@ Branch-2 ( Unreleased changes )
     MAPREDUCE-4408. allow jobs to set a JAR that is in the distributed cached
     (rkanter via tucu)
 
+    MAPREDUCE-2786. Add compression option for TestDFSIO.
+    (Plamen Jeliazkov via shv)
+
   BUG FIXES
 
     MAPREDUCE-4422. YARN_APPLICATION_CLASSPATH needs a documented default value in
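
For context on the two Java diffs below: the new -compression option travels through the
job configuration key "test.io.compression.class", and each mapper resolves and
instantiates the named codec reflectively when it is configured. A minimal, self-contained
sketch of that lookup pattern follows; the class name CodecLookupSketch and the GzipCodec
value are illustrative choices for this sketch, not part of the patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class CodecLookupSketch {
  public static void main(String[] args) throws ClassNotFoundException {
    Configuration conf = new Configuration();
    // The TestDFSIO driver stores the -compression argument under this key;
    // GzipCodec is only an example value here.
    conf.set("test.io.compression.class",
             "org.apache.hadoop.io.compress.GzipCodec");

    String compression = conf.get("test.io.compression.class", null);
    CompressionCodec codec = null;
    if (compression != null) {
      // Resolve the class by name, then instantiate it with the configuration,
      // mirroring the reflective pattern used in IOMapperBase below.
      Class<? extends CompressionCodec> codecClass =
          Class.forName(compression).asSubclass(CompressionCodec.class);
      codec = ReflectionUtils.newInstance(codecClass, conf);
    }
    System.out.println("codec = " + (codec == null ? "none" : codec.getClass().getName()));
  }
}

Keeping the codec class name in the configuration, rather than passing it to each task some
other way, lets every map task rebuild the same codec from the JobConf it already receives.
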
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/IOMapperBase.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/IOMapperBase.java?rev=1380310&r1=1380309&r2=1380310&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/IOMapperBase.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/IOMapperBase.java Mon Sep 3 18:54:27 2012
@@ -22,7 +22,9 @@ import java.net.InetAddress;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.util.ReflectionUtils;
 
 /**
  * Base mapper class for IO operations.
@@ -41,6 +43,7 @@ public abstract class IOMapperBase<T> ex
   protected int bufferSize;
   protected FileSystem fs;
   protected String hostName;
+  protected CompressionCodec compressionCodec;
 
   public IOMapperBase() { 
   }
@@ -59,6 +62,22 @@ public abstract class IOMapperBase<T> ex
     } catch(Exception e) {
       hostName = "localhost";
     }
+    
+    //grab compression
+    String compression = getConf().get("test.io.compression.class", null);
+    Class<? extends CompressionCodec> codec;
+
+    //try to initialize codec
+    try {
+      codec = (compression == null) ? null : 
+        Class.forName(compression).asSubclass(CompressionCodec.class);
+    } catch(Exception e) {
+      throw new RuntimeException("Compression codec not found: ", e);
+    }
+
+    if(codec != null) {
+      compressionCodec = (CompressionCodec) ReflectionUtils.newInstance(codec, getConf());
+    }
   }
 
   public void close() throws IOException {

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java?rev=1380310&r1=1380309&r2=1380310&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java Mon Sep 3 18:54:27 2012
@@ -23,6 +23,7 @@ import java.io.DataInputStream;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.PrintStream;
@@ -295,6 +296,8 @@ public class TestDFSIO extends TestCase
       // create file
       OutputStream out;
       out = fs.create(new Path(getDataDir(getConf()), name), true, bufferSize);
+      
+      if(compressionCodec != null) out = compressionCodec.createOutputStream(out);
 
       try {
         // write to the file
@@ -358,6 +361,8 @@ public class TestDFSIO extends TestCase
       OutputStream out;
       out = fs.append(new Path(getDataDir(getConf()), name), 
           bufferSize);
+      if(compressionCodec != null) out = compressionCodec.createOutputStream(out);
+      
       try {
         // write to the file
         long nrRemaining;
@@ -394,7 +399,10 @@ public class TestDFSIO extends TestCase
                        long totalSize // in bytes
                        ) throws IOException {
       // open file
-      DataInputStream in = fs.open(new Path(getDataDir(getConf()), name));
+      InputStream in = fs.open(new Path(getDataDir(getConf()), name));
+      
+      if(compressionCodec != null) in = compressionCodec.createInputStream(in);
+      
       long actualSize = 0;
       try {
         while (actualSize < totalSize) {
@@ -459,6 +467,7 @@ public class TestDFSIO extends TestCase
     long fileSize = 1*MEGA;
     int nrFiles = 1;
     String resFileName = DEFAULT_RES_FILE_NAME;
+    String compressionClass = null;
     boolean isSequential = false;
     String version = TestDFSIO.class.getSimpleName() + ".0.0.6";
 
@@ -479,6 +488,8 @@ public class TestDFSIO extends TestCase
         testType = TEST_TYPE_CLEANUP;
       } else if (args[i].startsWith("-seq")) {
         isSequential = true;
+      } else if (args[i].startsWith("-compression")) {
+        compressionClass = args[++i];
       } else if (args[i].equals("-nrFiles")) {
         nrFiles = Integer.parseInt(args[++i]);
       } else if (args[i].equals("-fileSize")) {
@@ -497,6 +508,11 @@ public class TestDFSIO extends TestCase
     LOG.info("fileSize (MB) = " + toMB(fileSize));
     LOG.info("bufferSize = " + bufferSize);
     LOG.info("baseDir = " + getBaseDir(config));
+    
+    if(compressionClass != null) {
+      config.set("test.io.compression.class", compressionClass);
+      LOG.info("compressionClass = " + compressionClass);
+    }
 
     config.setInt("test.io.file.buffer.size", bufferSize);
     config.setBoolean(DFSConfigKeys.DFS_SUPPORT_APPEND_KEY, true);
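
Once the codec is available, the write, append, and read paths above simply wrap their raw
streams before doing any I/O. A small stand-alone sketch of that wrapping, using a local
file and GzipCodec purely for illustration (the path, class name, and codec choice are
assumptions for the sketch, not part of the patch):

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class StreamWrapSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Instantiate the codec the same way the mappers do; GzipCodec is illustrative.
    CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);

    // Write path: wrap the raw stream once, then write through the wrapper,
    // as the patched write/append mappers do when compressionCodec != null.
    OutputStream out = new FileOutputStream("/tmp/testdfsio-sketch.gz");
    out = codec.createOutputStream(out);
    out.write("hello, compressed world".getBytes("UTF-8"));
    out.close();

    // Read path: wrap the raw stream so reads see decompressed bytes,
    // matching the read mapper's use of createInputStream().
    InputStream in = new FileInputStream("/tmp/testdfsio-sketch.gz");
    in = codec.createInputStream(in);
    byte[] buf = new byte[1024];
    int n = in.read(buf);
    System.out.println(new String(buf, 0, n, "UTF-8"));
    in.close();
  }
}

With the patch applied, an invocation would presumably look something like
"hadoop org.apache.hadoop.fs.TestDFSIO -write -nrFiles 10 -fileSize 128
-compression org.apache.hadoop.io.compress.GzipCodec"; the exact launcher and test jar
vary between releases, so treat that command line as illustrative.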