From: tgraves@apache.org
To: mapreduce-commits@hadoop.apache.org
Subject: svn commit: r1312018 - in /hadoop/common/trunk/hadoop-mapreduce-project: ./ hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/ hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testjar/
Date: Tue, 10 Apr 2012 22:09:00 -0000
Message-Id: <20120410220900.D76D7238899C@eris.apache.org>

Author: tgraves
Date: Tue Apr 10 22:09:00 2012
New Revision: 1312018

URL: http://svn.apache.org/viewvc?rev=1312018&view=rev
Log:
MAPREDUCE-4108. Fix tests in org.apache.hadoop.util.TestRunJar (Devaraj K via tgraves)

Added:
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/Hello.java
      - copied, changed from r1312013, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testjar/Hello.java
Removed:
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testjar/Hello.java
Modified:
    hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java

Modified: hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt?rev=1312018&r1=1312017&r2=1312018&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt Tue Apr 10 22:09:00 2012
@@ -229,6 +229,9 @@ Release 2.0.0 - UNRELEASED
     MAPREDUCE-4076. Stream job fails with ZipException when use yarn jar
     command (Devaraj K via bobby)
+
+    MAPREDUCE-4108. Fix tests in org.apache.hadoop.util.TestRunJar
+    (Devaraj K via tgraves)
 
 Release 0.23.3 - UNRELEASED

Copied: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/Hello.java (from r1312013, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testjar/Hello.java)
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/Hello.java?p2=hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/Hello.java&p1=hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testjar/Hello.java&r1=1312013&r2=1312018&rev=1312018&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/testjar/Hello.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/Hello.java Tue Apr 10 22:09:00 2012
@@ -16,25 +16,24 @@
  * limitations under the License.
  */
 
-package testjar;
+package org.apache.hadoop.util;
 
 import java.io.FileOutputStream;
 import java.io.IOException;
 
 /**
- * A simple Hello class that is called from TestRunJar
- * 
+ * A simple Hello class that is called from TestRunJar
+ *
  */
 public class Hello {
-  public static void main(String[] args){
+  public static void main(String[] args) {
     try {
       System.out.println("Creating file" + args[0]);
       FileOutputStream fstream = new FileOutputStream(args[0]);
       fstream.write("Hello Hadoopers".getBytes());
       fstream.close();
-    }
-    catch (IOException e) {
-      //do nothing
+    } catch (IOException e) {
+      // do nothing
     }
   }
 }

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java?rev=1312018&r1=1312017&r2=1312018&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/util/TestRunJar.java Tue Apr 10 22:09:00 2012
@@ -18,34 +18,63 @@
 
 package org.apache.hadoop.util;
 
+import java.io.BufferedInputStream;
 import java.io.File;
-import org.apache.hadoop.fs.Path;
-import org.junit.Ignore;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.jar.JarOutputStream;
+import java.util.zip.ZipEntry;
 
-import junit.framework.TestCase;
+import org.apache.hadoop.fs.Path;
+import org.junit.Assert;
+import org.junit.Test;
 
 /**
  * A test to rest the RunJar class.
  */
-@Ignore
-public class TestRunJar extends TestCase {
-
+public class TestRunJar {
+
   private static String TEST_ROOT_DIR = new Path(System.getProperty(
       "test.build.data", "/tmp")).toString();
-
+
+  private static final String TEST_JAR_NAME = "testjar.jar";
+  private static final String CLASS_NAME = "Hello.class";
+
+  @Test
   public void testRunjar() throws Throwable {
-
-    File outFile = new File(TEST_ROOT_DIR, "out");
-    // delete if output file already exists.
+    File outFile = new File(TEST_ROOT_DIR, "out");
+    // delete if output file already exists.
     if (outFile.exists()) {
       outFile.delete();
     }
-
+    File makeTestJar = makeTestJar();
+
     String[] args = new String[3];
-    args[0] = "build/test/mapred/testjar/testjob.jar";
-    args[1] = "testjar.Hello";
+    args[0] = makeTestJar.getAbsolutePath();
+    args[1] = "org.apache.hadoop.util.Hello";
     args[2] = outFile.toString();
     RunJar.main(args);
-    assertTrue("RunJar failed", outFile.exists());
+    Assert.assertTrue("RunJar failed", outFile.exists());
+  }
+
+  private File makeTestJar() throws IOException {
+    File jarFile = new File(TEST_ROOT_DIR, TEST_JAR_NAME);
+    JarOutputStream jstream = new JarOutputStream(new FileOutputStream(jarFile));
+    InputStream entryInputStream = this.getClass().getResourceAsStream(
+        CLASS_NAME);
+    ZipEntry entry = new ZipEntry("org/apache/hadoop/util/" + CLASS_NAME);
+    jstream.putNextEntry(entry);
+    BufferedInputStream bufInputStream = new BufferedInputStream(
+        entryInputStream, 2048);
+    int count;
+    byte[] data = new byte[2048];
+    while ((count = bufInputStream.read(data, 0, 2048)) != -1) {
+      jstream.write(data, 0, count);
+    }
+    jstream.closeEntry();
+    jstream.close();
+
+    return jarFile;
   }
-}
+}
\ No newline at end of file
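
The substance of the change above is that TestRunJar no longer relies on a pre-built build/test/mapred/testjar/testjob.jar; the new makeTestJar() helper packages the compiled Hello.class from the test classpath into a fresh jar at run time and passes that jar to RunJar.main, the same entry point the "hadoop jar" command drives. The following standalone sketch illustrates the same jar-packaging idiom using only JDK APIs; the class name JarPackagingSketch and the /tmp/sketch.jar path are illustrative and are not part of the commit.

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;

// Illustrative sketch only: mirrors the makeTestJar() idiom from the commit,
// but with hypothetical names; it is not code from the Hadoop tree.
public class JarPackagingSketch {

  /**
   * Copies a .class resource that is already on the classpath into a new jar,
   * preserving its package path so the class can later be loaded from the jar.
   */
  static File packageClassIntoJar(Class<?> clazz, File jarFile) throws IOException {
    // Class.getResourceAsStream resolves a relative name against the class's
    // own package, so "Hello.class" finds org/apache/hadoop/util/Hello.class.
    String classResource = clazz.getSimpleName() + ".class";
    String entryName = clazz.getName().replace('.', '/') + ".class";

    try (JarOutputStream jar = new JarOutputStream(new FileOutputStream(jarFile));
         InputStream in = new BufferedInputStream(clazz.getResourceAsStream(classResource))) {
      jar.putNextEntry(new ZipEntry(entryName));
      byte[] buf = new byte[2048];
      int count;
      while ((count = in.read(buf)) != -1) {
        jar.write(buf, 0, count);
      }
      jar.closeEntry();
    }
    return jarFile;
  }

  public static void main(String[] args) throws IOException {
    // Demonstration: package this sketch's own class file into /tmp/sketch.jar.
    File jar = packageClassIntoJar(JarPackagingSketch.class, new File("/tmp/sketch.jar"));
    System.out.println("Wrote " + jar.getAbsolutePath());
  }
}

Because the jar entry keeps the package directory structure, RunJar can load the fully-qualified class (here org.apache.hadoop.util.Hello in the test) from the generated jar just as it would from a user-supplied job jar.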