From: arp@apache.org
To: common-commits@hadoop.apache.org
Date: Sat, 13 Sep 2014 03:54:34 -0000
Subject: [28/50] [abbrv] git commit: MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is sucessful (Contributed by Binglin Chang)

MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is sucessful (Contributed by Binglin Chang)

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/17cd0faa
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/17cd0faa
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/17cd0faa

Branch: refs/heads/HDFS-6581
Commit: 17cd0faaceb8f9ce00b8c2c200e810f78f36c671
Parents: bfd1d75
Author: Binglin Chang
Authored: Tue Sep 2 11:00:48 2014 +0800
Committer: Binglin Chang
Committed: Tue Sep 2 11:00:48 2014 +0800

----------------------------------------------------------------------
 .../CHANGES.MAPREDUCE-2841.txt                  |  1 +
 .../hadoop-mapreduce-client-nativetask/pom.xml  | 28 ++------------------
 .../nativetask/combinertest/CombinerTest.java   | 10 +++++--
 .../combinertest/LargeKVCombinerTest.java       | 15 +++++++++--
 .../combinertest/OldAPICombinerTest.java        |  5 ++++
 .../nativetask/compresstest/CompressTest.java   | 18 ++++++++-----
 .../hadoop/mapred/nativetask/kvtest/KVJob.java  |  5 ++--
 .../hadoop/mapred/nativetask/kvtest/KVTest.java | 15 +++++++++--
 .../mapred/nativetask/kvtest/LargeKVTest.java   | 15 +++++++++--
 .../nativetask/nonsorttest/NonSortTest.java     |  5 ++++
 10 files changed, 74 insertions(+), 43 deletions(-)
----------------------------------------------------------------------
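Summary of the pattern, for readers skimming the hunks below: every native-task test now (1) assumes in its @Before method that the native library is loadable, so the test is skipped rather than failed on machines without the native build, and (2) asserts on the return value of Job#waitForCompletion so a failed MR job fails the test. The following sketch is illustrative only; the class name, the buildJob() helper, and the bare Job.getInstance() call are placeholders and are not part of this patch:

    import static org.junit.Assert.assertTrue;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapred.nativetask.NativeRuntime;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.util.NativeCodeLoader;
    import org.junit.Assume;
    import org.junit.Before;
    import org.junit.Test;

    public class NativeTaskTestPatternSketch {

      @Before
      public void startUp() throws Exception {
        // Skip (rather than fail) the test when the native library cannot be loaded.
        Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
        Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
      }

      @Test
      public void jobStatusIsChecked() throws Exception {
        // buildJob() is a hypothetical stand-in for getJob()/getCompressJob()/KVJob.
        final Job job = buildJob();
        // The return value used to be ignored, so a failed MR job could still pass the test.
        assertTrue(job.waitForCompletion(true));
      }

      private Job buildJob() throws Exception {
        return Job.getInstance(new Configuration());
      }
    }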

http://git-wip-us.apache.org/repos/asf/hadoop/blob/17cd0faa/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 6384757..baa88c1 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -17,3 +17,4 @@ MAPREDUCE-6026. native-task: fix logging (Manu Zhang via todd)
 MAPREDUCE-6035. native-task: sources/test-sources jar distribution (Manu Zhang via todd)
 MAPREDUCE-5977. Fix or suppress native-task gcc warnings (Manu Zhang via todd)
 MAPREDUCE-6054. native-task: Speed up tests (todd)
+MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is sucessful (Binglin Chang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/17cd0faa/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
index bb7d7bb..f62743e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
@@ -83,10 +83,8 @@
-              **/TestTaskContext.java
-              **/buffer/Test*.java
-              **/handlers/Test*.java
-              **/serde/Test*.java
+              **/*Test.java
+              **/Test*.java
@@ -201,28 +199,6 @@
-
-        org.apache.maven.plugins
-        maven-surefire-plugin
-
-
-
-            listener
-            org.apache.hadoop.test.TimedOutTestsListener
-
-
-
-            **/TestTaskContext.java
-            **/buffer/Test*.java
-            **/handlers/Test*.java
-            **/serde/Test*.java
-            **/combinertest/*Test.java
-            **/compresstest/*Test.java
-            **/nonsorttest/*Test.java
-            **/kvtest/*Test.java
-
-
-

http://git-wip-us.apache.org/repos/asf/hadoop/blob/17cd0faa/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
index 8a4aa6f..abbe28e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
@@ -18,6 +18,7 @@ package org.apache.hadoop.mapred.nativetask.combinertest;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -25,6 +26,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.Task;
+import org.apache.hadoop.mapred.nativetask.NativeRuntime;
 import org.apache.hadoop.mapred.nativetask.combinertest.WordCount.IntSumReducer;
 import org.apache.hadoop.mapred.nativetask.combinertest.WordCount.TokenizerMapper;
 import org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile;
@@ -36,6 +38,8 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.util.NativeCodeLoader;
+import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
@@ -58,11 +62,11 @@ public class CombinerTest {
     final Job normaljob = getJob("normalwordcount", commonConf, inputpath, hadoopoutputpath);
-    nativejob.waitForCompletion(true);
+    assertTrue(nativejob.waitForCompletion(true));
     Counter nativeReduceGroups = nativejob.getCounters().findCounter(Task.Counter.REDUCE_INPUT_RECORDS);
-    normaljob.waitForCompletion(true);
+    assertTrue(normaljob.waitForCompletion(true));
     Counter normalReduceGroups = normaljob.getCounters().findCounter(Task.Counter.REDUCE_INPUT_RECORDS);
     assertEquals(true, ResultVerifier.verify(nativeoutputpath, hadoopoutputpath));
@@ -77,6 +81,8 @@ public class CombinerTest {
   @Before
   public void startUp() throws Exception {
+    Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+    Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
     final ScenarioConfiguration conf = new ScenarioConfiguration();
     conf.addcombinerConf();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/17cd0faa/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/LargeKVCombinerTest.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/LargeKVCombinerTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/LargeKVCombinerTest.java
index ff444db..ccf0272 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/LargeKVCombinerTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/LargeKVCombinerTest.java
@@ -18,6 +18,7 @@ package org.apache.hadoop.mapred.nativetask.combinertest;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -27,17 +28,27 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.Task;
+import org.apache.hadoop.mapred.nativetask.NativeRuntime;
 import org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile;
 import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
 import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
 import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.util.NativeCodeLoader;
+import org.junit.Assume;
+import org.junit.Before;
 import org.junit.Test;

 public class LargeKVCombinerTest {
   private static final Log LOG = LogFactory.getLog(LargeKVCombinerTest.class);

+  @Before
+  public void startUp() throws Exception {
+    Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+    Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
+  }
+
   @Test
   public void testLargeValueCombiner(){
     final Configuration normalConf = ScenarioConfiguration.getNormalConfiguration();
@@ -74,10 +85,10 @@ public class LargeKVCombinerTest {
       final Job normaljob = CombinerTest.getJob("normalwordcount", normalConf, inputPath, hadoopOutputPath);
       final Job nativejob = CombinerTest.getJob("nativewordcount", nativeConf, inputPath, nativeOutputPath);
-      nativejob.waitForCompletion(true);
+      assertTrue(nativejob.waitForCompletion(true));
       Counter nativeReduceGroups = nativejob.getCounters().findCounter(Task.Counter.REDUCE_INPUT_RECORDS);
-      normaljob.waitForCompletion(true);
+      assertTrue(normaljob.waitForCompletion(true));
       Counter normalReduceGroups = normaljob.getCounters().findCounter(Task.Counter.REDUCE_INPUT_RECORDS);
       final boolean compareRet = ResultVerifier.verify(nativeOutputPath, hadoopOutputPath);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/17cd0faa/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/OldAPICombinerTest.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/OldAPICombinerTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/OldAPICombinerTest.java
index 5691e02..19493c7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/OldAPICombinerTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/OldAPICombinerTest.java
@@ -32,11 +32,14 @@ import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.Task;
 import org.apache.hadoop.mapred.TextOutputFormat;
+import org.apache.hadoop.mapred.nativetask.NativeRuntime;
 import org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile;
 import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
 import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
 import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
 import org.apache.hadoop.mapreduce.Counter;
+import org.apache.hadoop.util.NativeCodeLoader;
+import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
@@ -70,6 +73,8 @@ public class OldAPICombinerTest {
   @Before
   public void startUp() throws Exception {
+    Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+    Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
     final ScenarioConfiguration conf = new ScenarioConfiguration();
     conf.addcombinerConf();
     this.fs = FileSystem.get(conf);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/17cd0faa/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/compresstest/CompressTest.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/compresstest/CompressTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/compresstest/CompressTest.java
index b98e2de..67321d6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/compresstest/CompressTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/compresstest/CompressTest.java
@@ -18,16 +18,20 @@ package org.apache.hadoop.mapred.nativetask.compresstest;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.nativetask.NativeRuntime;
 import org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile;
 import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
 import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
 import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.util.NativeCodeLoader;
+import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
@@ -38,12 +42,12 @@ public class CompressTest {
     final Configuration conf = ScenarioConfiguration.getNativeConfiguration();
     conf.addResource(TestConstants.SNAPPY_COMPRESS_CONF_PATH);
     final Job job = CompressMapper.getCompressJob("nativesnappy", conf);
-    job.waitForCompletion(true);
+    assertTrue(job.waitForCompletion(true));
     final Configuration hadoopconf = ScenarioConfiguration.getNormalConfiguration();
     hadoopconf.addResource(TestConstants.SNAPPY_COMPRESS_CONF_PATH);
     final Job hadoopjob = CompressMapper.getCompressJob("hadoopsnappy", hadoopconf);
-    hadoopjob.waitForCompletion(true);
+    assertTrue(hadoopjob.waitForCompletion(true));
     final boolean compareRet = ResultVerifier.verify(CompressMapper.outputFileDir + "nativesnappy", CompressMapper.outputFileDir + "hadoopsnappy");
@@ -55,12 +59,12 @@ public class CompressTest {
     final Configuration conf = ScenarioConfiguration.getNativeConfiguration();
     conf.addResource(TestConstants.GZIP_COMPRESS_CONF_PATH);
     final Job job = CompressMapper.getCompressJob("nativegzip", conf);
-    job.waitForCompletion(true);
+    assertTrue(job.waitForCompletion(true));
     final Configuration hadoopconf = ScenarioConfiguration.getNormalConfiguration();
     hadoopconf.addResource(TestConstants.GZIP_COMPRESS_CONF_PATH);
     final Job hadoopjob = CompressMapper.getCompressJob("hadoopgzip", hadoopconf);
-    hadoopjob.waitForCompletion(true);
+    assertTrue(hadoopjob.waitForCompletion(true));
     final boolean compareRet = ResultVerifier.verify(CompressMapper.outputFileDir + "nativegzip", CompressMapper.outputFileDir + "hadoopgzip");
@@ -72,12 +76,12 @@ public class CompressTest {
     final Configuration nativeConf = ScenarioConfiguration.getNativeConfiguration();
     nativeConf.addResource(TestConstants.LZ4_COMPRESS_CONF_PATH);
     final Job nativeJob = CompressMapper.getCompressJob("nativelz4", nativeConf);
-    nativeJob.waitForCompletion(true);
+    assertTrue(nativeJob.waitForCompletion(true));
     final Configuration hadoopConf = ScenarioConfiguration.getNormalConfiguration();
     hadoopConf.addResource(TestConstants.LZ4_COMPRESS_CONF_PATH);
     final Job hadoopJob = CompressMapper.getCompressJob("hadooplz4", hadoopConf);
-    hadoopJob.waitForCompletion(true);
+    assertTrue(hadoopJob.waitForCompletion(true));
     final boolean compareRet = ResultVerifier.verify(CompressMapper.outputFileDir + "nativelz4", CompressMapper.outputFileDir + "hadooplz4");
     assertEquals("file compare result: if they are the same ,then return true", true, compareRet);
@@ -85,6 +89,8 @@ public class CompressTest {
   @Before
   public void startUp() throws Exception {
+    Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+    Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
     final ScenarioConfiguration conf = new ScenarioConfiguration();
     final FileSystem fs = FileSystem.get(conf);
     final Path path = new Path(CompressMapper.inputFile);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/17cd0faa/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVJob.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVJob.java
index 3215d0b..9e7bb5e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVJob.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVJob.java
@@ -96,8 +96,7 @@ public class KVJob {
     FileOutputFormat.setOutputPath(job, new Path(outputpath));
   }
-  public void runJob() throws Exception {
-
-    job.waitForCompletion(true);
+  public boolean runJob() throws Exception {
+    return job.waitForCompletion(true);
   }
 }
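As a hedged illustration of how the boolean-returning runJob() is meant to be consumed by callers (the job name, configuration, and key/value classes below are placeholders; the real call sites are in the KVTest and LargeKVTest hunks that follow):

    import static org.junit.Assert.assertTrue;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.nativetask.kvtest.KVJob;

    public class RunJobUsageSketch {
      // Runs a kvtest job and fails the caller if the MR job itself fails,
      // instead of silently ignoring the completion status as before.
      public void runAndCheck(Configuration conf, String inputpath, String outputpath) throws Exception {
        final KVJob keyJob = new KVJob("example-kvtest-job", conf, Text.class, Text.class, inputpath, outputpath);
        assertTrue("job should complete successfully", keyJob.runJob());
      }
    }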

http://git-wip-us.apache.org/repos/asf/hadoop/blob/17cd0faa/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVTest.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVTest.java
index 893a422..1657c04 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVTest.java
@@ -18,6 +18,7 @@ package org.apache.hadoop.mapred.nativetask.kvtest;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 import java.io.IOException;
 import java.util.List;
@@ -28,9 +29,13 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapred.nativetask.NativeRuntime;
 import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
 import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
 import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
+import org.apache.hadoop.util.NativeCodeLoader;
+import org.junit.Assume;
+import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -100,6 +105,12 @@ public class KVTest {
     this.valueclass = valueclass;
   }

+  @Before
+  public void startUp() throws Exception {
+    Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+    Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
+  }
+
   @Test
   public void testKVCompability() {
     try {
@@ -139,7 +150,7 @@ public class KVTest {
     nativekvtestconf.set(TestConstants.NATIVETASK_KVTEST_CREATEFILE, "true");
     try {
       final KVJob keyJob = new KVJob(jobname, nativekvtestconf, keyclass, valueclass, inputpath, outputpath);
-      keyJob.runJob();
+      assertTrue("job should complete successfully", keyJob.runJob());
     } catch (final Exception e) {
       return "native testcase run time error.";
     }
@@ -161,7 +172,7 @@ public class KVTest {
     hadoopkvtestconf.set(TestConstants.NATIVETASK_KVTEST_CREATEFILE, "false");
     try {
       final KVJob keyJob = new KVJob(jobname, hadoopkvtestconf, keyclass, valueclass, inputpath, outputpath);
-      keyJob.runJob();
+      assertTrue("job should complete successfully", keyJob.runJob());
     } catch (final Exception e) {
       return "normal testcase run time error.";
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/17cd0faa/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/LargeKVTest.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/LargeKVTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/LargeKVTest.java
index 1e120ab..6b99907 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/LargeKVTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/LargeKVTest.java
@@ -18,6 +18,7 @@ package org.apache.hadoop.mapred.nativetask.kvtest;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 import java.io.IOException;
@@ -28,14 +29,24 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.nativetask.NativeRuntime;
 import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
 import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
 import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
+import org.apache.hadoop.util.NativeCodeLoader;
+import org.junit.Assume;
+import org.junit.Before;
 import org.junit.Test;

 public class LargeKVTest {
   private static final Log LOG = LogFactory.getLog(LargeKVTest.class);

+  @Before
+  public void startUp() throws Exception {
+    Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+    Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
+  }
+
   @Test
   public void testKeySize() {
     runKVSizeTests(Text.class, IntWritable.class);
@@ -104,7 +115,7 @@ public class LargeKVTest {
     fs.close();
     try {
       final KVJob keyJob = new KVJob(jobname, conf, keyclass, valueclass, inputpath, outputpath);
-      keyJob.runJob();
+      assertTrue("job should complete successfully", keyJob.runJob());
     } catch (final Exception e) {
       return "normal testcase run time error.";
     }
@@ -123,7 +134,7 @@ public class LargeKVTest {
     fs.close();
     try {
       final KVJob keyJob = new KVJob(jobname, conf, keyclass, valueclass, inputpath, outputpath);
-      keyJob.runJob();
+      assertTrue("job should complete successfully", keyJob.runJob());
     } catch (final Exception e) {
       return "normal testcase run time error.";
     }
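Since the same Assume guard recurs in every test class touched by this patch, here is a minimal, self-contained JUnit 4 example (not part of the patch) of the behaviour those guards rely on: a failed assumption in @Before makes JUnit report the test as ignored rather than failed, so the native-task tests no longer break builds on hosts without the native library.

    import static org.junit.Assume.assumeTrue;

    import org.junit.Before;
    import org.junit.Test;

    public class AssumeBehaviourSketch {

      @Before
      public void checkNativeAvailable() {
        // Hypothetical flag standing in for the NativeCodeLoader/NativeRuntime checks.
        assumeTrue(Boolean.getBoolean("example.native.available"));
      }

      @Test
      public void runsOnlyWhenAssumptionHolds() {
        // Reached only when the assumption above passes; otherwise the test is skipped.
      }
    }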

http://git-wip-us.apache.org/repos/asf/hadoop/blob/17cd0faa/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
index b7b03e7..b1bb5b1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.nativetask.NativeRuntime;
 import org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile;
 import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
 import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
@@ -36,6 +37,8 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.apache.hadoop.util.NativeCodeLoader;
+import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
@@ -65,6 +68,8 @@ public class NonSortTest {
   @Before
   public void startUp() throws Exception {
+    Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+    Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
     final ScenarioConfiguration configuration = new ScenarioConfiguration();
     configuration.addNonSortTestConf();
     final FileSystem fs = FileSystem.get(configuration);