From: stack@apache.org
To: commits@hbase.apache.org
Subject: svn commit: r1494257 - in /hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase: HBaseTestingUtility.java mapreduce/TestImportExport.java
Date: Tue, 18 Jun 2013 18:55:40 -0000

Author: stack
Date: Tue Jun 18 18:55:39 2013
New Revision: 1494257

URL: http://svn.apache.org/r1494257
Log:
HBASE-6891 TestImportExport failing again due to configuration issues

Modified:
    hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
    hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java

Modified: hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java?rev=1494257&r1=1494256&r2=1494257&view=diff
==============================================================================
--- hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java (original)
+++ hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java Tue Jun 18 18:55:39 2013
@@ -1663,6 +1663,10 @@ public class HBaseTestingUtility {
     this.dfsCluster = cluster;
   }
 
+  public MiniMRCluster getMRCluster() {
+    return mrCluster;
+  }
+
   public FileSystem getTestFileSystem() throws IOException {
     return HFileSystem.get(conf);
   }

Modified: hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java?rev=1494257&r1=1494256&r2=1494257&view=diff
==============================================================================
--- hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java (original)
+++ hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java Tue Jun 18 18:55:39 2013
@@ -26,6 +26,7 @@ import java.io.IOException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -42,6 +43,7 @@ import org.apache.hadoop.hbase.client.Sc
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.junit.After;
@@ -64,12 +66,11 @@ public class TestImportExport {
   private static final byte[] QUAL = Bytes.toBytes("q");
   private static final String OUTPUT_DIR = "outputdir";
 
-  private static MiniHBaseCluster cluster;
   private static long now = System.currentTimeMillis();
 
   @BeforeClass
   public static void beforeClass() throws Exception {
-    cluster = UTIL.startMiniCluster();
+    UTIL.startMiniCluster();
     UTIL.startMiniMapReduceCluster();
   }
 
@@ -111,16 +112,16 @@ public class TestImportExport {
       "1000"
     };
 
-    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
+    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
     Configuration conf = opts.getConfiguration();
     args = opts.getRemainingArgs();
 
-    Job job = Export.createSubmittableJob(conf, args);
-    job.getConfiguration().set("mapreduce.framework.name", "yarn");
+    JobConf jobConf = UTIL.getMRCluster().createJobConf();
+    HBaseConfiguration.merge(jobConf, conf);
+    Job job = Export.createSubmittableJob(jobConf, args);
     job.waitForCompletion(false);
     assertTrue(job.isSuccessful());
-
     String IMPORT_TABLE = "importTableSimpleCase";
     t = UTIL.createTable(Bytes.toBytes(IMPORT_TABLE), FAMILYB);
     args = new String[] {
@@ -129,12 +130,13 @@ public class TestImportExport {
       OUTPUT_DIR
     };
 
-    opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
+    opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
     conf = opts.getConfiguration();
     args = opts.getRemainingArgs();
 
-    job = Import.createSubmittableJob(conf, args);
-    job.getConfiguration().set("mapreduce.framework.name", "yarn");
+    jobConf = UTIL.getMRCluster().createJobConf();
+    HBaseConfiguration.merge(jobConf, conf);
+    job = Import.createSubmittableJob(jobConf, args);
     job.waitForCompletion(false);
     assertTrue(job.isSuccessful());
 
@@ -158,12 +160,13 @@ public class TestImportExport {
     String EXPORT_TABLE = ".META.";
     String[] args = new String[] { EXPORT_TABLE, OUTPUT_DIR, "1", "0", "0" };
     GenericOptionsParser opts = new GenericOptionsParser(new Configuration(
-        cluster.getConfiguration()), args);
+        UTIL.getConfiguration()), args);
     Configuration conf = opts.getConfiguration();
     args = opts.getRemainingArgs();
 
-    Job job = Export.createSubmittableJob(conf, args);
-    job.getConfiguration().set("mapreduce.framework.name", "yarn");
+    JobConf jobConf = UTIL.getMRCluster().createJobConf();
+    HBaseConfiguration.merge(jobConf, conf);
+    Job job = Export.createSubmittableJob(jobConf, args);
     job.waitForCompletion(false);
     assertTrue(job.isSuccessful());
   }
@@ -200,12 +203,13 @@ public class TestImportExport {
       "1000"
     };
 
-    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
+    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
     Configuration conf = opts.getConfiguration();
     args = opts.getRemainingArgs();
 
-    Job job = Export.createSubmittableJob(conf, args);
-    job.getConfiguration().set("mapreduce.framework.name", "yarn");
+    JobConf jobConf = UTIL.getMRCluster().createJobConf();
+    HBaseConfiguration.merge(jobConf, conf);
+    Job job = Export.createSubmittableJob(jobConf, args);
     job.waitForCompletion(false);
     assertTrue(job.isSuccessful());
 
@@ -224,12 +228,13 @@ public class TestImportExport {
       OUTPUT_DIR
     };
 
-    opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
+    opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
     conf = opts.getConfiguration();
     args = opts.getRemainingArgs();
 
-    job = Import.createSubmittableJob(conf, args);
-    job.getConfiguration().set("mapreduce.framework.name", "yarn");
+    jobConf = UTIL.getMRCluster().createJobConf();
+    HBaseConfiguration.merge(jobConf, conf);
+    job = Import.createSubmittableJob(jobConf, args);
     job.waitForCompletion(false);
     assertTrue(job.isSuccessful());
 
@@ -268,12 +273,13 @@ public class TestImportExport {
     String[] args = new String[] { EXPORT_TABLE, OUTPUT_DIR, "1000" };
     GenericOptionsParser opts = new GenericOptionsParser(new Configuration(
-        cluster.getConfiguration()), args);
+        UTIL.getConfiguration()), args);
     Configuration conf = opts.getConfiguration();
     args = opts.getRemainingArgs();
 
-    Job job = Export.createSubmittableJob(conf, args);
-    job.getConfiguration().set("mapreduce.framework.name", "yarn");
+    JobConf jobConf = UTIL.getMRCluster().createJobConf();
+    HBaseConfiguration.merge(jobConf, conf);
+    Job job = Export.createSubmittableJob(jobConf, args);
     job.waitForCompletion(false);
     assertTrue(job.isSuccessful());
 
@@ -287,12 +293,13 @@ public class TestImportExport {
       "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1),
       IMPORT_TABLE,
       OUTPUT_DIR, "1000" };
-    opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
+    opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
     conf = opts.getConfiguration();
     args = opts.getRemainingArgs();
 
-    job = Import.createSubmittableJob(conf, args);
-    job.getConfiguration().set("mapreduce.framework.name", "yarn");
+    jobConf = UTIL.getMRCluster().createJobConf();
+    HBaseConfiguration.merge(jobConf, conf);
+    job = Import.createSubmittableJob(jobConf, args);
     job.waitForCompletion(false);
     assertTrue(job.isSuccessful());
 
@@ -310,14 +317,15 @@ public class TestImportExport {
       "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1) + "",
      EXPORT_TABLE,
      OUTPUT_DIR, "1000" };
-    opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
+    opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
     conf = opts.getConfiguration();
     args = opts.getRemainingArgs();
 
-    job = Import.createSubmittableJob(conf, args);
-    job.getConfiguration().set("mapreduce.framework.name", "yarn");
+    jobConf = UTIL.getMRCluster().createJobConf();
+    HBaseConfiguration.merge(jobConf, conf);
+    job = Import.createSubmittableJob(jobConf, args);
    job.waitForCompletion(false);
-    assertFalse("Job succeeedd, but it had a non-instantiable filter!", job.isSuccessful());
+    assertFalse("Job succeeded, but it had a non-instantiable filter!", job.isSuccessful());
 
     // cleanup
     exportTable.close();
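
For reference, a minimal self-contained sketch of the pattern this change applies, pulled out of the diff: instead of forcing mapreduce.framework.name on a job built from a bare Configuration, build the job from the mini MapReduce cluster's own JobConf and merge the HBase test configuration into it, so the job inherits the test MR endpoints and the HBase/ZooKeeper client settings together. The table name, output directory, and the standalone main() wrapper below are illustrative only, not taken from the test; the APIs assumed are the 0.94-era HBaseTestingUtility, the getMRCluster() accessor added above, Hadoop's MiniMRCluster.createJobConf(), and HBaseConfiguration.merge().

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.mapreduce.Export;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.GenericOptionsParser;

public class ExportJobConfSketch {
  public static void main(String[] ignored) throws Exception {
    HBaseTestingUtility util = new HBaseTestingUtility();
    util.startMiniCluster();            // HDFS + ZK + HBase mini clusters
    util.startMiniMapReduceCluster();   // MiniMRCluster backing util.getMRCluster()

    // Illustrative table and output directory (hypothetical names, not from the test).
    HTable table = util.createTable(Bytes.toBytes("sketchTable"), Bytes.toBytes("f"));
    String[] args = new String[] { "sketchTable", "sketch-output", "1000" };

    GenericOptionsParser opts =
        new GenericOptionsParser(new Configuration(util.getConfiguration()), args);
    Configuration conf = opts.getConfiguration();
    args = opts.getRemainingArgs();

    // Key step from the diff: start from the MiniMRCluster's JobConf so the job
    // targets the test MR cluster, then layer the HBase settings on top of it.
    JobConf jobConf = util.getMRCluster().createJobConf();
    HBaseConfiguration.merge(jobConf, conf);

    Job job = Export.createSubmittableJob(jobConf, args);
    job.waitForCompletion(false);
    System.out.println("Export succeeded: " + job.isSuccessful());

    table.close();
    util.shutdownMiniMapReduceCluster();
    util.shutdownMiniCluster();
  }
}

The merged JobConf points the job at whatever framework the MiniMRCluster actually runs, rather than hard-coding "yarn" as the previous test code did.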