From: ujustgotbilld@apache.org
To: commits@accumulo.apache.org
Reply-To: dev@accumulo.apache.org
Date: Thu, 06 Feb 2014 05:40:27 -0000
Subject: [48/50] git commit: ACCUMULO-286 added context factory stuff

ACCUMULO-286 added context factory stuff

git-svn-id: https://svn.apache.org/repos/asf/accumulo/trunk@1328104 13f79535-47bb-0310-9956-ffa450edef68
(cherry picked from commit 0680b04bf03e2d6ad19ae3c368f6cb23f4e30056)

Reason: Testing
Author: Billie Rinaldi
Ref: ACCUMULO-1792
Signed-off-by: Eric Newton

Project: http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/commit/0c429f98
Tree: http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/tree/0c429f98
Diff: http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/diff/0c429f98

Branch: refs/heads/1.4.5-SNAPSHOT
Commit: 0c429f986b9e106cf3598cd37726bb40470ffa95
Parents: f0b42c7
Author: Billie Rinaldi
Authored: Thu Apr 19 20:22:16 2012 +0000
Committer: Eric Newton
Committed: Mon Nov 25 16:06:42 2013 -0500

----------------------------------------------------------------------
 .../wikisearch/ingest/WikipediaMapperTest.java  |  9 +++-----
 .../reader/AggregatingRecordReaderTest.java     | 24 +++++++++++---------
 .../wikisearch/logic/TestQueryLogic.java        |  9 ++++----
 3 files changed, 20 insertions(+), 22 deletions(-)
----------------------------------------------------------------------
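Background, for readers arriving at this commit without the ticket: under Hadoop 0.20, org.apache.hadoop.mapreduce.TaskAttemptContext is a concrete class, while from 0.21 on it is an interface backed by an implementation class, so a direct "new TaskAttemptContext(conf, id)" cannot compile against both release lines. ContextFactory hides that difference behind static creation methods. The sketch below is illustrative only -- the class name and the reflection-over-candidates approach are assumptions about how such a shim can work, not the shipped org.apache.accumulo.core.util.ContextFactory source:

    import java.lang.reflect.Constructor;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;

    // Illustrative sketch only; not the actual ContextFactory source.
    public class ContextFactorySketch {
      public static TaskAttemptContext createTaskAttemptContext(Configuration conf) {
        // Try the 0.21+/2.x implementation class first, then the 0.20 concrete class.
        String[] candidates = {
            "org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl",
            "org.apache.hadoop.mapreduce.TaskAttemptContext"};
        for (String name : candidates) {
          try {
            Class<?> clazz = Class.forName(name);
            Constructor<?> ctor = clazz.getConstructor(Configuration.class, TaskAttemptID.class);
            return (TaskAttemptContext) ctor.newInstance(conf, new TaskAttemptID());
          } catch (Exception e) {
            // Class or constructor absent on this Hadoop version; try the next candidate.
          }
        }
        throw new IllegalStateException("no TaskAttemptContext implementation on the classpath");
      }
    }

Because the factory supplies the TaskAttemptID internally and only reflection touches the constructors, the tests can drop their TaskAttemptID imports entirely, which is exactly what the diffs below do.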

http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/blob/0c429f98/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaMapperTest.java
----------------------------------------------------------------------
diff --git a/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaMapperTest.java b/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaMapperTest.java
index a924aee..c659ec4 100644
--- a/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaMapperTest.java
+++ b/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaMapperTest.java
@@ -34,8 +34,7 @@ import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.examples.wikisearch.ingest.WikipediaConfiguration;
-import org.apache.accumulo.examples.wikisearch.ingest.WikipediaMapper;
+import org.apache.accumulo.core.util.ContextFactory;
 import org.apache.accumulo.examples.wikisearch.reader.AggregatingRecordReader;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -47,7 +46,6 @@ import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
 import org.junit.Before;
@@ -118,8 +116,7 @@ public class WikipediaMapperTest {
     writerMap.put(new Text(INDEX_TABLE_NAME), c.createBatchWriter(INDEX_TABLE_NAME, 1000L, 1000L, 1));
     writerMap.put(new Text(RINDEX_TABLE_NAME), c.createBatchWriter(RINDEX_TABLE_NAME, 1000L, 1000L, 1));
 
-    TaskAttemptID id = new TaskAttemptID();
-    TaskAttemptContext context = new TaskAttemptContext(conf, id);
+    TaskAttemptContext context = ContextFactory.createTaskAttemptContext(conf);
 
     RawLocalFileSystem fs = new RawLocalFileSystem();
     fs.setConf(conf);
@@ -141,7 +138,7 @@ public class WikipediaMapperTest {
     WikipediaMapper mapper = new WikipediaMapper();
 
     // Load data into Mock Accumulo
-    Mapper.Context con = mapper.new Context(conf, id, rr, rw, oc, sr, split);
+    Mapper.Context con = ContextFactory.createMapContext(mapper, context, rr, rw, oc, sr, split);
     mapper.run(con);
 
     // Flush and close record writers.

http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/blob/0c429f98/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/reader/AggregatingRecordReaderTest.java
----------------------------------------------------------------------
diff --git a/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/reader/AggregatingRecordReaderTest.java b/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/reader/AggregatingRecordReaderTest.java
index c1cb263..c842da7 100644
--- a/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/reader/AggregatingRecordReaderTest.java
+++ b/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/reader/AggregatingRecordReaderTest.java
@@ -16,7 +16,10 @@
  */
 package org.apache.accumulo.examples.wikisearch.reader;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.File;
 import java.io.FileWriter;
@@ -28,13 +31,12 @@ import javax.xml.xpath.XPath;
 import javax.xml.xpath.XPathExpression;
 import javax.xml.xpath.XPathFactory;
 
+import org.apache.accumulo.core.util.ContextFactory;
 import org.apache.accumulo.examples.wikisearch.ingest.WikipediaInputFormat.WikipediaInputSplit;
-import org.apache.accumulo.examples.wikisearch.reader.AggregatingRecordReader;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.junit.Before;
 import org.junit.Test;
@@ -95,7 +97,7 @@ public class AggregatingRecordReaderTest {
     conf.set(AggregatingRecordReader.START_TOKEN, "<doc");
     conf.set(AggregatingRecordReader.END_TOKEN, "</doc>");
     conf.set(AggregatingRecordReader.RETURN_PARTIAL_MATCHES, Boolean.toString(true));
-    ctx = new TaskAttemptContext(conf, new TaskAttemptID());
+    ctx = ContextFactory.createTaskAttemptContext(conf);
     XPath xp = xpFactory.newXPath();
     EXPR_A = xp.compile("/doc/a");
     EXPR_B = xp.compile("/doc/b");
@@ -141,7 +143,7 @@

     // Create FileSplit
     Path p = new Path(f.toURI().toString());
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null),0);
+    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
     AggregatingRecordReader reader = new AggregatingRecordReader();
     try {
       // Clear the values for BEGIN and STOP TOKEN
@@ -163,7 +165,7 @@

     // Create FileSplit
     Path p = new Path(f.toURI().toString());
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null),0);
+    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);

     // Initialize the RecordReader
     AggregatingRecordReader reader = new AggregatingRecordReader();
@@ -184,7 +186,7 @@

     // Create FileSplit
     Path p = new Path(f.toURI().toString());
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null),0);
+    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);

     // Initialize the RecordReader
     AggregatingRecordReader reader = new AggregatingRecordReader();
@@ -202,7 +204,7 @@

     // Create FileSplit
     Path p = new Path(f.toURI().toString());
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null),0);
+    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);

     // Initialize the RecordReader
     AggregatingRecordReader reader = new AggregatingRecordReader();
@@ -220,7 +222,7 @@

     // Create FileSplit
     Path p = new Path(f.toURI().toString());
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null),0);
+    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);

     // Initialize the RecordReader
     AggregatingRecordReader reader = new AggregatingRecordReader();
@@ -245,7 +247,7 @@

     // Create FileSplit
     Path p = new Path(f.toURI().toString());
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null),0);
+    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);

     // Initialize the RecordReader
     AggregatingRecordReader reader = new AggregatingRecordReader();
@@ -264,7 +266,7 @@
     File f = createFile(xml5);
     // Create FileSplit
     Path p = new Path(f.toURI().toString());
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null),0);
+    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);

     // Initialize the RecordReader
     AggregatingRecordReader reader = new AggregatingRecordReader();

http://git-wip-us.apache.org/repos/asf/accumulo-wikisearch/blob/0c429f98/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java
----------------------------------------------------------------------
diff --git a/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java b/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java
index 4b7aaee..938f01b 100644
--- a/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java
+++ b/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java
@@ -36,6 +36,7 @@ import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.security.Authorizations;
+import org.apache.accumulo.core.util.ContextFactory;
 import org.apache.accumulo.examples.wikisearch.ingest.WikipediaConfiguration;
 import org.apache.accumulo.examples.wikisearch.ingest.WikipediaInputFormat.WikipediaInputSplit;
 import org.apache.accumulo.examples.wikisearch.ingest.WikipediaMapper;
@@ -53,7 +54,6 @@ import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
 import org.apache.log4j.Level;
@@ -125,8 +125,7 @@ public class TestQueryLogic {
       writerMap.put(new Text(table), c.createBatchWriter(table, 1000L, 1000L, 1));
     }
 
-    TaskAttemptID id = new TaskAttemptID();
-    TaskAttemptContext context = new TaskAttemptContext(conf, id);
+    TaskAttemptContext context = ContextFactory.createTaskAttemptContext(conf);
 
     RawLocalFileSystem fs = new RawLocalFileSystem();
     fs.setConf(conf);
@@ -137,7 +136,7 @@
     Path tmpFile = new Path(data.getAbsolutePath());
 
     // Setup the Mapper
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(tmpFile, 0, fs.pathToFile(tmpFile).length(), null),0);
+    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(tmpFile, 0, fs.pathToFile(tmpFile).length(), null), 0);
     AggregatingRecordReader rr = new AggregatingRecordReader();
     Path ocPath = new Path(tmpFile, "oc");
     OutputCommitter oc = new FileOutputCommitter(ocPath, context);
@@ -148,7 +147,7 @@
     WikipediaMapper mapper = new WikipediaMapper();
 
     // Load data into Mock Accumulo
-    Mapper.Context con = mapper.new Context(conf, id, rr, rw, oc, sr, split);
+    Mapper.Context con = ContextFactory.createMapContext(mapper, context, rr, rw, oc, sr, split);
     mapper.run(con);
 
     // Flush and close record writers.
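
For reference, the pattern the three tests converge on can be folded into a single hypothetical helper. MapperTestHarness and runMapper are invented names, not part of this commit; the raw parameter types mirror the arguments the old 0.20-style "mapper.new Context(...)" call passed, and the two factory calls are the ones shown in the diffs above:

    import org.apache.accumulo.core.util.ContextFactory;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.InputSplit;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.OutputCommitter;
    import org.apache.hadoop.mapreduce.RecordReader;
    import org.apache.hadoop.mapreduce.RecordWriter;
    import org.apache.hadoop.mapreduce.StatusReporter;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;

    @SuppressWarnings({"rawtypes", "unchecked"})
    class MapperTestHarness {
      // Drives a Mapper the way the three tests above now do: both contexts come
      // from ContextFactory, so the code compiles against either Hadoop line.
      static void runMapper(Mapper mapper, Configuration conf, RecordReader rr,
          RecordWriter rw, OutputCommitter oc, StatusReporter sr, InputSplit split)
          throws Exception {
        TaskAttemptContext context = ContextFactory.createTaskAttemptContext(conf);
        Mapper.Context con = ContextFactory.createMapContext(mapper, context, rr, rw, oc, sr, split);
        mapper.run(con);
      }
    }

Centralizing both factory calls like this would mean a future Hadoop API move touches one helper instead of every test, which is the same motivation behind ContextFactory itself.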