accumulo-commits mailing list archives

From: bil...@apache.org
Subject: svn commit: r1328104 - in /accumulo/trunk: core/src/main/java/org/apache/accumulo/core/util/ examples/simple/src/main/java/org/apache/accumulo/examples/simple/helloworld/ examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ examp...
Date: Thu, 19 Apr 2012 20:22:17 GMT
Author: billie
Date: Thu Apr 19 20:22:16 2012
New Revision: 1328104

URL: http://svn.apache.org/viewvc?rev=1328104&view=rev
Log:
ACCUMULO-286 added OutputCommitter and StatusReporter parameters to ContextFactory.createMapContext and switched the examples and tests to the ContextFactory helpers

Modified:
    accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/ContextFactory.java
    accumulo/trunk/examples/simple/src/main/java/org/apache/accumulo/examples/simple/helloworld/InsertWithOutputFormat.java
    accumulo/trunk/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java
    accumulo/trunk/examples/wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaMapperTest.java
    accumulo/trunk/examples/wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/reader/AggregatingRecordReaderTest.java
    accumulo/trunk/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/ContextFactory.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/ContextFactory.java?rev=1328104&r1=1328103&r2=1328104&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/ContextFactory.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/util/ContextFactory.java Thu Apr 19 20:22:16 2012
@@ -147,15 +147,21 @@ public class ContextFactory {
     }
   }
   
-  @SuppressWarnings({"unchecked", "rawtypes"})
   public static <K1,V1,K2,V2> Mapper<K1,V1,K2,V2>.Context createMapContext(Mapper<K1,V1,K2,V2> m, TaskAttemptContext tac, RecordReader<K1,V1> reader,
       RecordWriter<K2,V2> writer, InputSplit split) {
+    return createMapContext(m, tac, reader, writer, null, null, split);
+  }
+  
+  @SuppressWarnings({"unchecked", "rawtypes"})
+  public static <K1,V1,K2,V2> Mapper<K1,V1,K2,V2>.Context createMapContext(Mapper<K1,V1,K2,V2> m, TaskAttemptContext tac, RecordReader<K1,V1> reader,
+      RecordWriter<K2,V2> writer, OutputCommitter committer, StatusReporter reporter, InputSplit split) {
     try {
       if (useV21) {
-        Object basis = MAP_CONTEXT_IMPL_CONSTRUCTOR.newInstance(tac.getConfiguration(), tac.getTaskAttemptID(), reader, writer, null, null, split);
+        Object basis = MAP_CONTEXT_IMPL_CONSTRUCTOR.newInstance(tac.getConfiguration(), tac.getTaskAttemptID(), reader, writer, committer, reporter, split);
         return (Mapper.Context) MAP_CONTEXT_CONSTRUCTOR.newInstance((Mapper<K1,V1,K2,V2>) MAP_CONSTRUCTOR.newInstance(), basis);
       } else {
-        return (Mapper.Context) MAP_CONTEXT_CONSTRUCTOR.newInstance(m, tac.getConfiguration(), tac.getTaskAttemptID(), reader, writer, null, null, split);
+        return (Mapper.Context) MAP_CONTEXT_CONSTRUCTOR.newInstance(m, tac.getConfiguration(), tac.getTaskAttemptID(), reader, writer, committer, reporter,
+            split);
       }
     } catch (InstantiationException e) {
       throw new IllegalArgumentException("Can't create object", e);

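For illustration, a minimal sketch of the new overload in use, mirroring the test changes below; the conf, mapper, rr, rw, oc, sr, and split variables are assumed to be set up as in WikipediaMapperTest:

    // build a TaskAttemptContext and a full map context, now with an
    // explicit OutputCommitter (oc) and StatusReporter (sr)
    TaskAttemptContext tac = ContextFactory.createTaskAttemptContext(conf);
    Mapper<LongWritable,Text,Text,Mutation>.Context con =
        ContextFactory.createMapContext(mapper, tac, rr, rw, oc, sr, split);
    mapper.run(con);

The existing five-argument overload keeps its old behavior by delegating with a null committer and reporter.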
Modified: accumulo/trunk/examples/simple/src/main/java/org/apache/accumulo/examples/simple/helloworld/InsertWithOutputFormat.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/examples/simple/src/main/java/org/apache/accumulo/examples/simple/helloworld/InsertWithOutputFormat.java?rev=1328104&r1=1328103&r2=1328104&view=diff
==============================================================================
--- accumulo/trunk/examples/simple/src/main/java/org/apache/accumulo/examples/simple/helloworld/InsertWithOutputFormat.java (original)
+++ accumulo/trunk/examples/simple/src/main/java/org/apache/accumulo/examples/simple/helloworld/InsertWithOutputFormat.java Thu Apr 19 20:22:16 2012
@@ -20,13 +20,12 @@ import org.apache.accumulo.core.client.m
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.util.CachedConfiguration;
-import org.apache.hadoop.conf.Configuration;
+import org.apache.accumulo.core.util.ContextFactory;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -46,7 +45,6 @@ public class InsertWithOutputFormat exte
     }
     Text tableName = new Text(args[4]);
     Job job = new Job(getConf());
-    Configuration conf = job.getConfiguration();
     AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
     AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), args[2], args[3].getBytes(), true, null);
     job.setOutputFormatClass(AccumuloOutputFormat.class);
@@ -55,7 +53,7 @@ public class InsertWithOutputFormat exte
     // format and record writer
     // mapreduce will do that for you, and you will just use
     // output.collect(tableName, mutation)
-    TaskAttemptContext context = new TaskAttemptContext(conf, new TaskAttemptID());
+    TaskAttemptContext context = ContextFactory.createTaskAttemptContext(job);
     RecordWriter<Text,Mutation> rw = new AccumuloOutputFormat().getRecordWriter(context);
     
     Text colf = new Text("colfam");

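The factory call here replaces direct construction of TaskAttemptContext, which compiles only against the Hadoop 0.20-era mapreduce API where TaskAttemptContext is a concrete class; ContextFactory instead picks a constructor reflectively (see the useV21 branch above). A before/after sketch, with conf and job as in the example:

    // before: tied to the 0.20 API
    //   TaskAttemptContext context = new TaskAttemptContext(conf, new TaskAttemptID());
    // after: version-neutral construction through the factory
    TaskAttemptContext context = ContextFactory.createTaskAttemptContext(job);
    RecordWriter<Text,Mutation> rw = new AccumuloOutputFormat().getRecordWriter(context);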
Modified: accumulo/trunk/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java?rev=1328104&r1=1328103&r2=1328104&view=diff
==============================================================================
--- accumulo/trunk/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java (original)
+++ accumulo/trunk/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java Thu Apr 19 20:22:16 2012
@@ -37,14 +37,10 @@ import org.apache.accumulo.core.data.Mut
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.core.security.ColumnVisibility;
-import org.apache.accumulo.examples.simple.filedata.ChunkInputFormat;
-import org.apache.accumulo.examples.simple.filedata.ChunkInputStream;
-import org.apache.hadoop.conf.Configuration;
+import org.apache.accumulo.core.util.ContextFactory;
 import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.log4j.Logger;
 
 public class ChunkInputFormatTest extends TestCase {
@@ -89,12 +85,12 @@ public class ChunkInputFormatTest extend
     }
     bw.close();
     
-    JobContext job = new JobContext(new Configuration(), new JobID());
+    JobContext job = ContextFactory.createJobContext();
     ChunkInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "test", new Authorizations("A", "B", "C", "D"));
     ChunkInputFormat.setMockInstance(job.getConfiguration(), "instance1");
     ChunkInputFormat cif = new ChunkInputFormat();
     RangeInputSplit ris = new RangeInputSplit();
-    TaskAttemptContext tac = new TaskAttemptContext(job.getConfiguration(), new TaskAttemptID());
+    TaskAttemptContext tac = ContextFactory.createTaskAttemptContext(job.getConfiguration());
     RecordReader<List<Entry<Key,Value>>,InputStream> rr = cif.createRecordReader(ris, tac);
     rr.initialize(ris, tac);
     
@@ -140,12 +136,12 @@ public class ChunkInputFormatTest extend
     }
     bw.close();
     
-    JobContext job = new JobContext(new Configuration(), new JobID());
+    JobContext job = ContextFactory.createJobContext();
     ChunkInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "test", new Authorizations("A", "B", "C", "D"));
     ChunkInputFormat.setMockInstance(job.getConfiguration(), "instance2");
     ChunkInputFormat cif = new ChunkInputFormat();
     RangeInputSplit ris = new RangeInputSplit();
-    TaskAttemptContext tac = new TaskAttemptContext(job.getConfiguration(), new TaskAttemptID());
+    TaskAttemptContext tac = ContextFactory.createTaskAttemptContext(job.getConfiguration());
     RecordReader<List<Entry<Key,Value>>,InputStream> crr = cif.createRecordReader(ris, tac);
     crr.initialize(ris, tac);
     
@@ -179,12 +175,12 @@ public class ChunkInputFormatTest extend
     }
     bw.close();
     
-    JobContext job = new JobContext(new Configuration(), new JobID());
+    JobContext job = ContextFactory.createJobContext();
     ChunkInputFormat.setInputInfo(job.getConfiguration(), "root", "".getBytes(), "test", new Authorizations("A", "B", "C", "D"));
     ChunkInputFormat.setMockInstance(job.getConfiguration(), "instance3");
     ChunkInputFormat cif = new ChunkInputFormat();
     RangeInputSplit ris = new RangeInputSplit();
-    TaskAttemptContext tac = new TaskAttemptContext(job.getConfiguration(), new TaskAttemptID());
+    TaskAttemptContext tac = ContextFactory.createTaskAttemptContext(job.getConfiguration());
     RecordReader<List<Entry<Key,Value>>,InputStream> crr = cif.createRecordReader(ris, tac);
     crr.initialize(ris, tac);
     

Modified: accumulo/trunk/examples/wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaMapperTest.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/examples/wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaMapperTest.java?rev=1328104&r1=1328103&r2=1328104&view=diff
==============================================================================
--- accumulo/trunk/examples/wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaMapperTest.java (original)
+++ accumulo/trunk/examples/wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaMapperTest.java Thu Apr 19 20:22:16 2012
@@ -34,8 +34,7 @@ import org.apache.accumulo.core.data.Mut
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.examples.wikisearch.ingest.WikipediaConfiguration;
-import org.apache.accumulo.examples.wikisearch.ingest.WikipediaMapper;
+import org.apache.accumulo.core.util.ContextFactory;
 import org.apache.accumulo.examples.wikisearch.reader.AggregatingRecordReader;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -47,7 +46,6 @@ import org.apache.hadoop.mapreduce.Mappe
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
 import org.junit.Before;
@@ -118,8 +116,7 @@ public class WikipediaMapperTest {
     writerMap.put(new Text(INDEX_TABLE_NAME), c.createBatchWriter(INDEX_TABLE_NAME, 1000L, 1000L, 1));
     writerMap.put(new Text(RINDEX_TABLE_NAME), c.createBatchWriter(RINDEX_TABLE_NAME, 1000L, 1000L, 1));
     
-    TaskAttemptID id = new TaskAttemptID();
-    TaskAttemptContext context = new TaskAttemptContext(conf, id);
+    TaskAttemptContext context = ContextFactory.createTaskAttemptContext(conf);
     
     RawLocalFileSystem fs = new RawLocalFileSystem();
     fs.setConf(conf);
@@ -141,7 +138,7 @@ public class WikipediaMapperTest {
     WikipediaMapper mapper = new WikipediaMapper();
     
     // Load data into Mock Accumulo
-    Mapper<LongWritable,Text,Text,Mutation>.Context con = mapper.new Context(conf, id, rr, rw, oc, sr, split);
+    Mapper<LongWritable,Text,Text,Mutation>.Context con = ContextFactory.createMapContext(mapper, context, rr, rw, oc, sr, split);
     mapper.run(con);
     
     // Flush and close record writers.

Modified: accumulo/trunk/examples/wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/reader/AggregatingRecordReaderTest.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/examples/wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/reader/AggregatingRecordReaderTest.java?rev=1328104&r1=1328103&r2=1328104&view=diff
==============================================================================
--- accumulo/trunk/examples/wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/reader/AggregatingRecordReaderTest.java (original)
+++ accumulo/trunk/examples/wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/reader/AggregatingRecordReaderTest.java Thu Apr 19 20:22:16 2012
@@ -16,7 +16,10 @@
  */
 package org.apache.accumulo.examples.wikisearch.reader;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.File;
 import java.io.FileWriter;
@@ -28,13 +31,12 @@ import javax.xml.xpath.XPath;
 import javax.xml.xpath.XPathExpression;
 import javax.xml.xpath.XPathFactory;
 
+import org.apache.accumulo.core.util.ContextFactory;
 import org.apache.accumulo.examples.wikisearch.ingest.WikipediaInputFormat.WikipediaInputSplit;
-import org.apache.accumulo.examples.wikisearch.reader.AggregatingRecordReader;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.junit.Before;
 import org.junit.Test;
@@ -95,7 +97,7 @@ public class AggregatingRecordReaderTest
     conf.set(AggregatingRecordReader.START_TOKEN, "<doc");
     conf.set(AggregatingRecordReader.END_TOKEN, "</doc>");
     conf.set(AggregatingRecordReader.RETURN_PARTIAL_MATCHES, Boolean.toString(true));
-    ctx = new TaskAttemptContext(conf, new TaskAttemptID());
+    ctx = ContextFactory.createTaskAttemptContext(conf);
     XPath xp = xpFactory.newXPath();
     EXPR_A = xp.compile("/doc/a");
     EXPR_B = xp.compile("/doc/b");
@@ -141,7 +143,7 @@ public class AggregatingRecordReaderTest
     
     // Create FileSplit
     Path p = new Path(f.toURI().toString());
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null),0);
+    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
     AggregatingRecordReader reader = new AggregatingRecordReader();
     try {
       // Clear the values for BEGIN and STOP TOKEN
@@ -163,7 +165,7 @@ public class AggregatingRecordReaderTest
     
     // Create FileSplit
     Path p = new Path(f.toURI().toString());
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null),0);
+    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
     
     // Initialize the RecordReader
     AggregatingRecordReader reader = new AggregatingRecordReader();
@@ -184,7 +186,7 @@ public class AggregatingRecordReaderTest
     
     // Create FileSplit
     Path p = new Path(f.toURI().toString());
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null),0);
+    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
     
     // Initialize the RecordReader
     AggregatingRecordReader reader = new AggregatingRecordReader();
@@ -202,7 +204,7 @@ public class AggregatingRecordReaderTest
     
     // Create FileSplit
     Path p = new Path(f.toURI().toString());
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null),0);
+    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
     
     // Initialize the RecordReader
     AggregatingRecordReader reader = new AggregatingRecordReader();
@@ -220,7 +222,7 @@ public class AggregatingRecordReaderTest
     
     // Create FileSplit
     Path p = new Path(f.toURI().toString());
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null),0);
+    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
     
     // Initialize the RecordReader
     AggregatingRecordReader reader = new AggregatingRecordReader();
@@ -245,7 +247,7 @@ public class AggregatingRecordReaderTest
     
     // Create FileSplit
     Path p = new Path(f.toURI().toString());
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null),0);
+    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
     
     // Initialize the RecordReader
     AggregatingRecordReader reader = new AggregatingRecordReader();
@@ -264,7 +266,7 @@ public class AggregatingRecordReaderTest
     File f = createFile(xml5);
     // Create FileSplit
     Path p = new Path(f.toURI().toString());
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null),0);
+    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(p, 0, f.length(), null), 0);
     
     // Initialize the RecordReader
     AggregatingRecordReader reader = new AggregatingRecordReader();

Modified: accumulo/trunk/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java?rev=1328104&r1=1328103&r2=1328104&view=diff
==============================================================================
--- accumulo/trunk/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java (original)
+++ accumulo/trunk/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java Thu Apr 19 20:22:16 2012
@@ -36,6 +36,7 @@ import org.apache.accumulo.core.data.Mut
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.security.Authorizations;
+import org.apache.accumulo.core.util.ContextFactory;
 import org.apache.accumulo.examples.wikisearch.ingest.WikipediaConfiguration;
 import org.apache.accumulo.examples.wikisearch.ingest.WikipediaInputFormat.WikipediaInputSplit;
 import org.apache.accumulo.examples.wikisearch.ingest.WikipediaMapper;
@@ -53,7 +54,6 @@ import org.apache.hadoop.mapreduce.Mappe
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
 import org.apache.log4j.Level;
@@ -125,8 +125,7 @@ public class TestQueryLogic {
       writerMap.put(new Text(table), c.createBatchWriter(table, 1000L, 1000L, 1));
     }
     
-    TaskAttemptID id = new TaskAttemptID();
-    TaskAttemptContext context = new TaskAttemptContext(conf, id);
+    TaskAttemptContext context = ContextFactory.createTaskAttemptContext(conf);
     
     RawLocalFileSystem fs = new RawLocalFileSystem();
     fs.setConf(conf);
@@ -137,7 +136,7 @@ public class TestQueryLogic {
     Path tmpFile = new Path(data.getAbsolutePath());
     
     // Setup the Mapper
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(tmpFile, 0, fs.pathToFile(tmpFile).length(), null),0);
+    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(tmpFile, 0, fs.pathToFile(tmpFile).length(), null), 0);
     AggregatingRecordReader rr = new AggregatingRecordReader();
     Path ocPath = new Path(tmpFile, "oc");
     OutputCommitter oc = new FileOutputCommitter(ocPath, context);
@@ -148,7 +147,7 @@ public class TestQueryLogic {
     WikipediaMapper mapper = new WikipediaMapper();
     
     // Load data into Mock Accumulo
-    Mapper<LongWritable,Text,Text,Mutation>.Context con = mapper.new Context(conf, id, rr, rw, oc, sr, split);
+    Mapper<LongWritable,Text,Text,Mutation>.Context con = ContextFactory.createMapContext(mapper, context, rr, rw, oc, sr, split);
     mapper.run(con);
     
     // Flush and close record writers.


