accumulo-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ctubb...@apache.org
Subject svn commit: r1433745 [3/3] - in /accumulo/trunk: core/src/main/java/org/apache/accumulo/core/cli/ core/src/main/java/org/apache/accumulo/core/client/mapreduce/ core/src/main/java/org/apache/accumulo/core/iterators/user/ core/src/test/java/org/apache/ac...
Date Tue, 15 Jan 2013 23:50:42 GMT
Modified: accumulo/trunk/examples/simple/src/main/java/org/apache/accumulo/examples/simple/mapreduce/TeraSortIngest.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/examples/simple/src/main/java/org/apache/accumulo/examples/simple/mapreduce/TeraSortIngest.java?rev=1433745&r1=1433744&r2=1433745&view=diff
==============================================================================
--- accumulo/trunk/examples/simple/src/main/java/org/apache/accumulo/examples/simple/mapreduce/TeraSortIngest.java
(original)
+++ accumulo/trunk/examples/simple/src/main/java/org/apache/accumulo/examples/simple/mapreduce/TeraSortIngest.java
Tue Jan 15 23:50:42 2013
@@ -63,7 +63,7 @@ import com.beust.jcommander.Parameter;
  * the same way TeraSort does use 10000000000 rows and 10/10 byte key length and 78/78 byte
value length. Along with the 10 byte row id and \r\n this gives you
  * 100 byte row * 10000000000 rows = 1tb. Min/Max ranges for key and value parameters are
inclusive/inclusive respectively.
  * 
- *  
+ * 
  */
 public class TeraSortIngest extends Configured implements Tool {
   /**
@@ -84,19 +84,23 @@ public class TeraSortIngest extends Conf
         rowCount = length;
       }
       
+      @Override
       public long getLength() throws IOException {
         return 0;
       }
       
+      @Override
       public String[] getLocations() throws IOException {
         return new String[] {};
       }
       
+      @Override
       public void readFields(DataInput in) throws IOException {
         firstRow = WritableUtils.readVLong(in);
         rowCount = WritableUtils.readVLong(in);
       }
       
+      @Override
       public void write(DataOutput out) throws IOException {
         WritableUtils.writeVLong(out, firstRow);
         WritableUtils.writeVLong(out, rowCount);
@@ -119,8 +123,10 @@ public class TeraSortIngest extends Conf
         totalRows = split.rowCount;
       }
       
+      @Override
       public void close() throws IOException {}
       
+      @Override
       public float getProgress() throws IOException {
         return finishedRows / (float) totalRows;
       }
@@ -148,6 +154,7 @@ public class TeraSortIngest extends Conf
       }
     }
     
+    @Override
     public RecordReader<LongWritable,NullWritable> createRecordReader(InputSplit split,
TaskAttemptContext context) throws IOException {
       // reporter.setStatus("Creating record reader");
       return new RangeRecordReader((RangeInputSplit) split);
@@ -156,6 +163,7 @@ public class TeraSortIngest extends Conf
     /**
      * Create the desired number of splits, dividing the number of rows between the mappers.
      */
+    @Override
     public List<InputSplit> getSplits(JobContext job) {
       long totalRows = job.getConfiguration().getLong(NUMROWS, 0);
       int numSplits = job.getConfiguration().getInt(NUMSPLITS, 1);
@@ -305,6 +313,7 @@ public class TeraSortIngest extends Conf
         value.append(filler[(base + valuelen) % 26], 0, valuelen);
     }
     
+    @Override
     public void map(LongWritable row, NullWritable ignored, Context context) throws IOException,
InterruptedException {
       context.setStatus("Entering");
       long rowId = row.get();
@@ -344,17 +353,17 @@ public class TeraSortIngest extends Conf
   }
   
   static class Opts extends ClientOnRequiredTable {
-    @Parameter(names="--count", description="number of rows to ingest", required=true)
+    @Parameter(names = "--count", description = "number of rows to ingest", required = true)
     long numRows;
-    @Parameter(names={"-nk", "--minKeySize"}, description="miniumum key size", required=true)
+    @Parameter(names = {"-nk", "--minKeySize"}, description = "minimum key size", required
= true)
     int minKeyLength;
-    @Parameter(names={"-xk", "--maxKeySize"}, description="maximum key size", required=true)
+    @Parameter(names = {"-xk", "--maxKeySize"}, description = "maximum key size", required
= true)
     int maxKeyLength;
-    @Parameter(names={"-nv", "--minValueSize"}, description="minimum key size", required=true)
+    @Parameter(names = {"-nv", "--minValueSize"}, description = "minimum value size", required
= true)
     int minValueLength;
-    @Parameter(names={"-xv", "--maxValueSize"}, description="maximum key size", required=true)
+    @Parameter(names = {"-xv", "--maxValueSize"}, description = "maximum value size", required
= true)
     int maxValueLength;
-    @Parameter(names="--splits", description="number of splits to create in the table")
+    @Parameter(names = "--splits", description = "number of splits to create in the table")
     int splits = 0;
   }
   
@@ -374,7 +383,7 @@ public class TeraSortIngest extends Conf
     
     job.setOutputFormatClass(AccumuloOutputFormat.class);
     opts.setAccumuloConfigs(job);
-    AccumuloOutputFormat.setMaxMutationBufferSize(job.getConfiguration(), 10L * 1000 * 1000);
+    AccumuloOutputFormat.setMaxMutationBufferSize(job, 10L * 1000 * 1000);
     
     Configuration conf = job.getConfiguration();
     conf.setLong(NUMROWS, opts.numRows);

Modified: accumulo/trunk/examples/simple/src/main/java/org/apache/accumulo/examples/simple/mapreduce/UniqueColumns.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/examples/simple/src/main/java/org/apache/accumulo/examples/simple/mapreduce/UniqueColumns.java?rev=1433745&r1=1433744&r2=1433745&view=diff
==============================================================================
--- accumulo/trunk/examples/simple/src/main/java/org/apache/accumulo/examples/simple/mapreduce/UniqueColumns.java
(original)
+++ accumulo/trunk/examples/simple/src/main/java/org/apache/accumulo/examples/simple/mapreduce/UniqueColumns.java
Tue Jan 15 23:50:42 2013
@@ -41,17 +41,19 @@ import com.beust.jcommander.Parameter;
  */
 
 /**
- * A simple map reduce job that computes the unique column families and column qualifiers
in a table.  This example shows one way to run against an offline table.
+ * A simple map reduce job that computes the unique column families and column qualifiers
in a table. This example shows one way to run against an offline
+ * table.
  */
 public class UniqueColumns extends Configured implements Tool {
   
   private static final Text EMPTY = new Text();
   
-  public static class UMapper extends Mapper<Key,Value,Text,Text> {    
+  public static class UMapper extends Mapper<Key,Value,Text,Text> {
     private Text temp = new Text();
     private static final Text CF = new Text("cf:");
     private static final Text CQ = new Text("cq:");
     
+    @Override
     public void map(Key key, Value value, Context context) throws IOException, InterruptedException
{
       temp.set(CF);
       ByteSequence cf = key.getColumnFamilyData();
@@ -66,21 +68,21 @@ public class UniqueColumns extends Confi
   }
   
   public static class UReducer extends Reducer<Text,Text,Text,Text> {
+    @Override
     public void reduce(Text key, Iterable<Text> values, Context context) throws IOException,
InterruptedException {
       context.write(key, EMPTY);
     }
   }
   
   static class Opts extends ClientOnRequiredTable {
-    @Parameter(names="--output", description="output directory")
+    @Parameter(names = "--output", description = "output directory")
     String output;
-    @Parameter(names="--reducers", description="number of reducers to use", required=true)
+    @Parameter(names = "--reducers", description = "number of reducers to use", required
= true)
     int reducers;
-    @Parameter(names="--offline", description="run against an offline table")
+    @Parameter(names = "--offline", description = "run against an offline table")
     boolean offline = false;
   }
   
-  
   @Override
   public int run(String[] args) throws Exception {
     Opts opts = new Opts();
@@ -90,7 +92,7 @@ public class UniqueColumns extends Confi
     
     Job job = new Job(getConf(), jobName);
     job.setJarByClass(this.getClass());
-
+    
     String clone = opts.tableName;
     Connector conn = null;
     if (opts.offline) {
@@ -104,11 +106,9 @@ public class UniqueColumns extends Confi
       conn.tableOperations().clone(opts.tableName, clone, true, new HashMap<String,String>(),
new HashSet<String>());
       conn.tableOperations().offline(clone);
       
-      AccumuloInputFormat.setScanOffline(job.getConfiguration(), true);
+      AccumuloInputFormat.setOfflineTableScan(job, true);
     }
     
-
-    
     job.setInputFormatClass(AccumuloInputFormat.class);
     opts.setAccumuloConfigs(job);
     
@@ -118,9 +118,9 @@ public class UniqueColumns extends Confi
     
     job.setCombinerClass(UReducer.class);
     job.setReducerClass(UReducer.class);
-
+    
     job.setNumReduceTasks(opts.reducers);
-
+    
     job.setOutputFormatClass(TextOutputFormat.class);
     TextOutputFormat.setOutputPath(job, new Path(opts.output));
     
@@ -129,11 +129,10 @@ public class UniqueColumns extends Confi
     if (opts.offline) {
       conn.tableOperations().delete(clone);
     }
-
+    
     return job.isSuccessful() ? 0 : 1;
   }
   
-  
   public static void main(String[] args) throws Exception {
     int res = ToolRunner.run(CachedConfiguration.getInstance(), new UniqueColumns(), args);
     System.exit(res);

Modified: accumulo/trunk/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java?rev=1433745&r1=1433744&r2=1433745&view=diff
==============================================================================
--- accumulo/trunk/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java
(original)
+++ accumulo/trunk/examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java
Tue Jan 15 23:50:42 2013
@@ -197,10 +197,12 @@ public class ChunkInputFormatTest extend
       
       job.setInputFormatClass(ChunkInputFormat.class);
       
-      ChunkInputFormat.setInputInfo(job.getConfiguration(), user, pass.getBytes(), table,
AUTHS);
-      ChunkInputFormat.setMockInstance(job.getConfiguration(), instance);
+      ChunkInputFormat.setInputInfo(job, user, pass.getBytes(), table, AUTHS);
+      ChunkInputFormat.setMockInstance(job, instance);
       
-      job.setMapperClass((Class<? extends Mapper>) Class.forName(args[4]));
+      @SuppressWarnings("unchecked")
+      Class<? extends Mapper<?,?,?,?>> forName = (Class<? extends Mapper<?,?,?,?>>)
Class.forName(args[4]);
+      job.setMapperClass(forName);
       job.setMapOutputKeyClass(Key.class);
       job.setMapOutputValueClass(Value.class);
       job.setOutputFormatClass(NullOutputFormat.class);

Modified: accumulo/trunk/server/src/main/java/org/apache/accumulo/server/metanalysis/IndexMeta.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/server/src/main/java/org/apache/accumulo/server/metanalysis/IndexMeta.java?rev=1433745&r1=1433744&r2=1433745&view=diff
==============================================================================
--- accumulo/trunk/server/src/main/java/org/apache/accumulo/server/metanalysis/IndexMeta.java
(original)
+++ accumulo/trunk/server/src/main/java/org/apache/accumulo/server/metanalysis/IndexMeta.java
Tue Jan 15 23:50:42 2013
@@ -24,7 +24,6 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.accumulo.core.Constants;
-import org.apache.accumulo.server.cli.ClientOpts;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.TableExistsException;
 import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
@@ -33,6 +32,7 @@ import org.apache.accumulo.core.data.Key
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.util.CachedConfiguration;
+import org.apache.accumulo.server.cli.ClientOpts;
 import org.apache.accumulo.server.logger.LogEvents;
 import org.apache.accumulo.server.logger.LogFileKey;
 import org.apache.accumulo.server.logger.LogFileValue;
@@ -115,9 +115,9 @@ public class IndexMeta extends Configure
       context.write(TABLET_EVENTS_TABLE, tabletEvent);
     }
   }
-
+  
   static class Opts extends ClientOpts {
-    @Parameter(description="<logfile> { <logfile> ...}")
+    @Parameter(description = "<logfile> { <logfile> ...}")
     List<String> logFiles = new ArrayList<String>();
   }
   
@@ -125,12 +125,12 @@ public class IndexMeta extends Configure
   public int run(String[] args) throws Exception {
     Opts opts = new Opts();
     opts.parseArgs(IndexMeta.class.getName(), args);
-
+    
     String jobName = this.getClass().getSimpleName() + "_" + System.currentTimeMillis();
     
     Job job = new Job(getConf(), jobName);
     job.setJarByClass(this.getClass());
-
+    
     List<String> logFiles = Arrays.asList(args).subList(4, args.length);
     Path paths[] = new Path[logFiles.size()];
     int count = 0;
@@ -144,11 +144,11 @@ public class IndexMeta extends Configure
     job.setNumReduceTasks(0);
     
     job.setOutputFormatClass(AccumuloOutputFormat.class);
-    AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), opts.instance, opts.zookeepers);
-    AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), opts.user, opts.getPassword(),
false, null);
+    AccumuloOutputFormat.setZooKeeperInstance(job, opts.instance, opts.zookeepers);
+    AccumuloOutputFormat.setOutputInfo(job, opts.user, opts.getPassword(), false, null);
     
     job.setMapperClass(IndexMapper.class);
-
+    
     Connector conn = opts.getConnector();
     
     try {

Modified: accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/continuous/ContinuousMoru.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/continuous/ContinuousMoru.java?rev=1433745&r1=1433744&r2=1433745&view=diff
==============================================================================
--- accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/continuous/ContinuousMoru.java
(original)
+++ accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/continuous/ContinuousMoru.java
Tue Jan 15 23:50:42 2013
@@ -44,8 +44,8 @@ import com.beust.jcommander.Parameter;
 import com.beust.jcommander.validators.PositiveInteger;
 
 /**
- * A map only job that reads a table created by continuous ingest and creates doubly linked
list. This map reduce job tests the ability of a map only job to read and
- * write to accumulo at the same time. This map reduce job mutates the table in such a way
that it should not create any undefined nodes.
+ * A map only job that reads a table created by continuous ingest and creates doubly linked
list. This map reduce job tests the ability of a map only job to
+ * read and write to accumulo at the same time. This map reduce job mutates the table in
such a way that it should not create any undefined nodes.
  * 
  */
 public class ContinuousMoru extends Configured implements Tool {
@@ -70,7 +70,8 @@ public class ContinuousMoru extends Conf
     private long count;
     
     private static final ColumnVisibility EMPTY_VIS = new ColumnVisibility();
-
+    
+    @Override
     public void setup(Context context) throws IOException, InterruptedException {
       int max_cf = context.getConfiguration().getInt(MAX_CF, -1);
       int max_cq = context.getConfiguration().getInt(MAX_CQ, -1);
@@ -88,6 +89,7 @@ public class ContinuousMoru extends Conf
       count = 0;
     }
     
+    @Override
     public void map(Key key, Value data, Context context) throws IOException, InterruptedException
{
       
       ContinuousWalk.validate(key, data);
@@ -100,8 +102,7 @@ public class ContinuousMoru extends Conf
         if (offset > 0) {
           long rowLong = Long.parseLong(new String(val, offset, 16), 16);
           Mutation m = ContinuousIngest.genMutation(rowLong, random.nextInt(max_cf), random.nextInt(max_cq),
EMPTY_VIS, iiId, count++, key.getRowData()
-              .toArray(), random,
-              true);
+              .toArray(), random, true);
           context.write(null, m);
         }
         
@@ -112,13 +113,13 @@ public class ContinuousMoru extends Conf
   }
   
   static class Opts extends BaseOpts {
-    @Parameter(names="--maxColF", description="maximum column family value to use")
+    @Parameter(names = "--maxColF", description = "maximum column family value to use")
     short maxColF = Short.MAX_VALUE;
     
-    @Parameter(names="--maxColQ", description="maximum column qualifier value to use")
+    @Parameter(names = "--maxColQ", description = "maximum column qualifier value to use")
     short maxColQ = Short.MAX_VALUE;
- 
-    @Parameter(names="--maxMappers", description="the maximum number of mappers to use",
required=true, validateWith=PositiveInteger.class)
+    
+    @Parameter(names = "--maxMappers", description = "the maximum number of mappers to use",
required = true, validateWith = PositiveInteger.class)
     int maxMaps = 0;
   }
   
@@ -137,8 +138,8 @@ public class ContinuousMoru extends Conf
     // set up ranges
     try {
       Set<Range> ranges = opts.getConnector().tableOperations().splitRangeByTablets(opts.getTableName(),
new Range(), opts.maxMaps);
-      AccumuloInputFormat.setRanges(job.getConfiguration(), ranges);
-      AccumuloInputFormat.disableAutoAdjustRanges(job.getConfiguration());
+      AccumuloInputFormat.setRanges(job, ranges);
+      AccumuloInputFormat.setAutoAdjustRanges(job, false);
     } catch (Exception e) {
       throw new IOException(e);
     }
@@ -148,9 +149,9 @@ public class ContinuousMoru extends Conf
     job.setNumReduceTasks(0);
     
     job.setOutputFormatClass(AccumuloOutputFormat.class);
-    AccumuloOutputFormat.setMaxLatency(job.getConfiguration(), (int) (bwOpts.batchLatency
/ 1000.0));
-    AccumuloOutputFormat.setMaxMutationBufferSize(job.getConfiguration(), bwOpts.batchMemory);
-    AccumuloOutputFormat.setMaxWriteThreads(job.getConfiguration(), bwOpts.batchThreads);
+    AccumuloOutputFormat.setMaxLatency(job, Integer.parseInt(String.valueOf(bwOpts.batchLatency)));
+    AccumuloOutputFormat.setMaxMutationBufferSize(job, bwOpts.batchMemory);
+    AccumuloOutputFormat.setMaxWriteThreads(job, bwOpts.batchThreads);
     
     Configuration conf = job.getConfiguration();
     conf.setLong(MIN, opts.min);

Modified: accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/continuous/ContinuousVerify.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/continuous/ContinuousVerify.java?rev=1433745&r1=1433744&r2=1433745&view=diff
==============================================================================
--- accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/continuous/ContinuousVerify.java
(original)
+++ accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/continuous/ContinuousVerify.java
Tue Jan 15 23:50:42 2013
@@ -62,6 +62,7 @@ public class ContinuousVerify extends Co
     
     private long corrupt = 0;
     
+    @Override
     public void map(Key key, Value data, Context context) throws IOException, InterruptedException
{
       long r = Long.parseLong(key.getRow().toString(), 16);
       if (r < 0)
@@ -101,6 +102,7 @@ public class ContinuousVerify extends Co
   public static class CReducer extends Reducer<LongWritable,VLongWritable,Text,Text>
{
     private ArrayList<Long> refs = new ArrayList<Long>();
     
+    @Override
     public void reduce(LongWritable key, Iterable<VLongWritable> values, Context context)
throws IOException, InterruptedException {
       
       int defCount = 0;
@@ -136,19 +138,21 @@ public class ContinuousVerify extends Co
   }
   
   static class Opts extends ClientOnDefaultTable {
-    @Parameter(names="--output", description="location in HDFS to store the results; must
not exist", required=true)
+    @Parameter(names = "--output", description = "location in HDFS to store the results;
must not exist", required = true)
     String outputDir = "/tmp/continuousVerify";
     
-    @Parameter(names="--maxMappers", description="the maximum number of mappers to use",
required=true, validateWith=PositiveInteger.class)
+    @Parameter(names = "--maxMappers", description = "the maximum number of mappers to use",
required = true, validateWith = PositiveInteger.class)
     int maxMaps = 0;
     
-    @Parameter(names="--reducers", description="the number of reducers to use", required=true,
validateWith=PositiveInteger.class)
+    @Parameter(names = "--reducers", description = "the number of reducers to use", required
= true, validateWith = PositiveInteger.class)
     int reducers = 0;
     
-    @Parameter(names="--offline", description="perform the verification directly on the files
while the table is offline")
+    @Parameter(names = "--offline", description = "perform the verification directly on the
files while the table is offline")
     boolean scanOffline = false;
     
-    public Opts() { super("ci"); }
+    public Opts() {
+      super("ci");
+    }
   }
   
   @Override
@@ -168,17 +172,17 @@ public class ContinuousVerify extends Co
       conn.tableOperations().clone(opts.getTableName(), clone, true, new HashMap<String,String>(),
new HashSet<String>());
       conn.tableOperations().offline(clone);
     }
-
+    
     job.setInputFormatClass(AccumuloInputFormat.class);
-
+    
     opts.setAccumuloConfigs(job);
-    AccumuloInputFormat.setScanOffline(job.getConfiguration(), opts.scanOffline);
-
+    AccumuloInputFormat.setOfflineTableScan(job, opts.scanOffline);
+    
     // set up ranges
     try {
       Set<Range> ranges = opts.getConnector().tableOperations().splitRangeByTablets(opts.getTableName(),
new Range(), opts.maxMaps);
-      AccumuloInputFormat.setRanges(job.getConfiguration(), ranges);
-      AccumuloInputFormat.disableAutoAdjustRanges(job.getConfiguration());
+      AccumuloInputFormat.setRanges(job, ranges);
+      AccumuloInputFormat.setAutoAdjustRanges(job, false);
     } catch (Exception e) {
       throw new IOException(e);
     }
@@ -193,7 +197,7 @@ public class ContinuousVerify extends Co
     job.setOutputFormatClass(TextOutputFormat.class);
     
     job.getConfiguration().setBoolean("mapred.map.tasks.speculative.execution", opts.scanOffline);
-
+    
     TextOutputFormat.setOutputPath(job, new Path(opts.outputDir));
     
     job.waitForCompletion(true);

Modified: accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/randomwalk/multitable/CopyTool.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/randomwalk/multitable/CopyTool.java?rev=1433745&r1=1433744&r2=1433745&view=diff
==============================================================================
--- accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/randomwalk/multitable/CopyTool.java
(original)
+++ accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/randomwalk/multitable/CopyTool.java
Tue Jan 15 23:50:42 2013
@@ -35,6 +35,7 @@ public class CopyTool extends Configured
   protected final Logger log = Logger.getLogger(this.getClass());
   
   public static class SeqMapClass extends Mapper<Key,Value,Text,Mutation> {
+    @Override
     public void map(Key key, Value val, Context output) throws IOException, InterruptedException
{
       Mutation m = new Mutation(key.getRow());
       m.put(key.getColumnFamily(), key.getColumnQualifier(), val);
@@ -42,6 +43,7 @@ public class CopyTool extends Configured
     }
   }
   
+  @Override
   public int run(String[] args) throws Exception {
     Job job = new Job(getConf(), this.getClass().getSimpleName());
     job.setJarByClass(this.getClass());
@@ -52,8 +54,8 @@ public class CopyTool extends Configured
     }
     
     job.setInputFormatClass(AccumuloInputFormat.class);
-    AccumuloInputFormat.setInputInfo(job.getConfiguration(), args[0], args[1].getBytes(),
args[2], new Authorizations());
-    AccumuloInputFormat.setZooKeeperInstance(job.getConfiguration(), args[3], args[4]);
+    AccumuloInputFormat.setInputInfo(job, args[0], args[1].getBytes(), args[2], new Authorizations());
+    AccumuloInputFormat.setZooKeeperInstance(job, args[3], args[4]);
     
     job.setMapperClass(SeqMapClass.class);
     job.setMapOutputKeyClass(Text.class);
@@ -62,8 +64,8 @@ public class CopyTool extends Configured
     job.setNumReduceTasks(0);
     
     job.setOutputFormatClass(AccumuloOutputFormat.class);
-    AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), args[0], args[1].getBytes(),
true, args[5]);
-    AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), args[3], args[4]);
+    AccumuloOutputFormat.setOutputInfo(job, args[0], args[1].getBytes(), true, args[5]);
+    AccumuloOutputFormat.setZooKeeperInstance(job, args[3], args[4]);
     
     job.waitForCompletion(true);
     return job.isSuccessful() ? 0 : 1;

Modified: accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/randomwalk/sequential/MapRedVerifyTool.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/randomwalk/sequential/MapRedVerifyTool.java?rev=1433745&r1=1433744&r2=1433745&view=diff
==============================================================================
--- accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/randomwalk/sequential/MapRedVerifyTool.java
(original)
+++ accumulo/trunk/server/src/main/java/org/apache/accumulo/server/test/randomwalk/sequential/MapRedVerifyTool.java
Tue Jan 15 23:50:42 2013
@@ -39,6 +39,7 @@ public class MapRedVerifyTool extends Co
   protected final Logger log = Logger.getLogger(this.getClass());
   
   public static class SeqMapClass extends Mapper<Key,Value,NullWritable,IntWritable>
{
+    @Override
     public void map(Key row, Value data, Context output) throws IOException, InterruptedException
{
       Integer num = Integer.valueOf(row.getRow().toString());
       output.write(NullWritable.get(), new IntWritable(num.intValue()));
@@ -74,6 +75,7 @@ public class MapRedVerifyTool extends Co
     }
   }
   
+  @Override
   public int run(String[] args) throws Exception {
     Job job = new Job(getConf(), this.getClass().getSimpleName());
     job.setJarByClass(this.getClass());
@@ -84,8 +86,8 @@ public class MapRedVerifyTool extends Co
     }
     
     job.setInputFormatClass(AccumuloInputFormat.class);
-    AccumuloInputFormat.setInputInfo(job.getConfiguration(), args[0], args[1].getBytes(),
args[2], new Authorizations());
-    AccumuloInputFormat.setZooKeeperInstance(job.getConfiguration(), args[3], args[4]);
+    AccumuloInputFormat.setInputInfo(job, args[0], args[1].getBytes(), args[2], new Authorizations());
+    AccumuloInputFormat.setZooKeeperInstance(job, args[3], args[4]);
     
     job.setMapperClass(SeqMapClass.class);
     job.setMapOutputKeyClass(NullWritable.class);
@@ -95,8 +97,8 @@ public class MapRedVerifyTool extends Co
     job.setNumReduceTasks(1);
     
     job.setOutputFormatClass(AccumuloOutputFormat.class);
-    AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), args[0], args[1].getBytes(),
true, args[5]);
-    AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), args[3], args[4]);
+    AccumuloOutputFormat.setOutputInfo(job, args[0], args[1].getBytes(), true, args[5]);
+    AccumuloOutputFormat.setZooKeeperInstance(job, args[3], args[4]);
     
     job.waitForCompletion(true);
     return job.isSuccessful() ? 0 : 1;



Mime
View raw message