accumulo-commits mailing list archives

From cjno...@apache.org
Subject [5/5] git commit: ACCUMULO-391 setters and getters for BatchScanConfigs on jobs now use Map<String, BatchScanConfig> instead of a vararg.
Date Wed, 16 Oct 2013 00:22:13 GMT
ACCUMULO-391 setters and getters for BatchScanConfigs on jobs now use Map<String,BatchScanConfig> instead of a vararg.
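
A before/after sketch of the call-site change (the table names and the example class are illustrative, not part of the commit):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.accumulo.core.client.mapreduce.AccumuloMultiTableInputFormat;
    import org.apache.accumulo.core.client.mapreduce.BatchScanConfig;
    import org.apache.hadoop.mapreduce.Job;

    public class MultiTableSetupExample {
      public static void main(String[] args) throws Exception {
        Job job = new Job();

        // Before this commit (varargs; each config carried its own table name):
        //   AccumuloMultiTableInputFormat.setBatchScanConfigs(job,
        //       new BatchScanConfig("table1"), new BatchScanConfig("table2"));

        // After this commit: the table name is the map key, and BatchScanConfig
        // no longer stores it.
        Map<String,BatchScanConfig> configs = new HashMap<String,BatchScanConfig>();
        configs.put("table1", new BatchScanConfig());
        configs.put("table2", new BatchScanConfig());
        AccumuloMultiTableInputFormat.setBatchScanConfigs(job, configs);
      }
    }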


Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/9a63ff4e
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/9a63ff4e
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/9a63ff4e

Branch: refs/heads/master
Commit: 9a63ff4ecf4b479403d16f1ee44b4f552f71719d
Parents: ebd1120
Author: Corey J. Nolet <cjnolet@gmail.com>
Authored: Mon Oct 14 21:02:58 2013 -0400
Committer: Corey J. Nolet <cjnolet@gmail.com>
Committed: Tue Oct 15 20:22:07 2013 -0400

----------------------------------------------------------------------
 .../core/client/mapred/InputFormatBase.java     |  27 +--
 .../client/mapreduce/AbstractInputFormat.java   |  48 ++---
 .../AccumuloMultiTableInputFormat.java          |  22 ++-
 .../core/client/mapreduce/BatchScanConfig.java  |  87 ++++-----
 .../core/client/mapreduce/InputFormatBase.java  |  19 +-
 .../mapreduce/lib/util/InputConfigurator.java   | 181 ++++++++++---------
 .../client/mapred/AccumuloInputFormatTest.java  |  44 +++--
 .../AccumuloMultiTableInputFormatTest.java      |  28 ++-
 .../core/conf/TableQueryConfigTest.java         |   3 +-
 9 files changed, 262 insertions(+), 197 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/accumulo/blob/9a63ff4e/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java b/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
index 258a13a..0b13966 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
@@ -536,9 +536,9 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
    *          the table query configs to be set on the configuration.
    * @since 1.6.0
    */
-  public static void setTableQueryConfigs(JobConf job, BatchScanConfig... configs) {
+  public static void setTableQueryConfigs(JobConf job, Map<String, BatchScanConfig> configs) {
     checkNotNull(configs);
-    InputConfigurator.setTableQueryConfigs(CLASS, job, configs);
+    InputConfigurator.setBatchScanConfigs(CLASS, job, configs);
   }
   
   /**
@@ -554,8 +554,8 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
   * @return the table query configs for the job
    * @since 1.6.0
    */
-  public static List<BatchScanConfig> getTableQueryConfigs(JobConf job) {
-    return InputConfigurator.getTableQueryConfigs(CLASS, job);
+  public static Map<String,BatchScanConfig> getTableQueryConfigs(JobConf job) {
+    return InputConfigurator.getBatchScanConfigs(CLASS, job);
   }
   
   /**
@@ -828,9 +828,10 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
     validateOptions(job);
     
     LinkedList<InputSplit> splits = new LinkedList<InputSplit>();
-    List<BatchScanConfig> tableConfigs = getTableQueryConfigs(job);
-    for (BatchScanConfig tableConfig : tableConfigs) {
-      
+    Map<String, BatchScanConfig> tableConfigs = getTableQueryConfigs(job);
+    for (Entry<String, BatchScanConfig> tableConfigEntry : tableConfigs.entrySet()) {
+      String tableName = tableConfigEntry.getKey();
+      BatchScanConfig tableConfig = tableConfigEntry.getValue();
       boolean autoAdjust = tableConfig.shouldAutoAdjustRanges();
       String tableId = null;
       List<Range> ranges = autoAdjust ? Range.mergeOverlapping(tableConfig.getRanges()) : tableConfig.getRanges();
@@ -844,15 +845,15 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
       TabletLocator tl;
       try {
         if (tableConfig.isOfflineScan()) {
-          binnedRanges = binOfflineTable(job, tableConfig.getTableName(), ranges);
+          binnedRanges = binOfflineTable(job, tableName, ranges);
           while (binnedRanges == null) {
             // Some tablets were still online, try again
             UtilWaitThread.sleep(100 + (int) (Math.random() * 100)); // sleep randomly between 100 and 200 ms
-            binnedRanges = binOfflineTable(job, tableConfig.getTableName(), ranges);
+            binnedRanges = binOfflineTable(job, tableName, ranges);
           }
         } else {
           Instance instance = getInstance(job);
-          tl = getTabletLocator(job, tableConfig.getTableName());
+          tl = getTabletLocator(job, tableName);
           // it's possible that the cache could contain complete but old information about a table's tablets... so clear it
           tl.invalidateCache();
           Credentials creds = new Credentials(getPrincipal(job), getAuthenticationToken(job));
@@ -863,7 +864,7 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
                 throw new TableDeletedException(tableId);
               if (Tables.getTableState(instance, tableId) == TableState.OFFLINE)
                 throw new TableOfflineException(instance, tableId);
-              tableId = Tables.getTableId(instance, tableConfig.getTableName());
+              tableId = Tables.getTableId(instance, tableName);
             }
             binnedRanges.clear();
             log.warn("Unable to locate bins for specified ranges. Retrying.");
@@ -894,7 +895,7 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
           for (Range r : extentRanges.getValue()) {
             if (autoAdjust) {
               // divide ranges into smaller ranges, based on the tablets
-              splits.add(new RangeInputSplit(tableConfig.getTableName(), tableId, ke.clip(r), new String[] {location}));
+              splits.add(new RangeInputSplit(tableName, tableId, ke.clip(r), new String[] {location}));
             } else {
               // don't divide ranges
               ArrayList<String> locations = splitsToAdd.get(r);
@@ -909,7 +910,7 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
       
       if (!autoAdjust)
         for (Entry<Range,ArrayList<String>> entry : splitsToAdd.entrySet())
-          splits.add(new RangeInputSplit(tableConfig.getTableName(), tableId, entry.getKey(), entry.getValue().toArray(new String[0])));
+          splits.add(new RangeInputSplit(tableName, tableId, entry.getKey(), entry.getValue().toArray(new String[0])));
     }
     
     return splits.toArray(new InputSplit[splits.size()]);
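
With the mapred-side changes above, job setup now looks roughly like the sketch below (the table name and example class are placeholders; BatchScanConfig is assumed to come from the mapreduce package, as in the tests later in this patch):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.accumulo.core.client.mapred.AccumuloInputFormat;
    import org.apache.accumulo.core.client.mapreduce.BatchScanConfig;
    import org.apache.hadoop.mapred.JobConf;

    public class MapredSetupExample {
      public static void main(String[] args) {
        JobConf job = new JobConf();

        Map<String,BatchScanConfig> configs = new HashMap<String,BatchScanConfig>();
        configs.put("table1", new BatchScanConfig()); // the key is the table name

        AccumuloInputFormat.setTableQueryConfigs(job, configs);

        // The getter now returns the same map shape.
        Map<String,BatchScanConfig> fetched = AccumuloInputFormat.getTableQueryConfigs(job);
        System.out.println(fetched.containsKey("table1")); // true
      }
    }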

http://git-wip-us.apache.org/repos/asf/accumulo/blob/9a63ff4e/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
index d86e111..ed29dda 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
@@ -55,11 +55,6 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 
-/**
- * 
- * @param <K>
- * @param <V>
- */
 public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
 
   protected static final Class<?> CLASS = AccumuloInputFormat.class;
@@ -262,13 +257,13 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
   /**
    * Fetches all {@link BatchScanConfig}s that have been set on the given Hadoop configuration.
    * 
-   * @param job
+   * @param context
    *          the Hadoop job instance to be configured
-   * @return
+   * @return the {@link BatchScanConfig} objects for the job
    * @since 1.6.0
    */
-  protected static List<BatchScanConfig> getBatchScanConfigs(JobContext job) {
-    return InputConfigurator.getTableQueryConfigs(CLASS, getConfiguration(job));
+  protected static Map<String, BatchScanConfig> getBatchScanConfigs(JobContext context) {
+    return InputConfigurator.getBatchScanConfigs(CLASS, getConfiguration(context));
   }
 
   /**
@@ -277,15 +272,15 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
    * <p>
    * null is returned in the event that the table doesn't exist.
    * 
-   * @param job
+   * @param context
    *          the Hadoop job instance to be configured
    * @param tableName
    *          the table name for which to grab the config object
    * @return the {@link BatchScanConfig} for the given table
    * @since 1.6.0
    */
-  protected static BatchScanConfig getBatchScanConfig(JobContext job, String tableName) {
-    return InputConfigurator.getTableQueryConfig(CLASS, getConfiguration(job), tableName);
+  protected static BatchScanConfig getBatchScanConfig(JobContext context, String tableName) {
+    return InputConfigurator.getTableQueryConfig(CLASS, getConfiguration(context), tableName);
   }
 
   /**
@@ -335,6 +330,15 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
     protected Iterator<Map.Entry<Key,Value>> scannerIterator;
     protected RangeInputSplit split;
 
+    /**
+     * Configures the iterators on a scanner for the given table name.
+     * @param context
+     *          the Hadoop context for the configured job
+     * @param scanner
+     *          the scanner for which to configure the iterators
+     * @param tableName
+     *          the table name for which the scanner is configured
+     */
     protected abstract void setupIterators(TaskAttemptContext context, Scanner scanner, String tableName);
 
     /**
@@ -544,8 +548,11 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
     validateOptions(context);
 
     LinkedList<InputSplit> splits = new LinkedList<InputSplit>();
-    List<BatchScanConfig> tableConfigs = getBatchScanConfigs(context);
-    for (BatchScanConfig tableConfig : tableConfigs) {
+    Map<String, BatchScanConfig> tableConfigs = getBatchScanConfigs(context);
+    for (Map.Entry<String, BatchScanConfig> tableConfigEntry : tableConfigs.entrySet()) {
+      
+      String tableName = tableConfigEntry.getKey();
+      BatchScanConfig tableConfig = tableConfigEntry.getValue();
 
       boolean autoAdjust = tableConfig.shouldAutoAdjustRanges();
       String tableId = null;
@@ -560,16 +567,16 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
       TabletLocator tl;
       try {
         if (tableConfig.isOfflineScan()) {
-          binnedRanges = binOfflineTable(context, tableConfig.getTableName(), ranges);
+          binnedRanges = binOfflineTable(context, tableName, ranges);
           while (binnedRanges == null) {
             // Some tablets were still online, try again
             UtilWaitThread.sleep(100 + (int) (Math.random() * 100)); // sleep randomly between 100 and 200 ms
-            binnedRanges = binOfflineTable(context, tableConfig.getTableName(), ranges);
+            binnedRanges = binOfflineTable(context, tableName, ranges);
 
           }
         } else {
           Instance instance = getInstance(context);
-          tl = getTabletLocator(context, tableConfig.getTableName());
+          tl = getTabletLocator(context, tableName);
           // it's possible that the cache could contain complete but old information about a table's tablets... so clear it
           tl.invalidateCache();
           Credentials creds = new Credentials(getPrincipal(context), getAuthenticationToken(context));
@@ -580,7 +587,7 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
                 throw new TableDeletedException(tableId);
               if (Tables.getTableState(instance, tableId) == TableState.OFFLINE)
                 throw new TableOfflineException(instance, tableId);
-              tableId = Tables.getTableId(instance, tableConfig.getTableName());
+              tableId = Tables.getTableId(instance, tableName);
             }
             binnedRanges.clear();
             log.warn("Unable to locate bins for specified ranges. Retrying.");
@@ -611,7 +618,7 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
           for (Range r : extentRanges.getValue()) {
             if (autoAdjust) {
               // divide ranges into smaller ranges, based on the tablets
-              splits.add(new RangeInputSplit(tableConfig.getTableName(), tableId, ke.clip(r), new String[] {location}));
+              splits.add(new RangeInputSplit(tableName, tableId, ke.clip(r), new String[] {location}));
             } else {
               // don't divide ranges
               ArrayList<String> locations = splitsToAdd.get(r);
@@ -626,7 +633,7 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
 
       if (!autoAdjust)
         for (Map.Entry<Range,ArrayList<String>> entry : splitsToAdd.entrySet())
-          splits.add(new RangeInputSplit(tableConfig.getTableName(), tableId, entry.getKey(), entry.getValue().toArray(new String[0])));
+          splits.add(new RangeInputSplit(tableName, tableId, entry.getKey(), entry.getValue().toArray(new String[0])));
     }
     return splits;
   }
@@ -786,5 +793,4 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
       throw new RuntimeException(e);
     }
   }
-
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/9a63ff4e/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java
index 7f8b47a..4208ff4 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java
@@ -17,6 +17,24 @@ import java.util.Map;
 
 import static com.google.common.base.Preconditions.checkNotNull;
 
+/**
+ * This class allows MapReduce jobs to use multiple Accumulo tables as the source of data. This {@link org.apache.hadoop.mapreduce.InputFormat} provides keys
+ * and values of type {@link Key} and {@link Value} to the Map function.
+ * 
+ * The user must specify the following via static configurator methods:
+ * 
+ * <ul>
+ * <li>{@link AccumuloMultiTableInputFormat#setConnectorInfo(Job, String, org.apache.accumulo.core.client.security.tokens.AuthenticationToken)}
+ * <li>{@link AccumuloMultiTableInputFormat#setScanAuthorizations(Job, org.apache.accumulo.core.security.Authorizations)}
+ * <li>{@link AccumuloMultiTableInputFormat#setZooKeeperInstance(Job, String, String)} OR {@link AccumuloInputFormat#setMockInstance(Job, String)}
+ * <li>{@link AccumuloMultiTableInputFormat#setBatchScanConfigs(org.apache.hadoop.mapreduce.Job, java.util.Map)}
+ * </ul>
+ * 
+ * Other static methods are optional.
+ */
 public class AccumuloMultiTableInputFormat extends AbstractInputFormat<Key,Value>{
   
   /**
@@ -28,9 +46,9 @@ public class AccumuloMultiTableInputFormat extends AbstractInputFormat<Key,Value
    *          the table query configs to be set on the configuration.
    * @since 1.6.0
    */
-  public static void setBatchScanConfigs(Job job, BatchScanConfig... configs) {
+  public static void setBatchScanConfigs(Job job, Map<String, BatchScanConfig> configs) {
     checkNotNull(configs);
-    InputConfigurator.setTableQueryConfigs(CLASS, getConfiguration(job), configs);
+    InputConfigurator.setBatchScanConfigs(CLASS, getConfiguration(job), configs);
   }
   
   @Override
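
Putting the checklist from the new Javadoc together, a sketch of a complete multi-table job setup (user, password, instance, and table names are placeholders; setMockInstance stands in for setZooKeeperInstance here):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.accumulo.core.client.mapreduce.AccumuloMultiTableInputFormat;
    import org.apache.accumulo.core.client.mapreduce.BatchScanConfig;
    import org.apache.accumulo.core.client.security.tokens.PasswordToken;
    import org.apache.accumulo.core.security.Authorizations;
    import org.apache.hadoop.mapreduce.Job;

    public class MultiTableJobExample {
      public static void main(String[] args) throws Exception {
        Job job = new Job();

        // The required configurator calls listed in the Javadoc above.
        AccumuloMultiTableInputFormat.setConnectorInfo(job, "user", new PasswordToken("pass"));
        AccumuloMultiTableInputFormat.setScanAuthorizations(job, new Authorizations());
        AccumuloMultiTableInputFormat.setMockInstance(job, "instance");

        // One BatchScanConfig per input table, keyed by table name.
        Map<String,BatchScanConfig> configs = new HashMap<String,BatchScanConfig>();
        configs.put("table1", new BatchScanConfig());
        configs.put("table2", new BatchScanConfig());
        AccumuloMultiTableInputFormat.setBatchScanConfigs(job, configs);

        job.setInputFormatClass(AccumuloMultiTableInputFormat.class);
      }
    }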

http://git-wip-us.apache.org/repos/asf/accumulo/blob/9a63ff4e/core/src/main/java/org/apache/accumulo/core/client/mapreduce/BatchScanConfig.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/BatchScanConfig.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/BatchScanConfig.java
index eae973d..feb49bb 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/BatchScanConfig.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/BatchScanConfig.java
@@ -33,36 +33,31 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 
 /**
- * This class to holds a query configuration for a table. It contains all the properties needed to specify how rows should be returned from the table.
+ * This class holds a batch scan configuration for a table. It contains all the properties needed to specify how rows should be returned from the table.
  */
 public class BatchScanConfig implements Writable {
-  
-  private String tableName;
+
   private List<IteratorSetting> iterators;
   private List<Range> ranges;
   private Set<Pair<Text,Text>> columns;
-  
+
   private boolean autoAdjustRanges = true;
   private boolean useLocalIterators = false;
   private boolean useIsolatedScanners = false;
   private boolean offlineScan = false;
-  
-  public BatchScanConfig(String tableName) {
-    checkNotNull(tableName);
-    this.tableName = tableName;
-  }
-  
-  public BatchScanConfig(DataInput input) throws IOException {
-    readFields(input);
-  }
+
+  public BatchScanConfig() {}
   
   /**
-   * Returns the table name associated with this configuration
+   * Creates a batch scan config object out of a previously serialized batch scan config object.
+   * @param input
+   *          the data input of the serialized batch scan config
+   * @throws IOException
    */
-  public String getTableName() {
-    return tableName;
+  public BatchScanConfig(DataInput input) throws IOException {
+    readFields(input);
   }
-  
+
   /**
    * Sets the input ranges to scan for all tables associated with this job. This will be added to any per-table ranges that have been set using
    * 
@@ -74,14 +69,14 @@ public class BatchScanConfig implements Writable {
     this.ranges = ranges;
     return this;
   }
-  
+
   /**
    * Returns the ranges to be queried in the configuration
    */
   public List<Range> getRanges() {
     return ranges != null ? ranges : new ArrayList<Range>();
   }
-  
+
   /**
    * Restricts the columns that will be mapped over for this job for the default input table.
    * 
@@ -94,14 +89,14 @@ public class BatchScanConfig implements Writable {
     this.columns = columns;
     return this;
   }
-  
+
   /**
    * Returns the columns to be fetched for this configuration
    */
   public Set<Pair<Text,Text>> getFetchedColumns() {
     return columns != null ? columns : new HashSet<Pair<Text,Text>>();
   }
-  
+
   /**
   * Sets the iterators to be used in the query.
    * 
@@ -113,14 +108,14 @@ public class BatchScanConfig implements Writable {
     this.iterators = iterators;
     return this;
   }
-  
+
   /**
    * Returns the iterators to be set on this configuration
    */
   public List<IteratorSetting> getIterators() {
     return iterators != null ? iterators : new ArrayList<IteratorSetting>();
   }
-  
+
   /**
    * Controls the automatic adjustment of ranges for this job. This feature merges overlapping ranges, then splits them to align with tablet boundaries.
   * Disabling this feature will cause exactly one Map task to be created for each specified range. The default setting is enabled.
@@ -137,7 +132,7 @@ public class BatchScanConfig implements Writable {
     this.autoAdjustRanges = autoAdjustRanges;
     return this;
   }
-  
+
   /**
    * Determines whether a configuration has auto-adjust ranges enabled.
    * 
@@ -148,7 +143,7 @@ public class BatchScanConfig implements Writable {
   public boolean shouldAutoAdjustRanges() {
     return autoAdjustRanges;
   }
-  
+
   /**
    * Controls the use of the {@link org.apache.accumulo.core.client.ClientSideIteratorScanner} in this job. Enabling this feature will cause the iterator stack
    * to be constructed within the Map task, rather than within the Accumulo TServer. To use this feature, all classes needed for those iterators must be
@@ -165,7 +160,7 @@ public class BatchScanConfig implements Writable {
     this.useLocalIterators = useLocalIterators;
     return this;
   }
-  
+
   /**
    * Determines whether a configuration uses local iterators.
    * 
@@ -176,7 +171,7 @@ public class BatchScanConfig implements Writable {
   public boolean shouldUseLocalIterators() {
     return useLocalIterators;
   }
-  
+
   /**
    * <p>
    * Enable reading offline tables. By default, this feature is disabled and only online tables are scanned. This will make the map reduce job directly read the
@@ -210,7 +205,7 @@ public class BatchScanConfig implements Writable {
     this.offlineScan = offlineScan;
     return this;
   }
-  
+
   /**
    * Determines whether a configuration has the offline table scan feature enabled.
    * 
@@ -221,7 +216,7 @@ public class BatchScanConfig implements Writable {
   public boolean isOfflineScan() {
     return offlineScan;
   }
-  
+
   /**
    * Controls the use of the {@link org.apache.accumulo.core.client.IsolatedScanner} in this job.
    * 
@@ -236,7 +231,7 @@ public class BatchScanConfig implements Writable {
     this.useIsolatedScanners = useIsolatedScanners;
     return this;
   }
-  
+
   /**
    * Determines whether a configuration has isolation enabled.
    * 
@@ -247,10 +242,15 @@ public class BatchScanConfig implements Writable {
   public boolean shouldUseIsolatedScanners() {
     return useIsolatedScanners;
   }
-  
+
+  /**
+   * Writes the state of the current object out to the given {@link DataOutput}.
+   * @param dataOutput
+   *          the output to which to write the object's state
+   * @throws IOException
+   */
   @Override
   public void write(DataOutput dataOutput) throws IOException {
-    dataOutput.writeUTF(tableName);
     if (iterators != null) {
       dataOutput.writeInt(iterators.size());
       for (IteratorSetting setting : iterators)
@@ -284,10 +284,15 @@ public class BatchScanConfig implements Writable {
     dataOutput.writeBoolean(useLocalIterators);
     dataOutput.writeBoolean(useIsolatedScanners);
   }
-  
+
+  /**
+   * Reads the fields from the given {@link DataInput} into the current object.
+   * @param dataInput
+   *          the input from which to read the object's state
+   * @throws IOException
+   */
   @Override
   public void readFields(DataInput dataInput) throws IOException {
-    this.tableName = dataInput.readUTF();
     // load iterators
     long iterSize = dataInput.readInt();
     if (iterSize > 0)
@@ -323,16 +328,16 @@ public class BatchScanConfig implements Writable {
     useLocalIterators = dataInput.readBoolean();
     useIsolatedScanners = dataInput.readBoolean();
   }
-  
+
   @Override
   public boolean equals(Object o) {
     if (this == o)
       return true;
     if (o == null || getClass() != o.getClass())
       return false;
-    
+
     BatchScanConfig that = (BatchScanConfig) o;
-    
+
     if (autoAdjustRanges != that.autoAdjustRanges)
       return false;
     if (offlineScan != that.offlineScan)
@@ -347,16 +352,12 @@ public class BatchScanConfig implements Writable {
       return false;
     if (ranges != null ? !ranges.equals(that.ranges) : that.ranges != null)
       return false;
-    if (tableName != null ? !tableName.equals(that.tableName) : that.tableName != null)
-      return false;
-    
     return true;
   }
-  
+
   @Override
   public int hashCode() {
-    int result = tableName != null ? tableName.hashCode() : 0;
-    result = 31 * result + (iterators != null ? iterators.hashCode() : 0);
+    int result = 31 * (iterators != null ? iterators.hashCode() : 0);
     result = 31 * result + (ranges != null ? ranges.hashCode() : 0);
     result = 31 * result + (columns != null ? columns.hashCode() : 0);
     result = 31 * result + (autoAdjustRanges ? 1 : 0);
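
The reworked BatchScanConfig can be exercised on its own. A sketch of the fluent setters and the Writable round trip; note the table name is no longer part of the serialized payload:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.util.Collections;

    import org.apache.accumulo.core.client.mapreduce.BatchScanConfig;
    import org.apache.accumulo.core.data.Range;

    public class BatchScanConfigRoundTrip {
      public static void main(String[] args) throws IOException {
        // The setters return `this`, so a config can be built in one chain.
        BatchScanConfig config = new BatchScanConfig()
            .setRanges(Collections.singletonList(new Range("a", "b")))
            .setAutoAdjustRanges(false)
            .setOfflineScan(true);

        // Writable round trip; the table name now lives only in the map key
        // on the job configuration, not in this payload.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        config.write(new DataOutputStream(baos));
        BatchScanConfig copy = new BatchScanConfig(
            new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));

        System.out.println(config.equals(copy)); // true
      }
    }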

http://git-wip-us.apache.org/repos/asf/accumulo/blob/9a63ff4e/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
index f7057f1..af893a8 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
@@ -99,7 +99,6 @@ public abstract class InputFormatBase<K,V> extends AbstractInputFormat<K,V> {
    * @return the ranges
    * @since 1.5.0
    * @see #setRanges(Job, Collection)
-   * @see #setRanges(org.apache.hadoop.mapreduce.Job, java.util.Collection)
    */
   protected static List<Range> getRanges(JobContext context) throws IOException {
     return InputConfigurator.getRanges(CLASS, getConfiguration(context));
@@ -313,11 +312,29 @@ public abstract class InputFormatBase<K,V> extends AbstractInputFormat<K,V> {
   }
   
   protected abstract static class RecordReaderBase<K,V> extends AbstractRecordReader<K,V> {
+    
+      /**
+       * Apply the configured iterators from the configuration to the scanner for the specified table name
+       * 
+       * @param context
+       *          the Hadoop context for the configured job
+       * @param scanner
+       *          the scanner to configure
+       * @since 1.6.0
+       */
       @Override
       protected void setupIterators(TaskAttemptContext context, Scanner scanner, String tableName) {
         setupIterators(context, scanner);
       }
       
+      /**
+       * Apply the configured iterators from the configuration to the scanner.
+       * 
+       * @param context
+       *          the Hadoop context for the configured job
+       * @param scanner
+       *          the scanner to configure
+       */
       protected void setupIterators(TaskAttemptContext context, Scanner scanner) {
         List<IteratorSetting> iterators = getIterators(context);
         for (IteratorSetting iterator : iterators) 
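
The hunk above is cut off mid-loop in the archived message; the loop body presumably attaches each setting to the scanner via ScannerBase.addScanIterator. A minimal standalone sketch of that idea:

    import java.util.List;

    import org.apache.accumulo.core.client.IteratorSetting;
    import org.apache.accumulo.core.client.Scanner;

    final class IteratorSetup {
      // Attach each configured IteratorSetting to the scanner before the
      // scan starts, mirroring what setupIterators does with the job's list.
      static void applyIterators(Scanner scanner, List<IteratorSetting> iterators) {
        for (IteratorSetting iterator : iterators) {
          scanner.addScanIterator(iterator);
        }
      }
    }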

http://git-wip-us.apache.org/repos/asf/accumulo/blob/9a63ff4e/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
index 1b76051..4dac750 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
@@ -25,11 +25,14 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.StringTokenizer;
 
+import com.google.common.collect.Maps;
 import org.apache.accumulo.core.Constants;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
@@ -53,23 +56,25 @@ import org.apache.accumulo.core.util.Pair;
 import org.apache.accumulo.core.util.TextUtil;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.util.StringUtils;
 
 /**
  * @since 1.5.0
  */
 public class InputConfigurator extends ConfiguratorBase {
-  
+
   /**
    * Configuration keys for {@link Scanner}.
    * 
    * @since 1.5.0
    */
-  public static enum ScanOpts { 
+  public static enum ScanOpts {
     TABLE_NAME, AUTHORIZATIONS, RANGES, COLUMNS, ITERATORS, TABLE_CONFIGS
   }
-  
+
   /**
    * Configuration keys for various features.
    * 
@@ -78,7 +83,7 @@ public class InputConfigurator extends ConfiguratorBase {
   public static enum Features {
     AUTO_ADJUST_RANGES, SCAN_ISOLATION, USE_LOCAL_ITERATORS, SCAN_OFFLINE
   }
-  
+
   /**
    * Sets the name of the input table, over which this job will scan.
    * 
@@ -94,7 +99,7 @@ public class InputConfigurator extends ConfiguratorBase {
     notNull(tableName);
     conf.set(enumToConfKey(implementingClass, ScanOpts.TABLE_NAME), tableName);
   }
-  
+
   /**
    * Sets the name of the input table, over which this job will scan.
    * 
@@ -107,7 +112,7 @@ public class InputConfigurator extends ConfiguratorBase {
   public static String getInputTableName(Class<?> implementingClass, Configuration conf) {
     return conf.get(enumToConfKey(implementingClass, ScanOpts.TABLE_NAME));
   }
-  
+
   /**
    * Sets the {@link Authorizations} used to scan. Must be a subset of the user's authorization. Defaults to the empty set.
    * 
@@ -123,7 +128,7 @@ public class InputConfigurator extends ConfiguratorBase {
     if (auths != null && !auths.isEmpty())
       conf.set(enumToConfKey(implementingClass, ScanOpts.AUTHORIZATIONS), auths.serialize());
   }
-  
+
   /**
    * Gets the authorizations to set for the scans from the configuration.
    * 
@@ -139,7 +144,7 @@ public class InputConfigurator extends ConfiguratorBase {
     String authString = conf.get(enumToConfKey(implementingClass, ScanOpts.AUTHORIZATIONS));
     return authString == null ? Authorizations.EMPTY : new Authorizations(authString.getBytes());
   }
-  
+
   /**
    * Sets the input ranges to scan on all input tables for this job. If not set, the entire table will be scanned.
    * 
@@ -155,7 +160,7 @@ public class InputConfigurator extends ConfiguratorBase {
    */
   public static void setRanges(Class<?> implementingClass, Configuration conf, Collection<Range> ranges) {
     notNull(ranges);
-    
+
     ArrayList<String> rangeStrings = new ArrayList<String>(ranges.size());
     try {
       for (Range r : ranges) {
@@ -168,7 +173,7 @@ public class InputConfigurator extends ConfiguratorBase {
       throw new IllegalArgumentException("Unable to encode ranges to Base64", ex);
     }
   }
-  
+
   /**
    * Gets the ranges to scan over from a job.
    * 
@@ -183,7 +188,7 @@ public class InputConfigurator extends ConfiguratorBase {
    * @see #setRanges(Class, Configuration, Collection)
    */
   public static List<Range> getRanges(Class<?> implementingClass, Configuration conf) throws IOException {
-    
+
     Collection<String> encodedRanges = conf.getStringCollection(enumToConfKey(implementingClass, ScanOpts.RANGES));
     List<Range> ranges = new ArrayList<Range>();
     for (String rangeString : encodedRanges) {
@@ -194,7 +199,7 @@ public class InputConfigurator extends ConfiguratorBase {
     }
     return ranges;
   }
-  
+
   /**
    * Gets a list of the iterator settings (for iterators to apply to a scanner) from this configuration.
    * 
@@ -208,11 +213,11 @@ public class InputConfigurator extends ConfiguratorBase {
    */
   public static List<IteratorSetting> getIterators(Class<?> implementingClass, Configuration conf) {
     String iterators = conf.get(enumToConfKey(implementingClass, ScanOpts.ITERATORS));
-    
+
     // If no iterators are present, return an empty list
     if (iterators == null || iterators.isEmpty())
       return new ArrayList<IteratorSetting>();
-    
+
     // Compose the set of iterators encoded in the job configuration
     StringTokenizer tokens = new StringTokenizer(iterators, StringUtils.COMMA_STR);
     List<IteratorSetting> list = new ArrayList<IteratorSetting>();
@@ -228,7 +233,7 @@ public class InputConfigurator extends ConfiguratorBase {
     }
     return list;
   }
-  
+
   /**
    * Restricts the columns that will be mapped over for the single input table on this job.
    * 
@@ -247,10 +252,10 @@ public class InputConfigurator extends ConfiguratorBase {
     notNull(columnFamilyColumnQualifierPairs);
     ArrayList<String> columnStrings = new ArrayList<String>();
     for (Pair<Text,Text> column : columnFamilyColumnQualifierPairs) {
-      
+
       if (column.getFirst() == null)
         throw new IllegalArgumentException("Column family can not be null");
-      
+
       String col = new String(Base64.encodeBase64(TextUtil.getBytes(column.getFirst())), Constants.UTF8);
       if (column.getSecond() != null)
         col += ":" + new String(Base64.encodeBase64(TextUtil.getBytes(column.getSecond())), Constants.UTF8);
@@ -258,7 +263,7 @@ public class InputConfigurator extends ConfiguratorBase {
     }
     conf.setStrings(enumToConfKey(implementingClass, ScanOpts.COLUMNS), columnStrings.toArray(new String[0]));
   }
-  
+
   /**
    * Gets the columns to be mapped over from this job.
    * 
@@ -280,7 +285,7 @@ public class InputConfigurator extends ConfiguratorBase {
     }
     return columns;
   }
-  
+
   /**
    * Encode an iterator on the input for the single input table associated with this job.
    * 
@@ -304,7 +309,7 @@ public class InputConfigurator extends ConfiguratorBase {
     } catch (IOException e) {
       throw new IllegalArgumentException("unable to serialize IteratorSetting");
     }
-    
+
     String confKey = enumToConfKey(implementingClass, ScanOpts.ITERATORS);
     String iterators = conf.get(confKey);
     // No iterators specified yet, create a new string
@@ -317,7 +322,7 @@ public class InputConfigurator extends ConfiguratorBase {
     // Store the iterators w/ the job
     conf.set(confKey, iterators);
   }
-  
+
   /**
    * Controls the automatic adjustment of ranges for this job. This feature merges overlapping ranges, then splits them to align with tablet boundaries.
   * Disabling this feature will cause exactly one Map task to be created for each specified range. The default setting is enabled.
@@ -337,7 +342,7 @@ public class InputConfigurator extends ConfiguratorBase {
   public static void setAutoAdjustRanges(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
     conf.setBoolean(enumToConfKey(implementingClass, Features.AUTO_ADJUST_RANGES), enableFeature);
   }
-  
+
   /**
    * Determines whether a configuration has auto-adjust ranges enabled.
    * 
@@ -352,7 +357,7 @@ public class InputConfigurator extends ConfiguratorBase {
   public static Boolean getAutoAdjustRanges(Class<?> implementingClass, Configuration conf) {
     return conf.getBoolean(enumToConfKey(implementingClass, Features.AUTO_ADJUST_RANGES), true);
   }
-  
+
   /**
    * Controls the use of the {@link IsolatedScanner} in this job.
    * 
@@ -370,7 +375,7 @@ public class InputConfigurator extends ConfiguratorBase {
   public static void setScanIsolation(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
     conf.setBoolean(enumToConfKey(implementingClass, Features.SCAN_ISOLATION), enableFeature);
   }
-  
+
   /**
    * Determines whether a configuration has isolation enabled.
    * 
@@ -385,7 +390,7 @@ public class InputConfigurator extends ConfiguratorBase {
   public static Boolean isIsolated(Class<?> implementingClass, Configuration conf) {
     return conf.getBoolean(enumToConfKey(implementingClass, Features.SCAN_ISOLATION), false);
   }
-  
+
   /**
    * Controls the use of the {@link ClientSideIteratorScanner} in this job. Enabling this feature will cause the iterator stack to be constructed within the Map
    * task, rather than within the Accumulo TServer. To use this feature, all classes needed for those iterators must be available on the classpath for the task.
@@ -404,7 +409,7 @@ public class InputConfigurator extends ConfiguratorBase {
   public static void setLocalIterators(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
     conf.setBoolean(enumToConfKey(implementingClass, Features.USE_LOCAL_ITERATORS), enableFeature);
   }
-  
+
   /**
    * Determines whether a configuration uses local iterators.
    * 
@@ -419,7 +424,7 @@ public class InputConfigurator extends ConfiguratorBase {
   public static Boolean usesLocalIterators(Class<?> implementingClass, Configuration conf) {
     return conf.getBoolean(enumToConfKey(implementingClass, Features.USE_LOCAL_ITERATORS), false);
   }
-  
+
   /**
    * <p>
    * Enable reading offline tables. By default, this feature is disabled and only online tables are scanned. This will make the map reduce job directly read the
@@ -456,7 +461,7 @@ public class InputConfigurator extends ConfiguratorBase {
   public static void setOfflineTableScan(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
     conf.setBoolean(enumToConfKey(implementingClass, Features.SCAN_OFFLINE), enableFeature);
   }
-  
+
   /**
    * Determines whether a configuration has the offline table scan feature enabled.
    * 
@@ -471,7 +476,7 @@ public class InputConfigurator extends ConfiguratorBase {
   public static Boolean isOfflineScan(Class<?> implementingClass, Configuration conf) {
     return conf.getBoolean(enumToConfKey(implementingClass, Features.SCAN_OFFLINE), false);
   }
-  
+
   /**
    * Sets configurations for multiple tables at a time.
    * 
@@ -479,25 +484,26 @@ public class InputConfigurator extends ConfiguratorBase {
    *          the class whose name will be used as a prefix for the property configuration key
    * @param conf
    *          the Hadoop configuration object to configure
-   * @param tconf
+   * @param configs
    *          an array of {@link org.apache.accumulo.core.client.mapreduce.BatchScanConfig} objects to associate with the job
    * @since 1.6.0
    */
-  public static void setTableQueryConfigs(Class<?> implementingClass, Configuration conf, BatchScanConfig... tconf) {
-    List<String> tableQueryConfigStrings = new ArrayList<String>();
-    for (BatchScanConfig queryConfig : tconf) {
-      ByteArrayOutputStream baos = new ByteArrayOutputStream();
-      try {
-        queryConfig.write(new DataOutputStream(baos));
-      } catch (IOException e) {
-        throw new IllegalStateException("Configuration for " + queryConfig.getTableName() + " could not be serialized.");
-      }
-      tableQueryConfigStrings.add(new String(Base64.encodeBase64(baos.toByteArray())));
+  public static void setBatchScanConfigs(Class<?> implementingClass, Configuration conf, Map<String,BatchScanConfig> configs) {
+    MapWritable mapWritable = new MapWritable();
+    for (Map.Entry<String,BatchScanConfig> tableConfig : configs.entrySet())
+      mapWritable.put(new Text(tableConfig.getKey()), tableConfig.getValue());
+
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    try {
+      mapWritable.write(new DataOutputStream(baos));
+    } catch (IOException e) {
+      throw new IllegalStateException("Table configuration could not be serialized.");
     }
+
     String confKey = enumToConfKey(implementingClass, ScanOpts.TABLE_CONFIGS);
-    conf.setStrings(confKey, tableQueryConfigStrings.toArray(new String[0]));
+    conf.set(confKey, new String(Base64.encodeBase64(baos.toByteArray())));
   }
-  
+
   /**
    * Returns all {@link org.apache.accumulo.core.client.mapreduce.BatchScanConfig} objects associated with this job.
    * 
@@ -508,33 +514,29 @@ public class InputConfigurator extends ConfiguratorBase {
    * @return all of the table query configs for the job
    * @since 1.6.0
    */
-  public static List<BatchScanConfig> getTableQueryConfigs(Class<?> implementingClass, Configuration conf) {
-    List<BatchScanConfig> configs = new ArrayList<BatchScanConfig>();
-    Collection<String> configStrings = conf.getStringCollection(enumToConfKey(implementingClass, ScanOpts.TABLE_CONFIGS));
-    if (configStrings != null) {
-      for (String str : configStrings) {
-        try {
-          byte[] bytes = Base64.decodeBase64(str.getBytes());
-          ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
-          configs.add(new BatchScanConfig(new DataInputStream(bais)));
-          bais.close();
-        } catch (IOException e) {
-          throw new IllegalStateException("The table query configurations could not be deserialized from the given configuration");
-        }
+  public static Map<String,BatchScanConfig> getBatchScanConfigs(Class<?> implementingClass, Configuration conf) {
+    Map<String,BatchScanConfig> configs = new HashMap<String,BatchScanConfig>();
+    Map.Entry<String, BatchScanConfig> defaultConfig = getDefaultBatchScanConfig(implementingClass, conf);
+    if (defaultConfig != null)
+      configs.put(defaultConfig.getKey(), defaultConfig.getValue());
+    String configString = conf.get(enumToConfKey(implementingClass, ScanOpts.TABLE_CONFIGS));
+    MapWritable mapWritable = new MapWritable();
+    if (configString != null) {
+      try {
+        byte[] bytes = Base64.decodeBase64(configString.getBytes());
+        ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
+        mapWritable.readFields(new DataInputStream(bais));
+        bais.close();
+      } catch (IOException e) {
+        throw new IllegalStateException("The table query configurations could not be deserialized from the given configuration");
       }
     }
-    BatchScanConfig defaultQueryConfig;
-    try {
-      defaultQueryConfig = getDefaultTableConfig(implementingClass, conf);
-    } catch (IOException e) {
-      throw new IllegalStateException("There was an error deserializing the default table configuration.");
-    }
-    if (defaultQueryConfig != null)
-      configs.add(defaultQueryConfig);
-    
+    for (Map.Entry<Writable,Writable> entry : mapWritable.entrySet())
+      configs.put(((Text) entry.getKey()).toString(), (BatchScanConfig) entry.getValue());
+
     return configs;
   }
-  
+
   /**
    * Returns the {@link org.apache.accumulo.core.client.mapreduce.BatchScanConfig} for the given table
    * 
@@ -548,15 +550,10 @@ public class InputConfigurator extends ConfiguratorBase {
    * @since 1.6.0
    */
   public static BatchScanConfig getTableQueryConfig(Class<?> implementingClass, Configuration conf, String tableName) {
-    List<BatchScanConfig> queryConfigs = getTableQueryConfigs(implementingClass, conf);
-    for (BatchScanConfig queryConfig : queryConfigs) {
-      if (queryConfig.getTableName().equals(tableName)) {
-        return queryConfig;
-      }
-    }
-    return null;
+    Map<String,BatchScanConfig> queryConfigs = getBatchScanConfigs(implementingClass, conf);
+    return queryConfigs.get(tableName);
   }
-  
+
   /**
    * Initializes an Accumulo {@link TabletLocator} based on the configuration.
    * 
@@ -578,7 +575,7 @@ public class InputConfigurator extends ConfiguratorBase {
     Instance instance = getInstance(implementingClass, conf);
     return TabletLocator.getLocator(instance, new Text(Tables.getTableId(instance, tableName)));
   }
-  
+
   // InputFormat doesn't have the equivalent of OutputFormat's checkOutputSpecs(JobContext job)
   /**
    * Check whether a configuration is fully configured to be used with an Accumulo {@link org.apache.hadoop.mapreduce.InputFormat}.
@@ -592,6 +589,8 @@ public class InputConfigurator extends ConfiguratorBase {
    * @since 1.5.0
    */
   public static void validateOptions(Class<?> implementingClass, Configuration conf) throws IOException {
+    
+    Map<String, BatchScanConfig> batchScanConfigs = getBatchScanConfigs(implementingClass, conf);
     if (!isConnectorInfoSet(implementingClass, conf))
       throw new IOException("Input info has not been set.");
     String instanceKey = conf.get(enumToConfKey(implementingClass, InstanceOpts.TYPE));
@@ -605,18 +604,20 @@ public class InputConfigurator extends ConfiguratorBase {
       if (!c.securityOperations().authenticateUser(principal, token))
         throw new IOException("Unable to authenticate user");
       
-      for (BatchScanConfig tableConfig : getTableQueryConfigs(implementingClass, conf)) {
-        if (!c.securityOperations().hasTablePermission(getPrincipal(implementingClass, conf), tableConfig.getTableName(), TablePermission.READ))
+      if (batchScanConfigs.size() == 0)
+        throw new IOException("No table set.");
+      
+      for (Map.Entry<String, BatchScanConfig> tableConfig : batchScanConfigs.entrySet()) {
+        if (!c.securityOperations().hasTablePermission(getPrincipal(implementingClass, conf), tableConfig.getKey(), TablePermission.READ))
           throw new IOException("Unable to access table");
       }
-      
-      for (BatchScanConfig tableConfig : getTableQueryConfigs(implementingClass, conf)) {
+      for (Map.Entry<String, BatchScanConfig> tableConfigEntry : batchScanConfigs.entrySet()) {
+        BatchScanConfig tableConfig = tableConfigEntry.getValue();
         if (!tableConfig.shouldUseLocalIterators()) {
           if (tableConfig.getIterators() != null) {
             for (IteratorSetting iter : tableConfig.getIterators()) {
-              if (!c.tableOperations().testClassLoad(tableConfig.getTableName(), iter.getIteratorClass(), SortedKeyValueIterator.class.getName()))
+              if (!c.tableOperations().testClassLoad(tableConfigEntry.getKey(), iter.getIteratorClass(), SortedKeyValueIterator.class.getName()))
                 throw new AccumuloException("Servers are unable to load " + iter.getIteratorClass() + " as a " + SortedKeyValueIterator.class.getName());
-              
             }
           }
         }
@@ -629,9 +630,10 @@ public class InputConfigurator extends ConfiguratorBase {
       throw new IOException(e);
     }
   }
-  
+
   /**
-   * Returns the {@link org.apache.accumulo.core.client.mapreduce.BatchScanConfig} for the configuration based on the properties set using the single-table input methods.
+   * Returns the {@link org.apache.accumulo.core.client.mapreduce.BatchScanConfig} for the configuration based on the properties set using the single-table
+   * input methods.
    * 
    * @param implementingClass
    *          the class whose name will be used as a prefix for the property configuration key
@@ -641,23 +643,28 @@ public class InputConfigurator extends ConfiguratorBase {
    * @throws IOException
    * @since 1.6.0
    */
-  protected static BatchScanConfig getDefaultTableConfig(Class<?> implementingClass, Configuration conf) throws IOException {
+  protected static Map.Entry<String, BatchScanConfig> getDefaultBatchScanConfig(Class<?> implementingClass, Configuration conf) {
     String tableName = getInputTableName(implementingClass, conf);
     if (tableName != null) {
-      BatchScanConfig queryConfig = new BatchScanConfig(getInputTableName(implementingClass, conf));
+      BatchScanConfig queryConfig = new BatchScanConfig();
       List<IteratorSetting> itrs = getIterators(implementingClass, conf);
       if (itrs != null)
         queryConfig.setIterators(itrs);
       Set<Pair<Text,Text>> columns = getFetchedColumns(implementingClass, conf);
       if (columns != null)
         queryConfig.fetchColumns(columns);
-      List<Range> ranges = getRanges(implementingClass, conf);
+      List<Range> ranges = null;
+      try {
+        ranges = getRanges(implementingClass, conf);
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
       if (ranges != null)
         queryConfig.setRanges(ranges);
-      
+
       queryConfig.setAutoAdjustRanges(getAutoAdjustRanges(implementingClass, conf)).setUseIsolatedScanners(isIsolated(implementingClass, conf))
           .setUseLocalIterators(usesLocalIterators(implementingClass, conf)).setOfflineScan(isOfflineScan(implementingClass, conf));
-      return queryConfig;
+      return Maps.immutableEntry(tableName, queryConfig);
     }
     return null;
   }
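
The MapWritable-plus-Base64 scheme introduced above can be checked in isolation. A sketch of the round trip that setBatchScanConfigs and getBatchScanConfigs perform (the table name is a placeholder):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.accumulo.core.client.mapreduce.BatchScanConfig;
    import org.apache.commons.codec.binary.Base64;
    import org.apache.hadoop.io.MapWritable;
    import org.apache.hadoop.io.Text;

    public class MapWritableRoundTrip {
      public static void main(String[] args) throws IOException {
        // Serialize Text(table name) -> BatchScanConfig, as setBatchScanConfigs does.
        MapWritable out = new MapWritable();
        out.put(new Text("table1"), new BatchScanConfig());
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        out.write(new DataOutputStream(baos));
        String encoded = new String(Base64.encodeBase64(baos.toByteArray()));

        // Deserialize, as getBatchScanConfigs does. MapWritable rebuilds values
        // reflectively, which is why this commit gives BatchScanConfig a
        // public no-arg constructor.
        MapWritable in = new MapWritable();
        byte[] bytes = Base64.decodeBase64(encoded.getBytes());
        in.readFields(new DataInputStream(new ByteArrayInputStream(bytes)));
        BatchScanConfig config = (BatchScanConfig) in.get(new Text("table1"));
        System.out.println(config != null); // true
      }
    }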

http://git-wip-us.apache.org/repos/asf/accumulo/blob/9a63ff4e/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloInputFormatTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloInputFormatTest.java b/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloInputFormatTest.java
index 70cf60d..6e44222 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloInputFormatTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloInputFormatTest.java
@@ -24,7 +24,9 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.BatchWriterConfig;
@@ -254,10 +256,14 @@ public class AccumuloInputFormatTest {
       AccumuloInputFormat.setConnectorInfo(job, user, new PasswordToken(pass));
       AccumuloInputFormat.setMockInstance(job, INSTANCE_NAME);
       
-      BatchScanConfig tableConfig1 = new BatchScanConfig(table1);
-      BatchScanConfig tableConfig2 = new BatchScanConfig(table2);
+      BatchScanConfig tableConfig1 = new BatchScanConfig();
+      BatchScanConfig tableConfig2 = new BatchScanConfig();
+
+      Map<String, BatchScanConfig> configMap = new HashMap<String, BatchScanConfig>();
+      configMap.put(table1, tableConfig1);
+      configMap.put(table2, tableConfig2);
       
-      AccumuloInputFormat.setTableQueryConfigs(job, tableConfig1, tableConfig2);
+      AccumuloInputFormat.setTableQueryConfigs(job, configMap);
       
       job.setMapperClass(TestMapper.class);
       job.setMapOutputKeyClass(Key.class);
@@ -306,16 +312,19 @@ public class AccumuloInputFormatTest {
     
     JobConf job = new JobConf();
     
-    BatchScanConfig table1 = new BatchScanConfig(TEST_TABLE_1).setRanges(Collections.singletonList(new Range("a", "b")))
+    BatchScanConfig table1 = new BatchScanConfig().setRanges(Collections.singletonList(new Range("a", "b")))
         .fetchColumns(Collections.singleton(new Pair<Text, Text>(new Text("CF1"), new Text("CQ1"))))
         .setIterators(Collections.singletonList(new IteratorSetting(50, "iter1", "iterclass1")));
     
-    BatchScanConfig table2 = new BatchScanConfig(TEST_TABLE_2).setRanges(Collections.singletonList(new Range("a", "b")))
+    BatchScanConfig table2 = new BatchScanConfig().setRanges(Collections.singletonList(new Range("a", "b")))
         .fetchColumns(Collections.singleton(new Pair<Text, Text>(new Text("CF1"), new Text("CQ1"))))
         .setIterators(Collections.singletonList(new IteratorSetting(50, "iter1", "iterclass1")));
     
-    AccumuloInputFormat.setTableQueryConfigs(job, table1, table2);
-    
+    Map<String, BatchScanConfig> configMap = new HashMap<String, BatchScanConfig>();
+    configMap.put(TEST_TABLE_1, table1);
+    configMap.put(TEST_TABLE_2, table2);
+    AccumuloInputFormat.setTableQueryConfigs(job, configMap);
+
     assertEquals(table1, AccumuloInputFormat.getTableQueryConfig(job, TEST_TABLE_1));
     assertEquals(table2, AccumuloInputFormat.getTableQueryConfig(job, TEST_TABLE_2));
   }
@@ -328,21 +337,20 @@ public class AccumuloInputFormatTest {
     
     JobConf job = new JobConf();
     
-    BatchScanConfig table1 = new BatchScanConfig(TEST_TABLE_1).setRanges(Collections.singletonList(new Range("a", "b")))
+    BatchScanConfig tableConfig = new BatchScanConfig().setRanges(Collections.singletonList(new Range("a", "b")))
         .fetchColumns(Collections.singleton(new Pair<Text, Text>(new Text("CF1"), new Text("CQ1"))))
         .setIterators(Collections.singletonList(new IteratorSetting(50, "iter1", "iterclass1")));
     
-    BatchScanConfig table2 = new BatchScanConfig(TEST_TABLE_2).setRanges(Collections.singletonList(new Range("a", "b")))
-        .fetchColumns(Collections.singleton(new Pair<Text, Text>(new Text("CF1"), new Text("CQ1"))))
-        .setIterators(Collections.singletonList(new IteratorSetting(50, "iter1", "iterclass1")));
+    Map<String, BatchScanConfig> configMap = new HashMap<String, BatchScanConfig>();
+    configMap.put(TEST_TABLE_1, tableConfig);
     
-    AccumuloInputFormat.setTableQueryConfigs(job, table1);
-    AccumuloInputFormat.setInputTableName(job, table2.getTableName());
-    AccumuloInputFormat.setRanges(job, table2.getRanges());
-    AccumuloInputFormat.fetchColumns(job, table2.getFetchedColumns());
-    AccumuloInputFormat.addIterator(job, table2.getIterators().get(0));
+    AccumuloInputFormat.setTableQueryConfigs(job, configMap);
+    AccumuloInputFormat.setInputTableName(job, TEST_TABLE_2);
+    AccumuloInputFormat.setRanges(job, tableConfig.getRanges());
+    AccumuloInputFormat.fetchColumns(job, tableConfig.getFetchedColumns());
+    AccumuloInputFormat.addIterator(job, tableConfig.getIterators().get(0));
     
-    assertEquals(table1, AccumuloInputFormat.getTableQueryConfig(job, TEST_TABLE_1));
-    assertEquals(table2, AccumuloInputFormat.getTableQueryConfig(job, TEST_TABLE_2));
+    assertEquals(tableConfig, AccumuloInputFormat.getTableQueryConfig(job, TEST_TABLE_1));
+    assertEquals(tableConfig, AccumuloInputFormat.getTableQueryConfig(job, TEST_TABLE_2));
   }
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/9a63ff4e/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormatTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormatTest.java b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormatTest.java
index 6b2eba1..6bbb1d6 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormatTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormatTest.java
@@ -23,6 +23,8 @@ import org.junit.Test;
 
 import java.io.IOException;
 import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
@@ -87,10 +89,14 @@ public class AccumuloMultiTableInputFormatTest {
       
       AccumuloMultiTableInputFormat.setConnectorInfo(job, user, new PasswordToken(pass));
       
-      BatchScanConfig tableConfig1 = new BatchScanConfig(table1);
-      BatchScanConfig tableConfig2 = new BatchScanConfig(table2);
+      BatchScanConfig tableConfig1 = new BatchScanConfig();
+      BatchScanConfig tableConfig2 = new BatchScanConfig();
       
-      AccumuloMultiTableInputFormat.setBatchScanConfigs(job, tableConfig1, tableConfig2);
+      Map<String, BatchScanConfig> configMap = new HashMap<String, BatchScanConfig>();
+      configMap.put(table1, tableConfig1);
+      configMap.put(table2, tableConfig2);
+      
+      AccumuloMultiTableInputFormat.setBatchScanConfigs(job, configMap);
       AccumuloMultiTableInputFormat.setMockInstance(job, INSTANCE_NAME);
       
       job.setMapperClass(TestMapper.class);
@@ -145,18 +151,20 @@ public class AccumuloMultiTableInputFormatTest {
     
     Job job = new Job();
     
-    BatchScanConfig table1 = new BatchScanConfig(TEST_TABLE_1).setRanges(Collections.singletonList(new Range("a", "b")))
-        .fetchColumns(Collections.singleton(new Pair<Text, Text>(new Text("CF1"), new Text("CQ1"))))
-        .setIterators(Collections.singletonList(new IteratorSetting(50, "iter1", "iterclass1")));
     
-    BatchScanConfig table2 = new BatchScanConfig(TEST_TABLE_2).setRanges(Collections.singletonList(new Range("a", "b")))
+    
+    BatchScanConfig tableConfig = new BatchScanConfig().setRanges(Collections.singletonList(new Range("a", "b")))
         .fetchColumns(Collections.singleton(new Pair<Text, Text>(new Text("CF1"), new Text("CQ1"))))
         .setIterators(Collections.singletonList(new IteratorSetting(50, "iter1", "iterclass1")));
+
+    Map<String, BatchScanConfig> configMap = new HashMap<String, BatchScanConfig>();
+    configMap.put(TEST_TABLE_1, tableConfig);
+    configMap.put(TEST_TABLE_2, tableConfig);
     
-    AccumuloMultiTableInputFormat.setBatchScanConfigs(job, table1, table2);
+    AccumuloMultiTableInputFormat.setBatchScanConfigs(job, configMap);
     
-    assertEquals(table1, AccumuloMultiTableInputFormat.getBatchScanConfig(job, TEST_TABLE_1));
-    assertEquals(table2, AccumuloMultiTableInputFormat.getBatchScanConfig(job, TEST_TABLE_2));
+    assertEquals(tableConfig, AccumuloMultiTableInputFormat.getBatchScanConfig(job, TEST_TABLE_1));
+    assertEquals(tableConfig, AccumuloMultiTableInputFormat.getBatchScanConfig(job, TEST_TABLE_2));
   }
   
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/9a63ff4e/core/src/test/java/org/apache/accumulo/core/conf/TableQueryConfigTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/accumulo/core/conf/TableQueryConfigTest.java b/core/src/test/java/org/apache/accumulo/core/conf/TableQueryConfigTest.java
index 9910182..a2b0db0 100644
--- a/core/src/test/java/org/apache/accumulo/core/conf/TableQueryConfigTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/conf/TableQueryConfigTest.java
@@ -38,12 +38,11 @@ import org.junit.Test;
 
 public class TableQueryConfigTest {
   
-  private static final String TEST_TABLE = "TEST_TABLE";
   private BatchScanConfig tableQueryConfig;
   
   @Before
   public void setUp() {
-    tableQueryConfig = new BatchScanConfig(TEST_TABLE);
+    tableQueryConfig = new BatchScanConfig();
   }
   
   @Test

