accumulo-commits mailing list archives

From cjno...@apache.org
Subject [10/11] git commit: Fixing some formatting. Adding some comments. ACCUMULO-391
Date Wed, 02 Oct 2013 03:22:46 GMT
Fixing some formatting. Adding some comments. ACCUMULO-391


Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/e4e05c80
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/e4e05c80
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/e4e05c80

Branch: refs/heads/ACCUMULO-391
Commit: e4e05c804ea7f486290181f0246cf6b2880f5d1a
Parents: 10b4eb8
Author: Corey J. Nolet <cjnolet@gmail.com>
Authored: Sun Sep 29 21:05:55 2013 -0400
Committer: Corey J. Nolet <cjnolet@gmail.com>
Committed: Tue Oct 1 21:46:17 2013 -0400

----------------------------------------------------------------------
 .../core/client/impl/thrift/ClientService.java  |  16 +-
 .../thrift/ThriftTableOperationException.java   |   4 +-
 .../core/client/mapred/InputFormatBase.java     | 116 +++++----
 .../core/client/mapreduce/InputFormatBase.java  | 239 ++++++++++---------
 .../mapreduce/lib/util/InputConfigurator.java   | 129 +++++-----
 .../core/master/thrift/MasterClientService.java |  16 +-
 6 files changed, 270 insertions(+), 250 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/accumulo/blob/e4e05c80/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ClientService.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ClientService.java b/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ClientService.java
index f44d4a6..488e065 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ClientService.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ClientService.java
@@ -17751,7 +17751,7 @@ import org.slf4j.LoggerFactory;
             return CREDENTIALS;
           case 2: // PRINCIPAL
             return PRINCIPAL;
-          case 3: // TABLE
+          case 3: // TABLE_NAME
             return TABLE_NAME;
           case 4: // TBL_PERM
             return TBL_PERM;
@@ -18329,7 +18329,7 @@ import org.slf4j.LoggerFactory;
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
               }
               break;
-            case 3: // TABLE
+            case 3: // TABLE_NAME
               if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
                 struct.tableName = iprot.readString();
                 struct.setTableNameIsSet(true);
@@ -21107,7 +21107,7 @@ import org.slf4j.LoggerFactory;
             return CREDENTIALS;
           case 2: // PRINCIPAL
             return PRINCIPAL;
-          case 3: // TABLE
+          case 3: // TABLE_NAME
             return TABLE_NAME;
           case 4: // PERMISSION
             return PERMISSION;
@@ -21685,7 +21685,7 @@ import org.slf4j.LoggerFactory;
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
               }
               break;
-            case 3: // TABLE
+            case 3: // TABLE_NAME
               if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
                 struct.tableName = iprot.readString();
                 struct.setTableNameIsSet(true);
@@ -22327,7 +22327,7 @@ import org.slf4j.LoggerFactory;
             return CREDENTIALS;
           case 2: // PRINCIPAL
             return PRINCIPAL;
-          case 3: // TABLE
+          case 3: // TABLE_NAME
             return TABLE_NAME;
           case 4: // PERMISSION
             return PERMISSION;
@@ -22905,7 +22905,7 @@ import org.slf4j.LoggerFactory;
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
               }
               break;
-            case 3: // TABLE
+            case 3: // TABLE_NAME
               if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
                 struct.tableName = iprot.readString();
                 struct.setTableNameIsSet(true);
@@ -24536,7 +24536,7 @@ import org.slf4j.LoggerFactory;
             return TINFO;
           case 3: // CREDENTIALS
             return CREDENTIALS;
-          case 2: // TABLE
+          case 2: // TABLE_NAME
             return TABLE_NAME;
           default:
             return null;
@@ -24960,7 +24960,7 @@ import org.slf4j.LoggerFactory;
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
               }
               break;
-            case 2: // TABLE
+            case 2: // TABLE_NAME
               if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
                 struct.tableName = iprot.readString();
                 struct.setTableNameIsSet(true);

http://git-wip-us.apache.org/repos/asf/accumulo/blob/e4e05c80/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ThriftTableOperationException.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ThriftTableOperationException.java b/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ThriftTableOperationException.java
index dd2e192..3863e75 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ThriftTableOperationException.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ThriftTableOperationException.java
@@ -106,7 +106,7 @@ import org.slf4j.LoggerFactory;
       switch(fieldId) {
         case 1: // TABLE_ID
           return TABLE_ID;
-        case 2: // TABLE
+        case 2: // TABLE_NAME
           return TABLE_NAME;
         case 3: // OP
           return OP;
@@ -680,7 +680,7 @@ import org.slf4j.LoggerFactory;
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
-          case 2: // TABLE
+          case 2: // TABLE_NAME
             if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
               struct.tableName = iprot.readString();
               struct.setTableNameIsSet(true);

http://git-wip-us.apache.org/repos/asf/accumulo/blob/e4e05c80/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java b/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
index cbb069d..c5831a1 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
@@ -133,7 +133,7 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
    * @see #setConnectorInfo(JobConf, String, AuthenticationToken)
    */
   protected static Boolean isConnectorInfoSet(JobConf job) {
-    return InputConfigurator.isConnectorInfoSet(CLASS,job);
+    return InputConfigurator.isConnectorInfoSet(CLASS, job);
   }
 
   /**
@@ -314,7 +314,6 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
    *          the ranges that will be mapped over
    * @since 1.5.0
    */
-  @Deprecated
   public static void setRanges(JobConf job, Collection<Range> ranges) {
     InputConfigurator.setRanges(CLASS, job, ranges);
   }
@@ -330,9 +329,8 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
    * @since 1.5.0
    * @see #setRanges(JobConf, Collection)
    */
-  @Deprecated
   protected static List<Range> getRanges(JobConf job) throws IOException {
-    return InputConfigurator.getRanges(CLASS,job);
+    return InputConfigurator.getRanges(CLASS, job);
   }
 
   /**
@@ -385,7 +383,7 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
    * @see #addIterator(JobConf, IteratorSetting)
    */
   protected static List<IteratorSetting> getIterators(JobConf job) {
-    return InputConfigurator.getIterators(CLASS,job);
+    return InputConfigurator.getIterators(CLASS, job);
   }
 
   /**
@@ -568,7 +566,7 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
     protected long numKeysRead;
     protected Iterator<Entry<Key,Value>> scannerIterator;
     protected RangeInputSplit split;
-
+  
     /**
      * Apply the configured iterators from the configuration to the scanner.
      * 
@@ -583,7 +581,7 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
         scanner.addScanIterator(iterator);
       }
     }
-
+  
     /**
      * Initialize a scanner over the given input split using this task attempt configuration.
      */
@@ -595,11 +593,11 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
       String user = getPrincipal(job);
       AuthenticationToken token = getAuthenticationToken(job);
       Authorizations authorizations = getScanAuthorizations(job);
-
+    
       // in case the table name changed, we can still use the previous name for terms of configuration,
       // but for the scanner, we'll need to reference the new table name.
       String actualNameForId = split.getTableName();
-      if(!(instance instanceof MockInstance)) {
+      if (!(instance instanceof MockInstance)) {
         try {
           actualNameForId = Tables.getTableName(instance, split.getTableId());
           if (!actualNameForId.equals(split.getTableName()))
@@ -608,7 +606,7 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
           throw new IOException("The specified table was not found for id=" + split.getTableId());
         }
       }
-
+    
       try {
         log.debug("Creating connector with user: " + user);
         Connector conn = instance.getConnector(user, token);
@@ -631,7 +629,7 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
       } catch (Exception e) {
         throw new IOException(e);
       }
-
+    
       // setup a scanner within the bounds of this split
       for (Pair<Text,Text> c : getFetchedColumns(job)) {
         if (c.getSecond() != null) {
@@ -642,58 +640,58 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
           scanner.fetchColumnFamily(c.getFirst());
         }
       }
-
+    
       scanner.setRange(split.getRange());
-
+    
       numKeysRead = 0;
-
+    
       // do this last after setting all scanner options
       scannerIterator = scanner.iterator();
     }
-
+  
     @Override
     public void close() {}
-
+  
     @Override
     public long getPos() throws IOException {
       return numKeysRead;
     }
-
+  
     @Override
     public float getProgress() throws IOException {
       if (numKeysRead > 0 && currentKey == null)
         return 1.0f;
       return split.getProgress(currentKey);
     }
-
+  
     protected Key currentKey = null;
-
+  
   }
 
   Map<String,Map<KeyExtent,List<Range>>> binOfflineTable(JobConf job, String tableName, List<Range> ranges) throws TableNotFoundException, AccumuloException,
       AccumuloSecurityException {
-
+  
     Map<String,Map<KeyExtent,List<Range>>> binnedRanges = new HashMap<String,Map<KeyExtent,List<Range>>>();
-
+  
     Instance instance = getInstance(job);
     Connector conn = instance.getConnector(getPrincipal(job), getAuthenticationToken(job));
     String tableId = Tables.getTableId(instance, tableName);
-
+  
     if (Tables.getTableState(instance, tableId) != TableState.OFFLINE) {
       Tables.clearCache(instance);
       if (Tables.getTableState(instance, tableId) != TableState.OFFLINE) {
         throw new AccumuloException("Table is online " + tableName + "(" + tableId + ") cannot scan table in offline mode ");
       }
     }
-
+  
     for (Range range : ranges) {
       Text startRow;
-
+    
       if (range.getStartKey() != null)
         startRow = range.getStartKey().getRow();
       else
         startRow = new Text();
-
+    
       Range metadataRange = new Range(new KeyExtent(new Text(tableId), startRow, null).getMetadataEntry(), true, null, false);
       Scanner scanner = conn.createScanner(MetadataTable.NAME, Authorizations.EMPTY);
       TabletsSection.TabletColumnFamily.PREV_ROW_COLUMN.fetch(scanner);
@@ -701,70 +699,70 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
       scanner.fetchColumnFamily(TabletsSection.CurrentLocationColumnFamily.NAME);
       scanner.fetchColumnFamily(TabletsSection.FutureLocationColumnFamily.NAME);
       scanner.setRange(metadataRange);
-
+    
       RowIterator rowIter = new RowIterator(scanner);
-
+    
       KeyExtent lastExtent = null;
-
+    
       while (rowIter.hasNext()) {
         Iterator<Entry<Key,Value>> row = rowIter.next();
         String last = "";
         KeyExtent extent = null;
         String location = null;
-
+      
         while (row.hasNext()) {
           Entry<Key,Value> entry = row.next();
           Key key = entry.getKey();
-
+        
           if (key.getColumnFamily().equals(TabletsSection.LastLocationColumnFamily.NAME)) {
             last = entry.getValue().toString();
           }
-
+        
           if (key.getColumnFamily().equals(TabletsSection.CurrentLocationColumnFamily.NAME)
               || key.getColumnFamily().equals(TabletsSection.FutureLocationColumnFamily.NAME)) {
             location = entry.getValue().toString();
           }
-
+        
           if (TabletsSection.TabletColumnFamily.PREV_ROW_COLUMN.hasColumns(key)) {
             extent = new KeyExtent(key.getRow(), entry.getValue());
           }
-
+        
         }
-
+      
         if (location != null)
           return null;
-
+      
         if (!extent.getTableId().toString().equals(tableId)) {
           throw new AccumuloException("Saw unexpected table Id " + tableId + " " + extent);
         }
-
+      
         if (lastExtent != null && !extent.isPreviousExtent(lastExtent)) {
           throw new AccumuloException(" " + lastExtent + " is not previous extent " + extent);
         }
-
+      
         Map<KeyExtent,List<Range>> tabletRanges = binnedRanges.get(last);
         if (tabletRanges == null) {
           tabletRanges = new HashMap<KeyExtent,List<Range>>();
           binnedRanges.put(last, tabletRanges);
         }
-
+      
         List<Range> rangeList = tabletRanges.get(extent);
         if (rangeList == null) {
           rangeList = new ArrayList<Range>();
           tabletRanges.put(extent, rangeList);
         }
-
+      
         rangeList.add(range);
-
+      
         if (extent.getEndRow() == null || range.afterEndKey(new Key(extent.getEndRow()).followingKey(PartialKey.ROW))) {
           break;
         }
-
+      
         lastExtent = extent;
       }
-
+    
     }
-
+  
     return binnedRanges;
   }
 
@@ -775,19 +773,19 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
   public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
     log.setLevel(getLogLevel(job));
     validateOptions(job);
-
+  
     boolean autoAdjust = getAutoAdjustRanges(job);
     List<Range> tablesRanges = getRanges(job);
     LinkedList<InputSplit> splits = new LinkedList<InputSplit>();
-
+  
     String tableName = getInputTableName(job);
     List<Range> ranges = autoAdjust ? Range.mergeOverlapping(tablesRanges) : tablesRanges;
-
+  
     if (ranges.isEmpty()) {
       ranges = new ArrayList<Range>(1);
       ranges.add(new Range());
     }
-
+  
     // get the metadata information for these ranges
     Map<String,Map<KeyExtent,List<Range>>> binnedRanges = new HashMap<String,Map<KeyExtent,List<Range>>>();
     TabletLocator tl;
@@ -806,10 +804,10 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
         // its possible that the cache could contain complete, but old information about a tables tablets... so clear it
         tl.invalidateCache();
         Credentials creds = new Credentials(getPrincipal(job), AuthenticationTokenSerializer.deserialize(getTokenClass(job), getToken(job)));
-
+      
         while (!tl.binRanges(creds, ranges, binnedRanges).isEmpty()) {
           if (!(instance instanceof MockInstance)) {
-              tableId = Tables.getTableId(instance, tableName);
+            tableId = Tables.getTableId(instance, tableName);
             if (!Tables.exists(instance, tableId))
               throw new TableDeletedException(tableId);
             if (Tables.getTableState(instance, tableId) == TableState.OFFLINE)
@@ -825,12 +823,12 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
       throw new IOException(e);
     }
     HashMap<Range,ArrayList<String>> splitsToAdd = null;
-
+  
     if (!autoAdjust)
       splitsToAdd = new HashMap<Range,ArrayList<String>>();
-
+  
     HashMap<String,String> hostNameCache = new HashMap<String,String>();
-
+  
     for (Entry<String,Map<KeyExtent,List<Range>>> tserverBin : binnedRanges.entrySet()) {
       String ip = tserverBin.getKey().split(":", 2)[0];
       String location = hostNameCache.get(ip);
@@ -839,7 +837,7 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
         location = inetAddress.getHostName();
         hostNameCache.put(ip, location);
       }
-
+    
       for (Entry<KeyExtent,List<Range>> extentRanges : tserverBin.getValue().entrySet()) {
         Range ke = extentRanges.getKey().toDataRange();
         for (Range r : extentRanges.getValue()) {
@@ -857,11 +855,11 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
         }
       }
     }
-
+  
     if (!autoAdjust)
       for (Entry<Range,ArrayList<String>> entry : splitsToAdd.entrySet())
         splits.add(new RangeInputSplit(tableName, tableId, entry.getKey(), entry.getValue().toArray(new String[0])));
-
+  
     return splits.toArray(new InputSplit[splits.size()]);
   }
 
@@ -869,17 +867,17 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V> {
    * The Class RangeInputSplit. Encapsulates an Accumulo range for use in Map Reduce jobs.
    */
   public static class RangeInputSplit extends org.apache.accumulo.core.client.mapreduce.InputFormatBase.RangeInputSplit implements InputSplit {
-
+  
     public RangeInputSplit() {
       super();
     }
-
+  
     public RangeInputSplit(RangeInputSplit split) throws IOException {
       super(split);
     }
-
+  
     protected RangeInputSplit(String table, String tableId, Range range, String[] locations) {
-      super(table, tableId,  range, locations);
+      super(table, tableId, range, locations);
     }
   }
 

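For reference, a minimal, hypothetical driver sketch for the mapred-side API touched above (this commit drops the stray @Deprecated markers from setRanges/getRanges). The instance name, ZooKeeper quorum, principal, password, table, and range are placeholders, and the static setters are the ones referenced by the InputFormatBase javadoc rather than anything added by this commit.

import java.util.Collections;

import org.apache.accumulo.core.client.mapred.AccumuloInputFormat;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.data.Range;
import org.apache.hadoop.mapred.JobConf;

public class MapredScanDriver {
  public static void main(String[] args) throws Exception {
    JobConf job = new JobConf();
    // Connector info must be set before the other input options are validated.
    AccumuloInputFormat.setConnectorInfo(job, "root", new PasswordToken("secret"));
    AccumuloInputFormat.setZooKeeperInstance(job, "myInstance", "zk1:2181,zk2:2181");
    AccumuloInputFormat.setInputTableName(job, "mytable");
    // setRanges/getRanges are plain (non-deprecated) static helpers again.
    AccumuloInputFormat.setRanges(job, Collections.singleton(new Range("a", "m")));
  }
}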
http://git-wip-us.apache.org/repos/asf/accumulo/blob/e4e05c80/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
index 98a3ae4..2cec670 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
@@ -91,10 +91,10 @@ import org.apache.log4j.Logger;
  * See {@link AccumuloInputFormat} for an example implementation.
  */
 public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
-  
+
   private static final Class<?> CLASS = AccumuloInputFormat.class;
   protected static final Logger log = Logger.getLogger(CLASS);
-  
+
   /**
    * Sets the connector information needed to communicate with Accumulo in this job.
    * 
@@ -114,7 +114,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   public static void setConnectorInfo(Job job, String principal, AuthenticationToken token) throws AccumuloSecurityException {
     InputConfigurator.setConnectorInfo(CLASS, job.getConfiguration(), principal, token);
   }
-  
+
   /**
    * Sets the connector information needed to communicate with Accumulo in this job.
    * 
@@ -133,7 +133,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   public static void setConnectorInfo(Job job, String principal, String tokenFile) throws AccumuloSecurityException {
     InputConfigurator.setConnectorInfo(CLASS, job.getConfiguration(), principal, tokenFile);
   }
-  
+
   /**
    * Determines if the connector has been configured.
    * 
@@ -146,7 +146,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   protected static Boolean isConnectorInfoSet(JobContext context) {
     return InputConfigurator.isConnectorInfoSet(CLASS, getConfiguration(context));
   }
-  
+
   /**
    * Gets the user name from the configuration.
    * 
@@ -157,9 +157,9 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
    * @see #setConnectorInfo(Job, String, AuthenticationToken)
    */
   protected static String getPrincipal(JobContext context) {
-    return InputConfigurator.getPrincipal(CLASS,getConfiguration(context));
+    return InputConfigurator.getPrincipal(CLASS, getConfiguration(context));
   }
-  
+
   /**
    * Gets the table name from the configuration.
    * 
@@ -170,9 +170,9 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
    * @see #setInputTableName(Job, String)
    */
   protected static String getInputTableName(JobContext context) {
-    return InputConfigurator.getInputTableName(CLASS,getConfiguration(context));
+    return InputConfigurator.getInputTableName(CLASS, getConfiguration(context));
   }
-  
+
   /**
    * Gets the serialized token class from either the configuration or the token file.
    * 
@@ -183,7 +183,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   protected static String getTokenClass(JobContext context) {
     return getAuthenticationToken(context).getClass().getName();
   }
-  
+
   /**
    * Gets the serialized token from either the configuration or the token file.
    * 
@@ -194,7 +194,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   protected static byte[] getToken(JobContext context) {
     return AuthenticationTokenSerializer.serialize(getAuthenticationToken(context));
   }
-  
+
   /**
    * Gets the authenticated token from either the specified token file or directly from the configuration, whichever was used when the job was configured.
    * 
@@ -206,9 +206,9 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
    * @see #setConnectorInfo(Job, String, String)
    */
   protected static AuthenticationToken getAuthenticationToken(JobContext context) {
-    return InputConfigurator.getAuthenticationToken(CLASS,getConfiguration(context));
+    return InputConfigurator.getAuthenticationToken(CLASS, getConfiguration(context));
   }
-  
+
   /**
    * Configures a {@link ZooKeeperInstance} for this job.
    * 
@@ -223,7 +223,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   public static void setZooKeeperInstance(Job job, String instanceName, String zooKeepers) {
     InputConfigurator.setZooKeeperInstance(CLASS, job.getConfiguration(), instanceName, zooKeepers);
   }
-  
+
   /**
    * Configures a {@link MockInstance} for this job.
    * 
@@ -234,9 +234,9 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
    * @since 1.5.0
    */
   public static void setMockInstance(Job job, String instanceName) {
-    InputConfigurator.setMockInstance(CLASS,job.getConfiguration(),instanceName);
+    InputConfigurator.setMockInstance(CLASS, job.getConfiguration(), instanceName);
   }
-  
+
   /**
    * Initializes an Accumulo {@link Instance} based on the configuration.
    * 
@@ -250,7 +250,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   protected static Instance getInstance(JobContext context) {
     return InputConfigurator.getInstance(CLASS, getConfiguration(context));
   }
-  
+
   /**
    * Sets the log level for this job.
    * 
@@ -263,7 +263,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   public static void setLogLevel(Job job, Level level) {
     InputConfigurator.setLogLevel(CLASS, job.getConfiguration(), level);
   }
-  
+
   /**
    * Gets the log level from this configuration.
    * 
@@ -276,7 +276,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   protected static Level getLogLevel(JobContext context) {
     return InputConfigurator.getLogLevel(CLASS, getConfiguration(context));
   }
-  
+
   /**
    * Sets the name of the input table, over which this job will scan.
    * 
@@ -289,7 +289,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   public static void setInputTableName(Job job, String tableName) {
     InputConfigurator.setInputTableName(CLASS, job.getConfiguration(), tableName);
   }
-  
+
   /**
    * Sets the {@link Authorizations} used to scan. Must be a subset of the user's authorization. Defaults to the empty set.
    * 
@@ -300,9 +300,9 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
    * @since 1.5.0
    */
   public static void setScanAuthorizations(Job job, Authorizations auths) {
-    InputConfigurator.setScanAuthorizations(CLASS,job.getConfiguration(),auths);
+    InputConfigurator.setScanAuthorizations(CLASS, job.getConfiguration(), auths);
   }
-  
+
   /**
    * Gets the authorizations to set for the scans from the configuration.
    * 
@@ -315,7 +315,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   protected static Authorizations getScanAuthorizations(JobContext context) {
     return InputConfigurator.getScanAuthorizations(CLASS, getConfiguration(context));
   }
-  
+
   /**
    * Sets the input ranges to scan for all tables associated with this job. This will be added to any per-table ranges that have been set using
    * {@link #setRanges(org.apache.hadoop.mapreduce.Job, java.util.Collection)}
@@ -329,7 +329,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   public static void setRanges(Job job, Collection<Range> ranges) {
     InputConfigurator.setRanges(CLASS, job.getConfiguration(), ranges);
   }
-  
+
   /**
    * Gets the ranges to scan over from a job.
    * 
@@ -343,7 +343,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   protected static List<Range> getRanges(JobContext context) throws IOException {
     return InputConfigurator.getRanges(CLASS, getConfiguration(context));
   }
-  
+
   /**
    * Restricts the columns that will be mapped over for this job for the default input table.
    * 
@@ -357,7 +357,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   public static void fetchColumns(Job job, Collection<Pair<Text,Text>> columnFamilyColumnQualifierPairs) {
     InputConfigurator.fetchColumns(CLASS, job.getConfiguration(), columnFamilyColumnQualifierPairs);
   }
-  
+
   /**
    * Gets the columns to be mapped over from this job.
    * 
@@ -370,7 +370,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   protected static Set<Pair<Text,Text>> getFetchedColumns(JobContext context) {
     return InputConfigurator.getFetchedColumns(CLASS, getConfiguration(context));
   }
-  
+
   /**
    * Encode an iterator on the default all tables for this job.
    * 
@@ -383,7 +383,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   public static void addIterator(Job job, IteratorSetting cfg) {
     InputConfigurator.addIterator(CLASS, job.getConfiguration(), cfg);
   }
-  
+
   /**
    * Gets a list of the iterator settings (for iterators to apply to a scanner) from this configuration.
    * 
@@ -396,7 +396,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   protected static List<IteratorSetting> getIterators(JobContext context) {
     return InputConfigurator.getIterators(CLASS, getConfiguration(context));
   }
-  
+
   /**
    * Controls the automatic adjustment of ranges for this job. This feature merges overlapping ranges, then splits them to align with tablet boundaries.
    * Disabling this feature will cause exactly one Map task to be created for each specified range. The default setting is enabled. *
@@ -414,7 +414,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   public static void setAutoAdjustRanges(Job job, boolean enableFeature) {
     InputConfigurator.setAutoAdjustRanges(CLASS, job.getConfiguration(), enableFeature);
   }
-  
+
   /**
    * Determines whether a configuration has auto-adjust ranges enabled.
    * 
@@ -430,6 +430,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
 
   /**
    * Sets the {@link TableQueryConfig} objects on the given Hadoop configuration
+   * 
    * @param job
    *          the Hadoop job instance to be configured
    * @param configs
@@ -442,14 +443,13 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
 
   /**
    * Fetches all {@link TableQueryConfig}s that have been set on the given Hadoop configuration.
-   *
+   * 
    * <p>
-   * Note this also returns the {@link TableQueryConfig} representing the table configurations set through the single
-   * table input methods ({@link #setInputTableName(org.apache.hadoop.mapreduce.Job, String)},
-   * {@link #setRanges(org.apache.hadoop.mapreduce.Job, java.util.Collection)},
+   * Note this also returns the {@link TableQueryConfig} representing the table configurations set through the single table input methods (
+   * {@link #setInputTableName(org.apache.hadoop.mapreduce.Job, String)}, {@link #setRanges(org.apache.hadoop.mapreduce.Job, java.util.Collection)},
    * {@link #fetchColumns(org.apache.hadoop.mapreduce.Job, java.util.Collection)},
    * {@link #addIterator(org.apache.hadoop.mapreduce.Job, org.apache.accumulo.core.client.IteratorSetting)}, etc...)
-   *
+   * 
    * @param job
    *          the Hadoop job instance to be configured
    * @return
@@ -460,9 +460,10 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
 
   /**
    * Fetches a {@link TableQueryConfig} that has been set on the configuration for a specific table.
-   *
+   * 
    * <p>
    * null is returned in the event that the table doesn't exist.
+   * 
    * @param job
    *          the Hadoop job instance to be configured
    * @param tableName
@@ -470,9 +471,9 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
    * @return the {@link TableQueryConfig} for the given table
    */
   public static TableQueryConfig getTableQueryConfig(JobContext job, String tableName) {
-    return InputConfigurator.getTableQueryConfigs(CLASS, getConfiguration(job), tableName);
+    return InputConfigurator.getTableQueryConfig(CLASS,getConfiguration(job),tableName);
   }
-  
+
   /**
    * Controls the use of the {@link IsolatedScanner} in this job.
    * 
@@ -488,7 +489,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   public static void setScanIsolation(Job job, boolean enableFeature) {
     InputConfigurator.setScanIsolation(CLASS, job.getConfiguration(), enableFeature);
   }
-  
+
   /**
    * Determines whether a configuration has isolation enabled.
    * 
@@ -501,7 +502,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   protected static boolean isIsolated(JobContext context) {
     return InputConfigurator.isIsolated(CLASS, getConfiguration(context));
   }
-  
+
   /**
    * Controls the use of the {@link ClientSideIteratorScanner} in this job. Enabling this feature will cause the iterator stack to be constructed within the Map
    * task, rather than within the Accumulo TServer. To use this feature, all classes needed for those iterators must be available on the classpath for the task.
@@ -518,7 +519,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   public static void setLocalIterators(Job job, boolean enableFeature) {
     InputConfigurator.setLocalIterators(CLASS, job.getConfiguration(), enableFeature);
   }
-  
+
   /**
    * Determines whether a configuration uses local iterators.
    * 
@@ -531,7 +532,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   protected static boolean usesLocalIterators(JobContext context) {
     return InputConfigurator.usesLocalIterators(CLASS, getConfiguration(context));
   }
-  
+
   /**
    * <p>
    * Enable reading offline tables. By default, this feature is disabled and only online tables are scanned. This will make the map reduce job directly read the
@@ -566,7 +567,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   public static void setOfflineTableScan(Job job, boolean enableFeature) {
     InputConfigurator.setOfflineTableScan(CLASS, job.getConfiguration(), enableFeature);
   }
-  
+
   /**
    * Determines whether a configuration has the offline table scan feature enabled.
    * 
@@ -579,7 +580,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   protected static boolean isOfflineScan(JobContext context) {
     return InputConfigurator.isOfflineScan(CLASS, getConfiguration(context));
   }
-  
+
   /**
    * Initializes an Accumulo {@link TabletLocator} based on the configuration.
    * 
@@ -595,7 +596,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   protected static TabletLocator getTabletLocator(JobContext context, String table) throws TableNotFoundException {
     return InputConfigurator.getTabletLocator(CLASS, getConfiguration(context), table);
   }
-  
+
   // InputFormat doesn't have the equivalent of OutputFormat's checkOutputSpecs(JobContext job)
   /**
    * Check whether a configuration is fully configured to be used with an Accumulo {@link org.apache.hadoop.mapreduce.InputFormat}.
@@ -609,7 +610,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   protected static void validateOptions(JobContext context) throws IOException {
     InputConfigurator.validateOptions(CLASS, getConfiguration(context));
   }
-  
+
   /**
    * An abstract base class to be used to create {@link RecordReader} instances that convert from Accumulo {@link Key}/{@link Value} pairs to the user's K/V
    * types.
@@ -626,7 +627,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
     protected long numKeysRead;
     protected Iterator<Entry<Key,Value>> scannerIterator;
     protected RangeInputSplit split;
-    
+  
     /**
      * Apply the configured iterators from the configuration to the scanner. This applies both the default iterators and the per-table iterators.
      * 
@@ -638,30 +639,30 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
      *          the table name for which to set up the iterators
      */
     protected void setupIterators(TaskAttemptContext context, Scanner scanner, String tableName) {
-      TableQueryConfig config = getTableQueryConfig(context,tableName);
+      TableQueryConfig config = getTableQueryConfig(context, tableName);
       List<IteratorSetting> iterators = config.getIterators();
       for (IteratorSetting iterator : iterators)
         scanner.addScanIterator(iterator);
     }
-    
+  
     /**
      * Initialize a scanner over the given input split using this task attempt configuration.
      */
     @Override
     public void initialize(InputSplit inSplit, TaskAttemptContext attempt) throws IOException {
-      
+    
       Scanner scanner;
       split = (RangeInputSplit) inSplit;
       log.debug("Initializing input split: " + split.getRange());
       Instance instance = getInstance(attempt);
       String principal = getPrincipal(attempt);
-      
-      TableQueryConfig tableConfig = getTableQueryConfig(attempt,split.getTableName());
-      
+    
+      TableQueryConfig tableConfig = getTableQueryConfig(attempt, split.getTableName());
+    
       // in case the table name changed, we can still use the previous name for terms of configuration,
       // but for the scanner, we'll need to reference the new table name.
       String actualNameForId = split.getTableName();
-      if(!(instance instanceof MockInstance)) {
+      if (!(instance instanceof MockInstance)) {
         try {
           actualNameForId = Tables.getTableName(instance, split.getTableId());
           if (!actualNameForId.equals(split.getTableName()))
@@ -670,12 +671,12 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
           throw new IOException("The specified table was not found for id=" + split.getTableId());
         }
       }
-
+    
       AuthenticationToken token = getAuthenticationToken(attempt);
       Authorizations authorizations = getScanAuthorizations(attempt);
       try {
         log.debug("Creating connector with user: " + principal);
-        
+      
         Connector conn = instance.getConnector(principal, token);
         log.debug("Creating scanner for table: " + split.getTableName());
         log.debug("Authorizations are: " + authorizations);
@@ -696,7 +697,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
       } catch (Exception e) {
         throw new IOException(e);
       }
-      
+    
       // setup a scanner within the bounds of this split
       for (Pair<Text,Text> c : tableConfig.getColumns()) {
         if (c.getSecond() != null) {
@@ -707,64 +708,64 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
           scanner.fetchColumnFamily(c.getFirst());
         }
       }
-      
+    
       scanner.setRange(split.getRange());
       numKeysRead = 0;
-      
+    
       // do this last after setting all scanner options
       scannerIterator = scanner.iterator();
     }
-    
+  
     @Override
     public void close() {}
-    
+  
     @Override
     public float getProgress() throws IOException {
       if (numKeysRead > 0 && currentKey == null)
         return 1.0f;
       return split.getProgress(currentKey);
     }
-    
+  
     protected K currentK = null;
     protected V currentV = null;
     protected Key currentKey = null;
     protected Value currentValue = null;
-    
+  
     @Override
     public K getCurrentKey() throws IOException, InterruptedException {
       return currentK;
     }
-    
+  
     @Override
     public V getCurrentValue() throws IOException, InterruptedException {
       return currentV;
     }
   }
-  
+
   Map<String,Map<KeyExtent,List<Range>>> binOfflineTable(JobContext context, String tableName, List<Range> ranges) throws TableNotFoundException,
       AccumuloException, AccumuloSecurityException {
-    
+  
     Map<String,Map<KeyExtent,List<Range>>> binnedRanges = new HashMap<String,Map<KeyExtent,List<Range>>>();
-    
+  
     Instance instance = getInstance(context);
     Connector conn = instance.getConnector(getPrincipal(context), getAuthenticationToken(context));
     String tableId = Tables.getTableId(instance, tableName);
-    
+  
     if (Tables.getTableState(instance, tableId) != TableState.OFFLINE) {
       Tables.clearCache(instance);
       if (Tables.getTableState(instance, tableId) != TableState.OFFLINE) {
         throw new AccumuloException("Table is online " + tableName + "(" + tableId + ") cannot scan table in offline mode ");
       }
     }
-    
+  
     for (Range range : ranges) {
       Text startRow;
-      
+    
       if (range.getStartKey() != null)
         startRow = range.getStartKey().getRow();
       else
         startRow = new Text();
-      
+    
       Range metadataRange = new Range(new KeyExtent(new Text(tableId), startRow, null).getMetadataEntry(), true, null, false);
       Scanner scanner = conn.createScanner(MetadataTable.NAME, Authorizations.EMPTY);
       TabletsSection.TabletColumnFamily.PREV_ROW_COLUMN.fetch(scanner);
@@ -772,7 +773,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
       scanner.fetchColumnFamily(TabletsSection.CurrentLocationColumnFamily.NAME);
       scanner.fetchColumnFamily(TabletsSection.FutureLocationColumnFamily.NAME);
       scanner.setRange(metadataRange);
-      
+    
       RowIterator rowIter = new RowIterator(scanner);
       KeyExtent lastExtent = null;
       while (rowIter.hasNext()) {
@@ -780,63 +781,63 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
         String last = "";
         KeyExtent extent = null;
         String location = null;
-        
+      
         while (row.hasNext()) {
           Entry<Key,Value> entry = row.next();
           Key key = entry.getKey();
-          
+        
           if (key.getColumnFamily().equals(TabletsSection.LastLocationColumnFamily.NAME)) {
             last = entry.getValue().toString();
           }
-          
+        
           if (key.getColumnFamily().equals(TabletsSection.CurrentLocationColumnFamily.NAME)
               || key.getColumnFamily().equals(TabletsSection.FutureLocationColumnFamily.NAME)) {
             location = entry.getValue().toString();
           }
-          
+        
           if (TabletsSection.TabletColumnFamily.PREV_ROW_COLUMN.hasColumns(key)) {
             extent = new KeyExtent(key.getRow(), entry.getValue());
           }
-          
-        }
         
+        }
+      
         if (location != null)
           return null;
-        
+      
         if (!extent.getTableId().toString().equals(tableId)) {
           throw new AccumuloException("Saw unexpected table Id " + tableId + " " + extent);
         }
-        
+      
         if (lastExtent != null && !extent.isPreviousExtent(lastExtent)) {
           throw new AccumuloException(" " + lastExtent + " is not previous extent " + extent);
         }
-        
+      
         Map<KeyExtent,List<Range>> tabletRanges = binnedRanges.get(last);
         if (tabletRanges == null) {
           tabletRanges = new HashMap<KeyExtent,List<Range>>();
           binnedRanges.put(last, tabletRanges);
         }
-        
+      
         List<Range> rangeList = tabletRanges.get(extent);
         if (rangeList == null) {
           rangeList = new ArrayList<Range>();
           tabletRanges.put(extent, rangeList);
         }
-        
+      
         rangeList.add(range);
-        
+      
         if (extent.getEndRow() == null || range.afterEndKey(new Key(extent.getEndRow()).followingKey(PartialKey.ROW))) {
           break;
         }
-        
+      
         lastExtent = extent;
       }
-      
-    }
     
+    }
+  
     return binnedRanges;
   }
-  
+
   /**
    * Gets the splits of the tables that have been set on the job.
    * 
@@ -849,11 +850,11 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   public List<InputSplit> getSplits(JobContext conf) throws IOException {
     log.setLevel(getLogLevel(conf));
     validateOptions(conf);
-    
+  
     LinkedList<InputSplit> splits = new LinkedList<InputSplit>();
     List<TableQueryConfig> tableConfigs = getTableQueryConfigs(conf);
     for (TableQueryConfig tableConfig : tableConfigs) {
-      
+    
       boolean autoAdjust = tableConfig.shouldAutoAdjustRanges();
       String tableId = null;
       List<Range> ranges = autoAdjust ? Range.mergeOverlapping(tableConfig.getRanges()) : tableConfig.getRanges();
@@ -861,7 +862,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
         ranges = new ArrayList<Range>(1);
         ranges.add(new Range());
       }
-      
+    
       // get the metadata information for these ranges
       Map<String,Map<KeyExtent,List<Range>>> binnedRanges = new HashMap<String,Map<KeyExtent,List<Range>>>();
       TabletLocator tl;
@@ -872,7 +873,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
             // Some tablets were still online, try again
             UtilWaitThread.sleep(100 + (int) (Math.random() * 100)); // sleep randomly between 100 and 200 ms
             binnedRanges = binOfflineTable(conf, tableConfig.getTableName(), ranges);
-            
+          
           }
         } else {
           Instance instance = getInstance(conf);
@@ -880,7 +881,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
           // its possible that the cache could contain complete, but old information about a tables tablets... so clear it
           tl.invalidateCache();
           Credentials creds = new Credentials(getPrincipal(conf), getAuthenticationToken(conf));
-          
+        
           while (!tl.binRanges(creds, ranges, binnedRanges).isEmpty()) {
             if (!(instance instanceof MockInstance)) {
               if (!Tables.exists(instance, tableId))
@@ -898,12 +899,12 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
       } catch (Exception e) {
         throw new IOException(e);
       }
-      
+    
       HashMap<Range,ArrayList<String>> splitsToAdd = null;
-      
+    
       if (!autoAdjust)
         splitsToAdd = new HashMap<Range,ArrayList<String>>();
-      
+    
       HashMap<String,String> hostNameCache = new HashMap<String,String>();
       for (Entry<String,Map<KeyExtent,List<Range>>> tserverBin : binnedRanges.entrySet()) {
         String ip = tserverBin.getKey().split(":", 2)[0];
@@ -930,14 +931,14 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
           }
         }
       }
-      
+    
       if (!autoAdjust)
         for (Entry<Range,ArrayList<String>> entry : splitsToAdd.entrySet())
           splits.add(new RangeInputSplit(tableConfig.getTableName(), tableId, entry.getKey(), entry.getValue().toArray(new String[0])));
     }
     return splits;
   }
-  
+
   /**
    * The Class RangeInputSplit. Encapsulates an Accumulo range for use in Map Reduce jobs.
    */
@@ -946,51 +947,51 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
     private String[] locations;
     private String tableId;
     private String tableName;
-    
+  
     public RangeInputSplit() {
       range = new Range();
       locations = new String[0];
       tableId = "";
       tableName = "";
     }
-    
+  
     public RangeInputSplit(RangeInputSplit split) throws IOException {
       this.setRange(split.getRange());
       this.setLocations(split.getLocations());
       this.setTableName(split.getTableName());
     }
-    
+  
     protected RangeInputSplit(String table, String tableId, Range range, String[] locations) {
       this.range = range;
       this.locations = locations;
       this.tableName = table;
       this.tableId = tableId;
     }
-    
+  
     public Range getRange() {
       return range;
     }
-    
+  
     public void setRange(Range range) {
       this.range = range;
     }
-    
+  
     public String getTableName() {
       return tableName;
     }
-    
+  
     public void setTableName(String tableName) {
       this.tableName = tableName;
     }
-    
+  
     public void setTableId(String tableId) {
       this.tableId = tableId;
     }
-    
+  
     public String getTableId() {
       return tableId;
     }
-    
+  
     private static byte[] extractBytes(ByteSequence seq, int numBytes) {
       byte[] bytes = new byte[numBytes + 1];
       bytes[0] = 0;
@@ -1002,7 +1003,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
       }
       return bytes;
     }
-    
+  
     public static float getProgress(ByteSequence start, ByteSequence end, ByteSequence position) {
       int maxDepth = Math.min(Math.max(end.length(), start.length()), position.length());
       BigInteger startBI = new BigInteger(extractBytes(start, maxDepth));
@@ -1010,7 +1011,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
       BigInteger positionBI = new BigInteger(extractBytes(position, maxDepth));
       return (float) (positionBI.subtract(startBI).doubleValue() / endBI.subtract(startBI).doubleValue());
     }
-    
+  
     public float getProgress(Key currentKey) {
       if (currentKey == null)
         return 0f;
@@ -1029,7 +1030,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
       // if we can't figure it out, then claim no progress
       return 0f;
     }
-    
+  
     /**
      * This implementation of length is only an estimate, it does not provide exact values. Do not have your code rely on this return value.
      */
@@ -1039,29 +1040,29 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
       Text stopRow = range.isInfiniteStopKey() ? new Text(new byte[] {Byte.MAX_VALUE}) : range.getEndKey().getRow();
       int maxCommon = Math.min(7, Math.min(startRow.getLength(), stopRow.getLength()));
       long diff = 0;
-      
+    
       byte[] start = startRow.getBytes();
       byte[] stop = stopRow.getBytes();
       for (int i = 0; i < maxCommon; ++i) {
         diff |= 0xff & (start[i] ^ stop[i]);
         diff <<= Byte.SIZE;
       }
-      
+    
       if (startRow.getLength() != stopRow.getLength())
         diff |= 0xff;
-      
+    
       return diff + 1;
     }
-    
+  
     @Override
     public String[] getLocations() throws IOException {
       return locations;
     }
-    
+  
     public void setLocations(String[] locations) {
       this.locations = locations;
     }
-    
+  
     @Override
     public void readFields(DataInput in) throws IOException {
       range.readFields(in);
@@ -1071,7 +1072,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
       for (int i = 0; i < numLocs; ++i)
         locations[i] = in.readUTF();
     }
-    
+  
     @Override
     public void write(DataOutput out) throws IOException {
       range.write(out);
@@ -1081,7 +1082,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
         out.writeUTF(locations[i]);
     }
   }
-  
+
   // use reflection to pull the Configuration out of the JobContext for Hadoop 1 and Hadoop 2 compatibility
   static Configuration getConfiguration(JobContext context) {
     try {

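And a hypothetical sketch of the multi-table configuration path this branch is building (ACCUMULO-391), using the InputConfigurator signatures shown in the hunks below. The TableQueryConfig package, its table-name constructor, and the setRanges call are assumptions, since that class is not part of this diff; treat this only as an illustration of how the serialized per-table configs flow through the Hadoop Configuration.

import java.util.Collections;
import java.util.List;

import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
import org.apache.accumulo.core.client.mapreduce.lib.util.InputConfigurator;
import org.apache.accumulo.core.conf.TableQueryConfig; // package assumed, not shown in this diff
import org.apache.accumulo.core.data.Range;
import org.apache.hadoop.conf.Configuration;

public class MultiTableConfigSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();

    // One TableQueryConfig per input table, each carrying its own ranges,
    // columns, and iterators (constructor and setter assumed).
    TableQueryConfig t1 = new TableQueryConfig("table1");
    t1.setRanges(Collections.singletonList(new Range("a", "m")));

    // InputConfigurator Base64-encodes each config into the Configuration under
    // a key prefixed by the implementing class (see setTableQueryConfigs below).
    InputConfigurator.setTableQueryConfigs(AccumuloInputFormat.class, conf, t1);

    // getSplits(JobContext) later reads them all back, plus the config derived
    // from the single-table setters, via getTableQueryConfigs.
    List<TableQueryConfig> configs =
        InputConfigurator.getTableQueryConfigs(AccumuloInputFormat.class, conf);
    System.out.println(configs.size() + " table configuration(s) on the job");
  }
}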
http://git-wip-us.apache.org/repos/asf/accumulo/blob/e4e05c80/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
index 9c1d553..b235e13 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
@@ -97,7 +97,7 @@ public class InputConfigurator extends ConfiguratorBase {
 
   /**
    * Sets the name of the input table, over which this job will scan.
-   *
+   * 
    * @param implementingClass
    *          the class whose name will be used as a prefix for the property configuration key
    * @param conf
@@ -150,12 +150,12 @@ public class InputConfigurator extends ConfiguratorBase {
    * @param ranges
    *          the ranges that will be mapped over
    * @throws IllegalArgumentException
-   *          if the ranges cannot be encoded into base 64
+   *           if the ranges cannot be encoded into base 64
    * @since 1.5.0
    */
   public static void setRanges(Class<?> implementingClass, Configuration conf, Collection<Range> ranges) {
     notNull(ranges);
-
+  
     ArrayList<String> rangeStrings = new ArrayList<String>(ranges.size());
     try {
       for (Range r : ranges) {
@@ -179,11 +179,11 @@ public class InputConfigurator extends ConfiguratorBase {
    * @return the ranges
    * @throws IOException
    *           if the ranges have been encoded improperly
-   * @since 1.6.0
+   * @since 1.5.0
    * @see #setRanges(Class, Configuration, Collection)
    */
   public static List<Range> getRanges(Class<?> implementingClass, Configuration conf) throws IOException {
-
+  
     Collection<String> encodedRanges = conf.getStringCollection(enumToConfKey(implementingClass, ScanOpts.RANGES));
     List<Range> ranges = new ArrayList<Range>();
     for (String rangeString : encodedRanges) {
@@ -195,10 +195,9 @@ public class InputConfigurator extends ConfiguratorBase {
     return ranges;
   }
 
-
   /**
    * Gets a list of the iterator settings (for iterators to apply to a scanner) from this configuration.
-   *
+   * 
    * @param implementingClass
    *          the class whose name will be used as a prefix for the property configuration key
    * @param conf
@@ -208,12 +207,12 @@ public class InputConfigurator extends ConfiguratorBase {
    * @see #addIterator(Class, Configuration, IteratorSetting)
    */
   public static List<IteratorSetting> getIterators(Class<?> implementingClass, Configuration conf) {
-    String iterators = conf.get(enumToConfKey(implementingClass, ScanOpts.ITERATORS));
-
+    String iterators = conf.get(enumToConfKey(implementingClass,ScanOpts.ITERATORS));
+  
     // If no iterators are present, return an empty list
     if (iterators == null || iterators.isEmpty())
       return new ArrayList<IteratorSetting>();
-
+  
     // Compose the set of iterators encoded in the job configuration
     StringTokenizer tokens = new StringTokenizer(iterators, StringUtils.COMMA_STR);
     List<IteratorSetting> list = new ArrayList<IteratorSetting>();
@@ -230,7 +229,6 @@ public class InputConfigurator extends ConfiguratorBase {
     return list;
   }
 
-
   /**
    * Restricts the columns that will be mapped over for this job. This applies the columns to all tables that have been set on the job.
    * 
@@ -242,18 +240,17 @@ public class InputConfigurator extends ConfiguratorBase {
    *          a pair of {@link Text} objects corresponding to column family and column qualifier. If the column qualifier is null, the entire column family is
    *          selected. An empty set is the default and is equivalent to scanning the all columns.
    * @throws IllegalArgumentException
-   *          if the column family is null
+   *           if the column family is null
    * @since 1.5.0
-   * @deprecated since 1.6.0
    */
   public static void fetchColumns(Class<?> implementingClass, Configuration conf, Collection<Pair<Text,Text>> columnFamilyColumnQualifierPairs) {
     notNull(columnFamilyColumnQualifierPairs);
     ArrayList<String> columnStrings = new ArrayList<String>();
     for (Pair<Text,Text> column : columnFamilyColumnQualifierPairs) {
-
+    
       if (column.getFirst() == null)
         throw new IllegalArgumentException("Column family can not be null");
-
+    
       String col = new String(Base64.encodeBase64(TextUtil.getBytes(column.getFirst())), Constants.UTF8);
       if (column.getSecond() != null)
         col += ":" + new String(Base64.encodeBase64(TextUtil.getBytes(column.getSecond())), Constants.UTF8);
@@ -262,10 +259,9 @@ public class InputConfigurator extends ConfiguratorBase {
     conf.setStrings(enumToConfKey(implementingClass, ScanOpts.COLUMNS), columnStrings.toArray(new String[0]));
   }
 
-
   /**
    * Gets the columns to be mapped over from this job.
-   *
+   * 
    * @param implementingClass
    *          the class whose name will be used as a prefix for the property configuration key
    * @param conf
@@ -295,7 +291,7 @@ public class InputConfigurator extends ConfiguratorBase {
    * @param cfg
    *          the configuration of the iterator
    * @throws IllegalArgumentException
-   *          if the iterator can't be serialized into the configuration
+   *           if the iterator can't be serialized into the configuration
    * @since 1.5.0
    */
   public static void addIterator(Class<?> implementingClass, Configuration conf, IteratorSetting cfg) {
@@ -308,7 +304,7 @@ public class InputConfigurator extends ConfiguratorBase {
     } catch (IOException e) {
       throw new IllegalArgumentException("unable to serialize IteratorSetting");
     }
-
+  
     String confKey = enumToConfKey(implementingClass, ScanOpts.ITERATORS);
     String iterators = conf.get(confKey);
     // No iterators specified yet, create a new string
@@ -319,7 +315,7 @@ public class InputConfigurator extends ConfiguratorBase {
       iterators = iterators.concat(StringUtils.COMMA_STR + newIter);
     }
     // Store the iterators w/ the job
-    conf.set(confKey,iterators);
+    conf.set(confKey, iterators);
   }
 
   /**
@@ -339,7 +335,7 @@ public class InputConfigurator extends ConfiguratorBase {
    * @since 1.5.0
    */
   public static void setAutoAdjustRanges(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
-    conf.setBoolean(enumToConfKey(implementingClass, Features.AUTO_ADJUST_RANGES), enableFeature);
+    conf.setBoolean(enumToConfKey(implementingClass, Features.AUTO_ADJUST_RANGES), enableFeature);
   }
 
   /**
@@ -476,13 +472,22 @@ public class InputConfigurator extends ConfiguratorBase {
     return conf.getBoolean(enumToConfKey(implementingClass, Features.SCAN_OFFLINE), false);
   }
 
+  /**
+   * Sets configurations for multiple tables at a time.
+   * @param implementingClass
+   *          the class whose name will be used as a prefix for the property configuration key
+   * @param conf
+   *          the Hadoop configuration object to configure
+   * @param tconf
+   *          an array of {@link TableQueryConfig} objects to associate with the job
+   */
   public static void setTableQueryConfigs(Class<?> implementingClass, Configuration conf, TableQueryConfig... tconf) {
     List<String> tableQueryConfigStrings = new ArrayList<String>();
-    for(TableQueryConfig queryConfig : tconf) {
+    for (TableQueryConfig queryConfig : tconf) {
       ByteArrayOutputStream baos = new ByteArrayOutputStream();
       try {
         queryConfig.write(new DataOutputStream(baos));
-      } catch(IOException e) {
+      } catch (IOException e) {
         throw new IllegalStateException("Configuration for " + queryConfig.getTableName() + " could not be serialized.");
       }
       tableQueryConfigStrings.add(new String(Base64.encodeBase64(baos.toByteArray())));
@@ -491,17 +496,25 @@ public class InputConfigurator extends ConfiguratorBase {
     conf.setStrings(confKey, tableQueryConfigStrings.toArray(new String[0]));
   }
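
A hedged sketch of configuring two tables for one job with the new setTableQueryConfigs method; the table names and scan settings are illustrative, and the exact TableQueryConfig import is assumed (the class is introduced elsewhere in ACCUMULO-391):

    import java.util.Collections;
    import org.apache.accumulo.core.client.IteratorSetting;
    import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
    import org.apache.accumulo.core.client.mapreduce.lib.util.InputConfigurator;
    import org.apache.accumulo.core.data.Range;
    import org.apache.accumulo.core.iterators.user.VersioningIterator;
    import org.apache.hadoop.conf.Configuration;
    // import ...TableQueryConfig;  // exact package assumed; the class is added by this branch

    Configuration conf = new Configuration();

    TableQueryConfig users = new TableQueryConfig("users");
    users.setRanges(Collections.singletonList(new Range("a", "m")));
    users.setAutoAdjustRanges(true);

    TableQueryConfig events = new TableQueryConfig("events");
    events.setIterators(Collections.singletonList(new IteratorSetting(60, "vers", VersioningIterator.class)));

    InputConfigurator.setTableQueryConfigs(AccumuloInputFormat.class, conf, users, events);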
 
+  /**
+   * Returns all {@link TableQueryConfig} objects associated with this job.
+   * @param implementingClass
+   *          the class whose name will be used as a prefix for the property configuration key
+   * @param conf
+   *          the Hadoop configuration object to configure
+   * @return all of the table query configs for the job
+   */
   public static List<TableQueryConfig> getTableQueryConfigs(Class<?> implementingClass, Configuration conf) {
     List<TableQueryConfig> configs = new ArrayList<TableQueryConfig>();
     Collection<String> configStrings = conf.getStringCollection(enumToConfKey(implementingClass, ScanOpts.TABLE_CONFIGS));
-    if(configStrings != null) {
-      for(String str : configStrings) {
-        try{
+    if (configStrings != null) {
+      for (String str : configStrings) {
+        try {
           byte[] bytes = Base64.decodeBase64(str.getBytes());
           ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
           configs.add(new TableQueryConfig(new DataInputStream(bais)));
           bais.close();
-        } catch(IOException e) {
+        } catch (IOException e) {
           throw new IllegalStateException("The table query configurations could not be deserialized from the given configuration");
         }
       }
@@ -509,19 +522,29 @@ public class InputConfigurator extends ConfiguratorBase {
     TableQueryConfig defaultQueryConfig;
     try {
       defaultQueryConfig = getDefaultTableConfig(implementingClass, conf);
-    } catch(IOException e) {
+    } catch (IOException e) {
       throw new IllegalStateException("There was an error deserializing the default table configuration.");
     }
-    if(defaultQueryConfig != null)
+    if (defaultQueryConfig != null)
       configs.add(defaultQueryConfig);
-
+
     return configs;
   }
 
-  public static TableQueryConfig getTableQueryConfigs(Class<?> implementingClass, Configuration conf, String tableName) {
-    List<TableQueryConfig> queryConfigs = getTableQueryConfigs(implementingClass,conf);
-    for(TableQueryConfig queryConfig : queryConfigs) {
-      if(queryConfig.getTableName().equals(tableName)) {
+  /**
+   * Returns the {@link TableQueryConfig} for the given table.
+   * @param implementingClass
+   *          the class whose name will be used as a prefix for the property configuration key
+   * @param conf
+   *          the Hadoop configuration object to configure
+   * @param tableName
+   *          the table name for which to fetch the table query config
+   * @return the {@link TableQueryConfig} for the given table name, or null if no configuration exists for that table
+   */
+  public static TableQueryConfig getTableQueryConfig(Class<?> implementingClass, Configuration conf, String tableName) {
+    List<TableQueryConfig> queryConfigs = getTableQueryConfigs(implementingClass, conf);
+    for (TableQueryConfig queryConfig : queryConfigs) {
+      if (queryConfig.getTableName().equals(tableName)) {
         return queryConfig;
       }
     }
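
Continuing the sketch above, reading the per-table configuration back out through the two getters shown here (the "users" name is the same illustrative value):

    List<TableQueryConfig> tableConfigs = InputConfigurator.getTableQueryConfigs(AccumuloInputFormat.class, conf);
    for (TableQueryConfig tc : tableConfigs) {
      System.out.println(tc.getTableName()); // also includes the default single-table config, if one was set
    }

    TableQueryConfig users = InputConfigurator.getTableQueryConfig(AccumuloInputFormat.class, conf, "users");
    if (users == null) {
      // no configuration was ever associated with "users"
    }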
@@ -546,7 +569,7 @@ public class InputConfigurator extends ConfiguratorBase {
     String instanceType = conf.get(enumToConfKey(implementingClass, InstanceOpts.TYPE));
     if ("MockInstance".equals(instanceType))
       return new MockTabletLocator();
-    Instance instance = getInstance(implementingClass,conf);
+    Instance instance = getInstance(implementingClass, conf);
     return TabletLocator.getLocator(instance, new Text(Tables.getTableId(instance, tableName)));
   }
 
@@ -575,19 +598,19 @@ public class InputConfigurator extends ConfiguratorBase {
       Connector c = getInstance(implementingClass, conf).getConnector(principal, token);
       if (!c.securityOperations().authenticateUser(principal, token))
         throw new IOException("Unable to authenticate user");
-
-      for (TableQueryConfig tableConfig : getTableQueryConfigs(implementingClass,conf)) {
+
+      for (TableQueryConfig tableConfig : getTableQueryConfigs(implementingClass, conf)) {
         if (!c.securityOperations().hasTablePermission(getPrincipal(implementingClass, conf), tableConfig.getTableName(), TablePermission.READ))
           throw new IOException("Unable to access table");
       }
-
-      for (TableQueryConfig tableConfig : getTableQueryConfigs(implementingClass,conf)) {
-        if(!tableConfig.shouldUseLocalIterators()) {
-          if(tableConfig.getIterators() != null) {
+
+      for (TableQueryConfig tableConfig : getTableQueryConfigs(implementingClass, conf)) {
+        if (!tableConfig.shouldUseLocalIterators()) {
+          if (tableConfig.getIterators() != null) {
             for (IteratorSetting iter : tableConfig.getIterators()) {
               if (!c.tableOperations().testClassLoad(tableConfig.getTableName(), iter.getIteratorClass(), SortedKeyValueIterator.class.getName()))
                 throw new AccumuloException("Servers are unable to load " + iter.getIteratorClass() + " as a " + SortedKeyValueIterator.class.getName());
-
+
             }
           }
         }
@@ -602,9 +625,8 @@ public class InputConfigurator extends ConfiguratorBase {
   }
 
   /**
-   * Returns the {@link TableQueryConfig} for the configuration based on the properties set using the single-table
-   * input methods.
-   *
+   * Returns the {@link TableQueryConfig} for the configuration based on the properties set using the single-table input methods.
+   * 
    * @param implementingClass
    *          the class whose name will be used as a prefix for the property configuration key
    * @param conf
@@ -614,22 +636,21 @@ public class InputConfigurator extends ConfiguratorBase {
    */
   protected static TableQueryConfig getDefaultTableConfig(Class<?> implementingClass, Configuration conf) throws IOException {
     String tableName = getInputTableName(implementingClass, conf);
-    if(tableName != null) {
+    if (tableName != null) {
       TableQueryConfig queryConfig = new TableQueryConfig(getInputTableName(implementingClass, conf));
       List<IteratorSetting> itrs = getIterators(implementingClass, conf);
-      if(itrs != null)
+      if (itrs != null)
         queryConfig.setIterators(itrs);
       Set<Pair<Text,Text>> columns = getFetchedColumns(implementingClass, conf);
-      if(columns != null)
+      if (columns != null)
         queryConfig.setColumns(columns);
       List<Range> ranges = getRanges(implementingClass, conf);
-      if(ranges != null)
+      if (ranges != null)
         queryConfig.setRanges(ranges);
-
-      queryConfig.setAutoAdjustRanges(getAutoAdjustRanges(implementingClass, conf))
-              .setUseIsolatedScanners(isIsolated(implementingClass, conf))
-              .setUseLocalIterators(usesLocalIterators(implementingClass, conf))
-              .setOfflineScan(isOfflineScan(implementingClass, conf));
+
+      queryConfig.setAutoAdjustRanges(getAutoAdjustRanges(implementingClass, conf))
+          .setUseIsolatedScanners(isIsolated(implementingClass, conf)).setUseLocalIterators(usesLocalIterators(implementingClass, conf))
+          .setOfflineScan(isOfflineScan(implementingClass, conf));
       return queryConfig;
     }
     return null;
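
To show the backwards-compatibility path that getDefaultTableConfig provides, a hedged sketch using the pre-existing single-table setters (setInputTableName and setRanges are assumed to still exist on InputConfigurator alongside the getters referenced above):

    Configuration conf = new Configuration();
    InputConfigurator.setInputTableName(AccumuloInputFormat.class, conf, "users");
    InputConfigurator.setRanges(AccumuloInputFormat.class, conf, Collections.singletonList(new Range("a", "m")));

    // getTableQueryConfigs(...) now yields one TableQueryConfig named "users" carrying that range plus the
    // auto-adjust-ranges, isolation, local-iterator, and offline-scan flags (or their defaults).
    List<TableQueryConfig> configs = InputConfigurator.getTableQueryConfigs(AccumuloInputFormat.class, conf);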

http://git-wip-us.apache.org/repos/asf/accumulo/blob/e4e05c80/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterClientService.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterClientService.java b/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterClientService.java
index 3a00518..5b9949a 100644
--- a/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterClientService.java
+++ b/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterClientService.java
@@ -1690,7 +1690,7 @@ import org.slf4j.LoggerFactory;
             return TINFO;
           case 1: // CREDENTIALS
             return CREDENTIALS;
-          case 2: // TABLE
+          case 2: // TABLE_NAME
             return TABLE_NAME;
           default:
             return null;
@@ -2114,7 +2114,7 @@ import org.slf4j.LoggerFactory;
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
               }
               break;
-            case 2: // TABLE
+            case 2: // TABLE_NAME
               if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
                 struct.tableName = iprot.readString();
                 struct.setTableNameIsSet(true);
@@ -2824,7 +2824,7 @@ import org.slf4j.LoggerFactory;
             return TINFO;
           case 1: // CREDENTIALS
             return CREDENTIALS;
-          case 2: // TABLE
+          case 2: // TABLE_NAME
             return TABLE_NAME;
           case 6: // START_ROW
             return START_ROW;
@@ -3562,7 +3562,7 @@ import org.slf4j.LoggerFactory;
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
               }
               break;
-            case 2: // TABLE
+            case 2: // TABLE_NAME
               if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
                 struct.tableName = iprot.readString();
                 struct.setTableNameIsSet(true);
@@ -4254,7 +4254,7 @@ import org.slf4j.LoggerFactory;
             return TINFO;
           case 1: // CREDENTIALS
             return CREDENTIALS;
-          case 2: // TABLE
+          case 2: // TABLE_NAME
             return TABLE_NAME;
           case 3: // PROPERTY
             return PROPERTY;
@@ -4826,7 +4826,7 @@ import org.slf4j.LoggerFactory;
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
               }
               break;
-            case 2: // TABLE
+            case 2: // TABLE_NAME
               if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
                 struct.tableName = iprot.readString();
                 struct.setTableNameIsSet(true);
@@ -5473,7 +5473,7 @@ import org.slf4j.LoggerFactory;
             return TINFO;
           case 1: // CREDENTIALS
             return CREDENTIALS;
-          case 2: // TABLE
+          case 2: // TABLE_NAME
             return TABLE_NAME;
           case 3: // PROPERTY
             return PROPERTY;
@@ -5971,7 +5971,7 @@ import org.slf4j.LoggerFactory;
                 org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
               }
               break;
-            case 2: // TABLE
+            case 2: // TABLE_NAME
               if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
                 struct.tableName = iprot.readString();
                 struct.setTableNameIsSet(true);

