accumulo-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From mwa...@apache.org
Subject [accumulo] branch master updated: #408 - Remove deprecated ClientConfiguration from MapReduce API (#489)
Date Tue, 22 May 2018 15:22:58 GMT
This is an automated email from the ASF dual-hosted git repository.

mwalch pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/accumulo.git


The following commit(s) were added to refs/heads/master by this push:
     new 1cbd22d   #408 - Remove deprecated ClientConfiguration from MapReduce API (#489)
1cbd22d is described below

commit 1cbd22d1354f3127033c83a79302b6e2eb59f7ec
Author: Mike Walch <mwalch@apache.org>
AuthorDate: Tue May 22 11:22:52 2018 -0400

     #408 - Remove deprecated ClientConfiguration from MapReduce API (#489)
    
    * Removed unnecessary fields in RangeInputSplit
    * Updated init method in AbstractInputFormat
---
 .../core/client/mapred/AbstractInputFormat.java    |  84 +++++------
 .../core/client/mapred/AccumuloOutputFormat.java   |  12 +-
 .../core/client/mapreduce/AbstractInputFormat.java |  92 +++++-------
 .../client/mapreduce/AccumuloOutputFormat.java     |  11 +-
 .../core/client/mapreduce/RangeInputSplit.java     | 155 +-------------------
 .../core/client/mapreduce/impl/SplitUtils.java     |  16 +--
 .../mapreduce/lib/impl/ConfiguratorBase.java       | 160 ++++++++++++++++-----
 .../mapreduce/lib/impl/InputConfigurator.java      |  15 +-
 .../lib/impl/MapReduceClientOnDefaultTable.java    |   9 +-
 .../core/client/mapred/RangeInputSplitTest.java    |  12 --
 .../core/client/mapreduce/RangeInputSplitTest.java |  12 --
 .../client/mapreduce/impl/BatchInputSplitTest.java |  13 --
 .../mapreduce/lib/impl/ConfiguratorBaseTest.java   |  65 +++++----
 .../core/client/impl/ConnectionInfoImpl.java       |   2 +-
 .../minicluster/impl/MiniAccumuloClusterImpl.java  |   3 +-
 .../test/mapred/AccumuloInputFormatIT.java         |   4 -
 .../apache/accumulo/test/mapred/TokenFileIT.java   |   3 +-
 .../test/mapreduce/AccumuloInputFormatIT.java      |  48 -------
 .../accumulo/test/mapreduce/TokenFileIT.java       |   6 +-
 19 files changed, 257 insertions(+), 465 deletions(-)

diff --git a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java
index 49bab9a..c38566b 100644
--- a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java
@@ -27,13 +27,13 @@ import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Properties;
 import java.util.Random;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchScanner;
-import org.apache.accumulo.core.client.ClientConfiguration;
 import org.apache.accumulo.core.client.ClientSideIteratorScanner;
 import org.apache.accumulo.core.client.ConnectionInfo;
 import org.apache.accumulo.core.client.Connector;
@@ -49,7 +49,6 @@ import org.apache.accumulo.core.client.admin.DelegationTokenConfig;
 import org.apache.accumulo.core.client.admin.SecurityOperations;
 import org.apache.accumulo.core.client.impl.AuthenticationTokenIdentifier;
 import org.apache.accumulo.core.client.impl.ClientContext;
-import org.apache.accumulo.core.client.impl.ConnectionInfoFactory;
 import org.apache.accumulo.core.client.impl.Credentials;
 import org.apache.accumulo.core.client.impl.DelegationTokenImpl;
 import org.apache.accumulo.core.client.impl.OfflineScanner;
@@ -126,10 +125,21 @@ public abstract class AbstractInputFormat<K,V> implements InputFormat<K,V> {
    *          Connection information for Accumulo
    * @since 2.0.0
    */
-  public static void setConnectionInfo(JobConf job, ConnectionInfo info)
-      throws AccumuloSecurityException {
-    setConnectorInfo(job, info.getPrincipal(), info.getAuthenticationToken());
-    setZooKeeperInstance(job, ConnectionInfoFactory.getClientConfiguration(info));
+  public static void setConnectionInfo(JobConf job, ConnectionInfo info) {
+    ConnectionInfo inputInfo = InputConfigurator.updateToken(job.getCredentials(), info);
+    InputConfigurator.setConnectionInfo(CLASS, job, inputInfo);
+  }
+
+  /**
+   * Retrieves {@link ConnectionInfo} from the configuration
+   *
+   * @param job
+   *          Hadoop job instance configuration
+   * @return {@link ConnectionInfo} object
+   * @since 2.0.0
+   */
+  protected static ConnectionInfo getConnectionInfo(JobConf job) {
+    return InputConfigurator.getConnectionInfo(CLASS, job);
   }
 
   /**
@@ -259,7 +269,8 @@ public abstract class AbstractInputFormat<K,V> implements InputFormat<K,V> {
    * @deprecated since 2.0.0; Use {@link #setConnectionInfo(JobConf, ConnectionInfo)} instead.
    */
   @Deprecated
-  public static void setZooKeeperInstance(JobConf job, ClientConfiguration clientConfig) {
+  public static void setZooKeeperInstance(JobConf job,
+      org.apache.accumulo.core.client.ClientConfiguration clientConfig) {
     InputConfigurator.setZooKeeperInstance(CLASS, job, clientConfig);
   }
 
@@ -336,8 +347,11 @@ public abstract class AbstractInputFormat<K,V> implements InputFormat<K,V> {
    *          The job
    * @return The client configuration for the job
    * @since 1.7.0
+   * @deprecated since 2.0.0, replaced by {{@link #getConnectionInfo(JobConf)}}
    */
-  protected static ClientConfiguration getClientConfiguration(JobConf job) {
+  @Deprecated
+  protected static org.apache.accumulo.core.client.ClientConfiguration getClientConfiguration(
+      JobConf job) {
     return InputConfigurator.getClientConfiguration(CLASS, job);
   }
 
@@ -353,17 +367,7 @@ public abstract class AbstractInputFormat<K,V> implements InputFormat<K,V> {
    * @since 1.5.0
    */
   protected static void validateOptions(JobConf job) throws IOException {
-    final Instance inst = InputConfigurator.validateInstance(CLASS, job);
-    String principal = InputConfigurator.getPrincipal(CLASS, job);
-    AuthenticationToken token = InputConfigurator.getAuthenticationToken(CLASS, job);
-    // In secure mode, we need to convert the DelegationTokenStub into a real DelegationToken
-    token = ConfiguratorBase.unwrapAuthenticationToken(job, token);
-    Connector conn;
-    try {
-      conn = inst.getConnector(principal, token);
-    } catch (Exception e) {
-      throw new IOException(e);
-    }
+    Connector conn = InputConfigurator.getConnector(CLASS, job);
     InputConfigurator.validatePermissions(CLASS, job, conn);
   }
 
@@ -463,25 +467,11 @@ public abstract class AbstractInputFormat<K,V> implements InputFormat<K,V> {
       baseSplit = (org.apache.accumulo.core.client.mapreduce.RangeInputSplit) inSplit;
       log.debug("Initializing input split: " + baseSplit);
 
-      Instance instance = baseSplit.getInstance(getClientConfiguration(job));
-      if (null == instance) {
-        instance = getInstance(job);
-      }
-
-      String principal = baseSplit.getPrincipal();
-      if (null == principal) {
-        principal = getPrincipal(job);
-      }
-
-      AuthenticationToken token = baseSplit.getToken();
-      if (null == token) {
-        token = getAuthenticationToken(job);
-      }
+      Instance instance = getInstance(job);
 
-      Authorizations authorizations = baseSplit.getAuths();
-      if (null == authorizations) {
-        authorizations = getScanAuthorizations(job);
-      }
+      String principal = getPrincipal(job);
+      AuthenticationToken token = getAuthenticationToken(job);
+      Authorizations authorizations = getScanAuthorizations(job);
       String classLoaderContext = getClassLoaderContext(job);
       String table = baseSplit.getTableName();
 
@@ -538,9 +528,9 @@ public abstract class AbstractInputFormat<K,V> implements InputFormat<K,V> {
             scanner = new OfflineScanner(instance, new Credentials(principal, token),
                 Table.ID.of(baseSplit.getTableId()), authorizations);
           } else {
-            ClientConfiguration clientConf = getClientConfiguration(job);
+            Properties props = getConnectionInfo(job).getProperties();
             ClientContext context = new ClientContext(instance, new Credentials(principal, token),
-                clientConf);
+                props);
             scanner = new ScannerImpl(context, Table.ID.of(baseSplit.getTableId()), authorizations);
           }
           if (isIsolated) {
@@ -655,10 +645,6 @@ public abstract class AbstractInputFormat<K,V> implements InputFormat<K,V> {
         throw new IOException(e);
       }
 
-      Authorizations auths = getScanAuthorizations(job);
-      String principal = getPrincipal(job);
-      AuthenticationToken token = getAuthenticationToken(job);
-
       boolean batchScan = InputConfigurator.isBatchScan(CLASS, job);
       boolean supportBatchScan = !(tableConfig.isOfflineScan()
           || tableConfig.shouldUseIsolatedScanners() || tableConfig.shouldUseLocalIterators());
@@ -696,9 +682,7 @@ public abstract class AbstractInputFormat<K,V> implements InputFormat<K,V> {
           // tablets... so clear it
           tl.invalidateCache();
 
-          ClientContext context = new ClientContext(getInstance(job),
-              new Credentials(getPrincipal(job), getAuthenticationToken(job)),
-              getClientConfiguration(job));
+          ClientContext context = new ClientContext(getConnectionInfo(job));
           while (!tl.binRanges(context, ranges, binnedRanges).isEmpty()) {
             String tableIdStr = tableId.canonicalID();
             if (!Tables.exists(instance, tableId))
@@ -740,7 +724,7 @@ public abstract class AbstractInputFormat<K,V> implements InputFormat<K,V> {
 
             BatchInputSplit split = new BatchInputSplit(tableName, tableId, clippedRanges,
                 new String[] {location});
-            SplitUtils.updateSplit(split, instance, tableConfig, principal, token, auths, logLevel);
+            SplitUtils.updateSplit(split, tableConfig, logLevel);
 
             splits.add(split);
           } else {
@@ -750,8 +734,7 @@ public abstract class AbstractInputFormat<K,V> implements InputFormat<K,V> {
                 // divide ranges into smaller ranges, based on the tablets
                 RangeInputSplit split = new RangeInputSplit(tableName, tableId.canonicalID(),
                     ke.clip(r), new String[] {location});
-                SplitUtils.updateSplit(split, instance, tableConfig, principal, token, auths,
-                    logLevel);
+                SplitUtils.updateSplit(split, tableConfig, logLevel);
                 split.setOffline(tableConfig.isOfflineScan());
                 split.setIsolatedScan(tableConfig.shouldUseIsolatedScanners());
                 split.setUsesLocalIterators(tableConfig.shouldUseLocalIterators());
@@ -774,7 +757,7 @@ public abstract class AbstractInputFormat<K,V> implements InputFormat<K,V> {
         for (Map.Entry<Range,ArrayList<String>> entry : splitsToAdd.entrySet()) {
           RangeInputSplit split = new RangeInputSplit(tableName, tableId.canonicalID(),
               entry.getKey(), entry.getValue().toArray(new String[0]));
-          SplitUtils.updateSplit(split, instance, tableConfig, principal, token, auths, logLevel);
+          SplitUtils.updateSplit(split, tableConfig, logLevel);
           split.setOffline(tableConfig.isOfflineScan());
           split.setIsolatedScan(tableConfig.shouldUseIsolatedScanners());
           split.setUsesLocalIterators(tableConfig.shouldUseLocalIterators());
@@ -785,5 +768,4 @@ public abstract class AbstractInputFormat<K,V> implements InputFormat<K,V> {
 
     return splits.toArray(new InputSplit[splits.size()]);
   }
-
 }
diff --git a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
index 9fdacc9..fad8a4f 100644
--- a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
@@ -26,7 +26,6 @@ import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.BatchWriterConfig;
-import org.apache.accumulo.core.client.ClientConfiguration;
 import org.apache.accumulo.core.client.ConnectionInfo;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.Instance;
@@ -38,7 +37,6 @@ import org.apache.accumulo.core.client.ZooKeeperInstance;
 import org.apache.accumulo.core.client.admin.DelegationTokenConfig;
 import org.apache.accumulo.core.client.admin.SecurityOperations;
 import org.apache.accumulo.core.client.impl.AuthenticationTokenIdentifier;
-import org.apache.accumulo.core.client.impl.ConnectionInfoFactory;
 import org.apache.accumulo.core.client.impl.DelegationTokenImpl;
 import org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase;
 import org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator;
@@ -89,10 +87,9 @@ public class AccumuloOutputFormat implements OutputFormat<Text,Mutation> {
    *          Accumulo connection information
    * @since 2.0.0
    */
-  public static void setConnectionInfo(JobConf job, ConnectionInfo info)
-      throws AccumuloSecurityException {
-    setConnectorInfo(job, info.getPrincipal(), info.getAuthenticationToken());
-    setZooKeeperInstance(job, ConnectionInfoFactory.getClientConfiguration(info));
+  public static void setConnectionInfo(JobConf job, ConnectionInfo info) {
+    ConnectionInfo outInfo = OutputConfigurator.updateToken(job.getCredentials(), info);
+    OutputConfigurator.setConnectionInfo(CLASS, job, outInfo);
   }
 
   /**
@@ -225,7 +222,8 @@ public class AccumuloOutputFormat implements OutputFormat<Text,Mutation> {
    * @deprecated since 2.0.0; Use {@link #setConnectionInfo(JobConf, ConnectionInfo)} instead.
    */
   @Deprecated
-  public static void setZooKeeperInstance(JobConf job, ClientConfiguration clientConfig) {
+  public static void setZooKeeperInstance(JobConf job,
+      org.apache.accumulo.core.client.ClientConfiguration clientConfig) {
     OutputConfigurator.setZooKeeperInstance(CLASS, job, clientConfig);
   }
 
diff --git a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
index 6b9679a..d0deaec 100644
--- a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
@@ -27,13 +27,13 @@ import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Properties;
 import java.util.Random;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchScanner;
-import org.apache.accumulo.core.client.ClientConfiguration;
 import org.apache.accumulo.core.client.ClientSideIteratorScanner;
 import org.apache.accumulo.core.client.ConnectionInfo;
 import org.apache.accumulo.core.client.Connector;
@@ -49,7 +49,6 @@ import org.apache.accumulo.core.client.admin.DelegationTokenConfig;
 import org.apache.accumulo.core.client.admin.SecurityOperations;
 import org.apache.accumulo.core.client.impl.AuthenticationTokenIdentifier;
 import org.apache.accumulo.core.client.impl.ClientContext;
-import org.apache.accumulo.core.client.impl.ConnectionInfoFactory;
 import org.apache.accumulo.core.client.impl.Credentials;
 import org.apache.accumulo.core.client.impl.DelegationTokenImpl;
 import org.apache.accumulo.core.client.impl.OfflineScanner;
@@ -73,7 +72,6 @@ import org.apache.accumulo.core.data.impl.KeyExtent;
 import org.apache.accumulo.core.master.state.tables.TableState;
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.core.util.Pair;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.InputSplit;
@@ -129,10 +127,21 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
    *          Connection information for Accumulo
    * @since 2.0.0
    */
-  public static void setConnectionInfo(Job job, ConnectionInfo info)
-      throws AccumuloSecurityException {
-    setConnectorInfo(job, info.getPrincipal(), info.getAuthenticationToken());
-    setZooKeeperInstance(job, ConnectionInfoFactory.getClientConfiguration(info));
+  public static void setConnectionInfo(Job job, ConnectionInfo info) {
+    ConnectionInfo inputInfo = InputConfigurator.updateToken(job.getCredentials(), info);
+    InputConfigurator.setConnectionInfo(CLASS, job.getConfiguration(), inputInfo);
+  }
+
+  /**
+   * Gets the {@link ConnectionInfo} from the configuration
+   *
+   * @param context
+   *          Hadoop job context
+   * @return ConnectionInfo
+   * @since 2.0.0
+   */
+  protected static ConnectionInfo getConnectionInfo(JobContext context) {
+    return InputConfigurator.getConnectionInfo(CLASS, context.getConfiguration());
   }
 
   /**
@@ -262,7 +271,8 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
    * @deprecated since 2.0.0; Use {@link #setConnectionInfo(Job, ConnectionInfo)} instead.
    */
   @Deprecated
-  public static void setZooKeeperInstance(Job job, ClientConfiguration clientConfig) {
+  public static void setZooKeeperInstance(Job job,
+      org.apache.accumulo.core.client.ClientConfiguration clientConfig) {
     InputConfigurator.setZooKeeperInstance(CLASS, job.getConfiguration(), clientConfig);
   }
 
@@ -372,30 +382,21 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
    * @since 1.5.0
    */
   protected static void validateOptions(JobContext context) throws IOException {
-    final Configuration conf = context.getConfiguration();
-    final Instance inst = InputConfigurator.validateInstance(CLASS, conf);
-    String principal = InputConfigurator.getPrincipal(CLASS, conf);
-    AuthenticationToken token = InputConfigurator.getAuthenticationToken(CLASS, conf);
-    // In secure mode, we need to convert the DelegationTokenStub into a real DelegationToken
-    token = ConfiguratorBase.unwrapAuthenticationToken(context, token);
-    Connector conn;
-    try {
-      conn = inst.getConnector(principal, token);
-    } catch (Exception e) {
-      throw new IOException(e);
-    }
-    InputConfigurator.validatePermissions(CLASS, conf, conn);
+    Connector conn = InputConfigurator.getConnector(CLASS, context.getConfiguration());
+    InputConfigurator.validatePermissions(CLASS, context.getConfiguration(), conn);
   }
 
   /**
-   * Construct the {@link ClientConfiguration} given the provided context.
+   * Construct the ClientConfiguration given the provided context.
    *
    * @param context
    *          The Job
    * @return The ClientConfiguration
    * @since 1.7.0
    */
-  protected static ClientConfiguration getClientConfiguration(JobContext context) {
+  @Deprecated
+  protected static org.apache.accumulo.core.client.ClientConfiguration getClientConfiguration(
+      JobContext context) {
     return InputConfigurator.getClientConfiguration(CLASS, context.getConfiguration());
   }
 
@@ -470,25 +471,10 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
       split = (RangeInputSplit) inSplit;
       log.debug("Initializing input split: " + split);
 
-      Instance instance = split.getInstance(getClientConfiguration(attempt));
-      if (null == instance) {
-        instance = getInstance(attempt);
-      }
-
-      String principal = split.getPrincipal();
-      if (null == principal) {
-        principal = getPrincipal(attempt);
-      }
-
-      AuthenticationToken token = split.getToken();
-      if (null == token) {
-        token = getAuthenticationToken(attempt);
-      }
-
-      Authorizations authorizations = split.getAuths();
-      if (null == authorizations) {
-        authorizations = getScanAuthorizations(attempt);
-      }
+      Instance instance = getInstance(attempt);
+      String principal = getPrincipal(attempt);
+      AuthenticationToken token = getAuthenticationToken(attempt);
+      Authorizations authorizations = getScanAuthorizations(attempt);
       String classLoaderContext = getClassLoaderContext(attempt);
       String table = split.getTableName();
 
@@ -545,9 +531,11 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
             scanner = new OfflineScanner(instance, new Credentials(principal, token),
                 Table.ID.of(split.getTableId()), authorizations);
           } else {
-            ClientConfiguration clientConf = getClientConfiguration(attempt);
+            Properties props = getConnectionInfo(attempt).getProperties();
             ClientContext context = new ClientContext(instance, new Credentials(principal, token),
-                clientConf);
+                props);
+            // Not using public API to create scanner so that we can use table ID
+            // Table ID is used in case of renames during M/R job
             scanner = new ScannerImpl(context, Table.ID.of(split.getTableId()), authorizations);
           }
           if (isIsolated) {
@@ -680,10 +668,6 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
         throw new IOException(e);
       }
 
-      Authorizations auths = getScanAuthorizations(context);
-      String principal = getPrincipal(context);
-      AuthenticationToken token = getAuthenticationToken(context);
-
       boolean batchScan = InputConfigurator.isBatchScan(CLASS, context.getConfiguration());
       boolean supportBatchScan = !(tableConfig.isOfflineScan()
           || tableConfig.shouldUseIsolatedScanners() || tableConfig.shouldUseLocalIterators());
@@ -722,9 +706,7 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
           // tablets... so clear it
           tl.invalidateCache();
 
-          ClientContext clientContext = new ClientContext(getInstance(context),
-              new Credentials(getPrincipal(context), getAuthenticationToken(context)),
-              getClientConfiguration(context));
+          ClientContext clientContext = new ClientContext(getConnectionInfo(context));
           while (!tl.binRanges(clientContext, ranges, binnedRanges).isEmpty()) {
             String tableIdStr = tableId.canonicalID();
             if (!Tables.exists(instance, tableId))
@@ -768,7 +750,7 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
               clippedRanges.add(ke.clip(r));
             BatchInputSplit split = new BatchInputSplit(tableName, tableId, clippedRanges,
                 new String[] {location});
-            SplitUtils.updateSplit(split, instance, tableConfig, principal, token, auths, logLevel);
+            SplitUtils.updateSplit(split, tableConfig, logLevel);
 
             splits.add(split);
           } else {
@@ -778,12 +760,10 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
                 // divide ranges into smaller ranges, based on the tablets
                 RangeInputSplit split = new RangeInputSplit(tableName, tableId.canonicalID(),
                     ke.clip(r), new String[] {location});
-                SplitUtils.updateSplit(split, instance, tableConfig, principal, token, auths,
-                    logLevel);
+                SplitUtils.updateSplit(split, tableConfig, logLevel);
                 split.setOffline(tableConfig.isOfflineScan());
                 split.setIsolatedScan(tableConfig.shouldUseIsolatedScanners());
                 split.setUsesLocalIterators(tableConfig.shouldUseLocalIterators());
-
                 splits.add(split);
               } else {
                 // don't divide ranges
@@ -802,7 +782,7 @@ public abstract class AbstractInputFormat<K,V> extends InputFormat<K,V> {
         for (Map.Entry<Range,ArrayList<String>> entry : splitsToAdd.entrySet()) {
           RangeInputSplit split = new RangeInputSplit(tableName, tableId.canonicalID(),
               entry.getKey(), entry.getValue().toArray(new String[0]));
-          SplitUtils.updateSplit(split, instance, tableConfig, principal, token, auths, logLevel);
+          SplitUtils.updateSplit(split, tableConfig, logLevel);
           split.setOffline(tableConfig.isOfflineScan());
           split.setIsolatedScan(tableConfig.shouldUseIsolatedScanners());
           split.setUsesLocalIterators(tableConfig.shouldUseLocalIterators());
diff --git a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
index 90a0a62..9cf78b0 100644
--- a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
@@ -26,7 +26,6 @@ import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.BatchWriterConfig;
-import org.apache.accumulo.core.client.ClientConfiguration;
 import org.apache.accumulo.core.client.ConnectionInfo;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.Instance;
@@ -38,7 +37,6 @@ import org.apache.accumulo.core.client.ZooKeeperInstance;
 import org.apache.accumulo.core.client.admin.DelegationTokenConfig;
 import org.apache.accumulo.core.client.admin.SecurityOperations;
 import org.apache.accumulo.core.client.impl.AuthenticationTokenIdentifier;
-import org.apache.accumulo.core.client.impl.ConnectionInfoFactory;
 import org.apache.accumulo.core.client.impl.DelegationTokenImpl;
 import org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase;
 import org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator;
@@ -90,10 +88,8 @@ public class AccumuloOutputFormat extends OutputFormat<Text,Mutation> {
    *          Accumulo connection information
    * @since 2.0.0
    */
-  public static void setConnectionInfo(Job job, ConnectionInfo info)
-      throws AccumuloSecurityException {
-    setConnectorInfo(job, info.getPrincipal(), info.getAuthenticationToken());
-    setZooKeeperInstance(job, ConnectionInfoFactory.getClientConfiguration(info));
+  public static void setConnectionInfo(Job job, ConnectionInfo info) {
+    OutputConfigurator.setConnectionInfo(CLASS, job.getConfiguration(), info);
   }
 
   /**
@@ -227,7 +223,8 @@ public class AccumuloOutputFormat extends OutputFormat<Text,Mutation> {
    * @deprecated since 2.0.0; Use {@link #setConnectionInfo(Job, ConnectionInfo)} instead.
    */
   @Deprecated
-  public static void setZooKeeperInstance(Job job, ClientConfiguration clientConfig) {
+  public static void setZooKeeperInstance(Job job,
+      org.apache.accumulo.core.client.ClientConfiguration clientConfig) {
     OutputConfigurator.setZooKeeperInstance(CLASS, job.getConfiguration(), clientConfig);
   }
 
diff --git a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplit.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplit.java
index 26af244..3f289c3 100644
--- a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplit.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplit.java
@@ -16,35 +16,25 @@
  */
 package org.apache.accumulo.core.client.mapreduce;
 
-import static java.nio.charset.StandardCharsets.UTF_8;
-
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Base64;
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.accumulo.core.client.ClientConfiguration;
-import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.IteratorSetting;
-import org.apache.accumulo.core.client.ZooKeeperInstance;
 import org.apache.accumulo.core.client.mapreduce.impl.SplitUtils;
-import org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.TokenSource;
 import org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator;
 import org.apache.accumulo.core.client.sample.SamplerConfiguration;
-import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
-import org.apache.accumulo.core.client.security.tokens.AuthenticationToken.AuthenticationTokenSerializer;
 import org.apache.accumulo.core.data.ByteSequence;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.PartialKey;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.sample.impl.SamplerConfigurationImpl;
-import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.core.util.Pair;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -57,12 +47,8 @@ import org.apache.log4j.Level;
 public class RangeInputSplit extends InputSplit implements Writable {
   private Range range;
   private String[] locations;
-  private String tableId, tableName, instanceName, zooKeepers, principal;
-  private TokenSource tokenSource;
-  private String tokenFile;
-  private AuthenticationToken token;
+  private String tableId, tableName;
   private Boolean offline, isolatedScan, localIterators;
-  private Authorizations auths;
   private Set<Pair<Text,Text>> fetchedColumns;
   private List<IteratorSetting> iterators;
   private SamplerConfiguration samplerConfig;
@@ -175,44 +161,6 @@ public class RangeInputSplit extends InputSplit implements Writable {
     }
 
     if (in.readBoolean()) {
-      String strAuths = in.readUTF();
-      auths = new Authorizations(strAuths.getBytes(UTF_8));
-    }
-
-    if (in.readBoolean()) {
-      principal = in.readUTF();
-    }
-
-    if (in.readBoolean()) {
-      int ordinal = in.readInt();
-      this.tokenSource = TokenSource.values()[ordinal];
-
-      switch (this.tokenSource) {
-        case INLINE:
-          String tokenClass = in.readUTF();
-          byte[] tokenBytes = Base64.getDecoder().decode(in.readUTF());
-
-          this.token = AuthenticationTokenSerializer.deserialize(tokenClass, tokenBytes);
-          break;
-
-        case FILE:
-          this.tokenFile = in.readUTF();
-
-          break;
-        default:
-          throw new IOException("Cannot parse unknown TokenSource ordinal");
-      }
-    }
-
-    if (in.readBoolean()) {
-      instanceName = in.readUTF();
-    }
-
-    if (in.readBoolean()) {
-      zooKeepers = in.readUTF();
-    }
-
-    if (in.readBoolean()) {
       int numIterators = in.readInt();
       iterators = new ArrayList<>(numIterators);
       for (int i = 0; i < numIterators; i++) {
@@ -265,42 +213,6 @@ public class RangeInputSplit extends InputSplit implements Writable {
       }
     }
 
-    out.writeBoolean(null != auths);
-    if (null != auths) {
-      out.writeUTF(auths.serialize());
-    }
-
-    out.writeBoolean(null != principal);
-    if (null != principal) {
-      out.writeUTF(principal);
-    }
-
-    out.writeBoolean(null != tokenSource);
-    if (null != tokenSource) {
-      out.writeInt(tokenSource.ordinal());
-
-      if (null != token && null != tokenFile) {
-        throw new IOException(
-            "Cannot use both inline AuthenticationToken and file-based AuthenticationToken");
-      } else if (null != token) {
-        out.writeUTF(token.getClass().getName());
-        out.writeUTF(
-            Base64.getEncoder().encodeToString(AuthenticationTokenSerializer.serialize(token)));
-      } else {
-        out.writeUTF(tokenFile);
-      }
-    }
-
-    out.writeBoolean(null != instanceName);
-    if (null != instanceName) {
-      out.writeUTF(instanceName);
-    }
-
-    out.writeBoolean(null != zooKeepers);
-    if (null != zooKeepers) {
-      out.writeUTF(zooKeepers);
-    }
-
     out.writeBoolean(null != iterators);
     if (null != iterators) {
       out.writeInt(iterators.size());
@@ -336,56 +248,6 @@ public class RangeInputSplit extends InputSplit implements Writable {
     return tableId;
   }
 
-  public Instance getInstance(ClientConfiguration base) {
-    if (null == instanceName) {
-      return null;
-    }
-
-    if (null == zooKeepers) {
-      return null;
-    }
-
-    return new ZooKeeperInstance(base.withInstance(getInstanceName()).withZkHosts(getZooKeepers()));
-  }
-
-  public String getInstanceName() {
-    return instanceName;
-  }
-
-  public void setInstanceName(String instanceName) {
-    this.instanceName = instanceName;
-  }
-
-  public String getZooKeepers() {
-    return zooKeepers;
-  }
-
-  public void setZooKeepers(String zooKeepers) {
-    this.zooKeepers = zooKeepers;
-  }
-
-  public String getPrincipal() {
-    return principal;
-  }
-
-  public void setPrincipal(String principal) {
-    this.principal = principal;
-  }
-
-  public AuthenticationToken getToken() {
-    return token;
-  }
-
-  public void setToken(AuthenticationToken token) {
-    this.tokenSource = TokenSource.INLINE;
-    this.token = token;
-  }
-
-  public void setToken(String tokenFile) {
-    this.tokenSource = TokenSource.FILE;
-    this.tokenFile = tokenFile;
-  }
-
   public Boolean isOffline() {
     return offline;
   }
@@ -406,14 +268,6 @@ public class RangeInputSplit extends InputSplit implements Writable {
     this.isolatedScan = isolatedScan;
   }
 
-  public Authorizations getAuths() {
-    return auths;
-  }
-
-  public void setAuths(Authorizations auths) {
-    this.auths = auths;
-  }
-
   public void setRange(Range range) {
     this.range = range;
   }
@@ -464,13 +318,6 @@ public class RangeInputSplit extends InputSplit implements Writable {
     sb.append(" Locations: ").append(Arrays.asList(locations));
     sb.append(" Table: ").append(tableName);
     sb.append(" TableID: ").append(tableId);
-    sb.append(" InstanceName: ").append(instanceName);
-    sb.append(" zooKeepers: ").append(zooKeepers);
-    sb.append(" principal: ").append(principal);
-    sb.append(" tokenSource: ").append(tokenSource);
-    sb.append(" authenticationToken: ").append(token);
-    sb.append(" authenticationTokenFile: ").append(tokenFile);
-    sb.append(" Authorizations: ").append(auths);
     sb.append(" offlineScan: ").append(offline);
     sb.append(" isolatedScan: ").append(isolatedScan);
     sb.append(" localIterators: ").append(localIterators);
diff --git a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/SplitUtils.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/SplitUtils.java
index 53fea91..d49e3fe 100644
--- a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/SplitUtils.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/impl/SplitUtils.java
@@ -20,13 +20,10 @@ package org.apache.accumulo.core.client.mapreduce.impl;
 import java.io.IOException;
 import java.math.BigInteger;
 
-import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.mapreduce.InputTableConfig;
 import org.apache.accumulo.core.client.mapreduce.RangeInputSplit;
-import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.data.ByteSequence;
 import org.apache.accumulo.core.data.Range;
-import org.apache.accumulo.core.security.Authorizations;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Level;
 
@@ -36,20 +33,11 @@ public class SplitUtils {
    * Central place to set common split configuration not handled by split constructors. The
    * intention is to make it harder to miss optional setters in future refactor.
    */
-  public static void updateSplit(RangeInputSplit split, Instance instance,
-      InputTableConfig tableConfig, String principal, AuthenticationToken token,
-      Authorizations auths, Level logLevel) {
-    split.setInstanceName(instance.getInstanceName());
-    split.setZooKeepers(instance.getZooKeepers());
-
-    split.setPrincipal(principal);
-    split.setToken(token);
-    split.setAuths(auths);
-
+  public static void updateSplit(RangeInputSplit split, InputTableConfig tableConfig,
+      Level logLevel) {
     split.setFetchedColumns(tableConfig.getFetchedColumns());
     split.setIterators(tableConfig.getIterators());
     split.setLogLevel(logLevel);
-
     split.setSamplerConfiguration(tableConfig.getSamplerConfiguration());
   }
 
diff --git a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java
index 441189e..bb1d32c 100644
--- a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java
@@ -22,21 +22,31 @@ import static java.util.Objects.requireNonNull;
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
+import java.io.StringReader;
+import java.io.StringWriter;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.Base64;
+import java.util.Properties;
 
 import org.apache.accumulo.core.Constants;
+import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.ClientConfiguration;
+import org.apache.accumulo.core.client.ConnectionInfo;
+import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.ZooKeeperInstance;
+import org.apache.accumulo.core.client.admin.DelegationTokenConfig;
 import org.apache.accumulo.core.client.impl.AuthenticationTokenIdentifier;
+import org.apache.accumulo.core.client.impl.ClientConfConverter;
+import org.apache.accumulo.core.client.impl.ConnectionInfoImpl;
 import org.apache.accumulo.core.client.impl.Credentials;
 import org.apache.accumulo.core.client.impl.DelegationTokenImpl;
 import org.apache.accumulo.core.client.mapreduce.impl.DelegationTokenStub;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken.AuthenticationTokenSerializer;
+import org.apache.accumulo.core.client.security.tokens.KerberosToken;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -55,21 +65,27 @@ import org.apache.log4j.Logger;
  */
 public class ConfiguratorBase {
 
+  protected static final Logger log = Logger.getLogger(ConfiguratorBase.class);
+
   /**
    * Configuration keys for {@link Instance#getConnector(String, AuthenticationToken)}.
    *
    * @since 1.6.0
    */
-  public static enum ConnectorInfo {
+  public enum ConnectorInfo {
     IS_CONFIGURED, PRINCIPAL, TOKEN,
   }
 
-  public static enum TokenSource {
+  public enum ConnectionInfoOpts {
+    CLIENT_PROPS
+  }
+
+  public enum TokenSource {
     FILE, INLINE, JOB;
 
     private String prefix;
 
-    private TokenSource() {
+    TokenSource() {
       prefix = name().toLowerCase() + ":";
     }
 
@@ -83,7 +99,7 @@ public class ConfiguratorBase {
    *
    * @since 1.6.0
    */
-  public static enum InstanceOpts {
+  public enum InstanceOpts {
     TYPE, NAME, ZOO_KEEPERS, CLIENT_CONFIG;
   }
 
@@ -92,7 +108,7 @@ public class ConfiguratorBase {
    *
    * @since 1.6.0
    */
-  public static enum GeneralOpts {
+  public enum GeneralOpts {
     LOG_LEVEL, VISIBILITY_CACHE_SIZE
   }
 
@@ -123,6 +139,79 @@ public class ConfiguratorBase {
         + StringUtils.camelize(e.name().toLowerCase());
   }
 
+  public static ConnectionInfo updateToken(org.apache.hadoop.security.Credentials credentials,
+      ConnectionInfo info) {
+    ConnectionInfo result = info;
+    if (info.getAuthenticationToken() instanceof KerberosToken) {
+      log.info("Received KerberosToken, attempting to fetch DelegationToken");
+      try {
+        Connector conn = Connector.builder().usingConnectionInfo(info).build();
+        AuthenticationToken token = conn.securityOperations()
+            .getDelegationToken(new DelegationTokenConfig());
+        result = new ConnectionInfoImpl(info.getProperties(), token);
+      } catch (Exception e) {
+        log.warn("Failed to automatically obtain DelegationToken, "
+            + "Mappers/Reducers will likely fail to communicate with Accumulo", e);
+      }
+    }
+    // DelegationTokens can be passed securely from user to task without serializing insecurely in
+    // the configuration
+    if (info.getAuthenticationToken() instanceof DelegationTokenImpl) {
+      DelegationTokenImpl delegationToken = (DelegationTokenImpl) info.getAuthenticationToken();
+
+      // Convert it into a Hadoop Token
+      AuthenticationTokenIdentifier identifier = delegationToken.getIdentifier();
+      Token<AuthenticationTokenIdentifier> hadoopToken = new Token<>(identifier.getBytes(),
+          delegationToken.getPassword(), identifier.getKind(), delegationToken.getServiceName());
+
+      // Add the Hadoop Token to the Job so it gets serialized and passed along.
+      credentials.addToken(hadoopToken.getService(), hadoopToken);
+    }
+    return result;
+  }
+
+  public static void setConnectionInfo(Class<?> implementingClass, Configuration conf,
+      ConnectionInfo info) {
+    conf.set(enumToConfKey(implementingClass, InstanceOpts.TYPE), "ConnectionInfo");
+    Properties props = info.getProperties();
+    StringWriter writer = new StringWriter();
+    try {
+      props.store(writer, "client properties");
+    } catch (IOException e) {
+      throw new IllegalStateException(e);
+    }
+    conf.set(enumToConfKey(implementingClass, ConnectionInfoOpts.CLIENT_PROPS), writer.toString());
+    setConnectorInfo(implementingClass, conf, info.getPrincipal(), info.getAuthenticationToken());
+  }
+
+  @SuppressWarnings("deprecation")
+  public static ConnectionInfo getConnectionInfo(Class<?> implementingClass, Configuration conf) {
+    AuthenticationToken token = getAuthenticationToken(implementingClass, conf);
+    String propString = conf.get(enumToConfKey(implementingClass, ConnectionInfoOpts.CLIENT_PROPS),
+        "");
+    String confString = conf.get(enumToConfKey(implementingClass, InstanceOpts.CLIENT_CONFIG), "");
+    if (!propString.isEmpty() && !confString.isEmpty()) {
+      throw new IllegalStateException("Client connection information was set using both "
+          + "setConnectionInfo & setZookeeperInstance");
+    }
+    Properties props = new Properties();
+    if (!propString.isEmpty()) {
+      try {
+        props.load(new StringReader(propString));
+      } catch (IOException e) {
+        throw new IllegalStateException(e);
+      }
+    } else {
+      props = ClientConfConverter.toProperties(
+          org.apache.accumulo.core.client.ClientConfiguration.deserialize(confString));
+      String principal = conf.get(enumToConfKey(implementingClass, ConnectorInfo.PRINCIPAL), "");
+      if (!principal.isEmpty()) {
+        props.setProperty(ClientProperty.AUTH_USERNAME.getKey(), principal);
+      }
+    }
+    return new ConnectionInfoImpl(props, token);
+  }
+
   /**
    * Sets the connector information needed to communicate with Accumulo in this job.
    *
@@ -142,7 +231,7 @@ public class ConfiguratorBase {
    * @since 1.6.0
    */
   public static void setConnectorInfo(Class<?> implementingClass, Configuration conf,
-      String principal, AuthenticationToken token) throws AccumuloSecurityException {
+      String principal, AuthenticationToken token) {
     if (isConnectorInfoSet(implementingClass, conf))
       throw new IllegalStateException("Connector info for " + implementingClass.getSimpleName()
           + " can only be set once per job");
@@ -327,8 +416,9 @@ public class ConfiguratorBase {
    *          client configuration for specifying connection timeouts, SSL connection options, etc.
    * @since 1.6.0
    */
+  @Deprecated
   public static void setZooKeeperInstance(Class<?> implementingClass, Configuration conf,
-      ClientConfiguration clientConfig) {
+      org.apache.accumulo.core.client.ClientConfiguration clientConfig) {
     String key = enumToConfKey(implementingClass, InstanceOpts.TYPE);
     if (!conf.get(key, "").isEmpty())
       throw new IllegalStateException(
@@ -352,46 +442,44 @@ public class ConfiguratorBase {
    * @since 1.6.0
    */
   public static Instance getInstance(Class<?> implementingClass, Configuration conf) {
-    String instanceType = conf.get(enumToConfKey(implementingClass, InstanceOpts.TYPE), "");
-    if ("ZooKeeperInstance".equals(instanceType)) {
-      return new ZooKeeperInstance(getClientConfiguration(implementingClass, conf));
-    } else if (instanceType.isEmpty()) {
-      throw new IllegalStateException(
-          "Instance has not been configured for " + implementingClass.getSimpleName());
-    } else {
-      throw new IllegalStateException("Unrecognized instance type " + instanceType);
+    return getConnector(implementingClass, conf).getInstance();
+  }
+
+  /**
+   * Creates an Accumulo {@link Connector} based on the configuration
+   *
+   * @param implementingClass
+   *          class whose name will be used as a prefix for the property configuration key
+   * @param conf
+   *          Hadoop configuration object
+   * @return Accumulo connector
+   * @since 2.0.0
+   */
+  public static Connector getConnector(Class<?> implementingClass, Configuration conf) {
+    try {
+      return Connector.builder().usingConnectionInfo(getConnectionInfo(implementingClass, conf))
+          .build();
+    } catch (AccumuloException | AccumuloSecurityException e) {
+      throw new IllegalStateException(e);
     }
   }
 
   /**
-   * Obtain a {@link ClientConfiguration} based on the configuration.
+   * Obtain a ClientConfiguration based on the configuration.
    *
    * @param implementingClass
    *          the class whose name will be used as a prefix for the property configuration key
    * @param conf
    *          the Hadoop configuration object to configure
    *
-   * @return A {@link ClientConfiguration}
+   * @return A ClientConfiguration
    * @since 1.7.0
    */
-  public static ClientConfiguration getClientConfiguration(Class<?> implementingClass,
-      Configuration conf) {
-    String clientConfigString = conf
-        .get(enumToConfKey(implementingClass, InstanceOpts.CLIENT_CONFIG));
-    if (null != clientConfigString) {
-      return ClientConfiguration.deserialize(clientConfigString);
-    }
-
-    String instanceName = conf.get(enumToConfKey(implementingClass, InstanceOpts.NAME));
-    String zookeepers = conf.get(enumToConfKey(implementingClass, InstanceOpts.ZOO_KEEPERS));
-    ClientConfiguration clientConf = ClientConfiguration.loadDefault();
-    if (null != instanceName) {
-      clientConf.withInstance(instanceName);
-    }
-    if (null != zookeepers) {
-      clientConf.withZkHosts(zookeepers);
-    }
-    return clientConf;
+  @SuppressWarnings("deprecation")
+  public static org.apache.accumulo.core.client.ClientConfiguration getClientConfiguration(
+      Class<?> implementingClass, Configuration conf) {
+    return ClientConfConverter
+        .toClientConf(getConnectionInfo(implementingClass, conf).getProperties());
   }
 
   /**
diff --git a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/InputConfigurator.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/InputConfigurator.java
index b3da02a..93536d4 100644
--- a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/InputConfigurator.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/InputConfigurator.java
@@ -39,8 +39,8 @@ import java.util.StringTokenizer;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchScanner;
-import org.apache.accumulo.core.client.ClientConfiguration;
 import org.apache.accumulo.core.client.ClientSideIteratorScanner;
+import org.apache.accumulo.core.client.ConnectionInfo;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.IsolatedScanner;
@@ -49,7 +49,6 @@ import org.apache.accumulo.core.client.RowIterator;
 import org.apache.accumulo.core.client.Scanner;
 import org.apache.accumulo.core.client.TableNotFoundException;
 import org.apache.accumulo.core.client.impl.ClientContext;
-import org.apache.accumulo.core.client.impl.Credentials;
 import org.apache.accumulo.core.client.impl.Table;
 import org.apache.accumulo.core.client.impl.Tables;
 import org.apache.accumulo.core.client.impl.TabletLocator;
@@ -718,18 +717,12 @@ public class InputConfigurator extends ConfiguratorBase {
    * @param tableId
    *          The table id for which to initialize the {@link TabletLocator}
    * @return an Accumulo tablet locator
-   * @throws TableNotFoundException
-   *           if the table name set on the configuration doesn't exist
    * @since 1.6.0
    */
   public static TabletLocator getTabletLocator(Class<?> implementingClass, Configuration conf,
-      Table.ID tableId) throws TableNotFoundException {
-    Instance instance = getInstance(implementingClass, conf);
-    ClientConfiguration clientConf = getClientConfiguration(implementingClass, conf);
-    ClientContext context = new ClientContext(instance,
-        new Credentials(getPrincipal(implementingClass, conf),
-            getAuthenticationToken(implementingClass, conf)),
-        clientConf);
+      Table.ID tableId) {
+    ConnectionInfo info = getConnectionInfo(implementingClass, conf);
+    ClientContext context = new ClientContext(info);
     return TabletLocator.getLocator(context, tableId);
   }
 
diff --git a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/MapReduceClientOnDefaultTable.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/MapReduceClientOnDefaultTable.java
index 1eb1c30..78fff68 100644
--- a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/MapReduceClientOnDefaultTable.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/MapReduceClientOnDefaultTable.java
@@ -17,9 +17,9 @@
 package org.apache.accumulo.core.client.mapreduce.lib.impl;
 
 import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.ConnectionInfo;
 import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
 import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
-import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.hadoop.mapreduce.Job;
 
 import com.beust.jcommander.Parameter;
@@ -40,12 +40,11 @@ public class MapReduceClientOnDefaultTable extends MapReduceClientOpts {
   public void setAccumuloConfigs(Job job) throws AccumuloSecurityException {
     super.setAccumuloConfigs(job);
     final String tableName = getTableName();
-    final String principal = getPrincipal();
-    final AuthenticationToken token = getToken();
-    AccumuloInputFormat.setConnectorInfo(job, principal, token);
+    final ConnectionInfo info = getConnectionInfo();
+    AccumuloInputFormat.setConnectionInfo(job, info);
     AccumuloInputFormat.setInputTableName(job, tableName);
     AccumuloInputFormat.setScanAuthorizations(job, auths);
-    AccumuloOutputFormat.setConnectorInfo(job, principal, token);
+    AccumuloOutputFormat.setConnectionInfo(job, info);
     AccumuloOutputFormat.setCreateTables(job, true);
     AccumuloOutputFormat.setDefaultTableName(job, tableName);
   }
diff --git a/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapred/RangeInputSplitTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapred/RangeInputSplitTest.java
index 9435277..24eeeaa 100644
--- a/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapred/RangeInputSplitTest.java
+++ b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapred/RangeInputSplitTest.java
@@ -27,12 +27,10 @@ import java.util.HashSet;
 import java.util.Set;
 
 import org.apache.accumulo.core.client.IteratorSetting;
-import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.iterators.user.SummingCombiner;
 import org.apache.accumulo.core.iterators.user.WholeRowIterator;
-import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.core.util.Pair;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Level;
@@ -80,15 +78,10 @@ public class RangeInputSplitTest {
     setting.addOption("bar", "foo");
     iterators.add(setting);
 
-    split.setAuths(new Authorizations("foo"));
     split.setOffline(true);
     split.setIsolatedScan(true);
     split.setUsesLocalIterators(true);
     split.setFetchedColumns(fetchedColumns);
-    split.setToken(new PasswordToken("password"));
-    split.setPrincipal("root");
-    split.setInstanceName("instance");
-    split.setZooKeepers("localhost");
     split.setIterators(iterators);
     split.setLogLevel(Level.WARN);
 
@@ -105,15 +98,10 @@ public class RangeInputSplitTest {
     Assert.assertEquals(split.getRange(), newSplit.getRange());
     Assert.assertArrayEquals(split.getLocations(), newSplit.getLocations());
 
-    Assert.assertEquals(split.getAuths(), newSplit.getAuths());
     Assert.assertEquals(split.isOffline(), newSplit.isOffline());
     Assert.assertEquals(split.isIsolatedScan(), newSplit.isOffline());
     Assert.assertEquals(split.usesLocalIterators(), newSplit.usesLocalIterators());
     Assert.assertEquals(split.getFetchedColumns(), newSplit.getFetchedColumns());
-    Assert.assertEquals(split.getToken(), newSplit.getToken());
-    Assert.assertEquals(split.getPrincipal(), newSplit.getPrincipal());
-    Assert.assertEquals(split.getInstanceName(), newSplit.getInstanceName());
-    Assert.assertEquals(split.getZooKeepers(), newSplit.getZooKeepers());
     Assert.assertEquals(split.getIterators(), newSplit.getIterators());
     Assert.assertEquals(split.getLogLevel(), newSplit.getLogLevel());
   }
diff --git a/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplitTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplitTest.java
index 0f2133a..12a4b19 100644
--- a/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplitTest.java
+++ b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/RangeInputSplitTest.java
@@ -27,12 +27,10 @@ import java.util.HashSet;
 import java.util.Set;
 
 import org.apache.accumulo.core.client.IteratorSetting;
-import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.iterators.user.SummingCombiner;
 import org.apache.accumulo.core.iterators.user.WholeRowIterator;
-import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.core.util.Pair;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Level;
@@ -83,15 +81,10 @@ public class RangeInputSplitTest {
     iterators.add(setting);
 
     split.setTableName("table");
-    split.setAuths(new Authorizations("foo"));
     split.setOffline(true);
     split.setIsolatedScan(true);
     split.setUsesLocalIterators(true);
     split.setFetchedColumns(fetchedColumns);
-    split.setToken(new PasswordToken("password"));
-    split.setPrincipal("root");
-    split.setInstanceName("instance");
-    split.setZooKeepers("localhost");
     split.setIterators(iterators);
     split.setLogLevel(Level.WARN);
 
@@ -109,15 +102,10 @@ public class RangeInputSplitTest {
     Assert.assertArrayEquals(split.getLocations(), newSplit.getLocations());
 
     Assert.assertEquals(split.getTableName(), newSplit.getTableName());
-    Assert.assertEquals(split.getAuths(), newSplit.getAuths());
     Assert.assertEquals(split.isOffline(), newSplit.isOffline());
     Assert.assertEquals(split.isIsolatedScan(), newSplit.isOffline());
     Assert.assertEquals(split.usesLocalIterators(), newSplit.usesLocalIterators());
     Assert.assertEquals(split.getFetchedColumns(), newSplit.getFetchedColumns());
-    Assert.assertEquals(split.getToken(), newSplit.getToken());
-    Assert.assertEquals(split.getPrincipal(), newSplit.getPrincipal());
-    Assert.assertEquals(split.getInstanceName(), newSplit.getInstanceName());
-    Assert.assertEquals(split.getZooKeepers(), newSplit.getZooKeepers());
     Assert.assertEquals(split.getIterators(), newSplit.getIterators());
     Assert.assertEquals(split.getLogLevel(), newSplit.getLogLevel());
   }
diff --git a/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/impl/BatchInputSplitTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/impl/BatchInputSplitTest.java
index a5fb3bb..0e4a1ce 100644
--- a/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/impl/BatchInputSplitTest.java
+++ b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/impl/BatchInputSplitTest.java
@@ -30,12 +30,10 @@ import java.util.Set;
 
 import org.apache.accumulo.core.client.IteratorSetting;
 import org.apache.accumulo.core.client.impl.Table;
-import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.iterators.user.SummingCombiner;
 import org.apache.accumulo.core.iterators.user.WholeRowIterator;
-import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.core.util.Pair;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Level;
@@ -88,12 +86,7 @@ public class BatchInputSplitTest {
     iterators.add(setting);
 
     split.setTableName("table");
-    split.setAuths(new Authorizations("foo"));
     split.setFetchedColumns(fetchedColumns);
-    split.setToken(new PasswordToken("password"));
-    split.setPrincipal("root");
-    split.setInstanceName("instance");
-    split.setZooKeepers("localhost");
     split.setIterators(iterators);
     split.setLogLevel(Level.WARN);
 
@@ -111,14 +104,8 @@ public class BatchInputSplitTest {
     Assert.assertArrayEquals(split.getLocations(), newSplit.getLocations());
 
     Assert.assertEquals(split.getTableName(), newSplit.getTableName());
-    Assert.assertEquals(split.getAuths(), newSplit.getAuths());
     Assert.assertEquals(split.getFetchedColumns(), newSplit.getFetchedColumns());
-    Assert.assertEquals(split.getToken(), newSplit.getToken());
-    Assert.assertEquals(split.getPrincipal(), newSplit.getPrincipal());
-    Assert.assertEquals(split.getInstanceName(), newSplit.getInstanceName());
-    Assert.assertEquals(split.getZooKeepers(), newSplit.getZooKeepers());
     Assert.assertEquals(split.getIterators(), newSplit.getIterators());
     Assert.assertEquals(split.getLogLevel(), newSplit.getLogLevel());
   }
-
 }
diff --git a/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBaseTest.java b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBaseTest.java
index 9cdd950..974f21d 100644
--- a/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBaseTest.java
+++ b/client/mapreduce/src/test/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBaseTest.java
@@ -18,29 +18,29 @@ package org.apache.accumulo.core.client.mapreduce.lib.impl;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
 import java.util.Base64;
+import java.util.Properties;
 
 import org.apache.accumulo.core.Constants;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.ClientConfiguration;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
-import org.apache.accumulo.core.client.ZooKeeperInstance;
+import org.apache.accumulo.core.client.ConnectionInfo;
+import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken.AuthenticationTokenSerializer;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
+import org.junit.Assert;
 import org.junit.Test;
 
-/**
- *
- */
 public class ConfiguratorBaseTest {
 
-  private static enum PrivateTestingEnum {
+  private enum PrivateTestingEnum {
     SOMETHING, SOMETHING_ELSE
   }
 
@@ -53,8 +53,7 @@ public class ConfiguratorBaseTest {
   }
 
   @Test
-  public void testSetConnectorInfoClassOfQConfigurationStringAuthenticationToken()
-      throws AccumuloSecurityException {
+  public void testSetConnectorInfoClassOfQConfigurationStringAuthenticationToken() {
     Configuration conf = new Configuration();
     assertFalse(ConfiguratorBase.isConnectorInfoSet(this.getClass(), conf));
     ConfiguratorBase.setConnectorInfo(this.getClass(), conf, "testUser",
@@ -62,6 +61,7 @@ public class ConfiguratorBaseTest {
     assertTrue(ConfiguratorBase.isConnectorInfoSet(this.getClass(), conf));
     assertEquals("testUser", ConfiguratorBase.getPrincipal(this.getClass(), conf));
     AuthenticationToken token = ConfiguratorBase.getAuthenticationToken(this.getClass(), conf);
+    assertNotNull(token);
     assertEquals(PasswordToken.class, token.getClass());
     assertEquals(new PasswordToken("testPassword"), token);
     assertEquals(
@@ -85,27 +85,38 @@ public class ConfiguratorBaseTest {
   }
 
   @Test
+  public void testSetConnectionInfo() {
+    Configuration conf = new Configuration();
+    ConnectionInfo info = Connector.builder().forInstance("myinstance", "myzookeepers")
+        .usingPassword("user", "pass").info();
+    ConfiguratorBase.setConnectionInfo(this.getClass(), conf, info);
+    ConnectionInfo info2 = ConfiguratorBase.getConnectionInfo(this.getClass(), conf);
+    Assert.assertEquals("myinstance", info2.getInstanceName());
+    Assert.assertEquals("myzookeepers", info2.getZooKeepers());
+    Assert.assertEquals("user", info2.getPrincipal());
+    Assert.assertTrue(info2.getAuthenticationToken() instanceof PasswordToken);
+  }
+
+  @SuppressWarnings("deprecation")
+  @Test
   public void testSetZooKeeperInstance() {
     Configuration conf = new Configuration();
     ConfiguratorBase.setZooKeeperInstance(this.getClass(), conf,
-        ClientConfiguration.create().withInstance("testInstanceName").withZkHosts("testZooKeepers")
-            .withSsl(true).withZkTimeout(1234));
-    ClientConfiguration clientConf = ClientConfiguration.deserialize(conf.get(ConfiguratorBase
-        .enumToConfKey(this.getClass(), ConfiguratorBase.InstanceOpts.CLIENT_CONFIG)));
-    assertEquals("testInstanceName", clientConf.get(ClientProperty.INSTANCE_NAME));
-    assertEquals("testZooKeepers", clientConf.get(ClientProperty.INSTANCE_ZK_HOST));
-    assertEquals("true", clientConf.get(ClientProperty.INSTANCE_RPC_SSL_ENABLED));
-    assertEquals("1234", clientConf.get(ClientProperty.INSTANCE_ZK_TIMEOUT));
-    assertEquals(ZooKeeperInstance.class.getSimpleName(), conf
-        .get(ConfiguratorBase.enumToConfKey(this.getClass(), ConfiguratorBase.InstanceOpts.TYPE)));
-
-    // We want to test that the correct parameters from the config get passed to the ZKI
-    // but that keeps us from being able to make assertions on a valid instance name at ZKI creation
-    // Instance instance = ConfiguratorBase.getInstance(this.getClass(), conf);
-    // assertEquals(ZooKeeperInstance.class.getName(), instance.getClass().getName());
-    // assertEquals("testInstanceName", ((ZooKeeperInstance) instance).getInstanceName());
-    // assertEquals("testZooKeepers", ((ZooKeeperInstance) instance).getZooKeepers());
-    // assertEquals(1234000, ((ZooKeeperInstance) instance).getZooKeepersSessionTimeOut());
+        org.apache.accumulo.core.client.ClientConfiguration.create()
+            .withInstance("testInstanceName").withZkHosts("testZooKeepers").withSsl(true)
+            .withZkTimeout(1234));
+
+    org.apache.accumulo.core.client.ClientConfiguration clientConf = ConfiguratorBase
+        .getClientConfiguration(this.getClass(), conf);
+    assertEquals("testInstanceName", clientConf
+        .get(org.apache.accumulo.core.client.ClientConfiguration.ClientProperty.INSTANCE_NAME));
+
+    Properties props = ConfiguratorBase.getConnectionInfo(this.getClass(), conf).getProperties();
+    assertEquals("testInstanceName", props.getProperty(ClientProperty.INSTANCE_NAME.getKey()));
+    assertEquals("testZooKeepers", props.getProperty(ClientProperty.INSTANCE_ZOOKEEPERS.getKey()));
+    assertEquals("true", props.getProperty(ClientProperty.SSL_ENABLED.getKey()));
+    assertEquals("1234",
+        props.getProperty(ClientProperty.INSTANCE_ZOOKEEPERS_TIMEOUT_SEC.getKey()));
   }
 
   @Test
diff --git a/core/src/main/java/org/apache/accumulo/core/client/impl/ConnectionInfoImpl.java b/core/src/main/java/org/apache/accumulo/core/client/impl/ConnectionInfoImpl.java
index 16701fc..63a7fc2 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/impl/ConnectionInfoImpl.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/impl/ConnectionInfoImpl.java
@@ -28,7 +28,7 @@ public class ConnectionInfoImpl implements ConnectionInfo {
   private Properties properties;
   private AuthenticationToken token;
 
-  ConnectionInfoImpl(Properties properties, AuthenticationToken token) {
+  public ConnectionInfoImpl(Properties properties, AuthenticationToken token) {
     this.properties = properties;
     this.token = token;
   }
diff --git a/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloClusterImpl.java b/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloClusterImpl.java
index 665504e..4a68a16 100644
--- a/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloClusterImpl.java
+++ b/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloClusterImpl.java
@@ -833,7 +833,8 @@ public class MiniAccumuloClusterImpl implements AccumuloCluster {
       try {
         Instance instance = new ZooKeeperInstance(getClientConfig());
         ClientContext context = new ClientContext(instance,
-            new Credentials("root", new PasswordToken("unchecked")), getClientConfig());
+            new Credentials("root", new PasswordToken("unchecked")),
+            getConnectionInfo().getProperties());
         client = MasterClient.getConnectionWithRetry(context);
         return client.getMasterStats(Tracer.traceInfo(), context.rpcCreds());
       } catch (ThriftSecurityException exception) {
diff --git a/test/src/main/java/org/apache/accumulo/test/mapred/AccumuloInputFormatIT.java b/test/src/main/java/org/apache/accumulo/test/mapred/AccumuloInputFormatIT.java
index 149f11f..d451cb4 100644
--- a/test/src/main/java/org/apache/accumulo/test/mapred/AccumuloInputFormatIT.java
+++ b/test/src/main/java/org/apache/accumulo/test/mapred/AccumuloInputFormatIT.java
@@ -239,11 +239,7 @@ public class AccumuloInputFormatIT extends AccumuloClusterHarness {
 
     RangeInputSplit risplit = (RangeInputSplit) split;
 
-    Assert.assertEquals(getAdminPrincipal(), risplit.getPrincipal());
     Assert.assertEquals(table, risplit.getTableName());
-    Assert.assertEquals(getAdminToken(), risplit.getToken());
-    Assert.assertEquals(auths, risplit.getAuths());
-    Assert.assertEquals(getConnector().getInstance().getInstanceName(), risplit.getInstanceName());
     Assert.assertEquals(isolated, risplit.isIsolatedScan());
     Assert.assertEquals(localIters, risplit.usesLocalIterators());
     Assert.assertEquals(fetchColumns, risplit.getFetchedColumns());
diff --git a/test/src/main/java/org/apache/accumulo/test/mapred/TokenFileIT.java b/test/src/main/java/org/apache/accumulo/test/mapred/TokenFileIT.java
index 5b2ef85..4f569b7 100644
--- a/test/src/main/java/org/apache/accumulo/test/mapred/TokenFileIT.java
+++ b/test/src/main/java/org/apache/accumulo/test/mapred/TokenFileIT.java
@@ -109,9 +109,8 @@ public class TokenFileIT extends AccumuloClusterHarness {
 
       job.setInputFormat(AccumuloInputFormat.class);
 
-      AccumuloInputFormat.setConnectorInfo(job, user, tokenFile);
       AccumuloInputFormat.setInputTableName(job, table1);
-      AccumuloInputFormat.setZooKeeperInstance(job, getCluster().getClientConfig());
+      AccumuloInputFormat.setConnectionInfo(job, getConnectionInfo());
 
       job.setMapperClass(TestMapper.class);
       job.setMapOutputKeyClass(Key.class);
diff --git a/test/src/main/java/org/apache/accumulo/test/mapreduce/AccumuloInputFormatIT.java b/test/src/main/java/org/apache/accumulo/test/mapreduce/AccumuloInputFormatIT.java
index 1e0c2a7..15df6d8 100644
--- a/test/src/main/java/org/apache/accumulo/test/mapreduce/AccumuloInputFormatIT.java
+++ b/test/src/main/java/org/apache/accumulo/test/mapreduce/AccumuloInputFormatIT.java
@@ -43,7 +43,6 @@ import org.apache.accumulo.core.client.mapreduce.RangeInputSplit;
 import org.apache.accumulo.core.client.mapreduce.impl.BatchInputSplit;
 import org.apache.accumulo.core.client.sample.RowSampler;
 import org.apache.accumulo.core.client.sample.SamplerConfiguration;
-import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Range;
@@ -421,11 +420,7 @@ public class AccumuloInputFormatIT extends AccumuloClusterHarness {
 
     RangeInputSplit risplit = (RangeInputSplit) split;
 
-    Assert.assertEquals(getAdminPrincipal(), risplit.getPrincipal());
     Assert.assertEquals(table, risplit.getTableName());
-    Assert.assertEquals(getAdminToken(), risplit.getToken());
-    Assert.assertEquals(auths, risplit.getAuths());
-    Assert.assertEquals(getConnector().getInstance().getInstanceName(), risplit.getInstanceName());
     Assert.assertEquals(isolated, risplit.isIsolatedScan());
     Assert.assertEquals(localIters, risplit.usesLocalIterators());
     Assert.assertEquals(fetchColumns, risplit.getFetchedColumns());
@@ -451,49 +446,6 @@ public class AccumuloInputFormatIT extends AccumuloClusterHarness {
     assertEquals(1, assertionErrors.get(table + "_cleanup").size());
   }
 
-  @Test
-  public void testPartialFailedInputSplitDelegationToConfiguration() throws Exception {
-    String table = getUniqueNames(1)[0];
-    Connector c = getConnector();
-    c.tableOperations().create(table);
-    BatchWriter bw = c.createBatchWriter(table, new BatchWriterConfig());
-    for (int i = 0; i < 100; i++) {
-      Mutation m = new Mutation(new Text(String.format("%09x", i + 1)));
-      m.put(new Text(), new Text(), new Value(String.format("%09x", i).getBytes()));
-      bw.addMutation(m);
-    }
-    bw.close();
-
-    Assert.assertEquals(1,
-        MRTester.main(new String[] {table, BadPasswordSplitsAccumuloInputFormat.class.getName()}));
-    assertEquals(1, assertionErrors.get(table + "_map").size());
-    // We should fail when the RecordReader fails to get the next key/value pair, because the record
-    // reader is set up with a clientcontext, rather than a
-    // connector, so it doesn't do fast-fail on bad credentials
-    assertEquals(2, assertionErrors.get(table + "_cleanup").size());
-  }
-
-  /**
-   * AccumuloInputFormat which returns an "empty" RangeInputSplit
-   */
-  public static class BadPasswordSplitsAccumuloInputFormat extends AccumuloInputFormat {
-
-    @Override
-    public List<InputSplit> getSplits(JobContext context) throws IOException {
-      List<InputSplit> splits = super.getSplits(context);
-
-      for (InputSplit split : splits) {
-        // @formatter:off
-        org.apache.accumulo.core.client.mapreduce.RangeInputSplit rangeSplit =
-          (org.apache.accumulo.core.client.mapreduce.RangeInputSplit) split;
-        // @formatter:on
-        rangeSplit.setToken(new PasswordToken("anythingelse"));
-      }
-
-      return splits;
-    }
-  }
-
   /**
    * AccumuloInputFormat which returns an "empty" RangeInputSplit
    */
diff --git a/test/src/main/java/org/apache/accumulo/test/mapreduce/TokenFileIT.java b/test/src/main/java/org/apache/accumulo/test/mapreduce/TokenFileIT.java
index 52ccfa4..37fa021 100644
--- a/test/src/main/java/org/apache/accumulo/test/mapreduce/TokenFileIT.java
+++ b/test/src/main/java/org/apache/accumulo/test/mapreduce/TokenFileIT.java
@@ -100,9 +100,8 @@ public class TokenFileIT extends AccumuloClusterHarness {
 
       job.setInputFormatClass(AccumuloInputFormat.class);
 
-      AccumuloInputFormat.setConnectorInfo(job, user, tokenFile);
       AccumuloInputFormat.setInputTableName(job, table1);
-      AccumuloInputFormat.setZooKeeperInstance(job, getCluster().getClientConfig());
+      AccumuloInputFormat.setConnectionInfo(job, getConnectionInfo());
 
       job.setMapperClass(TestMapper.class);
       job.setMapOutputKeyClass(Key.class);
@@ -111,10 +110,9 @@ public class TokenFileIT extends AccumuloClusterHarness {
       job.setOutputKeyClass(Text.class);
       job.setOutputValueClass(Mutation.class);
 
-      AccumuloOutputFormat.setConnectorInfo(job, user, tokenFile);
+      AccumuloOutputFormat.setConnectionInfo(job, getConnectionInfo());
       AccumuloOutputFormat.setCreateTables(job, false);
       AccumuloOutputFormat.setDefaultTableName(job, table2);
-      AccumuloOutputFormat.setZooKeeperInstance(job, getCluster().getClientConfig());
 
       job.setNumReduceTasks(0);
 

-- 
To stop receiving notification emails like this one, please contact
mwalch@apache.org.

Mime
View raw message