accumulo-notifications mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From GitBox <...@apache.org>
Subject [GitHub] mikewalch closed pull request #430: #408 - Removed uses of deprecated ClientConfiguration
Date Tue, 24 Apr 2018 19:37:13 GMT
mikewalch closed pull request #430: #408 - Removed uses of deprecated ClientConfiguration
URL: https://github.com/apache/accumulo/pull/430
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):

diff --git a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java
index f58a8a320e..36792a7c5a 100644
--- a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AbstractInputFormat.java
@@ -271,7 +271,6 @@ public static void setZooKeeperInstance(JobConf job, ClientConfiguration clientC
    *          the Hadoop context for the configured job
    * @return an Accumulo instance
    * @since 1.5.0
-   * @see #setZooKeeperInstance(JobConf, ClientConfiguration)
    */
   protected static Instance getInstance(JobConf job) {
     return InputConfigurator.getInstance(CLASS, job);
@@ -487,8 +486,7 @@ public void initialize(InputSplit inSplit, JobConf job) throws IOException {
       String table = baseSplit.getTableName();
 
       // in case the table name changed, we can still use the previous name for terms of
-      // configuration,
-      // but the scanner will use the table id resolved at job setup time
+      // configuration, but the scanner will use the table id resolved at job setup time
       InputTableConfig tableConfig = getInputTableConfig(job, baseSplit.getTableName());
 
       log.debug("Creating connector with user: " + principal);
diff --git a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloMultiTableInputFormat.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloMultiTableInputFormat.java
index 871efa8894..12b4dbba46 100644
--- a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloMultiTableInputFormat.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloMultiTableInputFormat.java
@@ -19,7 +19,6 @@
 import java.io.IOException;
 import java.util.Map;
 
-import org.apache.accumulo.core.client.ClientConfiguration;
 import org.apache.accumulo.core.client.ConnectionInfo;
 import org.apache.accumulo.core.client.mapred.InputFormatBase.RecordReaderBase;
 import org.apache.accumulo.core.client.mapreduce.InputTableConfig;
@@ -41,9 +40,7 @@
  *
  * <ul>
  * <li>{@link AccumuloInputFormat#setConnectionInfo(JobConf, ConnectionInfo)}
- * <li>{@link AccumuloInputFormat#setConnectorInfo(JobConf, String, String)}
  * <li>{@link AccumuloInputFormat#setScanAuthorizations(JobConf, org.apache.accumulo.core.security.Authorizations)}
- * <li>{@link AccumuloInputFormat#setZooKeeperInstance(JobConf, ClientConfiguration)}
  * <li>{@link AccumuloMultiTableInputFormat#setInputTableConfigs(org.apache.hadoop.mapred.JobConf, java.util.Map)}
  * </ul>
  *
diff --git a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
index 1daf9e3d1a..c273892d3b 100644
--- a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
@@ -236,7 +236,6 @@ public static void setZooKeeperInstance(JobConf job, ClientConfiguration clientC
    *          the Hadoop context for the configured job
    * @return an Accumulo instance
    * @since 1.5.0
-   * @see #setZooKeeperInstance(JobConf, ClientConfiguration)
    */
   protected static Instance getInstance(JobConf job) {
     return OutputConfigurator.getInstance(CLASS, job);
diff --git a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
index 2b54efe53b..b7adbbb2e4 100644
--- a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AbstractInputFormat.java
@@ -274,7 +274,6 @@ public static void setZooKeeperInstance(Job job, ClientConfiguration clientConfi
    *          the Hadoop context for the configured job
    * @return an Accumulo instance
    * @since 1.5.0
-   * @see #setZooKeeperInstance(Job, ClientConfiguration)
    */
   protected static Instance getInstance(JobContext context) {
     return InputConfigurator.getInstance(CLASS, context.getConfiguration());
diff --git a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java
index 9d73a55ce9..8c94a7873b 100644
--- a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloMultiTableInputFormat.java
@@ -22,10 +22,9 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.accumulo.core.client.ClientConfiguration;
+import org.apache.accumulo.core.client.ConnectionInfo;
 import org.apache.accumulo.core.client.IteratorSetting;
 import org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator;
-import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.security.Authorizations;
@@ -43,9 +42,8 @@
  * The user must specify the following via static configurator methods:
  *
  * <ul>
- * <li>{@link AccumuloMultiTableInputFormat#setConnectorInfo(Job, String, AuthenticationToken)}
+ * <li>{@link AccumuloMultiTableInputFormat#setConnectionInfo(Job, ConnectionInfo)}
  * <li>{@link AccumuloMultiTableInputFormat#setScanAuthorizations(Job, Authorizations)}
- * <li>{@link AccumuloMultiTableInputFormat#setZooKeeperInstance(Job, ClientConfiguration)}
  * <li>{@link AccumuloMultiTableInputFormat#setInputTableConfigs(Job, Map)}
  * </ul>
  *
diff --git a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
index ae099c6d71..90a0a62fac 100644
--- a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
@@ -238,7 +238,6 @@ public static void setZooKeeperInstance(Job job, ClientConfiguration clientConfi
    *          the Hadoop context for the configured job
    * @return an Accumulo instance
    * @since 1.5.0
-   * @see #setZooKeeperInstance(Job, ClientConfiguration)
    */
   protected static Instance getInstance(JobContext context) {
     return OutputConfigurator.getInstance(CLASS, context.getConfiguration());
diff --git a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java
index 7bfd4f9afe..9223b5ab3c 100644
--- a/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java
+++ b/client/mapreduce/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/impl/ConfiguratorBase.java
@@ -350,7 +350,6 @@ public static void setZooKeeperInstance(Class<?> implementingClass, Configuratio
    *          the Hadoop configuration object to configure
    * @return an Accumulo instance
    * @since 1.6.0
-   * @see #setZooKeeperInstance(Class, Configuration, ClientConfiguration)
    */
   public static Instance getInstance(Class<?> implementingClass, Configuration conf) {
     String instanceType = conf.get(enumToConfKey(implementingClass, InstanceOpts.TYPE), "");
diff --git a/core/src/main/java/org/apache/accumulo/core/client/ZooKeeperInstance.java b/core/src/main/java/org/apache/accumulo/core/client/ZooKeeperInstance.java
index 27c5fea4e0..d7e97cf39b 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/ZooKeeperInstance.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/ZooKeeperInstance.java
@@ -26,7 +26,7 @@
 import java.util.concurrent.TimeUnit;
 
 import org.apache.accumulo.core.Constants;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
+import org.apache.accumulo.core.client.impl.ClientConfConverter;
 import org.apache.accumulo.core.client.impl.ClientContext;
 import org.apache.accumulo.core.client.impl.ConnectorImpl;
 import org.apache.accumulo.core.client.impl.Credentials;
@@ -75,6 +75,7 @@
 
   private final int zooKeepersSessionTimeOut;
 
+  @SuppressWarnings("deprecation")
   private ClientConfiguration clientConf;
 
   /**
@@ -85,6 +86,7 @@
    *          A comma separated list of zoo keeper server locations. Each location can contain an
    *          optional port, of the format host:port.
    */
+  @SuppressWarnings("deprecation")
   public ZooKeeperInstance(String instanceName, String zooKeepers) {
     this(ClientConfiguration.loadDefault().withInstance(instanceName).withZkHosts(zooKeepers));
   }
@@ -137,17 +139,18 @@ public ZooKeeperInstance(UUID instanceId, String zooKeepers, int sessionTimeout)
         .withZkTimeout(sessionTimeout));
   }
 
+  @SuppressWarnings("deprecation")
   ZooKeeperInstance(ClientConfiguration config, ZooCacheFactory zcf) {
     checkArgument(config != null, "config is null");
     this.clientConf = config;
-    this.instanceId = clientConf.get(ClientProperty.INSTANCE_ID);
-    this.instanceName = clientConf.get(ClientProperty.INSTANCE_NAME);
+    this.instanceId = clientConf.get(ClientConfiguration.ClientProperty.INSTANCE_ID);
+    this.instanceName = clientConf.get(ClientConfiguration.ClientProperty.INSTANCE_NAME);
     if ((instanceId == null) == (instanceName == null))
       throw new IllegalArgumentException(
           "Expected exactly one of instanceName and instanceId to be set");
-    this.zooKeepers = clientConf.get(ClientProperty.INSTANCE_ZK_HOST);
+    this.zooKeepers = clientConf.get(ClientConfiguration.ClientProperty.INSTANCE_ZK_HOST);
     this.zooKeepersSessionTimeOut = (int) ConfigurationTypeHelper
-        .getTimeInMillis(clientConf.get(ClientProperty.INSTANCE_ZK_TIMEOUT));
+        .getTimeInMillis(clientConf.get(ClientConfiguration.ClientProperty.INSTANCE_ZK_TIMEOUT));
     zooCache = zcf.getZooCache(zooKeepers, zooKeepersSessionTimeOut);
     if (null != instanceName) {
       // Validates that the provided instanceName actually exists
@@ -162,6 +165,7 @@ public ZooKeeperInstance(UUID instanceId, String zooKeepers, int sessionTimeout)
    *          specific to Accumulo.
    * @since 1.9.0
    */
+  @SuppressWarnings("deprecation")
   public ZooKeeperInstance(ClientConfiguration config) {
     this(config, new ZooCacheFactory());
   }
@@ -282,8 +286,8 @@ public Connector getConnector(String user, ByteBuffer pass)
   @Override
   public Connector getConnector(String principal, AuthenticationToken token)
       throws AccumuloException, AccumuloSecurityException {
-    return new ConnectorImpl(
-        new ClientContext(this, new Credentials(principal, token), clientConf));
+    return new ConnectorImpl(new ClientContext(this, new Credentials(principal, token),
+        ClientConfConverter.toProperties(clientConf)));
   }
 
   @Override
diff --git a/core/src/main/java/org/apache/accumulo/core/client/impl/ClientConfConverter.java b/core/src/main/java/org/apache/accumulo/core/client/impl/ClientConfConverter.java
index d5080f089e..16cb28601b 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/impl/ClientConfConverter.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/impl/ClientConfConverter.java
@@ -16,17 +16,31 @@
  */
 package org.apache.accumulo.core.client.impl;
 
+import static org.apache.accumulo.core.client.ClientConfiguration.ClientProperty.KERBEROS_SERVER_PRIMARY;
+
+import java.io.IOException;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Properties;
+import java.util.function.Predicate;
 
 import org.apache.accumulo.core.client.ClientConfiguration;
+import org.apache.accumulo.core.conf.AccumuloConfiguration;
 import org.apache.accumulo.core.conf.ClientProperty;
+import org.apache.accumulo.core.conf.CredentialProviderFactoryShim;
+import org.apache.accumulo.core.conf.DefaultConfiguration;
+import org.apache.accumulo.core.conf.Property;
+import org.apache.accumulo.core.rpc.SaslConnectionParams;
+import org.apache.hadoop.security.authentication.util.KerberosName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @SuppressWarnings("deprecation")
 public class ClientConfConverter {
 
+  private static final Logger log = LoggerFactory.getLogger(ClientConfConverter.class);
   private static Map<String,String> confProps = new HashMap<>();
   private static Map<String,String> propsConf = new HashMap<>();
 
@@ -56,7 +70,7 @@
     propsConf.put(ClientProperty.SASL_QOP.getKey(),
         ClientConfiguration.ClientProperty.RPC_SASL_QOP.getKey());
     propsConf.put(ClientProperty.SASL_KERBEROS_SERVER_PRIMARY.getKey(),
-        ClientConfiguration.ClientProperty.KERBEROS_SERVER_PRIMARY.getKey());
+        KERBEROS_SERVER_PRIMARY.getKey());
 
     for (Map.Entry<String,String> entry : propsConf.entrySet()) {
       confProps.put(entry.getValue(), entry.getKey());
@@ -99,4 +113,158 @@ public static Properties toProperties(ClientConfiguration clientConf) {
     }
     return props;
   }
+
+  public static Properties toProperties(AccumuloConfiguration config) {
+    return toProperties(toClientConf(config));
+  }
+
+  public static AccumuloConfiguration toAccumuloConf(Properties properties) {
+    return toAccumuloConf(toClientConf(properties));
+  }
+
+  /**
+   * A utility method for converting client configuration to a standard configuration object for use
+   * internally.
+   *
+   * @param config
+   *          the original {@link ClientConfiguration}
+   * @return the client configuration presented in the form of an {@link AccumuloConfiguration}
+   */
+  public static AccumuloConfiguration toAccumuloConf(final ClientConfiguration config) {
+
+    final AccumuloConfiguration defaults = DefaultConfiguration.getInstance();
+
+    return new AccumuloConfiguration() {
+
+      @Override
+      public String get(Property property) {
+        final String key = property.getKey();
+
+        // Attempt to load sensitive properties from a CredentialProvider, if configured
+        if (property.isSensitive()) {
+          org.apache.hadoop.conf.Configuration hadoopConf = getHadoopConfiguration();
+          if (null != hadoopConf) {
+            try {
+              char[] value = CredentialProviderFactoryShim
+                  .getValueFromCredentialProvider(hadoopConf, key);
+              if (null != value) {
+                log.trace("Loaded sensitive value for {} from CredentialProvider", key);
+                return new String(value);
+              } else {
+                log.trace("Tried to load sensitive value for {} from CredentialProvider, "
+                    + "but none was found", key);
+              }
+            } catch (IOException e) {
+              log.warn("Failed to extract sensitive property ({}) from Hadoop CredentialProvider,"
+                  + " falling back to base AccumuloConfiguration", key, e);
+            }
+          }
+        }
+
+        if (config.containsKey(key))
+          return config.getString(key);
+        else {
+          // Reconstitute the server kerberos property from the client config
+          if (Property.GENERAL_KERBEROS_PRINCIPAL == property) {
+            if (config.containsKey(KERBEROS_SERVER_PRIMARY.getKey())) {
+              // Avoid providing a realm since we don't know what it is...
+              return config.getString(KERBEROS_SERVER_PRIMARY.getKey()) + "/_HOST@"
+                  + SaslConnectionParams.getDefaultRealm();
+            }
+          }
+          return defaults.get(property);
+        }
+      }
+
+      @Override
+      public void getProperties(Map<String,String> props, Predicate<String> filter) {
+        defaults.getProperties(props, filter);
+
+        Iterator<String> keyIter = config.getKeys();
+        while (keyIter.hasNext()) {
+          String key = keyIter.next();
+          if (filter.test(key))
+            props.put(key, config.getString(key));
+        }
+
+        // Two client props that don't exist on the server config. Client doesn't need to know about
+        // the Kerberos instance from the principle, but servers do
+        // Automatically reconstruct the server property when converting a client config.
+        if (props.containsKey(KERBEROS_SERVER_PRIMARY.getKey())) {
+          final String serverPrimary = props.remove(KERBEROS_SERVER_PRIMARY.getKey());
+          if (filter.test(Property.GENERAL_KERBEROS_PRINCIPAL.getKey())) {
+            // Use the _HOST expansion. It should be unnecessary in "client land".
+            props.put(Property.GENERAL_KERBEROS_PRINCIPAL.getKey(),
+                serverPrimary + "/_HOST@" + SaslConnectionParams.getDefaultRealm());
+          }
+        }
+
+        // Attempt to load sensitive properties from a CredentialProvider, if configured
+        org.apache.hadoop.conf.Configuration hadoopConf = getHadoopConfiguration();
+        if (null != hadoopConf) {
+          try {
+            for (String key : CredentialProviderFactoryShim.getKeys(hadoopConf)) {
+              if (!Property.isValidPropertyKey(key) || !Property.isSensitive(key)) {
+                continue;
+              }
+
+              if (filter.test(key)) {
+                char[] value = CredentialProviderFactoryShim
+                    .getValueFromCredentialProvider(hadoopConf, key);
+                if (null != value) {
+                  props.put(key, new String(value));
+                }
+              }
+            }
+          } catch (IOException e) {
+            log.warn("Failed to extract sensitive properties from Hadoop CredentialProvider, "
+                + "falling back to accumulo-site.xml", e);
+          }
+        }
+      }
+
+      private org.apache.hadoop.conf.Configuration getHadoopConfiguration() {
+        String credProviderPaths = config
+            .getString(Property.GENERAL_SECURITY_CREDENTIAL_PROVIDER_PATHS.getKey());
+        if (null != credProviderPaths && !credProviderPaths.isEmpty()) {
+          org.apache.hadoop.conf.Configuration hConf = new org.apache.hadoop.conf.Configuration();
+          hConf.set(CredentialProviderFactoryShim.CREDENTIAL_PROVIDER_PATH, credProviderPaths);
+          return hConf;
+        }
+
+        log.trace("Did not find credential provider configuration in ClientConfiguration");
+
+        return null;
+      }
+    };
+  }
+
+  public static ClientConfiguration toClientConf(AccumuloConfiguration conf) {
+    ClientConfiguration clientConf = ClientConfiguration.create();
+
+    // Servers will only have the full principal in their configuration -- parse the
+    // primary and realm from it.
+    final String serverPrincipal = conf.get(Property.GENERAL_KERBEROS_PRINCIPAL);
+
+    final KerberosName krbName;
+    if (serverPrincipal != null && !serverPrincipal.isEmpty()) {
+      krbName = new KerberosName(serverPrincipal);
+      clientConf.setProperty(KERBEROS_SERVER_PRIMARY, krbName.getServiceName());
+    }
+
+    HashSet<String> clientKeys = new HashSet<>();
+    for (ClientConfiguration.ClientProperty prop : ClientConfiguration.ClientProperty.values()) {
+      clientKeys.add(prop.getKey());
+    }
+
+    String key;
+    for (Map.Entry<String,String> entry : conf) {
+      key = entry.getKey();
+      if (clientKeys.contains(key)) {
+        clientConf.setProperty(key, entry.getValue());
+      }
+    }
+    return clientConf;
+  }
+
 }
diff --git a/core/src/main/java/org/apache/accumulo/core/client/impl/ClientContext.java b/core/src/main/java/org/apache/accumulo/core/client/impl/ClientContext.java
index 13880ab4d4..028480a8cf 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/impl/ClientContext.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/impl/ClientContext.java
@@ -19,11 +19,8 @@
 import static com.google.common.base.Preconditions.checkArgument;
 import static java.util.Objects.requireNonNull;
 
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.Map;
+import java.util.Properties;
 import java.util.concurrent.TimeUnit;
-import java.util.function.Predicate;
 import java.util.function.Supplier;
 
 import org.apache.accumulo.core.client.AccumuloException;
@@ -35,14 +32,10 @@
 import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.ZooKeeperInstance;
 import org.apache.accumulo.core.conf.AccumuloConfiguration;
-import org.apache.accumulo.core.conf.CredentialProviderFactoryShim;
-import org.apache.accumulo.core.conf.DefaultConfiguration;
 import org.apache.accumulo.core.conf.Property;
 import org.apache.accumulo.core.rpc.SaslConnectionParams;
 import org.apache.accumulo.core.rpc.SslConnectionParams;
 import org.apache.accumulo.core.security.thrift.TCredentials;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Suppliers;
 
@@ -57,8 +50,6 @@
  */
 public class ClientContext {
 
-  private static final Logger log = LoggerFactory.getLogger(ClientContext.class);
-
   protected final Instance inst;
   private Credentials creds;
   private ClientConfiguration clientConf;
@@ -85,9 +76,10 @@ public ClientContext(ConnectionInfo connectionInfo) {
         ConnectionInfoFactory.getBatchWriterConfig(connectionInfo));
   }
 
-  /**
-   * Instantiate a client context
-   */
+  public ClientContext(Instance instance, Credentials credentials, Properties clientProps) {
+    this(instance, credentials, ClientConfConverter.toClientConf(clientProps));
+  }
+
   public ClientContext(Instance instance, Credentials credentials, ClientConfiguration clientConf) {
     this(instance, credentials, clientConf, new BatchWriterConfig());
   }
@@ -95,7 +87,7 @@ public ClientContext(Instance instance, Credentials credentials, ClientConfigura
   public ClientContext(Instance instance, Credentials credentials, ClientConfiguration clientConf,
       BatchWriterConfig batchWriterConfig) {
     this(instance, credentials,
-        convertClientConfig(requireNonNull(clientConf, "clientConf is null")));
+        ClientConfConverter.toAccumuloConf(requireNonNull(clientConf, "clientConf is null")));
     this.clientConf = clientConf;
     this.batchWriterConfig = batchWriterConfig;
   }
@@ -111,22 +103,20 @@ public ClientContext(Instance instance, Credentials credentials,
     rpcConf = requireNonNull(serverConf, "serverConf is null");
     clientConf = null;
 
-    saslSupplier = new Supplier<SaslConnectionParams>() {
-      @Override
-      public SaslConnectionParams get() {
-        // Use the clientConf if we have it
-        if (null != clientConf) {
-          if (!clientConf.hasSasl()) {
-            return null;
-          }
-          return new SaslConnectionParams(clientConf, getCredentials().getToken());
-        }
-        AccumuloConfiguration conf = getConfiguration();
-        if (!conf.getBoolean(Property.INSTANCE_RPC_SASL_ENABLED)) {
+    saslSupplier = () -> {
+      // Use the clientConf if we have it
+      if (null != clientConf) {
+        if (!clientConf.hasSasl()) {
           return null;
         }
-        return new SaslConnectionParams(conf, getCredentials().getToken());
+        return new SaslConnectionParams(ClientConfConverter.toProperties(clientConf),
+            getCredentials().getToken());
+      }
+      AccumuloConfiguration conf = getConfiguration();
+      if (!conf.getBoolean(Property.INSTANCE_RPC_SASL_ENABLED)) {
+        return null;
       }
+      return new SaslConnectionParams(conf, getCredentials().getToken());
     };
 
     timeoutSupplier = memoizeWithExpiration(
@@ -143,6 +133,10 @@ public Instance getInstance() {
     return inst;
   }
 
+  public ConnectionInfo getConnectionInfo() {
+    return new ConnectionInfoImpl(ClientConfConverter.toProperties(clientConf), creds.getToken());
+  }
+
   /**
    * Retrieve the credentials used to construct this context
    */
@@ -224,126 +218,4 @@ public synchronized TCredentials rpcCreds() {
     return rpcCreds;
   }
 
-  /**
-   * A utility method for converting client configuration to a standard configuration object for use
-   * internally.
-   *
-   * @param config
-   *          the original {@link ClientConfiguration}
-   * @return the client configuration presented in the form of an {@link AccumuloConfiguration}
-   */
-  public static AccumuloConfiguration convertClientConfig(final ClientConfiguration config) {
-
-    final AccumuloConfiguration defaults = DefaultConfiguration.getInstance();
-
-    return new AccumuloConfiguration() {
-
-      @Override
-      public String get(Property property) {
-        final String key = property.getKey();
-
-        // Attempt to load sensitive properties from a CredentialProvider, if configured
-        if (property.isSensitive()) {
-          org.apache.hadoop.conf.Configuration hadoopConf = getHadoopConfiguration();
-          if (null != hadoopConf) {
-            try {
-              char[] value = CredentialProviderFactoryShim
-                  .getValueFromCredentialProvider(hadoopConf, key);
-              if (null != value) {
-                log.trace("Loaded sensitive value for {} from CredentialProvider", key);
-                return new String(value);
-              } else {
-                log.trace("Tried to load sensitive value for {} from CredentialProvider, "
-                    + "but none was found", key);
-              }
-            } catch (IOException e) {
-              log.warn("Failed to extract sensitive property ({}) from Hadoop CredentialProvider,"
-                  + " falling back to base AccumuloConfiguration", key, e);
-            }
-          }
-        }
-
-        if (config.containsKey(key))
-          return config.getString(key);
-        else {
-          // Reconstitute the server kerberos property from the client config
-          if (Property.GENERAL_KERBEROS_PRINCIPAL == property) {
-            if (config
-                .containsKey(ClientConfiguration.ClientProperty.KERBEROS_SERVER_PRIMARY.getKey())) {
-              // Avoid providing a realm since we don't know what it is...
-              return config
-                  .getString(ClientConfiguration.ClientProperty.KERBEROS_SERVER_PRIMARY.getKey())
-                  + "/_HOST@" + SaslConnectionParams.getDefaultRealm();
-            }
-          }
-          return defaults.get(property);
-        }
-      }
-
-      @Override
-      public void getProperties(Map<String,String> props, Predicate<String> filter) {
-        defaults.getProperties(props, filter);
-
-        Iterator<String> keyIter = config.getKeys();
-        while (keyIter.hasNext()) {
-          String key = keyIter.next().toString();
-          if (filter.test(key))
-            props.put(key, config.getString(key));
-        }
-
-        // Two client props that don't exist on the server config. Client doesn't need to know about
-        // the Kerberos instance from the principle, but servers do
-        // Automatically reconstruct the server property when converting a client config.
-        if (props
-            .containsKey(ClientConfiguration.ClientProperty.KERBEROS_SERVER_PRIMARY.getKey())) {
-          final String serverPrimary = props
-              .remove(ClientConfiguration.ClientProperty.KERBEROS_SERVER_PRIMARY.getKey());
-          if (filter.test(Property.GENERAL_KERBEROS_PRINCIPAL.getKey())) {
-            // Use the _HOST expansion. It should be unnecessary in "client land".
-            props.put(Property.GENERAL_KERBEROS_PRINCIPAL.getKey(),
-                serverPrimary + "/_HOST@" + SaslConnectionParams.getDefaultRealm());
-          }
-        }
-
-        // Attempt to load sensitive properties from a CredentialProvider, if configured
-        org.apache.hadoop.conf.Configuration hadoopConf = getHadoopConfiguration();
-        if (null != hadoopConf) {
-          try {
-            for (String key : CredentialProviderFactoryShim.getKeys(hadoopConf)) {
-              if (!Property.isValidPropertyKey(key) || !Property.isSensitive(key)) {
-                continue;
-              }
-
-              if (filter.test(key)) {
-                char[] value = CredentialProviderFactoryShim
-                    .getValueFromCredentialProvider(hadoopConf, key);
-                if (null != value) {
-                  props.put(key, new String(value));
-                }
-              }
-            }
-          } catch (IOException e) {
-            log.warn("Failed to extract sensitive properties from Hadoop CredentialProvider, "
-                + "falling back to accumulo-site.xml", e);
-          }
-        }
-      }
-
-      private org.apache.hadoop.conf.Configuration getHadoopConfiguration() {
-        String credProviderPaths = config
-            .getString(Property.GENERAL_SECURITY_CREDENTIAL_PROVIDER_PATHS.getKey());
-        if (null != credProviderPaths && !credProviderPaths.isEmpty()) {
-          org.apache.hadoop.conf.Configuration hConf = new org.apache.hadoop.conf.Configuration();
-          hConf.set(CredentialProviderFactoryShim.CREDENTIAL_PROVIDER_PATH, credProviderPaths);
-          return hConf;
-        }
-
-        log.trace("Did not find credential provider configuration in ClientConfiguration");
-
-        return null;
-      }
-    };
-
-  }
-
 }
diff --git a/core/src/main/java/org/apache/accumulo/core/rpc/SaslConnectionParams.java b/core/src/main/java/org/apache/accumulo/core/rpc/SaslConnectionParams.java
index 253af4a7a8..3d81e0cf54 100644
--- a/core/src/main/java/org/apache/accumulo/core/rpc/SaslConnectionParams.java
+++ b/core/src/main/java/org/apache/accumulo/core/rpc/SaslConnectionParams.java
@@ -21,23 +21,20 @@
 import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Map;
-import java.util.Map.Entry;
+import java.util.Properties;
 
 import javax.security.auth.callback.CallbackHandler;
 import javax.security.sasl.Sasl;
 
-import org.apache.accumulo.core.client.ClientConfiguration;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
+import org.apache.accumulo.core.client.impl.ClientConfConverter;
 import org.apache.accumulo.core.client.impl.DelegationTokenImpl;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.client.security.tokens.KerberosToken;
 import org.apache.accumulo.core.conf.AccumuloConfiguration;
-import org.apache.accumulo.core.conf.Property;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -129,16 +126,16 @@ public static SaslMechanism get(String mechanismName) {
   protected final Map<String,String> saslProperties;
 
   public SaslConnectionParams(AccumuloConfiguration conf, AuthenticationToken token) {
-    this(ClientConfiguration.fromMap(getProperties(conf)), token);
+    this(ClientConfConverter.toProperties(conf), token);
   }
 
-  public SaslConnectionParams(ClientConfiguration conf, AuthenticationToken token) {
-    requireNonNull(conf, "Configuration was null");
+  public SaslConnectionParams(Properties properties, AuthenticationToken token) {
+    requireNonNull(properties, "Properties was null");
     requireNonNull(token, "AuthenticationToken was null");
 
     saslProperties = new HashMap<>();
     updatePrincipalFromUgi();
-    updateFromConfiguration(conf);
+    updateFromConfiguration(properties);
     updateFromToken(token);
   }
 
@@ -156,38 +153,6 @@ protected void updateFromToken(AuthenticationToken token) {
     }
   }
 
-  protected static Map<String,String> getProperties(AccumuloConfiguration conf) {
-    final Map<String,String> clientProperties = new HashMap<>();
-
-    // Servers will only have the full principal in their configuration -- parse the
-    // primary and realm from it.
-    final String serverPrincipal = conf.get(Property.GENERAL_KERBEROS_PRINCIPAL);
-
-    final KerberosName krbName;
-    try {
-      krbName = new KerberosName(serverPrincipal);
-      clientProperties.put(ClientProperty.KERBEROS_SERVER_PRIMARY.getKey(),
-          krbName.getServiceName());
-    } catch (Exception e) {
-      // bad value or empty, assume we're not using kerberos
-    }
-
-    HashSet<String> clientKeys = new HashSet<>();
-    for (ClientProperty prop : ClientProperty.values()) {
-      clientKeys.add(prop.getKey());
-    }
-
-    String key;
-    for (Entry<String,String> entry : conf) {
-      key = entry.getKey();
-      if (clientKeys.contains(key)) {
-        clientProperties.put(key, entry.getValue());
-      }
-    }
-
-    return clientProperties;
-  }
-
   protected void updatePrincipalFromUgi() {
     // Ensure we're using Kerberos auth for Hadoop UGI
     if (!UserGroupInformation.isSecurityEnabled()) {
@@ -210,16 +175,17 @@ protected void updatePrincipalFromUgi() {
 
   }
 
-  protected void updateFromConfiguration(ClientConfiguration conf) {
+  protected void updateFromConfiguration(Properties properties) {
     // Get the quality of protection to use
-    final String qopValue = conf.get(ClientProperty.RPC_SASL_QOP);
+    final String qopValue = ClientProperty.SASL_QOP.getValue(properties);
     this.qop = QualityOfProtection.get(qopValue);
 
     // Add in the SASL properties to a map so we don't have to repeatedly construct this map
     this.saslProperties.put(Sasl.QOP, this.qop.getQuality());
 
     // The primary from the KRB principal on each server (e.g. primary/instance@realm)
-    this.kerberosServerPrimary = conf.get(ClientProperty.KERBEROS_SERVER_PRIMARY);
+    this.kerberosServerPrimary = properties
+        .getProperty(ClientProperty.SASL_KERBEROS_SERVER_PRIMARY.getKey());
   }
 
   public Map<String,String> getSaslProperties() {
diff --git a/core/src/main/java/org/apache/accumulo/core/trace/DistributedTrace.java b/core/src/main/java/org/apache/accumulo/core/trace/DistributedTrace.java
index 4df139f109..6153728892 100644
--- a/core/src/main/java/org/apache/accumulo/core/trace/DistributedTrace.java
+++ b/core/src/main/java/org/apache/accumulo/core/trace/DistributedTrace.java
@@ -91,8 +91,8 @@ public static void enable(String hostname, String service) {
    * Enable tracing by setting up SpanReceivers for the current process. If host name is null, it
    * will be determined. If service name is null, the simple name of the class will be used.
    * Properties required in the client configuration include
-   * {@link org.apache.accumulo.core.client.ClientConfiguration.ClientProperty#TRACE_SPAN_RECEIVERS}
-   * and any properties specific to the span receiver.
+   * {@link org.apache.accumulo.core.conf.ClientProperty#TRACE_SPAN_RECEIVERS} and any properties
+   * specific to the span receiver.
    */
   public static void enable(String hostname, String service, Properties properties) {
     String spanReceivers = ClientProperty.TRACE_SPAN_RECEIVERS.getValue(properties);
diff --git a/core/src/test/java/org/apache/accumulo/core/client/ClientConfigurationTest.java b/core/src/test/java/org/apache/accumulo/core/client/ClientConfigurationTest.java
index 7d10ec805d..82c1760e1c 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/ClientConfigurationTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/ClientConfigurationTest.java
@@ -26,15 +26,17 @@
 import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
 import org.junit.Test;
 
+@Deprecated
 public class ClientConfigurationTest {
+
   @Test
-  public void testOverrides() throws Exception {
+  public void testOverrides() {
     ClientConfiguration clientConfig = createConfig();
     assertExpectedConfig(clientConfig);
   }
 
   @Test
-  public void testSerialization() throws Exception {
+  public void testSerialization() {
     ClientConfiguration clientConfig = createConfig();
     // sanity check that we're starting with what we're expecting
     assertExpectedConfig(clientConfig);
diff --git a/core/src/test/java/org/apache/accumulo/core/client/ZooKeeperInstanceTest.java b/core/src/test/java/org/apache/accumulo/core/client/ZooKeeperInstanceTest.java
index f35098f7b1..467a4a9f97 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/ZooKeeperInstanceTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/ZooKeeperInstanceTest.java
@@ -36,19 +36,18 @@
 public class ZooKeeperInstanceTest {
   private static final UUID IID = UUID.randomUUID();
   private static final String IID_STRING = IID.toString();
-  private ClientConfiguration config;
   private ZooCacheFactory zcf;
   private ZooCache zc;
   private ZooKeeperInstance zki;
 
-  private void mockIdConstruction(ClientConfiguration config) {
+  private void mockIdConstruction(@SuppressWarnings("deprecation") ClientConfiguration config) {
     expect(config.get(ClientProperty.INSTANCE_ID)).andReturn(IID_STRING);
     expect(config.get(ClientProperty.INSTANCE_NAME)).andReturn(null);
     expect(config.get(ClientProperty.INSTANCE_ZK_HOST)).andReturn("zk1");
     expect(config.get(ClientProperty.INSTANCE_ZK_TIMEOUT)).andReturn("30");
   }
 
-  private void mockNameConstruction(ClientConfiguration config) {
+  private void mockNameConstruction(@SuppressWarnings("deprecation") ClientConfiguration config) {
     expect(config.get(ClientProperty.INSTANCE_ID)).andReturn(null);
     expect(config.get(ClientProperty.INSTANCE_NAME)).andReturn("instance");
     expect(config.get(ClientProperty.INSTANCE_ZK_HOST)).andReturn("zk1");
@@ -57,7 +56,8 @@ private void mockNameConstruction(ClientConfiguration config) {
 
   @Before
   public void setUp() {
-    config = createMock(ClientConfiguration.class);
+    @SuppressWarnings("deprecation")
+    ClientConfiguration config = createMock(ClientConfiguration.class);
     mockNameConstruction(config);
     replay(config);
     zcf = createMock(ZooCacheFactory.class);
@@ -73,13 +73,26 @@ public void setUp() {
 
   @Test(expected = IllegalArgumentException.class)
   public void testInvalidConstruction() {
-    config = createMock(ClientConfiguration.class);
+    @SuppressWarnings("deprecation")
+    ClientConfiguration config = createMock(ClientConfiguration.class);
     expect(config.get(ClientProperty.INSTANCE_ID)).andReturn(IID_STRING);
     mockNameConstruction(config);
     replay(config);
     new ZooKeeperInstance(config);
   }
 
+  @Test(expected = IllegalArgumentException.class)
+  public void testInvalidConstruction2() {
+    @SuppressWarnings("deprecation")
+    ClientConfiguration config = createMock(ClientConfiguration.class);
+    expect(config.get(ClientProperty.INSTANCE_ID)).andReturn(null);
+    expect(config.get(ClientProperty.INSTANCE_NAME)).andReturn(null);
+    expect(config.get(ClientProperty.INSTANCE_ZK_HOST)).andReturn("zk1");
+    expect(config.get(ClientProperty.INSTANCE_ZK_TIMEOUT)).andReturn("30");
+    replay(config);
+    new ZooKeeperInstance(config);
+  }
+
   @Test
   public void testSimpleGetters() {
     assertEquals("instance", zki.getInstanceName());
@@ -98,7 +111,8 @@ public void testGetInstanceID_FromCache() {
 
   @Test
   public void testGetInstanceID_Direct() {
-    config = createMock(ClientConfiguration.class);
+    @SuppressWarnings("deprecation")
+    ClientConfiguration config = createMock(ClientConfiguration.class);
     mockIdConstruction(config);
     replay(config);
     zki = new ZooKeeperInstance(config, zcf);
@@ -109,6 +123,8 @@ public void testGetInstanceID_Direct() {
 
   @Test(expected = RuntimeException.class)
   public void testGetInstanceID_NoMapping() {
+    @SuppressWarnings("deprecation")
+    ClientConfiguration config = createMock(ClientConfiguration.class);
     expect(zc.get(Constants.ZROOT + Constants.ZINSTANCES + "/instance")).andReturn(null);
     replay(zc);
     EasyMock.reset(config, zcf);
@@ -126,7 +142,8 @@ public void testGetInstanceID_IDMissingForName() {
 
   @Test(expected = RuntimeException.class)
   public void testGetInstanceID_IDMissingForID() {
-    config = createMock(ClientConfiguration.class);
+    @SuppressWarnings("deprecation")
+    ClientConfiguration config = createMock(ClientConfiguration.class);
     mockIdConstruction(config);
     replay(config);
     zki = new ZooKeeperInstance(config, zcf);
@@ -137,7 +154,8 @@ public void testGetInstanceID_IDMissingForID() {
 
   @Test
   public void testGetInstanceName() {
-    config = createMock(ClientConfiguration.class);
+    @SuppressWarnings("deprecation")
+    ClientConfiguration config = createMock(ClientConfiguration.class);
     mockIdConstruction(config);
     replay(config);
     zki = new ZooKeeperInstance(config, zcf);
@@ -164,6 +182,7 @@ public void testAllZooKeepersAreUsed() {
         .andReturn(IID_STRING.getBytes(UTF_8));
     expect(zc.get(Constants.ZROOT + "/" + IID_STRING)).andReturn("yup".getBytes());
     replay(zc, factory);
+    @SuppressWarnings("deprecation")
     ClientConfiguration cfg = ClientConfiguration.loadDefault().withInstance(instanceName)
         .withZkHosts(zookeepers);
     ZooKeeperInstance zki = new ZooKeeperInstance(cfg, factory);
diff --git a/core/src/test/java/org/apache/accumulo/core/client/impl/ClientContextTest.java b/core/src/test/java/org/apache/accumulo/core/client/impl/ClientContextTest.java
index b15f9b8ccf..648bfb23de 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/impl/ClientContextTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/impl/ClientContextTest.java
@@ -70,7 +70,7 @@ public void loadSensitivePropertyFromCredentialProvider() {
     ClientConfiguration clientConf = ClientConfiguration.create()
         .with(Property.GENERAL_SECURITY_CREDENTIAL_PROVIDER_PATHS.getKey(), absPath);
 
-    AccumuloConfiguration accClientConf = ClientContext.convertClientConfig(clientConf);
+    AccumuloConfiguration accClientConf = ClientConfConverter.toAccumuloConf(clientConf);
     Assert.assertEquals("mysecret", accClientConf.get(Property.INSTANCE_SECRET));
   }
 
@@ -82,7 +82,7 @@ public void defaultValueForSensitiveProperty() {
 
     ClientConfiguration clientConf = ClientConfiguration.create();
 
-    AccumuloConfiguration accClientConf = ClientContext.convertClientConfig(clientConf);
+    AccumuloConfiguration accClientConf = ClientConfConverter.toAccumuloConf(clientConf);
     Assert.assertEquals(Property.INSTANCE_SECRET.getDefaultValue(),
         accClientConf.get(Property.INSTANCE_SECRET));
   }
@@ -97,7 +97,7 @@ public void sensitivePropertiesIncludedInProperties() {
     ClientConfiguration clientConf = ClientConfiguration.create()
         .with(Property.GENERAL_SECURITY_CREDENTIAL_PROVIDER_PATHS.getKey(), absPath);
 
-    AccumuloConfiguration accClientConf = ClientContext.convertClientConfig(clientConf);
+    AccumuloConfiguration accClientConf = ClientConfConverter.toAccumuloConf(clientConf);
     Map<String,String> props = new HashMap<>();
     accClientConf.getProperties(props, x -> true);
 
diff --git a/core/src/test/java/org/apache/accumulo/core/client/impl/TableOperationsImplTest.java b/core/src/test/java/org/apache/accumulo/core/client/impl/TableOperationsImplTest.java
index 47b909330c..129ed306c9 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/impl/TableOperationsImplTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/impl/TableOperationsImplTest.java
@@ -16,9 +16,9 @@
  */
 package org.apache.accumulo.core.client.impl;
 
+import java.util.Properties;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.accumulo.core.client.ClientConfiguration;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.Scanner;
@@ -38,7 +38,7 @@ public void waitForStoreTransitionScannerConfiguredCorrectly() throws Exception
     Instance instance = EasyMock.createMock(Instance.class);
     Credentials credentials = EasyMock.createMock(Credentials.class);
 
-    ClientContext context = new ClientContext(instance, credentials, ClientConfiguration.create());
+    ClientContext context = new ClientContext(instance, credentials, new Properties());
     TableOperationsImpl topsImpl = new TableOperationsImpl(context);
 
     Connector connector = EasyMock.createMock(Connector.class);
diff --git a/core/src/test/java/org/apache/accumulo/core/client/impl/TabletLocatorImplTest.java b/core/src/test/java/org/apache/accumulo/core/client/impl/TabletLocatorImplTest.java
index 5dc1205bba..6d3a133015 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/impl/TabletLocatorImplTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/impl/TabletLocatorImplTest.java
@@ -30,13 +30,13 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Properties;
 import java.util.Set;
 import java.util.SortedMap;
 import java.util.TreeMap;
 
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.ClientConfiguration;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.impl.TabletLocator.TabletLocation;
@@ -175,8 +175,7 @@ static TabletLocatorImpl createLocators(String table, Object... data) {
   @Before
   public void setUp() {
     testInstance = new TestInstance("instance1", "tserver1");
-    context = new ClientContext(testInstance, new Credentials("test", null),
-        ClientConfiguration.create());
+    context = new ClientContext(testInstance, new Credentials("test", null), new Properties());
   }
 
   private void runTest(Text tableName, List<Range> ranges, TabletLocatorImpl tab1TabletCache,
diff --git a/core/src/test/java/org/apache/accumulo/core/client/impl/ThriftTransportKeyTest.java b/core/src/test/java/org/apache/accumulo/core/client/impl/ThriftTransportKeyTest.java
index fe0ce45880..57fa8cba55 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/impl/ThriftTransportKeyTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/impl/ThriftTransportKeyTest.java
@@ -26,10 +26,11 @@
 
 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
+import java.util.Properties;
 
-import org.apache.accumulo.core.client.ClientConfiguration;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
+import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.client.security.tokens.KerberosToken;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.accumulo.core.rpc.SaslConnectionParams;
 import org.apache.accumulo.core.rpc.SslConnectionParams;
 import org.apache.accumulo.core.util.HostAndPort;
@@ -42,8 +43,10 @@
 
 public class ThriftTransportKeyTest {
 
+  private static final String primary = "accumulo";
+
   @Before
-  public void setup() throws Exception {
+  public void setup() {
     System.setProperty("java.security.krb5.realm", "accumulo");
     System.setProperty("java.security.krb5.kdc", "fake");
     Configuration conf = new Configuration(false);
@@ -51,6 +54,13 @@ public void setup() throws Exception {
     UserGroupInformation.setConfiguration(conf);
   }
 
+  private static SaslConnectionParams createSaslParams(AuthenticationToken token) {
+    Properties props = new Properties();
+    props.setProperty(ClientProperty.SASL_KERBEROS_SERVER_PRIMARY.getKey(), primary);
+    props.setProperty(ClientProperty.SASL_ENABLED.getKey(), "true");
+    return new SaslConnectionParams(props, token);
+  }
+
   @Test(expected = RuntimeException.class)
   public void testSslAndSaslErrors() {
     ClientContext clientCtx = createMock(ClientContext.class);
@@ -74,28 +84,14 @@ public void testSslAndSaslErrors() {
   public void testConnectionCaching() throws IOException, InterruptedException {
     UserGroupInformation user1 = UserGroupInformation.createUserForTesting("user1", new String[0]);
     final KerberosToken token = EasyMock.createMock(KerberosToken.class);
-    final ClientConfiguration clientConf = ClientConfiguration.loadDefault();
-    // The primary is the first component of the principal
-    final String primary = "accumulo";
-    clientConf.withSasl(true, primary);
 
     // A first instance of the SASL cnxn params
     SaslConnectionParams saslParams1 = user1
-        .doAs(new PrivilegedExceptionAction<SaslConnectionParams>() {
-          @Override
-          public SaslConnectionParams run() throws Exception {
-            return new SaslConnectionParams(clientConf, token);
-          }
-        });
+        .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () -> createSaslParams(token));
 
     // A second instance of what should be the same SaslConnectionParams
     SaslConnectionParams saslParams2 = user1
-        .doAs(new PrivilegedExceptionAction<SaslConnectionParams>() {
-          @Override
-          public SaslConnectionParams run() throws Exception {
-            return new SaslConnectionParams(clientConf, token);
-          }
-        });
+        .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () -> createSaslParams(token));
 
     ThriftTransportKey ttk1 = new ThriftTransportKey(HostAndPort.fromParts("localhost", 9997), 1L,
         null, saslParams1),
@@ -112,37 +108,11 @@ public void testSaslPrincipalIsSignificant() throws IOException, InterruptedExce
     UserGroupInformation user1 = UserGroupInformation.createUserForTesting("user1", new String[0]);
     final KerberosToken token = EasyMock.createMock(KerberosToken.class);
     SaslConnectionParams saslParams1 = user1
-        .doAs(new PrivilegedExceptionAction<SaslConnectionParams>() {
-          @Override
-          public SaslConnectionParams run() throws Exception {
-            final ClientConfiguration clientConf = ClientConfiguration.loadDefault();
-
-            // The primary is the first component of the principal
-            final String primary = "accumulo";
-            clientConf.withSasl(true, primary);
-
-            assertEquals("true", clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
-            return new SaslConnectionParams(clientConf, token);
-          }
-        });
+        .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () -> createSaslParams(token));
 
     UserGroupInformation user2 = UserGroupInformation.createUserForTesting("user2", new String[0]);
     SaslConnectionParams saslParams2 = user2
-        .doAs(new PrivilegedExceptionAction<SaslConnectionParams>() {
-          @Override
-          public SaslConnectionParams run() throws Exception {
-            final ClientConfiguration clientConf = ClientConfiguration.loadDefault();
-
-            // The primary is the first component of the principal
-            final String primary = "accumulo";
-            clientConf.withSasl(true, primary);
-
-            assertEquals("true", clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
-            return new SaslConnectionParams(clientConf, token);
-          }
-        });
+        .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () -> createSaslParams(token));
 
     ThriftTransportKey ttk1 = new ThriftTransportKey(HostAndPort.fromParts("localhost", 9997), 1L,
         null, saslParams1),
@@ -167,5 +137,4 @@ public void testSimpleEquivalence() {
 
     assertTrue("Normal ThriftTransportKey doesn't equal itself", ttk.equals(ttk));
   }
-
 }
diff --git a/core/src/test/java/org/apache/accumulo/core/rpc/SaslConnectionParamsTest.java b/core/src/test/java/org/apache/accumulo/core/rpc/SaslConnectionParamsTest.java
index a93f80fb52..0028448436 100644
--- a/core/src/test/java/org/apache/accumulo/core/rpc/SaslConnectionParamsTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/rpc/SaslConnectionParamsTest.java
@@ -22,16 +22,15 @@
 
 import java.security.PrivilegedExceptionAction;
 import java.util.Map;
+import java.util.Properties;
 
 import javax.security.sasl.Sasl;
 
-import org.apache.accumulo.core.client.ClientConfiguration;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
 import org.apache.accumulo.core.client.impl.AuthenticationTokenIdentifier;
-import org.apache.accumulo.core.client.impl.ClientContext;
 import org.apache.accumulo.core.client.impl.DelegationTokenImpl;
+import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.client.security.tokens.KerberosToken;
-import org.apache.accumulo.core.conf.AccumuloConfiguration;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.accumulo.core.conf.Property;
 import org.apache.accumulo.core.rpc.SaslConnectionParams.QualityOfProtection;
 import org.apache.accumulo.core.rpc.SaslConnectionParams.SaslMechanism;
@@ -46,9 +45,10 @@
 
   private UserGroupInformation testUser;
   private String username;
+  private static final String primary = "accumulo";
 
   @Before
-  public void setup() throws Exception {
+  public void setup() {
     System.setProperty("java.security.krb5.realm", "accumulo");
     System.setProperty("java.security.krb5.kdc", "fake");
     Configuration conf = new Configuration(false);
@@ -58,64 +58,48 @@ public void setup() throws Exception {
     username = testUser.getUserName();
   }
 
+  private static SaslConnectionParams createSaslParams(AuthenticationToken token) {
+    Properties props = new Properties();
+    props.setProperty(ClientProperty.SASL_KERBEROS_SERVER_PRIMARY.getKey(), primary);
+    props.setProperty(ClientProperty.SASL_ENABLED.getKey(), "true");
+    return new SaslConnectionParams(props, token);
+  }
+
   @Test
   public void testDefaultParamsAsClient() throws Exception {
     final KerberosToken token = EasyMock.createMock(KerberosToken.class);
-    testUser.doAs(new PrivilegedExceptionAction<Void>() {
-      @Override
-      public Void run() throws Exception {
-        final ClientConfiguration clientConf = ClientConfiguration.loadDefault();
-
-        // The primary is the first component of the principal
-        final String primary = "accumulo";
-        clientConf.withSasl(true, primary);
-
-        assertEquals("true", clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
-        final SaslConnectionParams saslParams = new SaslConnectionParams(clientConf, token);
-        assertEquals(primary, saslParams.getKerberosServerPrimary());
-
-        final QualityOfProtection defaultQop = QualityOfProtection
-            .get(Property.RPC_SASL_QOP.getDefaultValue());
-        assertEquals(defaultQop, saslParams.getQualityOfProtection());
-
-        Map<String,String> properties = saslParams.getSaslProperties();
-        assertEquals(1, properties.size());
-        assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
-        assertEquals(username, saslParams.getPrincipal());
-        return null;
-      }
+    testUser.doAs((PrivilegedExceptionAction<Void>) () -> {
+      final SaslConnectionParams saslParams = createSaslParams(token);
+      assertEquals(primary, saslParams.getKerberosServerPrimary());
+
+      final QualityOfProtection defaultQop = QualityOfProtection
+          .get(Property.RPC_SASL_QOP.getDefaultValue());
+      assertEquals(defaultQop, saslParams.getQualityOfProtection());
+
+      Map<String,String> properties = saslParams.getSaslProperties();
+      assertEquals(1, properties.size());
+      assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
+      assertEquals(username, saslParams.getPrincipal());
+      return null;
     });
   }
 
   @Test
   public void testDefaultParams() throws Exception {
     final KerberosToken token = EasyMock.createMock(KerberosToken.class);
-    testUser.doAs(new PrivilegedExceptionAction<Void>() {
-      @Override
-      public Void run() throws Exception {
-        final ClientConfiguration clientConf = ClientConfiguration.loadDefault();
-
-        // The primary is the first component of the principal
-        final String primary = "accumulo";
-        clientConf.withSasl(true, primary);
-
-        final AccumuloConfiguration rpcConf = ClientContext.convertClientConfig(clientConf);
-        assertEquals("true", clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
-        final SaslConnectionParams saslParams = new SaslConnectionParams(rpcConf, token);
-        assertEquals(primary, saslParams.getKerberosServerPrimary());
-
-        final QualityOfProtection defaultQop = QualityOfProtection
-            .get(Property.RPC_SASL_QOP.getDefaultValue());
-        assertEquals(defaultQop, saslParams.getQualityOfProtection());
-
-        Map<String,String> properties = saslParams.getSaslProperties();
-        assertEquals(1, properties.size());
-        assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
-        assertEquals(username, saslParams.getPrincipal());
-        return null;
-      }
+    testUser.doAs((PrivilegedExceptionAction<Void>) () -> {
+      final SaslConnectionParams saslParams = createSaslParams(token);
+      assertEquals(primary, saslParams.getKerberosServerPrimary());
+
+      final QualityOfProtection defaultQop = QualityOfProtection
+          .get(Property.RPC_SASL_QOP.getDefaultValue());
+      assertEquals(defaultQop, saslParams.getQualityOfProtection());
+
+      Map<String,String> properties = saslParams.getSaslProperties();
+      assertEquals(1, properties.size());
+      assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
+      assertEquals(username, saslParams.getPrincipal());
+      return null;
     });
   }
 
@@ -123,36 +107,24 @@ public Void run() throws Exception {
   public void testDelegationTokenImpl() throws Exception {
     final DelegationTokenImpl token = new DelegationTokenImpl(new byte[0],
         new AuthenticationTokenIdentifier("user", 1, 10L, 20L, "instanceid"));
-    testUser.doAs(new PrivilegedExceptionAction<Void>() {
-      @Override
-      public Void run() throws Exception {
-        final ClientConfiguration clientConf = ClientConfiguration.loadDefault();
-
-        // The primary is the first component of the principal
-        final String primary = "accumulo";
-        clientConf.withSasl(true, primary);
-
-        final AccumuloConfiguration rpcConf = ClientContext.convertClientConfig(clientConf);
-        assertEquals("true", clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
-        final SaslConnectionParams saslParams = new SaslConnectionParams(rpcConf, token);
-        assertEquals(primary, saslParams.getKerberosServerPrimary());
-
-        final QualityOfProtection defaultQop = QualityOfProtection
-            .get(Property.RPC_SASL_QOP.getDefaultValue());
-        assertEquals(defaultQop, saslParams.getQualityOfProtection());
-
-        assertEquals(SaslMechanism.DIGEST_MD5, saslParams.getMechanism());
-        assertNotNull(saslParams.getCallbackHandler());
-        assertEquals(SaslClientDigestCallbackHandler.class,
-            saslParams.getCallbackHandler().getClass());
-
-        Map<String,String> properties = saslParams.getSaslProperties();
-        assertEquals(1, properties.size());
-        assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
-        assertEquals(username, saslParams.getPrincipal());
-        return null;
-      }
+    testUser.doAs((PrivilegedExceptionAction<Void>) () -> {
+      final SaslConnectionParams saslParams = createSaslParams(token);
+      assertEquals(primary, saslParams.getKerberosServerPrimary());
+
+      final QualityOfProtection defaultQop = QualityOfProtection
+          .get(Property.RPC_SASL_QOP.getDefaultValue());
+      assertEquals(defaultQop, saslParams.getQualityOfProtection());
+
+      assertEquals(SaslMechanism.DIGEST_MD5, saslParams.getMechanism());
+      assertNotNull(saslParams.getCallbackHandler());
+      assertEquals(SaslClientDigestCallbackHandler.class,
+          saslParams.getCallbackHandler().getClass());
+
+      Map<String,String> properties = saslParams.getSaslProperties();
+      assertEquals(1, properties.size());
+      assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
+      assertEquals(username, saslParams.getPrincipal());
+      return null;
     });
   }
 
@@ -160,38 +132,10 @@ public Void run() throws Exception {
   public void testEquality() throws Exception {
     final KerberosToken token = EasyMock.createMock(KerberosToken.class);
     SaslConnectionParams params1 = testUser
-        .doAs(new PrivilegedExceptionAction<SaslConnectionParams>() {
-          @Override
-          public SaslConnectionParams run() throws Exception {
-            final ClientConfiguration clientConf = ClientConfiguration.loadDefault();
-
-            // The primary is the first component of the principal
-            final String primary = "accumulo";
-            clientConf.withSasl(true, primary);
-
-            final AccumuloConfiguration rpcConf = ClientContext.convertClientConfig(clientConf);
-            assertEquals("true", clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
-            return new SaslConnectionParams(rpcConf, token);
-          }
-        });
+        .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () -> createSaslParams(token));
 
     SaslConnectionParams params2 = testUser
-        .doAs(new PrivilegedExceptionAction<SaslConnectionParams>() {
-          @Override
-          public SaslConnectionParams run() throws Exception {
-            final ClientConfiguration clientConf = ClientConfiguration.loadDefault();
-
-            // The primary is the first component of the principal
-            final String primary = "accumulo";
-            clientConf.withSasl(true, primary);
-
-            final AccumuloConfiguration rpcConf = ClientContext.convertClientConfig(clientConf);
-            assertEquals("true", clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
-            return new SaslConnectionParams(rpcConf, token);
-          }
-        });
+        .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () -> createSaslParams(token));
 
     assertEquals(params1, params2);
     assertEquals(params1.hashCode(), params2.hashCode());
@@ -199,21 +143,7 @@ public SaslConnectionParams run() throws Exception {
     final DelegationTokenImpl delToken1 = new DelegationTokenImpl(new byte[0],
         new AuthenticationTokenIdentifier("user", 1, 10L, 20L, "instanceid"));
     SaslConnectionParams params3 = testUser
-        .doAs(new PrivilegedExceptionAction<SaslConnectionParams>() {
-          @Override
-          public SaslConnectionParams run() throws Exception {
-            final ClientConfiguration clientConf = ClientConfiguration.loadDefault();
-
-            // The primary is the first component of the principal
-            final String primary = "accumulo";
-            clientConf.withSasl(true, primary);
-
-            final AccumuloConfiguration rpcConf = ClientContext.convertClientConfig(clientConf);
-            assertEquals("true", clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
-            return new SaslConnectionParams(rpcConf, delToken1);
-          }
-        });
+        .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () -> createSaslParams(delToken1));
 
     assertNotEquals(params1, params3);
     assertNotEquals(params1.hashCode(), params3.hashCode());
@@ -223,21 +153,7 @@ public SaslConnectionParams run() throws Exception {
     final DelegationTokenImpl delToken2 = new DelegationTokenImpl(new byte[0],
         new AuthenticationTokenIdentifier("user", 1, 10L, 20L, "instanceid"));
     SaslConnectionParams params4 = testUser
-        .doAs(new PrivilegedExceptionAction<SaslConnectionParams>() {
-          @Override
-          public SaslConnectionParams run() throws Exception {
-            final ClientConfiguration clientConf = ClientConfiguration.loadDefault();
-
-            // The primary is the first component of the principal
-            final String primary = "accumulo";
-            clientConf.withSasl(true, primary);
-
-            final AccumuloConfiguration rpcConf = ClientContext.convertClientConfig(clientConf);
-            assertEquals("true", clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
-
-            return new SaslConnectionParams(rpcConf, delToken2);
-          }
-        });
+        .doAs((PrivilegedExceptionAction<SaslConnectionParams>) () -> createSaslParams(delToken2));
 
     assertNotEquals(params1, params4);
     assertNotEquals(params1.hashCode(), params4.hashCode());
diff --git a/proxy/src/main/java/org/apache/accumulo/proxy/Proxy.java b/proxy/src/main/java/org/apache/accumulo/proxy/Proxy.java
index ef84f79440..898ea0bd29 100644
--- a/proxy/src/main/java/org/apache/accumulo/proxy/Proxy.java
+++ b/proxy/src/main/java/org/apache/accumulo/proxy/Proxy.java
@@ -26,7 +26,7 @@
 import org.apache.accumulo.core.cli.Help;
 import org.apache.accumulo.core.client.ClientConfiguration;
 import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
-import org.apache.accumulo.core.client.impl.ClientContext;
+import org.apache.accumulo.core.client.impl.ClientConfConverter;
 import org.apache.accumulo.core.client.security.tokens.KerberosToken;
 import org.apache.accumulo.core.conf.ConfigurationTypeHelper;
 import org.apache.accumulo.core.conf.Property;
@@ -255,7 +255,7 @@ public static ServerAddress createProxyServer(HostAndPort address,
     SaslServerConnectionParams saslParams = null;
     switch (serverType) {
       case SSL:
-        sslParams = SslConnectionParams.forClient(ClientContext.convertClientConfig(clientConf));
+        sslParams = SslConnectionParams.forClient(ClientConfConverter.toAccumuloConf(clientConf));
         break;
       case SASL:
         if (!clientConf.hasSasl()) {
@@ -291,7 +291,8 @@ public static ServerAddress createProxyServer(HostAndPort address,
         clientConf.setProperty(ClientProperty.KERBEROS_SERVER_PRIMARY, shortName);
 
         KerberosToken token = new KerberosToken();
-        saslParams = new SaslServerConnectionParams(clientConf, token, null);
+        saslParams = new SaslServerConnectionParams(ClientConfConverter.toProperties(clientConf),
+            token, null);
 
         processor = new UGIAssumingProcessor(processor);
 
diff --git a/server/base/src/main/java/org/apache/accumulo/server/rpc/SaslServerConnectionParams.java b/server/base/src/main/java/org/apache/accumulo/server/rpc/SaslServerConnectionParams.java
index d49ce14ed3..d09c40a03f 100644
--- a/server/base/src/main/java/org/apache/accumulo/server/rpc/SaslServerConnectionParams.java
+++ b/server/base/src/main/java/org/apache/accumulo/server/rpc/SaslServerConnectionParams.java
@@ -16,7 +16,8 @@
  */
 package org.apache.accumulo.server.rpc;
 
-import org.apache.accumulo.core.client.ClientConfiguration;
+import java.util.Properties;
+
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.client.security.tokens.KerberosToken;
 import org.apache.accumulo.core.conf.AccumuloConfiguration;
@@ -41,13 +42,13 @@ public SaslServerConnectionParams(AccumuloConfiguration conf, AuthenticationToke
     setSecretManager(secretManager);
   }
 
-  public SaslServerConnectionParams(ClientConfiguration conf, AuthenticationToken token) {
-    this(conf, token, null);
+  public SaslServerConnectionParams(Properties clientProps, AuthenticationToken token) {
+    this(clientProps, token, null);
   }
 
-  public SaslServerConnectionParams(ClientConfiguration conf, AuthenticationToken token,
+  public SaslServerConnectionParams(Properties clientProps, AuthenticationToken token,
       AuthenticationTokenSecretManager secretManager) {
-    super(conf, token);
+    super(clientProps, token);
     setSecretManager(secretManager);
   }
 
diff --git a/server/base/src/main/java/org/apache/accumulo/server/util/RemoveEntriesForMissingFiles.java b/server/base/src/main/java/org/apache/accumulo/server/util/RemoveEntriesForMissingFiles.java
index 6d04cbfd37..5b0122c71d 100644
--- a/server/base/src/main/java/org/apache/accumulo/server/util/RemoveEntriesForMissingFiles.java
+++ b/server/base/src/main/java/org/apache/accumulo/server/util/RemoveEntriesForMissingFiles.java
@@ -30,7 +30,6 @@
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.BatchWriterConfig;
-import org.apache.accumulo.core.client.ClientConfiguration;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.Scanner;
 import org.apache.accumulo.core.client.impl.ClientContext;
@@ -214,7 +213,7 @@ public static void main(String[] args) throws Exception {
     opts.parseArgs(RemoveEntriesForMissingFiles.class.getName(), args, scanOpts, bwOpts);
 
     checkAllTables(new ClientContext(opts.getInstance(),
-        new Credentials(opts.getPrincipal(), opts.getToken()), ClientConfiguration.loadDefault()),
+        new Credentials(opts.getPrincipal(), opts.getToken()), opts.getClientProperties()),
         opts.fix);
   }
 }
diff --git a/server/base/src/main/java/org/apache/accumulo/server/util/VerifyTabletAssignments.java b/server/base/src/main/java/org/apache/accumulo/server/util/VerifyTabletAssignments.java
index 94f214f137..d8df68b167 100644
--- a/server/base/src/main/java/org/apache/accumulo/server/util/VerifyTabletAssignments.java
+++ b/server/base/src/main/java/org/apache/accumulo/server/util/VerifyTabletAssignments.java
@@ -31,7 +31,6 @@
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.accumulo.core.client.impl.ClientConfConverter;
 import org.apache.accumulo.core.client.impl.ClientContext;
 import org.apache.accumulo.core.client.impl.Credentials;
 import org.apache.accumulo.core.client.impl.Table;
@@ -76,8 +75,7 @@ public static void main(String[] args) throws Exception {
     opts.parseArgs(VerifyTabletAssignments.class.getName(), args);
 
     ClientContext context = new ClientContext(opts.getInstance(),
-        new Credentials(opts.getPrincipal(), opts.getToken()),
-        ClientConfConverter.toClientConf(opts.getClientProperties()));
+        new Credentials(opts.getPrincipal(), opts.getToken()), opts.getClientProperties());
     Connector conn = opts.getConnector();
     for (String table : conn.tableOperations().list())
       checkTable(context, opts, table, null);
diff --git a/server/base/src/test/java/org/apache/accumulo/server/AccumuloServerContextTest.java b/server/base/src/test/java/org/apache/accumulo/server/AccumuloServerContextTest.java
index 39463411f6..b2855c611c 100644
--- a/server/base/src/test/java/org/apache/accumulo/server/AccumuloServerContextTest.java
+++ b/server/base/src/test/java/org/apache/accumulo/server/AccumuloServerContextTest.java
@@ -21,13 +21,13 @@
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.security.PrivilegedExceptionAction;
+import java.util.Properties;
 
-import org.apache.accumulo.core.client.ClientConfiguration;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
-import org.apache.accumulo.core.client.impl.ClientContext;
+import org.apache.accumulo.core.client.impl.ClientConfConverter;
 import org.apache.accumulo.core.client.impl.Credentials;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.conf.AccumuloConfiguration;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.accumulo.core.conf.Property;
 import org.apache.accumulo.core.conf.SiteConfiguration;
 import org.apache.accumulo.server.conf.ServerConfigurationFactory;
@@ -48,7 +48,7 @@
   private String username;
 
   @Before
-  public void setup() throws Exception {
+  public void setup() {
     System.setProperty("java.security.krb5.realm", "accumulo");
     System.setProperty("java.security.krb5.kdc", "fake");
     Configuration conf = new Configuration(false);
@@ -61,61 +61,58 @@ public void setup() throws Exception {
   @Test
   public void testSasl() throws Exception {
 
-    testUser.doAs(new PrivilegedExceptionAction<Void>() {
-      @Override
-      public Void run() throws Exception {
-
-        ClientConfiguration clientConf = ClientConfiguration.loadDefault();
-        clientConf.setProperty(ClientProperty.INSTANCE_RPC_SASL_ENABLED, "true");
-        clientConf.setProperty(ClientProperty.KERBEROS_SERVER_PRIMARY, "accumulo");
-        final AccumuloConfiguration conf = ClientContext.convertClientConfig(clientConf);
-        SiteConfiguration siteConfig = EasyMock.createMock(SiteConfiguration.class);
-
-        EasyMock.expect(siteConfig.getBoolean(Property.INSTANCE_RPC_SASL_ENABLED)).andReturn(true);
-
-        // Deal with SystemToken being private
-        PasswordToken pw = new PasswordToken("fake");
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        pw.write(new DataOutputStream(baos));
-        SystemToken token = new SystemToken();
-        token.readFields(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
-
-        ServerConfigurationFactory factory = EasyMock.createMock(ServerConfigurationFactory.class);
-        EasyMock.expect(factory.getSystemConfiguration()).andReturn(conf).anyTimes();
-        EasyMock.expect(factory.getSiteConfiguration()).andReturn(siteConfig).anyTimes();
-
-        AccumuloServerContext context = EasyMock.createMockBuilder(AccumuloServerContext.class)
-            .addMockedMethod("enforceKerberosLogin").addMockedMethod("getConfiguration")
-            .addMockedMethod("getServerConfigurationFactory").addMockedMethod("getCredentials")
-            .createMock();
-        context.enforceKerberosLogin();
-        EasyMock.expectLastCall().anyTimes();
-        EasyMock.expect(context.getConfiguration()).andReturn(conf).anyTimes();
-        EasyMock.expect(context.getServerConfigurationFactory()).andReturn(factory).anyTimes();
-        EasyMock.expect(context.getCredentials())
-            .andReturn(new Credentials("accumulo/hostname@FAKE.COM", token)).once();
-
-        // Just make the SiteConfiguration delegate to our ClientConfiguration (by way of the
-        // AccumuloConfiguration)
-        // Presently, we only need get(Property) and iterator().
-        EasyMock.expect(siteConfig.get(EasyMock.anyObject(Property.class))).andAnswer(() -> {
-          Object[] args = EasyMock.getCurrentArguments();
-          return conf.get((Property) args[0]);
-        }).anyTimes();
-
-        EasyMock.expect(siteConfig.iterator()).andAnswer(() -> conf.iterator()).anyTimes();
-
-        EasyMock.replay(factory, context, siteConfig);
-
-        Assert.assertEquals(ThriftServerType.SASL, context.getThriftServerType());
-        SaslServerConnectionParams saslParams = context.getSaslParams();
-        Assert.assertEquals(new SaslServerConnectionParams(conf, token), saslParams);
-        Assert.assertEquals(username, saslParams.getPrincipal());
-
-        EasyMock.verify(factory, context, siteConfig);
-
-        return null;
-      }
+    testUser.doAs((PrivilegedExceptionAction<Void>) () -> {
+
+      Properties clientProps = new Properties();
+      clientProps.setProperty(ClientProperty.SASL_ENABLED.getKey(), "true");
+      clientProps.setProperty(ClientProperty.SASL_KERBEROS_SERVER_PRIMARY.getKey(), "accumulo");
+      final AccumuloConfiguration conf = ClientConfConverter.toAccumuloConf(clientProps);
+      SiteConfiguration siteConfig = EasyMock.createMock(SiteConfiguration.class);
+
+      EasyMock.expect(siteConfig.getBoolean(Property.INSTANCE_RPC_SASL_ENABLED)).andReturn(true);
+
+      // Deal with SystemToken being private
+      PasswordToken pw = new PasswordToken("fake");
+      ByteArrayOutputStream baos = new ByteArrayOutputStream();
+      pw.write(new DataOutputStream(baos));
+      SystemToken token = new SystemToken();
+      token.readFields(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
+
+      ServerConfigurationFactory factory = EasyMock.createMock(ServerConfigurationFactory.class);
+      EasyMock.expect(factory.getSystemConfiguration()).andReturn(conf).anyTimes();
+      EasyMock.expect(factory.getSiteConfiguration()).andReturn(siteConfig).anyTimes();
+
+      AccumuloServerContext context = EasyMock.createMockBuilder(AccumuloServerContext.class)
+          .addMockedMethod("enforceKerberosLogin").addMockedMethod("getConfiguration")
+          .addMockedMethod("getServerConfigurationFactory").addMockedMethod("getCredentials")
+          .createMock();
+      context.enforceKerberosLogin();
+      EasyMock.expectLastCall().anyTimes();
+      EasyMock.expect(context.getConfiguration()).andReturn(conf).anyTimes();
+      EasyMock.expect(context.getServerConfigurationFactory()).andReturn(factory).anyTimes();
+      EasyMock.expect(context.getCredentials())
+          .andReturn(new Credentials("accumulo/hostname@FAKE.COM", token)).once();
+
+      // Just make the SiteConfiguration delegate to our ClientConfiguration (by way of the
+      // AccumuloConfiguration)
+      // Presently, we only need get(Property) and iterator().
+      EasyMock.expect(siteConfig.get(EasyMock.anyObject(Property.class))).andAnswer(() -> {
+        Object[] args = EasyMock.getCurrentArguments();
+        return conf.get((Property) args[0]);
+      }).anyTimes();
+
+      EasyMock.expect(siteConfig.iterator()).andAnswer(conf::iterator).anyTimes();
+
+      EasyMock.replay(factory, context, siteConfig);
+
+      Assert.assertEquals(ThriftServerType.SASL, context.getThriftServerType());
+      SaslServerConnectionParams saslParams = context.getSaslParams();
+      Assert.assertEquals(new SaslServerConnectionParams(conf, token), saslParams);
+      Assert.assertEquals(username, saslParams.getPrincipal());
+
+      EasyMock.verify(factory, context, siteConfig);
+
+      return null;
     });
   }
 
diff --git a/server/base/src/test/java/org/apache/accumulo/server/rpc/SaslServerConnectionParamsTest.java b/server/base/src/test/java/org/apache/accumulo/server/rpc/SaslServerConnectionParamsTest.java
index 3535473784..c19f60af2d 100644
--- a/server/base/src/test/java/org/apache/accumulo/server/rpc/SaslServerConnectionParamsTest.java
+++ b/server/base/src/test/java/org/apache/accumulo/server/rpc/SaslServerConnectionParamsTest.java
@@ -25,14 +25,14 @@
 import java.io.DataOutputStream;
 import java.security.PrivilegedExceptionAction;
 import java.util.Map;
+import java.util.Properties;
 
 import javax.security.sasl.Sasl;
 
-import org.apache.accumulo.core.client.ClientConfiguration;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
-import org.apache.accumulo.core.client.impl.ClientContext;
+import org.apache.accumulo.core.client.impl.ClientConfConverter;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.conf.AccumuloConfiguration;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.accumulo.core.conf.Property;
 import org.apache.accumulo.core.rpc.SaslConnectionParams;
 import org.apache.accumulo.core.rpc.SaslConnectionParams.QualityOfProtection;
@@ -50,7 +50,7 @@
   private String username;
 
   @Before
-  public void setup() throws Exception {
+  public void setup() {
     System.setProperty("java.security.krb5.realm", "accumulo");
     System.setProperty("java.security.krb5.kdc", "fake");
     Configuration conf = new Configuration(false);
@@ -62,40 +62,36 @@ public void setup() throws Exception {
 
   @Test
   public void testDefaultParamsAsServer() throws Exception {
-    testUser.doAs(new PrivilegedExceptionAction<Void>() {
-      @Override
-      public Void run() throws Exception {
-        final ClientConfiguration clientConf = ClientConfiguration.loadDefault();
+    testUser.doAs((PrivilegedExceptionAction<Void>) () -> {
+      Properties clientProps = new Properties();
+      clientProps.setProperty(ClientProperty.SASL_ENABLED.getKey(), "true");
+      final String primary = "accumulo";
+      clientProps.setProperty(ClientProperty.SASL_KERBEROS_SERVER_PRIMARY.getKey(), primary);
 
-        // The primary is the first component of the principal
-        final String primary = "accumulo";
-        clientConf.withSasl(true, primary);
+      final AccumuloConfiguration rpcConf = ClientConfConverter.toAccumuloConf(clientProps);
+      assertEquals("true", rpcConf.get(Property.INSTANCE_RPC_SASL_ENABLED));
 
-        final AccumuloConfiguration rpcConf = ClientContext.convertClientConfig(clientConf);
-        assertEquals("true", clientConf.get(ClientProperty.INSTANCE_RPC_SASL_ENABLED));
+      // Deal with SystemToken being private
+      PasswordToken pw = new PasswordToken("fake");
+      ByteArrayOutputStream baos = new ByteArrayOutputStream();
+      pw.write(new DataOutputStream(baos));
+      SystemToken token = new SystemToken();
+      token.readFields(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
 
-        // Deal with SystemToken being private
-        PasswordToken pw = new PasswordToken("fake");
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        pw.write(new DataOutputStream(baos));
-        SystemToken token = new SystemToken();
-        token.readFields(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
+      final SaslConnectionParams saslParams = new SaslServerConnectionParams(rpcConf, token);
+      assertEquals(primary, saslParams.getKerberosServerPrimary());
+      assertEquals(SaslMechanism.GSSAPI, saslParams.getMechanism());
+      assertNull(saslParams.getCallbackHandler());
 
-        final SaslConnectionParams saslParams = new SaslServerConnectionParams(rpcConf, token);
-        assertEquals(primary, saslParams.getKerberosServerPrimary());
-        assertEquals(SaslMechanism.GSSAPI, saslParams.getMechanism());
-        assertNull(saslParams.getCallbackHandler());
+      final QualityOfProtection defaultQop = QualityOfProtection
+          .get(Property.RPC_SASL_QOP.getDefaultValue());
+      assertEquals(defaultQop, saslParams.getQualityOfProtection());
 
-        final QualityOfProtection defaultQop = QualityOfProtection
-            .get(Property.RPC_SASL_QOP.getDefaultValue());
-        assertEquals(defaultQop, saslParams.getQualityOfProtection());
-
-        Map<String,String> properties = saslParams.getSaslProperties();
-        assertEquals(1, properties.size());
-        assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
-        assertEquals(username, saslParams.getPrincipal());
-        return null;
-      }
+      Map<String,String> properties = saslParams.getSaslProperties();
+      assertEquals(1, properties.size());
+      assertEquals(defaultQop.getQuality(), properties.get(Sasl.QOP));
+      assertEquals(username, saslParams.getPrincipal());
+      return null;
     });
   }
 
diff --git a/server/tserver/src/test/java/org/apache/accumulo/tserver/replication/ReplicationProcessorTest.java b/server/tserver/src/test/java/org/apache/accumulo/tserver/replication/ReplicationProcessorTest.java
index 1277e0c9f4..1aff86433f 100644
--- a/server/tserver/src/test/java/org/apache/accumulo/tserver/replication/ReplicationProcessorTest.java
+++ b/server/tserver/src/test/java/org/apache/accumulo/tserver/replication/ReplicationProcessorTest.java
@@ -20,8 +20,8 @@
 
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Properties;
 
-import org.apache.accumulo.core.client.ClientConfiguration;
 import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.impl.ClientContext;
 import org.apache.accumulo.core.client.impl.Credentials;
@@ -47,7 +47,7 @@ public void peerTypeExtractionFromConfiguration() {
     Instance inst = EasyMock.createMock(Instance.class);
     VolumeManager fs = EasyMock.createMock(VolumeManager.class);
     Credentials creds = new Credentials("foo", new PasswordToken("bar"));
-    ClientContext context = new ClientContext(inst, creds, ClientConfiguration.create());
+    ClientContext context = new ClientContext(inst, creds, new Properties());
 
     Map<String,String> data = new HashMap<>();
 
@@ -66,7 +66,7 @@ public void noPeerConfigurationThrowsAnException() {
     Instance inst = EasyMock.createMock(Instance.class);
     VolumeManager fs = EasyMock.createMock(VolumeManager.class);
     Credentials creds = new Credentials("foo", new PasswordToken("bar"));
-    ClientContext context = new ClientContext(inst, creds, ClientConfiguration.create());
+    ClientContext context = new ClientContext(inst, creds, new Properties());
 
     Map<String,String> data = new HashMap<>();
     ConfigurationCopy conf = new ConfigurationCopy(data);
diff --git a/test/src/main/java/org/apache/accumulo/test/mapreduce/AccumuloInputFormatIT.java b/test/src/main/java/org/apache/accumulo/test/mapreduce/AccumuloInputFormatIT.java
index d9d6bf6ca6..1e0c2a71d2 100644
--- a/test/src/main/java/org/apache/accumulo/test/mapreduce/AccumuloInputFormatIT.java
+++ b/test/src/main/java/org/apache/accumulo/test/mapreduce/AccumuloInputFormatIT.java
@@ -35,8 +35,6 @@
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.BatchWriterConfig;
-import org.apache.accumulo.core.client.ClientConfiguration;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.TableNotFoundException;
 import org.apache.accumulo.core.client.admin.NewTableConfiguration;
@@ -46,10 +44,6 @@
 import org.apache.accumulo.core.client.sample.RowSampler;
 import org.apache.accumulo.core.client.sample.SamplerConfiguration;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
-import org.apache.accumulo.core.conf.AccumuloConfiguration;
-import org.apache.accumulo.core.conf.ConfigurationCopy;
-import org.apache.accumulo.core.conf.DefaultConfiguration;
-import org.apache.accumulo.core.conf.Property;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Range;
@@ -107,32 +101,9 @@ public void testGetSplits() throws Exception {
     conn.tableOperations().create(table);
     insertData(table, currentTimeMillis());
 
-    ClientConfiguration clientConf = cluster.getClientConfig();
-    AccumuloConfiguration clusterClientConf = new ConfigurationCopy(
-        DefaultConfiguration.getInstance());
-
-    // Pass SSL and CredentialProvider options into the ClientConfiguration given to
-    // AccumuloInputFormat
-    boolean sslEnabled = Boolean.valueOf(clusterClientConf.get(Property.INSTANCE_RPC_SSL_ENABLED));
-    if (sslEnabled) {
-      ClientProperty[] sslProperties = new ClientProperty[] {
-          ClientProperty.INSTANCE_RPC_SSL_ENABLED, ClientProperty.INSTANCE_RPC_SSL_CLIENT_AUTH,
-          ClientProperty.RPC_SSL_KEYSTORE_PATH, ClientProperty.RPC_SSL_KEYSTORE_TYPE,
-          ClientProperty.RPC_SSL_KEYSTORE_PASSWORD, ClientProperty.RPC_SSL_TRUSTSTORE_PATH,
-          ClientProperty.RPC_SSL_TRUSTSTORE_TYPE, ClientProperty.RPC_SSL_TRUSTSTORE_PASSWORD,
-          ClientProperty.RPC_USE_JSSE, ClientProperty.GENERAL_SECURITY_CREDENTIAL_PROVIDER_PATHS};
-
-      for (ClientProperty prop : sslProperties) {
-        // The default property is returned if it's not in the ClientConfiguration so we don't have
-        // to check if the value is actually defined
-        clientConf.setProperty(prop, clusterClientConf.get(prop.getKey()));
-      }
-    }
-
     Job job = Job.getInstance();
     AccumuloInputFormat.setInputTableName(job, table);
-    AccumuloInputFormat.setZooKeeperInstance(job, clientConf);
-    AccumuloInputFormat.setConnectorInfo(job, getAdminPrincipal(), getAdminToken());
+    AccumuloInputFormat.setConnectionInfo(job, getConnectionInfo());
 
     // split table
     TreeSet<Text> splitsToAdd = new TreeSet<>();


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

Mime
View raw message