accumulo-notifications mailing list archives

From: GitBox <...@apache.org>
Subject: [GitHub] milleruntime closed pull request #796: Replace ClientInfo with properties in MR
Date: Wed, 05 Dec 2018 17:21:53 GMT
milleruntime closed pull request #796: Replace ClientInfo with properties in MR
URL: https://github.com/apache/accumulo/pull/796

This is a PR merged from a forked repository. Because GitHub hides the
original diff once a pull request from a fork is merged, it is reproduced
below for the sake of provenance:

diff --git a/hadoop-mapreduce/pom.xml b/hadoop-mapreduce/pom.xml
index 8e2b9fc062..c5f430ea69 100644
--- a/hadoop-mapreduce/pom.xml
+++ b/hadoop-mapreduce/pom.xml
@@ -66,11 +66,6 @@
       <artifactId>accumulo-test</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-log4j12</artifactId>
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloInputFormat.java b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloInputFormat.java
index 3c391a858d..168765c3ee 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloInputFormat.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloInputFormat.java
@@ -41,7 +41,7 @@
  * job using the {@link #configure()} method, which provides a fluent API. For Example:
  *
  * <pre>
- * AccumuloInputFormat.configure().clientInfo(info).table(name).auths(auths) // required
+ * AccumuloInputFormat.configure().clientProperties(props).table(name).auths(auths) // required
  *     .addIterator(iter1).ranges(ranges).fetchColumns(columns).executionHints(hints)
  *     .samplerConfiguration(sampleConf).disableAutoAdjustRanges() // enabled by default
  *     .scanIsolation() // not available with batchScan()
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloOutputFormat.java b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloOutputFormat.java
index e6d4204fe8..16f7395ceb 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloOutputFormat.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloOutputFormat.java
@@ -45,7 +45,7 @@
  * Here is an example with all possible options:
  *
  * <pre>
- * AccumuloOutputFormat.configure().clientInfo(clientInfo).batchWriterOptions(bwConfig)
+ * AccumuloOutputFormat.configure().clientProperties(props).batchWriterOptions(bwConfig)
  *     .defaultTable(name).createTables() // disabled by default
  *     .simulationMode() // disabled by default
  *     .store(job);
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloRowInputFormat.java b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloRowInputFormat.java
index 899eb28292..2a47434cd0 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloRowInputFormat.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/AccumuloRowInputFormat.java
@@ -43,7 +43,7 @@
  * For Example:
  *
  * <pre>
- * AccumuloRowInputFormat.configure().clientInfo(info).table(name).auths(auths) // required
+ * AccumuloRowInputFormat.configure().clientProperties(props).table(name).auths(auths) // required
  *     .addIterator(iter1).ranges(ranges).fetchColumns(columns).executionHints(hints)
  *     .samplerConfiguration(sampleConf).disableAutoAdjustRanges() // enabled by default
  *     .scanIsolation() // not available with batchScan()
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/InputFormatBuilder.java b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/InputFormatBuilder.java
index a1e6fd907b..383bc9219c 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/InputFormatBuilder.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/InputFormatBuilder.java
@@ -20,9 +20,9 @@
 import java.util.Map;
 import java.util.Properties;
 
+import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.ClientInfo;
 import org.apache.accumulo.core.client.ClientSideIteratorScanner;
 import org.apache.accumulo.core.client.IsolatedScanner;
 import org.apache.accumulo.core.client.IteratorSetting;
@@ -46,13 +46,13 @@
    */
   interface ClientParams<T> {
     /**
-     * Set the connection information needed to communicate with Accumulo in this job. ClientInfo
-     * param can be created using {@link ClientInfo#from(Properties)}
+     * Set the connection information needed to communicate with Accumulo in this job.
+     * clientProperties param can be created using {@link Accumulo#newClientProperties()}
      *
-     * @param clientInfo
+     * @param clientProperties
      *          Accumulo connection information
      */
-    TableParams<T> clientInfo(ClientInfo clientInfo);
+    TableParams<T> clientProperties(Properties clientProperties);
   }
 
   /**
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/OutputFormatBuilder.java b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/OutputFormatBuilder.java
index e12d803ef9..44a5cc0f62 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/OutputFormatBuilder.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoop/mapreduce/OutputFormatBuilder.java
@@ -18,7 +18,7 @@
 
 import java.util.Properties;
 
-import org.apache.accumulo.core.client.ClientInfo;
+import org.apache.accumulo.core.client.Accumulo;
 
 /**
  * Builder for all the information needed for the Map Reduce job. Fluent API used by
@@ -35,13 +35,13 @@
    */
   interface ClientParams<T> {
     /**
-     * Set the connection information needed to communicate with Accumulo in this job. ClientInfo
-     * param can be created using {@link ClientInfo#from(Properties)}
+     * Set the connection information needed to communicate with Accumulo in this job.
+     * clientProperties param can be created using {@link Accumulo#newClientProperties()}
      *
-     * @param clientInfo
+     * @param clientProperties
      *          Accumulo connection information
      */
-    OutputOptions<T> clientInfo(ClientInfo clientInfo);
+    OutputOptions<T> clientProperties(Properties clientProperties);
   }
 
   /**
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/InputFormatBuilderImpl.java b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/InputFormatBuilderImpl.java
index 6350c9e1f7..71101ff5ab 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/InputFormatBuilderImpl.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/InputFormatBuilderImpl.java
@@ -22,6 +22,7 @@
 import java.util.Map;
 import java.util.Objects;
 import java.util.Optional;
+import java.util.Properties;
 
 import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
@@ -63,8 +64,9 @@ public InputFormatBuilderImpl(Class<?> callingClass) {
   }
 
   @Override
-  public InputFormatBuilder.TableParams<T> clientInfo(ClientInfo clientInfo) {
-    this.clientInfo = Objects.requireNonNull(clientInfo, "ClientInfo must not be null");
+  public InputFormatBuilder.TableParams<T> clientProperties(Properties clientProperties) {
+    this.clientInfo = ClientInfo
+        .from(Objects.requireNonNull(clientProperties, "clientProperties must not be null"));
     return this;
   }
 
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/OutputFormatBuilderImpl.java b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/OutputFormatBuilderImpl.java
index 0e9a1ca422..a0e760e95b 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/OutputFormatBuilderImpl.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/OutputFormatBuilderImpl.java
@@ -23,6 +23,7 @@
 
 import java.util.Objects;
 import java.util.Optional;
+import java.util.Properties;
 
 import org.apache.accumulo.core.client.ClientInfo;
 import org.apache.accumulo.hadoop.mapreduce.OutputFormatBuilder;
@@ -39,8 +40,8 @@
   boolean simulationMode = false;
 
   @Override
-  public OutputFormatBuilder.OutputOptions<T> clientInfo(ClientInfo clientInfo) {
-    this.clientInfo = Objects.requireNonNull(clientInfo, "ClientInfo must not be null");
+  public OutputFormatBuilder.OutputOptions<T> clientProperties(Properties clientProperties) {
+    this.clientInfo = ClientInfo.from(Objects.requireNonNull(clientProperties, "ClientInfo must not be null"));
     return this;
   }
 
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/MapReduceClientOnDefaultTable.java b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/MapReduceClientOnDefaultTable.java
index adfa62c629..84bc0bb972 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/MapReduceClientOnDefaultTable.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/MapReduceClientOnDefaultTable.java
@@ -44,9 +44,10 @@ public void setAccumuloConfigs(Job job)
       throws IOException, AccumuloException, AccumuloSecurityException {
     final String tableName = getTableName();
     final ClientInfo info = getClientInfo();
-    AccumuloInputFormat.configure().clientInfo(info).table(tableName).auths(auths).store(job);
-    AccumuloOutputFormat.configure().clientInfo(info).defaultTable(tableName).createTables()
-        .store(job);
+    AccumuloInputFormat.configure().clientProperties(info.getProperties()).table(tableName)
+        .auths(auths).store(job);
+    AccumuloOutputFormat.configure().clientProperties(info.getProperties()).defaultTable(tableName)
+        .createTables().store(job);
   }
 
 }
diff --git a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/MapReduceClientOnRequiredTable.java b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/MapReduceClientOnRequiredTable.java
index aed3382d70..1a63d59af0 100644
--- a/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/MapReduceClientOnRequiredTable.java
+++ b/hadoop-mapreduce/src/main/java/org/apache/accumulo/hadoopImpl/mapreduce/lib/MapReduceClientOnRequiredTable.java
@@ -37,9 +37,10 @@ public void setAccumuloConfigs(Job job)
       throws IOException, AccumuloException, AccumuloSecurityException {
     final String tableName = getTableName();
     final ClientInfo info = getClientInfo();
-    AccumuloInputFormat.configure().clientInfo(info).table(tableName).auths(auths).store(job);
-    AccumuloOutputFormat.configure().clientInfo(info).defaultTable(tableName).createTables()
-        .store(job);
+    AccumuloInputFormat.configure().clientProperties(info.getProperties()).table(tableName)
+        .auths(auths).store(job);
+    AccumuloOutputFormat.configure().clientProperties(info.getProperties()).defaultTable(tableName)
+        .createTables().store(job);
   }
 
   public String getTableName() {
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloFileOutputFormatIT.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloFileOutputFormatIT.java
index 41ae3683dd..bc21ad4c7e 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloFileOutputFormatIT.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloFileOutputFormatIT.java
@@ -155,7 +155,7 @@ public int run(String[] args) throws Exception {
 
       job.setInputFormat(AccumuloInputFormat.class);
 
-      AccumuloInputFormat.configure().clientInfo(getClientInfo()).table(table)
+      AccumuloInputFormat.configure().clientProperties(getClientInfo().getProperties()).table(table)
           .auths(Authorizations.EMPTY).store(job);
       AccumuloFileOutputFormat.configure().outputPath(new Path(args[1])).sampler(SAMPLER_CONFIG)
           .store(job);
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloInputFormatIT.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloInputFormatIT.java
index 083865f632..6f7ca84ca7 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloInputFormatIT.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloInputFormatIT.java
@@ -125,7 +125,8 @@ public int run(String[] args) throws Exception {
       job.setInputFormat(AccumuloInputFormat.class);
 
       InputFormatBuilder.InputFormatOptions<JobConf> opts = AccumuloInputFormat.configure()
-          .clientInfo(getClientInfo()).table(table).auths(Authorizations.EMPTY);
+          .clientProperties(getClientInfo().getProperties()).table(table)
+          .auths(Authorizations.EMPTY);
       if (batchScan)
         opts.batchScan();
       if (sample) {
@@ -220,8 +221,8 @@ public void testCorrectRangeInputSplits() throws Exception {
     try (AccumuloClient accumuloClient = getAccumuloClient()) {
       accumuloClient.tableOperations().create(table);
 
-      AccumuloInputFormat.configure().clientInfo(getClientInfo()).table(table).auths(auths)
-          .fetchColumns(fetchColumns).scanIsolation().localIterators().store(job);
+      AccumuloInputFormat.configure().clientProperties(getClientInfo().getProperties()).table(table)
+          .auths(auths).fetchColumns(fetchColumns).scanIsolation().localIterators().store(job);
 
       AccumuloInputFormat aif = new AccumuloInputFormat();
 
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloOutputFormatIT.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloOutputFormatIT.java
index 98eadd47f2..87438e6868 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloOutputFormatIT.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloOutputFormatIT.java
@@ -27,13 +27,13 @@
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.Map.Entry;
+import java.util.Properties;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.BatchWriterConfig;
-import org.apache.accumulo.core.client.ClientInfo;
 import org.apache.accumulo.core.client.MutationsRejectedException;
 import org.apache.accumulo.core.client.Scanner;
 import org.apache.accumulo.core.conf.Property;
@@ -83,7 +83,8 @@ public void testMapred() throws Exception {
       // set the max memory so that we ensure we don't flush on the write.
       batchConfig.setMaxMemory(Long.MAX_VALUE);
       AccumuloOutputFormat outputFormat = new AccumuloOutputFormat();
-      AccumuloOutputFormat.configure().clientInfo(getClientInfo(batchConfig)).store(job);
+      AccumuloOutputFormat.configure().clientProperties(getClientInfo(batchConfig).getProperties())
+          .store(job);
      RecordWriter<Text,Mutation> writer = outputFormat.getRecordWriter(null, job, "Test", null);
 
       try {
@@ -168,10 +169,10 @@ public int run(String[] args) throws Exception {
 
       job.setInputFormat(AccumuloInputFormat.class);
 
-      ClientInfo info = ClientInfo
-          .from(Accumulo.newClientProperties().to(instanceName, zooKeepers).as(user, pass).build());
+      Properties cp = Accumulo.newClientProperties().to(instanceName, zooKeepers).as(user, pass)
+          .build();
 
-      AccumuloInputFormat.configure().clientInfo(info).table(table1).auths(Authorizations.EMPTY)
+      AccumuloInputFormat.configure().clientProperties(cp).table(table1).auths(Authorizations.EMPTY)
           .store(job);
 
       job.setMapperClass(TestMapper.class);
@@ -181,7 +182,7 @@ public int run(String[] args) throws Exception {
       job.setOutputKeyClass(Text.class);
       job.setOutputValueClass(Mutation.class);
 
-      AccumuloOutputFormat.configure().clientInfo(info).defaultTable(table2).store(job);
+      AccumuloOutputFormat.configure().clientProperties(cp).defaultTable(table2).store(job);
 
       job.setNumReduceTasks(0);
 
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloRowInputFormatIT.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloRowInputFormatIT.java
index c3854377c0..c5676e70c0 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloRowInputFormatIT.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/AccumuloRowInputFormatIT.java
@@ -165,8 +165,8 @@ public int run(String[] args) throws Exception {
 
       job.setInputFormat(AccumuloRowInputFormat.class);
 
-      AccumuloRowInputFormat.configure().clientInfo(getClientInfo()).table(table)
-          .auths(Authorizations.EMPTY).store(job);
+      AccumuloRowInputFormat.configure().clientProperties(getClientInfo().getProperties())
+          .table(table).auths(Authorizations.EMPTY).store(job);
 
       job.setMapperClass(TestMapper.class);
       job.setMapOutputKeyClass(Key.class);
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/TokenFileIT.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/TokenFileIT.java
index dbf42d059a..ba153a5c06 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/TokenFileIT.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapred/TokenFileIT.java
@@ -27,11 +27,12 @@
 import java.nio.file.Paths;
 import java.util.Iterator;
 import java.util.Map.Entry;
+import java.util.Properties;
 
+import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.BatchWriterConfig;
-import org.apache.accumulo.core.client.ClientInfo;
 import org.apache.accumulo.core.client.Scanner;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
@@ -104,7 +105,7 @@ public int run(String[] args) throws Exception {
       }
 
       String tokenFile = args[0];
-      ClientInfo ci = ClientInfo.from(Paths.get(tokenFile));
+      Properties cp = Accumulo.newClientProperties().from(Paths.get(tokenFile)).build();
       String table1 = args[1];
       String table2 = args[2];
 
@@ -113,7 +114,7 @@ public int run(String[] args) throws Exception {
 
       job.setInputFormat(AccumuloInputFormat.class);
 
-      AccumuloInputFormat.configure().clientInfo(ci).table(table1).auths(Authorizations.EMPTY)
+      AccumuloInputFormat.configure().clientProperties(cp).table(table1).auths(Authorizations.EMPTY)
           .store(job);
 
       job.setMapperClass(TestMapper.class);
@@ -123,7 +124,7 @@ public int run(String[] args) throws Exception {
       job.setOutputKeyClass(Text.class);
       job.setOutputValueClass(Mutation.class);
 
-      AccumuloOutputFormat.configure().clientInfo(ci).defaultTable(table2).store(job);
+      AccumuloOutputFormat.configure().clientProperties(cp).defaultTable(table2).store(job);
 
       job.setNumReduceTasks(0);
 
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/AccumuloFileOutputFormatIT.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/AccumuloFileOutputFormatIT.java
index 2c8c8ff550..4797454084 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/AccumuloFileOutputFormatIT.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/AccumuloFileOutputFormatIT.java
@@ -166,7 +166,7 @@ public int run(String[] args) throws Exception {
 
       job.setInputFormatClass(AccumuloInputFormat.class);
 
-      AccumuloInputFormat.configure().clientInfo(getClientInfo()).table(table)
+      AccumuloInputFormat.configure().clientProperties(getClientInfo().getProperties()).table(table)
           .auths(Authorizations.EMPTY).store(job);
       AccumuloFileOutputFormat.configure().outputPath(new Path(args[1])).sampler(SAMPLER_CONFIG)
           .store(job);
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/AccumuloInputFormatIT.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/AccumuloInputFormatIT.java
index a80672677a..a1b4b6f3df 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/AccumuloInputFormatIT.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/AccumuloInputFormatIT.java
@@ -106,7 +106,7 @@ public void testGetSplits() throws Exception {
     insertData(table, currentTimeMillis());
 
     Job job = Job.getInstance();
-    AccumuloInputFormat.configure().clientInfo(getClientInfo()).table(table)
+    AccumuloInputFormat.configure().clientProperties(getClientInfo().getProperties()).table(table)
         .auths(Authorizations.EMPTY).scanIsolation().store(job);
 
     // split table
@@ -126,13 +126,13 @@ public void testGetSplits() throws Exception {
     List<Range> ranges = new ArrayList<>();
     for (Text text : actualSplits)
       ranges.add(new Range(text));
-    AccumuloInputFormat.configure().clientInfo(getClientInfo()).table(table)
+    AccumuloInputFormat.configure().clientProperties(getClientInfo().getProperties()).table(table)
         .auths(Authorizations.EMPTY).ranges(ranges).store(job);
     splits = inputFormat.getSplits(job);
     assertEquals(actualSplits.size(), splits.size());
 
     // offline mode
-    AccumuloInputFormat.configure().clientInfo(getClientInfo()).table(table)
+    AccumuloInputFormat.configure().clientProperties(getClientInfo().getProperties()).table(table)
         .auths(Authorizations.EMPTY).offlineScan().store(job);
     try {
       inputFormat.getSplits(job);
@@ -148,18 +148,18 @@ public void testGetSplits() throws Exception {
     for (int i = 0; i < 5; i++)
       // overlapping ranges
       ranges.add(new Range(String.format("%09d", i), String.format("%09d", i + 2)));
-    AccumuloInputFormat.configure().clientInfo(getClientInfo()).table(table)
+    AccumuloInputFormat.configure().clientProperties(getClientInfo().getProperties()).table(table)
         .auths(Authorizations.EMPTY).ranges(ranges).offlineScan().store(job);
     splits = inputFormat.getSplits(job);
     assertEquals(2, splits.size());
 
-    AccumuloInputFormat.configure().clientInfo(getClientInfo()).table(table)
+    AccumuloInputFormat.configure().clientProperties(getClientInfo().getProperties()).table(table)
         .auths(Authorizations.EMPTY).disableAutoAdjustRanges().offlineScan().store(job);
     splits = inputFormat.getSplits(job);
     assertEquals(ranges.size(), splits.size());
 
     // BatchScan not available for offline scans
-    AccumuloInputFormat.configure().clientInfo(getClientInfo()).table(table)
+    AccumuloInputFormat.configure().clientProperties(getClientInfo().getProperties()).table(table)
         .auths(Authorizations.EMPTY).batchScan().store(job);
     try {
       inputFormat.getSplits(job);
@@ -168,27 +168,27 @@ public void testGetSplits() throws Exception {
 
     // table online tests
     client.tableOperations().online(table, true);
-    AccumuloInputFormat.configure().clientInfo(getClientInfo()).table(table)
+    AccumuloInputFormat.configure().clientProperties(getClientInfo().getProperties()).table(table)
         .auths(Authorizations.EMPTY).store(job);
     // test for resumption of success
     splits = inputFormat.getSplits(job);
     assertEquals(2, splits.size());
 
     // BatchScan not available with isolated iterators
-    AccumuloInputFormat.configure().clientInfo(getClientInfo()).table(table)
+    AccumuloInputFormat.configure().clientProperties(getClientInfo().getProperties()).table(table)
         .auths(Authorizations.EMPTY).scanIsolation().store(job);
 
     splits = inputFormat.getSplits(job);
     assertEquals(2, splits.size());
 
     // BatchScan not available with local iterators
-    AccumuloInputFormat.configure().clientInfo(getClientInfo()).table(table)
+    AccumuloInputFormat.configure().clientProperties(getClientInfo().getProperties()).table(table)
         .auths(Authorizations.EMPTY).localIterators().store(job);
 
     splits = inputFormat.getSplits(job);
     assertEquals(2, splits.size());
 
-    AccumuloInputFormat.configure().clientInfo(getClientInfo()).table(table)
+    AccumuloInputFormat.configure().clientProperties(getClientInfo().getProperties()).table(table)
         .auths(Authorizations.EMPTY).batchScan().store(job);
 
     // Check we are getting back correct type pf split
@@ -285,8 +285,9 @@ public int run(String[] args) throws Exception {
 
       job.setInputFormatClass(inputFormatClass);
 
-      InputFormatOptions<Job> opts = AccumuloInputFormat.configure().clientInfo(getClientInfo())
-          .table(table).auths(Authorizations.EMPTY);
+      InputFormatOptions<Job> opts = AccumuloInputFormat.configure()
+          .clientProperties(getClientInfo().getProperties()).table(table)
+          .auths(Authorizations.EMPTY);
       if (sample)
         opts = opts.samplerConfiguration(SAMPLER_CONFIG);
       if (batchScan)
@@ -406,8 +407,8 @@ public void testCorrectRangeInputSplits() throws Exception {
     AccumuloClient accumuloClient = getAccumuloClient();
     accumuloClient.tableOperations().create(table);
 
-    InputFormatOptions<Job> opts = AccumuloInputFormat.configure().clientInfo(getClientInfo())
-        .table(table).auths(auths);
+    InputFormatOptions<Job> opts = AccumuloInputFormat.configure()
+        .clientProperties(getClientInfo().getProperties()).table(table).auths(auths);
     opts.fetchColumns(fetchColumns).scanIsolation().localIterators().store(job);
 
     AccumuloInputFormat aif = new AccumuloInputFormat();
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/AccumuloOutputFormatIT.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/AccumuloOutputFormatIT.java
index badebe9fbc..288fe70f00 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/AccumuloOutputFormatIT.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/AccumuloOutputFormatIT.java
@@ -93,8 +93,8 @@ public int run(String[] args) throws Exception {
 
       job.setInputFormatClass(AccumuloInputFormat.class);
 
-      AccumuloInputFormat.configure().clientInfo(getClientInfo()).table(table1)
-          .auths(Authorizations.EMPTY).store(job);
+      AccumuloInputFormat.configure().clientProperties(getClientInfo().getProperties())
+          .table(table1).auths(Authorizations.EMPTY).store(job);
 
       job.setMapperClass(TestMapper.class);
       job.setMapOutputKeyClass(Key.class);
@@ -103,7 +103,8 @@ public int run(String[] args) throws Exception {
       job.setOutputKeyClass(Text.class);
       job.setOutputValueClass(Mutation.class);
 
-      AccumuloOutputFormat.configure().clientInfo(getClientInfo()).defaultTable(table2).store(job);
+      AccumuloOutputFormat.configure().clientProperties(getClientInfo().getProperties())
+          .defaultTable(table2).store(job);
 
       job.setNumReduceTasks(0);
 
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/AccumuloRowInputFormatIT.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/AccumuloRowInputFormatIT.java
index 6a836cf809..2d07dbd30d 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/AccumuloRowInputFormatIT.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/AccumuloRowInputFormatIT.java
@@ -159,8 +159,8 @@ public int run(String[] args) throws Exception {
 
       job.setInputFormatClass(AccumuloRowInputFormat.class);
 
-      AccumuloRowInputFormat.configure().clientInfo(getClientInfo()).table(table)
-          .auths(Authorizations.EMPTY).store(job);
+      AccumuloRowInputFormat.configure().clientProperties(getClientInfo().getProperties())
+          .table(table).auths(Authorizations.EMPTY).store(job);
 
       job.setMapperClass(TestMapper.class);
       job.setMapOutputKeyClass(Key.class);
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/RowHashIT.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/RowHashIT.java
index 801bb812f5..0b40cdf6e1 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/RowHashIT.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/RowHashIT.java
@@ -151,8 +151,8 @@ public int run(String[] args) throws Exception {
       Text cf = new Text(idx < 0 ? col : col.substring(0, idx));
       Text cq = idx < 0 ? null : new Text(col.substring(idx + 1));
       if (cf.getLength() > 0)
-        AccumuloInputFormat.configure().clientInfo(opts.getClientInfo()).table(opts.getTableName())
-            .auths(Authorizations.EMPTY)
+        AccumuloInputFormat.configure().clientProperties(opts.getClientInfo().getProperties())
+            .table(opts.getTableName()).auths(Authorizations.EMPTY)
             .fetchColumns(Collections.singleton(new IteratorSetting.Column(cf, cq))).store(job);
 
       job.setMapperClass(RowHash.HashDataMapper.class);
@@ -162,7 +162,8 @@ public int run(String[] args) throws Exception {
       job.setNumReduceTasks(0);
 
       job.setOutputFormatClass(AccumuloOutputFormat.class);
-      AccumuloOutputFormat.configure().clientInfo(opts.getClientInfo()).store(job);
+      AccumuloOutputFormat.configure().clientProperties(opts.getClientInfo().getProperties())
+          .store(job);
 
       job.waitForCompletion(true);
       return job.isSuccessful() ? 0 : 1;
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/TokenFileIT.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/TokenFileIT.java
index 6837c79ccc..dfb010324e 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/TokenFileIT.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/its/mapreduce/TokenFileIT.java
@@ -27,11 +27,12 @@
 import java.nio.file.Paths;
 import java.util.Iterator;
 import java.util.Map.Entry;
+import java.util.Properties;
 
+import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.BatchWriter;
 import org.apache.accumulo.core.client.BatchWriterConfig;
-import org.apache.accumulo.core.client.ClientInfo;
 import org.apache.accumulo.core.client.Scanner;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
@@ -94,7 +95,7 @@ public int run(String[] args) throws Exception {
       }
 
       String tokenFile = args[0];
-      ClientInfo ci = ClientInfo.from(Paths.get(tokenFile));
+      Properties cp = Accumulo.newClientProperties().from(Paths.get(tokenFile)).build();
       String table1 = args[1];
       String table2 = args[2];
 
@@ -104,7 +105,7 @@ public int run(String[] args) throws Exception {
 
       job.setInputFormatClass(AccumuloInputFormat.class);
 
-      AccumuloInputFormat.configure().clientInfo(ci).table(table1).auths(Authorizations.EMPTY)
+      AccumuloInputFormat.configure().clientProperties(cp).table(table1).auths(Authorizations.EMPTY)
           .store(job);
 
       job.setMapperClass(TestMapper.class);
@@ -114,7 +115,7 @@ public int run(String[] args) throws Exception {
       job.setOutputKeyClass(Text.class);
       job.setOutputValueClass(Mutation.class);
 
-      AccumuloOutputFormat.configure().clientInfo(ci).defaultTable(table2).store(job);
+      AccumuloOutputFormat.configure().clientProperties(cp).defaultTable(table2).store(job);
 
       job.setNumReduceTasks(0);
 
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/mapred/AccumuloInputFormatTest.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/mapred/AccumuloInputFormatTest.java
index 9f5b4fbf87..2401477166 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/mapred/AccumuloInputFormatTest.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/mapred/AccumuloInputFormatTest.java
@@ -16,9 +16,6 @@
  */
 package org.apache.accumulo.hadoop.mapred;
 
-import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
 import static org.junit.Assert.assertEquals;
 
 import java.io.ByteArrayOutputStream;
@@ -29,9 +26,7 @@
 import java.util.Properties;
 import java.util.Set;
 
-import org.apache.accumulo.core.client.ClientInfo;
 import org.apache.accumulo.core.client.IteratorSetting;
-import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.iterators.system.CountingIterator;
 import org.apache.accumulo.core.iterators.user.RegExFilter;
 import org.apache.accumulo.core.iterators.user.VersioningIterator;
@@ -50,6 +45,7 @@
 public class AccumuloInputFormatTest {
 
   private JobConf job;
+  private static Properties clientProperties;
 
   @Rule
   public TestName test = new TestName();
@@ -59,16 +55,10 @@ public void createJob() {
     job = new JobConf();
   }
 
-  static ClientInfo clientInfo;
-
   @BeforeClass
   public static void setupClientInfo() {
-    clientInfo = createMock(ClientInfo.class);
-    AuthenticationToken token = createMock(AuthenticationToken.class);
-    Properties props = createMock(Properties.class);
-    expect(clientInfo.getAuthenticationToken()).andReturn(token).anyTimes();
-    expect(clientInfo.getProperties()).andReturn(props).anyTimes();
-    replay(clientInfo);
+    clientProperties = org.apache.accumulo.hadoop.mapreduce.AccumuloInputFormatTest
+        .setupClientProperties();
   }
 
   /**
@@ -76,8 +66,8 @@ public static void setupClientInfo() {
    */
   @Test
   public void testSetIterator() throws Exception {
-    InputFormatOptions<JobConf> opts = AccumuloInputFormat.configure().clientInfo(clientInfo)
-        .table("test").auths(Authorizations.EMPTY);
+    InputFormatOptions<JobConf> opts = AccumuloInputFormat.configure()
+        .clientProperties(clientProperties).table("test").auths(Authorizations.EMPTY);
 
     IteratorSetting is = new IteratorSetting(1, "WholeRow", WholeRowIterator.class);
     opts.addIterator(is).store(job);
@@ -89,8 +79,8 @@ public void testSetIterator() throws Exception {
 
   @Test
   public void testAddIterator() throws Exception {
-    InputFormatOptions<JobConf> opts = AccumuloInputFormat.configure().clientInfo(clientInfo)
-        .table("test").auths(Authorizations.EMPTY);
+    InputFormatOptions<JobConf> opts = AccumuloInputFormat.configure()
+        .clientProperties(clientProperties).table("test").auths(Authorizations.EMPTY);
 
     IteratorSetting iter1 = new IteratorSetting(1, "WholeRow", WholeRowIterator.class);
     IteratorSetting iter2 = new IteratorSetting(2, "Versions", VersioningIterator.class);
@@ -141,8 +131,8 @@ public void testIteratorOptionEncoding() throws Throwable {
     IteratorSetting iter1 = new IteratorSetting(1, "iter1", WholeRowIterator.class);
     iter1.addOption(key, value);
     // also test if reusing options will create duplicate iterators
-    InputFormatOptions<JobConf> opts = AccumuloInputFormat.configure().clientInfo(clientInfo)
-        .table("test").auths(Authorizations.EMPTY);
+    InputFormatOptions<JobConf> opts = AccumuloInputFormat.configure()
+        .clientProperties(clientProperties).table("test").auths(Authorizations.EMPTY);
     opts.addIterator(iter1).store(job);
 
      List<IteratorSetting> list = InputConfigurator.getIterators(AccumuloInputFormat.class, job);
@@ -171,8 +161,9 @@ public void testGetIteratorSettings() throws Exception {
     IteratorSetting iter1 = new IteratorSetting(1, "WholeRow", WholeRowIterator.class.getName());
     IteratorSetting iter2 = new IteratorSetting(2, "Versions", VersioningIterator.class.getName());
     IteratorSetting iter3 = new IteratorSetting(3, "Count", CountingIterator.class.getName());
-    AccumuloInputFormat.configure().clientInfo(clientInfo).table("test").auths(Authorizations.EMPTY)
-        .addIterator(iter1).addIterator(iter2).addIterator(iter3).store(job);
+    AccumuloInputFormat.configure().clientProperties(clientProperties).table("test")
+        .auths(Authorizations.EMPTY).addIterator(iter1).addIterator(iter2).addIterator(iter3)
+        .store(job);
 
      List<IteratorSetting> list = InputConfigurator.getIterators(AccumuloInputFormat.class, job);
 
@@ -203,8 +194,8 @@ public void testSetRegex() throws Exception {
 
     IteratorSetting is = new IteratorSetting(50, regex, RegExFilter.class);
     RegExFilter.setRegexs(is, regex, null, null, null, false);
-    AccumuloInputFormat.configure().clientInfo(clientInfo).table("test").auths(Authorizations.EMPTY)
-        .addIterator(is).store(job);
+    AccumuloInputFormat.configure().clientProperties(clientProperties).table("test")
+        .auths(Authorizations.EMPTY).addIterator(is).store(job);
 
     assertEquals(regex,
         InputConfigurator.getIterators(AccumuloInputFormat.class, job).get(0).getName());
@@ -218,8 +209,8 @@ public void testEmptyColumnFamily() throws Exception {
     cols.add(new IteratorSetting.Column(new Text(""), new Text("bar")));
     cols.add(new IteratorSetting.Column(new Text(""), new Text("")));
     cols.add(new IteratorSetting.Column(new Text("foo"), new Text("")));
-    AccumuloInputFormat.configure().clientInfo(clientInfo).table("test").auths(Authorizations.EMPTY)
-        .fetchColumns(cols).store(job);
+    AccumuloInputFormat.configure().clientProperties(clientProperties).table("test")
+        .auths(Authorizations.EMPTY).fetchColumns(cols).store(job);
 
     assertEquals(cols, InputConfigurator.getFetchedColumns(AccumuloInputFormat.class, job));
   }
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/mapred/AccumuloOutputFormatTest.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/mapred/AccumuloOutputFormatTest.java
index 46d86e77ad..4cdd872991 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/mapred/AccumuloOutputFormatTest.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/mapred/AccumuloOutputFormatTest.java
@@ -20,11 +20,11 @@
 import static org.junit.Assert.assertNotEquals;
 
 import java.io.IOException;
+import java.util.Properties;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.BatchWriterConfig;
-import org.apache.accumulo.core.client.ClientInfo;
 import org.apache.accumulo.hadoopImpl.mapreduce.lib.OutputConfigurator;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.mapred.JobConf;
@@ -48,9 +48,9 @@ public void testBWSettings() throws IOException {
     bwConfig.setMaxWriteThreads(42);
     bwConfig.setMaxMemory(1123581321L);
 
-    ClientInfo info = ClientInfo.from(Accumulo.newClientProperties().to("test", "zk")
-        .as("blah", "blah").batchWriterConfig(bwConfig).build());
-    AccumuloOutputFormat.configure().clientInfo(info).store(job);
+    Properties cp = Accumulo.newClientProperties().to("test", "zk").as("blah", "blah")
+        .batchWriterConfig(bwConfig).build();
+    AccumuloOutputFormat.configure().clientProperties(cp).store(job);
 
     AccumuloOutputFormat myAOF = new AccumuloOutputFormat() {
       @Override
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/mapreduce/AccumuloInputFormatTest.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/mapreduce/AccumuloInputFormatTest.java
index 0d9ecac905..227eb847a9 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/mapreduce/AccumuloInputFormatTest.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/mapreduce/AccumuloInputFormatTest.java
@@ -16,9 +16,6 @@
  */
 package org.apache.accumulo.hadoop.mapreduce;
 
-import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
 import static org.junit.Assert.assertEquals;
 
 import java.io.ByteArrayOutputStream;
@@ -29,9 +26,8 @@
 import java.util.Properties;
 import java.util.Set;
 
-import org.apache.accumulo.core.client.ClientInfo;
 import org.apache.accumulo.core.client.IteratorSetting;
-import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
+import org.apache.accumulo.core.conf.ClientProperty;
 import org.apache.accumulo.core.iterators.system.CountingIterator;
 import org.apache.accumulo.core.iterators.user.RegExFilter;
 import org.apache.accumulo.core.iterators.user.VersioningIterator;
@@ -46,16 +42,21 @@
 import org.junit.Test;
 
 public class AccumuloInputFormatTest {
-  static ClientInfo clientInfo;
+  static Properties clientProperties;
 
   @BeforeClass
-  public static void setupClientInfo() {
-    clientInfo = createMock(ClientInfo.class);
-    AuthenticationToken token = createMock(AuthenticationToken.class);
-    Properties props = createMock(Properties.class);
-    expect(clientInfo.getAuthenticationToken()).andReturn(token).anyTimes();
-    expect(clientInfo.getProperties()).andReturn(props).anyTimes();
-    replay(clientInfo);
+  public static void setup() {
+    clientProperties = setupClientProperties();
+  }
+
+  public static Properties setupClientProperties() {
+    Properties cp = new Properties();
+    cp.setProperty(ClientProperty.INSTANCE_NAME.getKey(), "test-instance");
+    cp.setProperty(ClientProperty.INSTANCE_ZOOKEEPERS.getKey(), "test-zk:2181");
+    cp.setProperty(ClientProperty.AUTH_TYPE.getKey(), "password");
+    cp.setProperty(ClientProperty.AUTH_PRINCIPAL.getKey(), "test-principal");
+    cp.setProperty(ClientProperty.AUTH_TOKEN.getKey(), "test-token");
+    return cp;
   }
 
   /**
@@ -66,8 +67,8 @@ public void testSetIterator() throws Exception {
     Job job = Job.getInstance();
 
     IteratorSetting is = new IteratorSetting(1, "WholeRow", WholeRowIterator.class);
-    AccumuloInputFormat.configure().clientInfo(clientInfo).table("test").auths(Authorizations.EMPTY)
-        .addIterator(is).store(job);
+    AccumuloInputFormat.configure().clientProperties(clientProperties).table("test")
+        .auths(Authorizations.EMPTY).addIterator(is).store(job);
     Configuration conf = job.getConfiguration();
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     is.write(new DataOutputStream(baos));
@@ -84,8 +85,9 @@ public void testAddIterator() throws Exception {
     IteratorSetting iter3 = new IteratorSetting(3, "Count", CountingIterator.class);
     iter3.addOption("v1", "1");
     iter3.addOption("junk", "\0omg:!\\xyzzy");
-    AccumuloInputFormat.configure().clientInfo(clientInfo).table("test").auths(Authorizations.EMPTY)
-        .addIterator(iter1).addIterator(iter2).addIterator(iter3).store(job);
+    AccumuloInputFormat.configure().clientProperties(clientProperties).table("test")
+        .auths(Authorizations.EMPTY).addIterator(iter1).addIterator(iter2).addIterator(iter3)
+        .store(job);
 
     List<IteratorSetting> list = InputConfigurator.getIterators(AccumuloInputFormat.class,
         job.getConfiguration());
@@ -131,8 +133,8 @@ public void testIteratorOptionEncoding() throws Throwable {
     iter1.addOption(key, value);
     Job job = Job.getInstance();
     // also test if reusing options will create duplicate iterators
-    InputFormatOptions<Job> opts = AccumuloInputFormat.configure().clientInfo(clientInfo)
-        .table("test").auths(Authorizations.EMPTY);
+    InputFormatOptions<Job> opts = AccumuloInputFormat.configure()
+        .clientProperties(clientProperties).table("test").auths(Authorizations.EMPTY);
     opts.addIterator(iter1).store(job);
 
     List<IteratorSetting> list = InputConfigurator.getIterators(AccumuloInputFormat.class,
@@ -164,8 +166,9 @@ public void testGetIteratorSettings() throws Exception {
     IteratorSetting iter1 = new IteratorSetting(1, "WholeRow", WholeRowIterator.class.getName());
     IteratorSetting iter2 = new IteratorSetting(2, "Versions", VersioningIterator.class.getName());
     IteratorSetting iter3 = new IteratorSetting(3, "Count", CountingIterator.class.getName());
-    AccumuloInputFormat.configure().clientInfo(clientInfo).table("test").auths(Authorizations.EMPTY)
-        .addIterator(iter1).addIterator(iter2).addIterator(iter3).store(job);
+    AccumuloInputFormat.configure().clientProperties(clientProperties).table("test")
+        .auths(Authorizations.EMPTY).addIterator(iter1).addIterator(iter2).addIterator(iter3)
+        .store(job);
 
     List<IteratorSetting> list = InputConfigurator.getIterators(AccumuloInputFormat.class,
         job.getConfiguration());
@@ -199,8 +202,8 @@ public void testSetRegex() throws Exception {
 
     IteratorSetting is = new IteratorSetting(50, regex, RegExFilter.class);
     RegExFilter.setRegexs(is, regex, null, null, null, false);
-    AccumuloInputFormat.configure().clientInfo(clientInfo).table("test").auths(Authorizations.EMPTY)
-        .addIterator(is).store(job);
+    AccumuloInputFormat.configure().clientProperties(clientProperties).table("test")
+        .auths(Authorizations.EMPTY).addIterator(is).store(job);
 
     assertEquals(regex, InputConfigurator
         .getIterators(AccumuloInputFormat.class, job.getConfiguration()).get(0).getName());
@@ -215,8 +218,8 @@ public void testEmptyColumnFamily() throws Exception {
     cols.add(new IteratorSetting.Column(new Text(""), new Text("bar")));
     cols.add(new IteratorSetting.Column(new Text(""), new Text("")));
     cols.add(new IteratorSetting.Column(new Text("foo"), new Text("")));
-    AccumuloInputFormat.configure().clientInfo(clientInfo).table("test").auths(Authorizations.EMPTY)
-        .fetchColumns(cols).store(job);
+    AccumuloInputFormat.configure().clientProperties(clientProperties).table("test")
+        .auths(Authorizations.EMPTY).fetchColumns(cols).store(job);
 
     assertEquals(cols,
         InputConfigurator.getFetchedColumns(AccumuloInputFormat.class, job.getConfiguration()));
diff --git a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/mapreduce/AccumuloOutputFormatTest.java b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/mapreduce/AccumuloOutputFormatTest.java
index f841183d06..35839be2da 100644
--- a/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/mapreduce/AccumuloOutputFormatTest.java
+++ b/hadoop-mapreduce/src/test/java/org/apache/accumulo/hadoop/mapreduce/AccumuloOutputFormatTest.java
@@ -20,11 +20,11 @@
 import static org.junit.Assert.assertNotEquals;
 
 import java.io.IOException;
+import java.util.Properties;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.accumulo.core.client.Accumulo;
 import org.apache.accumulo.core.client.BatchWriterConfig;
-import org.apache.accumulo.core.client.ClientInfo;
 import org.apache.accumulo.hadoopImpl.mapreduce.lib.OutputConfigurator;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
@@ -49,10 +49,10 @@ public void testBWSettings() throws IOException {
     bwConfig.setMaxWriteThreads(42);
     bwConfig.setMaxMemory(1123581321L);
 
-    ClientInfo info = ClientInfo.from(Accumulo.newClientProperties().to("test", "zk")
-        .as("blah", "blah").batchWriterConfig(bwConfig).build());
+    Properties cp = Accumulo.newClientProperties().to("test", "zk").as("blah", "blah")
+        .batchWriterConfig(bwConfig).build();
 
-    AccumuloOutputFormat.configure().clientInfo(info).store(job);
+    AccumuloOutputFormat.configure().clientProperties(cp).store(job);
 
     AccumuloOutputFormat myAOF = new AccumuloOutputFormat() {
       @Override
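
For readers tracking the API change, here is a minimal, hypothetical sketch of configuring
an input job against the new Properties-based builder, following the updated javadoc in the
diff above. The instance name, ZooKeeper string, credentials, and table name are placeholders,
not values from this PR.

import java.util.Properties;

import org.apache.accumulo.core.client.Accumulo;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.hadoop.mapreduce.AccumuloInputFormat;
import org.apache.hadoop.mapreduce.Job;

public class InputConfigureSketch {
  public static void main(String[] args) throws Exception {
    // Client connection settings are now plain java.util.Properties,
    // built with the fluent helper used throughout this diff.
    Properties props = Accumulo.newClientProperties()
        .to("test-instance", "zk1:2181") // placeholder instance name and ZooKeepers
        .as("test-user", "test-secret")  // placeholder credentials
        .build();

    Job job = Job.getInstance();
    job.setInputFormatClass(AccumuloInputFormat.class);

    // Same fluent chain as before, with clientProperties(props) replacing clientInfo(info).
    AccumuloInputFormat.configure().clientProperties(props).table("mytable") // placeholder table
        .auths(Authorizations.EMPTY).store(job);
  }
}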

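Callers that previously loaded a ClientInfo from a properties file (as in the TokenFileIT
change above) can build the same Properties from a path. A hypothetical output-side sketch,
with the file path and table name as placeholders:

import java.nio.file.Paths;
import java.util.Properties;

import org.apache.accumulo.core.client.Accumulo;
import org.apache.accumulo.hadoop.mapreduce.AccumuloOutputFormat;
import org.apache.hadoop.mapreduce.Job;

public class OutputConfigureSketch {
  public static void main(String[] args) throws Exception {
    // ClientInfo.from(Paths.get(file)) becomes Accumulo.newClientProperties().from(path).build().
    Properties props = Accumulo.newClientProperties()
        .from(Paths.get("/path/to/accumulo-client.properties")) // placeholder path
        .build();

    Job job = Job.getInstance();
    job.setOutputFormatClass(AccumuloOutputFormat.class);

    AccumuloOutputFormat.configure().clientProperties(props)
        .defaultTable("mytable") // placeholder table; createTables() makes it if absent
        .createTables().store(job);
  }
}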

 
