accumulo-commits mailing list archives

From mwa...@apache.org
Subject [accumulo-testing] branch master updated: Updates due to Accumulo 2.0 changes (#48)
Date Wed, 02 Jan 2019 17:50:56 GMT
This is an automated email from the ASF dual-hosted git repository.

mwalch pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/accumulo-testing.git


The following commit(s) were added to refs/heads/master by this push:
     new eb34fd9  Updates due to Accumulo 2.0 changes (#48)
eb34fd9 is described below

commit eb34fd93753e759b2b0a76fce270f14c43c0c947
Author: Mike Walch <mwalch@apache.org>
AuthorDate: Wed Jan 2 12:50:52 2019 -0500

    Updates due to Accumulo 2.0 changes (#48)
    
    * Use new MapReduce API and client classloader
    * Use createClient and close client (both patterns sketched below)
    * Updated codestyle.xml to match file in Accumulo repo
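    
    A minimal sketch of the two patterns named above (fluent MapReduce configuration
    and try-with-resources client handling), assuming a standard Accumulo 2.0 client
    properties file; the path and table name are placeholders, and the code in this
    repo goes through its own TestEnv/Opts helpers rather than Accumulo.newClient():
    
        import java.io.Reader;
        import java.nio.file.Files;
        import java.nio.file.Paths;
        import java.util.Properties;
        
        import org.apache.accumulo.core.client.Accumulo;
        import org.apache.accumulo.core.client.AccumuloClient;
        import org.apache.accumulo.hadoop.mapreduce.AccumuloInputFormat;
        import org.apache.accumulo.hadoop.mapreduce.AccumuloOutputFormat;
        import org.apache.hadoop.mapreduce.Job;
        
        public class NewApiSketch {
          public static void main(String[] args) throws Exception {
            String clientPropsPath = "/path/to/accumulo-client.properties"; // placeholder
            String table = "ci";                                            // placeholder
        
            // Clients are AutoCloseable in Accumulo 2.0, so they are created and
            // closed with try-with-resources instead of left open.
            try (AccumuloClient client = Accumulo.newClient().from(clientPropsPath).build()) {
              System.out.println("tables: " + client.tableOperations().list());
            }
        
            // Load the same client properties to hand to the MapReduce formats.
            Properties clientProps = new Properties();
            try (Reader in = Files.newBufferedReader(Paths.get(clientPropsPath))) {
              clientProps.load(in);
            }
        
            // The static setters on the old o.a.a.core.client.mapreduce formats are
            // replaced by fluent builders on o.a.a.hadoop.mapreduce, stored into the Job.
            Job job = Job.getInstance();
            job.setInputFormatClass(AccumuloInputFormat.class);
            job.setOutputFormatClass(AccumuloOutputFormat.class);
            AccumuloInputFormat.configure().clientProperties(clientProps).table(table).store(job);
            AccumuloOutputFormat.configure().clientProperties(clientProps).createTables(true)
                .defaultTable(table).store(job);
          }
        }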
---
 bin/rwalk                                          |   2 +-
 conf/env.sh.example                                |   2 +
 contrib/Eclipse-Accumulo-Codestyle.xml             |   4 +-
 pom.xml                                            |   5 +
 .../java/org/apache/accumulo/testing/TestEnv.java  |   3 +-
 .../testing/continuous/ContinuousBatchWalker.java  |   6 +-
 .../testing/continuous/ContinuousIngest.java       |  29 ++--
 .../testing/continuous/ContinuousMoru.java         |  44 +++--
 .../testing/continuous/ContinuousOpts.java         |  15 +-
 .../testing/continuous/ContinuousUtil.java         |   6 +-
 .../testing/continuous/ContinuousVerify.java       |  25 +--
 .../testing/continuous/ContinuousWalk.java         |   3 +-
 .../accumulo/testing/continuous/CreateTable.java   |   6 +-
 .../accumulo/testing/continuous/TimeBinner.java    |  18 ++-
 .../testing/continuous/UndefinedAnalyzer.java      | 159 ++++++++----------
 .../testing/ingest/BulkImportDirectory.java        |  12 +-
 .../apache/accumulo/testing/ingest/TestIngest.java |  56 ++++---
 .../accumulo/testing/ingest/VerifyIngest.java      |  46 ++++--
 .../apache/accumulo/testing/mapreduce/RowHash.java |  23 ++-
 .../accumulo/testing/mapreduce/TeraSortIngest.java |  43 +++--
 .../apache/accumulo/testing/merkle/MerkleTree.java |   6 +-
 .../accumulo/testing/merkle/MerkleTreeNode.java    |  15 +-
 .../accumulo/testing/merkle/cli/CompareTables.java |  93 +++++------
 .../testing/merkle/cli/ComputeRootHash.java        |  29 ++--
 .../testing/merkle/cli/GenerateHashes.java         | 178 +++++++++------------
 .../testing/merkle/cli/ManualComparison.java       |  73 ++++-----
 .../testing/merkle/ingest/RandomWorkload.java      |  28 ++--
 .../testing/merkle/skvi/DigestIterator.java        |  15 +-
 .../accumulo/testing/performance/Report.java       |   8 +-
 .../accumulo/testing/performance/impl/Compare.java |   3 +-
 .../testing/performance/impl/ContextualReport.java |   3 +-
 .../testing/performance/impl/ListTests.java        |   6 +-
 .../testing/performance/impl/PerfTestRunner.java   |   6 +-
 .../performance/tests/RandomCachedLookupsPT.java   |  50 ++++--
 .../testing/performance/tests/ScanExecutorPT.java  |  21 ++-
 .../performance/tests/ScanFewFamiliesPT.java       |   9 +-
 .../testing/performance/util/TestData.java         |   3 +-
 .../apache/accumulo/testing/randomwalk/Module.java |  24 ++-
 .../accumulo/testing/randomwalk/RandWalkEnv.java   |  11 +-
 .../apache/accumulo/testing/randomwalk/State.java  |   3 +-
 .../testing/randomwalk/bulk/BulkImportTest.java    |  17 +-
 .../testing/randomwalk/bulk/BulkPlusOne.java       |   9 +-
 .../accumulo/testing/randomwalk/bulk/Compact.java  |   3 +-
 .../testing/randomwalk/bulk/ConsistencyCheck.java  |   6 +-
 .../accumulo/testing/randomwalk/bulk/Merge.java    |   6 +-
 .../testing/randomwalk/bulk/SelectiveBulkTest.java |   6 +-
 .../testing/randomwalk/bulk/SelectiveQueueing.java |   3 +-
 .../accumulo/testing/randomwalk/bulk/Split.java    |   3 +-
 .../accumulo/testing/randomwalk/bulk/Verify.java   |  67 ++++----
 .../testing/randomwalk/concurrent/BulkImport.java  |  12 +-
 .../concurrent/ChangeAuthorizations.java           |   3 +-
 .../randomwalk/concurrent/ChangePermissions.java   |  10 +-
 .../randomwalk/concurrent/CheckPermission.java     |  10 +-
 .../testing/randomwalk/concurrent/CloneTable.java  |   3 +-
 .../testing/randomwalk/concurrent/Compact.java     |   9 +-
 .../randomwalk/concurrent/ConcurrentFixture.java   |   6 +-
 .../testing/randomwalk/concurrent/Config.java      |  42 +++--
 .../randomwalk/concurrent/IsolatedScan.java        |   3 +-
 .../testing/randomwalk/concurrent/Merge.java       |   6 +-
 .../testing/randomwalk/concurrent/Replication.java |   7 +-
 .../testing/randomwalk/conditional/Setup.java      |   6 +-
 .../testing/randomwalk/conditional/Transfer.java   |   6 +-
 .../testing/randomwalk/conditional/Verify.java     |   6 +-
 .../accumulo/testing/randomwalk/image/Commit.java  |   3 +-
 .../testing/randomwalk/image/ImageFixture.java     |   6 +-
 .../testing/randomwalk/image/ScanMeta.java         |   3 +-
 .../accumulo/testing/randomwalk/image/Verify.java  |   3 +-
 .../accumulo/testing/randomwalk/image/Write.java   |   6 +-
 .../testing/randomwalk/multitable/CopyTool.java    |  16 +-
 .../randomwalk/multitable/MultiTableFixture.java   |   3 +-
 .../testing/randomwalk/security/AlterTable.java    |  25 +--
 .../randomwalk/security/AlterTablePerm.java        |  27 ++--
 .../testing/randomwalk/security/Authenticate.java  |  21 ++-
 .../testing/randomwalk/security/ChangePass.java    |  10 +-
 .../testing/randomwalk/security/CreateTable.java   |  12 +-
 .../testing/randomwalk/security/CreateUser.java    |  15 +-
 .../testing/randomwalk/security/DropTable.java     |  15 +-
 .../testing/randomwalk/security/DropUser.java      |   9 +-
 .../randomwalk/security/SecurityFixture.java       |   6 +-
 .../randomwalk/security/SecurityHelper.java        |   9 +-
 .../testing/randomwalk/security/SetAuths.java      |   9 +-
 .../testing/randomwalk/security/TableOp.java       |  77 ++++++---
 .../testing/randomwalk/security/Validate.java      |  30 ++--
 .../randomwalk/security/WalkingSecurity.java       |  42 +++--
 .../testing/randomwalk/sequential/BatchVerify.java |   9 +-
 .../testing/randomwalk/sequential/Commit.java      |   3 +-
 .../randomwalk/sequential/MapRedVerifyTool.java    |  23 +--
 .../randomwalk/sequential/SequentialFixture.java   |   6 +-
 .../testing/randomwalk/shard/BulkInsert.java       |  27 ++--
 .../testing/randomwalk/shard/CloneIndex.java       |   8 +-
 .../testing/randomwalk/shard/CompactFilter.java    |  12 +-
 .../accumulo/testing/randomwalk/shard/Delete.java  |   3 +-
 .../testing/randomwalk/shard/DeleteSomeDocs.java   |   6 +-
 .../testing/randomwalk/shard/DeleteWord.java       |   6 +-
 .../testing/randomwalk/shard/ExportIndex.java      |  28 ++--
 .../accumulo/testing/randomwalk/shard/Grep.java    |  12 +-
 .../accumulo/testing/randomwalk/shard/Insert.java  |  17 +-
 .../accumulo/testing/randomwalk/shard/Merge.java   |   6 +-
 .../accumulo/testing/randomwalk/shard/Reindex.java |   3 +-
 .../accumulo/testing/randomwalk/shard/Search.java  |   9 +-
 .../testing/randomwalk/shard/ShardFixture.java     |  18 ++-
 .../accumulo/testing/randomwalk/shard/Split.java   |   3 +-
 .../testing/randomwalk/shard/VerifyIndex.java      |  12 +-
 .../accumulo/testing/scalability/Ingest.java       |   7 +-
 .../apache/accumulo/testing/scalability/Run.java   |   9 +-
 .../accumulo/testing/scalability/ScaleTest.java    |   6 +-
 .../accumulo/testing/stress/RandomMutations.java   |   8 +-
 .../org/apache/accumulo/testing/stress/Scan.java   |  51 +++---
 .../apache/accumulo/testing/stress/ScanOpts.java   |   6 +-
 .../org/apache/accumulo/testing/stress/Stream.java |   3 +-
 .../org/apache/accumulo/testing/stress/Write.java  |  71 ++++----
 .../accumulo/testing/stress/WriteOptions.java      |  45 ++++--
 .../accumulo/testing/randomwalk/FrameworkTest.java |   6 +-
 .../randomwalk/ReplicationRandomWalkIT.java        |   2 +-
 114 files changed, 1238 insertions(+), 874 deletions(-)

diff --git a/bin/rwalk b/bin/rwalk
index 30cf0a7..6bd7299 100755
--- a/bin/rwalk
+++ b/bin/rwalk
@@ -34,7 +34,7 @@ else
   . "$at_home"/conf/env.sh.example
 fi
 
-export CLASSPATH="$TEST_JAR_PATH:$HADOOP_API_JAR:$HADOOP_RUNTIME_JAR:$CLASSPATH"
+export CLASSPATH="$TEST_JAR_PATH:$HADOOP_API_JAR:$HADOOP_RUNTIME_JAR:$HADOOP_CONF_DIR:$CLASSPATH"
 
 randomwalk_main="org.apache.accumulo.testing.randomwalk.Framework"
 
diff --git a/conf/env.sh.example b/conf/env.sh.example
index bd372c3..a48451c 100644
--- a/conf/env.sh.example
+++ b/conf/env.sh.example
@@ -18,6 +18,8 @@
 
 ## Hadoop installation
 export HADOOP_HOME="${HADOOP_HOME:-/path/to/hadoop}"
+## Hadoop configuration
+export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_HOME}/etc/hadoop}"
 ## Accumulo installation
 export ACCUMULO_HOME="${ACCUMULO_HOME:-/path/to/accumulo}"
 ## Path to Accumulo client properties
diff --git a/contrib/Eclipse-Accumulo-Codestyle.xml b/contrib/Eclipse-Accumulo-Codestyle.xml
index 42e03bf..3b04c4d 100644
--- a/contrib/Eclipse-Accumulo-Codestyle.xml
+++ b/contrib/Eclipse-Accumulo-Codestyle.xml
@@ -90,7 +90,7 @@
 <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
 <setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="160"/>
+<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="100"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
@@ -221,7 +221,7 @@
 <setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
 <setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
 <setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="160"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="100"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package" value="insert"/>
 <setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
diff --git a/pom.xml b/pom.xml
index a9c534f..b03c116 100644
--- a/pom.xml
+++ b/pom.xml
@@ -154,6 +154,11 @@
     </dependency>
     <dependency>
       <groupId>org.apache.accumulo</groupId>
+      <artifactId>accumulo-hadoop-mapreduce</artifactId>
+      <version>${accumulo.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.accumulo</groupId>
       <artifactId>accumulo-minicluster</artifactId>
       <version>${accumulo.version}</version>
       <scope>test</scope>
diff --git a/src/main/java/org/apache/accumulo/testing/TestEnv.java b/src/main/java/org/apache/accumulo/testing/TestEnv.java
index 1df78c1..e5ffa1a 100644
--- a/src/main/java/org/apache/accumulo/testing/TestEnv.java
+++ b/src/main/java/org/apache/accumulo/testing/TestEnv.java
@@ -92,7 +92,8 @@ public class TestEnv implements AutoCloseable {
       hadoopConfig.set("fs.defaultFS", getHdfsRoot());
       // Below is required due to bundled jar breaking default config.
       // See http://stackoverflow.com/questions/17265002/hadoop-no-filesystem-for-scheme-file
-      hadoopConfig.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
+      hadoopConfig
+          .set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
       hadoopConfig.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
       hadoopConfig.set("mapreduce.framework.name", "yarn");
       hadoopConfig.set("yarn.resourcemanager.hostname", getYarnResourceManager());
diff --git a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousBatchWalker.java b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousBatchWalker.java
index c596041..1716bdf 100644
--- a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousBatchWalker.java
+++ b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousBatchWalker.java
@@ -72,7 +72,8 @@ public class ContinuousBatchWalker {
     }
   }
 
-  private static void runBatchScan(int batchSize, BatchScanner bs, Set<Text> batch, List<Range> ranges) {
+  private static void runBatchScan(int batchSize, BatchScanner bs, Set<Text> batch,
+      List<Range> ranges) {
     bs.setRanges(ranges);
 
     Set<Text> rowsSeen = new HashSet<>();
@@ -106,7 +107,8 @@ public class ContinuousBatchWalker {
       System.err.println("Extra seen : " + copy1);
       System.err.println("Not seen   : " + copy2);
     } else {
-      System.out.printf("BRQ %d %d %d %d %d%n", t1, (t2 - t1), rowsSeen.size(), count, (int) (rowsSeen.size() / ((t2 - t1) / 1000.0)));
+      System.out.printf("BRQ %d %d %d %d %d%n", t1, (t2 - t1), rowsSeen.size(), count,
+          (int) (rowsSeen.size() / ((t2 - t1) / 1000.0)));
     }
   }
 
diff --git a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousIngest.java b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousIngest.java
index 6888357..a62ed13 100644
--- a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousIngest.java
+++ b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousIngest.java
@@ -126,7 +126,8 @@ public class ContinuousIngest {
     AccumuloClient client = env.getAccumuloClient();
     String tableName = env.getAccumuloTableName();
     if (!client.tableOperations().exists(tableName)) {
-      throw new TableNotFoundException(null, tableName, "Consult the README and create the table before starting ingest.");
+      throw new TableNotFoundException(null, tableName,
+          "Consult the README and create the table before starting ingest.");
     }
 
     BatchWriter bw = client.createBatchWriter(tableName);
@@ -136,7 +137,8 @@ public class ContinuousIngest {
 
     byte[] ingestInstanceId = UUID.randomUUID().toString().getBytes(UTF_8);
 
-    log.info(String.format("UUID %d %s", System.currentTimeMillis(), new String(ingestInstanceId, UTF_8)));
+    log.info(String.format("UUID %d %s", System.currentTimeMillis(), new String(ingestInstanceId,
+        UTF_8)));
 
     long count = 0;
     final int flushInterval = 1000000;
@@ -200,7 +202,8 @@ public class ContinuousIngest {
           long rowLong = genLong(rowMin, rowMax, r);
           byte[] prevRow = genRow(prevRows[index]);
           prevRows[index] = rowLong;
-          Mutation m = genMutation(rowLong, r.nextInt(maxColF), r.nextInt(maxColQ), cv, ingestInstanceId, count, prevRow, checksum);
+          Mutation m = genMutation(rowLong, r.nextInt(maxColF), r.nextInt(maxColQ), cv,
+              ingestInstanceId, count, prevRow, checksum);
           count++;
           bw.addMutation(m);
         }
@@ -214,8 +217,8 @@ public class ContinuousIngest {
       // create one big linked list, this makes all of the first inserts
       // point to something
       for (int index = 0; index < flushInterval - 1; index++) {
-        Mutation m = genMutation(firstRows[index], firstColFams[index], firstColQuals[index], cv, ingestInstanceId, count, genRow(prevRows[index + 1]),
-            checksum);
+        Mutation m = genMutation(firstRows[index], firstColFams[index], firstColQuals[index], cv,
+            ingestInstanceId, count, genRow(prevRows[index + 1]), checksum);
         count++;
         bw.addMutation(m);
       }
@@ -227,17 +230,19 @@ public class ContinuousIngest {
     bw.close();
   }
 
-  private static long flush(BatchWriter bw, long count, final int flushInterval, long lastFlushTime) throws MutationsRejectedException {
+  private static long flush(BatchWriter bw, long count, final int flushInterval, long lastFlushTime)
+      throws MutationsRejectedException {
     long t1 = System.currentTimeMillis();
     bw.flush();
     long t2 = System.currentTimeMillis();
-    log.info(String.format("FLUSH %d %d %d %d %d", t2, (t2 - lastFlushTime), (t2 - t1), count, flushInterval));
+    log.info(String.format("FLUSH %d %d %d %d %d", t2, (t2 - lastFlushTime), (t2 - t1), count,
+        flushInterval));
     lastFlushTime = t2;
     return lastFlushTime;
   }
 
-  public static Mutation genMutation(long rowLong, int cfInt, int cqInt, ColumnVisibility cv, byte[] ingestInstanceId, long count, byte[] prevRow,
-      boolean checksum) {
+  public static Mutation genMutation(long rowLong, int cfInt, int cqInt, ColumnVisibility cv,
+      byte[] ingestInstanceId, long count, byte[] prevRow, boolean checksum) {
     // Adler32 is supposed to be faster, but according to wikipedia is not
     // good for small data.... so used CRC32 instead
     CRC32 cksum = null;
@@ -257,7 +262,8 @@ public class ContinuousIngest {
 
     Mutation m = new Mutation(new Text(rowString));
 
-    m.put(new Text(cfString), new Text(cqString), cv, createValue(ingestInstanceId, count, prevRow, cksum));
+    m.put(new Text(cfString), new Text(cqString), cv,
+        createValue(ingestInstanceId, count, prevRow, cksum));
     return m;
   }
 
@@ -273,7 +279,8 @@ public class ContinuousIngest {
     return FastFormat.toZeroPaddedString(rowLong, 16, 16, EMPTY_BYTES);
   }
 
-  private static Value createValue(byte[] ingestInstanceId, long count, byte[] prevRow, Checksum cksum) {
+  private static Value createValue(byte[] ingestInstanceId, long count, byte[] prevRow,
+      Checksum cksum) {
     int dataLen = ingestInstanceId.length + 16 + (prevRow == null ? 0 : prevRow.length) + 3;
     if (cksum != null)
       dataLen += 8;
diff --git a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousMoru.java b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousMoru.java
index 67adc0b..663ed33 100644
--- a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousMoru.java
+++ b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousMoru.java
@@ -23,9 +23,8 @@ import java.util.Random;
 import java.util.Set;
 import java.util.UUID;
 
-import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
-import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
+import org.apache.accumulo.hadoop.mapreduce.AccumuloInputFormat;
+import org.apache.accumulo.hadoop.mapreduce.AccumuloOutputFormat;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Range;
@@ -42,8 +41,10 @@ import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
 /**
- * A map only job that reads a table created by continuous ingest and creates doubly linked list. This map reduce job tests the ability of a map only job to
- * read and write to accumulo at the same time. This map reduce job mutates the table in such a way that it should not create any undefined nodes.
+ * A map only job that reads a table created by continuous ingest and creates doubly linked list.
+ * This map reduce job tests the ability of a map only job to read and write to accumulo at the same
+ * time. This map reduce job mutates the table in such a way that it should not create any undefined
+ * nodes.
  */
 public class ContinuousMoru extends Configured implements Tool {
   private static final String PREFIX = ContinuousMoru.class.getSimpleName() + ".";
@@ -69,7 +70,7 @@ public class ContinuousMoru extends Configured implements Tool {
     private static final ColumnVisibility EMPTY_VIS = new ColumnVisibility();
 
     @Override
-    public void setup(Context context) throws IOException, InterruptedException {
+    public void setup(Context context) {
       int max_cf = context.getConfiguration().getInt(MAX_CF, -1);
       int max_cq = context.getConfiguration().getInt(MAX_CQ, -1);
 
@@ -98,8 +99,8 @@ public class ContinuousMoru extends Configured implements Tool {
         int offset = ContinuousWalk.getPrevRowOffset(val);
         if (offset > 0) {
           long rowLong = Long.parseLong(new String(val, offset, 16, UTF_8), 16);
-          Mutation m = ContinuousIngest.genMutation(rowLong, random.nextInt(max_cf), random.nextInt(max_cq), EMPTY_VIS, iiId, count++, key.getRowData()
-              .toArray(), true);
+          Mutation m = ContinuousIngest.genMutation(rowLong, random.nextInt(max_cf),
+              random.nextInt(max_cq), EMPTY_VIS, iiId, count++, key.getRowData().toArray(), true);
           context.write(null, m);
         }
 
@@ -110,7 +111,7 @@ public class ContinuousMoru extends Configured implements Tool {
   }
 
   @Override
-  public int run(String[] args) throws IOException, InterruptedException, ClassNotFoundException, AccumuloSecurityException {
+  public int run(String[] args) throws Exception {
 
     if (args.length != 2) {
       System.err.println("Usage: ContinuousMoru <testPropsPath> <clientPropsPath>");
@@ -118,31 +119,24 @@ public class ContinuousMoru extends Configured implements Tool {
     }
     ContinuousEnv env = new ContinuousEnv(args[0], args[1]);
 
-    Job job = Job.getInstance(getConf(), this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
+    Job job = Job.getInstance(getConf(),
+        this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
     job.setJarByClass(this.getClass());
-
     job.setInputFormatClass(AccumuloInputFormat.class);
 
-    AccumuloInputFormat.setClientProperties(job, env.getClientProps());
-    AccumuloInputFormat.setInputTableName(job, env.getAccumuloTableName());
-
     int maxMaps = Integer.parseInt(env.getTestProperty(TestProps.CI_VERIFY_MAX_MAPS));
+    Set<Range> ranges = env.getAccumuloClient().tableOperations()
+        .splitRangeByTablets(env.getAccumuloTableName(), new Range(), maxMaps);
 
-    // set up ranges
-    try {
-      Set<Range> ranges = env.getAccumuloClient().tableOperations().splitRangeByTablets(env.getAccumuloTableName(), new Range(), maxMaps);
-      AccumuloInputFormat.setRanges(job, ranges);
-      AccumuloInputFormat.setAutoAdjustRanges(job, false);
-    } catch (Exception e) {
-      throw new IOException(e);
-    }
+    AccumuloInputFormat.configure().clientProperties(env.getClientProps())
+        .table(env.getAccumuloTableName()).ranges(ranges).autoAdjustRanges(false).store(job);
 
     job.setMapperClass(CMapper.class);
     job.setNumReduceTasks(0);
     job.setOutputFormatClass(AccumuloOutputFormat.class);
-    AccumuloOutputFormat.setClientProperties(job, env.getClientProps());
-    AccumuloOutputFormat.setCreateTables(job, true);
-    AccumuloOutputFormat.setDefaultTableName(job, env.getAccumuloTableName());
+
+    AccumuloOutputFormat.configure().clientProperties(env.getClientProps()).createTables(true)
+        .defaultTable(env.getAccumuloTableName()).store(job);
 
     Configuration conf = job.getConfiguration();
     conf.setLong(MIN, env.getRowMin());
diff --git a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousOpts.java b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousOpts.java
index a2dc69f..cda9f4d 100644
--- a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousOpts.java
+++ b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousOpts.java
@@ -38,7 +38,8 @@ public class ContinuousOpts {
       logger.setLevel(Level.TRACE);
       logger.setAdditivity(false);
       try {
-        logger.addAppender(new FileAppender(new PatternLayout("%d{dd HH:mm:ss,SSS} [%-8c{2}] %-5p: %m%n"), debugLog, true));
+        logger.addAppender(new FileAppender(new PatternLayout(
+            "%d{dd HH:mm:ss,SSS} [%-8c{2}] %-5p: %m%n"), debugLog, true));
       } catch (IOException ex) {
         throw new RuntimeException(ex);
       }
@@ -59,21 +60,25 @@ public class ContinuousOpts {
   @Parameter(names = "--max", description = "maximum random row number to use")
   long max = Long.MAX_VALUE;
 
-  @Parameter(names = "--debugLog", description = "file to write debugging output", converter = DebugConverter.class)
+  @Parameter(names = "--debugLog", description = "file to write debugging output",
+      converter = DebugConverter.class)
   String debugLog = null;
 
   @Parameter(names = "--num", description = "the number of entries to ingest")
   long num = Long.MAX_VALUE;
 
-  @Parameter(names = "--maxColF", description = "maximum column family value to use", converter = ShortConverter.class)
+  @Parameter(names = "--maxColF", description = "maximum column family value to use",
+      converter = ShortConverter.class)
   short maxColF = Short.MAX_VALUE;
 
-  @Parameter(names = "--maxColQ", description = "maximum column qualifier value to use", converter = ShortConverter.class)
+  @Parameter(names = "--maxColQ", description = "maximum column qualifier value to use",
+      converter = ShortConverter.class)
   short maxColQ = Short.MAX_VALUE;
 
   @Parameter(names = "--addCheckSum", description = "turn on checksums")
   boolean checksum = false;
 
-  @Parameter(names = "--visibilities", description = "read the visibilities to ingest with from a file")
+  @Parameter(names = "--visibilities",
+      description = "read the visibilities to ingest with from a file")
   String visFile = null;
 }
diff --git a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousUtil.java b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousUtil.java
index b4f0148..365da29 100644
--- a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousUtil.java
+++ b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousUtil.java
@@ -40,9 +40,11 @@ final class ContinuousUtil {
    * @throws TableNotFoundException
    *           If the table does not exist
    */
-  static Scanner createScanner(AccumuloClient client, String table, Authorizations auths) throws TableNotFoundException {
+  static Scanner createScanner(AccumuloClient client, String table, Authorizations auths)
+      throws TableNotFoundException {
     if (!client.tableOperations().exists(table)) {
-      throw new TableNotFoundException(null, table, "Consult the README and create the table before starting test processes.");
+      throw new TableNotFoundException(null, table,
+          "Consult the README and create the table before starting test processes.");
     }
     return client.createScanner(table, auths);
   }
diff --git a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousVerify.java b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousVerify.java
index def4198..e862d66 100644
--- a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousVerify.java
+++ b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousVerify.java
@@ -26,7 +26,7 @@ import java.util.Random;
 import java.util.Set;
 
 import org.apache.accumulo.core.client.AccumuloClient;
-import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
+import org.apache.accumulo.hadoop.mapreduce.AccumuloInputFormat;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.data.Value;
@@ -46,7 +46,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * A map reduce job that verifies a table created by continuous ingest. It verifies that all referenced nodes are defined.
+ * A map reduce job that verifies a table created by continuous ingest. It verifies that all
+ * referenced nodes are defined.
  */
 public class ContinuousVerify extends Configured implements Tool {
 
@@ -102,7 +103,8 @@ public class ContinuousVerify extends Configured implements Tool {
     private ArrayList<Long> refs = new ArrayList<>();
 
     @Override
-    public void reduce(LongWritable key, Iterable<VLongWritable> values, Context context) throws IOException, InterruptedException {
+    public void reduce(LongWritable key, Iterable<VLongWritable> values, Context context)
+        throws IOException, InterruptedException {
 
       int defCount = 0;
 
@@ -144,12 +146,14 @@ public class ContinuousVerify extends Configured implements Tool {
     }
     ContinuousEnv env = new ContinuousEnv(args[0], args[1]);
 
-    Job job = Job.getInstance(getConf(), this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
+    Job job = Job.getInstance(getConf(),
+        this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
     job.setJarByClass(this.getClass());
 
     job.setInputFormatClass(AccumuloInputFormat.class);
 
-    boolean scanOffline = Boolean.parseBoolean(env.getTestProperty(TestProps.CI_VERIFY_SCAN_OFFLINE));
+    boolean scanOffline = Boolean.parseBoolean(env
+        .getTestProperty(TestProps.CI_VERIFY_SCAN_OFFLINE));
     String tableName = env.getAccumuloTableName();
     int maxMaps = Integer.parseInt(env.getTestProperty(TestProps.CI_VERIFY_MAX_MAPS));
     int reducers = Integer.parseInt(env.getTestProperty(TestProps.CI_VERIFY_REDUCERS));
@@ -158,6 +162,7 @@ public class ContinuousVerify extends Configured implements Tool {
     Set<Range> ranges;
     String clone = "";
     AccumuloClient client = env.getAccumuloClient();
+    String table;
 
     if (scanOffline) {
       Random random = new Random();
@@ -165,16 +170,14 @@ public class ContinuousVerify extends Configured implements Tool {
       client.tableOperations().clone(tableName, clone, true, new HashMap<>(), new HashSet<>());
       ranges = client.tableOperations().splitRangeByTablets(tableName, new Range(), maxMaps);
       client.tableOperations().offline(clone);
-      AccumuloInputFormat.setInputTableName(job, clone);
-      AccumuloInputFormat.setOfflineTableScan(job, true);
+      table = clone;
     } else {
       ranges = client.tableOperations().splitRangeByTablets(tableName, new Range(), maxMaps);
-      AccumuloInputFormat.setInputTableName(job, tableName);
+      table = tableName;
     }
 
-    AccumuloInputFormat.setRanges(job, ranges);
-    AccumuloInputFormat.setAutoAdjustRanges(job, false);
-    AccumuloInputFormat.setClientProperties(job, env.getClientProps());
+    AccumuloInputFormat.configure().clientProperties(env.getClientProps()).table(table)
+        .ranges(ranges).autoAdjustRanges(false).offlineScan(scanOffline).store(job);
 
     job.setMapperClass(CMapper.class);
     job.setMapOutputKeyClass(LongWritable.class);
diff --git a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousWalk.java b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousWalk.java
index 7cdc55d..7f8a024 100644
--- a/src/main/java/org/apache/accumulo/testing/continuous/ContinuousWalk.java
+++ b/src/main/java/org/apache/accumulo/testing/continuous/ContinuousWalk.java
@@ -60,7 +60,8 @@ public class ContinuousWalk {
     int sleepTime = Integer.parseInt(env.getTestProperty(TestProps.CI_WALKER_SLEEP_MS));
 
     while (true) {
-      Scanner scanner = ContinuousUtil.createScanner(client, env.getAccumuloTableName(), env.getRandomAuthorizations());
+      Scanner scanner = ContinuousUtil.createScanner(client, env.getAccumuloTableName(),
+          env.getRandomAuthorizations());
       String row = findAStartRow(env.getRowMin(), env.getRowMax(), scanner, r);
 
       while (row != null) {
diff --git a/src/main/java/org/apache/accumulo/testing/continuous/CreateTable.java b/src/main/java/org/apache/accumulo/testing/continuous/CreateTable.java
index 0c7902a..fb4eeea 100644
--- a/src/main/java/org/apache/accumulo/testing/continuous/CreateTable.java
+++ b/src/main/java/org/apache/accumulo/testing/continuous/CreateTable.java
@@ -40,7 +40,8 @@ public class CreateTable {
       System.exit(-1);
     }
 
-    int numTablets = Integer.parseInt(env.getTestProperty(TestProps.CI_COMMON_ACCUMULO_NUM_TABLETS));
+    int numTablets = Integer
+        .parseInt(env.getTestProperty(TestProps.CI_COMMON_ACCUMULO_NUM_TABLETS));
     if (numTablets < 1) {
       System.err.println("ERROR: numTablets < 1");
       System.exit(-1);
@@ -66,6 +67,7 @@ public class CreateTable {
     }
 
     client.tableOperations().addSplits(tableName, splits);
-    System.out.println("Created Accumulo table '" + tableName + "' with " + numTablets + " tablets");
+    System.out
+        .println("Created Accumulo table '" + tableName + "' with " + numTablets + " tablets");
   }
 }
diff --git a/src/main/java/org/apache/accumulo/testing/continuous/TimeBinner.java b/src/main/java/org/apache/accumulo/testing/continuous/TimeBinner.java
index b3bf00f..89b5ad8 100644
--- a/src/main/java/org/apache/accumulo/testing/continuous/TimeBinner.java
+++ b/src/main/java/org/apache/accumulo/testing/continuous/TimeBinner.java
@@ -54,15 +54,18 @@ public class TimeBinner {
   }
 
   static class Opts extends Help {
-    @Parameter(names = "--period", description = "period", converter = TimeConverter.class, required = true)
+    @Parameter(names = "--period", description = "period", converter = TimeConverter.class,
+        required = true)
     long period = 0;
     @Parameter(names = "--timeColumn", description = "time column", required = true)
     int timeColumn = 0;
     @Parameter(names = "--dataColumn", description = "data column", required = true)
     int dataColumn = 0;
-    @Parameter(names = "--operation", description = "one of: AVG, SUM, MIN, MAX, COUNT", required = true)
+    @Parameter(names = "--operation", description = "one of: AVG, SUM, MIN, MAX, COUNT",
+        required = true)
     String operation;
-    @Parameter(names = "--dateFormat", description = "a SimpleDataFormat string that describes the data format")
+    @Parameter(names = "--dateFormat",
+        description = "a SimpleDataFormat string that describes the data format")
     String dateFormat = "MM/dd/yy-HH:mm:ss";
   }
 
@@ -156,7 +159,8 @@ public class TimeBinner {
         case AMM_HACK1:
         case AMM: {
           DoubleWrapper countdw = aggregation2.get(entry.getKey());
-          value = "" + (entry.getValue().d / countdw.d) + " " + aggregation3.get(entry.getKey()).d + " " + aggregation4.get(entry.getKey()).d;
+          value = "" + (entry.getValue().d / countdw.d) + " " + aggregation3.get(entry.getKey()).d
+              + " " + aggregation4.get(entry.getKey()).d;
           break;
         }
         case AVG: {
@@ -182,13 +186,15 @@ public class TimeBinner {
     get(time, aggregation, 0).d += amount;
   }
 
-  private static void updateMax(long time, HashMap<Long,DoubleWrapper> aggregation, double data, double new_max) {
+  private static void updateMax(long time, HashMap<Long,DoubleWrapper> aggregation, double data,
+      double new_max) {
     DoubleWrapper maxdw = get(time, aggregation, Double.NEGATIVE_INFINITY);
     if (data > maxdw.d)
       maxdw.d = new_max;
   }
 
-  private static void updateMin(long time, HashMap<Long,DoubleWrapper> aggregation, double data, double new_min) {
+  private static void updateMin(long time, HashMap<Long,DoubleWrapper> aggregation, double data,
+      double new_min) {
     DoubleWrapper mindw = get(time, aggregation, Double.POSITIVE_INFINITY);
     if (data < mindw.d)
       mindw.d = new_min;
diff --git a/src/main/java/org/apache/accumulo/testing/continuous/UndefinedAnalyzer.java b/src/main/java/org/apache/accumulo/testing/continuous/UndefinedAnalyzer.java
index 2232c52..27c3239 100644
--- a/src/main/java/org/apache/accumulo/testing/continuous/UndefinedAnalyzer.java
+++ b/src/main/java/org/apache/accumulo/testing/continuous/UndefinedAnalyzer.java
@@ -16,13 +16,11 @@
  */
 package org.apache.accumulo.testing.continuous;
 
-import static java.nio.charset.StandardCharsets.UTF_8;
-
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FilenameFilter;
 import java.io.InputStreamReader;
+import java.nio.file.Files;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -35,6 +33,7 @@ import java.util.Map.Entry;
 import java.util.TreeMap;
 import java.util.concurrent.TimeUnit;
 
+import com.beust.jcommander.Parameter;
 import org.apache.accumulo.core.cli.BatchScannerOpts;
 import org.apache.accumulo.core.cli.ClientOnDefaultTable;
 import org.apache.accumulo.core.client.AccumuloClient;
@@ -44,18 +43,19 @@ import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.data.Value;
 import org.apache.hadoop.io.Text;
 
-import com.beust.jcommander.Parameter;
+import static java.nio.charset.StandardCharsets.UTF_8;
 
 /**
- * BUGS This code does not handle the fact that these files could include log events from previous months. It therefore it assumes all dates are in the current
- * month. One solution might be to skip log files that haven't been touched in the last month, but that doesn't prevent newer files that have old dates in them.
- *
+ * BUGS This code does not handle the fact that these files could include log events from previous
+ * months. It therefore it assumes all dates are in the current month. One solution might be to skip
+ * log files that haven't been touched in the last month, but that doesn't prevent newer files that
+ * have old dates in them.
  */
 public class UndefinedAnalyzer {
 
   static class UndefinedNode {
 
-    public UndefinedNode(String undef2, String ref2) {
+    UndefinedNode(String undef2, String ref2) {
       this.undef = undef2;
       this.ref = ref2;
     }
@@ -68,14 +68,9 @@ public class UndefinedAnalyzer {
 
     Map<String,TreeMap<Long,Long>> flushes = new HashMap<>();
 
-    public IngestInfo(String logDir) throws Exception {
+    IngestInfo(String logDir) throws Exception {
       File dir = new File(logDir);
-      File[] ingestLogs = dir.listFiles(new FilenameFilter() {
-        @Override
-        public boolean accept(File dir, String name) {
-          return name.endsWith("ingest.out");
-        }
-      });
+      File[] ingestLogs = dir.listFiles((dir1, name) -> name.endsWith("ingest.out"));
 
       if (ingestLogs != null) {
         for (File log : ingestLogs) {
@@ -85,10 +80,9 @@ public class UndefinedAnalyzer {
     }
 
     private void parseLog(File log) throws Exception {
-      BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(log), UTF_8));
       String line;
       TreeMap<Long,Long> tm = null;
-      try {
+      try (BufferedReader reader = Files.newBufferedReader(log.toPath())) {
         while ((line = reader.readLine()) != null) {
           if (!line.startsWith("UUID"))
             continue;
@@ -102,7 +96,7 @@ public class UndefinedAnalyzer {
           }
 
           tm = new TreeMap<>(Collections.reverseOrder());
-          tm.put(0l, Long.parseLong(time));
+          tm.put(0L, Long.parseLong(time));
           flushes.put(uuid, tm);
           break;
 
@@ -123,8 +117,6 @@ public class UndefinedAnalyzer {
 
           tm.put(Long.parseLong(count), Long.parseLong(time));
         }
-      } finally {
-        reader.close();
       }
     }
 
@@ -174,21 +166,15 @@ public class UndefinedAnalyzer {
       // The command may need to be adjusted if formatting changes.
 
       File dir = new File(acuLogDir);
-      File[] masterLogs = dir.listFiles(new FilenameFilter() {
-        @Override
-        public boolean accept(File dir, String name) {
-          return name.matches("load_events.log");
-        }
-      });
+      File[] masterLogs = dir.listFiles((dir1, name) -> name.matches("load_events.log"));
 
       SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS");
 
       if (masterLogs != null) {
         for (File masterLog : masterLogs) {
 
-          BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(masterLog), UTF_8));
           String line;
-          try {
+          try (BufferedReader reader = Files.newBufferedReader(masterLog.toPath())) {
             while ((line = reader.readLine()) != null) {
               String[] tokens = line.split("\\s+");
               String day = tokens[0];
@@ -228,8 +214,6 @@ public class UndefinedAnalyzer {
                 System.err.println("Cannot parse tablet " + tablet);
               }
             }
-          } finally {
-            reader.close();
           }
         }
       }
@@ -252,7 +236,8 @@ public class UndefinedAnalyzer {
   }
 
   static class Opts extends ClientOnDefaultTable {
-    @Parameter(names = "--logdir", description = "directory containing the log files", required = true)
+    @Parameter(names = "--logdir", description = "directory containing the log files",
+        required = true)
     String logDir;
 
     Opts() {
@@ -261,7 +246,8 @@ public class UndefinedAnalyzer {
   }
 
   /**
-   * Class to analyze undefined references and accumulo logs to isolate the time/tablet where data was lost.
+   * Class to analyze undefined references and accumulo logs to isolate the time/tablet where data
+   * was lost.
    */
   public static void main(String[] args) throws Exception {
     Opts opts = new Opts();
@@ -270,87 +256,80 @@ public class UndefinedAnalyzer {
 
     List<UndefinedNode> undefs = new ArrayList<>();
 
-    BufferedReader reader = new BufferedReader(new InputStreamReader(System.in, UTF_8));
-    String line;
-    while ((line = reader.readLine()) != null) {
-      String[] tokens = line.split("\\s");
-      String undef = tokens[0];
-      String ref = tokens[1];
+    try (BufferedReader reader = new BufferedReader(new InputStreamReader(System.in, UTF_8))) {
+      String line;
+      while ((line = reader.readLine()) != null) {
+        String[] tokens = line.split("\\s");
+        String undef = tokens[0];
+        String ref = tokens[1];
 
-      undefs.add(new UndefinedNode(undef, ref));
+        undefs.add(new UndefinedNode(undef, ref));
+      }
     }
 
-    AccumuloClient client = opts.getClient();
-    BatchScanner bscanner = client.createBatchScanner(opts.getTableName(), opts.auths, bsOpts.scanThreads);
-    bscanner.setTimeout(bsOpts.scanTimeout, TimeUnit.MILLISECONDS);
-    List<Range> refs = new ArrayList<>();
+    try (AccumuloClient client = opts.createClient();
+         BatchScanner bscanner = client.createBatchScanner(opts.getTableName(), opts.auths,
+             bsOpts.scanThreads)) {
+      bscanner.setTimeout(bsOpts.scanTimeout, TimeUnit.MILLISECONDS);
+      List<Range> refs = new ArrayList<>();
 
-    for (UndefinedNode undefinedNode : undefs)
-      refs.add(new Range(new Text(undefinedNode.ref)));
+      for (UndefinedNode undefinedNode : undefs)
+        refs.add(new Range(new Text(undefinedNode.ref)));
 
-    bscanner.setRanges(refs);
+      bscanner.setRanges(refs);
 
-    HashMap<String,List<String>> refInfo = new HashMap<>();
+      HashMap<String, List<String>> refInfo = new HashMap<>();
 
-    for (Entry<Key,Value> entry : bscanner) {
-      String ref = entry.getKey().getRow().toString();
-      List<String> vals = refInfo.get(ref);
-      if (vals == null) {
-        vals = new ArrayList<>();
-        refInfo.put(ref, vals);
+      for (Entry<Key, Value> entry : bscanner) {
+        String ref = entry.getKey().getRow().toString();
+        List<String> vals = refInfo.computeIfAbsent(ref, k -> new ArrayList<>());
+        vals.add(entry.getValue().toString());
       }
 
-      vals.add(entry.getValue().toString());
-    }
-
-    bscanner.close();
-
-    IngestInfo ingestInfo = new IngestInfo(opts.logDir);
-    String tableId = client.tableOperations().tableIdMap().get(opts.getTableName());
-    TabletHistory tabletHistory = new TabletHistory(tableId, opts.logDir);
+      IngestInfo ingestInfo = new IngestInfo(opts.logDir);
+      String tableId = client.tableOperations().tableIdMap().get(opts.getTableName());
+      TabletHistory tabletHistory = new TabletHistory(tableId, opts.logDir);
 
-    SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
+      SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
 
-    for (UndefinedNode undefinedNode : undefs) {
+      for (UndefinedNode undefinedNode : undefs) {
 
-      List<String> refVals = refInfo.get(undefinedNode.ref);
-      if (refVals != null) {
-        for (String refVal : refVals) {
-          TabletAssignment ta = null;
+        List<String> refVals = refInfo.get(undefinedNode.ref);
+        if (refVals != null) {
+          for (String refVal : refVals) {
+            TabletAssignment ta = null;
 
-          String[] tokens = refVal.split(":");
+            String[] tokens = refVal.split(":");
 
-          String uuid = tokens[0];
-          String count = tokens[1];
+            String uuid = tokens[0];
+            String count = tokens[1];
 
-          String t1 = "";
-          String t2 = "";
+            String t1 = "";
+            String t2 = "";
 
-          Iterator<Long> times = ingestInfo.getTimes(uuid, Long.parseLong(count, 16));
-          if (times != null) {
-            if (times.hasNext()) {
-              long time2 = times.next();
-              t2 = sdf.format(new Date(time2));
+            Iterator<Long> times = ingestInfo.getTimes(uuid, Long.parseLong(count, 16));
+            if (times != null) {
               if (times.hasNext()) {
-                long time1 = times.next();
-                t1 = sdf.format(new Date(time1));
-                ta = tabletHistory.findMostRecentAssignment(undefinedNode.undef, time1, time2);
+                long time2 = times.next();
+                t2 = sdf.format(new Date(time2));
+                if (times.hasNext()) {
+                  long time1 = times.next();
+                  t1 = sdf.format(new Date(time1));
+                  ta = tabletHistory.findMostRecentAssignment(undefinedNode.undef, time1, time2);
+                }
               }
             }
-          }
 
-          if (ta == null)
-            System.out.println(undefinedNode.undef + " " + undefinedNode.ref + " " + uuid + " " + t1 + " " + t2);
-          else
-            System.out.println(undefinedNode.undef + " " + undefinedNode.ref + " " + ta.tablet + " " + ta.server + " " + uuid + " " + t1 + " " + t2);
+            if (ta == null)
+              System.out.println(undefinedNode.undef + " " + undefinedNode.ref + " " + uuid + " " + t1 + " " + t2);
+            else
+              System.out.println(undefinedNode.undef + " " + undefinedNode.ref + " " + ta.tablet + " " + ta.server + " " + uuid + " " + t1 + " " + t2);
 
+          }
+        } else {
+          System.out.println(undefinedNode.undef + " " + undefinedNode.ref);
         }
-      } else {
-        System.out.println(undefinedNode.undef + " " + undefinedNode.ref);
       }
-
     }
-
   }
-
 }
diff --git a/src/main/java/org/apache/accumulo/testing/ingest/BulkImportDirectory.java b/src/main/java/org/apache/accumulo/testing/ingest/BulkImportDirectory.java
index 9ac53a5..6f0b3cb 100644
--- a/src/main/java/org/apache/accumulo/testing/ingest/BulkImportDirectory.java
+++ b/src/main/java/org/apache/accumulo/testing/ingest/BulkImportDirectory.java
@@ -32,17 +32,21 @@ public class BulkImportDirectory {
   static class Opts extends ClientOnRequiredTable {
     @Parameter(names = {"-s", "--source"}, description = "directory to import from")
     String source = null;
-    @Parameter(names = {"-f", "--failures"}, description = "directory to copy failures into: will be deleted before the bulk import")
+    @Parameter(names = {"-f", "--failures"},
+        description = "directory to copy failures into: will be deleted before the bulk import")
     String failures = null;
   }
 
-  public static void main(String[] args) throws IOException, AccumuloException, AccumuloSecurityException, TableNotFoundException {
+  public static void main(String[] args) throws IOException, AccumuloException,
+      AccumuloSecurityException, TableNotFoundException {
     final FileSystem fs = FileSystem.get(new Configuration());
     Opts opts = new Opts();
-    System.err.println("Deprecated syntax for BulkImportDirectory, please use the new style (see --help)");
+    System.err
+        .println("Deprecated syntax for BulkImportDirectory, please use the new style (see --help)");
     opts.parseArgs(BulkImportDirectory.class.getName(), args);
     fs.delete(new Path(opts.failures), true);
     fs.mkdirs(new Path(opts.failures));
-    opts.getClient().tableOperations().importDirectory(opts.getTableName(), opts.source, opts.failures, false);
+    opts.createClient().tableOperations()
+        .importDirectory(opts.getTableName(), opts.source, opts.failures, false);
   }
 }
diff --git a/src/main/java/org/apache/accumulo/testing/ingest/TestIngest.java b/src/main/java/org/apache/accumulo/testing/ingest/TestIngest.java
index 5f058ab..5ba2092 100644
--- a/src/main/java/org/apache/accumulo/testing/ingest/TestIngest.java
+++ b/src/main/java/org/apache/accumulo/testing/ingest/TestIngest.java
@@ -66,7 +66,8 @@ public class TestIngest {
     @Parameter(names = "--createTable")
     public boolean createTable = false;
 
-    @Parameter(names = "--splits", description = "the number of splits to use when creating the table")
+    @Parameter(names = "--splits",
+        description = "the number of splits to use when creating the table")
     public int numsplits = 1;
 
     @Parameter(names = "--start", description = "the starting row number")
@@ -78,7 +79,9 @@ public class TestIngest {
     @Parameter(names = "--cols", description = "the number of columns to ingest per row")
     public int cols = 1;
 
-    @Parameter(names = "--random", description = "insert random rows and use the given number to seed the psuedo-random number generator")
+    @Parameter(
+        names = "--random",
+        description = "insert random rows and use the given number to seed the psuedo-random number generator")
     public Integer random = null;
 
     @Parameter(names = "--size", description = "the size of the value to ingest")
@@ -96,10 +99,12 @@ public class TestIngest {
     @Parameter(names = "--stride", description = "the difference between successive row ids")
     public int stride;
 
-    @Parameter(names = {"-cf", "--columnFamily"}, description = "place columns in this column family")
+    @Parameter(names = {"-cf", "--columnFamily"},
+        description = "place columns in this column family")
     public String columnFamily = "colf";
 
-    @Parameter(names = {"-cv", "--columnVisibility"}, description = "place columns in this column family", converter = VisibilityConverter.class)
+    @Parameter(names = {"-cv", "--columnVisibility"},
+        description = "place columns in this column family", converter = VisibilityConverter.class)
     public ColumnVisibility columnVisibility = new ColumnVisibility();
 
     public Configuration conf = null;
@@ -110,9 +115,11 @@ public class TestIngest {
     }
   }
 
-  public static void createTable(AccumuloClient client, Opts args) throws AccumuloException, AccumuloSecurityException, TableExistsException {
+  public static void createTable(AccumuloClient client, Opts args) throws AccumuloException,
+      AccumuloSecurityException, TableExistsException {
     if (args.createTable) {
-      TreeSet<Text> splits = getSplitPoints(args.startRow, args.startRow + args.rows, args.numsplits);
+      TreeSet<Text> splits = getSplitPoints(args.startRow, args.startRow + args.rows,
+          args.numsplits);
 
       if (!client.tableOperations().exists(args.getTableName()))
         client.tableOperations().create(args.getTableName());
@@ -184,7 +191,7 @@ public class TestIngest {
     String name = TestIngest.class.getSimpleName();
     DistributedTrace.enable(name);
 
-    try {
+    try (AccumuloClient client = opts.createClient()) {
       opts.startTracing(name);
 
       if (opts.debug)
@@ -192,7 +199,7 @@ public class TestIngest {
 
       // test batch update
 
-      ingest(opts.getClient(), opts, bwOpts);
+      ingest(client, opts, bwOpts);
     } catch (Exception e) {
       throw new RuntimeException(e);
     } finally {
@@ -201,8 +208,9 @@ public class TestIngest {
     }
   }
 
-  public static void ingest(AccumuloClient client, FileSystem fs, Opts opts, BatchWriterOpts bwOpts) throws IOException, AccumuloException,
-      AccumuloSecurityException, TableNotFoundException, MutationsRejectedException, TableExistsException {
+  public static void ingest(AccumuloClient client, FileSystem fs, Opts opts, BatchWriterOpts bwOpts)
+      throws IOException, AccumuloException, AccumuloSecurityException, TableNotFoundException,
+      MutationsRejectedException, TableExistsException {
     long stopTime;
 
     byte[][] bytevals = generateValues(opts.dataSize);
@@ -219,8 +227,11 @@ public class TestIngest {
 
     if (opts.outputFile != null) {
       Configuration conf = CachedConfiguration.getInstance();
-      writer = FileOperations.getInstance().newWriterBuilder()
-          .forFile(opts.outputFile + "." + RFile.EXTENSION, fs, conf, CryptoServiceFactory.newDefaultInstance())
+      writer = FileOperations
+          .getInstance()
+          .newWriterBuilder()
+          .forFile(opts.outputFile + "." + RFile.EXTENSION, fs, conf,
+              CryptoServiceFactory.newDefaultInstance())
           .withTableConfiguration(DefaultConfiguration.getInstance()).build();
       writer.startDefaultLocalityGroup();
     } else {
@@ -265,7 +276,8 @@ public class TestIngest {
           } else {
             byte value[];
             if (opts.random != null) {
-              value = genRandomValue(random, randomValue, opts.random.intValue(), rowid + opts.startRow, j);
+              value = genRandomValue(random, randomValue, opts.random.intValue(), rowid
+                  + opts.startRow, j);
             } else {
               value = bytevals[j % bytevals.length];
             }
@@ -287,7 +299,8 @@ public class TestIngest {
           } else {
             byte value[];
             if (opts.random != null) {
-              value = genRandomValue(random, randomValue, opts.random.intValue(), rowid + opts.startRow, j);
+              value = genRandomValue(random, randomValue, opts.random.intValue(), rowid
+                  + opts.startRow, j);
             } else {
               value = bytevals[j % bytevals.length];
             }
@@ -316,7 +329,8 @@ public class TestIngest {
       } catch (MutationsRejectedException e) {
         if (e.getSecurityErrorCodes().size() > 0) {
           for (Entry<TabletId,Set<SecurityErrorCode>> entry : e.getSecurityErrorCodes().entrySet()) {
-            System.err.println("ERROR : Not authorized to write to : " + entry.getKey() + " due to " + entry.getValue());
+            System.err.println("ERROR : Not authorized to write to : " + entry.getKey()
+                + " due to " + entry.getValue());
           }
         }
 
@@ -335,12 +349,16 @@ public class TestIngest {
     int totalValues = opts.rows * opts.cols;
     double elapsed = (stopTime - startTime) / 1000.0;
 
-    System.out.printf("%,12d records written | %,8d records/sec | %,12d bytes written | %,8d bytes/sec | %6.3f secs   %n", totalValues,
-        (int) (totalValues / elapsed), bytesWritten, (int) (bytesWritten / elapsed), elapsed);
+    System.out
+        .printf(
+            "%,12d records written | %,8d records/sec | %,12d bytes written | %,8d bytes/sec | %6.3f secs   %n",
+            totalValues, (int) (totalValues / elapsed), bytesWritten,
+            (int) (bytesWritten / elapsed), elapsed);
   }
 
-  public static void ingest(AccumuloClient c, Opts opts, BatchWriterOpts batchWriterOpts) throws MutationsRejectedException, IOException, AccumuloException,
-      AccumuloSecurityException, TableNotFoundException, TableExistsException {
+  public static void ingest(AccumuloClient c, Opts opts, BatchWriterOpts batchWriterOpts)
+      throws MutationsRejectedException, IOException, AccumuloException, AccumuloSecurityException,
+      TableNotFoundException, TableExistsException {
     ingest(c, FileSystem.get(CachedConfiguration.getInstance()), opts, batchWriterOpts);
   }
 }
diff --git a/src/main/java/org/apache/accumulo/testing/ingest/VerifyIngest.java b/src/main/java/org/apache/accumulo/testing/ingest/VerifyIngest.java
index 884befe..02d9091 100644
--- a/src/main/java/org/apache/accumulo/testing/ingest/VerifyIngest.java
+++ b/src/main/java/org/apache/accumulo/testing/ingest/VerifyIngest.java
@@ -48,20 +48,20 @@ public class VerifyIngest {
     return Integer.parseInt(k.getRow().toString().split("_")[1]);
   }
 
-  public static int getCol(Key k) {
+  private static int getCol(Key k) {
     return Integer.parseInt(k.getColumnQualifier().toString().split("_")[1]);
   }
 
   public static class Opts extends TestIngest.Opts {
     @Parameter(names = "-useGet", description = "fetches values one at a time, instead of scanning")
-    public boolean useGet = false;
+    boolean useGet = false;
   }
 
   public static void main(String[] args) throws Exception {
     Opts opts = new Opts();
     ScannerOpts scanOpts = new ScannerOpts();
     opts.parseArgs(VerifyIngest.class.getName(), args, scanOpts);
-    try {
+    try (AccumuloClient client = opts.createClient()) {
       if (opts.trace) {
         String name = VerifyIngest.class.getSimpleName();
         DistributedTrace.enable();
@@ -69,7 +69,7 @@ public class VerifyIngest {
         Trace.data("cmdLine", Arrays.asList(args).toString());
       }
 
-      verifyIngest(opts.getClient(), opts, scanOpts);
+      verifyIngest(client, opts, scanOpts);
 
     } finally {
       Trace.off();
@@ -77,8 +77,8 @@ public class VerifyIngest {
     }
   }
 
-  public static void verifyIngest(AccumuloClient client, Opts opts, ScannerOpts scanOpts) throws AccumuloException, AccumuloSecurityException,
-      TableNotFoundException {
+  private static void verifyIngest(AccumuloClient client, Opts opts, ScannerOpts scanOpts)
+      throws AccumuloException, AccumuloSecurityException, TableNotFoundException {
     byte[][] bytevals = TestIngest.generateValues(opts.dataSize);
 
     Authorizations labelAuths = new Authorizations("L1", "L2", "G1", "GROUP2");
@@ -121,7 +121,8 @@ public class VerifyIngest {
 
         byte ev[];
         if (opts.random != null) {
-          ev = TestIngest.genRandomValue(random, randomValue, opts.random.intValue(), expectedRow, expectedCol);
+          ev = TestIngest
+              .genRandomValue(random, randomValue, opts.random, expectedRow, expectedCol);
         } else {
           ev = bytevals[expectedCol % bytevals.length];
         }
@@ -134,7 +135,8 @@ public class VerifyIngest {
           bytesRead += val.length;
           Value value = new Value(val);
           if (value.compareTo(ev) != 0) {
-            log.error("unexpected value  (" + rowKey + " " + colf + " " + colq + " : saw " + value + " expected " + new Value(ev));
+            log.error("unexpected value  (" + rowKey + " " + colf + " " + colq + " : saw " + value
+                + " expected " + new Value(ev));
             errors++;
           }
         }
@@ -153,7 +155,8 @@ public class VerifyIngest {
         scanner.setBatchSize(scanOpts.scanBatchSize);
         scanner.setRange(new Range(startKey, endKey));
         for (int j = 0; j < opts.cols; j++) {
-          scanner.fetchColumn(new Text(opts.columnFamily), new Text("col_" + String.format("%07d", j)));
+          scanner.fetchColumn(new Text(opts.columnFamily),
+              new Text("col_" + String.format("%07d", j)));
         }
 
         int recsReadBefore = recsRead;
@@ -175,12 +178,14 @@ public class VerifyIngest {
           }
 
           if (colNum != expectedCol) {
-            log.error("colNum != expectedCol  " + colNum + " != " + expectedCol + "  rowNum : " + rowNum);
+            log.error("colNum != expectedCol  " + colNum + " != " + expectedCol + "  rowNum : "
+                + rowNum);
             errors++;
           }
 
           if (expectedRow >= (opts.rows + opts.startRow)) {
-            log.error("expectedRow (" + expectedRow + ") >= (ingestArgs.rows + ingestArgs.startRow)  (" + (opts.rows + opts.startRow)
+            log.error("expectedRow (" + expectedRow
+                + ") >= (ingestArgs.rows + ingestArgs.startRow)  (" + (opts.rows + opts.startRow)
                 + "), get batch returned data passed end key");
             errors++;
             break;
@@ -188,19 +193,22 @@ public class VerifyIngest {
 
           byte value[];
           if (opts.random != null) {
-            value = TestIngest.genRandomValue(random, randomValue, opts.random.intValue(), expectedRow, colNum);
+            value = TestIngest
+                .genRandomValue(random, randomValue, opts.random, expectedRow, colNum);
           } else {
             value = bytevals[colNum % bytevals.length];
           }
 
           if (entry.getValue().compareTo(value) != 0) {
             log.error("unexpected value, rowNum : " + rowNum + " colNum : " + colNum);
-            log.error(" saw = " + new String(entry.getValue().get()) + " expected = " + new String(value));
+            log.error(" saw = " + new String(entry.getValue().get()) + " expected = "
+                + new String(value));
             errors++;
           }
 
           if (opts.timestamp >= 0 && entry.getKey().getTimestamp() != opts.timestamp) {
-            log.error("unexpected timestamp " + entry.getKey().getTimestamp() + ", rowNum : " + rowNum + " colNum : " + colNum);
+            log.error("unexpected timestamp " + entry.getKey().getTimestamp() + ", rowNum : "
+                + rowNum + " colNum : " + colNum);
             errors++;
           }
 
@@ -227,10 +235,14 @@ public class VerifyIngest {
     }
 
     if (expectedRow != (opts.rows + opts.startRow)) {
-      throw new AccumuloException("Did not read expected number of rows. Saw " + (expectedRow - opts.startRow) + " expected " + opts.rows);
+      throw new AccumuloException("Did not read expected number of rows. Saw "
+          + (expectedRow - opts.startRow) + " expected " + opts.rows);
     } else {
-      System.out.printf("%,12d records read | %,8d records/sec | %,12d bytes read | %,8d bytes/sec | %6.3f secs   %n", recsRead,
-          (int) ((recsRead) / ((t2 - t1) / 1000.0)), bytesRead, (int) (bytesRead / ((t2 - t1) / 1000.0)), (t2 - t1) / 1000.0);
+      System.out
+          .printf(
+              "%,12d records read | %,8d records/sec | %,12d bytes read | %,8d bytes/sec | %6.3f secs   %n",
+              recsRead, (int) ((recsRead) / ((t2 - t1) / 1000.0)), bytesRead,
+              (int) (bytesRead / ((t2 - t1) / 1000.0)), (t2 - t1) / 1000.0);
     }
   }
 
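
The batch-verification path above constrains a Scanner to a row range and to specific column qualifiers before iterating. A minimal sketch of that pattern, with placeholder row and column names rather than VerifyIngest's generated ones:

    import java.util.Map.Entry;

    import org.apache.accumulo.core.client.AccumuloClient;
    import org.apache.accumulo.core.client.Scanner;
    import org.apache.accumulo.core.data.Key;
    import org.apache.accumulo.core.data.Range;
    import org.apache.accumulo.core.data.Value;
    import org.apache.accumulo.core.security.Authorizations;
    import org.apache.hadoop.io.Text;

    public class ScanRangeSketch {
      // Scan rows row_0000000000 .. row_0000000999, fetching only a handful of columns.
      static void scan(AccumuloClient client, String table) throws Exception {
        try (Scanner scanner = client.createScanner(table, Authorizations.EMPTY)) {
          scanner.setRange(new Range("row_0000000000", "row_0000000999"));
          for (int j = 0; j < 3; j++) {
            scanner.fetchColumn(new Text("colf"), new Text("col_" + String.format("%07d", j)));
          }
          for (Entry<Key,Value> entry : scanner) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
          }
        }
      }
    }
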
diff --git a/src/main/java/org/apache/accumulo/testing/mapreduce/RowHash.java b/src/main/java/org/apache/accumulo/testing/mapreduce/RowHash.java
index 3a41713..086025c 100644
--- a/src/main/java/org/apache/accumulo/testing/mapreduce/RowHash.java
+++ b/src/main/java/org/apache/accumulo/testing/mapreduce/RowHash.java
@@ -18,11 +18,15 @@ package org.apache.accumulo.testing.mapreduce;
 
 import java.io.IOException;
 import java.util.Base64;
+import java.util.Collection;
 import java.util.Collections;
+import java.util.List;
+import java.util.Set;
 
-import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
-import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
-import org.apache.accumulo.core.clientImpl.mapreduce.lib.MapReduceClientOnRequiredTable;
+import org.apache.accumulo.core.client.IteratorSetting;
+import org.apache.accumulo.hadoop.mapreduce.AccumuloInputFormat;
+import org.apache.accumulo.hadoop.mapreduce.AccumuloOutputFormat;
+import org.apache.accumulo.hadoopImpl.mapreduce.lib.MapReduceClientOnRequiredTable;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Value;
@@ -46,7 +50,8 @@ public class RowHash extends Configured implements Tool {
     @Override
     public void map(Key row, Value data, Context context) throws IOException, InterruptedException {
       Mutation m = new Mutation(row.getRow());
-      m.put(new Text("cf-HASHTYPE"), new Text("cq-MD5BASE64"), new Value(Base64.getEncoder().encode(MD5Hash.digest(data.toString()).getDigest())));
+      m.put(new Text("cf-HASHTYPE"), new Text("cq-MD5BASE64"), new Value(Base64.getEncoder()
+          .encode(MD5Hash.digest(data.toString()).getDigest())));
       context.write(null, m);
       context.progress();
     }
@@ -68,14 +73,20 @@ public class RowHash extends Configured implements Tool {
     Opts opts = new Opts();
     opts.parseArgs(RowHash.class.getName(), args);
     job.setInputFormatClass(AccumuloInputFormat.class);
-    opts.setAccumuloConfigs(job);
 
     String col = opts.column;
     int idx = col.indexOf(":");
     Text cf = new Text(idx < 0 ? col : col.substring(0, idx));
     Text cq = idx < 0 ? null : new Text(col.substring(idx + 1));
+    Collection<IteratorSetting.Column> cols = Collections.emptyList();
     if (cf.getLength() > 0)
-      AccumuloInputFormat.fetchColumns(job, Collections.singleton(new Pair<>(cf, cq)));
+      cols = Collections.singleton(new IteratorSetting.Column(cf, cq));
+
+    AccumuloInputFormat.configure().clientProperties(opts.getClientProperties())
+        .table(opts.getTableName()).auths(opts.auths).fetchColumns(cols).store(job);
+
+    AccumuloOutputFormat.configure().clientProperties(opts.getClientProperties())
+        .defaultTable(opts.getTableName()).createTables(true).store(job);
 
     job.setMapperClass(HashDataMapper.class);
     job.setMapOutputKeyClass(Text.class);
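
The hunk above is the heart of the commit's move to the new MapReduce API: the old static AccumuloInputFormat/AccumuloOutputFormat helpers are replaced by fluent configure() builders from org.apache.accumulo.hadoop.mapreduce. A minimal sketch of the same wiring, assuming client properties are loaded from a file and using placeholder table and column names:

    import java.io.FileReader;
    import java.util.Collections;
    import java.util.Properties;

    import org.apache.accumulo.core.client.IteratorSetting;
    import org.apache.accumulo.core.security.Authorizations;
    import org.apache.accumulo.hadoop.mapreduce.AccumuloInputFormat;
    import org.apache.accumulo.hadoop.mapreduce.AccumuloOutputFormat;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;

    public class JobConfigSketch {
      static void configure(Job job, String clientPropsPath, String table) throws Exception {
        Properties clientProps = new Properties();
        try (FileReader reader = new FileReader(clientPropsPath)) {
          clientProps.load(reader);
        }

        job.setInputFormatClass(AccumuloInputFormat.class);
        job.setOutputFormatClass(AccumuloOutputFormat.class);

        // Read the source table, fetching only one column.
        AccumuloInputFormat.configure().clientProperties(clientProps).table(table)
            .auths(Authorizations.EMPTY)
            .fetchColumns(Collections.singleton(new IteratorSetting.Column(new Text("cf"), new Text("cq"))))
            .store(job);

        // Write the mapper's mutations back to the same table, creating it if needed.
        AccumuloOutputFormat.configure().clientProperties(clientProps).defaultTable(table)
            .createTables(true).store(job);
      }
    }
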
diff --git a/src/main/java/org/apache/accumulo/testing/mapreduce/TeraSortIngest.java b/src/main/java/org/apache/accumulo/testing/mapreduce/TeraSortIngest.java
index f505f1a..b4ae57c 100644
--- a/src/main/java/org/apache/accumulo/testing/mapreduce/TeraSortIngest.java
+++ b/src/main/java/org/apache/accumulo/testing/mapreduce/TeraSortIngest.java
@@ -27,7 +27,7 @@ import java.util.Random;
 
 import org.apache.accumulo.core.client.BatchWriterConfig;
 import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
-import org.apache.accumulo.core.clientImpl.mapreduce.lib.MapReduceClientOnRequiredTable;
+import org.apache.accumulo.hadoopImpl.mapreduce.lib.MapReduceClientOnRequiredTable;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Value;
 import org.apache.hadoop.conf.Configuration;
@@ -50,8 +50,9 @@ import org.apache.hadoop.util.ToolRunner;
 import com.beust.jcommander.Parameter;
 
 /**
- * Generate the *almost* official terasort input data set. (See below) The user specifies the number of rows and the output directory and this class runs a
- * map/reduce program to generate the data. The format of the data is:
+ * Generate the *almost* official terasort input data set. (See below) The user specifies the number
+ * of rows and the output directory and this class runs a map/reduce program to generate the data.
+ * The format of the data is:
  * <ul>
  * <li>(10 bytes key) (10 bytes rowid) (78 bytes filler) \r \n
  * <li>The keys are random characters from the set ' ' .. '~'.
@@ -59,9 +60,11 @@ import com.beust.jcommander.Parameter;
  * <li>The filler consists of 7 runs of 10 characters from 'A' to 'Z'.
  * </ul>
  *
- * This TeraSort is slightly modified to allow for variable length key sizes and value sizes. The row length isn't variable. To generate a terabyte of data in
- * the same way TeraSort does use 10000000000 rows and 10/10 byte key length and 78/78 byte value length. Along with the 10 byte row id and \r\n this gives you
- * 100 byte row * 10000000000 rows = 1tb. Min/Max ranges for key and value parameters are inclusive/inclusive respectively.
+ * This TeraSort is slightly modified to allow for variable length key sizes and value sizes. The
+ * row length isn't variable. To generate a terabyte of data in the same way TeraSort does use
+ * 10000000000 rows and 10/10 byte key length and 78/78 byte value length. Along with the 10 byte
+ * row id and \r\n this gives you 100 byte row * 10000000000 rows = 1tb. Min/Max ranges for key and
+ * value parameters are inclusive/inclusive respectively.
  *
  *
  */
@@ -77,8 +80,6 @@ public class TeraSortIngest extends Configured implements Tool {
       long firstRow;
       long rowCount;
 
-      public RangeInputSplit() {}
-
       public RangeInputSplit(long offset, long length) {
         firstRow = offset;
         rowCount = length;
@@ -140,7 +141,8 @@ public class TeraSortIngest extends Configured implements Tool {
       }
 
       @Override
-      public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {}
+      public void initialize(InputSplit split, TaskAttemptContext context) throws IOException,
+          InterruptedException {}
 
       @Override
       public boolean nextKeyValue() throws IOException, InterruptedException {
@@ -153,7 +155,8 @@ public class TeraSortIngest extends Configured implements Tool {
     }
 
     @Override
-    public RecordReader<LongWritable,NullWritable> createRecordReader(InputSplit split, TaskAttemptContext context) throws IOException {
+    public RecordReader<LongWritable,NullWritable> createRecordReader(InputSplit split,
+        TaskAttemptContext context) throws IOException {
       // reporter.setStatus("Creating record reader");
       return new RangeRecordReader((RangeInputSplit) split);
     }
@@ -166,7 +169,8 @@ public class TeraSortIngest extends Configured implements Tool {
       long totalRows = job.getConfiguration().getLong(NUMROWS, 0);
       int numSplits = job.getConfiguration().getInt(NUMSPLITS, 1);
       long rowsPerSplit = totalRows / numSplits;
-      System.out.println("Generating " + totalRows + " using " + numSplits + " maps with step of " + rowsPerSplit);
+      System.out.println("Generating " + totalRows + " using " + numSplits + " maps with step of "
+          + rowsPerSplit);
       ArrayList<InputSplit> splits = new ArrayList<>(numSplits);
       long currentRow = 0;
       for (int split = 0; split < numSplits - 1; ++split) {
@@ -191,11 +195,14 @@ public class TeraSortIngest extends Configured implements Tool {
      */
     private static final int seedSkip = 128 * 1024 * 1024;
     /**
-     * The precomputed seed values after every seedSkip iterations. There should be enough values so that a 2**32 iterations are covered.
+     * The precomputed seed values after every seedSkip iterations. There should be enough values so
+     * that a 2**32 iterations are covered.
      */
-    private static final long[] seeds = new long[] {0L, 4160749568L, 4026531840L, 3892314112L, 3758096384L, 3623878656L, 3489660928L, 3355443200L, 3221225472L,
-        3087007744L, 2952790016L, 2818572288L, 2684354560L, 2550136832L, 2415919104L, 2281701376L, 2147483648L, 2013265920L, 1879048192L, 1744830464L,
-        1610612736L, 1476395008L, 1342177280L, 1207959552L, 1073741824L, 939524096L, 805306368L, 671088640L, 536870912L, 402653184L, 268435456L, 134217728L,};
+    private static final long[] seeds = new long[] {0L, 4160749568L, 4026531840L, 3892314112L,
+        3758096384L, 3623878656L, 3489660928L, 3355443200L, 3221225472L, 3087007744L, 2952790016L,
+        2818572288L, 2684354560L, 2550136832L, 2415919104L, 2281701376L, 2147483648L, 2013265920L,
+        1879048192L, 1744830464L, 1610612736L, 1476395008L, 1342177280L, 1207959552L, 1073741824L,
+        939524096L, 805306368L, 671088640L, 536870912L, 402653184L, 268435456L, 134217728L,};
 
     /**
      * Start the random number generator on the given iteration.
@@ -286,7 +293,8 @@ public class TeraSortIngest extends Configured implements Tool {
     }
 
     /**
-     * Add the required filler bytes. Each row consists of 7 blocks of 10 characters and 1 block of 8 characters.
+     * Add the required filler bytes. Each row consists of 7 blocks of 10 characters and 1 block of
+     * 8 characters.
      *
      * @param rowId
      *          the current row number
@@ -310,7 +318,8 @@ public class TeraSortIngest extends Configured implements Tool {
     }
 
     @Override
-    public void map(LongWritable row, NullWritable ignored, Context context) throws IOException, InterruptedException {
+    public void map(LongWritable row, NullWritable ignored, Context context) throws IOException,
+        InterruptedException {
       context.setStatus("Entering");
       long rowId = row.get();
       if (rand == null) {
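
getSplits() above divides the requested row count evenly across the configured number of map tasks; the loop creates numSplits - 1 equal splits, with the remainder presumably going to the final split. A small standalone sketch of that arithmetic (plain Java, not the Hadoop InputSplit classes):

    import java.util.ArrayList;
    import java.util.List;

    public class SplitMathSketch {
      // Every split gets rowsPerSplit rows except the last, which picks up the remainder.
      static List<long[]> computeSplits(long totalRows, int numSplits) {
        long rowsPerSplit = totalRows / numSplits;
        List<long[]> splits = new ArrayList<>(numSplits);
        long currentRow = 0;
        for (int split = 0; split < numSplits - 1; ++split) {
          splits.add(new long[] {currentRow, rowsPerSplit});
          currentRow += rowsPerSplit;
        }
        splits.add(new long[] {currentRow, totalRows - currentRow});
        return splits; // each entry is {firstRow, rowCount}
      }

      public static void main(String[] args) {
        // e.g. 10 rows over 3 maps -> {0,3}, {3,3}, {6,4}
        for (long[] s : computeSplits(10, 3)) {
          System.out.println("firstRow=" + s[0] + " rowCount=" + s[1]);
        }
      }
    }
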
diff --git a/src/main/java/org/apache/accumulo/testing/merkle/MerkleTree.java b/src/main/java/org/apache/accumulo/testing/merkle/MerkleTree.java
index 337d391..bf384b8 100644
--- a/src/main/java/org/apache/accumulo/testing/merkle/MerkleTree.java
+++ b/src/main/java/org/apache/accumulo/testing/merkle/MerkleTree.java
@@ -46,7 +46,8 @@ public class MerkleTree {
       Pair<Integer,Integer> pairToJoin = findNextPair(buffer);
 
       // Make a parent node from them
-      MerkleTreeNode parent = new MerkleTreeNode(Arrays.asList(buffer.get(pairToJoin.getFirst()), buffer.get(pairToJoin.getSecond())), digestAlgorithm);
+      MerkleTreeNode parent = new MerkleTreeNode(Arrays.asList(buffer.get(pairToJoin.getFirst()),
+          buffer.get(pairToJoin.getSecond())), digestAlgorithm);
 
       // Insert it back into the "tree" at the position of the first child
       buffer.set(pairToJoin.getFirst(), parent);
@@ -82,7 +83,8 @@ public class MerkleTree {
     }
 
     if (2 < nodes.size()) {
-      throw new IllegalStateException("Should not have exited loop without pairing two elements when we have at least 3 nodes");
+      throw new IllegalStateException(
+          "Should not have exited loop without pairing two elements when we have at least 3 nodes");
     } else if (2 == nodes.size()) {
       return new Pair<>(0, 1);
     } else {
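
MerkleTree repeatedly joins a pair of nodes into a parent until a single root remains. The sketch below shows the same kind of reduction over raw byte[] hashes with a MessageDigest; it is not the project's MerkleTreeNode logic (which pairs nodes of equal level via findNextPair), only the overall shape:

    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import java.util.ArrayList;
    import java.util.List;

    public class MerkleRootSketch {
      // Collapse a list of leaf hashes into a single root hash by digesting adjacent pairs.
      static byte[] rootHash(List<byte[]> leaves, String digestAlgorithm)
          throws NoSuchAlgorithmException {
        if (leaves.isEmpty()) {
          throw new IllegalArgumentException("no leaves");
        }
        List<byte[]> level = new ArrayList<>(leaves);
        while (level.size() > 1) {
          List<byte[]> next = new ArrayList<>();
          for (int i = 0; i < level.size(); i += 2) {
            if (i + 1 < level.size()) {
              MessageDigest digest = MessageDigest.getInstance(digestAlgorithm);
              digest.update(level.get(i));
              digest.update(level.get(i + 1));
              next.add(digest.digest());
            } else {
              next.add(level.get(i)); // odd node is carried up unchanged in this sketch
            }
          }
          level = next;
        }
        return level.get(0);
      }
    }
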
diff --git a/src/main/java/org/apache/accumulo/testing/merkle/MerkleTreeNode.java b/src/main/java/org/apache/accumulo/testing/merkle/MerkleTreeNode.java
index 4469c32..a83e13c 100644
--- a/src/main/java/org/apache/accumulo/testing/merkle/MerkleTreeNode.java
+++ b/src/main/java/org/apache/accumulo/testing/merkle/MerkleTreeNode.java
@@ -56,7 +56,8 @@ public class MerkleTreeNode {
     hash = v.get();
   }
 
-  public MerkleTreeNode(List<MerkleTreeNode> children, String digestAlgorithm) throws NoSuchAlgorithmException {
+  public MerkleTreeNode(List<MerkleTreeNode> children, String digestAlgorithm)
+      throws NoSuchAlgorithmException {
     level = 0;
     this.children = new ArrayList<>(children.size());
     MessageDigest digest = MessageDigest.getInstance(digestAlgorithm);
@@ -70,10 +71,12 @@ public class MerkleTreeNode {
       if (null == childrenRange) {
         childrenRange = child.getRange();
       } else {
-        List<Range> overlappingRanges = Range.mergeOverlapping(Arrays.asList(childrenRange, child.getRange()));
+        List<Range> overlappingRanges = Range.mergeOverlapping(Arrays.asList(childrenRange,
+            child.getRange()));
         if (1 != overlappingRanges.size()) {
           log.error("Tried to merge non-contiguous ranges: {} {}", childrenRange, child.getRange());
-          throw new IllegalArgumentException("Ranges must be contiguous: " + childrenRange + ", " + child.getRange());
+          throw new IllegalArgumentException("Ranges must be contiguous: " + childrenRange + ", "
+              + child.getRange());
         }
 
         childrenRange = overlappingRanges.get(0);
@@ -109,7 +112,8 @@ public class MerkleTreeNode {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder(32);
-    sb.append("range=").append(range).append(" level=").append(level).append(" hash=").append(Hex.encodeHexString(hash)).append(" children=").append(children);
+    sb.append("range=").append(range).append(" level=").append(level).append(" hash=")
+        .append(Hex.encodeHexString(hash)).append(" children=").append(children);
     return sb.toString();
   }
 
@@ -117,7 +121,8 @@ public class MerkleTreeNode {
   public boolean equals(Object o) {
     if (o instanceof MerkleTreeNode) {
       MerkleTreeNode other = (MerkleTreeNode) o;
-      return range.equals(other.getRange()) && level == other.getLevel() && children.equals(other.getChildren()) && Arrays.equals(hash, other.getHash());
+      return range.equals(other.getRange()) && level == other.getLevel()
+          && children.equals(other.getChildren()) && Arrays.equals(hash, other.getHash());
     }
 
     return false;
diff --git a/src/main/java/org/apache/accumulo/testing/merkle/cli/CompareTables.java b/src/main/java/org/apache/accumulo/testing/merkle/cli/CompareTables.java
index ac7eb27..b10b4cb 100644
--- a/src/main/java/org/apache/accumulo/testing/merkle/cli/CompareTables.java
+++ b/src/main/java/org/apache/accumulo/testing/merkle/cli/CompareTables.java
@@ -39,10 +39,11 @@ import org.slf4j.LoggerFactory;
 import com.beust.jcommander.Parameter;
 
 /**
- * Accepts a set of tables, computes the hashes for each, and prints the top-level hash for each table.
+ * Accepts a set of tables, computes the hashes for each, and prints the top-level hash for each
+ * table.
  * <p>
- * Will automatically create output tables for intermediate hashes instead of requiring their existence. This will raise an exception when the table we want to
- * use already exists.
+ * Will automatically create output tables for intermediate hashes instead of requiring their
+ * existence. This will raise an exception when the table we want to use already exists.
  */
 public class CompareTables {
   private static final Logger log = LoggerFactory.getLogger(CompareTables.class);
@@ -51,16 +52,17 @@ public class CompareTables {
     @Parameter(names = {"--tables"}, description = "Tables to compare", variableArity = true)
     public List<String> tables;
 
-    @Parameter(names = {"-nt", "--numThreads"}, required = false, description = "number of concurrent threads calculating digests")
+    @Parameter(names = {"-nt", "--numThreads"},
+        description = "number of concurrent threads calculating digests")
     private int numThreads = 4;
 
     @Parameter(names = {"-hash", "--hash"}, required = true, description = "type of hash to use")
     private String hashName;
 
-    @Parameter(names = {"-iter", "--iterator"}, required = false, description = "Should pushdown digest to iterators")
+    @Parameter(names = {"-iter", "--iterator"}, description = "Should pushdown digest to iterators")
     private boolean iteratorPushdown = false;
 
-    @Parameter(names = {"-s", "--splits"}, required = false, description = "File of splits to use for merkle tree")
+    @Parameter(names = {"-s", "--splits"}, description = "File of splits to use for merkle tree")
     private String splitsFile = null;
 
     public List<String> getTables() {
@@ -71,78 +73,64 @@ public class CompareTables {
       this.tables = tables;
     }
 
-    public int getNumThreads() {
+    int getNumThreads() {
       return numThreads;
     }
 
-    public void setNumThreads(int numThreads) {
-      this.numThreads = numThreads;
-    }
-
-    public String getHashName() {
+    String getHashName() {
       return hashName;
     }
 
-    public void setHashName(String hashName) {
-      this.hashName = hashName;
-    }
-
-    public boolean isIteratorPushdown() {
+    boolean isIteratorPushdown() {
       return iteratorPushdown;
     }
 
-    public void setIteratorPushdown(boolean iteratorPushdown) {
-      this.iteratorPushdown = iteratorPushdown;
-    }
-
-    public String getSplitsFile() {
+    String getSplitsFile() {
       return splitsFile;
     }
-
-    public void setSplitsFile(String splitsFile) {
-      this.splitsFile = splitsFile;
-    }
   }
 
   private CompareTablesOpts opts;
 
-  protected CompareTables() {}
-
-  public CompareTables(CompareTablesOpts opts) {
+  private CompareTables(CompareTablesOpts opts) {
     this.opts = opts;
   }
 
-  public Map<String,String> computeAllHashes() throws AccumuloException, AccumuloSecurityException, TableExistsException, NoSuchAlgorithmException,
+  private Map<String,String> computeAllHashes() throws AccumuloException,
+      AccumuloSecurityException, TableExistsException, NoSuchAlgorithmException,
       TableNotFoundException, FileNotFoundException {
-    final AccumuloClient client = opts.getClient();
-    final Map<String,String> hashesByTable = new HashMap<>();
+    try (AccumuloClient client = opts.createClient()) {
+      final Map<String,String> hashesByTable = new HashMap<>();
 
-    for (String table : opts.getTables()) {
-      final String outputTableName = table + "_merkle";
+      for (String table : opts.getTables()) {
+        final String outputTableName = table + "_merkle";
 
-      if (client.tableOperations().exists(outputTableName)) {
-        throw new IllegalArgumentException("Expected output table name to not yet exist: " + outputTableName);
-      }
+        if (client.tableOperations().exists(outputTableName)) {
+          throw new IllegalArgumentException("Expected output table name to not yet exist: "
+              + outputTableName);
+        }
 
-      client.tableOperations().create(outputTableName);
+        client.tableOperations().create(outputTableName);
 
-      GenerateHashes genHashes = new GenerateHashes();
-      Collection<Range> ranges = genHashes.getRanges(opts.getClient(), table, opts.getSplitsFile());
+        GenerateHashes genHashes = new GenerateHashes();
+        Collection<Range> ranges = genHashes.getRanges(client, table, opts.getSplitsFile());
 
-      try {
-        genHashes.run(opts.getClient(), table, table + "_merkle", opts.getHashName(), opts.getNumThreads(), opts.isIteratorPushdown(), ranges);
-      } catch (Exception e) {
-        log.error("Error generating hashes for {}", table, e);
-        throw new RuntimeException(e);
-      }
+        try {
+          genHashes.run(client, table, table + "_merkle", opts.getHashName(), opts.getNumThreads(),
+              opts.isIteratorPushdown(), ranges);
+        } catch (Exception e) {
+          log.error("Error generating hashes for {}", table, e);
+          throw new RuntimeException(e);
+        }
 
-      ComputeRootHash computeRootHash = new ComputeRootHash();
-      String hash = Hex.encodeHexString(computeRootHash.getHash(client, outputTableName, opts.getHashName()));
+        ComputeRootHash computeRootHash = new ComputeRootHash();
+        String hash = Hex.encodeHexString(computeRootHash.getHash(client, outputTableName,
+            opts.getHashName()));
 
-      hashesByTable.put(table, hash);
+        hashesByTable.put(table, hash);
+      }
+      return hashesByTable;
     }
-
-    return hashesByTable;
   }
 
   public static void main(String[] args) throws Exception {
@@ -151,7 +139,8 @@ public class CompareTables {
     opts.parseArgs("CompareTables", args, bwOpts);
 
     if (opts.isIteratorPushdown() && null != opts.getSplitsFile()) {
-      throw new IllegalArgumentException("Cannot use iterator pushdown with anything other than table split points");
+      throw new IllegalArgumentException(
+          "Cannot use iterator pushdown with anything other than table split points");
     }
 
     CompareTables compareTables = new CompareTables(opts);
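
This file shows the commit's "use createClient and close client" theme most clearly: the whole hash computation now runs inside try-with-resources so the AccumuloClient is always closed. A minimal sketch of that lifecycle plus the exists()/create() check used above; the properties file path and table name are placeholders:

    import org.apache.accumulo.core.client.Accumulo;
    import org.apache.accumulo.core.client.AccumuloClient;

    public class ClientLifecycleSketch {
      public static void main(String[] args) throws Exception {
        // The 2.0 AccumuloClient is AutoCloseable; closing it releases its resources,
        // which is why the reworked code above wraps it in try-with-resources.
        try (AccumuloClient client = Accumulo.newClient().from("accumulo-client.properties").build()) {
          String outputTableName = "table1_merkle";
          if (client.tableOperations().exists(outputTableName)) {
            throw new IllegalArgumentException(
                "Expected output table name to not yet exist: " + outputTableName);
          }
          client.tableOperations().create(outputTableName);
        }
      }
    }
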
diff --git a/src/main/java/org/apache/accumulo/testing/merkle/cli/ComputeRootHash.java b/src/main/java/org/apache/accumulo/testing/merkle/cli/ComputeRootHash.java
index bd1502e..1b5eea7 100644
--- a/src/main/java/org/apache/accumulo/testing/merkle/cli/ComputeRootHash.java
+++ b/src/main/java/org/apache/accumulo/testing/merkle/cli/ComputeRootHash.java
@@ -40,8 +40,9 @@ import org.apache.commons.codec.binary.Hex;
 import com.beust.jcommander.Parameter;
 
 /**
- * Given a table created by {@link GenerateHashes} which contains the leaves of a Merkle tree, compute the root node of the Merkle tree which can be quickly
- * compared to the root node of another Merkle tree to ascertain equality.
+ * Given a table created by {@link GenerateHashes} which contains the leaves of a Merkle tree,
+ * compute the root node of the Merkle tree which can be quickly compared to the root node of
+ * another Merkle tree to ascertain equality.
  */
 public class ComputeRootHash {
 
@@ -49,23 +50,22 @@ public class ComputeRootHash {
     @Parameter(names = {"-hash", "--hash"}, required = true, description = "type of hash to use")
     private String hashName;
 
-    public String getHashName() {
+    String getHashName() {
       return hashName;
     }
 
-    public void setHashName(String hashName) {
-      this.hashName = hashName;
-    }
   }
 
-  public byte[] getHash(ComputeRootHashOpts opts) throws AccumuloException, AccumuloSecurityException, TableNotFoundException, NoSuchAlgorithmException {
-    AccumuloClient client = opts.getClient();
-    String table = opts.getTableName();
-
-    return getHash(client, table, opts.getHashName());
+  private byte[] getHash(ComputeRootHashOpts opts) throws TableNotFoundException,
+      NoSuchAlgorithmException {
+    try (AccumuloClient client = opts.createClient()) {
+      String table = opts.getTableName();
+      return getHash(client, table, opts.getHashName());
+    }
   }
 
-  public byte[] getHash(AccumuloClient client, String table, String hashName) throws TableNotFoundException, NoSuchAlgorithmException {
+  byte[] getHash(AccumuloClient client, String table, String hashName)
+      throws TableNotFoundException, NoSuchAlgorithmException {
     List<MerkleTreeNode> leaves = getLeaves(client, table);
 
     MerkleTree tree = new MerkleTree(leaves, hashName);
@@ -73,7 +73,8 @@ public class ComputeRootHash {
     return tree.getRootNode().getHash();
   }
 
-  protected ArrayList<MerkleTreeNode> getLeaves(AccumuloClient client, String tableName) throws TableNotFoundException {
+  private ArrayList<MerkleTreeNode> getLeaves(AccumuloClient client, String tableName)
+      throws TableNotFoundException {
     // TODO make this a bit more resilient to very large merkle trees by
     // lazily reading more data from the table when necessary
     final Scanner s = client.createScanner(tableName, Authorizations.EMPTY);
@@ -83,7 +84,7 @@ public class ComputeRootHash {
       Range range = RangeSerialization.toRange(entry.getKey());
       byte[] hash = entry.getValue().get();
 
-      leaves.add(new MerkleTreeNode(range, 0, Collections.<Range> emptyList(), hash));
+      leaves.add(new MerkleTreeNode(range, 0, Collections.emptyList(), hash));
     }
 
     return leaves;
diff --git a/src/main/java/org/apache/accumulo/testing/merkle/cli/GenerateHashes.java b/src/main/java/org/apache/accumulo/testing/merkle/cli/GenerateHashes.java
index 21bb41f..5f33f88 100644
--- a/src/main/java/org/apache/accumulo/testing/merkle/cli/GenerateHashes.java
+++ b/src/main/java/org/apache/accumulo/testing/merkle/cli/GenerateHashes.java
@@ -59,7 +59,8 @@ import com.beust.jcommander.Parameter;
 import com.google.common.collect.Iterables;
 
 /**
- * Read from a table, compute a Merkle tree and output it to a table. Each key-value pair in the destination table is a leaf node of the Merkle tree.
+ * Read from a table, compute a Merkle tree and output it to a table. Each key-value pair in the
+ * destination table is a leaf node of the Merkle tree.
  */
 public class GenerateHashes {
   private static final Logger log = LoggerFactory.getLogger(GenerateHashes.class);
@@ -68,61 +69,47 @@ public class GenerateHashes {
     @Parameter(names = {"-hash", "--hash"}, required = true, description = "type of hash to use")
     private String hashName;
 
-    @Parameter(names = {"-o", "--output"}, required = true, description = "output table name, expected to exist and be writable")
+    @Parameter(names = {"-o", "--output"}, required = true,
+        description = "output table name, expected to exist and be writable")
     private String outputTableName;
 
-    @Parameter(names = {"-nt", "--numThreads"}, required = false, description = "number of concurrent threads calculating digests")
+    @Parameter(names = {"-nt", "--numThreads"}, required = false,
+        description = "number of concurrent threads calculating digests")
     private int numThreads = 4;
 
-    @Parameter(names = {"-iter", "--iterator"}, required = false, description = "Should we push down logic with an iterator")
+    @Parameter(names = {"-iter", "--iterator"}, required = false,
+        description = "Should we push down logic with an iterator")
     private boolean iteratorPushdown = false;
 
-    @Parameter(names = {"-s", "--splits"}, required = false, description = "File of splits to use for merkle tree")
+    @Parameter(names = {"-s", "--splits"}, required = false,
+        description = "File of splits to use for merkle tree")
     private String splitsFile = null;
 
-    public String getHashName() {
+    String getHashName() {
       return hashName;
     }
 
-    public void setHashName(String hashName) {
-      this.hashName = hashName;
-    }
-
-    public String getOutputTableName() {
+    String getOutputTableName() {
       return outputTableName;
     }
 
-    public void setOutputTableName(String outputTableName) {
-      this.outputTableName = outputTableName;
-    }
-
-    public int getNumThreads() {
+    int getNumThreads() {
       return numThreads;
     }
 
-    public void setNumThreads(int numThreads) {
-      this.numThreads = numThreads;
-    }
-
-    public boolean isIteratorPushdown() {
+    boolean isIteratorPushdown() {
       return iteratorPushdown;
     }
 
-    public void setIteratorPushdown(boolean iteratorPushdown) {
-      this.iteratorPushdown = iteratorPushdown;
-    }
-
-    public String getSplitsFile() {
+    String getSplitsFile() {
       return splitsFile;
     }
 
-    public void setSplitsFile(String splitsFile) {
-      this.splitsFile = splitsFile;
-    }
   }
 
-  public Collection<Range> getRanges(AccumuloClient client, String tableName, String splitsFile) throws TableNotFoundException, AccumuloSecurityException,
-      AccumuloException, FileNotFoundException {
+  Collection<Range> getRanges(AccumuloClient client, String tableName, String splitsFile)
+      throws TableNotFoundException, AccumuloSecurityException, AccumuloException,
+      FileNotFoundException {
     if (null == splitsFile) {
       log.info("Using table split points");
       Collection<Text> endRows = client.tableOperations().listSplits(tableName);
@@ -132,16 +119,13 @@ public class GenerateHashes {
       ArrayList<Text> splits = new ArrayList<>();
 
       String line;
-      java.util.Scanner file = new java.util.Scanner(new File(splitsFile), UTF_8.name());
-      try {
+      try (java.util.Scanner file = new java.util.Scanner(new File(splitsFile), UTF_8.name())) {
         while (file.hasNextLine()) {
           line = file.nextLine();
           if (!line.isEmpty()) {
             splits.add(new Text(line));
           }
         }
-      } finally {
-        file.close();
       }
 
       Collections.sort(splits);
@@ -149,15 +133,17 @@ public class GenerateHashes {
     }
   }
 
-  public void run(GenerateHashesOpts opts) throws TableNotFoundException, AccumuloSecurityException, AccumuloException, NoSuchAlgorithmException,
-      FileNotFoundException {
-    Collection<Range> ranges = getRanges(opts.getClient(), opts.getTableName(), opts.getSplitsFile());
-
-    run(opts.getClient(), opts.getTableName(), opts.getOutputTableName(), opts.getHashName(), opts.getNumThreads(), opts.isIteratorPushdown(), ranges);
+  public void run(GenerateHashesOpts opts) throws TableNotFoundException,
+      AccumuloSecurityException, AccumuloException, NoSuchAlgorithmException, FileNotFoundException {
+    try (AccumuloClient client = opts.createClient()) {
+      Collection<Range> ranges = getRanges(client, opts.getTableName(), opts.getSplitsFile());
+      run(client, opts.getTableName(), opts.getOutputTableName(), opts.getHashName(),
+          opts.getNumThreads(), opts.isIteratorPushdown(), ranges);
+    }
   }
 
   public void run(final AccumuloClient client, final String inputTableName, final String outputTableName, final String digestName, int numThreads,
-      final boolean iteratorPushdown, final Collection<Range> ranges) throws TableNotFoundException, AccumuloSecurityException, AccumuloException,
+      final boolean iteratorPushdown, final Collection<Range> ranges) throws TableNotFoundException, AccumuloException,
       NoSuchAlgorithmException {
     if (!client.tableOperations().exists(outputTableName)) {
       throw new IllegalArgumentException(outputTableName + " does not exist, please create it");
@@ -165,68 +151,63 @@ public class GenerateHashes {
 
     // Get some parallelism
     ExecutorService svc = Executors.newFixedThreadPool(numThreads);
-    final BatchWriter bw = client.createBatchWriter(outputTableName, new BatchWriterConfig());
 
-    try {
+    try (BatchWriter bw = client.createBatchWriter(outputTableName)) {
       for (final Range range : ranges) {
         final MessageDigest digest = getDigestAlgorithm(digestName);
 
-        svc.execute(new Runnable() {
-
-          @Override
-          public void run() {
-            Scanner s;
-            try {
-              s = client.createScanner(inputTableName, Authorizations.EMPTY);
-            } catch (Exception e) {
-              log.error("Could not get scanner for " + inputTableName, e);
-              throw new RuntimeException(e);
-            }
+        svc.execute(() -> {
+          Scanner s;
+          try {
+            s = client.createScanner(inputTableName, Authorizations.EMPTY);
+          } catch (Exception e) {
+            log.error("Could not get scanner for " + inputTableName, e);
+            throw new RuntimeException(e);
+          }
 
-            s.setRange(range);
-
-            Value v = null;
-            Mutation m = null;
-            if (iteratorPushdown) {
-              IteratorSetting cfg = new IteratorSetting(50, DigestIterator.class);
-              cfg.addOption(DigestIterator.HASH_NAME_KEY, digestName);
-              s.addScanIterator(cfg);
-
-              // The scanner should only ever return us one
-              // Key-Value, otherwise this approach won't work
-              Entry<Key,Value> entry = Iterables.getOnlyElement(s);
-
-              v = entry.getValue();
-              m = RangeSerialization.toMutation(range, v);
-            } else {
-              ByteArrayOutputStream baos = new ByteArrayOutputStream();
-              for (Entry<Key,Value> entry : s) {
-                DataOutputStream out = new DataOutputStream(baos);
-                try {
-                  entry.getKey().write(out);
-                  entry.getValue().write(out);
-                } catch (Exception e) {
-                  log.error("Error writing {}", entry, e);
-                  throw new RuntimeException(e);
-                }
-
-                digest.update(baos.toByteArray());
-                baos.reset();
+          s.setRange(range);
+
+          Value v;
+          Mutation m;
+          if (iteratorPushdown) {
+            IteratorSetting cfg = new IteratorSetting(50, DigestIterator.class);
+            cfg.addOption(DigestIterator.HASH_NAME_KEY, digestName);
+            s.addScanIterator(cfg);
+
+            // The scanner should only ever return us one
+            // Key-Value, otherwise this approach won't work
+            Entry<Key,Value> entry = Iterables.getOnlyElement(s);
+
+            v = entry.getValue();
+            m = RangeSerialization.toMutation(range, v);
+          } else {
+            ByteArrayOutputStream baos = new ByteArrayOutputStream();
+            for (Entry<Key,Value> entry : s) {
+              DataOutputStream out = new DataOutputStream(baos);
+              try {
+                entry.getKey().write(out);
+                entry.getValue().write(out);
+              } catch (Exception e) {
+                log.error("Error writing {}", entry, e);
+                throw new RuntimeException(e);
               }
 
-              v = new Value(digest.digest());
-              m = RangeSerialization.toMutation(range, v);
+              digest.update(baos.toByteArray());
+              baos.reset();
             }
 
-            // Log some progress
-            log.info("{} computed digest for {} of {}", Thread.currentThread().getName(), range, Hex.encodeHexString(v.get()));
+            v = new Value(digest.digest());
+            m = RangeSerialization.toMutation(range, v);
+          }
+
+          // Log some progress
+          log.info("{} computed digest for {} of {}", Thread.currentThread().getName(), range, Hex.encodeHexString(v.get()));
 
-            try {
-              bw.addMutation(m);
-            } catch (MutationsRejectedException e) {
-              log.error("Could not write mutation", e);
-              throw new RuntimeException(e);
-            }
+          try {
+            bw.addMutation(m);
+          } catch (MutationsRejectedException e) {
+            log.error("Could not write mutation", e);
+            throw new RuntimeException(e);
           }
         });
       }
@@ -243,14 +224,10 @@ public class GenerateHashes {
           return;
         }
       }
-    } finally {
-      // We can only safely close this when we're exiting or we've
-      // completely all tasks
-      bw.close();
     }
   }
 
-  public TreeSet<Range> endRowsToRanges(Collection<Text> endRows) {
+  private TreeSet<Range> endRowsToRanges(Collection<Text> endRows) {
     ArrayList<Text> sortedEndRows = new ArrayList<>(endRows);
     Collections.sort(sortedEndRows);
 
@@ -270,7 +247,7 @@ public class GenerateHashes {
     return ranges;
   }
 
-  protected MessageDigest getDigestAlgorithm(String digestName) throws NoSuchAlgorithmException {
+  private MessageDigest getDigestAlgorithm(String digestName) throws NoSuchAlgorithmException {
     return MessageDigest.getInstance(digestName);
   }
 
@@ -280,7 +257,8 @@ public class GenerateHashes {
     opts.parseArgs(GenerateHashes.class.getName(), args, bwOpts);
 
     if (opts.isIteratorPushdown() && null != opts.getSplitsFile()) {
-      throw new IllegalArgumentException("Cannot use iterator pushdown with anything other than table split points");
+      throw new IllegalArgumentException(
+          "Cannot use iterator pushdown with anything other than table split points");
     }
 
     GenerateHashes generate = new GenerateHashes();
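
The rework above swaps the anonymous Runnable for a lambda and the explicit bw.close() for try-with-resources around a BatchWriter shared by all digest tasks. A condensed sketch of that structure follows; it digests only the values (the real code also hashes the serialized keys) and keys its output rows in a simplified way rather than using RangeSerialization:

    import java.security.MessageDigest;
    import java.util.Collection;
    import java.util.Map.Entry;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;

    import org.apache.accumulo.core.client.AccumuloClient;
    import org.apache.accumulo.core.client.BatchWriter;
    import org.apache.accumulo.core.client.Scanner;
    import org.apache.accumulo.core.data.Key;
    import org.apache.accumulo.core.data.Mutation;
    import org.apache.accumulo.core.data.Range;
    import org.apache.accumulo.core.data.Value;
    import org.apache.accumulo.core.security.Authorizations;
    import org.apache.hadoop.io.Text;

    public class ParallelDigestSketch {
      static void digestRanges(AccumuloClient client, String inputTable, String outputTable,
          Collection<Range> ranges, String digestName, int numThreads) throws Exception {
        ExecutorService svc = Executors.newFixedThreadPool(numThreads);
        // One shared BatchWriter; try-with-resources closes it only after every task has
        // finished, replacing the explicit close() that the commit removes.
        try (BatchWriter bw = client.createBatchWriter(outputTable)) {
          for (Range range : ranges) {
            svc.execute(() -> {
              try (Scanner s = client.createScanner(inputTable, Authorizations.EMPTY)) {
                s.setRange(range);
                MessageDigest digest = MessageDigest.getInstance(digestName);
                for (Entry<Key,Value> entry : s) {
                  digest.update(entry.getValue().get());
                }
                // Simplified output row; the real code serializes the Range itself.
                Mutation m = new Mutation(new Text("digest_" + range.hashCode()));
                m.put(new Text("hash"), new Text(digestName), new Value(digest.digest()));
                bw.addMutation(m);
              } catch (Exception e) {
                throw new RuntimeException(e);
              }
            });
          }
          svc.shutdown();
          svc.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
        }
      }
    }
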
diff --git a/src/main/java/org/apache/accumulo/testing/merkle/cli/ManualComparison.java b/src/main/java/org/apache/accumulo/testing/merkle/cli/ManualComparison.java
index 367d99a..728dbc8 100644
--- a/src/main/java/org/apache/accumulo/testing/merkle/cli/ManualComparison.java
+++ b/src/main/java/org/apache/accumulo/testing/merkle/cli/ManualComparison.java
@@ -29,8 +29,8 @@ import org.apache.accumulo.core.security.Authorizations;
 import com.beust.jcommander.Parameter;
 
 /**
- * Accepts two table names and enumerates all key-values pairs in both checking for correctness. All differences between the two tables will be printed to the
- * console.
+ * Accepts two table names and enumerates all key-values pairs in both checking for correctness. All
+ * differences between the two tables will be printed to the console.
  */
 public class ManualComparison {
 
@@ -46,50 +46,51 @@ public class ManualComparison {
     ManualComparisonOpts opts = new ManualComparisonOpts();
     opts.parseArgs("ManualComparison", args);
 
-    AccumuloClient client = opts.getClient();
+    try (AccumuloClient client = opts.createClient();
+        Scanner s1 = client.createScanner(opts.table1, Authorizations.EMPTY);
+        Scanner s2 = client.createScanner(opts.table2, Authorizations.EMPTY)) {
+      Iterator<Entry<Key,Value>> iter1 = s1.iterator(), iter2 = s2.iterator();
+      boolean incrementFirst = true, incrementSecond = true;
 
-    Scanner s1 = client.createScanner(opts.table1, Authorizations.EMPTY), s2 = client.createScanner(opts.table2, Authorizations.EMPTY);
-    Iterator<Entry<Key,Value>> iter1 = s1.iterator(), iter2 = s2.iterator();
-    boolean incrementFirst = true, incrementSecond = true;
-
-    Entry<Key,Value> entry1 = iter1.next(), entry2 = iter2.next();
-    while (iter1.hasNext() && iter2.hasNext()) {
-      if (incrementFirst) {
-        entry1 = iter1.next();
-      }
-      if (incrementSecond) {
-        entry2 = iter2.next();
-      }
-      incrementFirst = false;
-      incrementSecond = false;
+      Entry<Key,Value> entry1 = iter1.next(), entry2 = iter2.next();
+      while (iter1.hasNext() && iter2.hasNext()) {
+        if (incrementFirst) {
+          entry1 = iter1.next();
+        }
+        if (incrementSecond) {
+          entry2 = iter2.next();
+        }
+        incrementFirst = false;
+        incrementSecond = false;
 
-      if (!entry1.equals(entry2)) {
+        if (!entry1.equals(entry2)) {
 
-        if (entry1.getKey().compareTo(entry2.getKey()) < 0) {
-          System.out.println("Exist in original " + entry1);
-          incrementFirst = true;
-        } else if (entry2.getKey().compareTo(entry1.getKey()) < 0) {
-          System.out.println("Exist in replica " + entry2);
-          incrementSecond = true;
+          if (entry1.getKey().compareTo(entry2.getKey()) < 0) {
+            System.out.println("Exist in original " + entry1);
+            incrementFirst = true;
+          } else if (entry2.getKey().compareTo(entry1.getKey()) < 0) {
+            System.out.println("Exist in replica " + entry2);
+            incrementSecond = true;
+          } else {
+            System.out.println("Differ... " + entry1 + " " + entry2);
+            incrementFirst = true;
+            incrementSecond = true;
+          }
         } else {
-          System.out.println("Differ... " + entry1 + " " + entry2);
           incrementFirst = true;
           incrementSecond = true;
         }
-      } else {
-        incrementFirst = true;
-        incrementSecond = true;
       }
-    }
 
-    System.out.println("\nExtra entries from " + opts.table1);
-    while (iter1.hasNext()) {
-      System.out.println(iter1.next());
-    }
+      System.out.println("\nExtra entries from " + opts.table1);
+      while (iter1.hasNext()) {
+        System.out.println(iter1.next());
+      }
 
-    System.out.println("\nExtra entries from " + opts.table2);
-    while (iter2.hasNext()) {
-      System.out.println(iter2.next());
+      System.out.println("\nExtra entries from " + opts.table2);
+      while (iter2.hasNext()) {
+        System.out.println(iter2.next());
+      }
     }
   }
 }
diff --git a/src/main/java/org/apache/accumulo/testing/merkle/ingest/RandomWorkload.java b/src/main/java/org/apache/accumulo/testing/merkle/ingest/RandomWorkload.java
index 57cb839..8ac202c 100644
--- a/src/main/java/org/apache/accumulo/testing/merkle/ingest/RandomWorkload.java
+++ b/src/main/java/org/apache/accumulo/testing/merkle/ingest/RandomWorkload.java
@@ -39,16 +39,20 @@ public class RandomWorkload {
     @Parameter(names = {"-n", "--num"}, required = true, description = "Num records to write")
     public long numRecords;
 
-    @Parameter(names = {"-r", "--rows"}, required = true, description = "Range of rows that can be generated")
+    @Parameter(names = {"-r", "--rows"}, required = true,
+        description = "Range of rows that can be generated")
     public int rowMax;
 
-    @Parameter(names = {"-cf", "--colfams"}, required = true, description = "Range of column families that can be generated")
+    @Parameter(names = {"-cf", "--colfams"}, required = true,
+        description = "Range of column families that can be generated")
     public int cfMax;
 
-    @Parameter(names = {"-cq", "--colquals"}, required = true, description = "Range of column qualifiers that can be generated")
+    @Parameter(names = {"-cq", "--colquals"}, required = true,
+        description = "Range of column qualifiers that can be generated")
     public int cqMax;
 
-    @Parameter(names = {"-d", "--deletes"}, required = false, description = "Percentage of updates that should be deletes")
+    @Parameter(names = {"-d", "--deletes"}, required = false,
+        description = "Percentage of updates that should be deletes")
     public int deletePercent = 5;
 
     public RandomWorkloadOpts() {
@@ -61,24 +65,26 @@ public class RandomWorkload {
   }
 
   public void run(RandomWorkloadOpts opts, BatchWriterConfig cfg) throws Exception {
-    run(opts.getClient(), opts.getTableName(), cfg, opts.numRecords, opts.rowMax, opts.cfMax, opts.cqMax, opts.deletePercent);
+    try (AccumuloClient client = opts.createClient()) {
+      run(client, opts.getTableName(), cfg, opts.numRecords, opts.rowMax, opts.cfMax, opts.cqMax,
+          opts.deletePercent);
+    }
   }
 
-  public void run(final AccumuloClient client, final String tableName, final BatchWriterConfig cfg, final long numRecords, int rowMax, int cfMax, int cqMax,
-      int deletePercent) throws Exception {
+  public void run(final AccumuloClient client, final String tableName, final BatchWriterConfig cfg,
+      final long numRecords, int rowMax, int cfMax, int cqMax, int deletePercent) throws Exception {
 
     final Random rowRand = new Random(12345);
     final Random cfRand = new Random(12346);
     final Random cqRand = new Random(12347);
     final Random deleteRand = new Random(12348);
-    long valueCounter = 0l;
+    long valueCounter = 0L;
 
     if (!client.tableOperations().exists(tableName)) {
       client.tableOperations().create(tableName);
     }
 
-    BatchWriter bw = client.createBatchWriter(tableName, cfg);
-    try {
+    try (BatchWriter bw = client.createBatchWriter(tableName, cfg)) {
       final Text row = new Text(), cf = new Text(), cq = new Text();
       final Value value = new Value();
       for (long i = 0; i < numRecords; i++) {
@@ -104,8 +110,6 @@ public class RandomWorkload {
 
         valueCounter++;
       }
-    } finally {
-      bw.close();
     }
   }
 
diff --git a/src/main/java/org/apache/accumulo/testing/merkle/skvi/DigestIterator.java b/src/main/java/org/apache/accumulo/testing/merkle/skvi/DigestIterator.java
index 741c63d..a00f9a4 100644
--- a/src/main/java/org/apache/accumulo/testing/merkle/skvi/DigestIterator.java
+++ b/src/main/java/org/apache/accumulo/testing/merkle/skvi/DigestIterator.java
@@ -32,10 +32,13 @@ import org.apache.accumulo.core.iterators.IteratorEnvironment;
 import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
 
 /**
- * {@link SortedKeyValueIterator} which attempts to compute a hash over some range of Key-Value pairs.
+ * {@link SortedKeyValueIterator} which attempts to compute a hash over some range of Key-Value
+ * pairs.
  * <p>
- * For the purposes of constructing a Merkle tree, this class will only generate a meaningful result if the (Batch)Scanner will compute a single digest over a
- * Range. If the (Batch)Scanner stops and restarts in the middle of a session, incorrect values will be returned and the merkle tree will be invalid.
+ * For the purposes of constructing a Merkle tree, this class will only generate a meaningful result
+ * if the (Batch)Scanner will compute a single digest over a Range. If the (Batch)Scanner stops and
+ * restarts in the middle of a session, incorrect values will be returned and the merkle tree will
+ * be invalid.
  */
 public class DigestIterator implements SortedKeyValueIterator<Key,Value> {
   public static final String HASH_NAME_KEY = "hash.name";
@@ -46,7 +49,8 @@ public class DigestIterator implements SortedKeyValueIterator<Key,Value> {
   private SortedKeyValueIterator<Key,Value> source;
 
   @Override
-  public void init(SortedKeyValueIterator<Key,Value> source, Map<String,String> options, IteratorEnvironment env) throws IOException {
+  public void init(SortedKeyValueIterator<Key,Value> source, Map<String,String> options,
+      IteratorEnvironment env) throws IOException {
     String hashName = options.get(HASH_NAME_KEY);
     if (null == hashName) {
       throw new IOException(HASH_NAME_KEY + " must be provided as option");
@@ -83,7 +87,8 @@ public class DigestIterator implements SortedKeyValueIterator<Key,Value> {
   }
 
   @Override
-  public void seek(Range range, Collection<ByteSequence> columnFamilies, boolean inclusive) throws IOException {
+  public void seek(Range range, Collection<ByteSequence> columnFamilies, boolean inclusive)
+      throws IOException {
     this.source.seek(range, columnFamilies, inclusive);
 
     consume();
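
DigestIterator is the server-side pushdown that GenerateHashes attaches when -iter is set, so each range comes back as a single key-value pair holding its hash. A minimal sketch of attaching it to a scanner; the priority (50) and HASH_NAME_KEY option mirror the code above, while the "MD5" algorithm name is only an example:

    import java.util.Map.Entry;

    import org.apache.accumulo.core.client.AccumuloClient;
    import org.apache.accumulo.core.client.IteratorSetting;
    import org.apache.accumulo.core.client.Scanner;
    import org.apache.accumulo.core.data.Key;
    import org.apache.accumulo.core.data.Range;
    import org.apache.accumulo.core.data.Value;
    import org.apache.accumulo.core.security.Authorizations;
    import org.apache.accumulo.testing.merkle.skvi.DigestIterator;

    import com.google.common.collect.Iterables;

    public class PushdownSketch {
      static byte[] digestRange(AccumuloClient client, String table, Range range) throws Exception {
        try (Scanner s = client.createScanner(table, Authorizations.EMPTY)) {
          s.setRange(range);
          // Run the digest on the tablet servers; the scan then returns exactly one
          // key-value pair whose value is the hash for the whole range.
          IteratorSetting cfg = new IteratorSetting(50, DigestIterator.class);
          cfg.addOption(DigestIterator.HASH_NAME_KEY, "MD5");
          s.addScanIterator(cfg);
          Entry<Key,Value> entry = Iterables.getOnlyElement(s);
          return entry.getValue().get();
        }
      }
    }
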
diff --git a/src/main/java/org/apache/accumulo/testing/performance/Report.java b/src/main/java/org/apache/accumulo/testing/performance/Report.java
index 45eb936..a8bb1c8 100644
--- a/src/main/java/org/apache/accumulo/testing/performance/Report.java
+++ b/src/main/java/org/apache/accumulo/testing/performance/Report.java
@@ -56,8 +56,8 @@ public class Report {
     }
 
     public Builder result(String id, LongSummaryStatistics stats, String description) {
-      results.add(new Result(id, new Stats(stats.getMin(), stats.getMax(), stats.getSum(), stats.getAverage(), stats.getCount()), description,
-          Purpose.COMPARISON));
+      results.add(new Result(id, new Stats(stats.getMin(), stats.getMax(), stats.getSum(), stats
+          .getAverage(), stats.getCount()), description, Purpose.COMPARISON));
       return this;
     }
 
@@ -72,8 +72,8 @@ public class Report {
     }
 
     public Builder info(String id, LongSummaryStatistics stats, String description) {
-      results.add(new Result(id, new Stats(stats.getMin(), stats.getMax(), stats.getSum(), stats.getAverage(), stats.getCount()), description,
-          Purpose.INFORMATIONAL));
+      results.add(new Result(id, new Stats(stats.getMin(), stats.getMax(), stats.getSum(), stats
+          .getAverage(), stats.getCount()), description, Purpose.INFORMATIONAL));
       return this;
     }
 
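
Report.Builder above copies min, max, sum, average and count out of a java.util.LongSummaryStatistics. A tiny standalone example of producing one from raw timings (hypothetical values):

    import java.util.LongSummaryStatistics;
    import java.util.stream.LongStream;

    public class StatsSketch {
      public static void main(String[] args) {
        // Hypothetical per-lookup latencies in milliseconds.
        LongSummaryStatistics stats = LongStream.of(12, 9, 15, 11, 30).summaryStatistics();
        System.out.printf("min=%d max=%d sum=%d avg=%.2f count=%d%n", stats.getMin(),
            stats.getMax(), stats.getSum(), stats.getAverage(), stats.getCount());
      }
    }
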
diff --git a/src/main/java/org/apache/accumulo/testing/performance/impl/Compare.java b/src/main/java/org/apache/accumulo/testing/performance/impl/Compare.java
index 325b4d0..3360c34 100644
--- a/src/main/java/org/apache/accumulo/testing/performance/impl/Compare.java
+++ b/src/main/java/org/apache/accumulo/testing/performance/impl/Compare.java
@@ -79,7 +79,8 @@ public class Compare {
         System.out.printf("%s %s %.2f %.2f\n", testId.testClass, testId.id, oldResult, newResult);
       } else {
         double change = (newResult - oldResult) / oldResult;
-        System.out.printf("%s %s %.2f %.2f %.2f%s\n", testId.testClass, testId.id, oldResult, newResult, change * 100, "%");
+        System.out.printf("%s %s %.2f %.2f %.2f%s\n", testId.testClass, testId.id, oldResult,
+            newResult, change * 100, "%");
       }
     }
   }
diff --git a/src/main/java/org/apache/accumulo/testing/performance/impl/ContextualReport.java b/src/main/java/org/apache/accumulo/testing/performance/impl/ContextualReport.java
index 45f40ad..cf00789 100644
--- a/src/main/java/org/apache/accumulo/testing/performance/impl/ContextualReport.java
+++ b/src/main/java/org/apache/accumulo/testing/performance/impl/ContextualReport.java
@@ -28,7 +28,8 @@ public class ContextualReport extends Report {
   public final String startTime;
   public final String finishTime;
 
-  public ContextualReport(String testClass, String accumuloVersion, Instant startTime, Instant finishTime, Report r) {
+  public ContextualReport(String testClass, String accumuloVersion, Instant startTime,
+      Instant finishTime, Report r) {
     super(r.id, r.description, r.results, r.parameters);
     this.testClass = testClass;
     this.accumuloVersion = accumuloVersion;
diff --git a/src/main/java/org/apache/accumulo/testing/performance/impl/ListTests.java b/src/main/java/org/apache/accumulo/testing/performance/impl/ListTests.java
index 4de3527..24af3d8 100644
--- a/src/main/java/org/apache/accumulo/testing/performance/impl/ListTests.java
+++ b/src/main/java/org/apache/accumulo/testing/performance/impl/ListTests.java
@@ -26,10 +26,12 @@ import com.google.common.reflect.ClassPath.ClassInfo;
 public class ListTests {
   public static void main(String[] args) throws Exception {
 
-    ImmutableSet<ClassInfo> classes = ClassPath.from(ListTests.class.getClassLoader()).getTopLevelClasses();
+    ImmutableSet<ClassInfo> classes = ClassPath.from(ListTests.class.getClassLoader())
+        .getTopLevelClasses();
 
     for (ClassInfo classInfo : classes) {
-      if (classInfo.getName().endsWith("PT") && PerformanceTest.class.isAssignableFrom(classInfo.load())) {
+      if (classInfo.getName().endsWith("PT")
+          && PerformanceTest.class.isAssignableFrom(classInfo.load())) {
         System.out.println(classInfo.getName());
       }
     }
diff --git a/src/main/java/org/apache/accumulo/testing/performance/impl/PerfTestRunner.java b/src/main/java/org/apache/accumulo/testing/performance/impl/PerfTestRunner.java
index 92ef0ef..2f230cd 100644
--- a/src/main/java/org/apache/accumulo/testing/performance/impl/PerfTestRunner.java
+++ b/src/main/java/org/apache/accumulo/testing/performance/impl/PerfTestRunner.java
@@ -41,7 +41,8 @@ public class PerfTestRunner {
     String accumuloVersion = args[2];
     String outputDir = args[3];
 
-    PerformanceTest perfTest = Class.forName(className).asSubclass(PerformanceTest.class).newInstance();
+    PerformanceTest perfTest = Class.forName(className).asSubclass(PerformanceTest.class)
+        .newInstance();
 
     AccumuloClient client = Accumulo.newClient().from(clientProps).build();
 
@@ -62,7 +63,8 @@ public class PerfTestRunner {
 
     DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMddHHmmss");
     String time = Instant.now().atZone(ZoneId.systemDefault()).format(formatter);
-    Path outputFile = Paths.get(outputDir, perfTest.getClass().getSimpleName() + "_" + time + ".json");
+    Path outputFile = Paths.get(outputDir, perfTest.getClass().getSimpleName() + "_" + time
+        + ".json");
 
     try (Writer writer = Files.newBufferedWriter(outputFile)) {
       gson.toJson(report, writer);
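
PerfTestRunner now builds its AccumuloClient from a client properties file and serializes the report to a timestamped JSON file. A condensed sketch of that flow, assuming a properties file path on the command line and Gson on the classpath; the placeholder map stands in for the real report object:

    import java.io.Writer;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.time.Instant;
    import java.time.ZoneId;
    import java.time.format.DateTimeFormatter;
    import java.util.Collections;
    import java.util.Map;

    import org.apache.accumulo.core.client.Accumulo;
    import org.apache.accumulo.core.client.AccumuloClient;

    import com.google.gson.Gson;

    public class ReportWriter {
      public static void main(String[] args) throws Exception {
        String clientProps = args[0]; // path to accumulo-client.properties
        String outputDir = args[1];

        // The 2.0 client is AutoCloseable; run the test inside its scope.
        try (AccumuloClient client = Accumulo.newClient().from(clientProps).build()) {
          // ... run the performance test against 'client' ...
        }

        // Timestamped output file, mirroring the yyyyMMddHHmmss pattern above.
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMddHHmmss");
        String time = Instant.now().atZone(ZoneId.systemDefault()).format(formatter);
        Path outputFile = Paths.get(outputDir, "ExamplePT_" + time + ".json");

        Map<String,String> report = Collections.singletonMap("id", "example"); // placeholder
        try (Writer writer = Files.newBufferedWriter(outputFile)) {
          new Gson().toJson(report, writer);
        }
      }
    }
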
diff --git a/src/main/java/org/apache/accumulo/testing/performance/tests/RandomCachedLookupsPT.java b/src/main/java/org/apache/accumulo/testing/performance/tests/RandomCachedLookupsPT.java
index bee996b..8fcd200 100644
--- a/src/main/java/org/apache/accumulo/testing/performance/tests/RandomCachedLookupsPT.java
+++ b/src/main/java/org/apache/accumulo/testing/performance/tests/RandomCachedLookupsPT.java
@@ -90,23 +90,39 @@ public class RandomCachedLookupsPT implements PerformanceTest {
     long d128 = doLookups(env.getClient(), 128, NUM_LOOKUPS_PER_THREAD);
 
     reportBuilder.id("smalls");
-    reportBuilder.description("Runs multiple threads each doing lots of small random scans.  For this test data and index cache are enabled.");
-    reportBuilder.info("warmup", 32 * NUM_LOOKUPS_PER_THREAD, warmup, "Random lookup per sec for 32 threads");
-    reportBuilder.info("lookups_1", NUM_LOOKUPS_PER_THREAD, d1, "Random lookup per sec rate for 1 thread");
-    reportBuilder.info("lookups_4", 4 * NUM_LOOKUPS_PER_THREAD, d4, "Random lookup per sec rate for 4 threads");
-    reportBuilder.info("lookups_8", 8 * NUM_LOOKUPS_PER_THREAD, d8, "Random lookup per sec rate for 8 threads");
-    reportBuilder.info("lookups_16", 16 * NUM_LOOKUPS_PER_THREAD, d16, "Random lookup per sec rate for 16 threads");
-    reportBuilder.info("lookups_32", 32 * NUM_LOOKUPS_PER_THREAD, d32, "Random lookup per sec rate for 32 threads");
-    reportBuilder.info("lookups_64", 64 * NUM_LOOKUPS_PER_THREAD, d64, "Random lookup per sec rate for 64 threads");
-    reportBuilder.info("lookups_128", 128 * NUM_LOOKUPS_PER_THREAD, d128, "Random lookup per sec rate for 128 threads");
-
-    reportBuilder.result("avg_1", d1 / (double) NUM_LOOKUPS_PER_THREAD, "Average milliseconds per lookup for 1 thread");
-    reportBuilder.result("avg_4", d4 / (double) NUM_LOOKUPS_PER_THREAD, "Average milliseconds per lookup for 4 threads");
-    reportBuilder.result("avg_8", d8 / (double) NUM_LOOKUPS_PER_THREAD, "Average milliseconds per lookup for 8 threads");
-    reportBuilder.result("avg_16", d16 / (double) NUM_LOOKUPS_PER_THREAD, "Average milliseconds per lookup for 16 threads");
-    reportBuilder.result("avg_32", d32 / (double) NUM_LOOKUPS_PER_THREAD, "Average milliseconds per lookup for 32 threads");
-    reportBuilder.result("avg_64", d64 / (double) NUM_LOOKUPS_PER_THREAD, "Average milliseconds per lookup for 64 threads");
-    reportBuilder.result("avg_128", d128 / (double) NUM_LOOKUPS_PER_THREAD, "Average milliseconds per lookup for 128 threads");
+    reportBuilder
+        .description("Runs multiple threads each doing lots of small random scans.  For this test data and index cache are enabled.");
+    reportBuilder.info("warmup", 32 * NUM_LOOKUPS_PER_THREAD, warmup,
+        "Random lookup per sec for 32 threads");
+    reportBuilder.info("lookups_1", NUM_LOOKUPS_PER_THREAD, d1,
+        "Random lookup per sec rate for 1 thread");
+    reportBuilder.info("lookups_4", 4 * NUM_LOOKUPS_PER_THREAD, d4,
+        "Random lookup per sec rate for 4 threads");
+    reportBuilder.info("lookups_8", 8 * NUM_LOOKUPS_PER_THREAD, d8,
+        "Random lookup per sec rate for 8 threads");
+    reportBuilder.info("lookups_16", 16 * NUM_LOOKUPS_PER_THREAD, d16,
+        "Random lookup per sec rate for 16 threads");
+    reportBuilder.info("lookups_32", 32 * NUM_LOOKUPS_PER_THREAD, d32,
+        "Random lookup per sec rate for 32 threads");
+    reportBuilder.info("lookups_64", 64 * NUM_LOOKUPS_PER_THREAD, d64,
+        "Random lookup per sec rate for 64 threads");
+    reportBuilder.info("lookups_128", 128 * NUM_LOOKUPS_PER_THREAD, d128,
+        "Random lookup per sec rate for 128 threads");
+
+    reportBuilder.result("avg_1", d1 / (double) NUM_LOOKUPS_PER_THREAD,
+        "Average milliseconds per lookup for 1 thread");
+    reportBuilder.result("avg_4", d4 / (double) NUM_LOOKUPS_PER_THREAD,
+        "Average milliseconds per lookup for 4 threads");
+    reportBuilder.result("avg_8", d8 / (double) NUM_LOOKUPS_PER_THREAD,
+        "Average milliseconds per lookup for 8 threads");
+    reportBuilder.result("avg_16", d16 / (double) NUM_LOOKUPS_PER_THREAD,
+        "Average milliseconds per lookup for 16 threads");
+    reportBuilder.result("avg_32", d32 / (double) NUM_LOOKUPS_PER_THREAD,
+        "Average milliseconds per lookup for 32 threads");
+    reportBuilder.result("avg_64", d64 / (double) NUM_LOOKUPS_PER_THREAD,
+        "Average milliseconds per lookup for 64 threads");
+    reportBuilder.result("avg_128", d128 / (double) NUM_LOOKUPS_PER_THREAD,
+        "Average milliseconds per lookup for 128 threads");
 
     return reportBuilder.build();
   }
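
Each result entry above divides a measured duration by the number of lookups to get an average per-lookup latency. A tiny worked example of that arithmetic with illustrative numbers:

    public class AvgLookup {
      public static void main(String[] args) {
        final int NUM_LOOKUPS_PER_THREAD = 25_000; // illustrative constant
        long d4 = 50_000;                          // total ms measured for the 4-thread run
        // Average milliseconds per lookup for that run.
        double avg4 = d4 / (double) NUM_LOOKUPS_PER_THREAD;
        System.out.printf("avg_4 = %.3f ms per lookup%n", avg4);
      }
    }
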
diff --git a/src/main/java/org/apache/accumulo/testing/performance/tests/ScanExecutorPT.java b/src/main/java/org/apache/accumulo/testing/performance/tests/ScanExecutorPT.java
index 125859d..0844683 100644
--- a/src/main/java/org/apache/accumulo/testing/performance/tests/ScanExecutorPT.java
+++ b/src/main/java/org/apache/accumulo/testing/performance/tests/ScanExecutorPT.java
@@ -61,7 +61,8 @@ public class ScanExecutorPT implements PerformanceTest {
   private static final String TEST_DESC = "Scan Executor Test.  Test running lots of short scans "
       + "while long scans are running in the background.  Each short scan reads a random row and "
       + "family. Using execution hints, short scans are randomly either given a high priority or "
-      + "a dedicated executor.  If the scan prioritizer or dispatcher is not working properly, " + "then the short scans will be orders of magnitude slower.";
+      + "a dedicated executor.  If the scan prioritizer or dispatcher is not working properly, "
+      + "then the short scans will be orders of magnitude slower.";
 
   @Override
   public SystemConfiguration getSystemConfig() {
@@ -69,10 +70,14 @@ public class ScanExecutorPT implements PerformanceTest {
 
     siteCfg.put(Property.TSERV_SCAN_MAX_OPENFILES.getKey(), "200");
     siteCfg.put(Property.TSERV_MINTHREADS.getKey(), "200");
-    siteCfg.put(Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey() + "se1.threads", SCAN_EXECUTOR_THREADS);
-    siteCfg.put(Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey() + "se1.prioritizer", SCAN_PRIORITIZER);
-    siteCfg.put(Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey() + "se2.threads", SCAN_EXECUTOR_THREADS);
-    siteCfg.put(Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey() + "se2.prioritizer", SCAN_PRIORITIZER);
+    siteCfg.put(Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey() + "se1.threads",
+        SCAN_EXECUTOR_THREADS);
+    siteCfg
+        .put(Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey() + "se1.prioritizer", SCAN_PRIORITIZER);
+    siteCfg.put(Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey() + "se2.threads",
+        SCAN_EXECUTOR_THREADS);
+    siteCfg
+        .put(Property.TSERV_SCAN_EXECUTORS_PREFIX.getKey() + "se2.prioritizer", SCAN_PRIORITIZER);
 
     return new SystemConfiguration().setAccumuloConfig(siteCfg);
   }
@@ -135,7 +140,8 @@ public class ScanExecutorPT implements PerformanceTest {
     return builder.build();
   }
 
-  private static long scan(String tableName, AccumuloClient c, byte[] row, byte[] fam, Map<String,String> hints) throws TableNotFoundException {
+  private static long scan(String tableName, AccumuloClient c, byte[] row, byte[] fam,
+      Map<String,String> hints) throws TableNotFoundException {
     long t1 = System.currentTimeMillis();
     int count = 0;
     try (Scanner scanner = c.createScanner(tableName, Authorizations.EMPTY)) {
@@ -149,7 +155,8 @@ public class ScanExecutorPT implements PerformanceTest {
     return System.currentTimeMillis() - t1;
   }
 
-  private long scan(String tableName, AccumuloClient c, AtomicBoolean stop, Map<String,String> hints) throws TableNotFoundException {
+  private long scan(String tableName, AccumuloClient c, AtomicBoolean stop, Map<String,String> hints)
+      throws TableNotFoundException {
     long count = 0;
     while (!stop.get()) {
       try (Scanner scanner = c.createScanner(tableName, Authorizations.EMPTY)) {
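
ScanExecutorPT times many short scans that carry execution hints, so the configured prioritizer or dispatcher can route them. A minimal sketch of one such timed scan using the 2.0 ScannerBase execution-hint API; the table, row, family, and hint contents are assumptions of the sketch:

    import java.util.Map;
    import java.util.Map.Entry;

    import org.apache.accumulo.core.client.AccumuloClient;
    import org.apache.accumulo.core.client.Scanner;
    import org.apache.accumulo.core.client.TableNotFoundException;
    import org.apache.accumulo.core.data.Key;
    import org.apache.accumulo.core.data.Range;
    import org.apache.accumulo.core.data.Value;
    import org.apache.accumulo.core.security.Authorizations;
    import org.apache.hadoop.io.Text;

    public class TimedScan {
      // Returns elapsed milliseconds for one short scan of a single row and family.
      static long scan(AccumuloClient c, String table, byte[] row, byte[] fam,
          Map<String,String> hints) throws TableNotFoundException {
        long start = System.currentTimeMillis();
        try (Scanner scanner = c.createScanner(table, Authorizations.EMPTY)) {
          // Hints are interpreted by the scan dispatcher/prioritizer configured on the tserver.
          scanner.setExecutionHints(hints);
          scanner.setRange(Range.exact(new Text(row)));
          scanner.fetchColumnFamily(new Text(fam));
          for (Entry<Key,Value> entry : scanner) {
            // consume entries; the real test counts them
          }
        }
        return System.currentTimeMillis() - start;
      }
    }
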
diff --git a/src/main/java/org/apache/accumulo/testing/performance/tests/ScanFewFamiliesPT.java b/src/main/java/org/apache/accumulo/testing/performance/tests/ScanFewFamiliesPT.java
index f197a97..d2bddf4 100644
--- a/src/main/java/org/apache/accumulo/testing/performance/tests/ScanFewFamiliesPT.java
+++ b/src/main/java/org/apache/accumulo/testing/performance/tests/ScanFewFamiliesPT.java
@@ -69,8 +69,10 @@ public class ScanFewFamiliesPT implements PerformanceTest {
     for (int numFams : new int[] {1, 2, 4, 8, 16}) {
       LongSummaryStatistics stats = runScans(env, tableName, numFams);
       String fams = Strings.padStart(numFams + "", 2, '0');
-      builder.info("f" + fams + "_stats", stats, "Times in ms to fetch " + numFams + " families from all rows");
-      builder.result("f" + fams, stats.getAverage(), "Average time in ms to fetch " + numFams + " families from all rows");
+      builder.info("f" + fams + "_stats", stats, "Times in ms to fetch " + numFams
+          + " families from all rows");
+      builder.result("f" + fams, stats.getAverage(), "Average time in ms to fetch " + numFams
+          + " families from all rows");
     }
 
     builder.id("sfewfam");
@@ -84,7 +86,8 @@ public class ScanFewFamiliesPT implements PerformanceTest {
     return builder.build();
   }
 
-  private LongSummaryStatistics runScans(Environment env, String tableName, int numFamilies) throws TableNotFoundException {
+  private LongSummaryStatistics runScans(Environment env, String tableName, int numFamilies)
+      throws TableNotFoundException {
     Random rand = new Random();
     LongSummaryStatistics stats = new LongSummaryStatistics();
     for (int i = 0; i < 50; i++) {
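
runScans accumulates per-scan timings into a LongSummaryStatistics, which backs both the raw "f<N>_stats" info entry and the averaged "f<N>" result above. A small sketch of that accumulation pattern (the timed work is elided):

    import java.util.LongSummaryStatistics;

    public class TimingStats {
      public static void main(String[] args) {
        LongSummaryStatistics stats = new LongSummaryStatistics();
        for (int i = 0; i < 50; i++) {
          long start = System.currentTimeMillis();
          // ... perform one scan here ...
          stats.accept(System.currentTimeMillis() - start);
        }
        // getAverage() feeds the result; the whole stats object feeds the info entry.
        System.out.println("avg ms = " + stats.getAverage() + ", max ms = " + stats.getMax());
      }
    }
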
diff --git a/src/main/java/org/apache/accumulo/testing/performance/util/TestData.java b/src/main/java/org/apache/accumulo/testing/performance/util/TestData.java
index 601aaa3..2351084 100644
--- a/src/main/java/org/apache/accumulo/testing/performance/util/TestData.java
+++ b/src/main/java/org/apache/accumulo/testing/performance/util/TestData.java
@@ -42,7 +42,8 @@ public class TestData {
     return FastFormat.toZeroPaddedString(v, 9, 16, EMPTY);
   }
 
-  public static void generate(AccumuloClient client, String tableName, int rows, int fams, int quals) throws Exception {
+  public static void generate(AccumuloClient client, String tableName, int rows, int fams, int quals)
+      throws Exception {
     try (BatchWriter writer = client.createBatchWriter(tableName)) {
       int v = 0;
       for (int r = 0; r < rows; r++) {
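
TestData.generate writes its cells through a BatchWriter obtained from the 2.0 createBatchWriter(tableName) overload and closed by try-with-resources. A minimal sketch of that write loop, assuming the table already exists; row and column contents are illustrative:

    import org.apache.accumulo.core.client.AccumuloClient;
    import org.apache.accumulo.core.client.BatchWriter;
    import org.apache.accumulo.core.data.Mutation;

    public class WriteRows {
      static void generate(AccumuloClient client, String tableName, int rows) throws Exception {
        try (BatchWriter writer = client.createBatchWriter(tableName)) {
          for (int r = 0; r < rows; r++) {
            Mutation m = new Mutation(String.format("row_%09d", r));
            // One family/qualifier per row here; the real generator writes many of each.
            m.put("fam", "qual", "val_" + r);
            writer.addMutation(m);
          }
        } // close() flushes any buffered mutations
      }
    }
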
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/Module.java b/src/main/java/org/apache/accumulo/testing/randomwalk/Module.java
index bd7c149..e15b9f5 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/Module.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/Module.java
@@ -214,7 +214,8 @@ public class Module extends Node {
     else
       maxSec = Integer.parseInt(initProps.getProperty("maxSec", "0"));
 
-    if ((prop = initProps.getProperty("teardown")) == null || prop.equals("true") || prop.equals(""))
+    if ((prop = initProps.getProperty("teardown")) == null || prop.equals("true")
+        || prop.equals(""))
       teardown = true;
     else
       teardown = false;
@@ -271,7 +272,8 @@ public class Module extends Node {
         numHops++;
 
         if (!adjMap.containsKey(curNodeId) && !curNodeId.startsWith("alias.")) {
-          throw new Exception("Reached node(" + curNodeId + ") without outgoing edges in module(" + this + ")");
+          throw new Exception("Reached node(" + curNodeId + ") without outgoing edges in module("
+              + this + ")");
         }
         AdjList adj = adjMap.get(curNodeId);
         String nextNodeId = adj.randomNeighbor();
@@ -315,13 +317,15 @@ public class Module extends Node {
             // Bound the time we'll wait for the node to complete
             nodeException = task.get(secondsRemaining, TimeUnit.SECONDS);
           } catch (InterruptedException e) {
-            log.warn("Interrupted waiting for " + nextNode.getClass().getSimpleName() + " to complete. Exiting.", e);
+            log.warn("Interrupted waiting for " + nextNode.getClass().getSimpleName()
+                + " to complete. Exiting.", e);
             break;
           } catch (ExecutionException e) {
             log.error("Caught error executing " + nextNode.getClass().getSimpleName(), e);
             throw e;
           } catch (TimeoutException e) {
-            log.info("Timed out waiting for " + nextNode.getClass().getSimpleName() + " to complete (waited " + secondsRemaining + " seconds). Exiting.", e);
+            log.info("Timed out waiting for " + nextNode.getClass().getSimpleName()
+                + " to complete (waited " + secondsRemaining + " seconds). Exiting.", e);
             break;
           }
 
@@ -351,7 +355,8 @@ public class Module extends Node {
             String logMsg = "  " + key + ": ";
             if (value == null)
               logMsg += "null";
-            else if (value instanceof String || value instanceof Map || value instanceof Collection || value instanceof Number)
+            else if (value instanceof String || value instanceof Map || value instanceof Collection
+                || value instanceof Number)
               logMsg += value;
             else if (value instanceof byte[])
               logMsg += new String((byte[]) value, UTF_8);
@@ -406,7 +411,8 @@ public class Module extends Node {
         }
         long timeSinceLastProgress = System.currentTimeMillis() - initNode.lastProgress();
         if (timeSinceLastProgress > time) {
-          log.warn("Node " + initNode + " has been running for " + timeSinceLastProgress / 1000.0 + " seconds. You may want to look into it.");
+          log.warn("Node " + initNode + " has been running for " + timeSinceLastProgress / 1000.0
+              + " seconds. You may want to look into it.");
           runningLong.set(true);
         }
       }
@@ -428,7 +434,8 @@ public class Module extends Node {
       }
     }
     if (runningLong.get())
-      log.warn("Node " + nextNode + ", which was running long, has now completed after " + (System.currentTimeMillis() - systemTime) / 1000.0 + " seconds");
+      log.warn("Node " + nextNode + ", which was running long, has now completed after "
+          + (System.currentTimeMillis() - systemTime) / 1000.0 + " seconds");
   }
 
   @Override
@@ -512,7 +519,8 @@ public class Module extends Node {
 
     // set the schema
     SchemaFactory sf = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
-    Schema moduleSchema = sf.newSchema(this.getClass().getClassLoader().getResource("randomwalk/module.xsd"));
+    Schema moduleSchema = sf.newSchema(this.getClass().getClassLoader()
+        .getResource("randomwalk/module.xsd"));
     dbf.setSchema(moduleSchema);
 
     // parse the document
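
Module validates each randomwalk module definition against module.xsd before parsing it. A compact sketch of loading a schema from the classpath and attaching it to a DocumentBuilderFactory; the resource name and file path are assumptions of the sketch:

    import javax.xml.XMLConstants;
    import javax.xml.parsers.DocumentBuilder;
    import javax.xml.parsers.DocumentBuilderFactory;
    import javax.xml.validation.Schema;
    import javax.xml.validation.SchemaFactory;

    import org.w3c.dom.Document;

    public class ValidatedParse {
      static Document parse(String xmlPath) throws Exception {
        SchemaFactory sf = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
        // Load the XSD that ships on the classpath.
        Schema schema = sf.newSchema(
            ValidatedParse.class.getClassLoader().getResource("randomwalk/module.xsd"));

        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        dbf.setNamespaceAware(true);
        dbf.setSchema(schema); // invalid documents are reported through the error handler
        DocumentBuilder db = dbf.newDocumentBuilder();
        return db.parse(xmlPath);
      }
    }
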
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/RandWalkEnv.java b/src/main/java/org/apache/accumulo/testing/randomwalk/RandWalkEnv.java
index da88d45..216b995 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/RandWalkEnv.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/RandWalkEnv.java
@@ -24,8 +24,9 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * The test environment that is available for randomwalk tests. This includes configuration properties that are available to any randomwalk test and facilities
- * for creating client-side objects. This class is not thread-safe.
+ * The test environment that is available for randomwalk tests. This includes configuration
+ * properties that are available to any randomwalk test and facilities for creating client-side
+ * objects. This class is not thread-safe.
  */
 public class RandWalkEnv extends TestEnv {
 
@@ -38,7 +39,8 @@ public class RandWalkEnv extends TestEnv {
   }
 
   /**
-   * Gets a multitable batch writer. The same object is reused after the first call unless it is reset.
+   * Gets a multitable batch writer. The same object is reused after the first call unless it is
+   * reset.
    *
    * @return multitable batch writer
    * @throws NumberFormatException
@@ -46,7 +48,8 @@ public class RandWalkEnv extends TestEnv {
    * @throws NumberFormatException
    *           if any configuration property cannot be parsed
    */
-  public MultiTableBatchWriter getMultiTableBatchWriter() throws AccumuloException, AccumuloSecurityException {
+  public MultiTableBatchWriter getMultiTableBatchWriter() throws AccumuloException,
+      AccumuloSecurityException {
     if (mtbw == null) {
       mtbw = getAccumuloClient().createMultiTableBatchWriter();
     }
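
getMultiTableBatchWriter lazily creates a single MultiTableBatchWriter for the environment and reuses it across nodes. A sketch of how such a writer is typically used, assuming the table exists; row and column values are illustrative:

    import org.apache.accumulo.core.client.AccumuloClient;
    import org.apache.accumulo.core.client.BatchWriter;
    import org.apache.accumulo.core.client.MultiTableBatchWriter;
    import org.apache.accumulo.core.data.Mutation;

    public class MtbwExample {
      static void writeOne(AccumuloClient client, String tableName) throws Exception {
        MultiTableBatchWriter mtbw = client.createMultiTableBatchWriter();
        try {
          // Per-table writers share the MTBW's buffer and are flushed together.
          BatchWriter bw = mtbw.getBatchWriter(tableName);
          Mutation m = new Mutation("row1");
          m.put("fam", "qual", "val");
          bw.addMutation(m);
          mtbw.flush();
        } finally {
          mtbw.close(); // closes every per-table writer it handed out
        }
      }
    }
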
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/State.java b/src/main/java/org/apache/accumulo/testing/randomwalk/State.java
index 37a6285..9b3e781 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/State.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/State.java
@@ -80,7 +80,8 @@ public class State {
   }
 
   /**
-   * Gets the map of state objects. The backing map for state is returned, so changes to it affect the state.
+   * Gets the map of state objects. The backing map for state is returned, so changes to it affect
+   * the state.
    *
    * @return state map
    */
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/BulkImportTest.java b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/BulkImportTest.java
index 0f6ac34..36dae32 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/BulkImportTest.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/BulkImportTest.java
@@ -22,18 +22,20 @@ import org.apache.accumulo.testing.randomwalk.RandWalkEnv;
 import org.apache.accumulo.testing.randomwalk.State;
 
 /**
- * If we have a sufficient back-up of imports, let them work off before adding even more bulk-imports. Imports of PlusOne must always be balanced with imports
- * of MinusOne.
+ * If we have a sufficient back-up of imports, let them work off before adding even more
+ * bulk-imports. Imports of PlusOne must always be balanced with imports of MinusOne.
  */
 public abstract class BulkImportTest extends BulkTest {
 
-  public static final String SKIPPED_IMPORT = "skipped.import", TRUE = Boolean.TRUE.toString(), FALSE = Boolean.FALSE.toString();
+  public static final String SKIPPED_IMPORT = "skipped.import", TRUE = Boolean.TRUE.toString(),
+      FALSE = Boolean.FALSE.toString();
 
   @Override
   public void visit(final State state, RandWalkEnv env, Properties props) throws Exception {
     /**
-     * Each visit() is performed sequentially and then submitted to the threadpool which will have async execution. As long as we're checking the state and
-     * making decisions about what to do before we submit something to the thread pool, we're fine.
+     * Each visit() is performed sequentially and then submitted to the threadpool which will have
+     * async execution. As long as we're checking the state and making decisions about what to do
+     * before we submit something to the thread pool, we're fine.
      */
 
     String lastImportSkipped = state.getString(SKIPPED_IMPORT);
@@ -43,8 +45,9 @@ public abstract class BulkImportTest extends BulkTest {
     // consistency
     if (null != lastImportSkipped) {
       if (!getClass().equals(BulkMinusOne.class)) {
-        throw new IllegalStateException("Should not have a skipped import marker for a class other than " + BulkMinusOne.class.getName() + " but was "
-            + getClass().getName());
+        throw new IllegalStateException(
+            "Should not have a skipped import marker for a class other than "
+                + BulkMinusOne.class.getName() + " but was " + getClass().getName());
       }
 
       if (TRUE.equals(lastImportSkipped)) {
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/BulkPlusOne.java b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/BulkPlusOne.java
index 6e7938e..0ab337e 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/BulkPlusOne.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/BulkPlusOne.java
@@ -73,7 +73,8 @@ public class BulkPlusOne extends BulkImportTest {
       printRows.add(String.format(FMT, row));
 
     String markerColumnQualifier = String.format("%07d", counter.incrementAndGet());
-    log.debug("preparing bulk files with start rows " + printRows + " last row " + String.format(FMT, LOTS - 1) + " marker " + markerColumnQualifier);
+    log.debug("preparing bulk files with start rows " + printRows + " last row "
+        + String.format(FMT, LOTS - 1) + " marker " + markerColumnQualifier);
 
     List<Integer> rows = new ArrayList<>(startRows);
     rows.add(LOTS);
@@ -95,7 +96,8 @@ public class BulkPlusOne extends BulkImportTest {
       }
       writer.close();
     }
-    env.getAccumuloClient().tableOperations().importDirectory(Setup.getTableName(), dir.toString(), fail.toString(), true);
+    env.getAccumuloClient().tableOperations()
+        .importDirectory(Setup.getTableName(), dir.toString(), fail.toString(), true);
     fs.delete(dir, true);
     FileStatus[] failures = fs.listStatus(fail);
     if (failures != null && failures.length > 0) {
@@ -103,7 +105,8 @@ public class BulkPlusOne extends BulkImportTest {
       throw new Exception(failures.length + " failure files found importing files from " + dir);
     }
     fs.delete(fail, true);
-    log.debug("Finished bulk import, start rows " + printRows + " last row " + String.format(FMT, LOTS - 1) + " marker " + markerColumnQualifier);
+    log.debug("Finished bulk import, start rows " + printRows + " last row "
+        + String.format(FMT, LOTS - 1) + " marker " + markerColumnQualifier);
   }
 
   @Override
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/Compact.java b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/Compact.java
index 5fd4294..329a397 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/Compact.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/Compact.java
@@ -27,7 +27,8 @@ public class Compact extends SelectiveBulkTest {
     final Text[] points = Merge.getRandomTabletRange(state);
     final String rangeString = Merge.rangeToString(points);
     log.info("Compacting " + rangeString);
-    env.getAccumuloClient().tableOperations().compact(Setup.getTableName(), points[0], points[1], false, true);
+    env.getAccumuloClient().tableOperations()
+        .compact(Setup.getTableName(), points[0], points[1], false, true);
     log.info("Compaction " + rangeString + " finished");
   }
 
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/ConsistencyCheck.java b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/ConsistencyCheck.java
index eaac7e5..119a3fe 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/ConsistencyCheck.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/ConsistencyCheck.java
@@ -38,7 +38,8 @@ public class ConsistencyCheck extends SelectiveBulkTest {
     log.info("Checking " + row);
     String user = env.getAccumuloClient().whoami();
     Authorizations auths = env.getAccumuloClient().securityOperations().getUserAuthorizations(user);
-    try (Scanner scanner = new IsolatedScanner(env.getAccumuloClient().createScanner(Setup.getTableName(), auths))) {
+    try (Scanner scanner = new IsolatedScanner(env.getAccumuloClient().createScanner(
+        Setup.getTableName(), auths))) {
       scanner.setRange(new Range(row));
       scanner.fetchColumnFamily(BulkPlusOne.CHECK_COLUMN_FAMILY);
       Value v = null;
@@ -49,7 +50,8 @@ public class ConsistencyCheck extends SelectiveBulkTest {
           first = entry.getKey();
         }
         if (!v.equals(entry.getValue()))
-          throw new RuntimeException("Inconsistent value at " + entry.getKey() + " was " + entry.getValue() + " should be " + v + " first read at " + first);
+          throw new RuntimeException("Inconsistent value at " + entry.getKey() + " was "
+              + entry.getValue() + " should be " + v + " first read at " + first);
       }
     }
   }
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/Merge.java b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/Merge.java
index 2468a8d..c9835d6 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/Merge.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/Merge.java
@@ -34,11 +34,13 @@ public class Merge extends SelectiveBulkTest {
   }
 
   public static String rangeToString(Text[] points) {
-    return "(" + (points[0] == null ? "-inf" : points[0]) + " -> " + (points[1] == null ? "+inf" : points[1]) + "]";
+    return "(" + (points[0] == null ? "-inf" : points[0]) + " -> "
+        + (points[1] == null ? "+inf" : points[1]) + "]";
   }
 
   public static Text getRandomRow(Random rand) {
-    return new Text(String.format(BulkPlusOne.FMT, (rand.nextLong() & 0x7fffffffffffffffl) % BulkPlusOne.LOTS));
+    return new Text(String.format(BulkPlusOne.FMT, (rand.nextLong() & 0x7fffffffffffffffl)
+        % BulkPlusOne.LOTS));
   }
 
   public static Text[] getRandomTabletRange(State state) {
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/SelectiveBulkTest.java b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/SelectiveBulkTest.java
index 56f9c33..371b131 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/SelectiveBulkTest.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/SelectiveBulkTest.java
@@ -22,7 +22,8 @@ import org.apache.accumulo.testing.randomwalk.RandWalkEnv;
 import org.apache.accumulo.testing.randomwalk.State;
 
 /**
- * Selectively runs the actual {@link BulkTest} based on the number of active TServers and the number of queued operations.
+ * Selectively runs the actual {@link BulkTest} based on the number of active TServers and the
+ * number of queued operations.
  */
 public abstract class SelectiveBulkTest extends BulkTest {
 
@@ -31,7 +32,8 @@ public abstract class SelectiveBulkTest extends BulkTest {
     if (SelectiveQueueing.shouldQueueOperation(state, env)) {
       super.visit(state, env, props);
     } else {
-      log.debug("Skipping queueing of " + getClass().getSimpleName() + " because of excessive queued tasks already");
+      log.debug("Skipping queueing of " + getClass().getSimpleName()
+          + " because of excessive queued tasks already");
       log.debug("Waiting 30 seconds before continuing");
       try {
         Thread.sleep(30 * 1000);
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/SelectiveQueueing.java b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/SelectiveQueueing.java
index da87f7a..2efc07f 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/SelectiveQueueing.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/SelectiveQueueing.java
@@ -25,7 +25,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * Chooses whether or not an operation should be queued based on the current thread pool queue length and the number of available TServers.
+ * Chooses whether or not an operation should be queued based on the current thread pool queue
+ * length and the number of available TServers.
  */
 public class SelectiveQueueing {
   private static final Logger log = LoggerFactory.getLogger(SelectiveQueueing.class);
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/Split.java b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/Split.java
index 0f42b0b..1aac330 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/Split.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/Split.java
@@ -32,7 +32,8 @@ public class Split extends SelectiveBulkTest {
     Random rand = (Random) state.get("rand");
     int count = rand.nextInt(20);
     for (int i = 0; i < count; i++)
-      splits.add(new Text(String.format(BulkPlusOne.FMT, (rand.nextLong() & 0x7fffffffffffffffl) % BulkPlusOne.LOTS)));
+      splits.add(new Text(String.format(BulkPlusOne.FMT, (rand.nextLong() & 0x7fffffffffffffffl)
+          % BulkPlusOne.LOTS)));
     log.info("splitting " + splits);
     env.getAccumuloClient().tableOperations().addSplits(Setup.getTableName(), splits);
     log.info("split for " + splits + " finished");
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/Verify.java b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/Verify.java
index 818e723..e82d1bd 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/Verify.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/bulk/Verify.java
@@ -26,6 +26,7 @@ import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.accumulo.core.cli.ClientOnRequiredTable;
+import org.apache.accumulo.core.client.AccumuloClient;
 import org.apache.accumulo.core.client.RowIterator;
 import org.apache.accumulo.core.client.Scanner;
 import org.apache.accumulo.core.data.Key;
@@ -38,7 +39,7 @@ import org.apache.hadoop.io.Text;
 
 public class Verify extends Test {
 
-  static byte[] zero = new byte[] {'0'};
+  private static byte[] zero = new byte[] {'0'};
 
   @Override
   public void visit(State state, RandWalkEnv env, Properties props) throws Exception {
@@ -86,7 +87,8 @@ public class Verify extends Test {
         long curr = Long.parseLong(entry.getKey().getColumnQualifier().toString());
 
         if (curr - 1 != prev)
-          throw new Exception("Bad marker count " + entry.getKey() + " " + entry.getValue() + " " + prev);
+          throw new Exception("Bad marker count " + entry.getKey() + " " + entry.getValue() + " "
+              + prev);
 
         if (!entry.getValue().toString().equals("1"))
           throw new Exception("Bad marker value " + entry.getKey() + " " + entry.getValue());
@@ -95,7 +97,8 @@ public class Verify extends Test {
       }
 
       if (BulkPlusOne.counter.get() != prev) {
-        throw new Exception("Row " + rowText + " does not have all markers " + BulkPlusOne.counter.get() + " " + prev);
+        throw new Exception("Row " + rowText + " does not have all markers "
+            + BulkPlusOne.counter.get() + " " + prev);
       }
     }
 
@@ -106,37 +109,39 @@ public class Verify extends Test {
   public static void main(String args[]) throws Exception {
     ClientOnRequiredTable opts = new ClientOnRequiredTable();
     opts.parseArgs(Verify.class.getName(), args);
-    Scanner scanner = opts.getClient().createScanner(opts.getTableName(), opts.auths);
-    scanner.fetchColumnFamily(BulkPlusOne.CHECK_COLUMN_FAMILY);
-    Text startBadRow = null;
-    Text lastBadRow = null;
-    Value currentBadValue = null;
-    for (Entry<Key,Value> entry : scanner) {
-      // System.out.println("Entry: " + entry);
-      byte[] value = entry.getValue().get();
-      if (!Arrays.equals(value, zero)) {
-        if (currentBadValue == null || entry.getValue().equals(currentBadValue)) {
-          // same value, keep skipping ahead
-          lastBadRow = new Text(entry.getKey().getRow());
-          if (startBadRow == null)
-            startBadRow = lastBadRow;
+    try (AccumuloClient client = opts.createClient()) {
+      Scanner scanner = client.createScanner(opts.getTableName(), opts.auths);
+      scanner.fetchColumnFamily(BulkPlusOne.CHECK_COLUMN_FAMILY);
+      Text startBadRow = null;
+      Text lastBadRow = null;
+      Value currentBadValue = null;
+      for (Entry<Key,Value> entry : scanner) {
+        // System.out.println("Entry: " + entry);
+        byte[] value = entry.getValue().get();
+        if (!Arrays.equals(value, zero)) {
+          if (currentBadValue == null || entry.getValue().equals(currentBadValue)) {
+            // same value, keep skipping ahead
+            lastBadRow = new Text(entry.getKey().getRow());
+            if (startBadRow == null)
+              startBadRow = lastBadRow;
+          } else {
+            // new bad value, report
+            report(startBadRow, lastBadRow, currentBadValue);
+            startBadRow = lastBadRow = new Text(entry.getKey().getRow());
+          }
+          currentBadValue = new Value(entry.getValue());
         } else {
-          // new bad value, report
-          report(startBadRow, lastBadRow, currentBadValue);
-          startBadRow = lastBadRow = new Text(entry.getKey().getRow());
+          // end of bad range, report
+          if (startBadRow != null) {
+            report(startBadRow, lastBadRow, currentBadValue);
+          }
+          startBadRow = lastBadRow = null;
+          currentBadValue = null;
         }
-        currentBadValue = new Value(entry.getValue());
-      } else {
-        // end of bad range, report
-        if (startBadRow != null) {
-          report(startBadRow, lastBadRow, currentBadValue);
-        }
-        startBadRow = lastBadRow = null;
-        currentBadValue = null;
       }
-    }
-    if (startBadRow != null) {
-      report(startBadRow, lastBadRow, currentBadValue);
+      if (startBadRow != null) {
+        report(startBadRow, lastBadRow, currentBadValue);
+      }
     }
   }
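
The rewritten main() above shows the pattern this commit applies throughout: create the client from the parsed options and close it with try-with-resources, keeping scanners inside that scope. A stripped-down sketch of the same shape using the generic 2.0 client builder; the properties path and table name are assumptions of the sketch:

    import java.util.Map.Entry;

    import org.apache.accumulo.core.client.Accumulo;
    import org.apache.accumulo.core.client.AccumuloClient;
    import org.apache.accumulo.core.client.Scanner;
    import org.apache.accumulo.core.data.Key;
    import org.apache.accumulo.core.data.Value;
    import org.apache.accumulo.core.security.Authorizations;

    public class ScanWithClient {
      public static void main(String[] args) throws Exception {
        String props = args[0]; // path to accumulo-client.properties
        String table = args[1];
        // The client is AutoCloseable in 2.0, so the scan lives entirely inside its scope.
        try (AccumuloClient client = Accumulo.newClient().from(props).build();
            Scanner scanner = client.createScanner(table, Authorizations.EMPTY)) {
          for (Entry<Key,Value> entry : scanner) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
          }
        }
      }
    }
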
 
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/BulkImport.java b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/BulkImport.java
index 7715f32..b618ca2 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/BulkImport.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/BulkImport.java
@@ -57,7 +57,8 @@ public class BulkImport extends Test {
     public void addMutation(Mutation m) throws MutationsRejectedException {
       List<ColumnUpdate> updates = m.getUpdates();
       for (ColumnUpdate cu : updates) {
-        Key key = new Key(m.getRow(), cu.getColumnFamily(), cu.getColumnQualifier(), cu.getColumnVisibility(), 42, false, false);
+        Key key = new Key(m.getRow(), cu.getColumnFamily(), cu.getColumnQualifier(),
+            cu.getColumnVisibility(), 42, false, false);
         Value val = new Value(cu.getValue(), false);
 
         try {
@@ -101,13 +102,15 @@ public class BulkImport extends Test {
 
     FileSystem fs = FileSystem.get(env.getHadoopConfiguration());
 
-    String bulkDir = "/tmp/concurrent_bulk/b_" + String.format("%016x", rand.nextLong() & 0x7fffffffffffffffl);
+    String bulkDir = "/tmp/concurrent_bulk/b_"
+        + String.format("%016x", rand.nextLong() & 0x7fffffffffffffffl);
 
     fs.mkdirs(new Path(bulkDir));
     fs.mkdirs(new Path(bulkDir + "_f"));
 
     try {
-      BatchWriter bw = new RFileBatchWriter(env.getHadoopConfiguration(), fs, bulkDir + "/file01.rf");
+      BatchWriter bw = new RFileBatchWriter(env.getHadoopConfiguration(), fs, bulkDir
+          + "/file01.rf");
       try {
         TreeSet<Long> rows = new TreeSet<>();
         int numRows = rand.nextInt(100000);
@@ -128,7 +131,8 @@ public class BulkImport extends Test {
         bw.close();
       }
 
-      client.tableOperations().importDirectory(tableName, bulkDir, bulkDir + "_f", rand.nextBoolean());
+      client.tableOperations().importDirectory(tableName, bulkDir, bulkDir + "_f",
+          rand.nextBoolean());
 
       log.debug("BulkImported to " + tableName);
     } catch (TableNotFoundException e) {
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/ChangeAuthorizations.java b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/ChangeAuthorizations.java
index 4522c2d..0fbf24f 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/ChangeAuthorizations.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/ChangeAuthorizations.java
@@ -43,7 +43,8 @@ public class ChangeAuthorizations extends Test {
 
     String userName = userNames.get(rand.nextInt(userNames.size()));
     try {
-      List<byte[]> auths = new ArrayList<>(client.securityOperations().getUserAuthorizations(userName).getAuthorizations());
+      List<byte[]> auths = new ArrayList<>(client.securityOperations()
+          .getUserAuthorizations(userName).getAuthorizations());
 
       if (rand.nextBoolean()) {
         String authorization = String.format("a%d", rand.nextInt(5000));
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/ChangePermissions.java b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/ChangePermissions.java
index ca3c5c3..e3a3d7c 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/ChangePermissions.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/ChangePermissions.java
@@ -65,7 +65,8 @@ public class ChangePermissions extends Test {
     }
   }
 
-  private void changeTablePermission(AccumuloClient client, Random rand, String userName, String tableName) throws AccumuloException, AccumuloSecurityException {
+  private void changeTablePermission(AccumuloClient client, Random rand, String userName,
+      String tableName) throws AccumuloException, AccumuloSecurityException {
 
     EnumSet<TablePermission> perms = EnumSet.noneOf(TablePermission.class);
     for (TablePermission p : TablePermission.values()) {
@@ -91,7 +92,8 @@ public class ChangePermissions extends Test {
     }
   }
 
-  private void changeSystemPermission(AccumuloClient client, Random rand, String userName) throws AccumuloException, AccumuloSecurityException {
+  private void changeSystemPermission(AccumuloClient client, Random rand, String userName)
+      throws AccumuloException, AccumuloSecurityException {
     EnumSet<SystemPermission> perms = EnumSet.noneOf(SystemPermission.class);
     for (SystemPermission p : SystemPermission.values()) {
       if (client.securityOperations().hasSystemPermission(userName, p))
@@ -117,8 +119,8 @@ public class ChangePermissions extends Test {
     }
   }
 
-  private void changeNamespacePermission(AccumuloClient client, Random rand, String userName, String namespace) throws AccumuloException,
-      AccumuloSecurityException {
+  private void changeNamespacePermission(AccumuloClient client, Random rand, String userName,
+      String namespace) throws AccumuloException, AccumuloSecurityException {
 
     EnumSet<NamespacePermission> perms = EnumSet.noneOf(NamespacePermission.class);
     for (NamespacePermission p : NamespacePermission.values()) {
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/CheckPermission.java b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/CheckPermission.java
index 74dce76..ec65a18 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/CheckPermission.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/CheckPermission.java
@@ -53,14 +53,16 @@ public class CheckPermission extends Test {
       int dice = rand.nextInt(2);
       if (dice == 0) {
         log.debug("Checking systerm permission " + userName);
-        client.securityOperations().hasSystemPermission(userName, SystemPermission.values()[rand.nextInt(SystemPermission.values().length)]);
+        client.securityOperations().hasSystemPermission(userName,
+            SystemPermission.values()[rand.nextInt(SystemPermission.values().length)]);
       } else if (dice == 1) {
         log.debug("Checking table permission " + userName + " " + tableName);
-        client.securityOperations().hasTablePermission(userName, tableName, TablePermission.values()[rand.nextInt(TablePermission.values().length)]);
+        client.securityOperations().hasTablePermission(userName, tableName,
+            TablePermission.values()[rand.nextInt(TablePermission.values().length)]);
       } else if (dice == 2) {
         log.debug("Checking namespace permission " + userName + " " + namespace);
-        client.securityOperations()
-            .hasNamespacePermission(userName, namespace, NamespacePermission.values()[rand.nextInt(NamespacePermission.values().length)]);
+        client.securityOperations().hasNamespacePermission(userName, namespace,
+            NamespacePermission.values()[rand.nextInt(NamespacePermission.values().length)]);
       }
 
     } catch (AccumuloSecurityException ex) {
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/CloneTable.java b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/CloneTable.java
index 2ed8175..811ef7b 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/CloneTable.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/CloneTable.java
@@ -48,7 +48,8 @@ public class CloneTable extends Test {
 
     try {
       log.debug("Cloning table " + srcTableName + " " + newTableName + " " + flush);
-      client.tableOperations().clone(srcTableName, newTableName, flush, new HashMap<>(), new HashSet<>());
+      client.tableOperations().clone(srcTableName, newTableName, flush, new HashMap<>(),
+          new HashSet<>());
     } catch (TableExistsException e) {
       log.debug("Clone " + srcTableName + " failed, " + newTableName + " exists");
     } catch (TableNotFoundException e) {
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Compact.java b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Compact.java
index 27080a4..9b5980f 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Compact.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Compact.java
@@ -46,11 +46,14 @@ public class Compact extends Test {
     try {
       boolean wait = rand.nextBoolean();
       client.tableOperations().compact(tableName, range.get(0), range.get(1), false, wait);
-      log.debug((wait ? "compacted " : "initiated compaction ") + tableName + " from " + range.get(0) + " to " + range.get(1));
+      log.debug((wait ? "compacted " : "initiated compaction ") + tableName + " from "
+          + range.get(0) + " to " + range.get(1));
     } catch (TableNotFoundException tne) {
-      log.debug("compact " + tableName + " from " + range.get(0) + " to " + range.get(1) + " failed, doesnt exist");
+      log.debug("compact " + tableName + " from " + range.get(0) + " to " + range.get(1)
+          + " failed, doesnt exist");
     } catch (TableOfflineException toe) {
-      log.debug("compact " + tableName + " from " + range.get(0) + " to " + range.get(1) + " failed, offline");
+      log.debug("compact " + tableName + " from " + range.get(0) + " to " + range.get(1)
+          + " failed, offline");
     }
 
   }
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/ConcurrentFixture.java b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/ConcurrentFixture.java
index b6df085..edaf632 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/ConcurrentFixture.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/ConcurrentFixture.java
@@ -26,7 +26,8 @@ import org.apache.accumulo.testing.randomwalk.State;
 import org.apache.hadoop.io.Text;
 
 /**
- * When multiple instance of this test suite are run, all instances will operate on the same set of table names.
+ * When multiple instance of this test suite are run, all instances will operate on the same set of
+ * table names.
  *
  *
  */
@@ -43,7 +44,8 @@ public class ConcurrentFixture extends Fixture {
    *
    * @param rand
    *          A Random to use
-   * @return A two element list with first being smaller than the second, but either value (or both) can be null
+   * @return A two element list with first being smaller than the second, but either value (or both)
+   *         can be null
    */
   public static List<Text> generateRange(Random rand) {
     ArrayList<Text> toRet = new ArrayList<>(2);
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Config.java b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Config.java
index c475c59..48d8eea 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Config.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Config.java
@@ -115,7 +115,8 @@ public class Config extends Test {
       int choice = Integer.parseInt(lastSetting.toString());
       Property property = settings[choice].property;
       log.debug("Setting " + property.getKey() + " back to " + property.getDefaultValue());
-      env.getAccumuloClient().instanceOperations().setProperty(property.getKey(), property.getDefaultValue());
+      env.getAccumuloClient().instanceOperations()
+          .setProperty(property.getKey(), property.getDefaultValue());
     }
     lastSetting = state.getOkIfAbsent(LAST_TABLE_SETTING);
     if (lastSetting != null) {
@@ -124,9 +125,11 @@ public class Config extends Test {
       int choice = Integer.parseInt(parts[1]);
       Property property = tableSettings[choice].property;
       if (env.getAccumuloClient().tableOperations().exists(table)) {
-        log.debug("Setting " + property.getKey() + " on " + table + " back to " + property.getDefaultValue());
+        log.debug("Setting " + property.getKey() + " on " + table + " back to "
+            + property.getDefaultValue());
         try {
-          env.getAccumuloClient().tableOperations().setProperty(table, property.getKey(), property.getDefaultValue());
+          env.getAccumuloClient().tableOperations()
+              .setProperty(table, property.getKey(), property.getDefaultValue());
         } catch (AccumuloException ex) {
           if (ex.getCause() instanceof ThriftTableOperationException) {
             ThriftTableOperationException ttoe = (ThriftTableOperationException) ex.getCause();
@@ -144,9 +147,11 @@ public class Config extends Test {
       int choice = Integer.parseInt(parts[1]);
       Property property = tableSettings[choice].property;
       if (env.getAccumuloClient().namespaceOperations().exists(namespace)) {
-        log.debug("Setting " + property.getKey() + " on " + namespace + " back to " + property.getDefaultValue());
+        log.debug("Setting " + property.getKey() + " on " + namespace + " back to "
+            + property.getDefaultValue());
         try {
-          env.getAccumuloClient().namespaceOperations().setProperty(namespace, property.getKey(), property.getDefaultValue());
+          env.getAccumuloClient().namespaceOperations()
+              .setProperty(namespace, property.getKey(), property.getDefaultValue());
         } catch (AccumuloException ex) {
           if (ex.getCause() instanceof ThriftTableOperationException) {
             ThriftTableOperationException ttoe = (ThriftTableOperationException) ex.getCause();
@@ -171,13 +176,15 @@ public class Config extends Test {
     }
   }
 
-  private void changeTableSetting(RandomDataGenerator random, State state, RandWalkEnv env, Properties props) throws Exception {
+  private void changeTableSetting(RandomDataGenerator random, State state, RandWalkEnv env,
+      Properties props) throws Exception {
     // pick a random property
     int choice = random.nextInt(0, tableSettings.length - 1);
     Setting setting = tableSettings[choice];
 
     // pick a random table
-    SortedSet<String> tables = env.getAccumuloClient().tableOperations().list().tailSet("ctt").headSet("ctu");
+    SortedSet<String> tables = env.getAccumuloClient().tableOperations().list().tailSet("ctt")
+        .headSet("ctu");
     if (tables.isEmpty())
       return;
     String table = random.nextSample(tables, 1)[0].toString();
@@ -187,7 +194,8 @@ public class Config extends Test {
     state.set(LAST_TABLE_SETTING, table + "," + choice);
     log.debug("Setting " + setting.property.getKey() + " on table " + table + " to " + newValue);
     try {
-      env.getAccumuloClient().tableOperations().setProperty(table, setting.property.getKey(), "" + newValue);
+      env.getAccumuloClient().tableOperations()
+          .setProperty(table, setting.property.getKey(), "" + newValue);
     } catch (AccumuloException ex) {
       if (ex.getCause() instanceof ThriftTableOperationException) {
         ThriftTableOperationException ttoe = (ThriftTableOperationException) ex.getCause();
@@ -198,13 +206,15 @@ public class Config extends Test {
     }
   }
 
-  private void changeNamespaceSetting(RandomDataGenerator random, State state, RandWalkEnv env, Properties props) throws Exception {
+  private void changeNamespaceSetting(RandomDataGenerator random, State state, RandWalkEnv env,
+      Properties props) throws Exception {
     // pick a random property
     int choice = random.nextInt(0, tableSettings.length - 1);
     Setting setting = tableSettings[choice];
 
     // pick a random table
-    SortedSet<String> namespaces = env.getAccumuloClient().namespaceOperations().list().tailSet("nspc").headSet("nspd");
+    SortedSet<String> namespaces = env.getAccumuloClient().namespaceOperations().list()
+        .tailSet("nspc").headSet("nspd");
     if (namespaces.isEmpty())
       return;
     String namespace = random.nextSample(namespaces, 1)[0].toString();
@@ -212,9 +222,11 @@ public class Config extends Test {
     // generate a random value
     long newValue = random.nextLong(setting.min, setting.max);
     state.set(LAST_NAMESPACE_SETTING, namespace + "," + choice);
-    log.debug("Setting " + setting.property.getKey() + " on namespace " + namespace + " to " + newValue);
+    log.debug("Setting " + setting.property.getKey() + " on namespace " + namespace + " to "
+        + newValue);
     try {
-      env.getAccumuloClient().namespaceOperations().setProperty(namespace, setting.property.getKey(), "" + newValue);
+      env.getAccumuloClient().namespaceOperations()
+          .setProperty(namespace, setting.property.getKey(), "" + newValue);
     } catch (AccumuloException ex) {
       if (ex.getCause() instanceof ThriftTableOperationException) {
         ThriftTableOperationException ttoe = (ThriftTableOperationException) ex.getCause();
@@ -225,7 +237,8 @@ public class Config extends Test {
     }
   }
 
-  private void changeSetting(RandomDataGenerator random, State state, RandWalkEnv env, Properties props) throws Exception {
+  private void changeSetting(RandomDataGenerator random, State state, RandWalkEnv env,
+      Properties props) throws Exception {
     // pick a random property
     int choice = random.nextInt(0, settings.length - 1);
     Setting setting = settings[choice];
@@ -233,7 +246,8 @@ public class Config extends Test {
     long newValue = random.nextLong(setting.min, setting.max);
     state.set(LAST_SETTING, "" + choice);
     log.debug("Setting " + setting.property.getKey() + " to " + newValue);
-    env.getAccumuloClient().instanceOperations().setProperty(setting.property.getKey(), "" + newValue);
+    env.getAccumuloClient().instanceOperations()
+        .setProperty(setting.property.getKey(), "" + newValue);
   }
 
 }
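
Config flips random instance, table, and namespace properties and later restores their defaults. A minimal sketch of setting and then restoring a single instance property, assuming the client's user has the required permission; the property and value are illustrative:

    import org.apache.accumulo.core.client.AccumuloClient;
    import org.apache.accumulo.core.conf.Property;

    public class ToggleProperty {
      static void bounce(AccumuloClient client) throws Exception {
        Property prop = Property.TSERV_MINTHREADS; // illustrative choice
        // Set a new value ...
        client.instanceOperations().setProperty(prop.getKey(), "64");
        // ... and later put the compiled-in default back.
        client.instanceOperations().setProperty(prop.getKey(), prop.getDefaultValue());
      }
    }
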
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/IsolatedScan.java b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/IsolatedScan.java
index 7f91622..27e8d9c 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/IsolatedScan.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/IsolatedScan.java
@@ -49,7 +49,8 @@ public class IsolatedScan extends Test {
     String tableName = tableNames.get(rand.nextInt(tableNames.size()));
 
     try {
-      RowIterator iter = new RowIterator(new IsolatedScanner(client.createScanner(tableName, Authorizations.EMPTY)));
+      RowIterator iter = new RowIterator(new IsolatedScanner(client.createScanner(tableName,
+          Authorizations.EMPTY)));
 
       while (iter.hasNext()) {
         PeekingIterator<Entry<Key,Value>> row = new PeekingIterator<>(iter.next());
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Merge.java b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Merge.java
index 900ced6..d7803dc 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Merge.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Merge.java
@@ -50,9 +50,11 @@ public class Merge extends Test {
       client.tableOperations().merge(tableName, range.get(0), range.get(1));
       log.debug("merged " + tableName + " from " + range.get(0) + " to " + range.get(1));
     } catch (TableOfflineException toe) {
-      log.debug("merge " + tableName + " from " + range.get(0) + " to " + range.get(1) + " failed, table is not online");
+      log.debug("merge " + tableName + " from " + range.get(0) + " to " + range.get(1)
+          + " failed, table is not online");
     } catch (TableNotFoundException tne) {
-      log.debug("merge " + tableName + " from " + range.get(0) + " to " + range.get(1) + " failed, doesnt exist");
+      log.debug("merge " + tableName + " from " + range.get(0) + " to " + range.get(1)
+          + " failed, doesnt exist");
     }
 
   }
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Replication.java b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Replication.java
index 85f1fe6..d77675d 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Replication.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/concurrent/Replication.java
@@ -68,7 +68,9 @@ public class Replication extends Test {
 
     // Replicate to ourselves
     iOps.setProperty(REPLICATION_NAME.getKey(), instName);
-    iOps.setProperty(REPLICATION_PEERS.getKey() + instName, "org.apache.accumulo.tserver.replication.AccumuloReplicaSystem," + instName + "," + zookeepers);
+    iOps.setProperty(REPLICATION_PEERS.getKey() + instName,
+        "org.apache.accumulo.tserver.replication.AccumuloReplicaSystem," + instName + ","
+            + zookeepers);
     iOps.setProperty(REPLICATION_PEER_USER.getKey() + instName, env.getAccumuloUserName());
     iOps.setProperty(REPLICATION_PEER_PASSWORD.getKey() + instName, env.getAccumuloPassword());
     // Tweak some replication parameters to make the replication go faster
@@ -176,7 +178,8 @@ public class Replication extends Test {
   // junit isn't a dependency
   private void assertEquals(int expected, int actual) {
     if (expected != actual)
-      throw new RuntimeException(String.format("%d fails to match expected value %d", actual, expected));
+      throw new RuntimeException(String.format("%d fails to match expected value %d", actual,
+          expected));
   }
 
   // junit isn't a dependency
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/conditional/Setup.java b/src/main/java/org/apache/accumulo/testing/randomwalk/conditional/Setup.java
index 7651144..631d31f 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/conditional/Setup.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/conditional/Setup.java
@@ -49,11 +49,13 @@ public class Setup extends Test {
       env.getAccumuloClient().tableOperations().create(tableName);
       log.debug("created table " + tableName);
       boolean blockCache = rand.nextBoolean();
-      env.getAccumuloClient().tableOperations().setProperty(tableName, Property.TABLE_BLOCKCACHE_ENABLED.getKey(), blockCache + "");
+      env.getAccumuloClient().tableOperations()
+          .setProperty(tableName, Property.TABLE_BLOCKCACHE_ENABLED.getKey(), blockCache + "");
       log.debug("set " + Property.TABLE_BLOCKCACHE_ENABLED.getKey() + " " + blockCache);
     } catch (TableExistsException tee) {}
 
-    ConditionalWriter cw = env.getAccumuloClient().createConditionalWriter(tableName, new ConditionalWriterConfig().setMaxWriteThreads(1));
+    ConditionalWriter cw = env.getAccumuloClient().createConditionalWriter(tableName,
+        new ConditionalWriterConfig().setMaxWriteThreads(1));
     state.set("cw", cw);
 
   }
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/conditional/Transfer.java b/src/main/java/org/apache/accumulo/testing/randomwalk/conditional/Transfer.java
index 464d396..d16d283 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/conditional/Transfer.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/conditional/Transfer.java
@@ -111,10 +111,12 @@ public class Transfer extends Test {
 
       int amt = rand.nextInt(50);
 
-      log.debug("transfer req " + bank + " " + amt + " " + acct1 + " " + a1 + " " + acct2 + " " + a2);
+      log.debug("transfer req " + bank + " " + amt + " " + acct1 + " " + a1 + " " + acct2 + " "
+          + a2);
 
       if (a1.bal >= amt) {
-        ConditionalMutation cm = new ConditionalMutation(bank, new Condition(acct1, "seq").setValue(Utils.getSeq(a1.seq)),
+        ConditionalMutation cm = new ConditionalMutation(bank,
+            new Condition(acct1, "seq").setValue(Utils.getSeq(a1.seq)),
             new Condition(acct2, "seq").setValue(Utils.getSeq(a2.seq)));
         cm.put(acct1, "bal", (a1.bal - amt) + "");
         cm.put(acct2, "bal", (a2.bal + amt) + "");
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/conditional/Verify.java b/src/main/java/org/apache/accumulo/testing/randomwalk/conditional/Verify.java
index 4366d2c..d3b89d9 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/conditional/Verify.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/conditional/Verify.java
@@ -50,7 +50,8 @@ public class Verify extends Test {
 
   }
 
-  private void verifyBank(String table, AccumuloClient client, String row, int numAccts) throws TableNotFoundException, Exception {
+  private void verifyBank(String table, AccumuloClient client, String row, int numAccts)
+      throws TableNotFoundException, Exception {
     log.debug("Verifying bank " + row);
 
     int count = 0;
@@ -83,7 +84,8 @@ public class Verify extends Test {
       throw new Exception("Sum is off " + sum);
     }
 
-    log.debug("Verified " + row + " count = " + count + " sum = " + sum + " min = " + min + " max = " + max);
+    log.debug("Verified " + row + " count = " + count + " sum = " + sum + " min = " + min
+        + " max = " + max);
   }
 
 }
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/image/Commit.java b/src/main/java/org/apache/accumulo/testing/randomwalk/image/Commit.java
index 7097b90..f3e4318 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/image/Commit.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/image/Commit.java
@@ -28,7 +28,8 @@ public class Commit extends Test {
   public void visit(State state, RandWalkEnv env, Properties props) throws Exception {
     env.getMultiTableBatchWriter().flush();
 
-    log.debug("Committed " + state.getLong("numWrites") + " writes.  Total writes: " + state.getLong("totalWrites"));
+    log.debug("Committed " + state.getLong("numWrites") + " writes.  Total writes: "
+        + state.getLong("totalWrites"));
     state.set("numWrites", Long.valueOf(0));
   }
 
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/image/ImageFixture.java b/src/main/java/org/apache/accumulo/testing/randomwalk/image/ImageFixture.java
index 4d9f7bd..6e6a9b7 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/image/ImageFixture.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/image/ImageFixture.java
@@ -61,7 +61,8 @@ public class ImageFixture extends Fixture {
     try {
       client.tableOperations().create(imageTableName);
       client.tableOperations().addSplits(imageTableName, splits);
-      log.debug("Created table " + imageTableName + " (id:" + client.tableOperations().tableIdMap().get(imageTableName) + ")");
+      log.debug("Created table " + imageTableName + " (id:"
+          + client.tableOperations().tableIdMap().get(imageTableName) + ")");
     } catch (TableExistsException e) {
       log.error("Table " + imageTableName + " already exists.");
       throw e;
@@ -69,7 +70,8 @@ public class ImageFixture extends Fixture {
 
     try {
       client.tableOperations().create(indexTableName);
-      log.debug("Created table " + indexTableName + " (id:" + client.tableOperations().tableIdMap().get(indexTableName) + ")");
+      log.debug("Created table " + indexTableName + " (id:"
+          + client.tableOperations().tableIdMap().get(indexTableName) + ")");
     } catch (TableExistsException e) {
       log.error("Table " + imageTableName + " already exists.");
       throw e;
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/image/ScanMeta.java b/src/main/java/org/apache/accumulo/testing/randomwalk/image/ScanMeta.java
index 7e56668..1355381 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/image/ScanMeta.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/image/ScanMeta.java
@@ -102,7 +102,8 @@ public class ScanMeta extends Test {
     if (!hashes.equals(hashes2)) {
       log.error("uuids from doc table : " + hashes.values());
       log.error("uuids from index     : " + hashes2.values());
-      throw new Exception("Mismatch between document table and index " + indexTableName + " " + imageTableName);
+      throw new Exception("Mismatch between document table and index " + indexTableName + " "
+          + imageTableName);
     }
 
     indexScanner.close();
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/image/Verify.java b/src/main/java/org/apache/accumulo/testing/randomwalk/image/Verify.java
index 8039e7a..5b45027 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/image/Verify.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/image/Verify.java
@@ -83,7 +83,8 @@ public class Verify extends Test {
     if (count != numVerifications && curRow != null) {
       Text lastRow = (Text) state.get("lastIndexRow");
       if (lastRow.compareTo(curRow) != 0) {
-        log.error("Verified only " + count + " of " + numVerifications + " - curRow " + curRow + " lastKey " + lastRow);
+        log.error("Verified only " + count + " of " + numVerifications + " - curRow " + curRow
+            + " lastKey " + lastRow);
       }
     }
 
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/image/Write.java b/src/main/java/org/apache/accumulo/testing/randomwalk/image/Write.java
index ea24cb3..ae3be22 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/image/Write.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/image/Write.java
@@ -63,7 +63,8 @@ public class Write extends Test {
     m.put(CONTENT_COLUMN_FAMILY, IMAGE_COLUMN_QUALIFIER, new Value(imageBytes));
 
     // store size
-    m.put(META_COLUMN_FAMILY, new Text("size"), new Value(String.format("%d", numBytes).getBytes(UTF_8)));
+    m.put(META_COLUMN_FAMILY, new Text("size"),
+        new Value(String.format("%d", numBytes).getBytes(UTF_8)));
 
     // store hash
     MessageDigest alg = MessageDigest.getInstance("SHA-1");
@@ -77,7 +78,8 @@ public class Write extends Test {
     state.set("totalWrites", totalWrites);
 
     // set count
-    m.put(META_COLUMN_FAMILY, COUNT_COLUMN_QUALIFIER, new Value(String.format("%d", totalWrites).getBytes(UTF_8)));
+    m.put(META_COLUMN_FAMILY, COUNT_COLUMN_QUALIFIER, new Value(String.format("%d", totalWrites)
+        .getBytes(UTF_8)));
 
     // add mutation
     imagesBW.addMutation(m);
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/multitable/CopyTool.java b/src/main/java/org/apache/accumulo/testing/randomwalk/multitable/CopyTool.java
index 53b54e6..2ac69fb 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/multitable/CopyTool.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/multitable/CopyTool.java
@@ -20,8 +20,8 @@ import java.io.IOException;
 import java.util.Properties;
 
 import org.apache.accumulo.core.client.Accumulo;
-import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
-import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
+import org.apache.accumulo.hadoop.mapreduce.AccumuloInputFormat;
+import org.apache.accumulo.hadoop.mapreduce.AccumuloOutputFormat;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Value;
@@ -58,19 +58,19 @@ public class CopyTool extends Configured implements Tool {
 
     Properties props = Accumulo.newClientProperties().from(args[0]).build();
     job.setInputFormatClass(AccumuloInputFormat.class);
-    AccumuloInputFormat.setClientProperties(job, props);
-    AccumuloInputFormat.setInputTableName(job, args[1]);
-    AccumuloInputFormat.setScanAuthorizations(job, Authorizations.EMPTY);
+
+    AccumuloInputFormat.configure().clientProperties(props).table(args[1])
+        .auths(Authorizations.EMPTY).store(job);
 
     job.setMapperClass(SeqMapClass.class);
     job.setMapOutputKeyClass(Text.class);
     job.setMapOutputValueClass(Mutation.class);
     job.setNumReduceTasks(0);
+    job.getConfiguration().set("mapreduce.job.classloader", "true");
 
     job.setOutputFormatClass(AccumuloOutputFormat.class);
-    AccumuloOutputFormat.setClientProperties(job, props);
-    AccumuloOutputFormat.setCreateTables(job, true);
-    AccumuloOutputFormat.setDefaultTableName(job, args[2]);
+    AccumuloOutputFormat.configure().clientProperties(props).createTables(true)
+        .defaultTable(args[2]).store(job);
 
     job.waitForCompletion(true);
     return job.isSuccessful() ? 0 : 1;
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/multitable/MultiTableFixture.java b/src/main/java/org/apache/accumulo/testing/randomwalk/multitable/MultiTableFixture.java
index 0a00250..59ee619 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/multitable/MultiTableFixture.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/multitable/MultiTableFixture.java
@@ -35,7 +35,8 @@ public class MultiTableFixture extends Fixture {
 
     String hostname = InetAddress.getLocalHost().getHostName().replaceAll("[-.]", "_");
 
-    state.set("tableNamePrefix", String.format("multi_%s_%s_%d", hostname, env.getPid(), System.currentTimeMillis()));
+    state.set("tableNamePrefix",
+        String.format("multi_%s_%s_%d", hostname, env.getPid(), System.currentTimeMillis()));
     state.set("nextId", Integer.valueOf(0));
     state.set("numWrites", Long.valueOf(0));
     state.set("totalWrites", Long.valueOf(0));
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/security/AlterTable.java b/src/main/java/org/apache/accumulo/testing/randomwalk/security/AlterTable.java
index 72ccbc5..d01a6fc 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/security/AlterTable.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/security/AlterTable.java
@@ -36,40 +36,46 @@ public class AlterTable extends Test {
   @Override
   public void visit(State state, RandWalkEnv env, Properties props) throws Exception {
     String systemUser = WalkingSecurity.get(state, env).getSysUserName();
-    try (AccumuloClient client = env.createClient(systemUser, WalkingSecurity.get(state, env).getSysToken())) {
+    try (AccumuloClient client = env.createClient(systemUser, WalkingSecurity.get(state, env)
+        .getSysToken())) {
 
       String tableName = WalkingSecurity.get(state, env).getTableName();
 
       boolean exists = WalkingSecurity.get(state, env).getTableExists();
       boolean hasPermission;
       try {
-        hasPermission = client.securityOperations().hasTablePermission(systemUser, tableName, TablePermission.ALTER_TABLE)
-            || client.securityOperations().hasSystemPermission(systemUser, SystemPermission.ALTER_TABLE);
+        hasPermission = client.securityOperations().hasTablePermission(systemUser, tableName,
+            TablePermission.ALTER_TABLE)
+            || client.securityOperations().hasSystemPermission(systemUser,
+                SystemPermission.ALTER_TABLE);
       } catch (AccumuloSecurityException ae) {
         if (ae.getSecurityErrorCode().equals(SecurityErrorCode.TABLE_DOESNT_EXIST)) {
           if (exists)
-            throw new TableExistsException(null, tableName, "Got a TableNotFoundException but it should exist", ae);
+            throw new TableExistsException(null, tableName,
+                "Got a TableNotFoundException but it should exist", ae);
           else
             return;
         } else {
           throw new AccumuloException("Got unexpected ae error code", ae);
         }
       }
-      String newTableName = String.format("security_%s_%s_%d", InetAddress.getLocalHost().getHostName().replaceAll("[-.]", "_"), env.getPid(),
-          System.currentTimeMillis());
+      String newTableName = String.format("security_%s_%s_%d", InetAddress.getLocalHost()
+          .getHostName().replaceAll("[-.]", "_"), env.getPid(), System.currentTimeMillis());
 
       renameTable(client, state, env, tableName, newTableName, hasPermission, exists);
     }
   }
 
-  public static void renameTable(AccumuloClient client, State state, RandWalkEnv env, String oldName, String newName, boolean hasPermission, boolean tableExists)
+  public static void renameTable(AccumuloClient client, State state, RandWalkEnv env,
+      String oldName, String newName, boolean hasPermission, boolean tableExists)
       throws AccumuloSecurityException, AccumuloException, TableExistsException {
     try {
       client.tableOperations().rename(oldName, newName);
     } catch (AccumuloSecurityException ae) {
       if (ae.getSecurityErrorCode().equals(SecurityErrorCode.PERMISSION_DENIED)) {
         if (hasPermission)
-          throw new AccumuloException("Got a security exception when I should have had permission.", ae);
+          throw new AccumuloException(
+              "Got a security exception when I should have had permission.", ae);
         else
           return;
       } else if (ae.getSecurityErrorCode().equals(SecurityErrorCode.BAD_CREDENTIALS)) {
@@ -79,7 +85,8 @@ public class AlterTable extends Test {
       throw new AccumuloException("Got unexpected ae error code", ae);
     } catch (TableNotFoundException tnfe) {
       if (tableExists)
-        throw new TableExistsException(null, oldName, "Got a TableNotFoundException but it should exist", tnfe);
+        throw new TableExistsException(null, oldName,
+            "Got a TableNotFoundException but it should exist", tnfe);
       else
         return;
     }
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/security/AlterTablePerm.java b/src/main/java/org/apache/accumulo/testing/randomwalk/security/AlterTablePerm.java
index 491452e..e70d486 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/security/AlterTablePerm.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/security/AlterTablePerm.java
@@ -62,7 +62,8 @@ public class AlterTablePerm extends Test {
     } else
       tabPerm = TablePermission.valueOf(perm);
     String tableName = WalkingSecurity.get(state, env).getTableName();
-    boolean hasPerm = WalkingSecurity.get(state, env).hasTablePermission(target, tableName, tabPerm);
+    boolean hasPerm = WalkingSecurity.get(state, env)
+        .hasTablePermission(target, tableName, tabPerm);
     boolean canGive;
     String sourceUser;
     AuthenticationToken sourceToken;
@@ -85,7 +86,8 @@ public class AlterTablePerm extends Test {
       } catch (AccumuloSecurityException ae) {
         if (ae.getSecurityErrorCode().equals(SecurityErrorCode.TABLE_DOESNT_EXIST)) {
           if (tableExists)
-            throw new TableExistsException(null, tableName, "Got a TableNotFoundException but it should exist", ae);
+            throw new TableExistsException(null, tableName,
+                "Got a TableNotFoundException but it should exist", ae);
           else
             return;
         } else {
@@ -97,8 +99,10 @@ public class AlterTablePerm extends Test {
       if (!"take".equals(action) && !"give".equals(action)) {
         try {
           boolean res;
-          if (hasPerm != (res = env.getAccumuloClient().securityOperations().hasTablePermission(target, tableName, tabPerm)))
-            throw new AccumuloException("Test framework and accumulo are out of sync for user " + client.whoami() + " for perm " + tabPerm.name()
+          if (hasPerm != (res = env.getAccumuloClient().securityOperations()
+              .hasTablePermission(target, tableName, tabPerm)))
+            throw new AccumuloException("Test framework and accumulo are out of sync for user "
+                + client.whoami() + " for perm " + tabPerm.name()
                 + " with local vs. accumulo being " + hasPerm + " " + res);
 
           if (hasPerm)
@@ -109,12 +113,14 @@ public class AlterTablePerm extends Test {
           switch (ae.getSecurityErrorCode()) {
             case USER_DOESNT_EXIST:
               if (userExists)
-                throw new AccumuloException("Framework and Accumulo are out of sync, we think user exists", ae);
+                throw new AccumuloException(
+                    "Framework and Accumulo are out of sync, we think user exists", ae);
               else
                 return;
             case TABLE_DOESNT_EXIST:
               if (tableExists)
-                throw new TableExistsException(null, tableName, "Got a TableNotFoundException but it should exist", ae);
+                throw new TableExistsException(null, tableName,
+                    "Got a TableNotFoundException but it should exist", ae);
               else
                 return;
             default:
@@ -133,7 +139,8 @@ public class AlterTablePerm extends Test {
               throw new AccumuloException("Got a grant invalid on non-System.GRANT option", ae);
             case PERMISSION_DENIED:
               if (canGive)
-                throw new AccumuloException(client.whoami() + " failed to revoke permission to " + target + " when it should have worked", ae);
+                throw new AccumuloException(client.whoami() + " failed to revoke permission to "
+                    + target + " when it should have worked", ae);
               return;
             case USER_DOESNT_EXIST:
               if (userExists)
@@ -161,7 +168,8 @@ public class AlterTablePerm extends Test {
               throw new AccumuloException("Got a grant invalid on non-System.GRANT option", ae);
             case PERMISSION_DENIED:
               if (canGive)
-                throw new AccumuloException(client.whoami() + " failed to give permission to " + target + " when it should have worked", ae);
+                throw new AccumuloException(client.whoami() + " failed to give permission to "
+                    + target + " when it should have worked", ae);
               return;
             case USER_DOESNT_EXIST:
               if (userExists)
@@ -187,7 +195,8 @@ public class AlterTablePerm extends Test {
       if (!tableExists)
         throw new AccumuloException("Table shouldn't have existed, but apparently does");
       if (!canGive)
-        throw new AccumuloException(client.whoami() + " shouldn't have been able to grant privilege");
+        throw new AccumuloException(client.whoami()
+            + " shouldn't have been able to grant privilege");
     }
   }
 }
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/security/Authenticate.java b/src/main/java/org/apache/accumulo/testing/randomwalk/security/Authenticate.java
index 1e3453b..89f3290 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/security/Authenticate.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/security/Authenticate.java
@@ -33,10 +33,12 @@ public class Authenticate extends Test {
 
   @Override
   public void visit(State state, RandWalkEnv env, Properties props) throws Exception {
-    authenticate(WalkingSecurity.get(state, env).getSysUserName(), WalkingSecurity.get(state, env).getSysToken(), state, env, props);
+    authenticate(WalkingSecurity.get(state, env).getSysUserName(), WalkingSecurity.get(state, env)
+        .getSysToken(), state, env, props);
   }
 
-  public static void authenticate(String principal, AuthenticationToken token, State state, RandWalkEnv env, Properties props) throws Exception {
+  public static void authenticate(String principal, AuthenticationToken token, State state,
+      RandWalkEnv env, Properties props) throws Exception {
     String targetProp = props.getProperty("target");
     boolean success = Boolean.parseBoolean(props.getProperty("valid"));
 
@@ -51,8 +53,11 @@ public class Authenticate extends Test {
       }
       boolean exists = WalkingSecurity.get(state, env).userExists(target);
       // Copy so if failed it doesn't mess with the password stored in state
-      byte[] password = Arrays.copyOf(WalkingSecurity.get(state, env).getUserPassword(target), WalkingSecurity.get(state, env).getUserPassword(target).length);
-      boolean hasPermission = client.securityOperations().hasSystemPermission(principal, SystemPermission.SYSTEM) || principal.equals(target);
+      byte[] password = Arrays.copyOf(WalkingSecurity.get(state, env).getUserPassword(target),
+          WalkingSecurity.get(state, env).getUserPassword(target).length);
+      boolean hasPermission = client.securityOperations().hasSystemPermission(principal,
+          SystemPermission.SYSTEM)
+          || principal.equals(target);
 
       if (!success)
         for (int i = 0; i < password.length; i++)
@@ -66,7 +71,8 @@ public class Authenticate extends Test {
         switch (ae.getSecurityErrorCode()) {
           case PERMISSION_DENIED:
             if (exists && hasPermission)
-              throw new AccumuloException("Got a security exception when I should have had permission.", ae);
+              throw new AccumuloException(
+                  "Got a security exception when I should have had permission.", ae);
             else
               return;
           default:
@@ -76,8 +82,9 @@ public class Authenticate extends Test {
       if (!hasPermission)
         throw new AccumuloException("Didn't get Security Exception when we should have");
       if (result != (success && exists))
-        throw new AccumuloException("Authentication " + (result ? "succeeded" : "failed") + " when it should have "
-            + ((success && exists) ? "succeeded" : "failed") + " while the user exists? " + exists);
+        throw new AccumuloException("Authentication " + (result ? "succeeded" : "failed")
+            + " when it should have " + ((success && exists) ? "succeeded" : "failed")
+            + " while the user exists? " + exists);
     }
   }
 }
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/security/ChangePass.java b/src/main/java/org/apache/accumulo/testing/randomwalk/security/ChangePass.java
index 01f5715..b75f58a 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/security/ChangePass.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/security/ChangePass.java
@@ -56,7 +56,9 @@ public class ChangePass extends Test {
 
       targetExists = WalkingSecurity.get(state, env).userExists(target);
 
-      hasPerm = client.securityOperations().hasSystemPermission(principal, SystemPermission.ALTER_USER) || principal.equals(target);
+      hasPerm = client.securityOperations().hasSystemPermission(principal,
+          SystemPermission.ALTER_USER)
+          || principal.equals(target);
 
       Random r = new Random();
 
@@ -71,7 +73,8 @@ public class ChangePass extends Test {
         switch (ae.getSecurityErrorCode()) {
           case PERMISSION_DENIED:
             if (hasPerm)
-              throw new AccumuloException("Change failed when it should have succeeded to change " + target + "'s password", ae);
+              throw new AccumuloException("Change failed when it should have succeeded to change "
+                  + target + "'s password", ae);
             return;
           case USER_DOESNT_EXIST:
             if (targetExists)
@@ -89,7 +92,8 @@ public class ChangePass extends Test {
       // Waiting 1 second for password to propagate through Zk
       Thread.sleep(1000);
       if (!hasPerm)
-        throw new AccumuloException("Password change succeeded when it should have failed for " + source + " changing the password for " + target + ".");
+        throw new AccumuloException("Password change succeeded when it should have failed for "
+            + source + " changing the password for " + target + ".");
     }
   }
 }
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/security/CreateTable.java b/src/main/java/org/apache/accumulo/testing/randomwalk/security/CreateTable.java
index e9d53eb..54ffcac 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/security/CreateTable.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/security/CreateTable.java
@@ -33,19 +33,22 @@ public class CreateTable extends Test {
 
   @Override
   public void visit(State state, RandWalkEnv env, Properties props) throws Exception {
-    try (AccumuloClient client = env.createClient(WalkingSecurity.get(state, env).getSysUserName(), WalkingSecurity.get(state, env).getSysToken())) {
+    try (AccumuloClient client = env.createClient(WalkingSecurity.get(state, env).getSysUserName(),
+        WalkingSecurity.get(state, env).getSysToken())) {
 
       String tableName = WalkingSecurity.get(state, env).getTableName();
 
       boolean exists = WalkingSecurity.get(state, env).getTableExists();
-      boolean hasPermission = client.securityOperations().hasSystemPermission(WalkingSecurity.get(state, env).getSysUserName(), SystemPermission.CREATE_TABLE);
+      boolean hasPermission = client.securityOperations().hasSystemPermission(
+          WalkingSecurity.get(state, env).getSysUserName(), SystemPermission.CREATE_TABLE);
 
       try {
         client.tableOperations().create(tableName);
       } catch (AccumuloSecurityException ae) {
         if (ae.getSecurityErrorCode().equals(SecurityErrorCode.PERMISSION_DENIED)) {
           if (hasPermission)
-            throw new AccumuloException("Got a security exception when I should have had permission.", ae);
+            throw new AccumuloException(
+                "Got a security exception when I should have had permission.", ae);
           else {
             // create table anyway for sake of state
             try {
@@ -63,7 +66,8 @@ public class CreateTable extends Test {
           throw new AccumuloException("Got unexpected error", ae);
       } catch (TableExistsException tee) {
         if (!exists)
-          throw new TableExistsException(null, tableName, "Got a TableExistsException but it shouldn't have existed", tee);
+          throw new TableExistsException(null, tableName,
+              "Got a TableExistsException but it shouldn't have existed", tee);
         else
           return;
       }
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/security/CreateUser.java b/src/main/java/org/apache/accumulo/testing/randomwalk/security/CreateUser.java
index bae37c6..b6b1ca2 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/security/CreateUser.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/security/CreateUser.java
@@ -32,12 +32,14 @@ public class CreateUser extends Test {
   @Override
   public void visit(State state, RandWalkEnv env, Properties props) throws Exception {
     String sysPrincipal = WalkingSecurity.get(state, env).getSysUserName();
-    try (AccumuloClient client = env.createClient(sysPrincipal, WalkingSecurity.get(state, env).getSysToken())) {
+    try (AccumuloClient client = env.createClient(sysPrincipal, WalkingSecurity.get(state, env)
+        .getSysToken())) {
 
       String tableUserName = WalkingSecurity.get(state, env).getTabUserName();
 
       boolean exists = WalkingSecurity.get(state, env).userExists(tableUserName);
-      boolean hasPermission = client.securityOperations().hasSystemPermission(sysPrincipal, SystemPermission.CREATE_USER);
+      boolean hasPermission = client.securityOperations().hasSystemPermission(sysPrincipal,
+          SystemPermission.CREATE_USER);
       PasswordToken tabUserPass = new PasswordToken("Super Sekret Table User Password");
       try {
         client.securityOperations().createLocalUser(tableUserName, tabUserPass);
@@ -45,11 +47,13 @@ public class CreateUser extends Test {
         switch (ae.getSecurityErrorCode()) {
           case PERMISSION_DENIED:
             if (hasPermission)
-              throw new AccumuloException("Got a security exception when I should have had permission.", ae);
+              throw new AccumuloException(
+                  "Got a security exception when I should have had permission.", ae);
             else {
               // create user anyway for sake of state
               if (!exists) {
-                env.getAccumuloClient().securityOperations().createLocalUser(tableUserName, tabUserPass);
+                env.getAccumuloClient().securityOperations()
+                    .createLocalUser(tableUserName, tabUserPass);
                 WalkingSecurity.get(state, env).createUser(tableUserName, tabUserPass);
                 Thread.sleep(1000);
               }
@@ -57,7 +61,8 @@ public class CreateUser extends Test {
             }
           case USER_EXISTS:
             if (!exists)
-              throw new AccumuloException("Got security exception when the user shouldn't have existed", ae);
+              throw new AccumuloException(
+                  "Got security exception when the user shouldn't have existed", ae);
             else
               return;
           default:
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/security/DropTable.java b/src/main/java/org/apache/accumulo/testing/randomwalk/security/DropTable.java
index 2311967..80a5845 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/security/DropTable.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/security/DropTable.java
@@ -57,18 +57,22 @@ public class DropTable extends Test {
       boolean exists = WalkingSecurity.get(state, env).getTableExists();
 
       try {
-        hasPermission = client.securityOperations().hasTablePermission(principal, tableName, TablePermission.DROP_TABLE)
-            || client.securityOperations().hasSystemPermission(principal, SystemPermission.DROP_TABLE);
+        hasPermission = client.securityOperations().hasTablePermission(principal, tableName,
+            TablePermission.DROP_TABLE)
+            || client.securityOperations().hasSystemPermission(principal,
+                SystemPermission.DROP_TABLE);
         client.tableOperations().delete(tableName);
       } catch (AccumuloSecurityException ae) {
         if (ae.getSecurityErrorCode().equals(SecurityErrorCode.TABLE_DOESNT_EXIST)) {
           if (exists)
-            throw new TableExistsException(null, tableName, "Got a TableNotFoundException but it should have existed", ae);
+            throw new TableExistsException(null, tableName,
+                "Got a TableNotFoundException but it should have existed", ae);
           else
             return;
         } else if (ae.getSecurityErrorCode().equals(SecurityErrorCode.PERMISSION_DENIED)) {
           if (hasPermission)
-            throw new AccumuloException("Got a security exception when I should have had permission.", ae);
+            throw new AccumuloException(
+                "Got a security exception when I should have had permission.", ae);
           else {
             // Drop anyway for sake of state
             env.getAccumuloClient().tableOperations().delete(tableName);
@@ -82,7 +86,8 @@ public class DropTable extends Test {
         throw new AccumuloException("Got unexpected ae error code", ae);
       } catch (TableNotFoundException tnfe) {
         if (exists)
-          throw new TableExistsException(null, tableName, "Got a TableNotFoundException but it should have existed", tnfe);
+          throw new TableExistsException(null, tableName,
+              "Got a TableNotFoundException but it should have existed", tnfe);
         else
           return;
       }
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/security/DropUser.java b/src/main/java/org/apache/accumulo/testing/randomwalk/security/DropUser.java
index 92cb9c1..9718dde 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/security/DropUser.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/security/DropUser.java
@@ -31,12 +31,14 @@ public class DropUser extends Test {
   @Override
   public void visit(State state, RandWalkEnv env, Properties props) throws Exception {
     String sysPrincipal = WalkingSecurity.get(state, env).getSysUserName();
-    try (AccumuloClient client = env.createClient(sysPrincipal, WalkingSecurity.get(state, env).getSysToken())) {
+    try (AccumuloClient client = env.createClient(sysPrincipal, WalkingSecurity.get(state, env)
+        .getSysToken())) {
 
       String tableUserName = WalkingSecurity.get(state, env).getTabUserName();
 
       boolean exists = WalkingSecurity.get(state, env).userExists(tableUserName);
-      boolean hasPermission = client.securityOperations().hasSystemPermission(sysPrincipal, SystemPermission.DROP_USER);
+      boolean hasPermission = client.securityOperations().hasSystemPermission(sysPrincipal,
+          SystemPermission.DROP_USER);
 
       try {
         client.securityOperations().dropLocalUser(tableUserName);
@@ -44,7 +46,8 @@ public class DropUser extends Test {
         switch (ae.getSecurityErrorCode()) {
           case PERMISSION_DENIED:
             if (hasPermission)
-              throw new AccumuloException("Got a security exception when I should have had permission.", ae);
+              throw new AccumuloException(
+                  "Got a security exception when I should have had permission.", ae);
             else {
               if (exists) {
                 env.getAccumuloClient().securityOperations().dropLocalUser(tableUserName);
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/security/SecurityFixture.java b/src/main/java/org/apache/accumulo/testing/randomwalk/security/SecurityFixture.java
index cbc5464..9d34282 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/security/SecurityFixture.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/security/SecurityFixture.java
@@ -36,7 +36,8 @@ public class SecurityFixture extends Fixture {
     String secTableName, systemUserName, tableUserName, secNamespaceName;
     // A best-effort sanity check to guard against non-password-based auth
     if (env.getClientProps().getProperty(ClientProperty.AUTH_TYPE.getKey()).equals("kerberos")) {
-      throw new IllegalStateException("Security module currently cannot support Kerberos/SASL instances");
+      throw new IllegalStateException(
+          "Security module currently cannot support Kerberos/SASL instances");
     }
 
     AccumuloClient client = env.getAccumuloClient();
@@ -101,7 +102,8 @@ public class SecurityFixture extends Fixture {
       client.namespaceOperations().delete(secNamespaceName);
     }
 
-    if (WalkingSecurity.get(state, env).userExists(WalkingSecurity.get(state, env).getTabUserName())) {
+    if (WalkingSecurity.get(state, env)
+        .userExists(WalkingSecurity.get(state, env).getTabUserName())) {
       String tableUserName = WalkingSecurity.get(state, env).getTabUserName();
       log.debug("Dropping user: " + tableUserName);
 
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/security/SecurityHelper.java b/src/main/java/org/apache/accumulo/testing/randomwalk/security/SecurityHelper.java
index 1097c7c..b632249 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/security/SecurityHelper.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/security/SecurityHelper.java
@@ -129,7 +129,8 @@ public class SecurityHelper {
   }
 
   public static void setTabPerm(State state, String userName, TablePermission tp, boolean value) {
-    log.debug((value ? "Gave" : "Took") + " the table permission " + tp.name() + (value ? " to" : " from") + " user " + userName);
+    log.debug((value ? "Gave" : "Took") + " the table permission " + tp.name()
+        + (value ? " to" : " from") + " user " + userName);
     state.set("Tab" + userName + tp.name(), Boolean.toString(value));
     if (tp.equals(TablePermission.READ) || tp.equals(TablePermission.WRITE))
       state.set("Tab" + userName + tp.name() + "time", System.currentTimeMillis());
@@ -141,7 +142,8 @@ public class SecurityHelper {
   }
 
   public static void setSysPerm(State state, String userName, SystemPermission tp, boolean value) {
-    log.debug((value ? "Gave" : "Took") + " the system permission " + tp.name() + (value ? " to" : " from") + " user " + userName);
+    log.debug((value ? "Gave" : "Took") + " the system permission " + tp.name()
+        + (value ? " to" : " from") + " user " + userName);
     state.set("Sys" + userName + tp.name(), Boolean.toString(value));
   }
 
@@ -163,7 +165,8 @@ public class SecurityHelper {
   }
 
   public static String[] getAuthsArray() {
-    return new String[] {"Fishsticks", "PotatoSkins", "Ribs", "Asparagus", "Paper", "Towels", "Lint", "Brush", "Celery"};
+    return new String[] {"Fishsticks", "PotatoSkins", "Ribs", "Asparagus", "Paper", "Towels",
+        "Lint", "Brush", "Celery"};
   }
 
   public static String getLastKey(State state) {
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/security/SetAuths.java b/src/main/java/org/apache/accumulo/testing/randomwalk/security/SetAuths.java
index ced6a00..7823500 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/security/SetAuths.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/security/SetAuths.java
@@ -51,7 +51,8 @@ public class SetAuths extends Test {
     try (AccumuloClient client = env.createClient(authPrincipal, authToken)) {
 
       boolean exists = WalkingSecurity.get(state, env).userExists(target);
-      boolean hasPermission = client.securityOperations().hasSystemPermission(authPrincipal, SystemPermission.ALTER_USER);
+      boolean hasPermission = client.securityOperations().hasSystemPermission(authPrincipal,
+          SystemPermission.ALTER_USER);
 
       Authorizations auths;
       if (authsString.equals("_random")) {
@@ -79,12 +80,14 @@ public class SetAuths extends Test {
         switch (ae.getSecurityErrorCode()) {
           case PERMISSION_DENIED:
             if (hasPermission)
-              throw new AccumuloException("Got a security exception when I should have had permission.", ae);
+              throw new AccumuloException(
+                  "Got a security exception when I should have had permission.", ae);
             else
               return;
           case USER_DOESNT_EXIST:
             if (exists)
-              throw new AccumuloException("Got security exception when the user should have existed", ae);
+              throw new AccumuloException(
+                  "Got security exception when the user should have existed", ae);
             else
               return;
           default:
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/security/TableOp.java b/src/main/java/org/apache/accumulo/testing/randomwalk/security/TableOp.java
index 0b539c0..8e671c4 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/security/TableOp.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/security/TableOp.java
@@ -57,7 +57,8 @@ public class TableOp extends Test {
   @Override
   public void visit(State state, RandWalkEnv env, Properties props) throws Exception {
     String tablePrincipal = WalkingSecurity.get(state, env).getTabUserName();
-    try (AccumuloClient client = env.createClient(tablePrincipal, WalkingSecurity.get(state, env).getTabToken())) {
+    try (AccumuloClient client = env.createClient(tablePrincipal, WalkingSecurity.get(state, env)
+        .getTabToken())) {
       TableOperations tableOps = client.tableOperations();
       SecurityOperations secOps = client.securityOperations();
 
@@ -80,12 +81,15 @@ public class TableOp extends Test {
             canRead = secOps.hasTablePermission(tablePrincipal, tableName, TablePermission.READ);
           } catch (AccumuloSecurityException ase) {
             if (tableExists)
-              throw new AccumuloException("Table didn't exist when it should have: " + tableName, ase);
+              throw new AccumuloException("Table didn't exist when it should have: " + tableName,
+                  ase);
             return;
           }
           Authorizations auths = secOps.getUserAuthorizations(tablePrincipal);
-          boolean ambiguousZone = WalkingSecurity.get(state, env).inAmbiguousZone(client.whoami(), tp);
-          boolean ambiguousAuths = WalkingSecurity.get(state, env).ambiguousAuthorizations(client.whoami());
+          boolean ambiguousZone = WalkingSecurity.get(state, env).inAmbiguousZone(client.whoami(),
+              tp);
+          boolean ambiguousAuths = WalkingSecurity.get(state, env).ambiguousAuthorizations(
+              client.whoami());
 
           Scanner scan = null;
           try {
@@ -97,11 +101,15 @@ public class TableOp extends Test {
               Key k = entry.getKey();
               seen++;
               if (!auths.contains(k.getColumnVisibilityData()) && !ambiguousAuths)
-                throw new AccumuloException("Got data I should not be capable of seeing: " + k + " table " + tableName);
+                throw new AccumuloException("Got data I should not be capable of seeing: " + k
+                    + " table " + tableName);
             }
             if (!canRead && !ambiguousZone)
-              throw new AccumuloException("Was able to read when I shouldn't have had the perm with connection user " + client.whoami() + " table " + tableName);
-            for (Entry<String,Integer> entry : WalkingSecurity.get(state, env).getAuthsMap().entrySet()) {
+              throw new AccumuloException(
+                  "Was able to read when I shouldn't have had the perm with connection user "
+                      + client.whoami() + " table " + tableName);
+            for (Entry<String,Integer> entry : WalkingSecurity.get(state, env).getAuthsMap()
+                .entrySet()) {
               if (auths.contains(entry.getKey().getBytes(UTF_8)))
                 seen = seen - entry.getValue();
             }
@@ -109,12 +117,14 @@ public class TableOp extends Test {
               throw new AccumuloException("Got mismatched amounts of data");
           } catch (TableNotFoundException tnfe) {
             if (tableExists)
-              throw new AccumuloException("Accumulo and test suite out of sync: table " + tableName, tnfe);
+              throw new AccumuloException(
+                  "Accumulo and test suite out of sync: table " + tableName, tnfe);
             return;
           } catch (AccumuloSecurityException ae) {
             if (ae.getSecurityErrorCode().equals(SecurityErrorCode.PERMISSION_DENIED)) {
               if (canRead && !ambiguousZone)
-                throw new AccumuloException("Table read permission out of sync with Accumulo: table " + tableName, ae);
+                throw new AccumuloException(
+                    "Table read permission out of sync with Accumulo: table " + tableName, ae);
               else
                 return;
             }
@@ -127,14 +137,18 @@ public class TableOp extends Test {
             throw new AccumuloException("Unexpected exception!", ae);
           } catch (RuntimeException re) {
             if (re.getCause() instanceof AccumuloSecurityException
-                && ((AccumuloSecurityException) re.getCause()).getSecurityErrorCode().equals(SecurityErrorCode.PERMISSION_DENIED)) {
+                && ((AccumuloSecurityException) re.getCause()).getSecurityErrorCode().equals(
+                    SecurityErrorCode.PERMISSION_DENIED)) {
               if (canRead && !ambiguousZone)
-                throw new AccumuloException("Table read permission out of sync with Accumulo: table " + tableName, re.getCause());
+                throw new AccumuloException(
+                    "Table read permission out of sync with Accumulo: table " + tableName,
+                    re.getCause());
               else
                 return;
             }
             if (re.getCause() instanceof AccumuloSecurityException
-                && ((AccumuloSecurityException) re.getCause()).getSecurityErrorCode().equals(SecurityErrorCode.BAD_AUTHORIZATIONS)) {
+                && ((AccumuloSecurityException) re.getCause()).getSecurityErrorCode().equals(
+                    SecurityErrorCode.BAD_AUTHORIZATIONS)) {
               if (ambiguousAuths)
                 return;
               else
@@ -158,19 +172,22 @@ public class TableOp extends Test {
             canWrite = secOps.hasTablePermission(tablePrincipal, tableName, TablePermission.WRITE);
           } catch (AccumuloSecurityException ase) {
             if (tableExists)
-              throw new AccumuloException("Table didn't exist when it should have: " + tableName, ase);
+              throw new AccumuloException("Table didn't exist when it should have: " + tableName,
+                  ase);
             return;
           }
 
           String key = WalkingSecurity.get(state, env).getLastKey() + "1";
           Mutation m = new Mutation(new Text(key));
           for (String s : WalkingSecurity.get(state, env).getAuthsArray()) {
-            m.put(new Text(), new Text(), new ColumnVisibility(s), new Value("value".getBytes(UTF_8)));
+            m.put(new Text(), new Text(), new ColumnVisibility(s),
+                new Value("value".getBytes(UTF_8)));
           }
           BatchWriter writer = null;
           try {
             try {
-              writer = client.createBatchWriter(tableName, new BatchWriterConfig().setMaxMemory(9000l).setMaxWriteThreads(1));
+              writer = client.createBatchWriter(tableName,
+                  new BatchWriterConfig().setMaxMemory(9000l).setMaxWriteThreads(1));
             } catch (TableNotFoundException tnfe) {
               if (tableExists)
                 throw new AccumuloException("Table didn't exist when it should have: " + tableName);
@@ -182,14 +199,18 @@ public class TableOp extends Test {
               writer.close();
             } catch (MutationsRejectedException mre) {
               if (mre.getSecurityErrorCodes().size() == 1) {
-                // TabletServerBatchWriter will log the error automatically so make sure it's the error we expect
-                SecurityErrorCode errorCode = mre.getSecurityErrorCodes().entrySet().iterator().next().getValue().iterator().next();
+                // TabletServerBatchWriter will log the error automatically so make sure it's the
+                // error we expect
+                SecurityErrorCode errorCode = mre.getSecurityErrorCodes().entrySet().iterator()
+                    .next().getValue().iterator().next();
                 if (errorCode.equals(SecurityErrorCode.PERMISSION_DENIED) && !canWrite) {
-                  log.info("Caught MutationsRejectedException({}) in TableOp.WRITE as expected.", errorCode);
+                  log.info("Caught MutationsRejectedException({}) in TableOp.WRITE as expected.",
+                      errorCode);
                   return;
                 }
               }
-              throw new AccumuloException("Unexpected MutationsRejectedException in TableOp.WRITE", mre);
+              throw new AccumuloException("Unexpected MutationsRejectedException in TableOp.WRITE",
+                  mre);
             }
             if (works)
               for (String s : WalkingSecurity.get(state, env).getAuthsArray())
@@ -211,7 +232,8 @@ public class TableOp extends Test {
           Path dir = new Path("/tmp", "bulk_" + UUID.randomUUID().toString());
           Path fail = new Path(dir.toString() + "_fail");
           FileSystem fs = WalkingSecurity.get(state, env).getFs();
-          RFileWriter rFileWriter = RFile.newWriter().to(dir + "/securityBulk.rf").withFileSystem(fs).build();
+          RFileWriter rFileWriter = RFile.newWriter().to(dir + "/securityBulk.rf")
+              .withFileSystem(fs).build();
           rFileWriter.startDefaultLocalityGroup();
           fs.mkdirs(fail);
           for (Key k : keys)
@@ -226,7 +248,8 @@ public class TableOp extends Test {
           } catch (AccumuloSecurityException ae) {
             if (ae.getSecurityErrorCode().equals(SecurityErrorCode.PERMISSION_DENIED)) {
               if (secOps.hasTablePermission(tablePrincipal, tableName, TablePermission.BULK_IMPORT))
-                throw new AccumuloException("Bulk Import failed when it should have worked: " + tableName);
+                throw new AccumuloException("Bulk Import failed when it should have worked: "
+                    + tableName);
               return;
             } else if (ae.getSecurityErrorCode().equals(SecurityErrorCode.BAD_CREDENTIALS)) {
               if (WalkingSecurity.get(state, env).userPassTransient(client.whoami()))
@@ -240,18 +263,22 @@ public class TableOp extends Test {
           fs.delete(fail, true);
 
           if (!secOps.hasTablePermission(tablePrincipal, tableName, TablePermission.BULK_IMPORT))
-            throw new AccumuloException("Bulk Import succeeded when it should have failed: " + dir + " table " + tableName);
+            throw new AccumuloException("Bulk Import succeeded when it should have failed: " + dir
+                + " table " + tableName);
           break;
         case ALTER_TABLE:
           boolean tablePerm;
           try {
-            tablePerm = secOps.hasTablePermission(tablePrincipal, tableName, TablePermission.ALTER_TABLE);
+            tablePerm = secOps.hasTablePermission(tablePrincipal, tableName,
+                TablePermission.ALTER_TABLE);
           } catch (AccumuloSecurityException ase) {
             if (tableExists)
-              throw new AccumuloException("Table didn't exist when it should have: " + tableName, ase);
+              throw new AccumuloException("Table didn't exist when it should have: " + tableName,
+                  ase);
             return;
           }
-          AlterTable.renameTable(client, state, env, tableName, tableName + "plus", tablePerm, tableExists);
+          AlterTable.renameTable(client, state, env, tableName, tableName + "plus", tablePerm,
+              tableExists);
           break;
 
         case GRANT:
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/security/Validate.java b/src/main/java/org/apache/accumulo/testing/randomwalk/security/Validate.java
index a7f788f..466c163 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/security/Validate.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/security/Validate.java
@@ -40,12 +40,15 @@ public class Validate extends Test {
     AccumuloClient client = env.getAccumuloClient();
 
     boolean tableExists = WalkingSecurity.get(state, env).getTableExists();
-    boolean cloudTableExists = client.tableOperations().list().contains(WalkingSecurity.get(state, env).getTableName());
+    boolean cloudTableExists = client.tableOperations().list()
+        .contains(WalkingSecurity.get(state, env).getTableName());
     if (tableExists != cloudTableExists)
       throw new AccumuloException("Table existance out of sync");
 
-    boolean tableUserExists = WalkingSecurity.get(state, env).userExists(WalkingSecurity.get(state, env).getTabUserName());
-    boolean cloudTableUserExists = client.securityOperations().listLocalUsers().contains(WalkingSecurity.get(state, env).getTabUserName());
+    boolean tableUserExists = WalkingSecurity.get(state, env).userExists(
+        WalkingSecurity.get(state, env).getTabUserName());
+    boolean cloudTableUserExists = client.securityOperations().listLocalUsers()
+        .contains(WalkingSecurity.get(state, env).getTabUserName());
     if (tableUserExists != cloudTableUserExists)
       throw new AccumuloException("Table User existance out of sync");
 
@@ -55,13 +58,15 @@ public class Validate extends Test {
     props.setProperty("target", "table");
     Authenticate.authenticate(env.getAccumuloUserName(), env.getToken(), state, env, props);
 
-    for (String user : new String[] {WalkingSecurity.get(state, env).getSysUserName(), WalkingSecurity.get(state, env).getTabUserName()}) {
+    for (String user : new String[] {WalkingSecurity.get(state, env).getSysUserName(),
+        WalkingSecurity.get(state, env).getTabUserName()}) {
       for (SystemPermission sp : SystemPermission.values()) {
         boolean hasSp = WalkingSecurity.get(state, env).hasSystemPermission(user, sp);
         boolean accuHasSp;
         try {
           accuHasSp = client.securityOperations().hasSystemPermission(user, sp);
-          log.debug("Just checked to see if user " + user + " has system perm " + sp.name() + " with answer " + accuHasSp);
+          log.debug("Just checked to see if user " + user + " has system perm " + sp.name()
+              + " with answer " + accuHasSp);
         } catch (AccumuloSecurityException ae) {
           if (ae.getSecurityErrorCode().equals(SecurityErrorCode.USER_DOESNT_EXIST)) {
             if (tableUserExists)
@@ -72,15 +77,19 @@ public class Validate extends Test {
             throw new AccumuloException("Unexpected exception!", ae);
         }
         if (hasSp != accuHasSp)
-          throw new AccumuloException(user + " existance out of sync for system perm " + sp + " hasSp/CloudhasSP " + hasSp + " " + accuHasSp);
+          throw new AccumuloException(user + " existance out of sync for system perm " + sp
+              + " hasSp/CloudhasSP " + hasSp + " " + accuHasSp);
       }
 
       for (TablePermission tp : TablePermission.values()) {
-        boolean hasTp = WalkingSecurity.get(state, env).hasTablePermission(user, WalkingSecurity.get(state, env).getTableName(), tp);
+        boolean hasTp = WalkingSecurity.get(state, env).hasTablePermission(user,
+            WalkingSecurity.get(state, env).getTableName(), tp);
         boolean accuHasTp;
         try {
-          accuHasTp = client.securityOperations().hasTablePermission(user, WalkingSecurity.get(state, env).getTableName(), tp);
-          log.debug("Just checked to see if user " + user + " has table perm " + tp.name() + " with answer " + accuHasTp);
+          accuHasTp = client.securityOperations().hasTablePermission(user,
+              WalkingSecurity.get(state, env).getTableName(), tp);
+          log.debug("Just checked to see if user " + user + " has table perm " + tp.name()
+              + " with answer " + accuHasTp);
         } catch (AccumuloSecurityException ae) {
           if (ae.getSecurityErrorCode().equals(SecurityErrorCode.USER_DOESNT_EXIST)) {
             if (tableUserExists)
@@ -96,7 +105,8 @@ public class Validate extends Test {
             throw new AccumuloException("Unexpected exception!", ae);
         }
         if (hasTp != accuHasTp)
-          throw new AccumuloException(user + " existance out of sync for table perm " + tp + " hasTp/CloudhasTP " + hasTp + " " + accuHasTp);
+          throw new AccumuloException(user + " existance out of sync for table perm " + tp
+              + " hasTp/CloudhasTP " + hasTp + " " + accuHasTp);
       }
 
     }
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/security/WalkingSecurity.java b/src/main/java/org/apache/accumulo/testing/randomwalk/security/WalkingSecurity.java
index 65c5b79..695fdd1 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/security/WalkingSecurity.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/security/WalkingSecurity.java
@@ -75,7 +75,8 @@ public class WalkingSecurity {
     return instance;
   }
 
-  public void changeAuthorizations(String user, Authorizations authorizations) throws AccumuloSecurityException {
+  public void changeAuthorizations(String user, Authorizations authorizations)
+      throws AccumuloSecurityException {
     state.set(user + "_auths", authorizations);
     state.set("Auths-" + user + '-' + "time", System.currentTimeMillis());
   }
@@ -89,7 +90,8 @@ public class WalkingSecurity {
     return false;
   }
 
-  public void createUser(String principal, AuthenticationToken token) throws AccumuloSecurityException {
+  public void createUser(String principal, AuthenticationToken token)
+      throws AccumuloSecurityException {
     state.set(principal + userExists, Boolean.toString(true));
     changePassword(principal, token);
     cleanUser(principal);
@@ -102,7 +104,8 @@ public class WalkingSecurity {
       state.set("table" + connector, null);
   }
 
-  public void changePassword(String principal, AuthenticationToken token) throws AccumuloSecurityException {
+  public void changePassword(String principal, AuthenticationToken token)
+      throws AccumuloSecurityException {
     state.set(principal + userPass, token);
     state.set(principal + userPass + "time", System.currentTimeMillis());
   }
@@ -111,46 +114,56 @@ public class WalkingSecurity {
     return Boolean.parseBoolean(state.getString(user + userExists));
   }
 
-  public boolean hasSystemPermission(String user, SystemPermission permission) throws AccumuloSecurityException {
+  public boolean hasSystemPermission(String user, SystemPermission permission)
+      throws AccumuloSecurityException {
     boolean res = Boolean.parseBoolean(state.getString("Sys-" + user + '-' + permission.name()));
     return res;
   }
 
-  public boolean hasTablePermission(String user, String table, TablePermission permission) throws AccumuloSecurityException, TableNotFoundException {
+  public boolean hasTablePermission(String user, String table, TablePermission permission)
+      throws AccumuloSecurityException, TableNotFoundException {
     return Boolean.parseBoolean(state.getString("Tab-" + user + '-' + permission.name()));
   }
 
-  public void grantSystemPermission(String user, SystemPermission permission) throws AccumuloSecurityException {
+  public void grantSystemPermission(String user, SystemPermission permission)
+      throws AccumuloSecurityException {
     setSysPerm(state, user, permission, true);
   }
 
-  public void revokeSystemPermission(String user, SystemPermission permission) throws AccumuloSecurityException {
+  public void revokeSystemPermission(String user, SystemPermission permission)
+      throws AccumuloSecurityException {
     setSysPerm(state, user, permission, false);
   }
 
-  public void grantTablePermission(String user, String table, TablePermission permission) throws AccumuloSecurityException, TableNotFoundException {
+  public void grantTablePermission(String user, String table, TablePermission permission)
+      throws AccumuloSecurityException, TableNotFoundException {
     setTabPerm(state, user, permission, table, true);
   }
 
   private static void setSysPerm(State state, String userName, SystemPermission tp, boolean value) {
-    log.debug((value ? "Gave" : "Took") + " the system permission " + tp.name() + (value ? " to" : " from") + " user " + userName);
+    log.debug((value ? "Gave" : "Took") + " the system permission " + tp.name()
+        + (value ? " to" : " from") + " user " + userName);
     state.set("Sys-" + userName + '-' + tp.name(), Boolean.toString(value));
   }
 
-  private void setTabPerm(State state, String userName, TablePermission tp, String table, boolean value) {
+  private void setTabPerm(State state, String userName, TablePermission tp, String table,
+      boolean value) {
     if (table.equals(userName))
       throw new RuntimeException("Something went wrong: table is equal to userName: " + userName);
-    log.debug((value ? "Gave" : "Took") + " the table permission " + tp.name() + (value ? " to" : " from") + " user " + userName);
+    log.debug((value ? "Gave" : "Took") + " the table permission " + tp.name()
+        + (value ? " to" : " from") + " user " + userName);
     state.set("Tab-" + userName + '-' + tp.name(), Boolean.toString(value));
     if (tp.equals(TablePermission.READ) || tp.equals(TablePermission.WRITE))
       state.set("Tab-" + userName + '-' + tp.name() + '-' + "time", System.currentTimeMillis());
   }
 
-  public void revokeTablePermission(String user, String table, TablePermission permission) throws AccumuloSecurityException, TableNotFoundException {
+  public void revokeTablePermission(String user, String table, TablePermission permission)
+      throws AccumuloSecurityException, TableNotFoundException {
     setTabPerm(state, user, permission, table, false);
   }
 
-  public void cleanTablePermissions(String table) throws AccumuloSecurityException, TableNotFoundException {
+  public void cleanTablePermissions(String table) throws AccumuloSecurityException,
+      TableNotFoundException {
     for (String user : new String[] {getSysUserName(), getTabUserName()}) {
       for (TablePermission tp : TablePermission.values()) {
         revokeTablePermission(user, table, tp);
@@ -254,7 +267,8 @@ public class WalkingSecurity {
   }
 
   public String[] getAuthsArray() {
-    return new String[] {"Fishsticks", "PotatoSkins", "Ribs", "Asparagus", "Paper", "Towels", "Lint", "Brush", "Celery"};
+    return new String[] {"Fishsticks", "PotatoSkins", "Ribs", "Asparagus", "Paper", "Towels",
+        "Lint", "Brush", "Celery"};
   }
 
   public boolean inAmbiguousZone(String userName, TablePermission tp) {
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/sequential/BatchVerify.java b/src/main/java/org/apache/accumulo/testing/randomwalk/sequential/BatchVerify.java
index 735edc7..3788a4f 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/sequential/BatchVerify.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/sequential/BatchVerify.java
@@ -51,7 +51,8 @@ public class BatchVerify extends Test {
     }
 
     AccumuloClient client = env.getAccumuloClient();
-    BatchScanner scanner = client.createBatchScanner(state.getString("seqTableName"), new Authorizations(), 2);
+    BatchScanner scanner = client.createBatchScanner(state.getString("seqTableName"),
+        new Authorizations(), 2);
 
     try {
       int count = 0;
@@ -63,7 +64,8 @@ public class BatchVerify extends Test {
           rangeEnd = numWrites - 1;
         }
         count += rangeEnd - rangeStart + 1;
-        ranges.add(new Range(new Text(String.format("%010d", rangeStart)), new Text(String.format("%010d", rangeEnd))));
+        ranges.add(new Range(new Text(String.format("%010d", rangeStart)), new Text(String.format(
+            "%010d", rangeEnd))));
       }
 
       ranges = Range.mergeOverlapping(ranges);
@@ -95,7 +97,8 @@ public class BatchVerify extends Test {
       boolean done = false;
       for (Range r : ranges) {
         int start = Integer.parseInt(r.getStartKey().getRow().toString());
-        int end = Integer.parseInt(String.copyValueOf(r.getEndKey().getRow().toString().toCharArray(), 0, 10));
+        int end = Integer.parseInt(String.copyValueOf(r.getEndKey().getRow().toString()
+            .toCharArray(), 0, 10));
         for (int i = start; i <= end; i++) {
 
           if (done) {
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/sequential/Commit.java b/src/main/java/org/apache/accumulo/testing/randomwalk/sequential/Commit.java
index a31ecd8..316c650 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/sequential/Commit.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/sequential/Commit.java
@@ -29,7 +29,8 @@ public class Commit extends Test {
 
     env.getMultiTableBatchWriter().flush();
 
-    log.debug("Committed " + state.getLong("numWrites") + " writes.  Total writes: " + state.getLong("totalWrites"));
+    log.debug("Committed " + state.getLong("numWrites") + " writes.  Total writes: "
+        + state.getLong("totalWrites"));
     state.set("numWrites", Long.valueOf(0));
   }
 
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/sequential/MapRedVerifyTool.java b/src/main/java/org/apache/accumulo/testing/randomwalk/sequential/MapRedVerifyTool.java
index 7dde8e6..465e63f 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/sequential/MapRedVerifyTool.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/sequential/MapRedVerifyTool.java
@@ -21,8 +21,8 @@ import java.util.Iterator;
 import java.util.Properties;
 
 import org.apache.accumulo.core.client.Accumulo;
-import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
-import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
+import org.apache.accumulo.hadoop.mapreduce.AccumuloInputFormat;
+import org.apache.accumulo.hadoop.mapreduce.AccumuloOutputFormat;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Value;
@@ -44,16 +44,17 @@ public class MapRedVerifyTool extends Configured implements Tool {
     @Override
     public void map(Key row, Value data, Context output) throws IOException, InterruptedException {
       Integer num = Integer.valueOf(row.getRow().toString());
-      output.write(NullWritable.get(), new IntWritable(num.intValue()));
+      output.write(NullWritable.get(), new IntWritable(num));
     }
   }
 
   public static class SeqReduceClass extends Reducer<NullWritable,IntWritable,Text,Mutation> {
     @Override
-    public void reduce(NullWritable ignore, Iterable<IntWritable> values, Context output) throws IOException, InterruptedException {
+    public void reduce(NullWritable ignore, Iterable<IntWritable> values, Context output)
+        throws IOException, InterruptedException {
       Iterator<IntWritable> iterator = values.iterator();
 
-      if (iterator.hasNext() == false) {
+      if (!iterator.hasNext()) {
         return;
       }
 
@@ -70,7 +71,8 @@ public class MapRedVerifyTool extends Configured implements Tool {
       writeMutation(output, start, index);
     }
 
-    public void writeMutation(Context output, int start, int end) throws IOException, InterruptedException {
+    private void writeMutation(Context output, int start, int end) throws IOException,
+        InterruptedException {
       Mutation m = new Mutation(new Text(String.format("%010d", start)));
       m.put(new Text(String.format("%010d", end)), new Text(""), new Value(new byte[0]));
       output.write(null, m);
@@ -88,11 +90,10 @@ public class MapRedVerifyTool extends Configured implements Tool {
     }
 
     Properties props = Accumulo.newClientProperties().from(args[0]).build();
-    AccumuloInputFormat.setClientProperties(job, props);
-    AccumuloInputFormat.setInputTableName(job, args[1]);
 
-    AccumuloOutputFormat.setClientProperties(job, props);
-    AccumuloOutputFormat.setDefaultTableName(job, args[2]);
+    AccumuloInputFormat.configure().clientProperties(props).table(args[1]).store(job);
+    AccumuloOutputFormat.configure().clientProperties(props).defaultTable(args[2])
+        .createTables(true).store(job);
 
     job.setInputFormatClass(AccumuloInputFormat.class);
     job.setMapperClass(SeqMapClass.class);
@@ -101,9 +102,9 @@ public class MapRedVerifyTool extends Configured implements Tool {
 
     job.setReducerClass(SeqReduceClass.class);
     job.setNumReduceTasks(1);
+    job.getConfiguration().set("mapreduce.job.classloader", "true");
 
     job.setOutputFormatClass(AccumuloOutputFormat.class);
-    AccumuloOutputFormat.setCreateTables(job, true);
 
     job.waitForCompletion(true);
     return job.isSuccessful() ? 0 : 1;
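
    Note on the hunk above: this is the core of the "new MapReduce API" change. The input and
    output formats move from org.apache.accumulo.core.client.mapreduce to
    org.apache.accumulo.hadoop.mapreduce, job wiring goes through the fluent configure() builders,
    and the job is told to prefer its own classloader. A minimal sketch of that wiring in
    isolation; the properties file path, table names, and job name are placeholders, and the
    mapper/reducer setup is omitted:

        import java.util.Properties;

        import org.apache.accumulo.core.client.Accumulo;
        import org.apache.accumulo.hadoop.mapreduce.AccumuloInputFormat;
        import org.apache.accumulo.hadoop.mapreduce.AccumuloOutputFormat;
        import org.apache.hadoop.mapreduce.Job;

        public class NewMapReduceApiSketch {
          public static Job configureJob() throws Exception {
            Job job = Job.getInstance();
            job.setJobName("example-job");

            // Client settings come from a standard accumulo-client.properties file.
            Properties props = Accumulo.newClientProperties()
                .from("/path/to/accumulo-client.properties").build();

            // Input and output are described with builders and then stored on the job.
            AccumuloInputFormat.configure().clientProperties(props).table("input_table").store(job);
            AccumuloOutputFormat.configure().clientProperties(props).defaultTable("output_table")
                .createTables(true).store(job);

            job.setInputFormatClass(AccumuloInputFormat.class);
            job.setOutputFormatClass(AccumuloOutputFormat.class);

            // Run tasks with the job's classloader so the bundled Accumulo client jars are used.
            job.getConfiguration().set("mapreduce.job.classloader", "true");
            return job;
          }
        }
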
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/sequential/SequentialFixture.java b/src/main/java/org/apache/accumulo/testing/randomwalk/sequential/SequentialFixture.java
index d5551a2..f949cce 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/sequential/SequentialFixture.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/sequential/SequentialFixture.java
@@ -37,12 +37,14 @@ public class SequentialFixture extends Fixture {
 
     String hostname = InetAddress.getLocalHost().getHostName().replaceAll("[-.]", "_");
 
-    seqTableName = String.format("sequential_%s_%s_%d", hostname, env.getPid(), System.currentTimeMillis());
+    seqTableName = String.format("sequential_%s_%s_%d", hostname, env.getPid(),
+        System.currentTimeMillis());
     state.set("seqTableName", seqTableName);
 
     try {
       client.tableOperations().create(seqTableName);
-      log.debug("Created table " + seqTableName + " (id:" + client.tableOperations().tableIdMap().get(seqTableName) + ")");
+      log.debug("Created table " + seqTableName + " (id:"
+          + client.tableOperations().tableIdMap().get(seqTableName) + ")");
     } catch (TableExistsException e) {
       log.warn("Table " + seqTableName + " already exists!");
       throw e;
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/BulkInsert.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/BulkInsert.java
index 24212ff..ee59a4c 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/BulkInsert.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/BulkInsert.java
@@ -54,15 +54,17 @@ public class BulkInsert extends Test {
     SequenceFile.Writer writer;
 
     SeqfileBatchWriter(Configuration conf, FileSystem fs, String file) throws IOException {
-      writer = SequenceFile.createWriter(conf, SequenceFile.Writer.file(fs.makeQualified(new Path(file))), SequenceFile.Writer.keyClass(Key.class),
-          SequenceFile.Writer.valueClass(Value.class));
+      writer = SequenceFile.createWriter(conf,
+          SequenceFile.Writer.file(fs.makeQualified(new Path(file))),
+          SequenceFile.Writer.keyClass(Key.class), SequenceFile.Writer.valueClass(Value.class));
     }
 
     @Override
     public void addMutation(Mutation m) throws MutationsRejectedException {
       List<ColumnUpdate> updates = m.getUpdates();
       for (ColumnUpdate cu : updates) {
-        Key key = new Key(m.getRow(), cu.getColumnFamily(), cu.getColumnQualifier(), cu.getColumnVisibility(), Long.MAX_VALUE, false, false);
+        Key key = new Key(m.getRow(), cu.getColumnFamily(), cu.getColumnQualifier(),
+            cu.getColumnVisibility(), Long.MAX_VALUE, false, false);
         Value val = new Value(cu.getValue(), false);
 
         try {
@@ -119,7 +121,8 @@ public class BulkInsert extends Test {
     BatchWriter indexWriter = new SeqfileBatchWriter(conf, fs, rootDir + "/index.seq");
 
     for (int i = 0; i < numToInsert; i++) {
-      String docID = Insert.insertRandomDocument(nextDocID++, dataWriter, indexWriter, indexTableName, dataTableName, numPartitions, rand);
+      String docID = Insert.insertRandomDocument(nextDocID++, dataWriter, indexWriter,
+          indexTableName, dataTableName, numPartitions, rand);
       log.debug("Bulk inserting document " + docID);
     }
 
@@ -128,8 +131,10 @@ public class BulkInsert extends Test {
     dataWriter.close();
     indexWriter.close();
 
-    sort(state, env, fs, dataTableName, rootDir + "/data.seq", rootDir + "/data_bulk", rootDir + "/data_work", maxSplits);
-    sort(state, env, fs, indexTableName, rootDir + "/index.seq", rootDir + "/index_bulk", rootDir + "/index_work", maxSplits);
+    sort(state, env, fs, dataTableName, rootDir + "/data.seq", rootDir + "/data_bulk", rootDir
+        + "/data_work", maxSplits);
+    sort(state, env, fs, indexTableName, rootDir + "/index.seq", rootDir + "/index_bulk", rootDir
+        + "/index_work", maxSplits);
 
     bulkImport(fs, state, env, dataTableName, rootDir, "data");
     bulkImport(fs, state, env, indexTableName, rootDir, "index");
@@ -137,7 +142,8 @@ public class BulkInsert extends Test {
     fs.delete(new Path(rootDir), true);
   }
 
-  private void bulkImport(FileSystem fs, State state, RandWalkEnv env, String tableName, String rootDir, String prefix) throws Exception {
+  private void bulkImport(FileSystem fs, State state, RandWalkEnv env, String tableName,
+      String rootDir, String prefix) throws Exception {
     while (true) {
       String bulkDir = rootDir + "/" + prefix + "_bulk";
       String failDir = rootDir + "/" + prefix + "_failure";
@@ -162,10 +168,11 @@ public class BulkInsert extends Test {
     }
   }
 
-  private void sort(State state, RandWalkEnv env, FileSystem fs, String tableName, String seqFile, String outputDir, String workDir, int maxSplits)
-      throws Exception {
+  private void sort(State state, RandWalkEnv env, FileSystem fs, String tableName, String seqFile,
+      String outputDir, String workDir, int maxSplits) throws Exception {
 
-    PrintStream out = new PrintStream(new BufferedOutputStream(fs.create(new Path(workDir + "/splits.txt"))), false, UTF_8.name());
+    PrintStream out = new PrintStream(new BufferedOutputStream(fs.create(new Path(workDir
+        + "/splits.txt"))), false, UTF_8.name());
 
     AccumuloClient client = env.getAccumuloClient();
 
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/CloneIndex.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/CloneIndex.java
index 3b33d07..4d232b3 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/CloneIndex.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/CloneIndex.java
@@ -35,10 +35,14 @@ public class CloneIndex extends Test {
     long t1 = System.currentTimeMillis();
     env.getAccumuloClient().tableOperations().flush(indexTableName, null, null, true);
     long t2 = System.currentTimeMillis();
-    env.getAccumuloClient().tableOperations().clone(indexTableName, tmpIndexTableName, false, new HashMap<String,String>(), new HashSet<String>());
+    env.getAccumuloClient()
+        .tableOperations()
+        .clone(indexTableName, tmpIndexTableName, false, new HashMap<String,String>(),
+            new HashSet<String>());
     long t3 = System.currentTimeMillis();
 
-    log.debug("Cloned " + tmpIndexTableName + " from " + indexTableName + " flush: " + (t2 - t1) + "ms clone: " + (t3 - t2) + "ms");
+    log.debug("Cloned " + tmpIndexTableName + " from " + indexTableName + " flush: " + (t2 - t1)
+        + "ms clone: " + (t3 - t2) + "ms");
 
   }
 
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/CompactFilter.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/CompactFilter.java
index ac5fe92..1406acd 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/CompactFilter.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/CompactFilter.java
@@ -57,7 +57,8 @@ public class CompactFilter extends Test {
     documentFilters.add(is);
 
     long t1 = System.currentTimeMillis();
-    env.getAccumuloClient().tableOperations().compact(docTableName, null, null, documentFilters, true, true);
+    env.getAccumuloClient().tableOperations()
+        .compact(docTableName, null, null, documentFilters, true, true);
     long t2 = System.currentTimeMillis();
     long t3 = t2 - t1;
 
@@ -69,12 +70,15 @@ public class CompactFilter extends Test {
     indexFilters.add(is);
 
     t1 = System.currentTimeMillis();
-    env.getAccumuloClient().tableOperations().compact(indexTableName, null, null, indexFilters, true, true);
+    env.getAccumuloClient().tableOperations()
+        .compact(indexTableName, null, null, indexFilters, true, true);
     t2 = System.currentTimeMillis();
 
-    log.debug("Filtered documents using compaction iterators " + regex + " " + (t3) + " " + (t2 - t1));
+    log.debug("Filtered documents using compaction iterators " + regex + " " + (t3) + " "
+        + (t2 - t1));
 
-    BatchScanner bscanner = env.getAccumuloClient().createBatchScanner(docTableName, new Authorizations(), 10);
+    BatchScanner bscanner = env.getAccumuloClient().createBatchScanner(docTableName,
+        new Authorizations(), 10);
 
     List<Range> ranges = new ArrayList<>();
     for (int i = 0; i < 16; i++) {
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Delete.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Delete.java
index bf9b98f..2689075 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Delete.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Delete.java
@@ -43,7 +43,8 @@ public class Delete extends Test {
     String docID = entry.getKey().getRow().toString();
     String doc = entry.getValue().toString();
 
-    Insert.unindexDocument(env.getMultiTableBatchWriter().getBatchWriter(indexTableName), doc, docID, numPartitions);
+    Insert.unindexDocument(env.getMultiTableBatchWriter().getBatchWriter(indexTableName), doc,
+        docID, numPartitions);
 
     Mutation m = new Mutation(docID);
     m.putDelete("doc", "");
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/DeleteSomeDocs.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/DeleteSomeDocs.java
index 4611070..a0ee586 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/DeleteSomeDocs.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/DeleteSomeDocs.java
@@ -52,7 +52,8 @@ public class DeleteSomeDocs extends Test {
 
     String pattern = patterns.get(rand.nextInt(patterns.size()));
     BatchWriterConfig bwc = new BatchWriterConfig();
-    BatchDeleter ibd = env.getAccumuloClient().createBatchDeleter(indexTableName, Authorizations.EMPTY, 8, bwc);
+    BatchDeleter ibd = env.getAccumuloClient().createBatchDeleter(indexTableName,
+        Authorizations.EMPTY, 8, bwc);
     ibd.setRanges(Collections.singletonList(new Range()));
 
     IteratorSetting iterSettings = new IteratorSetting(100, RegExFilter.class);
@@ -64,7 +65,8 @@ public class DeleteSomeDocs extends Test {
 
     ibd.close();
 
-    BatchDeleter dbd = env.getAccumuloClient().createBatchDeleter(dataTableName, Authorizations.EMPTY, 8, bwc);
+    BatchDeleter dbd = env.getAccumuloClient().createBatchDeleter(dataTableName,
+        Authorizations.EMPTY, 8, bwc);
     dbd.setRanges(Collections.singletonList(new Range()));
 
     iterSettings = new IteratorSetting(100, RegExFilter.class);
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/DeleteWord.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/DeleteWord.java
index 8cced58..f1dd3b0 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/DeleteWord.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/DeleteWord.java
@@ -61,7 +61,8 @@ public class DeleteWord extends Test {
 
     if (documentsToDelete.size() > 0) {
       // use a batch scanner to fetch all documents
-      BatchScanner bscanner = env.getAccumuloClient().createBatchScanner(docTableName, Authorizations.EMPTY, 8);
+      BatchScanner bscanner = env.getAccumuloClient().createBatchScanner(docTableName,
+          Authorizations.EMPTY, 8);
       bscanner.setRanges(documentsToDelete);
 
       BatchWriter ibw = env.getMultiTableBatchWriter().getBatchWriter(indexTableName);
@@ -87,7 +88,8 @@ public class DeleteWord extends Test {
       env.getMultiTableBatchWriter().flush();
 
       if (count != documentsToDelete.size()) {
-        throw new Exception("Batch scanner did not return expected number of docs " + count + " " + documentsToDelete.size());
+        throw new Exception("Batch scanner did not return expected number of docs " + count + " "
+            + documentsToDelete.size());
       }
     }
 
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/ExportIndex.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/ExportIndex.java
index 588bbe0..f55b363 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/ExportIndex.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/ExportIndex.java
@@ -55,7 +55,8 @@ public class ExportIndex extends Test {
 
     // disable spits, so that splits can be compared later w/o worrying one
     // table splitting and the other not
-    env.getAccumuloClient().tableOperations().setProperty(indexTableName, Property.TABLE_SPLIT_THRESHOLD.getKey(), "20G");
+    env.getAccumuloClient().tableOperations()
+        .setProperty(indexTableName, Property.TABLE_SPLIT_THRESHOLD.getKey(), "20G");
 
     long t1 = System.currentTimeMillis();
 
@@ -69,7 +70,8 @@ public class ExportIndex extends Test {
     long t3 = System.currentTimeMillis();
 
     // copy files
-    BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(new Path(exportDir, "distcp.txt")), UTF_8));
+    BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(new Path(exportDir,
+        "distcp.txt")), UTF_8));
     String file = null;
     while ((file = reader.readLine()) != null) {
       Path src = new Path(file);
@@ -89,29 +91,35 @@ public class ExportIndex extends Test {
     fs.delete(new Path(exportDir), true);
     fs.delete(new Path(copyDir), true);
 
-    HashSet<Text> splits1 = new HashSet<>(env.getAccumuloClient().tableOperations().listSplits(indexTableName));
-    HashSet<Text> splits2 = new HashSet<>(env.getAccumuloClient().tableOperations().listSplits(tmpIndexTableName));
+    HashSet<Text> splits1 = new HashSet<>(env.getAccumuloClient().tableOperations()
+        .listSplits(indexTableName));
+    HashSet<Text> splits2 = new HashSet<>(env.getAccumuloClient().tableOperations()
+        .listSplits(tmpIndexTableName));
 
     if (!splits1.equals(splits2))
       throw new Exception("Splits not equals " + indexTableName + " " + tmpIndexTableName);
 
     HashMap<String,String> props1 = new HashMap<>();
-    for (Entry<String,String> entry : env.getAccumuloClient().tableOperations().getProperties(indexTableName))
+    for (Entry<String,String> entry : env.getAccumuloClient().tableOperations()
+        .getProperties(indexTableName))
       props1.put(entry.getKey(), entry.getValue());
 
     HashMap<String,String> props2 = new HashMap<>();
-    for (Entry<String,String> entry : env.getAccumuloClient().tableOperations().getProperties(tmpIndexTableName))
+    for (Entry<String,String> entry : env.getAccumuloClient().tableOperations()
+        .getProperties(tmpIndexTableName))
       props2.put(entry.getKey(), entry.getValue());
 
     if (!props1.equals(props2))
       throw new Exception("Props not equals " + indexTableName + " " + tmpIndexTableName);
 
     // unset the split threshold
-    env.getAccumuloClient().tableOperations().removeProperty(indexTableName, Property.TABLE_SPLIT_THRESHOLD.getKey());
-    env.getAccumuloClient().tableOperations().removeProperty(tmpIndexTableName, Property.TABLE_SPLIT_THRESHOLD.getKey());
+    env.getAccumuloClient().tableOperations()
+        .removeProperty(indexTableName, Property.TABLE_SPLIT_THRESHOLD.getKey());
+    env.getAccumuloClient().tableOperations()
+        .removeProperty(tmpIndexTableName, Property.TABLE_SPLIT_THRESHOLD.getKey());
 
-    log.debug("Imported " + tmpIndexTableName + " from " + indexTableName + " flush: " + (t2 - t1) + "ms export: " + (t3 - t2) + "ms copy:" + (t4 - t3)
-        + "ms import:" + (t5 - t4) + "ms");
+    log.debug("Imported " + tmpIndexTableName + " from " + indexTableName + " flush: " + (t2 - t1)
+        + "ms export: " + (t3 - t2) + "ms copy:" + (t4 - t3) + "ms import:" + (t5 - t4) + "ms");
 
   }
 
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Grep.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Grep.java
index bb77836..eb1dc08 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Grep.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Grep.java
@@ -53,7 +53,8 @@ public class Grep extends Test {
       words[i] = new Text(Insert.generateRandomWord(rand));
     }
 
-    BatchScanner bs = env.getAccumuloClient().createBatchScanner(indexTableName, Authorizations.EMPTY, 16);
+    BatchScanner bs = env.getAccumuloClient().createBatchScanner(indexTableName,
+        Authorizations.EMPTY, 16);
     IteratorSetting ii = new IteratorSetting(20, "ii", IntersectingIterator.class.getName());
     IntersectingIterator.setColumnFamilies(ii, words);
     bs.addScanIterator(ii);
@@ -71,7 +72,8 @@ public class Grep extends Test {
 
     for (int i = 0; i < words.length; i++) {
       IteratorSetting more = new IteratorSetting(20 + i, "ii" + i, RegExFilter.class);
-      RegExFilter.setRegexs(more, null, null, null, "(^|(.*\\s))" + words[i] + "($|(\\s.*))", false);
+      RegExFilter
+          .setRegexs(more, null, null, null, "(^|(.*\\s))" + words[i] + "($|(\\s.*))", false);
       bs.addScanIterator(more);
     }
 
@@ -86,11 +88,13 @@ public class Grep extends Test {
     bs.close();
 
     if (!documentsFoundInIndex.equals(documentsFoundByGrep)) {
-      throw new Exception("Set of documents found not equal for words " + Arrays.asList(words).toString() + " " + documentsFoundInIndex + " "
+      throw new Exception("Set of documents found not equal for words "
+          + Arrays.asList(words).toString() + " " + documentsFoundInIndex + " "
           + documentsFoundByGrep);
     }
 
-    log.debug("Grep and index agree " + Arrays.asList(words).toString() + " " + documentsFoundInIndex.size());
+    log.debug("Grep and index agree " + Arrays.asList(words).toString() + " "
+        + documentsFoundInIndex.size());
 
   }
 
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Insert.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Insert.java
index eb65286..27e8f19 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Insert.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Insert.java
@@ -47,15 +47,17 @@ public class Insert extends Test {
     BatchWriter dataWriter = env.getMultiTableBatchWriter().getBatchWriter(dataTableName);
     BatchWriter indexWriter = env.getMultiTableBatchWriter().getBatchWriter(indexTableName);
 
-    String docID = insertRandomDocument(nextDocID++, dataWriter, indexWriter, indexTableName, dataTableName, numPartitions, rand);
+    String docID = insertRandomDocument(nextDocID++, dataWriter, indexWriter, indexTableName,
+        dataTableName, numPartitions, rand);
 
     log.debug("Inserted document " + docID);
 
     state.set("nextDocID", Long.valueOf(nextDocID));
   }
 
-  static String insertRandomDocument(long did, BatchWriter dataWriter, BatchWriter indexWriter, String indexTableName, String dataTableName, int numPartitions,
-      Random rand) throws TableNotFoundException, Exception, AccumuloException, AccumuloSecurityException {
+  static String insertRandomDocument(long did, BatchWriter dataWriter, BatchWriter indexWriter,
+      String indexTableName, String dataTableName, int numPartitions, Random rand)
+      throws TableNotFoundException, Exception, AccumuloException, AccumuloSecurityException {
     String doc = createDocument(rand);
 
     String docID = new StringBuilder(String.format("%016x", did)).reverse().toString();
@@ -99,15 +101,18 @@ public class Insert extends Test {
     return String.format("%06x", Math.abs(partition));
   }
 
-  static void indexDocument(BatchWriter bw, String doc, String docId, int numPartitions) throws Exception {
+  static void indexDocument(BatchWriter bw, String doc, String docId, int numPartitions)
+      throws Exception {
     indexDocument(bw, doc, docId, numPartitions, false);
   }
 
-  static void unindexDocument(BatchWriter bw, String doc, String docId, int numPartitions) throws Exception {
+  static void unindexDocument(BatchWriter bw, String doc, String docId, int numPartitions)
+      throws Exception {
     indexDocument(bw, doc, docId, numPartitions, true);
   }
 
-  static void indexDocument(BatchWriter bw, String doc, String docId, int numPartitions, boolean delete) throws Exception {
+  static void indexDocument(BatchWriter bw, String doc, String docId, int numPartitions,
+      boolean delete) throws Exception {
 
     String[] tokens = doc.split("\\W+");
 
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Merge.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Merge.java
index 521a702..eaa9253 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Merge.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Merge.java
@@ -38,13 +38,15 @@ public class Merge extends Test {
     log.debug("merging " + indexTableName);
     env.getAccumuloClient().tableOperations().merge(indexTableName, null, null);
     org.apache.accumulo.core.util.Merge merge = new org.apache.accumulo.core.util.Merge();
-    merge.mergomatic((AccumuloClient) env.getAccumuloClient(), indexTableName, null, null, 256 * 1024 * 1024, true);
+    merge.mergomatic((AccumuloClient) env.getAccumuloClient(), indexTableName, null, null,
+        256 * 1024 * 1024, true);
     splits = env.getAccumuloClient().tableOperations().listSplits(indexTableName);
     if (splits.size() > splitSet.size()) {
       // throw an excpetion so that test will die an no further changes to
       // table will occur...
       // this way table is left as is for debugging.
-      throw new Exception("There are more tablets after a merge: " + splits.size() + " was " + splitSet.size());
+      throw new Exception("There are more tablets after a merge: " + splits.size() + " was "
+          + splitSet.size());
     }
   }
 
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Reindex.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Reindex.java
index b982bfd..5f321c5 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Reindex.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Reindex.java
@@ -44,7 +44,8 @@ public class Reindex extends Test {
     ShardFixture.createIndexTable(this.log, state, env, "_tmp", rand);
 
     Scanner scanner = env.getAccumuloClient().createScanner(docTableName, Authorizations.EMPTY);
-    BatchWriter tbw = env.getAccumuloClient().createBatchWriter(tmpIndexTableName, new BatchWriterConfig());
+    BatchWriter tbw = env.getAccumuloClient().createBatchWriter(tmpIndexTableName,
+        new BatchWriterConfig());
 
     int count = 0;
 
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Search.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Search.java
index 109f7ca..75bb392 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Search.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Search.java
@@ -69,7 +69,8 @@ public class Search extends Test {
 
     log.debug("Looking up terms " + searchTerms + " expect to find " + docID);
 
-    BatchScanner bs = env.getAccumuloClient().createBatchScanner(indexTableName, Authorizations.EMPTY, 10);
+    BatchScanner bs = env.getAccumuloClient().createBatchScanner(indexTableName,
+        Authorizations.EMPTY, 10);
     IteratorSetting ii = new IteratorSetting(20, "ii", IntersectingIterator.class);
     IntersectingIterator.setColumnFamilies(ii, columns);
     bs.addScanIterator(ii);
@@ -87,10 +88,12 @@ public class Search extends Test {
     bs.close();
 
     if (!sawDocID)
-      throw new Exception("Did not see doc " + docID + " in index.  terms:" + searchTerms + " " + indexTableName + " " + dataTableName);
+      throw new Exception("Did not see doc " + docID + " in index.  terms:" + searchTerms + " "
+          + indexTableName + " " + dataTableName);
   }
 
-  static Entry<Key,Value> findRandomDocument(State state, RandWalkEnv env, String dataTableName, Random rand) throws Exception {
+  static Entry<Key,Value> findRandomDocument(State state, RandWalkEnv env, String dataTableName,
+      Random rand) throws Exception {
     Scanner scanner = env.getAccumuloClient().createScanner(dataTableName, Authorizations.EMPTY);
     scanner.setBatchSize(1);
     scanner.setRange(new Range(Integer.toString(rand.nextInt(0xfffffff), 16), null));
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/ShardFixture.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/ShardFixture.java
index eda75e0..e451ef1 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/ShardFixture.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/ShardFixture.java
@@ -49,7 +49,8 @@ public class ShardFixture extends Fixture {
     return splits;
   }
 
-  static void createIndexTable(Logger log, State state, RandWalkEnv env, String suffix, Random rand) throws Exception {
+  static void createIndexTable(Logger log, State state, RandWalkEnv env, String suffix, Random rand)
+      throws Exception {
     AccumuloClient client = env.getAccumuloClient();
     String name = state.get("indexTableName") + suffix;
     int numPartitions = (Integer) state.get("numPartitions");
@@ -65,8 +66,10 @@ public class ShardFixture extends Fixture {
     log.info("Added " + splits.size() + " splits to " + name);
 
     if (enableCache) {
-      client.tableOperations().setProperty(name, Property.TABLE_INDEXCACHE_ENABLED.getKey(), "true");
-      client.tableOperations().setProperty(name, Property.TABLE_BLOCKCACHE_ENABLED.getKey(), "true");
+      client.tableOperations()
+          .setProperty(name, Property.TABLE_INDEXCACHE_ENABLED.getKey(), "true");
+      client.tableOperations()
+          .setProperty(name, Property.TABLE_BLOCKCACHE_ENABLED.getKey(), "true");
 
       log.info("Enabled caching for table " + name);
     }
@@ -81,8 +84,10 @@ public class ShardFixture extends Fixture {
 
     int numPartitions = rand.nextInt(90) + 10;
 
-    state.set("indexTableName", String.format("ST_index_%s_%s_%d", hostname, pid, System.currentTimeMillis()));
-    state.set("docTableName", String.format("ST_docs_%s_%s_%d", hostname, pid, System.currentTimeMillis()));
+    state.set("indexTableName",
+        String.format("ST_index_%s_%s_%d", hostname, pid, System.currentTimeMillis()));
+    state.set("docTableName",
+        String.format("ST_docs_%s_%s_%d", hostname, pid, System.currentTimeMillis()));
     state.set("numPartitions", Integer.valueOf(numPartitions));
     state.set("cacheIndex", rand.nextDouble() < .5);
     state.set("rand", rand);
@@ -104,7 +109,8 @@ public class ShardFixture extends Fixture {
     log.info("Added " + splits.size() + " splits to " + docTableName);
 
     if (rand.nextDouble() < .5) {
-      client.tableOperations().setProperty((String) state.get("docTableName"), Property.TABLE_BLOOM_ENABLED.getKey(), "true");
+      client.tableOperations().setProperty((String) state.get("docTableName"),
+          Property.TABLE_BLOOM_ENABLED.getKey(), "true");
       log.info("Enabled bloom filters for table " + (String) state.get("docTableName"));
     }
   }
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Split.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Split.java
index 965cd55..65b0fbf 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Split.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/Split.java
@@ -33,7 +33,8 @@ public class Split extends Test {
     int numPartitions = (Integer) state.get("numPartitions");
     Random rand = (Random) state.get("rand");
 
-    SortedSet<Text> splitSet = ShardFixture.genSplits(numPartitions, rand.nextInt(numPartitions) + 1, "%06x");
+    SortedSet<Text> splitSet = ShardFixture.genSplits(numPartitions,
+        rand.nextInt(numPartitions) + 1, "%06x");
     log.debug("adding splits " + indexTableName);
     env.getAccumuloClient().tableOperations().addSplits(indexTableName, splitSet);
   }
diff --git a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/VerifyIndex.java b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/VerifyIndex.java
index 96b840d..9b2741b 100644
--- a/src/main/java/org/apache/accumulo/testing/randomwalk/shard/VerifyIndex.java
+++ b/src/main/java/org/apache/accumulo/testing/randomwalk/shard/VerifyIndex.java
@@ -38,8 +38,10 @@ public class VerifyIndex extends Test {
     String tmpIndexTableName = indexTableName + "_tmp";
 
     // scan new and old index and verify identical
-    Scanner indexScanner1 = env.getAccumuloClient().createScanner(tmpIndexTableName, Authorizations.EMPTY);
-    Scanner indexScanner2 = env.getAccumuloClient().createScanner(indexTableName, Authorizations.EMPTY);
+    Scanner indexScanner1 = env.getAccumuloClient().createScanner(tmpIndexTableName,
+        Authorizations.EMPTY);
+    Scanner indexScanner2 = env.getAccumuloClient().createScanner(indexTableName,
+        Authorizations.EMPTY);
 
     Iterator<Entry<Key,Value>> iter = indexScanner2.iterator();
 
@@ -53,14 +55,16 @@ public class VerifyIndex extends Test {
       Key key2 = iter.next().getKey();
 
       if (!key1.equals(key2, PartialKey.ROW_COLFAM_COLQUAL))
-        throw new Exception("index rebuild mismatch " + key1 + " " + key2 + " " + indexTableName + " " + tmpIndexTableName);
+        throw new Exception("index rebuild mismatch " + key1 + " " + key2 + " " + indexTableName
+            + " " + tmpIndexTableName);
       count++;
       if (count % 1000 == 0)
         makingProgress();
     }
 
     if (iter.hasNext())
-      throw new Exception("index rebuild mismatch " + iter.next().getKey() + " " + tmpIndexTableName);
+      throw new Exception("index rebuild mismatch " + iter.next().getKey() + " "
+          + tmpIndexTableName);
 
     log.debug("Verified " + count + " index entries ");
 
diff --git a/src/main/java/org/apache/accumulo/testing/scalability/Ingest.java b/src/main/java/org/apache/accumulo/testing/scalability/Ingest.java
index 8191b38..aa5753e 100644
--- a/src/main/java/org/apache/accumulo/testing/scalability/Ingest.java
+++ b/src/main/java/org/apache/accumulo/testing/scalability/Ingest.java
@@ -78,8 +78,8 @@ public class Ingest extends ScaleTest {
     // create batch writer
     BatchWriter bw = null;
     try {
-      bw = client.createBatchWriter(tableName, new BatchWriterConfig().setMaxMemory(maxMemory).setMaxLatency(maxLatency, TimeUnit.MILLISECONDS)
-          .setMaxWriteThreads(maxWriteThreads));
+      bw = client.createBatchWriter(tableName, new BatchWriterConfig().setMaxMemory(maxMemory)
+          .setMaxLatency(maxLatency, TimeUnit.MILLISECONDS).setMaxWriteThreads(maxWriteThreads));
     } catch (TableNotFoundException e) {
       log.error("Table '" + tableName + "' not found.", e);
       System.exit(-1);
@@ -105,7 +105,8 @@ public class Ingest extends ScaleTest {
     while (count < numIngestEntries) {
       count++;
       long rowId = ContinuousIngest.genLong(minRow, maxRow, r);
-      Mutation m = ContinuousIngest.genMutation(rowId, r.nextInt(maxColF), r.nextInt(maxColQ), cv, ingestInstanceId.getBytes(UTF_8), count, null, false);
+      Mutation m = ContinuousIngest.genMutation(rowId, r.nextInt(maxColF), r.nextInt(maxColQ), cv,
+          ingestInstanceId.getBytes(UTF_8), count, null, false);
       totalBytes += m.numBytes();
       try {
         bw.addMutation(m);
diff --git a/src/main/java/org/apache/accumulo/testing/scalability/Run.java b/src/main/java/org/apache/accumulo/testing/scalability/Run.java
index 57c2af1..1df1d25 100644
--- a/src/main/java/org/apache/accumulo/testing/scalability/Run.java
+++ b/src/main/java/org/apache/accumulo/testing/scalability/Run.java
@@ -37,7 +37,8 @@ public class Run {
   static class Opts extends Help {
     @Parameter(names = "--testId", required = true)
     String testId;
-    @Parameter(names = "--action", required = true, description = "one of 'setup', 'teardown' or 'client'")
+    @Parameter(names = "--action", required = true,
+        description = "one of 'setup', 'teardown' or 'client'")
     String action;
     @Parameter(names = "--count", description = "number of tablet servers", required = true)
     int numTabletServers;
@@ -55,7 +56,8 @@ public class Run {
     fs = FileSystem.get(conf);
 
     fs.copyToLocalFile(new Path("/accumulo-scale/conf/site.conf"), new Path(sitePath));
-    fs.copyToLocalFile(new Path(String.format("/accumulo-scale/conf/%s.conf", opts.testId)), new Path(testPath));
+    fs.copyToLocalFile(new Path(String.format("/accumulo-scale/conf/%s.conf", opts.testId)),
+        new Path(testPath));
 
     // load configuration file properties
     Properties scaleProps = new Properties();
@@ -77,7 +79,8 @@ public class Run {
       log.error("Error loading config file.", e);
     }
 
-    ScaleTest test = (ScaleTest) Class.forName(String.format("org.apache.accumulo.test.scalability.%s", opts.testId)).newInstance();
+    ScaleTest test = (ScaleTest) Class.forName(
+        String.format("org.apache.accumulo.test.scalability.%s", opts.testId)).newInstance();
 
     test.init(scaleProps, testProps, opts.numTabletServers);
 
diff --git a/src/main/java/org/apache/accumulo/testing/scalability/ScaleTest.java b/src/main/java/org/apache/accumulo/testing/scalability/ScaleTest.java
index c806f7f..f36ba40 100644
--- a/src/main/java/org/apache/accumulo/testing/scalability/ScaleTest.java
+++ b/src/main/java/org/apache/accumulo/testing/scalability/ScaleTest.java
@@ -34,7 +34,8 @@ public abstract class ScaleTest {
   private int numTabletServers;
   private long startTime;
 
-  public void init(Properties scaleProps, Properties testProps, int numTabletServers) throws AccumuloException, AccumuloSecurityException {
+  public void init(Properties scaleProps, Properties testProps, int numTabletServers)
+      throws AccumuloException, AccumuloSecurityException {
 
     this.scaleProps = scaleProps;
     this.testProps = testProps;
@@ -47,7 +48,8 @@ public abstract class ScaleTest {
     String password = this.scaleProps.getProperty("PASSWORD");
     System.out.println(password);
 
-    client = Accumulo.newClient().to(instanceName, zookeepers).as(user, new PasswordToken(password)).build();
+    client = Accumulo.newClient().to(instanceName, zookeepers)
+        .as(user, new PasswordToken(password)).build();
   }
 
   protected void startTimer() {
diff --git a/src/main/java/org/apache/accumulo/testing/stress/RandomMutations.java b/src/main/java/org/apache/accumulo/testing/stress/RandomMutations.java
index 5effbf9..522a457 100644
--- a/src/main/java/org/apache/accumulo/testing/stress/RandomMutations.java
+++ b/src/main/java/org/apache/accumulo/testing/stress/RandomMutations.java
@@ -25,14 +25,16 @@ public class RandomMutations extends Stream<Mutation> {
   private byte[] current_row;
   private int cells_remaining_in_row;
 
-  public RandomMutations(RandomByteArrays rows, RandomByteArrays column_families, RandomByteArrays column_qualifiers, RandomByteArrays values,
-      RandomWithinRange row_widths, int max_cells_per_mutation) {
+  public RandomMutations(RandomByteArrays rows, RandomByteArrays column_families,
+      RandomByteArrays column_qualifiers, RandomByteArrays values, RandomWithinRange row_widths,
+      int max_cells_per_mutation) {
     this.rows = rows;
     this.column_families = column_families;
     this.column_qualifiers = column_qualifiers;
     this.values = values;
     this.row_widths = row_widths;
-    this.max_cells_per_mutation = (max_cells_per_mutation > 0 ? max_cells_per_mutation : Integer.MAX_VALUE);
+    this.max_cells_per_mutation = (max_cells_per_mutation > 0 ? max_cells_per_mutation
+        : Integer.MAX_VALUE);
 
     current_row = null;
     cells_remaining_in_row = 0;
diff --git a/src/main/java/org/apache/accumulo/testing/stress/Scan.java b/src/main/java/org/apache/accumulo/testing/stress/Scan.java
index b1a5676..0a657e7 100644
--- a/src/main/java/org/apache/accumulo/testing/stress/Scan.java
+++ b/src/main/java/org/apache/accumulo/testing/stress/Scan.java
@@ -38,28 +38,31 @@ public class Scan {
     ScanOpts opts = new ScanOpts();
     opts.parseArgs(Scan.class.getName(), args);
 
-    AccumuloClient client = opts.getClient();
-    Scanner scanner = client.createScanner(opts.getTableName(), new Authorizations());
-
-    if (opts.isolate) {
-      scanner.enableIsolation();
-    }
-
-    Random tablet_index_generator = new Random(opts.scan_seed);
-
-    LoopControl scanning_condition = opts.continuous ? new ContinuousLoopControl() : new IterativeLoopControl(opts.scan_iterations);
-
-    while (scanning_condition.keepScanning()) {
-      Range range = pickRange(client.tableOperations(), opts.getTableName(), tablet_index_generator);
-      scanner.setRange(range);
-      if (opts.batch_size > 0) {
-        scanner.setBatchSize(opts.batch_size);
+    try (AccumuloClient client = opts.createClient();
+        Scanner scanner = client.createScanner(opts.getTableName(), new Authorizations())) {
+      if (opts.isolate) {
+        scanner.enableIsolation();
       }
-      try {
-        consume(scanner);
-      } catch (Exception e) {
-        System.err.println(String.format("Exception while scanning range %s. Check the state of Accumulo for errors.", range));
-        throw e;
+
+      Random tablet_index_generator = new Random(opts.scan_seed);
+
+      LoopControl scanning_condition = opts.continuous ? new ContinuousLoopControl()
+          : new IterativeLoopControl(opts.scan_iterations);
+
+      while (scanning_condition.keepScanning()) {
+        Range range = pickRange(client.tableOperations(), opts.getTableName(),
+            tablet_index_generator);
+        scanner.setRange(range);
+        if (opts.batch_size > 0) {
+          scanner.setBatchSize(opts.batch_size);
+        }
+        try {
+          consume(scanner);
+        } catch (Exception e) {
+          System.err.println(String.format(
+              "Exception while scanning range %s. Check the state of Accumulo for errors.", range));
+          throw e;
+        }
       }
     }
   }
@@ -71,7 +74,8 @@ public class Scan {
     }
   }
 
-  public static Range pickRange(TableOperations tops, String table, Random r) throws TableNotFoundException, AccumuloSecurityException, AccumuloException {
+  public static Range pickRange(TableOperations tops, String table, Random r)
+      throws TableNotFoundException, AccumuloSecurityException, AccumuloException {
     ArrayList<Text> splits = Lists.newArrayList(tops.listSplits(table));
     if (splits.isEmpty()) {
       return new Range();
@@ -84,7 +88,8 @@ public class Scan {
   }
 
   /*
-   * These interfaces + implementations are used to determine how many times the scanner should look up a random tablet and scan it.
+   * These interfaces + implementations are used to determine how many times the scanner should look
+   * up a random tablet and scan it.
    */
   static interface LoopControl {
     public boolean keepScanning();
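
    Note on the hunk above: this is the "use createClient and close client" part of the commit.
    With the 2.0 client API the AccumuloClient is AutoCloseable, so the client and the scanner
    opened from it both live in a try-with-resources block instead of being left open. A minimal
    sketch of the same pattern outside the test harness; the instance name, ZooKeeper host,
    credentials, and table name are placeholders:

        import java.util.Map.Entry;

        import org.apache.accumulo.core.client.Accumulo;
        import org.apache.accumulo.core.client.AccumuloClient;
        import org.apache.accumulo.core.client.Scanner;
        import org.apache.accumulo.core.client.security.tokens.PasswordToken;
        import org.apache.accumulo.core.data.Key;
        import org.apache.accumulo.core.data.Value;
        import org.apache.accumulo.core.security.Authorizations;

        public class ScanClientSketch {
          public static void main(String[] args) throws Exception {
            // Both the client and the scanner are closed automatically, even if the scan throws.
            try (AccumuloClient client = Accumulo.newClient().to("test-instance", "zkhost:2181")
                .as("test_user", new PasswordToken("test_password")).build();
                Scanner scanner = client.createScanner("test_table", new Authorizations())) {
              for (Entry<Key,Value> entry : scanner) {
                System.out.println(entry.getKey() + " -> " + entry.getValue());
              }
            }
          }
        }
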
diff --git a/src/main/java/org/apache/accumulo/testing/stress/ScanOpts.java b/src/main/java/org/apache/accumulo/testing/stress/ScanOpts.java
index 9fe4741..ff6fdb4 100644
--- a/src/main/java/org/apache/accumulo/testing/stress/ScanOpts.java
+++ b/src/main/java/org/apache/accumulo/testing/stress/ScanOpts.java
@@ -21,13 +21,15 @@ import org.apache.accumulo.core.cli.ClientOnDefaultTable;
 import com.beust.jcommander.Parameter;
 
 class ScanOpts extends ClientOnDefaultTable {
-  @Parameter(names = "--isolate", description = "true to turn on scan isolation, false to turn off. default is false.")
+  @Parameter(names = "--isolate",
+      description = "true to turn on scan isolation, false to turn off. default is false.")
   boolean isolate = false;
 
   @Parameter(names = "--num-iterations", description = "number of scan iterations")
   int scan_iterations = 1024;
 
-  @Parameter(names = "--continuous", description = "continuously scan the table. note that this overrides --num-iterations")
+  @Parameter(names = "--continuous",
+      description = "continuously scan the table. note that this overrides --num-iterations")
   boolean continuous;
 
   @Parameter(names = "--scan-seed", description = "seed for randomly choosing tablets to scan")
diff --git a/src/main/java/org/apache/accumulo/testing/stress/Stream.java b/src/main/java/org/apache/accumulo/testing/stress/Stream.java
index a78de10..a8de13f 100644
--- a/src/main/java/org/apache/accumulo/testing/stress/Stream.java
+++ b/src/main/java/org/apache/accumulo/testing/stress/Stream.java
@@ -19,7 +19,8 @@ package org.apache.accumulo.testing.stress;
 import java.util.Iterator;
 
 /**
- * Base class to model an infinite stream of data. A stream implements an iterator whose {{@link #hasNext()} method will always return true.
+ * Base class to model an infinite stream of data. A stream implements an iterator whose {
+ * {@link #hasNext()} method will always return true.
  *
  */
 public abstract class Stream<T> implements Iterator<T> {
diff --git a/src/main/java/org/apache/accumulo/testing/stress/Write.java b/src/main/java/org/apache/accumulo/testing/stress/Write.java
index 66fb496..0153320 100644
--- a/src/main/java/org/apache/accumulo/testing/stress/Write.java
+++ b/src/main/java/org/apache/accumulo/testing/stress/Write.java
@@ -30,47 +30,50 @@ public class Write {
 
     opts.check();
 
-    AccumuloClient c = opts.getClient();
+    try (AccumuloClient c = opts.createClient()) {
 
-    if (opts.clear_table && c.tableOperations().exists(opts.getTableName())) {
-      try {
-        c.tableOperations().delete(opts.getTableName());
-      } catch (TableNotFoundException e) {
-        System.err.println("Couldn't delete the table because it doesn't exist any more.");
+      if (opts.clear_table && c.tableOperations().exists(opts.getTableName())) {
+        try {
+          c.tableOperations().delete(opts.getTableName());
+        } catch (TableNotFoundException e) {
+          System.err.println("Couldn't delete the table because it doesn't exist any more.");
+        }
       }
-    }
 
-    if (!c.tableOperations().exists(opts.getTableName())) {
-      try {
-        c.tableOperations().create(opts.getTableName());
-      } catch (TableExistsException e) {
-        System.err.println("Couldn't create table ourselves, but that's ok. Continuing.");
+      if (!c.tableOperations().exists(opts.getTableName())) {
+        try {
+          c.tableOperations().create(opts.getTableName());
+        } catch (TableExistsException e) {
+          System.err.println("Couldn't create table ourselves, but that's ok. Continuing.");
+        }
       }
-    }
 
-    long writeDelay = opts.write_delay;
-    if (writeDelay < 0) {
-      writeDelay = 0;
-    }
+      long writeDelay = opts.write_delay;
+      if (writeDelay < 0) {
+        writeDelay = 0;
+      }
 
-    DataWriter dw = new DataWriter(c.createBatchWriter(opts.getTableName(), batch_writer_opts.getBatchWriterConfig()), new RandomMutations(
-    // rows
-        new RandomByteArrays(new RandomWithinRange(opts.row_seed, opts.rowMin(), opts.rowMax())),
-        // cfs
-        new RandomByteArrays(new RandomWithinRange(opts.cf_seed, opts.cfMin(), opts.cfMax())),
-        // cqs
-        new RandomByteArrays(new RandomWithinRange(opts.cq_seed, opts.cqMin(), opts.cqMax())),
-        // vals
-        new RandomByteArrays(new RandomWithinRange(opts.value_seed, opts.valueMin(), opts.valueMax())),
-        // number of cells per row
-        new RandomWithinRange(opts.row_width_seed, opts.rowWidthMin(), opts.rowWidthMax()),
-        // max cells per mutation
-        opts.max_cells_per_mutation));
+      DataWriter dw = new DataWriter(c.createBatchWriter(opts.getTableName(),
+          batch_writer_opts.getBatchWriterConfig()), new RandomMutations(
+      // rows
+          new RandomByteArrays(new RandomWithinRange(opts.row_seed, opts.rowMin(), opts.rowMax())),
+          // cfs
+          new RandomByteArrays(new RandomWithinRange(opts.cf_seed, opts.cfMin(), opts.cfMax())),
+          // cqs
+          new RandomByteArrays(new RandomWithinRange(opts.cq_seed, opts.cqMin(), opts.cqMax())),
+          // vals
+          new RandomByteArrays(new RandomWithinRange(opts.value_seed, opts.valueMin(),
+              opts.valueMax())),
+          // number of cells per row
+          new RandomWithinRange(opts.row_width_seed, opts.rowWidthMin(), opts.rowWidthMax()),
+          // max cells per mutation
+          opts.max_cells_per_mutation));
 
-    while (true) {
-      dw.next();
-      if (writeDelay > 0) {
-        Thread.sleep(writeDelay);
+      while (true) {
+        dw.next();
+        if (writeDelay > 0) {
+          Thread.sleep(writeDelay);
+        }
       }
     }
   }
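
    Note on the hunk above: Write follows the same createClient pattern, with the client held in a
    try-with-resources block for the life of the ingest loop. A smaller sketch of pairing the
    client with a BatchWriter in one such block so both are flushed and closed on exit; the
    connection settings are the same placeholders as above, and the table name matches the stress
    test default:

        import org.apache.accumulo.core.client.Accumulo;
        import org.apache.accumulo.core.client.AccumuloClient;
        import org.apache.accumulo.core.client.BatchWriter;
        import org.apache.accumulo.core.client.BatchWriterConfig;
        import org.apache.accumulo.core.client.security.tokens.PasswordToken;
        import org.apache.accumulo.core.data.Mutation;

        public class WriteClientSketch {
          public static void main(String[] args) throws Exception {
            try (AccumuloClient client = Accumulo.newClient().to("test-instance", "zkhost:2181")
                .as("test_user", new PasswordToken("test_password")).build();
                BatchWriter bw = client.createBatchWriter("stress_test", new BatchWriterConfig())) {
              Mutation m = new Mutation("row_0001");
              m.put("cf", "cq", "value");
              bw.addMutation(m); // buffered; flushed when the writer closes
            }
          }
        }
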
diff --git a/src/main/java/org/apache/accumulo/testing/stress/WriteOptions.java b/src/main/java/org/apache/accumulo/testing/stress/WriteOptions.java
index 83d975f..90821b1 100644
--- a/src/main/java/org/apache/accumulo/testing/stress/WriteOptions.java
+++ b/src/main/java/org/apache/accumulo/testing/stress/WriteOptions.java
@@ -24,34 +24,44 @@ class WriteOptions extends ClientOnDefaultTable {
   static final String DEFAULT_TABLE = "stress_test";
   static final int DEFAULT_MIN = 1, DEFAULT_MAX = 128, DEFAULT_SPREAD = DEFAULT_MAX - DEFAULT_MIN;
 
-  @Parameter(validateValueWith = IntArgValidator.class, names = "--min-row-size", description = "minimum row size")
+  @Parameter(validateValueWith = IntArgValidator.class, names = "--min-row-size",
+      description = "minimum row size")
   Integer row_min;
 
-  @Parameter(validateValueWith = IntArgValidator.class, names = "--max-row-size", description = "maximum row size")
+  @Parameter(validateValueWith = IntArgValidator.class, names = "--max-row-size",
+      description = "maximum row size")
   Integer row_max;
 
-  @Parameter(validateValueWith = IntArgValidator.class, names = "--min-cf-size", description = "minimum column family size")
+  @Parameter(validateValueWith = IntArgValidator.class, names = "--min-cf-size",
+      description = "minimum column family size")
   Integer cf_min;
 
-  @Parameter(validateValueWith = IntArgValidator.class, names = "--max-cf-size", description = "maximum column family size")
+  @Parameter(validateValueWith = IntArgValidator.class, names = "--max-cf-size",
+      description = "maximum column family size")
   Integer cf_max;
 
-  @Parameter(validateValueWith = IntArgValidator.class, names = "--min-cq-size", description = "minimum column qualifier size")
+  @Parameter(validateValueWith = IntArgValidator.class, names = "--min-cq-size",
+      description = "minimum column qualifier size")
   Integer cq_min;
 
-  @Parameter(validateValueWith = IntArgValidator.class, names = "--max-cq-size", description = "maximum column qualifier size")
+  @Parameter(validateValueWith = IntArgValidator.class, names = "--max-cq-size",
+      description = "maximum column qualifier size")
   Integer cq_max;
 
-  @Parameter(validateValueWith = IntArgValidator.class, names = "--min-value-size", description = "minimum value size")
+  @Parameter(validateValueWith = IntArgValidator.class, names = "--min-value-size",
+      description = "minimum value size")
   Integer value_min;
 
-  @Parameter(validateValueWith = IntArgValidator.class, names = "--max-value-size", description = "maximum value size")
+  @Parameter(validateValueWith = IntArgValidator.class, names = "--max-value-size",
+      description = "maximum value size")
   Integer value_max;
 
-  @Parameter(validateValueWith = IntArgValidator.class, names = "--min-row-width", description = "minimum row width")
+  @Parameter(validateValueWith = IntArgValidator.class, names = "--min-row-width",
+      description = "minimum row width")
   Integer row_width_min;
 
-  @Parameter(validateValueWith = IntArgValidator.class, names = "--max-row-width", description = "maximum row width")
+  @Parameter(validateValueWith = IntArgValidator.class, names = "--max-row-width",
+      description = "maximum row width")
   Integer row_width_max;
 
   @Parameter(names = "--clear-table", description = "clears the table before ingesting")
@@ -69,10 +79,12 @@ class WriteOptions extends ClientOnDefaultTable {
   @Parameter(names = "--value-seed", description = "seed for generating values")
   int value_seed = 99;
 
-  @Parameter(names = "--row-width-seed", description = "seed for generating the number of cells within a row (a row's \"width\")")
+  @Parameter(names = "--row-width-seed",
+      description = "seed for generating the number of cells within a row (a row's \"width\")")
   int row_width_seed = 444;
 
-  @Parameter(names = "--max-cells-per-mutation", description = "maximum number of cells per mutation; non-positive value implies no limit")
+  @Parameter(names = "--max-cells-per-mutation",
+      description = "maximum number of cells per mutation; non-positive value implies no limit")
   int max_cells_per_mutation = -1;
 
   @Parameter(names = "--write-delay", description = "milliseconds to wait between writes")
@@ -117,12 +129,17 @@ class WriteOptions extends ClientOnDefaultTable {
 
     if (min_ref == null && max_ref != null) {
       // we don't support just specifying a max yet
-      throw new IllegalArgumentException(String.format("[%s] Maximum value supplied, but no minimum. Must supply a minimum with a maximum value.", label));
+      throw new IllegalArgumentException(
+          String
+              .format(
+                  "[%s] Maximum value supplied, but no minimum. Must supply a minimum with a maximum value.",
+                  label));
     } else if (min_ref != null && max_ref != null) {
       // if a user supplied lower and upper bounds, we need to verify
       // that min <= max
       if (min_ref.compareTo(max_ref) > 0) {
-        throw new IllegalArgumentException(String.format("[%s] Min value (%d) is greater than max value (%d)", label, min_ref, max_ref));
+        throw new IllegalArgumentException(String.format(
+            "[%s] Min value (%d) is greater than max value (%d)", label, min_ref, max_ref));
       }
     }
   }
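
WriteOptions exposes its size bounds as JCommander @Parameter options, each checked by IntArgValidator at parse time. A rough sketch of a validator of that shape (a hypothetical PositiveIntValidator; the project's IntArgValidator may enforce different rules) would be:

    import com.beust.jcommander.IValueValidator;
    import com.beust.jcommander.ParameterException;

    // Hypothetical validator in the spirit of the IntArgValidator referenced above;
    // rejects missing or non-positive size values when the options are parsed.
    public class PositiveIntValidator implements IValueValidator<Integer> {
      @Override
      public void validate(String name, Integer value) throws ParameterException {
        if (value == null || value <= 0) {
          throw new ParameterException(name + " must be a positive integer, got " + value);
        }
      }
    }
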
diff --git a/src/test/java/org/apache/accumulo/testing/randomwalk/FrameworkTest.java b/src/test/java/org/apache/accumulo/testing/randomwalk/FrameworkTest.java
index a16ac4b..de78794 100644
--- a/src/test/java/org/apache/accumulo/testing/randomwalk/FrameworkTest.java
+++ b/src/test/java/org/apache/accumulo/testing/randomwalk/FrameworkTest.java
@@ -39,7 +39,8 @@ public class FrameworkTest {
   // Need to use fully qualified name here because of conflict with
   // org.apache.accumulo.testing.randomwalk.Test
   @org.junit.Test
-  public void testXML() throws SAXException, URISyntaxException, ParserConfigurationException, IOException {
+  public void testXML() throws SAXException, URISyntaxException, ParserConfigurationException,
+      IOException {
     SchemaFactory sf = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
     Schema moduleSchema = sf.newSchema(getFile("/randomwalk/module.xsd"));
 
@@ -49,7 +50,8 @@ public class FrameworkTest {
     DocumentBuilder docbuilder = dbf.newDocumentBuilder();
     Document document = docbuilder.parse(getFile("/randomwalk/modules/unit/Basic.xml"));
 
-    assertNotEquals("Parsing randomwalk xml should result in nodes.", 0, document.getChildNodes().getLength());
+    assertNotEquals("Parsing randomwalk xml should result in nodes.", 0, document.getChildNodes()
+        .getLength());
   }
 
   private File getFile(String resource) throws URISyntaxException {
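
The test above compiles the randomwalk module schema and parses a module file with a namespace-aware DocumentBuilder. If one also wanted to validate the parsed DOM against that schema, standard JAXP (not part of this change) would be along these lines:

    import java.io.IOException;
    import javax.xml.transform.dom.DOMSource;
    import javax.xml.validation.Schema;
    import javax.xml.validation.Validator;
    import org.w3c.dom.Document;
    import org.xml.sax.SAXException;

    // Standard JAXP validation of an already-parsed document against a Schema;
    // validate() throws SAXException if the document does not conform.
    static void validateModule(Schema moduleSchema, Document document)
        throws SAXException, IOException {
      Validator validator = moduleSchema.newValidator();
      validator.validate(new DOMSource(document));
    }
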
diff --git a/src/test/java/org/apache/accumulo/testing/randomwalk/ReplicationRandomWalkIT.java b/src/test/java/org/apache/accumulo/testing/randomwalk/ReplicationRandomWalkIT.java
index 2d61f6d..793ffaa 100644
--- a/src/test/java/org/apache/accumulo/testing/randomwalk/ReplicationRandomWalkIT.java
+++ b/src/test/java/org/apache/accumulo/testing/randomwalk/ReplicationRandomWalkIT.java
@@ -42,7 +42,7 @@ public class ReplicationRandomWalkIT extends ConfigurableMacBase {
     RandWalkEnv env = EasyMock.createMock(RandWalkEnv.class);
     EasyMock.expect(env.getAccumuloUserName()).andReturn("root").anyTimes();
     EasyMock.expect(env.getAccumuloPassword()).andReturn(ROOT_PASSWORD).anyTimes();
-    EasyMock.expect(env.getAccumuloClient()).andReturn(this.getClient()).anyTimes();
+    EasyMock.expect(env.getAccumuloClient()).andReturn(this.createClient()).anyTimes();
     EasyMock.replay(env);
 
     r.visit(null, env, null);
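
The mocked environment above now returns a client obtained from createClient(). In the Accumulo 2.0 client API an AccumuloClient is AutoCloseable, so outside of tests it is typically built and closed along these lines (instance name, ZooKeeper address, and credentials below are placeholders):

    import org.apache.accumulo.core.client.Accumulo;
    import org.apache.accumulo.core.client.AccumuloClient;

    public class ClientSketch {
      public static void main(String[] args) throws Exception {
        // Placeholders only: point these at a real instance before running.
        try (AccumuloClient client = Accumulo.newClient()
            .to("test-instance", "localhost:2181")
            .as("root", "secret").build()) {
          // try-with-resources ensures the client is closed when done.
          client.tableOperations().list().forEach(System.out::println);
        }
      }
    }
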

