accumulo-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ctubb...@apache.org
Subject [04/66] [abbrv] accumulo git commit: ACCUMULO-3451 Format master branch (1.7.0-SNAPSHOT)
Date Fri, 09 Jan 2015 02:44:08 GMT
http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/security/TableOp.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/security/TableOp.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/security/TableOp.java
index 2499af2..2612fc9 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/security/TableOp.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/security/TableOp.java
@@ -53,11 +53,11 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 
 public class TableOp extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
-    Connector conn = env.getInstance().getConnector(WalkingSecurity.get(state,env).getTabUserName(), WalkingSecurity.get(state,env).getTabToken());
-    
+    Connector conn = env.getInstance().getConnector(WalkingSecurity.get(state, env).getTabUserName(), WalkingSecurity.get(state, env).getTabToken());
+
     String action = props.getProperty("action", "_random");
     TablePermission tp;
     if ("_random".equalsIgnoreCase(action)) {
@@ -66,18 +66,18 @@ public class TableOp extends Test {
     } else {
       tp = TablePermission.valueOf(action);
     }
-    
-    boolean tableExists = WalkingSecurity.get(state,env).getTableExists();
-    String tableName = WalkingSecurity.get(state,env).getTableName();
-    String namespaceName = WalkingSecurity.get(state,env).getNamespaceName();
-    
+
+    boolean tableExists = WalkingSecurity.get(state, env).getTableExists();
+    String tableName = WalkingSecurity.get(state, env).getTableName();
+    String namespaceName = WalkingSecurity.get(state, env).getNamespaceName();
+
     switch (tp) {
       case READ: {
-        boolean canRead = WalkingSecurity.get(state,env).canScan(WalkingSecurity.get(state,env).getTabCredentials(), tableName, namespaceName);
-        Authorizations auths = WalkingSecurity.get(state,env).getUserAuthorizations(WalkingSecurity.get(state,env).getTabCredentials());
-        boolean ambiguousZone = WalkingSecurity.get(state,env).inAmbiguousZone(conn.whoami(), tp);
-        boolean ambiguousAuths = WalkingSecurity.get(state,env).ambiguousAuthorizations(conn.whoami());
-        
+        boolean canRead = WalkingSecurity.get(state, env).canScan(WalkingSecurity.get(state, env).getTabCredentials(), tableName, namespaceName);
+        Authorizations auths = WalkingSecurity.get(state, env).getUserAuthorizations(WalkingSecurity.get(state, env).getTabCredentials());
+        boolean ambiguousZone = WalkingSecurity.get(state, env).inAmbiguousZone(conn.whoami(), tp);
+        boolean ambiguousAuths = WalkingSecurity.get(state, env).ambiguousAuthorizations(conn.whoami());
+
         Scanner scan = null;
         try {
           scan = conn.createScanner(tableName, conn.securityOperations().getUserAuthorizations(conn.whoami()));
@@ -92,7 +92,7 @@ public class TableOp extends Test {
           }
           if (!canRead && !ambiguousZone)
             throw new AccumuloException("Was able to read when I shouldn't have had the perm with connection user " + conn.whoami() + " table " + tableName);
-          for (Entry<String,Integer> entry : WalkingSecurity.get(state,env).getAuthsMap().entrySet()) {
+          for (Entry<String,Integer> entry : WalkingSecurity.get(state, env).getAuthsMap().entrySet()) {
             if (auths.contains(entry.getKey().getBytes(UTF_8)))
               seen = seen - entry.getValue();
           }
@@ -131,25 +131,25 @@ public class TableOp extends Test {
             else
               throw new AccumuloException("Mismatched authorizations! ", re.getCause());
           }
-          
+
           throw new AccumuloException("Unexpected exception!", re);
         } finally {
           if (scan != null) {
             scan.close();
             scan = null;
           }
-          
+
         }
-        
+
         break;
       }
       case WRITE:
-        boolean canWrite = WalkingSecurity.get(state,env).canWrite(WalkingSecurity.get(state,env).getTabCredentials(), tableName, namespaceName);
-        boolean ambiguousZone = WalkingSecurity.get(state,env).inAmbiguousZone(conn.whoami(), tp);
-        
-        String key = WalkingSecurity.get(state,env).getLastKey() + "1";
+        boolean canWrite = WalkingSecurity.get(state, env).canWrite(WalkingSecurity.get(state, env).getTabCredentials(), tableName, namespaceName);
+        boolean ambiguousZone = WalkingSecurity.get(state, env).inAmbiguousZone(conn.whoami(), tp);
+
+        String key = WalkingSecurity.get(state, env).getLastKey() + "1";
         Mutation m = new Mutation(new Text(key));
-        for (String s : WalkingSecurity.get(state,env).getAuthsArray()) {
+        for (String s : WalkingSecurity.get(state, env).getAuthsArray()) {
           m.put(new Text(), new Text(), new ColumnVisibility(s), new Value("value".getBytes(UTF_8)));
         }
         BatchWriter writer = null;
@@ -170,7 +170,7 @@ public class TableOp extends Test {
             // For now, just wait a second and go again if they can write!
             if (!canWrite)
               return;
-            
+
             if (ambiguousZone) {
               Thread.sleep(1000);
               try {
@@ -184,8 +184,8 @@ public class TableOp extends Test {
             }
           }
           if (works)
-            for (String s : WalkingSecurity.get(state,env).getAuthsArray())
-              WalkingSecurity.get(state,env).increaseAuthMap(s, 1);
+            for (String s : WalkingSecurity.get(state, env).getAuthsArray())
+              WalkingSecurity.get(state, env).increaseAuthMap(s, 1);
         } finally {
           if (writer != null) {
             writer.close();
@@ -194,15 +194,15 @@ public class TableOp extends Test {
         }
         break;
       case BULK_IMPORT:
-        key = WalkingSecurity.get(state,env).getLastKey() + "1";
+        key = WalkingSecurity.get(state, env).getLastKey() + "1";
         SortedSet<Key> keys = new TreeSet<Key>();
-        for (String s : WalkingSecurity.get(state,env).getAuthsArray()) {
+        for (String s : WalkingSecurity.get(state, env).getAuthsArray()) {
           Key k = new Key(key, "", "", s);
           keys.add(k);
         }
         Path dir = new Path("/tmp", "bulk_" + UUID.randomUUID().toString());
         Path fail = new Path(dir.toString() + "_fail");
-        FileSystem fs = WalkingSecurity.get(state,env).getFs();
+        FileSystem fs = WalkingSecurity.get(state, env).getFs();
         FileSKVWriter f = FileOperations.getInstance().openWriter(dir + "/securityBulk." + RFile.EXTENSION, fs, fs.getConf(),
             AccumuloConfiguration.getDefaultConfiguration());
         f.startDefaultLocalityGroup();
@@ -218,28 +218,28 @@ public class TableOp extends Test {
           return;
         } catch (AccumuloSecurityException ae) {
           if (ae.getSecurityErrorCode().equals(SecurityErrorCode.PERMISSION_DENIED)) {
-            if (WalkingSecurity.get(state,env).canBulkImport(WalkingSecurity.get(state,env).getTabCredentials(), tableName, namespaceName))
+            if (WalkingSecurity.get(state, env).canBulkImport(WalkingSecurity.get(state, env).getTabCredentials(), tableName, namespaceName))
               throw new AccumuloException("Bulk Import failed when it should have worked: " + tableName);
             return;
           } else if (ae.getSecurityErrorCode().equals(SecurityErrorCode.BAD_CREDENTIALS)) {
-            if (WalkingSecurity.get(state,env).userPassTransient(conn.whoami()))
+            if (WalkingSecurity.get(state, env).userPassTransient(conn.whoami()))
               return;
           }
           throw new AccumuloException("Unexpected exception!", ae);
         }
-        for (String s : WalkingSecurity.get(state,env).getAuthsArray())
-          WalkingSecurity.get(state,env).increaseAuthMap(s, 1);
+        for (String s : WalkingSecurity.get(state, env).getAuthsArray())
+          WalkingSecurity.get(state, env).increaseAuthMap(s, 1);
         fs.delete(dir, true);
         fs.delete(fail, true);
-        
-        if (!WalkingSecurity.get(state,env).canBulkImport(WalkingSecurity.get(state,env).getTabCredentials(), tableName, namespaceName))
+
+        if (!WalkingSecurity.get(state, env).canBulkImport(WalkingSecurity.get(state, env).getTabCredentials(), tableName, namespaceName))
           throw new AccumuloException("Bulk Import succeeded when it should have failed: " + dir + " table " + tableName);
         break;
       case ALTER_TABLE:
         AlterTable.renameTable(conn, state, env, tableName, tableName + "plus",
-            WalkingSecurity.get(state,env).canAlterTable(WalkingSecurity.get(state,env).getTabCredentials(), tableName, namespaceName), tableExists);
+            WalkingSecurity.get(state, env).canAlterTable(WalkingSecurity.get(state, env).getTabCredentials(), tableName, namespaceName), tableExists);
         break;
-      
+
       case GRANT:
         props.setProperty("task", "grant");
         props.setProperty("perm", "random");
@@ -247,7 +247,7 @@ public class TableOp extends Test {
         props.setProperty("target", "system");
         AlterTablePerm.alter(state, env, props);
         break;
-      
+
       case DROP_TABLE:
         props.setProperty("source", "table");
         DropTable.dropTable(state, env, props);

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/security/Validate.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/security/Validate.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/security/Validate.java
index 905327a..d2fc795 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/security/Validate.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/security/Validate.java
@@ -21,8 +21,8 @@ import java.util.Properties;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.security.SecurityErrorCode;
 import org.apache.accumulo.core.client.impl.thrift.ThriftSecurityException;
+import org.apache.accumulo.core.client.security.SecurityErrorCode;
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.core.security.SystemPermission;
 import org.apache.accumulo.core.security.TablePermission;
@@ -32,34 +32,34 @@ import org.apache.accumulo.test.randomwalk.Test;
 import org.apache.log4j.Logger;
 
 public class Validate extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
     validate(state, env, log);
   }
-  
+
   public static void validate(State state, Environment env, Logger log) throws Exception {
     Connector conn = env.getConnector();
-    
-    boolean tableExists = WalkingSecurity.get(state,env).getTableExists();
-    boolean cloudTableExists = conn.tableOperations().list().contains(WalkingSecurity.get(state,env).getTableName());
+
+    boolean tableExists = WalkingSecurity.get(state, env).getTableExists();
+    boolean cloudTableExists = conn.tableOperations().list().contains(WalkingSecurity.get(state, env).getTableName());
     if (tableExists != cloudTableExists)
       throw new AccumuloException("Table existance out of sync");
-    
-    boolean tableUserExists = WalkingSecurity.get(state,env).userExists(WalkingSecurity.get(state,env).getTabUserName());
-    boolean cloudTableUserExists = conn.securityOperations().listLocalUsers().contains(WalkingSecurity.get(state,env).getTabUserName());
+
+    boolean tableUserExists = WalkingSecurity.get(state, env).userExists(WalkingSecurity.get(state, env).getTabUserName());
+    boolean cloudTableUserExists = conn.securityOperations().listLocalUsers().contains(WalkingSecurity.get(state, env).getTabUserName());
     if (tableUserExists != cloudTableUserExists)
       throw new AccumuloException("Table User existance out of sync");
-    
+
     Properties props = new Properties();
     props.setProperty("target", "system");
     Authenticate.authenticate(env.getUserName(), env.getToken(), state, env, props);
     props.setProperty("target", "table");
     Authenticate.authenticate(env.getUserName(), env.getToken(), state, env, props);
-    
-    for (String user : new String[] {WalkingSecurity.get(state,env).getSysUserName(), WalkingSecurity.get(state,env).getTabUserName()}) {
+
+    for (String user : new String[] {WalkingSecurity.get(state, env).getSysUserName(), WalkingSecurity.get(state, env).getTabUserName()}) {
       for (SystemPermission sp : SystemPermission.values()) {
-        boolean hasSp = WalkingSecurity.get(state,env).hasSystemPermission(user, sp);
+        boolean hasSp = WalkingSecurity.get(state, env).hasSystemPermission(user, sp);
         boolean accuHasSp;
         try {
           accuHasSp = conn.securityOperations().hasSystemPermission(user, sp);
@@ -76,12 +76,12 @@ public class Validate extends Test {
         if (hasSp != accuHasSp)
           throw new AccumuloException(user + " existance out of sync for system perm " + sp + " hasSp/CloudhasSP " + hasSp + " " + accuHasSp);
       }
-      
+
       for (TablePermission tp : TablePermission.values()) {
-        boolean hasTp = WalkingSecurity.get(state,env).hasTablePermission(user, WalkingSecurity.get(state,env).getTableName(), tp);
+        boolean hasTp = WalkingSecurity.get(state, env).hasTablePermission(user, WalkingSecurity.get(state, env).getTableName(), tp);
         boolean accuHasTp;
         try {
-          accuHasTp = conn.securityOperations().hasTablePermission(user, WalkingSecurity.get(state,env).getTableName(), tp);
+          accuHasTp = conn.securityOperations().hasTablePermission(user, WalkingSecurity.get(state, env).getTableName(), tp);
           log.debug("Just checked to see if user " + user + " has table perm " + tp.name() + " with answer " + accuHasTp);
         } catch (AccumuloSecurityException ae) {
           if (ae.getSecurityErrorCode().equals(SecurityErrorCode.USER_DOESNT_EXIST)) {
@@ -100,14 +100,14 @@ public class Validate extends Test {
         if (hasTp != accuHasTp)
           throw new AccumuloException(user + " existance out of sync for table perm " + tp + " hasTp/CloudhasTP " + hasTp + " " + accuHasTp);
       }
-      
+
     }
-    
+
     Authorizations accuAuths;
     Authorizations auths;
     try {
-      auths = WalkingSecurity.get(state,env).getUserAuthorizations(WalkingSecurity.get(state,env).getTabCredentials());
-      accuAuths = conn.securityOperations().getUserAuthorizations(WalkingSecurity.get(state,env).getTabUserName());
+      auths = WalkingSecurity.get(state, env).getUserAuthorizations(WalkingSecurity.get(state, env).getTabCredentials());
+      accuAuths = conn.securityOperations().getUserAuthorizations(WalkingSecurity.get(state, env).getTabUserName());
     } catch (ThriftSecurityException ae) {
       if (ae.getCode() == org.apache.accumulo.core.client.impl.thrift.SecurityErrorCode.USER_DOESNT_EXIST) {
         if (tableUserExists)
@@ -120,5 +120,5 @@ public class Validate extends Test {
     if (!auths.equals(accuAuths))
       throw new AccumuloException("Table User authorizations out of sync");
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/security/WalkingSecurity.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/security/WalkingSecurity.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/security/WalkingSecurity.java
index 0fb3a82..9d285e0 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/security/WalkingSecurity.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/security/WalkingSecurity.java
@@ -53,7 +53,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.log4j.Logger;
 
 /**
- * 
+ *
  */
 public class WalkingSecurity extends SecurityOperation implements Authorizor, Authenticator, PermissionHandler {
   State state = null;

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/BatchVerify.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/BatchVerify.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/BatchVerify.java
index 6a725c7..8616655 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/BatchVerify.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/BatchVerify.java
@@ -20,9 +20,9 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Map.Entry;
 import java.util.Properties;
 import java.util.Random;
-import java.util.Map.Entry;
 
 import org.apache.accumulo.core.client.BatchScanner;
 import org.apache.accumulo.core.client.Connector;
@@ -36,23 +36,23 @@ import org.apache.accumulo.test.randomwalk.Test;
 import org.apache.hadoop.io.Text;
 
 public class BatchVerify extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
-    
+
     Random rand = new Random();
-    
+
     long numWrites = state.getLong("numWrites");
     int maxVerify = Integer.parseInt(props.getProperty("maxVerify", "2000"));
     long numVerify = rand.nextInt(maxVerify - 1) + 1;
-    
+
     if (numVerify > (numWrites / 4)) {
       numVerify = numWrites / 4;
     }
-    
+
     Connector conn = env.getConnector();
     BatchScanner scanner = conn.createBatchScanner(state.getString("seqTableName"), new Authorizations(), 2);
-    
+
     try {
       int count = 0;
       List<Range> ranges = new ArrayList<Range>();
@@ -65,29 +65,29 @@ public class BatchVerify extends Test {
         count += rangeEnd - rangeStart + 1;
         ranges.add(new Range(new Text(String.format("%010d", rangeStart)), new Text(String.format("%010d", rangeEnd))));
       }
-      
+
       ranges = Range.mergeOverlapping(ranges);
       Collections.sort(ranges);
-      
+
       if (count == 0 || ranges.size() == 0)
         return;
-      
+
       log.debug(String.format("scanning %d rows in the following %d ranges:", count, ranges.size()));
       for (Range r : ranges) {
         log.debug(r);
       }
-      
+
       scanner.setRanges(ranges);
-      
+
       List<Key> keys = new ArrayList<Key>();
       for (Entry<Key,Value> entry : scanner) {
         keys.add(entry.getKey());
       }
-      
+
       log.debug("scan returned " + keys.size() + " rows. now verifying...");
-      
+
       Collections.sort(keys);
-      
+
       Iterator<Key> iterator = keys.iterator();
       int curKey = Integer.parseInt(iterator.next().getRow().toString());
       boolean done = false;
@@ -95,12 +95,12 @@ public class BatchVerify extends Test {
         int start = Integer.parseInt(r.getStartKey().getRow().toString());
         int end = Integer.parseInt(String.copyValueOf(r.getEndKey().getRow().toString().toCharArray(), 0, 10));
         for (int i = start; i <= end; i++) {
-          
+
           if (done) {
             log.error("missing key " + i);
             break;
           }
-          
+
           while (curKey < i) {
             log.error("extra key " + curKey);
             if (iterator.hasNext() == false) {
@@ -109,11 +109,11 @@ public class BatchVerify extends Test {
             }
             curKey = Integer.parseInt(iterator.next().getRow().toString());
           }
-          
+
           if (curKey > i) {
             log.error("missing key " + i);
           }
-          
+
           if (iterator.hasNext()) {
             curKey = Integer.parseInt(iterator.next().getRow().toString());
           } else {
@@ -121,7 +121,7 @@ public class BatchVerify extends Test {
           }
         }
       }
-      
+
       log.debug("verify is now complete");
     } finally {
       scanner.close();

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/Commit.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/Commit.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/Commit.java
index 7d5102e..84adae1 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/Commit.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/Commit.java
@@ -23,14 +23,14 @@ import org.apache.accumulo.test.randomwalk.State;
 import org.apache.accumulo.test.randomwalk.Test;
 
 public class Commit extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
-    
+
     env.getMultiTableBatchWriter().flush();
-    
+
     log.debug("Committed " + state.getLong("numWrites") + " writes.  Total writes: " + state.getLong("totalWrites"));
     state.set("numWrites", Long.valueOf(0));
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/MapRedVerify.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/MapRedVerify.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/MapRedVerify.java
index 4337799..95c1d0b 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/MapRedVerify.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/MapRedVerify.java
@@ -16,8 +16,8 @@
  */
 package org.apache.accumulo.test.randomwalk.sequential;
 
-import java.util.Properties;
 import java.util.Map.Entry;
+import java.util.Properties;
 
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.Scanner;
@@ -32,10 +32,10 @@ import org.apache.accumulo.test.randomwalk.Test;
 import org.apache.hadoop.util.ToolRunner;
 
 public class MapRedVerify extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
-    
+
     String[] args = new String[8];
     args[0] = "-libjars";
     args[1] = getMapReduceJars();
@@ -45,15 +45,15 @@ public class MapRedVerify extends Test {
     args[5] = env.getInstance().getInstanceName();
     args[6] = env.getConfigProperty("ZOOKEEPERS");
     args[7] = args[4] + "_MR";
-    
+
     if (ToolRunner.run(CachedConfiguration.getInstance(), new MapRedVerifyTool(), args) != 0) {
       log.error("Failed to run map/red verify");
       return;
     }
-    
+
     Scanner outputScanner = env.getConnector().createScanner(args[7], Authorizations.EMPTY);
     outputScanner.setRange(new Range());
-    
+
     int count = 0;
     Key lastKey = null;
     for (Entry<Key,Value> entry : outputScanner) {
@@ -64,11 +64,11 @@ public class MapRedVerify extends Test {
       }
       lastKey = current;
     }
-    
+
     if (count > 1) {
       log.error("Gaps in output");
     }
-    
+
     log.debug("Dropping table: " + args[7]);
     Connector conn = env.getConnector();
     conn.tableOperations().delete(args[7]);

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/MapRedVerifyTool.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/MapRedVerifyTool.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/MapRedVerifyTool.java
index 3a078ef..47b671c 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/MapRedVerifyTool.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/MapRedVerifyTool.java
@@ -38,7 +38,7 @@ import org.apache.log4j.Logger;
 
 public class MapRedVerifyTool extends Configured implements Tool {
   protected final Logger log = Logger.getLogger(this.getClass());
-  
+
   public static class SeqMapClass extends Mapper<Key,Value,NullWritable,IntWritable> {
     @Override
     public void map(Key row, Value data, Context output) throws IOException, InterruptedException {
@@ -46,16 +46,16 @@ public class MapRedVerifyTool extends Configured implements Tool {
       output.write(NullWritable.get(), new IntWritable(num.intValue()));
     }
   }
-  
+
   public static class SeqReduceClass extends Reducer<NullWritable,IntWritable,Text,Mutation> {
     @Override
     public void reduce(NullWritable ignore, Iterable<IntWritable> values, Context output) throws IOException, InterruptedException {
       Iterator<IntWritable> iterator = values.iterator();
-      
+
       if (iterator.hasNext() == false) {
         return;
       }
-      
+
       int start = iterator.next().get();
       int index = start;
       while (iterator.hasNext()) {
@@ -68,45 +68,45 @@ public class MapRedVerifyTool extends Configured implements Tool {
       }
       writeMutation(output, start, index);
     }
-    
+
     public void writeMutation(Context output, int start, int end) throws IOException, InterruptedException {
       Mutation m = new Mutation(new Text(String.format("%010d", start)));
       m.put(new Text(String.format("%010d", end)), new Text(""), new Value(new byte[0]));
       output.write(null, m);
     }
   }
-  
+
   @Override
   public int run(String[] args) throws Exception {
     @SuppressWarnings("deprecation")
     Job job = new Job(getConf(), this.getClass().getSimpleName());
     job.setJarByClass(this.getClass());
-    
+
     if (job.getJar() == null) {
       log.error("M/R requires a jar file!  Run mvn package.");
       return 1;
     }
-    
+
     ClientConfiguration clientConf = new ClientConfiguration().withInstance(args[3]).withZkHosts(args[4]);
 
     job.setInputFormatClass(AccumuloInputFormat.class);
     AccumuloInputFormat.setConnectorInfo(job, args[0], new PasswordToken(args[1]));
     AccumuloInputFormat.setInputTableName(job, args[2]);
     AccumuloInputFormat.setZooKeeperInstance(job, clientConf);
-    
+
     job.setMapperClass(SeqMapClass.class);
     job.setMapOutputKeyClass(NullWritable.class);
     job.setMapOutputValueClass(IntWritable.class);
-    
+
     job.setReducerClass(SeqReduceClass.class);
     job.setNumReduceTasks(1);
-    
+
     job.setOutputFormatClass(AccumuloOutputFormat.class);
     AccumuloOutputFormat.setConnectorInfo(job, args[0], new PasswordToken(args[1]));
     AccumuloOutputFormat.setCreateTables(job, true);
     AccumuloOutputFormat.setDefaultTableName(job, args[5]);
     AccumuloOutputFormat.setZooKeeperInstance(job, clientConf);
-    
+
     job.waitForCompletion(true);
     return job.isSuccessful() ? 0 : 1;
   }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/SequentialFixture.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/SequentialFixture.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/SequentialFixture.java
index c2320a7..9565cc5 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/SequentialFixture.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/SequentialFixture.java
@@ -24,25 +24,25 @@ import org.apache.accumulo.core.client.MultiTableBatchWriter;
 import org.apache.accumulo.core.client.MutationsRejectedException;
 import org.apache.accumulo.core.client.TableExistsException;
 import org.apache.accumulo.core.client.impl.Tables;
-import org.apache.accumulo.test.randomwalk.Fixture;
 import org.apache.accumulo.test.randomwalk.Environment;
+import org.apache.accumulo.test.randomwalk.Fixture;
 import org.apache.accumulo.test.randomwalk.State;
 
 public class SequentialFixture extends Fixture {
-  
+
   String seqTableName;
-  
+
   @Override
   public void setUp(State state, Environment env) throws Exception {
-    
+
     Connector conn = env.getConnector();
     Instance instance = env.getInstance();
-    
+
     String hostname = InetAddress.getLocalHost().getHostName().replaceAll("[-.]", "_");
-    
+
     seqTableName = String.format("sequential_%s_%s_%d", hostname, env.getPid(), System.currentTimeMillis());
     state.set("seqTableName", seqTableName);
-    
+
     try {
       conn.tableOperations().create(seqTableName);
       log.debug("Created table " + seqTableName + " (id:" + Tables.getNameToIdMap(instance).get(seqTableName) + ")");
@@ -51,11 +51,11 @@ public class SequentialFixture extends Fixture {
       throw e;
     }
     conn.tableOperations().setProperty(seqTableName, "table.scan.max.memory", "1K");
-    
+
     state.set("numWrites", Long.valueOf(0));
     state.set("totalWrites", Long.valueOf(0));
   }
-  
+
   @Override
   public void tearDown(State state, Environment env) throws Exception {
     // We have resources we need to clean up
@@ -66,15 +66,15 @@ public class SequentialFixture extends Fixture {
       } catch (MutationsRejectedException e) {
         log.error("Ignoring mutations that weren't flushed", e);
       }
-      
+
       // Reset the MTBW on the state to null
       env.resetMultiTableBatchWriter();
     }
-    
+
     log.debug("Dropping tables: " + seqTableName);
-    
+
     Connector conn = env.getConnector();
-    
+
     conn.tableOperations().delete(seqTableName);
   }
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/Write.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/Write.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/Write.java
index 1795434..5398d99 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/Write.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/sequential/Write.java
@@ -29,20 +29,20 @@ import org.apache.accumulo.test.randomwalk.Test;
 import org.apache.hadoop.io.Text;
 
 public class Write extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
-    
+
     BatchWriter bw = env.getMultiTableBatchWriter().getBatchWriter(state.getString("seqTableName"));
-    
+
     state.set("numWrites", state.getLong("numWrites") + 1);
-    
+
     Long totalWrites = state.getLong("totalWrites") + 1;
     if (totalWrites % 10000 == 0) {
       log.debug("Total writes: " + totalWrites);
     }
     state.set("totalWrites", totalWrites);
-    
+
     Mutation m = new Mutation(new Text(String.format("%010d", totalWrites)));
     m.put(new Text("cf"), new Text("cq"), new Value("val".getBytes(UTF_8)));
     bw.addMutation(m);

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/BulkInsert.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/BulkInsert.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/BulkInsert.java
index 5959105..1074e5a 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/BulkInsert.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/BulkInsert.java
@@ -49,23 +49,23 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.ToolRunner;
 
 public class BulkInsert extends Test {
-  
+
   class SeqfileBatchWriter implements BatchWriter {
-    
+
     SequenceFile.Writer writer;
-    
+
     @SuppressWarnings("deprecation")
     SeqfileBatchWriter(Configuration conf, FileSystem fs, String file) throws IOException {
       writer = new SequenceFile.Writer(fs, conf, new Path(file), Key.class, Value.class);
     }
-    
+
     @Override
     public void addMutation(Mutation m) throws MutationsRejectedException {
       List<ColumnUpdate> updates = m.getUpdates();
       for (ColumnUpdate cu : updates) {
         Key key = new Key(m.getRow(), cu.getColumnFamily(), cu.getColumnQualifier(), cu.getColumnVisibility(), Long.MAX_VALUE, false, false);
         Value val = new Value(cu.getValue(), false);
-        
+
         try {
           writer.append(key, val);
         } catch (IOException e) {
@@ -73,16 +73,16 @@ public class BulkInsert extends Test {
         }
       }
     }
-    
+
     @Override
     public void addMutations(Iterable<Mutation> iterable) throws MutationsRejectedException {
       for (Mutation mutation : iterable)
         addMutation(mutation);
     }
-    
+
     @Override
     public void flush() throws MutationsRejectedException {}
-    
+
     @Override
     public void close() throws MutationsRejectedException {
       try {
@@ -91,53 +91,53 @@ public class BulkInsert extends Test {
         throw new RuntimeException(e);
       }
     }
-    
+
   }
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
-    
+
     String indexTableName = (String) state.get("indexTableName");
     String dataTableName = (String) state.get("docTableName");
     int numPartitions = (Integer) state.get("numPartitions");
     Random rand = (Random) state.get("rand");
     long nextDocID = (Long) state.get("nextDocID");
-    
+
     int minInsert = Integer.parseInt(props.getProperty("minInsert"));
     int maxInsert = Integer.parseInt(props.getProperty("maxInsert"));
     int numToInsert = rand.nextInt(maxInsert - minInsert) + minInsert;
-    
+
     int maxSplits = Integer.parseInt(props.getProperty("maxSplits"));
-    
+
     Configuration conf = CachedConfiguration.getInstance();
     FileSystem fs = FileSystem.get(conf);
-    
+
     String rootDir = "/tmp/shard_bulk/" + dataTableName;
-    
+
     fs.mkdirs(new Path(rootDir));
-    
+
     BatchWriter dataWriter = new SeqfileBatchWriter(conf, fs, rootDir + "/data.seq");
     BatchWriter indexWriter = new SeqfileBatchWriter(conf, fs, rootDir + "/index.seq");
-    
+
     for (int i = 0; i < numToInsert; i++) {
       String docID = Insert.insertRandomDocument(nextDocID++, dataWriter, indexWriter, indexTableName, dataTableName, numPartitions, rand);
       log.debug("Bulk inserting document " + docID);
     }
-    
+
     state.set("nextDocID", Long.valueOf(nextDocID));
-    
+
     dataWriter.close();
     indexWriter.close();
-    
+
     sort(state, env, fs, dataTableName, rootDir + "/data.seq", rootDir + "/data_bulk", rootDir + "/data_work", maxSplits);
     sort(state, env, fs, indexTableName, rootDir + "/index.seq", rootDir + "/index_bulk", rootDir + "/index_work", maxSplits);
-    
+
     bulkImport(fs, state, env, dataTableName, rootDir, "data");
     bulkImport(fs, state, env, indexTableName, rootDir, "index");
-    
+
     fs.delete(new Path(rootDir), true);
   }
-  
+
   private void bulkImport(FileSystem fs, State state, Environment env, String tableName, String rootDir, String prefix) throws Exception {
     while (true) {
       String bulkDir = rootDir + "/" + prefix + "_bulk";
@@ -146,11 +146,11 @@ public class BulkInsert extends Test {
       fs.delete(failPath, true);
       fs.mkdirs(failPath);
       env.getConnector().tableOperations().importDirectory(tableName, bulkDir, failDir, true);
-      
+
       FileStatus[] failures = fs.listStatus(failPath);
       if (failures != null && failures.length > 0) {
         log.warn("Failed to bulk import some files, retrying ");
-        
+
         for (FileStatus failure : failures) {
           if (!failure.getPath().getName().endsWith(".seq"))
             fs.rename(failure.getPath(), new Path(new Path(bulkDir), failure.getPath().getName()));
@@ -162,28 +162,29 @@ public class BulkInsert extends Test {
         break;
     }
   }
-  
-  private void sort(State state, Environment env, FileSystem fs, String tableName, String seqFile, String outputDir, String workDir, int maxSplits) throws Exception {
-    
+
+  private void sort(State state, Environment env, FileSystem fs, String tableName, String seqFile, String outputDir, String workDir, int maxSplits)
+      throws Exception {
+
     PrintStream out = new PrintStream(new BufferedOutputStream(fs.create(new Path(workDir + "/splits.txt"))), false, UTF_8.name());
-    
+
     Connector conn = env.getConnector();
-    
+
     Collection<Text> splits = conn.tableOperations().listSplits(tableName, maxSplits);
     for (Text split : splits)
       out.println(Base64.encodeBase64String(TextUtil.getBytes(split)));
-    
+
     out.close();
-    
+
     SortTool sortTool = new SortTool(seqFile, outputDir, workDir + "/splits.txt", splits);
-    
+
     String[] args = new String[2];
     args[0] = "-libjars";
     args[1] = getMapReduceJars();
-    
+
     if (ToolRunner.run(CachedConfiguration.getInstance(), sortTool, args) != 0) {
       throw new Exception("Failed to run map/red verify");
     }
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/CloneIndex.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/CloneIndex.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/CloneIndex.java
index 77f34bc..0d3d38e 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/CloneIndex.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/CloneIndex.java
@@ -25,21 +25,21 @@ import org.apache.accumulo.test.randomwalk.State;
 import org.apache.accumulo.test.randomwalk.Test;
 
 public class CloneIndex extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
-    
+
     String indexTableName = (String) state.get("indexTableName");
     String tmpIndexTableName = indexTableName + "_tmp";
-    
+
     long t1 = System.currentTimeMillis();
     env.getConnector().tableOperations().flush(indexTableName, null, null, true);
     long t2 = System.currentTimeMillis();
     env.getConnector().tableOperations().clone(indexTableName, tmpIndexTableName, false, new HashMap<String,String>(), new HashSet<String>());
     long t3 = System.currentTimeMillis();
-    
+
     log.debug("Cloned " + tmpIndexTableName + " from " + indexTableName + " flush: " + (t2 - t1) + "ms clone: " + (t3 - t2) + "ms");
-    
+
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Commit.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Commit.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Commit.java
index 8f928c7..d3f9ed7 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Commit.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Commit.java
@@ -23,11 +23,11 @@ import org.apache.accumulo.test.randomwalk.State;
 import org.apache.accumulo.test.randomwalk.Test;
 
 public class Commit extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
     env.getMultiTableBatchWriter().flush();
     log.debug("Committed inserts ");
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/CompactFilter.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/CompactFilter.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/CompactFilter.java
index 96c6c62..a73b311 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/CompactFilter.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/CompactFilter.java
@@ -39,18 +39,18 @@ import org.apache.hadoop.io.Text;
  * Test deleting documents by using a compaction filter iterator
  */
 public class CompactFilter extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
     String indexTableName = (String) state.get("indexTableName");
     String docTableName = (String) state.get("docTableName");
     Random rand = (Random) state.get("rand");
-    
+
     String deleteChar = Integer.toHexString(rand.nextInt(16)) + "";
     String regex = "^[0-9a-f][" + deleteChar + "].*";
 
     ArrayList<IteratorSetting> documentFilters = new ArrayList<IteratorSetting>();
-    
+
     IteratorSetting is = new IteratorSetting(21, "ii", RegExFilter.class);
     RegExFilter.setRegexs(is, regex, null, null, null, false);
     RegExFilter.setNegate(is, true);
@@ -60,35 +60,35 @@ public class CompactFilter extends Test {
     env.getConnector().tableOperations().compact(docTableName, null, null, documentFilters, true, true);
     long t2 = System.currentTimeMillis();
     long t3 = t2 - t1;
-    
+
     ArrayList<IteratorSetting> indexFilters = new ArrayList<IteratorSetting>();
-    
+
     is = new IteratorSetting(21, RegExFilter.class);
     RegExFilter.setRegexs(is, null, null, regex, null, false);
     RegExFilter.setNegate(is, true);
     indexFilters.add(is);
-    
+
     t1 = System.currentTimeMillis();
     env.getConnector().tableOperations().compact(indexTableName, null, null, indexFilters, true, true);
     t2 = System.currentTimeMillis();
-    
+
     log.debug("Filtered documents using compaction iterators " + regex + " " + (t3) + " " + (t2 - t1));
-    
+
     BatchScanner bscanner = env.getConnector().createBatchScanner(docTableName, new Authorizations(), 10);
-    
+
     List<Range> ranges = new ArrayList<Range>();
     for (int i = 0; i < 16; i++) {
       ranges.add(Range.prefix(new Text(Integer.toHexString(i) + "" + deleteChar)));
     }
-    
+
     bscanner.setRanges(ranges);
     Iterator<Entry<Key,Value>> iter = bscanner.iterator();
-    
+
     if (iter.hasNext()) {
       throw new Exception("Saw unexpected document " + iter.next().getKey());
     }
 
     bscanner.close();
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Delete.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Delete.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Delete.java
index 4931af5..4cc0304 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Delete.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Delete.java
@@ -28,31 +28,31 @@ import org.apache.accumulo.test.randomwalk.State;
 import org.apache.accumulo.test.randomwalk.Test;
 
 public class Delete extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
     String indexTableName = (String) state.get("indexTableName");
     String dataTableName = (String) state.get("docTableName");
     int numPartitions = (Integer) state.get("numPartitions");
     Random rand = (Random) state.get("rand");
-    
+
     Entry<Key,Value> entry = Search.findRandomDocument(state, env, dataTableName, rand);
     if (entry == null)
       return;
-    
+
     String docID = entry.getKey().getRow().toString();
     String doc = entry.getValue().toString();
-    
+
     Insert.unindexDocument(env.getMultiTableBatchWriter().getBatchWriter(indexTableName), doc, docID, numPartitions);
-    
+
     Mutation m = new Mutation(docID);
     m.putDelete("doc", "");
-    
+
     env.getMultiTableBatchWriter().getBatchWriter(dataTableName).addMutation(m);
-    
+
     log.debug("Deleted document " + docID);
-    
+
     env.getMultiTableBatchWriter().flush();
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/DeleteSomeDocs.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/DeleteSomeDocs.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/DeleteSomeDocs.java
index ed0a458..f7fb1df 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/DeleteSomeDocs.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/DeleteSomeDocs.java
@@ -33,48 +33,48 @@ import org.apache.accumulo.test.randomwalk.Test;
 
 //a test created to test the batch deleter
 public class DeleteSomeDocs extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
     // delete documents where the document id matches a given pattern from the doc and index table
     // using the batch deleter
-    
+
     Random rand = (Random) state.get("rand");
     String indexTableName = (String) state.get("indexTableName");
     String dataTableName = (String) state.get("docTableName");
-    
+
     ArrayList<String> patterns = new ArrayList<String>();
-    
+
     for (Object key : props.keySet())
       if (key instanceof String && ((String) key).startsWith("pattern"))
         patterns.add(props.getProperty((String) key));
-    
+
     String pattern = patterns.get(rand.nextInt(patterns.size()));
     BatchWriterConfig bwc = new BatchWriterConfig();
     BatchDeleter ibd = env.getConnector().createBatchDeleter(indexTableName, Authorizations.EMPTY, 8, bwc);
     ibd.setRanges(Collections.singletonList(new Range()));
-    
+
     IteratorSetting iterSettings = new IteratorSetting(100, RegExFilter.class);
     RegExFilter.setRegexs(iterSettings, null, null, pattern, null, false);
-    
+
     ibd.addScanIterator(iterSettings);
-    
+
     ibd.delete();
-    
+
     ibd.close();
-    
+
     BatchDeleter dbd = env.getConnector().createBatchDeleter(dataTableName, Authorizations.EMPTY, 8, bwc);
     dbd.setRanges(Collections.singletonList(new Range()));
-    
+
     iterSettings = new IteratorSetting(100, RegExFilter.class);
     RegExFilter.setRegexs(iterSettings, pattern, null, null, null, false);
-    
+
     dbd.addScanIterator(iterSettings);
-    
+
     dbd.delete();
-    
+
     dbd.close();
-    
+
     log.debug("Deleted documents w/ id matching '" + pattern + "'");
   }
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/DeleteWord.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/DeleteWord.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/DeleteWord.java
index 10bbff3..28b1899 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/DeleteWord.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/DeleteWord.java
@@ -17,8 +17,8 @@
 package org.apache.accumulo.test.randomwalk.shard;
 
 import java.util.ArrayList;
-import java.util.Properties;
 import java.util.Map.Entry;
+import java.util.Properties;
 import java.util.Random;
 
 import org.apache.accumulo.core.client.BatchScanner;
@@ -36,62 +36,62 @@ import org.apache.hadoop.io.Text;
 
 /**
  * Delete all documents containing a particular word.
- * 
+ *
  */
 
 public class DeleteWord extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
     String indexTableName = (String) state.get("indexTableName");
     String docTableName = (String) state.get("docTableName");
     int numPartitions = (Integer) state.get("numPartitions");
     Random rand = (Random) state.get("rand");
-    
+
     String wordToDelete = Insert.generateRandomWord(rand);
-    
+
     // use index to find all documents containing word
     Scanner scanner = env.getConnector().createScanner(indexTableName, Authorizations.EMPTY);
     scanner.fetchColumnFamily(new Text(wordToDelete));
-    
+
     ArrayList<Range> documentsToDelete = new ArrayList<Range>();
-    
+
     for (Entry<Key,Value> entry : scanner)
       documentsToDelete.add(new Range(entry.getKey().getColumnQualifier()));
-    
+
     if (documentsToDelete.size() > 0) {
       // use a batch scanner to fetch all documents
       BatchScanner bscanner = env.getConnector().createBatchScanner(docTableName, Authorizations.EMPTY, 8);
       bscanner.setRanges(documentsToDelete);
-      
+
       BatchWriter ibw = env.getMultiTableBatchWriter().getBatchWriter(indexTableName);
       BatchWriter dbw = env.getMultiTableBatchWriter().getBatchWriter(docTableName);
-      
+
       int count = 0;
-      
+
       for (Entry<Key,Value> entry : bscanner) {
         String docID = entry.getKey().getRow().toString();
         String doc = entry.getValue().toString();
-        
+
         Insert.unindexDocument(ibw, doc, docID, numPartitions);
-        
+
         Mutation m = new Mutation(docID);
         m.putDelete("doc", "");
-        
+
         dbw.addMutation(m);
         count++;
       }
-      
+
       bscanner.close();
-      
+
       env.getMultiTableBatchWriter().flush();
-      
+
       if (count != documentsToDelete.size()) {
         throw new Exception("Batch scanner did not return expected number of docs " + count + " " + documentsToDelete.size());
       }
     }
-    
+
     log.debug("Deleted " + documentsToDelete.size() + " documents containing " + wordToDelete);
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/ExportIndex.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/ExportIndex.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/ExportIndex.java
index f5ede55..ca273ac 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/ExportIndex.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/ExportIndex.java
@@ -36,36 +36,36 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 
 /**
- * 
+ *
  */
 public class ExportIndex extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
-    
+
     String indexTableName = (String) state.get("indexTableName");
     String tmpIndexTableName = indexTableName + "_tmp";
-    
+
     String exportDir = "/tmp/shard_export/" + indexTableName;
     String copyDir = "/tmp/shard_export/" + tmpIndexTableName;
-    
+
     FileSystem fs = FileSystem.get(CachedConfiguration.getInstance());
-    
+
     fs.delete(new Path("/tmp/shard_export/" + indexTableName), true);
     fs.delete(new Path("/tmp/shard_export/" + tmpIndexTableName), true);
-    
+
+    // disable splits, so that splits can be compared later w/o worrying about one table splitting and the other not
     env.getConnector().tableOperations().setProperty(indexTableName, Property.TABLE_SPLIT_THRESHOLD.getKey(), "20G");
 
     long t1 = System.currentTimeMillis();
-    
+
     env.getConnector().tableOperations().flush(indexTableName, null, null, true);
     env.getConnector().tableOperations().offline(indexTableName);
-    
+
     long t2 = System.currentTimeMillis();
 
     env.getConnector().tableOperations().exportTable(indexTableName, exportDir);
-    
+
     long t3 = System.currentTimeMillis();
 
     // copy files
@@ -78,31 +78,31 @@ public class ExportIndex extends Test {
     }
 
     reader.close();
-    
+
     long t4 = System.currentTimeMillis();
-    
+
     env.getConnector().tableOperations().online(indexTableName);
     env.getConnector().tableOperations().importTable(tmpIndexTableName, copyDir);
-    
+
     long t5 = System.currentTimeMillis();
 
     fs.delete(new Path(exportDir), true);
     fs.delete(new Path(copyDir), true);
-    
+
     HashSet<Text> splits1 = new HashSet<Text>(env.getConnector().tableOperations().listSplits(indexTableName));
     HashSet<Text> splits2 = new HashSet<Text>(env.getConnector().tableOperations().listSplits(tmpIndexTableName));
-    
+
     if (!splits1.equals(splits2))
       throw new Exception("Splits not equals " + indexTableName + " " + tmpIndexTableName);
-    
+
     HashMap<String,String> props1 = new HashMap<String,String>();
     for (Entry<String,String> entry : env.getConnector().tableOperations().getProperties(indexTableName))
       props1.put(entry.getKey(), entry.getValue());
-    
+
     HashMap<String,String> props2 = new HashMap<String,String>();
     for (Entry<String,String> entry : env.getConnector().tableOperations().getProperties(tmpIndexTableName))
       props2.put(entry.getKey(), entry.getValue());
-    
+
     if (!props1.equals(props2))
       throw new Exception("Props not equals " + indexTableName + " " + tmpIndexTableName);
 
@@ -112,7 +112,7 @@ public class ExportIndex extends Test {
 
     log.debug("Imported " + tmpIndexTableName + " from " + indexTableName + " flush: " + (t2 - t1) + "ms export: " + (t3 - t2) + "ms copy:" + (t4 - t3)
         + "ms import:" + (t5 - t4) + "ms");
-    
+
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Flush.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Flush.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Flush.java
index 5bd4354..39d32c4 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Flush.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Flush.java
@@ -24,22 +24,22 @@ import org.apache.accumulo.test.randomwalk.State;
 import org.apache.accumulo.test.randomwalk.Test;
 
 public class Flush extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
     String indexTableName = (String) state.get("indexTableName");
     String dataTableName = (String) state.get("docTableName");
     Random rand = (Random) state.get("rand");
-    
+
     String table;
-    
+
     if (rand.nextDouble() < .5)
       table = indexTableName;
     else
       table = dataTableName;
-    
+
     env.getConnector().tableOperations().flush(table, null, null, true);
     log.debug("Flushed " + table);
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Grep.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Grep.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Grep.java
index 66d53d8..d5c5e5d 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Grep.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Grep.java
@@ -37,61 +37,61 @@ import org.apache.accumulo.test.randomwalk.Test;
 import org.apache.hadoop.io.Text;
 
 public class Grep extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
     // pick a few randoms words... grep for those words and search the index
     // ensure both return the same set of documents
-    
+
     String indexTableName = (String) state.get("indexTableName");
     String dataTableName = (String) state.get("docTableName");
     Random rand = (Random) state.get("rand");
-    
+
     Text words[] = new Text[rand.nextInt(4) + 2];
-    
+
     for (int i = 0; i < words.length; i++) {
       words[i] = new Text(Insert.generateRandomWord(rand));
     }
-    
+
     BatchScanner bs = env.getConnector().createBatchScanner(indexTableName, Authorizations.EMPTY, 16);
     IteratorSetting ii = new IteratorSetting(20, "ii", IntersectingIterator.class.getName());
     IntersectingIterator.setColumnFamilies(ii, words);
     bs.addScanIterator(ii);
     bs.setRanges(Collections.singleton(new Range()));
-    
+
     HashSet<Text> documentsFoundInIndex = new HashSet<Text>();
-    
+
     for (Entry<Key,Value> entry2 : bs) {
       documentsFoundInIndex.add(entry2.getKey().getColumnQualifier());
     }
-    
+
     bs.close();
-    
+
     bs = env.getConnector().createBatchScanner(dataTableName, Authorizations.EMPTY, 16);
-    
+
     for (int i = 0; i < words.length; i++) {
       IteratorSetting more = new IteratorSetting(20 + i, "ii" + i, RegExFilter.class);
       RegExFilter.setRegexs(more, null, null, null, "(^|(.*\\s))" + words[i] + "($|(\\s.*))", false);
       bs.addScanIterator(more);
     }
-    
+
     bs.setRanges(Collections.singleton(new Range()));
-    
+
     HashSet<Text> documentsFoundByGrep = new HashSet<Text>();
-    
+
     for (Entry<Key,Value> entry2 : bs) {
       documentsFoundByGrep.add(entry2.getKey().getRow());
     }
-    
+
     bs.close();
-    
+
     if (!documentsFoundInIndex.equals(documentsFoundByGrep)) {
       throw new Exception("Set of documents found not equal for words " + Arrays.asList(words).toString() + " " + documentsFoundInIndex + " "
           + documentsFoundByGrep);
     }
-    
+
     log.debug("Grep and index agree " + Arrays.asList(words).toString() + " " + documentsFoundInIndex.size());
-    
+
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Insert.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Insert.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Insert.java
index 71d355b..4fdbbb9 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Insert.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Insert.java
@@ -31,11 +31,11 @@ import org.apache.accumulo.test.randomwalk.State;
 import org.apache.accumulo.test.randomwalk.Test;
 
 public class Insert extends Test {
-  
+
   static final int NUM_WORDS = 100000;
   static final int MIN_WORDS_PER_DOC = 10;
   static final int MAX_WORDS_PER_DOC = 3000;
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
     String indexTableName = (String) state.get("indexTableName");
@@ -43,83 +43,83 @@ public class Insert extends Test {
     int numPartitions = (Integer) state.get("numPartitions");
     Random rand = (Random) state.get("rand");
     long nextDocID = (Long) state.get("nextDocID");
-    
+
     BatchWriter dataWriter = env.getMultiTableBatchWriter().getBatchWriter(dataTableName);
     BatchWriter indexWriter = env.getMultiTableBatchWriter().getBatchWriter(indexTableName);
-    
+
     String docID = insertRandomDocument(nextDocID++, dataWriter, indexWriter, indexTableName, dataTableName, numPartitions, rand);
-    
+
     log.debug("Inserted document " + docID);
-    
+
     state.set("nextDocID", Long.valueOf(nextDocID));
   }
-  
+
   static String insertRandomDocument(long did, BatchWriter dataWriter, BatchWriter indexWriter, String indexTableName, String dataTableName, int numPartitions,
       Random rand) throws TableNotFoundException, Exception, AccumuloException, AccumuloSecurityException {
     String doc = createDocument(rand);
-    
+
     String docID = new StringBuilder(String.format("%016x", did)).reverse().toString();
-    
+
     saveDocument(dataWriter, docID, doc);
     indexDocument(indexWriter, doc, docID, numPartitions);
-    
+
     return docID;
   }
-  
+
   static void saveDocument(BatchWriter bw, String docID, String doc) throws Exception {
-    
+
     Mutation m = new Mutation(docID);
     m.put("doc", "", doc);
-    
+
     bw.addMutation(m);
   }
-  
+
   static String createDocument(Random rand) {
     StringBuilder sb = new StringBuilder();
-    
+
     int numWords = rand.nextInt(MAX_WORDS_PER_DOC - MIN_WORDS_PER_DOC) + MIN_WORDS_PER_DOC;
-    
+
     for (int i = 0; i < numWords; i++) {
       String word = generateRandomWord(rand);
-      
+
       if (i > 0)
         sb.append(" ");
-      
+
       sb.append(word);
     }
-    
+
     return sb.toString();
   }
-  
+
   static String generateRandomWord(Random rand) {
     return Integer.toString(rand.nextInt(NUM_WORDS), Character.MAX_RADIX);
   }
-  
+
   static String genPartition(int partition) {
     return String.format("%06x", Math.abs(partition));
   }
-  
+
   static void indexDocument(BatchWriter bw, String doc, String docId, int numPartitions) throws Exception {
     indexDocument(bw, doc, docId, numPartitions, false);
   }
-  
+
   static void unindexDocument(BatchWriter bw, String doc, String docId, int numPartitions) throws Exception {
     indexDocument(bw, doc, docId, numPartitions, true);
   }
-  
+
   static void indexDocument(BatchWriter bw, String doc, String docId, int numPartitions, boolean delete) throws Exception {
-    
+
     String[] tokens = doc.split("\\W+");
-    
+
     String partition = genPartition(doc.hashCode() % numPartitions);
-    
+
     Mutation m = new Mutation(partition);
-    
+
     HashSet<String> tokensSeen = new HashSet<String>();
-    
+
     for (String token : tokens) {
       token = token.toLowerCase();
-      
+
       if (!tokensSeen.contains(token)) {
         tokensSeen.add(token);
         if (delete)
@@ -128,9 +128,9 @@ public class Insert extends Test {
           m.put(token, docId, new Value(new byte[0]));
       }
     }
-    
+
     if (m.size() > 0)
       bw.addMutation(m);
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Merge.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Merge.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Merge.java
index 0c4fa3f..36a70f6 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Merge.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Merge.java
@@ -27,11 +27,11 @@ import org.apache.accumulo.test.randomwalk.Test;
 import org.apache.hadoop.io.Text;
 
 public class Merge extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
     String indexTableName = (String) state.get("indexTableName");
-    
+
     Collection<Text> splits = env.getConnector().tableOperations().listSplits(indexTableName);
     SortedSet<Text> splitSet = new TreeSet<Text>(splits);
     log.debug("merging " + indexTableName);
@@ -45,5 +45,5 @@ public class Merge extends Test {
       throw new Exception("There are more tablets after a merge: " + splits.size() + " was " + splitSet.size());
     }
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Reindex.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Reindex.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Reindex.java
index de26b76..5aed6e3 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Reindex.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Reindex.java
@@ -31,36 +31,36 @@ import org.apache.accumulo.test.randomwalk.State;
 import org.apache.accumulo.test.randomwalk.Test;
 
 public class Reindex extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
     String indexTableName = (String) state.get("indexTableName");
     String tmpIndexTableName = indexTableName + "_tmp";
     String docTableName = (String) state.get("docTableName");
     int numPartitions = (Integer) state.get("numPartitions");
-    
+
     Random rand = (Random) state.get("rand");
-    
+
     ShardFixture.createIndexTable(this.log, state, env, "_tmp", rand);
-    
+
     Scanner scanner = env.getConnector().createScanner(docTableName, Authorizations.EMPTY);
     BatchWriter tbw = env.getConnector().createBatchWriter(tmpIndexTableName, new BatchWriterConfig());
-    
+
     int count = 0;
-    
+
     for (Entry<Key,Value> entry : scanner) {
       String docID = entry.getKey().getRow().toString();
       String doc = entry.getValue().toString();
-      
+
       Insert.indexDocument(tbw, doc, docID, numPartitions);
-      
+
       count++;
     }
-    
+
     tbw.close();
-    
+
     log.debug("Reindexed " + count + " documents into " + tmpIndexTableName);
-    
+
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Search.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Search.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Search.java
index c8888a1..899ec42 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Search.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Search.java
@@ -19,8 +19,8 @@ package org.apache.accumulo.test.randomwalk.shard;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Iterator;
-import java.util.Properties;
 import java.util.Map.Entry;
+import java.util.Properties;
 import java.util.Random;
 
 import org.apache.accumulo.core.client.BatchScanner;
@@ -37,69 +37,69 @@ import org.apache.accumulo.test.randomwalk.Test;
 import org.apache.hadoop.io.Text;
 
 public class Search extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
     String indexTableName = (String) state.get("indexTableName");
     String dataTableName = (String) state.get("docTableName");
-    
+
     Random rand = (Random) state.get("rand");
-    
+
     Entry<Key,Value> entry = findRandomDocument(state, env, dataTableName, rand);
     if (entry == null)
       return;
-    
+
     Text docID = entry.getKey().getRow();
     String doc = entry.getValue().toString();
-    
+
     String[] tokens = doc.split("\\W+");
     int numSearchTerms = rand.nextInt(6);
     if (numSearchTerms < 2)
       numSearchTerms = 2;
-    
+
     HashSet<String> searchTerms = new HashSet<String>();
     while (searchTerms.size() < numSearchTerms)
       searchTerms.add(tokens[rand.nextInt(tokens.length)]);
-    
+
     Text columns[] = new Text[searchTerms.size()];
     int index = 0;
     for (String term : searchTerms) {
       columns[index++] = new Text(term);
     }
-    
+
     log.debug("Looking up terms " + searchTerms + " expect to find " + docID);
-    
+
     BatchScanner bs = env.getConnector().createBatchScanner(indexTableName, Authorizations.EMPTY, 10);
     IteratorSetting ii = new IteratorSetting(20, "ii", IntersectingIterator.class);
     IntersectingIterator.setColumnFamilies(ii, columns);
     bs.addScanIterator(ii);
     bs.setRanges(Collections.singleton(new Range()));
-    
+
     boolean sawDocID = false;
-    
+
     for (Entry<Key,Value> entry2 : bs) {
       if (entry2.getKey().getColumnQualifier().equals(docID)) {
         sawDocID = true;
         break;
       }
     }
-    
+
     bs.close();
-    
+
     if (!sawDocID)
       throw new Exception("Did not see doc " + docID + " in index.  terms:" + searchTerms + " " + indexTableName + " " + dataTableName);
   }
-  
+
   static Entry<Key,Value> findRandomDocument(State state, Environment env, String dataTableName, Random rand) throws Exception {
     Scanner scanner = env.getConnector().createScanner(dataTableName, Authorizations.EMPTY);
     scanner.setBatchSize(1);
     scanner.setRange(new Range(Integer.toString(rand.nextInt(0xfffffff), 16), null));
-    
+
     Iterator<Entry<Key,Value>> iter = scanner.iterator();
     if (!iter.hasNext())
       return null;
-    
+
     return iter.next();
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/ShardFixture.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/ShardFixture.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/ShardFixture.java
index 36e9d5e..63500a0 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/ShardFixture.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/ShardFixture.java
@@ -25,8 +25,8 @@ import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.MultiTableBatchWriter;
 import org.apache.accumulo.core.client.MutationsRejectedException;
 import org.apache.accumulo.core.conf.Property;
-import org.apache.accumulo.test.randomwalk.Fixture;
 import org.apache.accumulo.test.randomwalk.Environment;
+import org.apache.accumulo.test.randomwalk.Fixture;
 import org.apache.accumulo.test.randomwalk.State;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Logger;
@@ -58,7 +58,7 @@ public class ShardFixture extends Fixture {
 
     String tableId = conn.tableOperations().tableIdMap().get(name);
     log.info("Created index table " + name + "(id:" + tableId + ")");
-    
+
     SortedSet<Text> splits = genSplits(numPartitions, rand.nextInt(numPartitions) + 1, "%06x");
     conn.tableOperations().addSplits(name, splits);
 

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/SortTool.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/SortTool.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/SortTool.java
index 17af89e..1247bc2 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/SortTool.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/SortTool.java
@@ -36,38 +36,38 @@ public class SortTool extends Configured implements Tool {
   private String seqFile;
   private String splitFile;
   private Collection<Text> splits;
-  
+
   public SortTool(String seqFile, String outputDir, String splitFile, Collection<Text> splits) {
     this.outputDir = outputDir;
     this.seqFile = seqFile;
     this.splitFile = splitFile;
     this.splits = splits;
   }
-  
+
   public int run(String[] args) throws Exception {
     @SuppressWarnings("deprecation")
     Job job = new Job(getConf(), this.getClass().getSimpleName());
     job.setJarByClass(this.getClass());
-    
+
     if (job.getJar() == null) {
       log.error("M/R requires a jar file!  Run mvn package.");
       return 1;
     }
-    
+
     job.setInputFormatClass(SequenceFileInputFormat.class);
     SequenceFileInputFormat.setInputPaths(job, seqFile);
-    
+
     job.setPartitionerClass(KeyRangePartitioner.class);
     KeyRangePartitioner.setSplitFile(job, splitFile);
-    
+
     job.setMapOutputKeyClass(Key.class);
     job.setMapOutputValueClass(Value.class);
-    
+
     job.setNumReduceTasks(splits.size() + 1);
-    
+
     job.setOutputFormatClass(AccumuloFileOutputFormat.class);
     AccumuloFileOutputFormat.setOutputPath(job, new Path(outputDir));
-    
+
     job.waitForCompletion(true);
     return job.isSuccessful() ? 0 : 1;
   }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Split.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Split.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Split.java
index 6c6ab00..0420fb8 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Split.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/Split.java
@@ -26,16 +26,16 @@ import org.apache.accumulo.test.randomwalk.Test;
 import org.apache.hadoop.io.Text;
 
 public class Split extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
     String indexTableName = (String) state.get("indexTableName");
     int numPartitions = (Integer) state.get("numPartitions");
     Random rand = (Random) state.get("rand");
-    
+
     SortedSet<Text> splitSet = ShardFixture.genSplits(numPartitions, rand.nextInt(numPartitions) + 1, "%06x");
     log.debug("adding splits " + indexTableName);
     env.getConnector().tableOperations().addSplits(indexTableName, splitSet);
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/VerifyIndex.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/VerifyIndex.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/VerifyIndex.java
index c6f5703..70440d8 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/VerifyIndex.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/shard/VerifyIndex.java
@@ -30,42 +30,42 @@ import org.apache.accumulo.test.randomwalk.State;
 import org.apache.accumulo.test.randomwalk.Test;
 
 public class VerifyIndex extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {
-    
+
     String indexTableName = (String) state.get("indexTableName");
     String tmpIndexTableName = indexTableName + "_tmp";
-    
+
     // scan new and old index and verify identical
     Scanner indexScanner1 = env.getConnector().createScanner(tmpIndexTableName, Authorizations.EMPTY);
     Scanner indexScanner2 = env.getConnector().createScanner(indexTableName, Authorizations.EMPTY);
-    
+
     Iterator<Entry<Key,Value>> iter = indexScanner2.iterator();
-    
+
     int count = 0;
-    
+
     for (Entry<Key,Value> entry : indexScanner1) {
       if (!iter.hasNext())
         throw new Exception("index rebuild mismatch " + entry.getKey() + " " + indexTableName);
-      
+
       Key key1 = entry.getKey();
       Key key2 = iter.next().getKey();
-      
+
       if (!key1.equals(key2, PartialKey.ROW_COLFAM_COLQUAL))
         throw new Exception("index rebuild mismatch " + key1 + " " + key2 + " " + indexTableName + " " + tmpIndexTableName);
       count++;
       if (count % 1000 == 0)
         makingProgress();
     }
-    
+
     if (iter.hasNext())
       throw new Exception("index rebuild mismatch " + iter.next().getKey() + " " + tmpIndexTableName);
-    
+
     log.debug("Verified " + count + " index entries ");
-    
+
     env.getConnector().tableOperations().delete(indexTableName);
     env.getConnector().tableOperations().rename(tmpIndexTableName, indexTableName);
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/CreateTable.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/CreateTable.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/CreateTable.java
index 1d1e926..2e14de7 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/CreateTable.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/CreateTable.java
@@ -24,7 +24,7 @@ import org.apache.accumulo.test.randomwalk.State;
 import org.apache.accumulo.test.randomwalk.Test;
 
 public class CreateTable extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {}
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/DeleteTable.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/DeleteTable.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/DeleteTable.java
index 379a17d..c0b7e65 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/DeleteTable.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/DeleteTable.java
@@ -23,7 +23,7 @@ import org.apache.accumulo.test.randomwalk.State;
 import org.apache.accumulo.test.randomwalk.Test;
 
 public class DeleteTable extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {}
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/Ingest.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/Ingest.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/Ingest.java
index 461ef22..40f3555 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/Ingest.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/Ingest.java
@@ -23,7 +23,7 @@ import org.apache.accumulo.test.randomwalk.State;
 import org.apache.accumulo.test.randomwalk.Test;
 
 public class Ingest extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {}
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/Scan.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/Scan.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/Scan.java
index bd82b39..5273ea8 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/Scan.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/Scan.java
@@ -23,7 +23,7 @@ import org.apache.accumulo.test.randomwalk.State;
 import org.apache.accumulo.test.randomwalk.Test;
 
 public class Scan extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {}
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/Verify.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/Verify.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/Verify.java
index 1b2f4cf..c41d633 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/Verify.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/unit/Verify.java
@@ -23,7 +23,7 @@ import org.apache.accumulo.test.randomwalk.State;
 import org.apache.accumulo.test.randomwalk.Test;
 
 public class Verify extends Test {
-  
+
   @Override
   public void visit(State state, Environment env, Properties props) throws Exception {}
 }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/replication/ReplicationTablesPrinterThread.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/replication/ReplicationTablesPrinterThread.java b/test/src/main/java/org/apache/accumulo/test/replication/ReplicationTablesPrinterThread.java
index 750557c..0dec94c 100644
--- a/test/src/main/java/org/apache/accumulo/test/replication/ReplicationTablesPrinterThread.java
+++ b/test/src/main/java/org/apache/accumulo/test/replication/ReplicationTablesPrinterThread.java
@@ -23,7 +23,7 @@ import org.apache.accumulo.core.replication.PrintReplicationRecords;
 import org.apache.accumulo.core.util.Daemon;
 
 /**
- * 
+ *
  */
 public class ReplicationTablesPrinterThread extends Daemon {
 

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/replication/merkle/MerkleTreeNode.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/replication/merkle/MerkleTreeNode.java b/test/src/main/java/org/apache/accumulo/test/replication/merkle/MerkleTreeNode.java
index dfde41a..33ec056 100644
--- a/test/src/main/java/org/apache/accumulo/test/replication/merkle/MerkleTreeNode.java
+++ b/test/src/main/java/org/apache/accumulo/test/replication/merkle/MerkleTreeNode.java
@@ -32,7 +32,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * Encapsulates the level (height) within the tree, the ranges that it covers, and the new hash 
+ * Encapsulates the level (height) within the tree, the ranges that it covers, and the new hash
  */
 public class MerkleTreeNode {
   private static final Logger log = LoggerFactory.getLogger(MerkleTreeNode.class);

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/replication/merkle/RangeSerialization.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/replication/merkle/RangeSerialization.java b/test/src/main/java/org/apache/accumulo/test/replication/merkle/RangeSerialization.java
index 62bc800..6b07f2f 100644
--- a/test/src/main/java/org/apache/accumulo/test/replication/merkle/RangeSerialization.java
+++ b/test/src/main/java/org/apache/accumulo/test/replication/merkle/RangeSerialization.java
@@ -23,7 +23,7 @@ import org.apache.accumulo.core.data.Value;
 import org.apache.hadoop.io.Text;
 
 /**
- * 
+ *
  */
 public class RangeSerialization {
   private static final Text EMPTY = new Text(new byte[0]);
@@ -37,7 +37,7 @@ public class RangeSerialization {
     } else {
       startKey = new Key(holder);
     }
-    
+
     key.getColumnQualifier(holder);
     Key endKey;
     if (0 == holder.getLength()) {

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/replication/merkle/cli/CompareTables.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/replication/merkle/cli/CompareTables.java b/test/src/main/java/org/apache/accumulo/test/replication/merkle/cli/CompareTables.java
index 44a5a3b..8e97beb 100644
--- a/test/src/main/java/org/apache/accumulo/test/replication/merkle/cli/CompareTables.java
+++ b/test/src/main/java/org/apache/accumulo/test/replication/merkle/cli/CompareTables.java
@@ -41,14 +41,14 @@ import com.beust.jcommander.Parameter;
 /**
  * Accepts a set of tables, computes the hashes for each, and prints the top-level hash for each table.
  * <p>
- * Will automatically create output tables for intermediate hashes instead of requiring their existence.
- * This will raise an exception when the table we want to use already exists.
+ * Will automatically create output tables for intermediate hashes instead of requiring their existence. This will raise an exception when the table we want to
+ * use already exists.
  */
 public class CompareTables {
   private static final Logger log = LoggerFactory.getLogger(CompareTables.class);
 
   public static class CompareTablesOpts extends ClientOpts {
-    @Parameter(names={"--tables"}, description = "Tables to compare", variableArity=true)
+    @Parameter(names = {"--tables"}, description = "Tables to compare", variableArity = true)
     public List<String> tables;
 
     @Parameter(names = {"-nt", "--numThreads"}, required = false, description = "number of concurrent threads calculating digests")

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/replication/merkle/cli/ComputeRootHash.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/replication/merkle/cli/ComputeRootHash.java b/test/src/main/java/org/apache/accumulo/test/replication/merkle/cli/ComputeRootHash.java
index ea241a6..cb2761b 100644
--- a/test/src/main/java/org/apache/accumulo/test/replication/merkle/cli/ComputeRootHash.java
+++ b/test/src/main/java/org/apache/accumulo/test/replication/merkle/cli/ComputeRootHash.java
@@ -74,7 +74,7 @@ public class ComputeRootHash {
   }
 
   protected ArrayList<MerkleTreeNode> getLeaves(Connector conn, String tableName) throws TableNotFoundException {
-    //TODO make this a bit more resilient to very large merkle trees by lazily reading more data from the table when necessary
+    // TODO make this a bit more resilient to very large merkle trees by lazily reading more data from the table when necessary
     final Scanner s = conn.createScanner(tableName, Authorizations.EMPTY);
     final ArrayList<MerkleTreeNode> leaves = new ArrayList<MerkleTreeNode>();
 

http://git-wip-us.apache.org/repos/asf/accumulo/blob/6bc67602/test/src/main/java/org/apache/accumulo/test/replication/merkle/cli/GenerateHashes.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/replication/merkle/cli/GenerateHashes.java b/test/src/main/java/org/apache/accumulo/test/replication/merkle/cli/GenerateHashes.java
index c2c9a5a..7e960b1 100644
--- a/test/src/main/java/org/apache/accumulo/test/replication/merkle/cli/GenerateHashes.java
+++ b/test/src/main/java/org/apache/accumulo/test/replication/merkle/cli/GenerateHashes.java
@@ -120,7 +120,8 @@ public class GenerateHashes {
     }
   }
 
-  public Collection<Range> getRanges(Connector conn, String tableName, String splitsFile) throws TableNotFoundException, AccumuloSecurityException, AccumuloException, FileNotFoundException {
+  public Collection<Range> getRanges(Connector conn, String tableName, String splitsFile) throws TableNotFoundException, AccumuloSecurityException,
+      AccumuloException, FileNotFoundException {
     if (null == splitsFile) {
       log.info("Using table split points");
       Collection<Text> endRows = conn.tableOperations().listSplits(tableName);


Mime
View raw message