accumulo-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From vi...@apache.org
Subject svn commit: r1433166 [16/20] - in /accumulo/branches/ACCUMULO-259: ./ assemble/ assemble/platform/ assemble/scripts/ assemble/scripts/init.d/ bin/ conf/examples/1GB/native-standalone/ conf/examples/1GB/standalone/ conf/examples/2GB/native-standalone/ c...
Date Mon, 14 Jan 2013 22:03:34 GMT
Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/continuous/ContinuousWalk.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/continuous/ContinuousWalk.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/continuous/ContinuousWalk.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/continuous/ContinuousWalk.java Mon Jan 14 22:03:24 2013
@@ -19,7 +19,6 @@ package org.apache.accumulo.server.test.
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
-import java.net.InetAddress;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -29,36 +28,40 @@ import java.util.zip.CRC32;
 
 import org.apache.accumulo.cloudtrace.instrument.Span;
 import org.apache.accumulo.cloudtrace.instrument.Trace;
-import org.apache.accumulo.cloudtrace.instrument.Tracer;
-import org.apache.accumulo.cloudtrace.instrument.receivers.ZooSpanClient;
 import org.apache.accumulo.core.Constants;
 import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.Scanner;
-import org.apache.accumulo.core.client.ZooKeeperInstance;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.core.zookeeper.ZooUtil;
-import org.apache.accumulo.server.Accumulo;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
-import org.apache.log4j.FileAppender;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.apache.log4j.PatternLayout;
+
+import com.beust.jcommander.IStringConverter;
+import com.beust.jcommander.Parameter;
 
 
 public class ContinuousWalk {
   
-  private static String debugLog = null;
-  private static String authsFile = null;
+  static public class Opts extends ContinuousQuery.Opts {
+    class RandomAuthsConverter implements IStringConverter<RandomAuths> {
+      @Override
+      public RandomAuths convert(String value) {
+        try {
+          return new RandomAuths(value);
+        } catch (IOException e) {
+          throw new RuntimeException(e);
+        }
+      }
+    }
+    @Parameter(names="--authsFile", description="read the authorizations to use from a file")
+    RandomAuths randomAuths = new RandomAuths();
+  }
   
   static class BadChecksumException extends RuntimeException {
-    
     private static final long serialVersionUID = 1L;
     
     public BadChecksumException(String msg) {
@@ -67,25 +70,13 @@ public class ContinuousWalk {
     
   }
   
-  private static String[] processOptions(String[] args) {
-    ArrayList<String> al = new ArrayList<String>();
-    
-    for (int i = 0; i < args.length; i++) {
-      if (args[i].equals("--debug")) {
-        debugLog = args[++i];
-      } else if (args[i].equals("--auths")) {
-        authsFile = args[++i];
-      } else {
-        al.add(args[i]);
-      }
-    }
-    
-    return al.toArray(new String[al.size()]);
-  }
-  
   static class RandomAuths {
     private List<Authorizations> auths;
     
+    RandomAuths() {
+      auths = Collections.singletonList(Constants.NO_AUTHS);
+    }
+    
     RandomAuths(String file) throws IOException {
       if (file == null) {
         auths = Collections.singletonList(Constants.NO_AUTHS);
@@ -96,14 +87,14 @@ public class ContinuousWalk {
       
       FileSystem fs = FileSystem.get(new Configuration());
       BufferedReader in = new BufferedReader(new InputStreamReader(fs.open(new Path(file))));
-      
-      String line;
-      
-      while ((line = in.readLine()) != null) {
-        auths.add(new Authorizations(line.split(",")));
+      try {
+        String line;
+        while ((line = in.readLine()) != null) {
+          auths.add(new Authorizations(line.split(",")));
+        }
+      } finally {
+        in.close();
       }
-      
-      in.close();
     }
     
     Authorizations getAuths(Random r) {
@@ -112,50 +103,18 @@ public class ContinuousWalk {
   }
 
   public static void main(String[] args) throws Exception {
+    Opts opts = new Opts();
+    opts.parseArgs(ContinuousWalk.class.getName(), args);
     
-    args = processOptions(args);
-    
-    if (args.length != 8) {
-      throw new IllegalArgumentException("usage : " + ContinuousWalk.class.getName()
-          + " [--debug <debug log>] [--auths <file>] <instance name> <zookeepers> <user> <pass> <table> <min> <max> <sleep time>");
-    }
-    
-    if (debugLog != null) {
-      Logger logger = Logger.getLogger(Constants.CORE_PACKAGE_NAME);
-      logger.setLevel(Level.TRACE);
-      logger.setAdditivity(false);
-      logger.addAppender(new FileAppender(new PatternLayout("%d{dd HH:mm:ss,SSS} [%-8c{2}] %-5p: %m%n"), debugLog, true));
-    }
-    
-    String instanceName = args[0];
-    String zooKeepers = args[1];
-    
-    String user = args[2];
-    String password = args[3];
-    
-    String table = args[4];
-    
-    long min = Long.parseLong(args[5]);
-    long max = Long.parseLong(args[6]);
-    
-    long sleepTime = Long.parseLong(args[7]);
-    
-    Instance instance = new ZooKeeperInstance(instanceName, zooKeepers);
-    
-    String localhost = InetAddress.getLocalHost().getHostName();
-    String path = ZooUtil.getRoot(instance) + Constants.ZTRACERS;
-    Tracer.getInstance().addReceiver(new ZooSpanClient(zooKeepers, path, localhost, "cwalk", 1000));
-    Accumulo.enableTracing(localhost, "ContinuousWalk");
-    Connector conn = instance.getConnector(user, password.getBytes());
+    Connector conn = opts.getConnector();
     
     Random r = new Random();
-    RandomAuths randomAuths = new RandomAuths(authsFile);
     
     ArrayList<Value> values = new ArrayList<Value>();
     
     while (true) {
-      Scanner scanner = conn.createScanner(table, randomAuths.getAuths(r));
-      String row = findAStartRow(min, max, scanner, r);
+      Scanner scanner = conn.createScanner(opts.getTableName(), opts.randomAuths.getAuths(r));
+      String row = findAStartRow(opts.min, opts.max, scanner, r);
       
       while (row != null) {
         
@@ -184,12 +143,12 @@ public class ContinuousWalk {
           row = null;
         }
         
-        if (sleepTime > 0)
-          Thread.sleep(sleepTime);
+        if (opts.sleepTime > 0)
+          Thread.sleep(opts.sleepTime);
       }
       
-      if (sleepTime > 0)
-        Thread.sleep(sleepTime);
+      if (opts.sleepTime > 0)
+        Thread.sleep(opts.sleepTime);
     }
   }
   

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/continuous/TimeBinner.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/continuous/TimeBinner.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/continuous/TimeBinner.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/continuous/TimeBinner.java Mon Jan 14 22:03:24 2013
@@ -21,9 +21,14 @@ import java.io.InputStreamReader;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.HashMap;
+import java.util.Map.Entry;
 import java.util.Set;
 import java.util.TreeMap;
-import java.util.Map.Entry;
+
+import org.apache.accumulo.core.cli.ClientOpts.TimeConverter;
+import org.apache.accumulo.core.cli.Help;
+
+import com.beust.jcommander.Parameter;
 
 public class TimeBinner {
   
@@ -46,18 +51,25 @@ public class TimeBinner {
     return dw;
   }
   
+  static class Opts extends Help {
+    @Parameter(names="--period", description="period", converter=TimeConverter.class, required=true)
+    long period = 0;
+    @Parameter(names="--timeColumn", description="time column", required=true)
+    int timeColumn = 0;
+    @Parameter(names="--dataColumn", description="data column", required=true)
+    int dataColumn = 0;
+    @Parameter(names="--operation", description="one of: AVG, SUM, MIN, MAX, COUNT", required=true)
+    String operation;
+    @Parameter(names="--dateFormat", description="a SimpleDateFormat string that describes the date format")
+    String dateFormat = "MM/dd/yy-HH:mm:ss";
+  }
+  
   public static void main(String[] args) throws Exception {
+    Opts opts = new Opts();
+    opts.parseArgs(TimeBinner.class.getName(), args);
     
-    if (args.length != 5) {
-      System.out.println("usage : " + TimeBinner.class.getName() + " <period (seconds)> <time column> <data column> AVG|SUM|MIN|MAX|COUNT <date format>");
-      System.exit(-1);
-    }
-    
-    long period = Long.parseLong(args[0]) * 1000;
-    int timeColumn = Integer.parseInt(args[1]);
-    int dataColumn = Integer.parseInt(args[2]);
-    Operation operation = Operation.valueOf(args[3]);
-    SimpleDateFormat sdf = new SimpleDateFormat(args[4]);
+    Operation operation = Operation.valueOf(opts.operation);
+    SimpleDateFormat sdf = new SimpleDateFormat(opts.dateFormat);
     
     BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
     
@@ -73,18 +85,18 @@ public class TimeBinner {
       try {
         String tokens[] = line.split("\\s+");
         
-        long time = (long) Double.parseDouble(tokens[timeColumn]);
-        double data = Double.parseDouble(tokens[dataColumn]);
+        long time = (long) Double.parseDouble(tokens[opts.timeColumn]);
+        double data = Double.parseDouble(tokens[opts.dataColumn]);
         
-        time = (time / period) * period;
+        time = (time / opts.period) * opts.period;
         
         double data_min = data;
         double data_max = data;
         
         switch (operation) {
           case AMM_HACK1: {
-            data_min = Double.parseDouble(tokens[dataColumn - 2]);
-            data_max = Double.parseDouble(tokens[dataColumn - 1]);
+            data_min = Double.parseDouble(tokens[opts.dataColumn - 2]);
+            data_max = Double.parseDouble(tokens[opts.dataColumn - 1]);
             // fall through to AMM
           }
           case AMM: {

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/continuous/UndefinedAnalyzer.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/continuous/UndefinedAnalyzer.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/continuous/UndefinedAnalyzer.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/continuous/UndefinedAnalyzer.java Mon Jan 14 22:03:24 2013
@@ -32,17 +32,20 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.TreeMap;
+import java.util.concurrent.TimeUnit;
 
-import org.apache.accumulo.core.Constants;
+import org.apache.accumulo.server.cli.ClientOnDefaultTable;
+import org.apache.accumulo.core.cli.BatchScannerOpts;
 import org.apache.accumulo.core.client.BatchScanner;
 import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.ZooKeeperInstance;
 import org.apache.accumulo.core.client.impl.Tables;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.data.Value;
 import org.apache.hadoop.io.Text;
 
+import com.beust.jcommander.Parameter;
+
 /**
 * BUGS This code does not handle the fact that these files could include log events from previous months. It therefore assumes all dates are in the current
  * month. One solution might be to skip log files that haven't been touched in the last month, but that doesn't prevent newer files that have old dates in them.
@@ -235,22 +238,19 @@ public class UndefinedAnalyzer {
     }
   }
   
+  static class Opts extends ClientOnDefaultTable {
+    @Parameter(names="--logdir", description="directory containing the log files", required=true)
+    String logDir;
+    Opts() { super("ci"); }
+  }
+  
+  /**
+   * Class to analyze undefined references and accumulo logs to isolate the time/tablet where data was lost.
+   */
   public static void main(String[] args) throws Exception {
-    
-    if (args.length != 7) {
-      System.err.println("Usage : " + UndefinedAnalyzer.class.getName() + " <instance> <zoo> <user> <pass> <table> <ci log dir> <acu log dir>");
-      return;
-    }
-    
-    String instanceName = args[0];
-    String zooKeepers = args[1];
-    
-    String user = args[2];
-    String password = args[3];
-    
-    String table = args[4];
-    String logDir = args[5];
-    String acuLogDir = args[6];
+    Opts opts = new Opts();
+    BatchScannerOpts bsOpts = new BatchScannerOpts();
+    opts.parseArgs(UndefinedAnalyzer.class.getName(), args, opts);
     
     List<UndefinedNode> undefs = new ArrayList<UndefinedNode>();
     
@@ -264,10 +264,9 @@ public class UndefinedAnalyzer {
       undefs.add(new UndefinedNode(undef, ref));
     }
     
-    ZooKeeperInstance zki = new ZooKeeperInstance(instanceName, zooKeepers);
-    Connector conn = zki.getConnector(user, password.getBytes());
-    BatchScanner bscanner = conn.createBatchScanner(table, Constants.NO_AUTHS, 20);
-    
+    Connector conn = opts.getConnector();
+    BatchScanner bscanner = conn.createBatchScanner(opts.getTableName(), opts.auths, bsOpts.scanThreads);
+    bscanner.setTimeout(bsOpts.scanTimeout, TimeUnit.MILLISECONDS);
     List<Range> refs = new ArrayList<Range>();
     
     for (UndefinedNode undefinedNode : undefs)
@@ -290,8 +289,8 @@ public class UndefinedAnalyzer {
     
     bscanner.close();
     
-    IngestInfo ingestInfo = new IngestInfo(logDir);
-    TabletHistory tabletHistory = new TabletHistory(Tables.getTableId(zki, table), acuLogDir);
+    IngestInfo ingestInfo = new IngestInfo(opts.logDir);
+    TabletHistory tabletHistory = new TabletHistory(Tables.getTableId(conn.getInstance(), opts.getTableName()), opts.logDir);
     
     SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
     

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/BatchWriterFlushTest.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/BatchWriterFlushTest.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/BatchWriterFlushTest.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/BatchWriterFlushTest.java Mon Jan 14 22:03:24 2013
@@ -22,6 +22,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.concurrent.TimeUnit;
 import java.util.Random;
 
 import org.apache.accumulo.core.Constants;
@@ -71,7 +72,7 @@ public class BatchWriterFlushTest extend
   
   private void runLatencyTest() throws Exception {
     // should automatically flush after 3 seconds
-    BatchWriter bw = getConnector().createBatchWriter("bwlt", new BatchWriterConfig());
+    BatchWriter bw = getConnector().createBatchWriter("bwlt", new BatchWriterConfig().setMaxLatency(2000, TimeUnit.MILLISECONDS));
     Scanner scanner = getConnector().createScanner("bwlt", Constants.NO_AUTHS);
     
     Mutation m = new Mutation(new Text(String.format("r_%10d", 1)));

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/BloomFilterTest.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/BloomFilterTest.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/BloomFilterTest.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/BloomFilterTest.java Mon Jan 14 22:03:24 2013
@@ -35,6 +35,10 @@ import org.apache.accumulo.core.data.Mut
 import org.apache.accumulo.core.data.PartialKey;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.data.Value;
+import org.apache.accumulo.core.zookeeper.ZooUtil;
+import org.apache.accumulo.fate.ZooStore;
+import org.apache.accumulo.server.master.Master;
+import org.apache.accumulo.server.zookeeper.ZooReaderWriter;
 import org.apache.hadoop.io.Text;
 
 public class BloomFilterTest extends FunctionalTest {
@@ -53,19 +57,11 @@ public class BloomFilterTest extends Fun
   public List<TableSetup> getTablesToCreate() {
     ArrayList<TableSetup> tl = new ArrayList<TableSetup>();
     
-    // tl.add(new TableSetup("bt1",parseConfig(Property.TABLE_BLOOM_ENABLED+"=true", Property.TABLE_BLOOM_KEYDEPTH+"="+PartialKey.ROW.name())));
-    // tl.add(new TableSetup("bt2",parseConfig(Property.TABLE_BLOOM_ENABLED+"=true", Property.TABLE_BLOOM_KEYDEPTH+"="+PartialKey.ROW_COLFAM.name())));
-    // tl.add(new TableSetup("bt3",parseConfig(Property.TABLE_BLOOM_ENABLED+"=true",
-    // Property.TABLE_BLOOM_KEYDEPTH+"="+PartialKey.ROW_COLFAM_COLQUAL.name())));
-    // tl.add(new TableSetup("bt4",parseConfig(Property.TABLE_BLOOM_ENABLED+"=true", Property.TABLE_BLOOM_KEYDEPTH+"="+PartialKey.ROW.name())));
-    tl.add(new TableSetup("bt1", parseConfig(Property.TABLE_BLOOM_ENABLED + "=true", Property.TABLE_BLOOM_KEY_FUNCTOR
-        + "=org.apache.accumulo.core.file.keyfunctor.RowFunctor")));
-    tl.add(new TableSetup("bt2", parseConfig(Property.TABLE_BLOOM_ENABLED + "=true", Property.TABLE_BLOOM_KEY_FUNCTOR
-        + "=org.apache.accumulo.core.file.keyfunctor.ColumnFamilyFunctor")));
-    tl.add(new TableSetup("bt3", parseConfig(Property.TABLE_BLOOM_ENABLED + "=true", Property.TABLE_BLOOM_KEY_FUNCTOR
-        + "=org.apache.accumulo.core.file.keyfunctor.ColumnQualifierFunctor")));
-    tl.add(new TableSetup("bt4", parseConfig(Property.TABLE_BLOOM_ENABLED + "=true", Property.TABLE_BLOOM_KEY_FUNCTOR
-        + "=org.apache.accumulo.core.file.keyfunctor.RowFunctor")));
+    tl.add(new TableSetup("bt1"));
+    tl.add(new TableSetup("bt2"));
+    tl.add(new TableSetup("bt3"));
+    tl.add(new TableSetup("bt4"));
+    
     return tl;
   }
   
@@ -89,11 +85,43 @@ public class BloomFilterTest extends Fun
     super.checkRFiles("bt3", 1, 1, 1, 1);
     super.checkRFiles("bt4", 1, 1, 1, 1);
     
+    // these queries should only run quickly if bloom filters are working, so lets get a base
+    long t1 = query("bt1", 1, 0, 1000000000, 100000, 10000);
+    long t2 = query("bt2", 2, 0, 1000000000, 100000, 10000);
+    long t3 = query("bt3", 3, 0, 1000000000, 100000, 10000);
+    
+    getConnector().tableOperations().setProperty("bt1", Property.TABLE_BLOOM_ENABLED.getKey(), "true");
+    getConnector().tableOperations().setProperty("bt1", Property.TABLE_BLOOM_KEY_FUNCTOR.getKey(), "org.apache.accumulo.core.file.keyfunctor.RowFunctor");
+    getConnector().tableOperations().compact("bt1", null, null, false, false);
+    
+    getConnector().tableOperations().setProperty("bt2", Property.TABLE_BLOOM_ENABLED.getKey(), "true");
+    getConnector().tableOperations().setProperty("bt2", Property.TABLE_BLOOM_KEY_FUNCTOR.getKey(),
+        "org.apache.accumulo.core.file.keyfunctor.ColumnFamilyFunctor");
+    getConnector().tableOperations().compact("bt2", null, null, false, false);
+    
+    getConnector().tableOperations().setProperty("bt3", Property.TABLE_BLOOM_ENABLED.getKey(), "true");
+    getConnector().tableOperations().setProperty("bt3", Property.TABLE_BLOOM_KEY_FUNCTOR.getKey(),
+        "org.apache.accumulo.core.file.keyfunctor.ColumnQualifierFunctor");
+    getConnector().tableOperations().compact("bt3", null, null, false, false);
+    
+    getConnector().tableOperations().setProperty("bt4", Property.TABLE_BLOOM_ENABLED.getKey(), "true");
+    getConnector().tableOperations().setProperty("bt4", Property.TABLE_BLOOM_KEY_FUNCTOR.getKey(), "org.apache.accumulo.core.file.keyfunctor.RowFunctor");
+    getConnector().tableOperations().compact("bt4", null, null, false, false);
+    
+    Thread.sleep(200);
+    
+    ZooStore<Master> zs = new ZooStore<Master>(ZooUtil.getRoot(getConnector().getInstance()) + Constants.ZFATE, ZooReaderWriter.getRetryingInstance());
+    while (!zs.list().isEmpty())
+      Thread.sleep(1000);
     // these queries should only run quickly if bloom
     // filters are working
-    query("bt1", 1, 0, 1000000000, 100000, 10000, 6);
-    query("bt2", 2, 0, 1000000000, 100000, 10000, 6);
-    query("bt3", 3, 0, 1000000000, 100000, 10000, 6);
+    long tb1 = query("bt1", 1, 0, 1000000000, 100000, 10000);
+    long tb2 = query("bt2", 2, 0, 1000000000, 100000, 10000);
+    long tb3 = query("bt3", 3, 0, 1000000000, 100000, 10000);
+    
+    timeCheck(t1, tb1);
+    timeCheck(t2, tb2);
+    timeCheck(t3, tb3);
     
     // test querying for empty key
     Scanner scanner = getConnector().createScanner("bt4", Constants.NO_AUTHS);
@@ -105,7 +133,13 @@ public class BloomFilterTest extends Fun
     
   }
   
-  private void query(String table, int depth, long start, long end, int num, int step, int secs) throws Exception {
+  private void timeCheck(long t1, long t2) throws Exception {
+    if (((t1 - t2) * 1.0 / t1) < .1) {
+      throw new Exception("Queries had less than 10% improvement (old: " + t1 + " new: " + t2 + " improvement: " + ((t1 - t2) * 1.0 / t1) + "%)");
+    }
+  }
+  
+  private long query(String table, int depth, long start, long end, int num, int step) throws Exception {
     Random r = new Random(42);
     
     HashSet<Long> expected = new HashSet<Long>();
@@ -158,12 +192,9 @@ public class BloomFilterTest extends Fun
       throw new Exception("Did not get all expected values " + expected.size());
     }
     
-    if ((t2 - t1) / 1000.0 >= secs) {
-      throw new Exception("Queries exceeded expected run time " + (t2 - t1) / 1000.0 + " " + secs);
-    }
-    
     bs.close();
-
+    
+    return t2 - t1;
   }
   
   private void write(String table, int depth, long start, long end, int step) throws Exception {

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/BulkSplitOptimizationTest.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/BulkSplitOptimizationTest.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/BulkSplitOptimizationTest.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/BulkSplitOptimizationTest.java Mon Jan 14 22:03:24 2013
@@ -63,7 +63,7 @@ public class BulkSplitOptimizationTest e
     FileSystem fs = FileSystem.get(CachedConfiguration.getInstance());
     fs.delete(new Path("/tmp/testmf"), true);
     
-    CreateRFiles.main(new String[] {"tmp/testmf", "8", "0", "100000", "99"});
+    CreateRFiles.main(new String[] { "--output", "tmp/testmf", "--numThreads", "8", "--start", "0", "--end", "100000", "--splits", "99"});
     
     bulkImport(fs, TABLE_NAME, "/tmp/testmf");
     
@@ -82,7 +82,7 @@ public class BulkSplitOptimizationTest e
     
     checkSplits(TABLE_NAME, 50, 100);
     
-    VerifyIngest.main(new String[] {"-timestamp", "1", "-size", "50", "-random", "56", "100000", "0", "1"});
+    VerifyIngest.main(new String[] {"--timestamp", "1", "--size", "50", "--random", "56", "--rows", "100000", "--start", "0", "--cols", "1"});
     
     // ensure each tablet does not have all map files
     checkRFiles(TABLE_NAME, 50, 100, 1, 4);

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/CacheTestClean.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/CacheTestClean.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/CacheTestClean.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/CacheTestClean.java Mon Jan 14 22:03:24 2013
@@ -17,6 +17,7 @@
 package org.apache.accumulo.server.test.functional;
 
 import java.io.File;
+import java.util.Arrays;
 
 import org.apache.accumulo.fate.zookeeper.IZooReaderWriter;
 import org.apache.accumulo.fate.zookeeper.ZooUtil.NodeMissingPolicy;
@@ -42,7 +43,7 @@ public class CacheTestClean {
     } else {
       File[] files = reportDir.listFiles();
       if (files.length != 0)
-        throw new Exception("dir " + reportDir + " is not empty");
+        throw new Exception("dir " + reportDir + " is not empty: " + Arrays.asList(files));
     }
     
   }

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/FateStarvationTest.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/FateStarvationTest.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/FateStarvationTest.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/FateStarvationTest.java Mon Jan 14 22:03:24 2013
@@ -48,13 +48,13 @@ public class FateStarvationTest extends 
   public void run() throws Exception {
     getConnector().tableOperations().create("test_ingest");
     
-    getConnector().tableOperations().addSplits("test_ingest", TestIngest.CreateTable.getSplitPoints(0, 100000, 50));
+    getConnector().tableOperations().addSplits("test_ingest", TestIngest.getSplitPoints(0, 100000, 50));
     
     TestIngest.main(new String[] {"-random", "89", "-timestamp", "7", "-size", "" + 50, "100000", "0", "1"});
     
     getConnector().tableOperations().flush("test_ingest", null, null, true);
     
-    List<Text> splits = new ArrayList<Text>(TestIngest.CreateTable.getSplitPoints(0, 100000, 67));
+    List<Text> splits = new ArrayList<Text>(TestIngest.getSplitPoints(0, 100000, 67));
     Random rand = new Random();
     
     for (int i = 0; i < 100; i++) {

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/FunctionalTest.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/FunctionalTest.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/FunctionalTest.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/FunctionalTest.java Mon Jan 14 22:03:24 2013
@@ -28,6 +28,7 @@ import java.util.TreeMap;
 import java.util.TreeSet;
 
 import org.apache.accumulo.core.Constants;
+import org.apache.accumulo.server.cli.ClientOpts;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.Connector;
@@ -42,38 +43,16 @@ import org.apache.accumulo.core.data.Ran
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.security.thrift.AuthInfo;
 import org.apache.accumulo.server.conf.ServerConfiguration;
-import org.apache.accumulo.start.classloader.AccumuloClassLoader;
-import org.apache.commons.cli.BasicParser;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
+import org.apache.accumulo.start.classloader.vfs.AccumuloVFSClassLoader;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 
+import com.beust.jcommander.JCommander;
+import com.beust.jcommander.Parameter;
+
 public abstract class FunctionalTest {
-  private static Options opts;
-  private static Option masterOpt;
-  private static Option passwordOpt;
-  private static Option usernameOpt;
-  private static Option instanceNameOpt;
-  
-  static {
-    usernameOpt = new Option("u", "username", true, "username");
-    passwordOpt = new Option("p", "password", true, "password");
-    masterOpt = new Option("m", "master", true, "master");
-    instanceNameOpt = new Option("i", "instanceName", true, "instance name");
-    
-    opts = new Options();
-    
-    opts.addOption(usernameOpt);
-    opts.addOption(passwordOpt);
-    opts.addOption(masterOpt);
-    opts.addOption(instanceNameOpt);
-  }
-  
+
   public static Map<String,String> parseConfig(String... perTableConfigs) {
     
     TreeMap<String,String> config = new TreeMap<String,String>();
@@ -126,19 +105,10 @@ public abstract class FunctionalTest {
     
   }
   
-  private String master = "";
   private String username = "";
   private String password = "";
   private String instanceName = "";
   
-  protected void setMaster(String master) {
-    this.master = master;
-  }
-  
-  protected String getMaster() {
-    return master;
-  }
-  
   protected void setUsername(String username) {
     this.username = username;
   }
@@ -270,57 +240,49 @@ public abstract class FunctionalTest {
     
   }
   
+  static class Opts extends ClientOpts {
+    @Parameter(names="--classname", required=true, description="name of the class under test")
+    String classname = null;
+    
+    @Parameter(names="--opt", required=true, description="the options for test")
+    String opt = null;
+  }
+  
+  
   public static void main(String[] args) throws Exception {
-    CommandLine cl = null;
-    try {
-      cl = new BasicParser().parse(opts, args);
-    } catch (ParseException e) {
-      printHelpAndExit(e.toString());
-    }
-    
-    String master = cl.getOptionValue(masterOpt.getOpt(), "localhost");
-    String username = cl.getOptionValue(usernameOpt.getOpt(), "root");
-    String password = cl.getOptionValue(passwordOpt.getOpt(), "secret");
-    String instanceName = cl.getOptionValue(instanceNameOpt.getOpt(), "FuncTest");
-    
-    String remainingArgs[] = cl.getArgs();
-    if (remainingArgs.length < 2) {
-      printHelpAndExit("Missing java classname to test and/or options.");
-    }
-    String clazz = remainingArgs[0];
-    String opt = remainingArgs[1];
+    Opts opts = new Opts();
+    opts.parseArgs(FunctionalTest.class.getName(), args);
     
-    Class<? extends FunctionalTest> testClass = AccumuloClassLoader.loadClass(clazz, FunctionalTest.class);
+    Class<? extends FunctionalTest> testClass = AccumuloVFSClassLoader.loadClass(opts.classname, FunctionalTest.class);
     FunctionalTest fTest = testClass.newInstance();
     
-    fTest.setMaster(master);
-    fTest.setUsername(username);
-    fTest.setPassword(password);
-    fTest.setInstanceName(instanceName);
+    //fTest.setMaster(master);
+    fTest.setUsername(opts.user);
+    fTest.setPassword(new String(opts.getPassword()));
+    fTest.setInstanceName(opts.instance);
     
-    if (opt.equals("getConfig")) {
+    if (opts.opt.equals("getConfig")) {
       Map<String,String> iconfig = fTest.getInitialConfig();
       System.out.println("{");
       for (Entry<String,String> entry : iconfig.entrySet()) {
         System.out.println("'" + entry.getKey() + "':'" + entry.getValue() + "',");
       }
       System.out.println("}");
-    } else if (opt.equals("setup")) {
+    } else if (opts.opt.equals("setup")) {
       fTest.setup();
-    } else if (opt.equals("run")) {
+    } else if (opts.opt.equals("run")) {
       fTest.run();
-    } else if (opt.equals("cleanup")) {
+    } else if (opts.opt.equals("cleanup")) {
       fTest.cleanup();
     } else {
-    	printHelpAndExit("Unknown option: " + opt);
+    	printHelpAndExit("Unknown option: " + opts.opt);
     }
     
   }
 
   static void printHelpAndExit(String message) {
       System.out.println(message);
-      HelpFormatter formatter = new HelpFormatter();
-      formatter.printHelp( "FunctionalTest {options} java_class [getconfig|setup|run|cleanup]", opts );
+      new JCommander(new Opts()).usage();
       System.exit(1);
   }
   

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/LargeRowTest.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/LargeRowTest.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/LargeRowTest.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/LargeRowTest.java Mon Jan 14 22:03:24 2013
@@ -35,6 +35,7 @@ import org.apache.accumulo.core.data.Val
 import org.apache.accumulo.core.util.UtilWaitThread;
 import org.apache.accumulo.server.test.TestIngest;
 import org.apache.hadoop.io.Text;
+import org.apache.log4j.Logger;
 
 public class LargeRowTest extends FunctionalTest {
   
@@ -51,7 +52,7 @@ public class LargeRowTest extends Functi
   
   @Override
   public Map<String,String> getInitialConfig() {
-    return parseConfig(Property.TSERV_MAJC_DELAY + "=1");
+    return parseConfig(Property.TSERV_MAJC_DELAY + "=10ms");
   }
   
   @Override
@@ -94,8 +95,8 @@ public class LargeRowTest extends Functi
     
     getConnector().tableOperations().setProperty(REG_TABLE_NAME, Property.TABLE_SPLIT_THRESHOLD.getKey(), "" + SPLIT_THRESH);
     
-    UtilWaitThread.sleep(5000);
-    
+    UtilWaitThread.sleep(12000);
+    Logger.getLogger(LargeRowTest.class).warn("checking splits");
     checkSplits(REG_TABLE_NAME, 1, 9);
     
     verify(REG_TABLE_NAME);
@@ -136,8 +137,8 @@ public class LargeRowTest extends Functi
     // verify while table flush is running
     verify(table);
     
-    // give flush time to complete
-    UtilWaitThread.sleep(4000);
+    // give split time to complete
+    getConnector().tableOperations().flush(table, null, null, true);
     
     checkSplits(table, expectedSplits, expectedSplits);
     

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/MaxOpenTest.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/MaxOpenTest.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/MaxOpenTest.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/MaxOpenTest.java Mon Jan 14 22:03:24 2013
@@ -53,7 +53,7 @@ public class MaxOpenTest extends Functio
   @Override
   public List<TableSetup> getTablesToCreate() {
     Map<String,String> config = parseConfig(Property.TABLE_MAJC_RATIO + "=10");
-    TableSetup ts = new TableSetup("test_ingest", config, TestIngest.CreateTable.getSplitPoints(0, NUM_TO_INGEST, NUM_TABLETS));
+    TableSetup ts = new TableSetup("test_ingest", config, TestIngest.getSplitPoints(0, NUM_TO_INGEST, NUM_TABLETS));
     return Collections.singletonList(ts);
   }
   

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/PermissionsTest.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/PermissionsTest.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/PermissionsTest.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/PermissionsTest.java Mon Jan 14 22:03:24 2013
@@ -76,7 +76,7 @@ public class PermissionsTest {
       verifyHasOnlyTheseSystemPermissions(getConnector(), getConnector().whoami(), SystemPermission.values());
       
       // create the test user
-      getConnector().securityOperations().createUser(TEST_USER, TEST_PASS.getBytes(), Constants.NO_AUTHS);
+      getConnector().securityOperations().createUser(TEST_USER, TEST_PASS.getBytes());
       Connector test_user_conn = getInstance().getConnector(TEST_USER, TEST_PASS.getBytes());
       verifyHasNoSystemPermissions(getConnector(), TEST_USER, SystemPermission.values());
       
@@ -168,7 +168,7 @@ public class PermissionsTest {
         case CREATE_USER:
           user = "__CREATE_USER_WITHOUT_PERM_TEST__";
           try {
-            test_user_conn.securityOperations().createUser(user, password.getBytes(), Constants.NO_AUTHS);
+            test_user_conn.securityOperations().createUser(user, password.getBytes());
             throw new IllegalStateException("Should NOT be able to create a user");
           } catch (AccumuloSecurityException e) {
             if (e.getErrorCode() != SecurityErrorCode.PERMISSION_DENIED || root_conn.securityOperations().authenticateUser(user, password.getBytes()))
@@ -177,7 +177,7 @@ public class PermissionsTest {
           break;
         case DROP_USER:
           user = "__DROP_USER_WITHOUT_PERM_TEST__";
-          root_conn.securityOperations().createUser(user, password.getBytes(), Constants.NO_AUTHS);
+          root_conn.securityOperations().createUser(user, password.getBytes());
           try {
             test_user_conn.securityOperations().dropUser(user);
             throw new IllegalStateException("Should NOT be able to delete a user");
@@ -188,7 +188,7 @@ public class PermissionsTest {
           break;
         case ALTER_USER:
           user = "__ALTER_USER_WITHOUT_PERM_TEST__";
-          root_conn.securityOperations().createUser(user, password.getBytes(), Constants.NO_AUTHS);
+          root_conn.securityOperations().createUser(user, password.getBytes());
           try {
             test_user_conn.securityOperations().changeUserAuthorizations(user, new Authorizations("A", "B"));
             throw new IllegalStateException("Should NOT be able to alter a user");
@@ -250,20 +250,20 @@ public class PermissionsTest {
           break;
         case CREATE_USER:
           user = "__CREATE_USER_WITH_PERM_TEST__";
-          test_user_conn.securityOperations().createUser(user, password.getBytes(), Constants.NO_AUTHS);
+          test_user_conn.securityOperations().createUser(user, password.getBytes());
           if (!root_conn.securityOperations().authenticateUser(user, password.getBytes()))
             throw new IllegalStateException("Should be able to create a user");
           break;
         case DROP_USER:
           user = "__DROP_USER_WITH_PERM_TEST__";
-          root_conn.securityOperations().createUser(user, password.getBytes(), Constants.NO_AUTHS);
+          root_conn.securityOperations().createUser(user, password.getBytes());
           test_user_conn.securityOperations().dropUser(user);
           if (root_conn.securityOperations().authenticateUser(user, password.getBytes()))
             throw new IllegalStateException("Should be able to delete a user");
           break;
         case ALTER_USER:
           user = "__ALTER_USER_WITH_PERM_TEST__";
-          root_conn.securityOperations().createUser(user, password.getBytes(), Constants.NO_AUTHS);
+          root_conn.securityOperations().createUser(user, password.getBytes());
           test_user_conn.securityOperations().changeUserAuthorizations(user, new Authorizations("A", "B"));
           if (root_conn.securityOperations().getUserAuthorizations(user).isEmpty())
             throw new IllegalStateException("Should be able to alter a user");
@@ -326,7 +326,7 @@ public class PermissionsTest {
     @Override
     public void run() throws AccumuloException, AccumuloSecurityException, TableExistsException, TableNotFoundException, MutationsRejectedException {
       // create the test user
-      getConnector().securityOperations().createUser(TEST_USER, TEST_PASS.getBytes(), Constants.NO_AUTHS);
+      getConnector().securityOperations().createUser(TEST_USER, TEST_PASS.getBytes());
       Connector test_user_conn = getInstance().getConnector(TEST_USER, TEST_PASS.getBytes());
       
       // check for read-only access to metadata table

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/RunTests.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/RunTests.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/RunTests.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/RunTests.java Mon Jan 14 22:03:24 2013
@@ -22,6 +22,7 @@ import java.io.InputStream;
 import java.util.Arrays;
 import java.util.List;
 
+import org.apache.accumulo.core.cli.Help;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
@@ -36,6 +37,8 @@ import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.log4j.Logger;
 
+import com.beust.jcommander.Parameter;
+
 /**
  * Runs the functional tests via map-reduce.
  * 
@@ -56,7 +59,7 @@ import org.apache.log4j.Logger;
  * Run the map-reduce job:
  * 
  * <pre>
- *  $ ./bin/accumulo accumulo.server.test.functional.RunTests /user/hadoop/tests /user/hadoop/results
+ *  $ ./bin/accumulo accumulo.server.test.functional.RunTests --tests /user/hadoop/tests --output /user/hadoop/results
  * </pre>
  * 
  * Note that you will need to have some configuration in conf/accumulo-site.xml (to locate zookeeper). The map-reduce jobs will not use your local accumulo
@@ -70,6 +73,13 @@ public class RunTests extends Configured
   
   private Job job = null;
   
+  static class Opts extends Help {
+    @Parameter(names="--tests", description="newline separated list of tests to run", required=true)
+    String testFile;
+    @Parameter(names="--output", description="destination for the results of tests in HDFS", required=true)
+    String outputPath;
+  }
+  
   static public class TestMapper extends Mapper<LongWritable,Text,Text,Text> {
     
     @Override
@@ -103,6 +113,8 @@ public class RunTests extends Configured
   public int run(String[] args) throws Exception {
     job = new Job(getConf(), JOB_NAME);
     job.setJarByClass(this.getClass());
+    Opts opts = new Opts();
+    opts.parseArgs(RunTests.class.getName(), args);
     
     // this is like 1-2 tests per mapper
     Configuration conf = job.getConfiguration();
@@ -113,14 +125,14 @@ public class RunTests extends Configured
     
     // set input
     job.setInputFormatClass(TextInputFormat.class);
-    TextInputFormat.setInputPaths(job, new Path(args[0]));
+    TextInputFormat.setInputPaths(job, new Path(opts.testFile));
     
     // set output
     job.setOutputFormatClass(TextOutputFormat.class);
     FileSystem fs = FileSystem.get(conf);
-    Path destination = new Path(args[1]);
+    Path destination = new Path(opts.outputPath);
     if (fs.exists(destination)) {
-      log.info("Deleting existing output directory " + args[1]);
+      log.info("Deleting existing output directory " + opts.outputPath);
       fs.delete(destination, true);
     }
     TextOutputFormat.setOutputPath(job, destination);

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/ZombieTServer.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/ZombieTServer.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/ZombieTServer.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/functional/ZombieTServer.java Mon Jan 14 22:03:24 2013
@@ -99,7 +99,7 @@ public class ZombieTServer {
     TransactionWatcher watcher = new TransactionWatcher();
     final ThriftClientHandler tch = new ThriftClientHandler(instance, watcher);
     Processor<Iface> processor = new Processor<Iface>(tch);
-    ServerPort serverPort = TServerUtils.startTServer(port, processor, "ZombieTServer", "walking dead", 2, 1000);
+    ServerPort serverPort = TServerUtils.startTServer(port, processor, "ZombieTServer", "walking dead", 2, 1000, 10*1024*1024);
     
     InetSocketAddress addr = new InetSocketAddress(InetAddress.getLocalHost(), serverPort.port);
     String addressString = AddressUtil.toString(addr);

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/performance/scan/CollectTabletStats.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/performance/scan/CollectTabletStats.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/performance/scan/CollectTabletStats.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/performance/scan/CollectTabletStats.java Mon Jan 14 22:03:24 2013
@@ -38,9 +38,10 @@ import java.util.concurrent.ExecutorServ
 import java.util.concurrent.Executors;
 
 import org.apache.accumulo.core.Constants;
+import org.apache.accumulo.core.cli.ScannerOpts;
 import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.Scanner;
-import org.apache.accumulo.core.client.ZooKeeperInstance;
 import org.apache.accumulo.core.client.impl.Tables;
 import org.apache.accumulo.core.conf.AccumuloConfiguration;
 import org.apache.accumulo.core.data.ArrayByteSequence;
@@ -69,6 +70,7 @@ import org.apache.accumulo.core.util.Cac
 import org.apache.accumulo.core.util.MetadataTable;
 import org.apache.accumulo.core.util.Stat;
 import org.apache.accumulo.server.ServerConstants;
+import org.apache.accumulo.server.cli.ClientOnRequiredTable;
 import org.apache.accumulo.server.conf.ServerConfiguration;
 import org.apache.accumulo.server.conf.TableConfiguration;
 import org.apache.hadoop.conf.Configuration;
@@ -77,74 +79,67 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
+import org.apache.log4j.Logger;
+
+import com.beust.jcommander.Parameter;
 
 public class CollectTabletStats {
-  public static void main(String[] args) throws Exception {
-    
+  private static final Logger log = Logger.getLogger(CollectTabletStats.class);
+  
+  static class CollectOptions extends ClientOnRequiredTable {
+    @Parameter(names="--iterations", description="number of iterations")
     int iterations = 3;
+    @Parameter(names="-t", description="number of threads")
     int numThreads = 1;
-    boolean selectLocalTablets = true;
-    String columnsTmp[] = new String[] {};
+    @Parameter(names="-f", description="select far tablets, default is to use local tablets")
+    boolean selectFarTablets = false;
+    @Parameter(names="-c", description="comma separated list of columns")
+    String columns;
+  }
+
+  public static void main(String[] args) throws Exception {
     
-    int index = 0;
-    String processedArgs[] = new String[8];
-    for (int i = 0; i < args.length; i++) {
-      if (args[i].equals("-i"))
-        iterations = Integer.parseInt(args[++i]);
-      else if (args[i].equals("-t"))
-        numThreads = Integer.parseInt(args[++i]);
-      else if (args[i].equals("-l"))
-        selectLocalTablets = true;
-      else if (args[i].equals("-f"))
-        selectLocalTablets = false;
-      else if (args[i].equals("-c"))
-        columnsTmp = args[++i].split(",");
-      else
-        processedArgs[index++] = args[i];
-    }
+    final CollectOptions opts = new CollectOptions();
+    final ScannerOpts scanOpts = new ScannerOpts();
+    opts.parseArgs(CollectTabletStats.class.getName(), args, scanOpts);
     
+    String columnsTmp[] = new String[] {};
+    if (opts.columns != null)
+      columnsTmp = opts.columns.split(",");
     final String columns[] = columnsTmp;
     
-    if (index != 7) {
-      System.err.println("USAGE : " + CollectTabletStats.class
-          + " [-i <iterations>] [-t <num threads>] [-l|-f] [-c <column fams>] <instance> <zookeepers> <user> <pass> <table> <auths> <batch size>");
-      return;
-    }
     final FileSystem fs = FileSystem.get(CachedConfiguration.getInstance());
 
-    String instance = processedArgs[0];
-    String zookeepers = processedArgs[1];
-    String user = processedArgs[2];
-    String pass = processedArgs[3];
-    final String tableName = processedArgs[4];
-    final String auths[] = processedArgs[5].split(",");
-    final int batchSize = Integer.parseInt(processedArgs[6]);
-    ZooKeeperInstance zki = new ZooKeeperInstance(instance, zookeepers);
-    final ServerConfiguration sconf = new ServerConfiguration(zki);
+    Instance instance = opts.getInstance();
+    final ServerConfiguration sconf = new ServerConfiguration(instance);
     
-    String tableId = Tables.getNameToIdMap(zki).get(tableName);
+    String tableId = Tables.getNameToIdMap(instance).get(opts.tableName);
+    if (tableId == null) {
+      log.error("Unable to find table named " + opts.tableName);
+      System.exit(-1);
+    }
     
     Map<KeyExtent,String> locations = new HashMap<KeyExtent,String>();
-    List<KeyExtent> candidates = findTablets(selectLocalTablets, user, pass, tableName, zki, locations);
+    List<KeyExtent> candidates = findTablets(!opts.selectFarTablets, opts.user, opts.getPassword(), opts.tableName, instance, locations);
     
-    if (candidates.size() < numThreads) {
-      System.err.println("ERROR : Unable to find " + numThreads + " " + (selectLocalTablets ? "local" : "far") + " tablets");
+    if (candidates.size() < opts.numThreads) {
+      System.err.println("ERROR : Unable to find " + opts.numThreads + " " + (opts.selectFarTablets ? "far" : "local") + " tablets");
       System.exit(-1);
     }
     
-    List<KeyExtent> tabletsToTest = selectRandomTablets(numThreads, candidates);
+    List<KeyExtent> tabletsToTest = selectRandomTablets(opts.numThreads, candidates);
     
     Map<KeyExtent,List<String>> tabletFiles = new HashMap<KeyExtent,List<String>>();
     
     for (KeyExtent ke : tabletsToTest) {
-      List<String> files = getTabletFiles(user, pass, zki, tableId, ke);
+      List<String> files = getTabletFiles(opts.user, opts.getPassword(), opts.getInstance(), tableId, ke);
       tabletFiles.put(ke, files);
     }
     
     System.out.println();
     System.out.println("run location      : " + InetAddress.getLocalHost().getHostName() + "/" + InetAddress.getLocalHost().getHostAddress());
-    System.out.println("num threads       : " + numThreads);
-    System.out.println("table             : " + tableName);
+    System.out.println("num threads       : " + opts.numThreads);
+    System.out.println("table             : " + opts.tableName);
     System.out.println("table id          : " + tableId);
     
     for (KeyExtent ke : tabletsToTest) {
@@ -156,9 +151,9 @@ public class CollectTabletStats {
     
     System.out.println("%n*** RUNNING TEST ***%n");
     
-    ExecutorService threadPool = Executors.newFixedThreadPool(numThreads);
+    ExecutorService threadPool = Executors.newFixedThreadPool(opts.numThreads);
     
-    for (int i = 0; i < iterations; i++) {
+    for (int i = 0; i < opts.iterations; i++) {
       
       ArrayList<Test> tests = new ArrayList<Test>();
       
@@ -174,10 +169,10 @@ public class CollectTabletStats {
         tests.add(test);
       }
       
-      runTest("read files", tests, numThreads, threadPool);
+      runTest("read files", tests, opts.numThreads, threadPool);
     }
     
-    for (int i = 0; i < iterations; i++) {
+    for (int i = 0; i < opts.iterations; i++) {
       
       ArrayList<Test> tests = new ArrayList<Test>();
       
@@ -185,59 +180,59 @@ public class CollectTabletStats {
         final List<String> files = tabletFiles.get(ke);
         Test test = new Test(ke) {
           public int runTest() throws Exception {
-            return readFilesUsingIterStack(fs, sconf, files, auths, ke, columns, false);
+            return readFilesUsingIterStack(fs, sconf, files, opts.auths, ke, columns, false);
           }
         };
         
         tests.add(test);
       }
       
-      runTest("read tablet files w/ system iter stack", tests, numThreads, threadPool);
+      runTest("read tablet files w/ system iter stack", tests, opts.numThreads, threadPool);
     }
     
-    for (int i = 0; i < iterations; i++) {
+    for (int i = 0; i < opts.iterations; i++) {
       ArrayList<Test> tests = new ArrayList<Test>();
       
       for (final KeyExtent ke : tabletsToTest) {
         final List<String> files = tabletFiles.get(ke);
         Test test = new Test(ke) {
           public int runTest() throws Exception {
-            return readFilesUsingIterStack(fs, sconf, files, auths, ke, columns, true);
+            return readFilesUsingIterStack(fs, sconf, files, opts.auths, ke, columns, true);
           }
         };
         
         tests.add(test);
       }
       
-      runTest("read tablet files w/ table iter stack", tests, numThreads, threadPool);
+      runTest("read tablet files w/ table iter stack", tests, opts.numThreads, threadPool);
     }
     
-    for (int i = 0; i < iterations; i++) {
+    for (int i = 0; i < opts.iterations; i++) {
       
       ArrayList<Test> tests = new ArrayList<Test>();
       
-      final Connector conn = zki.getConnector(user, pass.getBytes());
+      final Connector conn = opts.getConnector();
       
       for (final KeyExtent ke : tabletsToTest) {
         Test test = new Test(ke) {
           public int runTest() throws Exception {
-            return scanTablet(conn, tableName, auths, batchSize, ke.getPrevEndRow(), ke.getEndRow(), columns);
+            return scanTablet(conn, opts.tableName, opts.auths, scanOpts.scanBatchSize, ke.getPrevEndRow(), ke.getEndRow(), columns);
           }
         };
         
         tests.add(test);
       }
       
-      runTest("read tablet data through accumulo", tests, numThreads, threadPool);
+      runTest("read tablet data through accumulo", tests, opts.numThreads, threadPool);
     }
     
     for (final KeyExtent ke : tabletsToTest) {
-      final Connector conn = zki.getConnector(user, pass.getBytes());
+      final Connector conn = opts.getConnector();
       
       threadPool.submit(new Runnable() {
         public void run() {
           try {
-            calcTabletStats(conn, tableName, auths, batchSize, ke, columns);
+            calcTabletStats(conn, opts.tableName, opts.auths, scanOpts.scanBatchSize, ke, columns);
           } catch (Exception e) {
             e.printStackTrace();
           }
@@ -345,11 +340,11 @@ public class CollectTabletStats {
     
   }
   
-  private static List<KeyExtent> findTablets(boolean selectLocalTablets, String user, String pass, String table, ZooKeeperInstance zki,
+  private static List<KeyExtent> findTablets(boolean selectLocalTablets, String user, byte[] pass, String table, Instance zki,
       Map<KeyExtent,String> locations) throws Exception {
     SortedSet<KeyExtent> tablets = new TreeSet<KeyExtent>();
     
-    MetadataTable.getEntries(zki, new AuthInfo(user, ByteBuffer.wrap(pass.getBytes()), zki.getInstanceID()), table, false, locations, tablets);
+    MetadataTable.getEntries(zki, new AuthInfo(user, ByteBuffer.wrap(pass), zki.getInstanceID()), table, false, locations, tablets);
     
     InetAddress localaddress = InetAddress.getLocalHost();
     
@@ -380,11 +375,11 @@ public class CollectTabletStats {
     return tabletsToTest;
   }
   
-  private static List<String> getTabletFiles(String user, String pass, ZooKeeperInstance zki, String tableId, KeyExtent ke) {
+  private static List<String> getTabletFiles(String user, byte[] pass, Instance zki, String tableId, KeyExtent ke) {
     List<String> files = new ArrayList<String>();
     
     SortedMap<Key,Value> tkv = new TreeMap<Key,Value>();
-    MetadataTable.getTabletAndPrevTabletKeyValues(zki, tkv, ke, null, new AuthInfo(user, ByteBuffer.wrap(pass.getBytes()), zki.getInstanceID()));
+    MetadataTable.getTabletAndPrevTabletKeyValues(zki, tkv, ke, null, new AuthInfo(user, ByteBuffer.wrap(pass), zki.getInstanceID()));
     
     Set<Entry<Key,Value>> es = tkv.entrySet();
     for (Entry<Key,Value> entry : es) {
@@ -477,7 +472,7 @@ public class CollectTabletStats {
     return columnSet;
   }
   
-  private static int readFilesUsingIterStack(FileSystem fs, ServerConfiguration aconf, List<String> files, String auths[], KeyExtent ke, String[] columns,
+  private static int readFilesUsingIterStack(FileSystem fs, ServerConfiguration aconf, List<String> files, Authorizations auths, KeyExtent ke, String[] columns,
       boolean useTableIterators)
       throws Exception {
     
@@ -492,7 +487,7 @@ public class CollectTabletStats {
     List<IterInfo> emptyIterinfo = Collections.emptyList();
     Map<String,Map<String,String>> emptySsio = Collections.emptyMap();
     TableConfiguration tconf = aconf.getTableConfiguration(ke.getTableId().toString());
-    reader = createScanIterator(ke, readers, new Authorizations(auths), new byte[] {}, new HashSet<Column>(), emptyIterinfo, emptySsio, useTableIterators, tconf);
+    reader = createScanIterator(ke, readers,auths, new byte[] {}, new HashSet<Column>(), emptyIterinfo, emptySsio, useTableIterators, tconf);
     
     HashSet<ByteSequence> columnSet = createColumnBSS(columns);
     
@@ -509,9 +504,9 @@ public class CollectTabletStats {
     
   }
   
-  private static int scanTablet(Connector conn, String table, String[] auths, int batchSize, Text prevEndRow, Text endRow, String[] columns) throws Exception {
+  private static int scanTablet(Connector conn, String table, Authorizations auths, int batchSize, Text prevEndRow, Text endRow, String[] columns) throws Exception {
     
-    Scanner scanner = conn.createScanner(table, new Authorizations(auths));
+    Scanner scanner = conn.createScanner(table, auths);
     scanner.setBatchSize(batchSize);
     scanner.setRange(new Range(prevEndRow, false, endRow, true));
     
@@ -529,11 +524,11 @@ public class CollectTabletStats {
     return count;
   }
   
-  private static void calcTabletStats(Connector conn, String table, String[] auths, int batchSize, KeyExtent ke, String[] columns) throws Exception {
+  private static void calcTabletStats(Connector conn, String table, Authorizations auths, int batchSize, KeyExtent ke, String[] columns) throws Exception {
     
     // long t1 = System.currentTimeMillis();
     
-    Scanner scanner = conn.createScanner(table, new Authorizations(auths));
+    Scanner scanner = conn.createScanner(table, auths);
     scanner.setBatchSize(batchSize);
     scanner.setRange(new Range(ke.getPrevEndRow(), false, ke.getEndRow(), true));
     

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/performance/thrift/NullTserver.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/performance/thrift/NullTserver.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/performance/thrift/NullTserver.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/performance/thrift/NullTserver.java Mon Jan 14 22:03:24 2013
@@ -25,9 +25,12 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.accumulo.cloudtrace.thrift.TInfo;
+import org.apache.accumulo.core.cli.Help;
 import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.ZooKeeperInstance;
 import org.apache.accumulo.core.client.impl.Tables;
+import org.apache.accumulo.core.conf.DefaultConfiguration;
+import org.apache.accumulo.core.conf.Property;
 import org.apache.accumulo.core.data.KeyExtent;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.data.thrift.InitialMultiScan;
@@ -44,6 +47,7 @@ import org.apache.accumulo.core.data.thr
 import org.apache.accumulo.core.data.thrift.UpdateErrors;
 import org.apache.accumulo.core.master.thrift.TabletServerStatus;
 import org.apache.accumulo.core.security.thrift.AuthInfo;
+import org.apache.accumulo.core.security.thrift.SecurityErrorCode;
 import org.apache.accumulo.core.security.thrift.ThriftSecurityException;
 import org.apache.accumulo.core.tabletserver.thrift.ActiveScan;
 import org.apache.accumulo.core.tabletserver.thrift.TabletClientService;
@@ -64,6 +68,8 @@ import org.apache.accumulo.server.zookee
 import org.apache.hadoop.io.Text;
 import org.apache.thrift.TException;
 
+import com.beust.jcommander.Parameter;
+
 
 /**
 * The purpose of this class is to serve as a fake tserver that is a data sink like /dev/null. NullTserver modifies the !METADATA location entries for a table to
@@ -91,7 +97,7 @@ public class NullTserver {
     
     @Override
     public UpdateErrors closeUpdate(TInfo tinfo, long updateID) {
-      return new UpdateErrors(new HashMap<TKeyExtent,Long>(), new ArrayList<TConstraintViolationSummary>(), new ArrayList<TKeyExtent>());
+      return new UpdateErrors(new HashMap<TKeyExtent,Long>(), new ArrayList<TConstraintViolationSummary>(), new HashMap<TKeyExtent, SecurityErrorCode>());
     }
     
     @Override
@@ -198,28 +204,36 @@ public class NullTserver {
     }
   }
   
+  static class Opts extends Help {
+    @Parameter(names={"-i", "--instance"}, description="instance name", required=true)
+    String iname = null;
+    @Parameter(names={"-z", "--keepers"}, description="comma-separated list of zookeeper host:ports", required=true)
+    String keepers = null;
+    @Parameter(names="--table", description="table to adopt", required=true)
+    String tableName = null;
+    @Parameter(names="--port", description="port number to use")
+    int port = DefaultConfiguration.getInstance().getPort(Property.TSERV_CLIENTPORT);
+  }
+  
   public static void main(String[] args) throws Exception {
-    
-    String iname = args[0];
-    String keepers = args[1];
-    String tableName = args[2];
-    int port = Integer.parseInt(args[3]);
+    Opts opts = new Opts();
+    opts.parseArgs(NullTserver.class.getName(), args);
     
     TransactionWatcher watcher = new TransactionWatcher();
     ThriftClientHandler tch = new ThriftClientHandler(HdfsZooInstance.getInstance(), watcher);
     Processor<Iface> processor = new Processor<Iface>(tch);
-    TServerUtils.startTServer(port, processor, "NullTServer", "null tserver", 2, 1000);
+    TServerUtils.startTServer(opts.port, processor, "NullTServer", "null tserver", 2, 1000, 10*1024*1024);
     
-    InetSocketAddress addr = new InetSocketAddress(InetAddress.getLocalHost(), port);
+    InetSocketAddress addr = new InetSocketAddress(InetAddress.getLocalHost(), opts.port);
     
     // modify !METADATA
-    ZooKeeperInstance zki = new ZooKeeperInstance(iname, keepers);
-    String tableId = Tables.getTableId(zki, tableName);
+    ZooKeeperInstance zki = new ZooKeeperInstance(opts.iname, opts.keepers);
+    String tableId = Tables.getTableId(zki, opts.tableName);
     
     // read the locations for the table
     Range tableRange = new KeyExtent(new Text(tableId), null, null).toMetadataRange();
     MetaDataTableScanner s = new MetaDataTableScanner(zki, SecurityConstants.getSystemCredentials(), tableRange);
-    long randomSessionID = port;
+    long randomSessionID = opts.port;
     TServerInstance instance = new TServerInstance(addr, randomSessionID);
     List<Assignment> assignments = new ArrayList<Assignment>();
     while (s.hasNext()) {

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/randomwalk/Framework.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/randomwalk/Framework.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/randomwalk/Framework.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/randomwalk/Framework.java Mon Jan 14 22:03:24 2013
@@ -20,9 +20,12 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.util.HashMap;
 import java.util.Properties;
+
 import org.apache.log4j.Logger;
 import org.apache.log4j.xml.DOMConfigurator;
 
+import com.beust.jcommander.Parameter;
+
 public class Framework {
   
   private static final Logger log = Logger.getLogger(Framework.class);
@@ -92,28 +95,33 @@ public class Framework {
     return node;
   }
   
+  static class Opts extends org.apache.accumulo.core.cli.Help {
+    @Parameter(names="--configDir", required=true, description="directory containing the test configuration")
+    String configDir;
+    @Parameter(names="--logDir", required=true, description="location of the local logging directory")
+    String localLogPath;
+    @Parameter(names="--logId", required=true, description="a unique log identifier (like a hostname, or pid)")
+    String logId;
+    @Parameter(names="--module", required=true, description="the name of the module to run")
+    String module;
+  }
+  
   public static void main(String[] args) throws Exception {
-    
-    if (args.length != 4) {
-      throw new IllegalArgumentException("usage : Framework <configDir> <localLogPath> <logId> <module>");
-    }
-    String configDir = args[0];
-    String localLogPath = args[1];
-    String logId = args[2];
-    String module = args[3];
-    
+    Opts opts = new Opts();
+    opts.parseArgs(Framework.class.getName(), args);
+
     Properties props = new Properties();
-    FileInputStream fis = new FileInputStream(configDir + "/randomwalk.conf");
+    FileInputStream fis = new FileInputStream(opts.configDir + "/randomwalk.conf");
     props.load(fis);
     fis.close();
     
-    System.setProperty("localLog", localLogPath + "/" + logId);
-    System.setProperty("nfsLog", props.getProperty("NFS_LOGPATH") + "/" + logId);
+    System.setProperty("localLog", opts.localLogPath + "/" + opts.logId);
+    System.setProperty("nfsLog", props.getProperty("NFS_LOGPATH") + "/" + opts.logId);
     
-    DOMConfigurator.configure(configDir + "logger.xml");
+    DOMConfigurator.configure(opts.configDir + "logger.xml");
     
     State state = new State(props);
-    int retval = getInstance().run(module, state, configDir);
+    int retval = getInstance().run(opts.module, state, opts.configDir);
     
     System.exit(retval);
   }

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/randomwalk/concurrent/CreateUser.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/randomwalk/concurrent/CreateUser.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/randomwalk/concurrent/CreateUser.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/randomwalk/concurrent/CreateUser.java Mon Jan 14 22:03:24 2013
@@ -20,7 +20,6 @@ import java.util.List;
 import java.util.Properties;
 import java.util.Random;
 
-import org.apache.accumulo.core.Constants;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.server.test.randomwalk.State;
@@ -40,7 +39,7 @@ public class CreateUser extends Test {
     
     try {
       log.debug("Creating user " + userName);
-      conn.securityOperations().createUser(userName, (userName + "pass").getBytes(), Constants.NO_AUTHS);
+      conn.securityOperations().createUser(userName, (userName + "pass").getBytes());
     } catch (AccumuloSecurityException ex) {
       log.debug("Create user failed " + ex.getCause());
     }

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/randomwalk/security/CreateUser.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/randomwalk/security/CreateUser.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/randomwalk/security/CreateUser.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/randomwalk/security/CreateUser.java Mon Jan 14 22:03:24 2013
@@ -21,7 +21,6 @@ import java.util.Properties;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.server.test.randomwalk.State;
 import org.apache.accumulo.server.test.randomwalk.Test;
 
@@ -37,7 +36,7 @@ public class CreateUser extends Test {
     boolean hasPermission = WalkingSecurity.get(state).canCreateUser(WalkingSecurity.get(state).getSysAuthInfo(), tableUserName);
     byte[] tabUserPass = "Super Sekret Table User Password".getBytes();
     try {
-      conn.securityOperations().createUser(tableUserName, tabUserPass, new Authorizations());
+      conn.securityOperations().createUser(tableUserName, tabUserPass);
     } catch (AccumuloSecurityException ae) {
       switch (ae.getErrorCode()) {
         case PERMISSION_DENIED:
@@ -47,7 +46,7 @@ public class CreateUser extends Test {
           // create user anyway for sake of state
           {
             if (!exists) {
-              state.getConnector().securityOperations().createUser(tableUserName, tabUserPass, new Authorizations());
+              state.getConnector().securityOperations().createUser(tableUserName, tabUserPass);
               WalkingSecurity.get(state).createUser(tableUserName, tabUserPass);
             }
             return;

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/randomwalk/security/SecurityFixture.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/randomwalk/security/SecurityFixture.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/randomwalk/security/SecurityFixture.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/randomwalk/security/SecurityFixture.java Mon Jan 14 22:03:24 2013
@@ -48,7 +48,7 @@ public class SecurityFixture extends Fix
       conn.securityOperations().dropUser(systemUserName);
     
     byte[] sysUserPass = "sysUser".getBytes();
-    conn.securityOperations().createUser(systemUserName, sysUserPass, new Authorizations());
+    conn.securityOperations().createUser(systemUserName, sysUserPass);
     
     WalkingSecurity.get(state).setTableName(secTableName);
     state.set("rootUserPass", state.getAuthInfo().password.array());

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/scalability/Run.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/scalability/Run.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/scalability/Run.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/test/scalability/Run.java Mon Jan 14 22:03:24 2013
@@ -20,33 +20,39 @@ import java.io.FileInputStream;
 import java.util.Properties;
 import java.net.InetAddress;
 
+import org.apache.accumulo.core.cli.Help;
 import org.apache.accumulo.core.util.CachedConfiguration;
 import org.apache.accumulo.server.test.scalability.ScaleTest;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
+import com.beust.jcommander.Parameter;
+
 public class Run {
   
+  static class Opts extends Help {
+    @Parameter(names="--testId", required=true)
+    String testId;
+    @Parameter(names="--action", required=true, description="one of 'setup', 'teardown' or 'client'")
+    String action;
+    @Parameter(names="--count", description="number of tablet servers", required=true)
+    int numTabletServers; 
+  }
+  
   public static void main(String[] args) throws Exception {
     
     final String sitePath = "/tmp/scale-site.conf";
     final String testPath = "/tmp/scale-test.conf";
-    
-    // parse command line
-    if (args.length != 3) {
-      throw new IllegalArgumentException("usage : Run <testId> <action> <numTabletServers>");
-    }
-    String testId = args[0];
-    String action = args[1];
-    int numTabletServers = Integer.parseInt(args[2]);
+    Opts opts = new Opts();
+    opts.parseArgs(Run.class.getName(), args);
     
     Configuration conf = CachedConfiguration.getInstance();
     FileSystem fs;
     fs = FileSystem.get(conf);
     
     fs.copyToLocalFile(new Path("/accumulo-scale/conf/site.conf"), new Path(sitePath));
-    fs.copyToLocalFile(new Path(String.format("/accumulo-scale/conf/%s.conf", testId)), new Path(testPath));
+    fs.copyToLocalFile(new Path(String.format("/accumulo-scale/conf/%s.conf", opts.testId)), new Path(testPath));
     
     // load configuration file properties
     Properties scaleProps = new Properties();
@@ -62,19 +68,19 @@ public class Run {
       e.printStackTrace();
     }
     
-    ScaleTest test = (ScaleTest) Class.forName(String.format("accumulo.server.test.scalability.%s", testId)).newInstance();
+    ScaleTest test = (ScaleTest) Class.forName(String.format("accumulo.server.test.scalability.%s", opts.testId)).newInstance();
     
-    test.init(scaleProps, testProps, numTabletServers);
+    test.init(scaleProps, testProps, opts.numTabletServers);
     
-    if (action.equalsIgnoreCase("setup")) {
+    if (opts.action.equalsIgnoreCase("setup")) {
       test.setup();
-    } else if (action.equalsIgnoreCase("client")) {
+    } else if (opts.action.equalsIgnoreCase("client")) {
       InetAddress addr = InetAddress.getLocalHost();
       String host = addr.getHostName();
       fs.createNewFile(new Path("/accumulo-scale/clients/" + host));
       test.client();
       fs.copyFromLocalFile(new Path("/tmp/scale.out"), new Path("/accumulo-scale/results/" + host));
-    } else if (action.equalsIgnoreCase("teardown")) {
+    } else if (opts.action.equalsIgnoreCase("teardown")) {
       test.teardown();
     }
   }

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/trace/TraceFileSystem.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/trace/TraceFileSystem.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/trace/TraceFileSystem.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/trace/TraceFileSystem.java Mon Jan 14 22:03:24 2013
@@ -35,11 +35,10 @@ import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Progressable;
 
-
 // If FileSystem was an interface, we could use a Proxy, but it's not, so we have to override everything manually
 
 public class TraceFileSystem extends FileSystem {
-
+  
   @Override
   public void setConf(Configuration conf) {
     Span span = Trace.start("setConf");
@@ -63,6 +62,7 @@ public class TraceFileSystem extends Fil
     }
   }
   
+  @Override
   public BlockLocation[] getFileBlockLocations(FileStatus file, long start, long len) throws IOException {
     Span span = Trace.start("getFileBlockLocations");
     try {
@@ -229,6 +229,7 @@ public class TraceFileSystem extends Fil
     }
   }
   
+  @Deprecated
   @Override
   public short getReplication(Path src) throws IOException {
     Span span = Trace.start("getReplication");
@@ -671,7 +672,7 @@ public class TraceFileSystem extends Fil
   public FileSystem getImplementation() {
     return impl;
   }
-
+  
   @Override
   public URI getUri() {
     Span span = Trace.start("getUri");

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/trace/TraceServer.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/trace/TraceServer.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/trace/TraceServer.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/trace/TraceServer.java Mon Jan 14 22:03:24 2013
@@ -19,7 +19,6 @@ package org.apache.accumulo.server.trace
 import java.net.InetSocketAddress;
 import java.net.ServerSocket;
 import java.nio.channels.ServerSocketChannel;
-import java.nio.charset.Charset;
 import java.util.TimerTask;
 import java.util.concurrent.TimeUnit;
 
@@ -77,8 +76,6 @@ public class TraceServer implements Watc
   private Connector connector;
   final String table;
 
-  private static final Charset utf8 = Charset.forName("UTF8");
-
   private static void put(Mutation m, String cf, String cq, byte[] bytes, int len) {
     m.put(new Text(cf), new Text(cq), new Value(bytes, 0, len));
   }
@@ -124,7 +121,7 @@ public class TraceServer implements Watc
       Mutation spanMutation = new Mutation(new Text(idString));
       Mutation indexMutation = new Mutation(new Text("idx:" + s.svc + ":" + startString));
       long diff = s.stop - s.start;
-      indexMutation.put(new Text(s.description), new Text(s.sender), new Value((idString + ":" + Long.toHexString(diff)).getBytes(utf8)));
+      indexMutation.put(new Text(s.description), new Text(s.sender), new Value((idString + ":" + Long.toHexString(diff)).getBytes()));
       ByteArrayTransport transport = new ByteArrayTransport();
       TCompactProtocol protocol = new TCompactProtocol(transport);
       s.write(protocol);
@@ -160,7 +157,7 @@ public class TraceServer implements Watc
     table = conf.get(Property.TRACE_TABLE);
     while (true) {
       try {
-        connector = serverConfiguration.getInstance().getConnector(conf.get(Property.TRACE_USER), conf.get(Property.TRACE_PASSWORD).getBytes(utf8));
+        connector = serverConfiguration.getInstance().getConnector(conf.get(Property.TRACE_USER), conf.get(Property.TRACE_PASSWORD).getBytes());
         if (!connector.tableOperations().exists(table)) {
           connector.tableOperations().create(table);
         }
@@ -224,7 +221,7 @@ public class TraceServer implements Watc
   private void registerInZooKeeper(String name) throws Exception {
     String root = ZooUtil.getRoot(serverConfiguration.getInstance()) + Constants.ZTRACERS;
     IZooReaderWriter zoo = ZooReaderWriter.getInstance();
-    String path = zoo.putEphemeralSequential(root + "/trace-", name.getBytes(utf8));
+    String path = zoo.putEphemeralSequential(root + "/trace-", name.getBytes());
     zoo.exists(path, this);
   }
   
@@ -245,12 +242,20 @@ public class TraceServer implements Watc
   public void process(WatchedEvent event) {
     log.debug("event " + event.getPath() + " " + event.getType() + " " + event.getState());
     if (event.getState() == KeeperState.Expired) {
-      log.warn("Logger lost zookeeper registration at " + event.getPath());
+      log.warn("Trace server lost zookeeper registration at " + event.getPath());
       server.stop();
     } else if (event.getType() == EventType.NodeDeleted) {
-      log.warn("Logger zookeeper entry lost " + event.getPath());
+      log.warn("Trace server zookeeper entry lost " + event.getPath());
       server.stop();
     }
+    try {
+      if (ZooReaderWriter.getInstance().exists(event.getPath(), this))
+        return;
+    } catch (Exception ex) {
+      log.error(ex, ex);
+    }
+    log.warn("Trace server unable to reset watch on zookeeper registration");
+    server.stop();
   }
   
 }

Modified: accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/util/AddFilesWithMissingEntries.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/util/AddFilesWithMissingEntries.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/util/AddFilesWithMissingEntries.java (original)
+++ accumulo/branches/ACCUMULO-259/server/src/main/java/org/apache/accumulo/server/util/AddFilesWithMissingEntries.java Mon Jan 14 22:03:24 2013
@@ -16,15 +16,13 @@
  */
 package org.apache.accumulo.server.util;
 
-import java.nio.charset.Charset;
-import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Map.Entry;
 import java.util.Set;
 
 import org.apache.accumulo.core.Constants;
-import org.apache.accumulo.core.client.BatchWriterConfig;
-import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.server.cli.ClientOpts;
+import org.apache.accumulo.core.cli.BatchWriterOpts;
 import org.apache.accumulo.core.client.MultiTableBatchWriter;
 import org.apache.accumulo.core.client.Scanner;
 import org.apache.accumulo.core.data.Key;
@@ -33,10 +31,7 @@ import org.apache.accumulo.core.data.Mut
 import org.apache.accumulo.core.data.PartialKey;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.security.thrift.AuthInfo;
 import org.apache.accumulo.server.ServerConstants;
-import org.apache.accumulo.server.client.HdfsZooInstance;
-import org.apache.accumulo.server.security.SecurityConstants;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -44,27 +39,33 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Logger;
 
+import com.beust.jcommander.Parameter;
+
 public class AddFilesWithMissingEntries {
   
   static final Logger log = Logger.getLogger(AddFilesWithMissingEntries.class);
-  static boolean update = false;
-
-  private static final Charset utf8 = Charset.forName("UTF8");
+  
+  public static class Opts extends ClientOpts {
+    @Parameter(names="-update", description="Make changes to the !METADATA table to include missing files")
+    boolean update = false;
+  }
+  
   
   /**
+   * A utility to add files to the !METADATA table that are not listed in the root tablet.  
+   * This is a recovery tool for someone who knows what they are doing.  It might be better to 
+   * save off files, and recover your instance by re-initializing and importing the existing files.
+   *  
    * @param args
    */
   public static void main(String[] args) throws Exception {
-    if (args.length > 1 || new HashSet<String>(Arrays.asList(args)).contains("-?")) {
-      System.err.println("Usage: bin/accumulo " + AddFilesWithMissingEntries.class.getName() + " [update]");
-      System.exit(1);
-    }
-    update = args.length > 0;
-    final AuthInfo creds = SecurityConstants.getSystemCredentials();
-    final Connector connector = HdfsZooInstance.getInstance().getConnector(creds.getUser(), creds.getPassword());
+    Opts opts = new Opts();
+    BatchWriterOpts bwOpts = new BatchWriterOpts();
+    opts.parseArgs(AddFilesWithMissingEntries.class.getName(), args, bwOpts);
+    
     final Key rootTableEnd = new Key(Constants.ROOT_TABLET_EXTENT.getEndRow());
     final Range range = new Range(rootTableEnd.followingKey(PartialKey.ROW), true, Constants.METADATA_RESERVED_KEYSPACE_START_KEY, false);
-    final Scanner scanner = connector.createScanner(Constants.METADATA_TABLE_NAME, Constants.NO_AUTHS);
+    final Scanner scanner = opts.getConnector().createScanner(Constants.METADATA_TABLE_NAME, Constants.NO_AUTHS);
     scanner.setRange(range);
     final Configuration conf = new Configuration();
     final FileSystem fs = FileSystem.get(conf);
@@ -74,7 +75,7 @@ public class AddFilesWithMissingEntries 
     Set<String> knownFiles = new HashSet<String>();
     
     int count = 0;
-    final MultiTableBatchWriter writer = connector.createMultiTableBatchWriter(new BatchWriterConfig());
+    final MultiTableBatchWriter writer = opts.getConnector().createMultiTableBatchWriter(bwOpts.getBatchWriterConfig());
     
     // collect the list of known files and the directory for each extent
     for (Entry<Key,Value> entry : scanner) {
@@ -84,7 +85,7 @@ public class AddFilesWithMissingEntries 
       if (!ke.equals(last)) {
         if (directory != null) {
           // add any files in the directory unknown to the key extent
-          count += addUnknownFiles(fs, directory, knownFiles, last, writer);
+          count += addUnknownFiles(fs, directory, knownFiles, last, writer, opts.update);
         }
         directory = null;
         knownFiles.clear();
@@ -101,19 +102,21 @@ public class AddFilesWithMissingEntries 
     }
     if (directory != null) {
       // catch the last key extent
-      count += addUnknownFiles(fs, directory, knownFiles, last, writer);
+      count += addUnknownFiles(fs, directory, knownFiles, last, writer, opts.update);
     }
     log.info("There were " + count + " files that are unknown to the metadata table");
     writer.close();
   }
   
-  private static int addUnknownFiles(FileSystem fs, String directory, Set<String> knownFiles, KeyExtent ke, MultiTableBatchWriter writer) throws Exception {
+  private static int addUnknownFiles(FileSystem fs, String directory, Set<String> knownFiles, KeyExtent ke, MultiTableBatchWriter writer, boolean update) throws Exception {
     int count = 0;
     final String tableId = ke.getTableId().toString();
     final Text row = ke.getMetadataEntry();
     log.info(row.toString());
     final Path path = new Path(ServerConstants.getTablesDir() + "/" + tableId + directory);
     for (FileStatus file : fs.listStatus(path)) {
+      if (file.getPath().getName().endsWith("_tmp") || file.getPath().getName().endsWith("_tmp.rf"))
+        continue;
       final String filename = directory + "/" + file.getPath().getName();
       if (!knownFiles.contains(filename)) {
         count++;
@@ -121,7 +124,7 @@ public class AddFilesWithMissingEntries 
         String size = Long.toString(file.getLen());
         String entries = "1"; // lie
         String value = size + "," + entries;
-        m.put(Constants.METADATA_DATAFILE_COLUMN_FAMILY, new Text(filename), new Value(value.getBytes(utf8)));
+        m.put(Constants.METADATA_DATAFILE_COLUMN_FAMILY, new Text(filename), new Value(value.getBytes()));
         if (update) {
           writer.getBatchWriter(Constants.METADATA_TABLE_NAME).addMutation(m);
         }



Mime
View raw message