hbase-commits mailing list archives

From bus...@apache.org
Subject hbase git commit: HBASE-13728 Remove direct use of Hadoop's GenericOptionsParser.
Date Fri, 29 May 2015 18:25:04 GMT
Repository: hbase
Updated Branches:
  refs/heads/master 18fef4690 -> a016b23e8


HBASE-13728 Remove direct use of Hadoop's GenericOptionsParser.

* In most cases, just shift to proper use of ToolRunner (a minimal sketch of the pattern follows below).
* Fix a timing issue in TestImportExport:
** add some diagnostic logs to Import and TestImportExport
** when testing for WAL use under different durability requests, ignore meta edits
* In the case of TestImportTsv, make a local anonymous class so we can get at its internals.
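
For reference, the pattern applied across these files is the standard Hadoop Tool/ToolRunner idiom. A minimal sketch of that pattern (illustrative only; the class name, table argument, and usage below are not taken from this patch):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.conf.Configured;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.util.Tool;
  import org.apache.hadoop.util.ToolRunner;

  public class ExampleTool extends Configured implements Tool {
    @Override
    public int run(String[] args) throws Exception {
      // ToolRunner has already consumed the generic options (-D, -conf, etc.),
      // so args contains only the tool-specific arguments.
      if (args.length < 1) {
        System.err.println("Usage: ExampleTool <tablename>");
        return -1;
      }
      Configuration conf = getConf();
      // ... configure and submit the job using conf ...
      return 0;
    }

    public static void main(String[] args) throws Exception {
      System.exit(ToolRunner.run(HBaseConfiguration.create(), new ExampleTool(), args));
    }
  }

With this shape, callers no longer invoke GenericOptionsParser directly: ToolRunner parses the generic options, applies them to the Configuration handed to setConf(), and passes only the remaining arguments to run().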


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a016b23e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a016b23e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a016b23e

Branch: refs/heads/master
Commit: a016b23e850208dce9e7a85f9b6cf748411d80f4
Parents: 18fef46
Author: Sean Busbey <busbey@apache.org>
Authored: Wed May 20 15:00:28 2015 -0500
Committer: Sean Busbey <busbey@apache.org>
Committed: Fri May 29 12:38:52 2015 -0500

----------------------------------------------------------------------
 .../hadoop/hbase/mapreduce/IndexBuilder.java    | 24 +++---
 .../hadoop/hbase/mapreduce/SampleUploader.java  | 23 +++---
 .../mapreduce/IntegrationTestImportTsv.java     | 13 ++--
 .../test/IntegrationTestBigLinkedList.java      | 13 ++--
 .../hadoop/hbase/mapreduce/CellCounter.java     |  6 +-
 .../hadoop/hbase/mapreduce/CopyTable.java       |  9 +--
 .../apache/hadoop/hbase/mapreduce/Export.java   |  8 +-
 .../apache/hadoop/hbase/mapreduce/Import.java   | 15 ++--
 .../hadoop/hbase/mapreduce/ImportTsv.java       | 12 +--
 .../hadoop/hbase/mapreduce/RowCounter.java      | 35 +++++++--
 .../hadoop/hbase/mapreduce/WALPlayer.java       | 19 ++---
 .../org/apache/hadoop/hbase/util/Merge.java     | 33 ++------
 .../hadoop/hbase/mapreduce/TestCellCounter.java | 16 ++--
 .../hadoop/hbase/mapreduce/TestCopyTable.java   | 80 ++++++++++----------
 .../hbase/mapreduce/TestImportExport.java       | 39 +++++-----
 .../hadoop/hbase/mapreduce/TestImportTsv.java   | 19 +++--
 .../hadoop/hbase/mapreduce/TestRowCounter.java  | 17 +----
 .../hadoop/hbase/mapreduce/TestWALPlayer.java   |  6 +-
 .../apache/hadoop/hbase/util/TestMergeTool.java |  2 +-
 .../hadoop/hbase/thrift2/ThriftServer.java      | 26 ++++---
 20 files changed, 206 insertions(+), 209 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
index 6df345b..758ecb8 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.util.TreeMap;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
@@ -30,7 +31,8 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
  * Example map/reduce job to construct index tables that can be used to quickly
@@ -62,7 +64,7 @@ import org.apache.hadoop.util.GenericOptionsParser;
  * This code was written against HBase 0.21 trunk.
  * </p>
  */
-public class IndexBuilder {
+public class IndexBuilder extends Configured implements Tool {
   /** the column family containing the indexed row key */
   public static final byte[] INDEX_COLUMN = Bytes.toBytes("INDEX");
   /** the qualifier containing the indexed row key */
@@ -135,15 +137,19 @@ public class IndexBuilder {
     return job;
   }
 
-  public static void main(String[] args) throws Exception {
-    Configuration conf = HBaseConfiguration.create();
-    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
-    if(otherArgs.length < 3) {
-      System.err.println("Only " + otherArgs.length + " arguments supplied, required: 3");
+  public int run(String[] args) throws Exception {
+    Configuration conf = HBaseConfiguration.create(getConf());
+    if(args.length < 3) {
+      System.err.println("Only " + args.length + " arguments supplied, required: 3");
       System.err.println("Usage: IndexBuilder <TABLE_NAME> <COLUMN_FAMILY> <ATTR> [<ATTR> ...]");
       System.exit(-1);
     }
-    Job job = configureJob(conf, otherArgs);
-    System.exit(job.waitForCompletion(true) ? 0 : 1);
+    Job job = configureJob(conf, args);
+    return (job.waitForCompletion(true) ? 0 : 1);
+  }
+
+  public static void main(String[] args) throws Exception {
+    int result = ToolRunner.run(new Configuration(), new IndexBuilder(), args);
+    System.exit(result);
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
index d39676c..011147a 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.mapreduce;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.Put;
@@ -33,7 +34,8 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
-import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
  * Sample Uploader MapReduce
@@ -57,7 +59,7 @@ import org.apache.hadoop.util.GenericOptionsParser;
  * <p>
  * This code was written against HBase 0.21 trunk.
  */
-public class SampleUploader {
+public class SampleUploader extends Configured implements Tool {
 
   private static final String NAME = "SampleUploader";
 
@@ -130,18 +132,21 @@ public class SampleUploader {
   /**
    * Main entry point.
    *
-   * @param args  The command line parameters.
+   * @param otherArgs  The command line parameters after ToolRunner handles the standard ones.
    * @throws Exception When running the job fails.
    */
-  public static void main(String[] args) throws Exception {
-    Configuration conf = HBaseConfiguration.create();
-    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
+  public int run(String[] otherArgs) throws Exception {
     if(otherArgs.length != 2) {
       System.err.println("Wrong number of arguments: " + otherArgs.length);
       System.err.println("Usage: " + NAME + " <input> <tablename>");
-      System.exit(-1);
+      return -1;
     }
-    Job job = configureJob(conf, otherArgs);
-    System.exit(job.waitForCompletion(true) ? 0 : 1);
+    Job job = configureJob(getConf(), otherArgs);
+    return (job.waitForCompletion(true) ? 0 : 1);
+  }
+
+  public static void main(String[] args) throws Exception {
+    int status = ToolRunner.run(HBaseConfiguration.create(), new SampleUploader(), args);
+    System.exit(status);
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
index 657217f..bd03afe 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
@@ -31,7 +31,7 @@ import java.util.UUID;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -49,7 +49,6 @@ import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
-import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.AfterClass;
@@ -61,7 +60,7 @@ import org.junit.experimental.categories.Category;
  * Validate ImportTsv + LoadIncrementalHFiles on a distributed cluster.
  */
 @Category(IntegrationTests.class)
-public class IntegrationTestImportTsv implements Configurable, Tool {
+public class IntegrationTestImportTsv extends Configured implements Tool {
 
   private static final String NAME = IntegrationTestImportTsv.class.getSimpleName();
   private static final Log LOG = LogFactory.getLog(IntegrationTestImportTsv.class);
@@ -103,7 +102,7 @@ public class IntegrationTestImportTsv implements Configurable, Tool {
   }
 
   public void setConf(Configuration conf) {
-    throw new IllegalArgumentException("setConf not supported");
+    LOG.debug("Ignoring setConf call.");
   }
 
   @BeforeClass
@@ -217,7 +216,7 @@ public class IntegrationTestImportTsv implements Configurable, Tool {
       System.err.println(format("%s [genericOptions]", NAME));
       System.err.println("  Runs ImportTsv integration tests against a distributed cluster.");
       System.err.println();
-      GenericOptionsParser.printGenericCommandUsage(System.err);
+      ToolRunner.printGenericCommandUsage(System.err);
       return 1;
     }
 
@@ -234,9 +233,7 @@ public class IntegrationTestImportTsv implements Configurable, Tool {
     Configuration conf = HBaseConfiguration.create();
     IntegrationTestingUtility.setUseDistributedCluster(conf);
     util = new IntegrationTestingUtility(conf);
-    // not using ToolRunner to avoid unnecessary call to setConf()
-    args = new GenericOptionsParser(conf, args).getRemainingArgs();
-    int status = new IntegrationTestImportTsv().run(args);
+    int status = ToolRunner.run(conf, new IntegrationTestImportTsv(), args);
     System.exit(status);
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index 1fe5306..d44139d 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -659,9 +659,6 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
      * WALPlayer override that searches for keys loaded in the setup.
      */
     public static class WALSearcher extends WALPlayer {
-      public WALSearcher(Configuration conf) {
-        super(conf);
-      }
 
       /**
        * The actual searcher mapper.
@@ -723,9 +720,13 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
       Path oldWalsDir = new Path(hbaseDir, HConstants.HREGION_OLDLOGDIR_NAME);
       LOG.info("Running Search with keys inputDir=" + inputDir +", numMappers=" + numMappers +
         " against " + getConf().get(HConstants.HBASE_DIR));
-      int ret = ToolRunner.run(new WALSearcher(getConf()), new String [] {walsDir.toString(), ""});
-      if (ret != 0) return ret;
-      return ToolRunner.run(new WALSearcher(getConf()), new String [] {oldWalsDir.toString(), ""});
+      int ret = ToolRunner.run(getConf(), new WALSearcher(),
+          new String [] {walsDir.toString(), ""});
+      if (ret != 0) {
+        return ret;
+      }
+      return ToolRunner.run(getConf(), new WALSearcher(),
+          new String [] {oldWalsDir.toString(), ""});
     }
 
     static SortedSet<byte []> readKeysToSearch(final Configuration conf)

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
index 218c670..0675b73 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
@@ -46,7 +46,6 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -271,8 +270,7 @@ public class CellCounter extends Configured implements Tool {
 
   @Override
   public int run(String[] args) throws Exception {
-    String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs();
-    if (otherArgs.length < 2) {
+    if (args.length < 2) {
       System.err.println("ERROR: Wrong number of parameters: " + args.length);
       System.err.println("Usage: CellCounter ");
       System.err.println("       <tablename> <outputDir> <reportSeparator> [^[regex pattern] or " +
@@ -287,7 +285,7 @@ public class CellCounter extends Configured implements Tool {
           "operation to a limited subset of rows from the table based on regex or prefix pattern.");
       return -1;
     }
-    Job job = createSubmittableJob(getConf(), otherArgs);
+    Job job = createSubmittableJob(getConf(), args);
     return (job.waitForCompletion(true) ? 0 : 1);
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
index 7584bc2..c46cb57 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
@@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -72,9 +71,6 @@ public class CopyTable extends Configured implements Tool {
 
   private final static String JOB_NAME_CONF_KEY = "mapreduce.job.name";
 
-  public CopyTable(Configuration conf) {
-    super(conf);
-  }
   /**
    * Sets up the actual job.
    *
@@ -338,14 +334,13 @@ public class CopyTable extends Configured implements Tool {
    * @throws Exception When running the job fails.
    */
   public static void main(String[] args) throws Exception {
-    int ret = ToolRunner.run(new CopyTable(HBaseConfiguration.create()), args);
+    int ret = ToolRunner.run(HBaseConfiguration.create(), new CopyTable(), args);
     System.exit(ret);
   }
 
   @Override
   public int run(String[] args) throws Exception {
-    String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs();
-    Job job = createSubmittableJob(otherArgs);
+    Job job = createSubmittableJob(args);
     if (job == null) return 1;
     if (!job.waitForCompletion(true)) {
       LOG.info("Map-reduce job failed!");

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
index 14786ab..66c0057 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
@@ -41,7 +41,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
-import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -181,12 +180,11 @@ public class Export extends Configured implements Tool {
 
   @Override
   public int run(String[] args) throws Exception {
-    String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs();
-    if (otherArgs.length < 2) {
-      usage("Wrong number of arguments: " + otherArgs.length);
+    if (args.length < 2) {
+      usage("Wrong number of arguments: " + args.length);
       return -1;
     }
-    Job job = createSubmittableJob(getConf(), otherArgs);
+    Job job = createSubmittableJob(getConf(), args);
     return (job.waitForCompletion(true) ? 0 : 1);
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
index e2f4ce0..92efd27 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
@@ -51,7 +51,6 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.zookeeper.KeeperException;
@@ -229,12 +228,16 @@ public class Import extends Configured implements Tool {
 
     @Override
     public void setup(Context context) {
+      LOG.info("Setting up " + getClass() + " mapper.");
       Configuration conf = context.getConfiguration();
       cfRenameMap = createCfRenameMap(conf);
       filter = instantiateFilter(conf);
       String durabilityStr = conf.get(WAL_DURABILITY);
       if(durabilityStr != null){
         durability = Durability.valueOf(durabilityStr.toUpperCase());
+        LOG.info("setting WAL durability to " + durability);
+      } else {
+        LOG.info("setting WAL durability to default.");
       }
       // TODO: This is kind of ugly doing setup of ZKW just to read the clusterid.
       ZooKeeperWatcher zkw = null;
@@ -452,6 +455,7 @@ public class Import extends Configured implements Tool {
     }
 
     if (hfileOutPath != null) {
+      LOG.info("writing to hfiles for bulk load.");
       job.setMapperClass(KeyValueImporter.class);
       try (Connection conn = ConnectionFactory.createConnection(conf); 
           Table table = conn.getTable(tableName);
@@ -466,6 +470,7 @@ public class Import extends Configured implements Tool {
             com.google.common.base.Preconditions.class);
       }
     } else {
+      LOG.info("writing directly to table from Mapper.");
       // No reducers.  Just write straight to table.  Call initTableReducerJob
       // because it sets up the TableOutputFormat.
       job.setMapperClass(Importer.class);
@@ -522,6 +527,7 @@ public class Import extends Configured implements Tool {
     // Need to flush if the data is written to hbase and skip wal is enabled.
     if (conf.get(BULK_OUTPUT_CONF_KEY) == null && durability != null
         && Durability.SKIP_WAL.name().equalsIgnoreCase(durability)) {
+      LOG.info("Flushing all data that skipped the WAL.");
       try {
         connection = ConnectionFactory.createConnection(conf);
         hAdmin = connection.getAdmin();
@@ -539,16 +545,15 @@ public class Import extends Configured implements Tool {
 
   @Override
   public int run(String[] args) throws Exception {
-    String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs();
-    if (otherArgs.length < 2) {
-      usage("Wrong number of arguments: " + otherArgs.length);
+    if (args.length < 2) {
+      usage("Wrong number of arguments: " + args.length);
       return -1;
     }
     String inputVersionString = System.getProperty(ResultSerialization.IMPORT_FORMAT_VER);
     if (inputVersionString != null) {
       getConf().set(ResultSerialization.IMPORT_FORMAT_VER, inputVersionString);
     }
-    Job job = createSubmittableJob(getConf(), otherArgs);
+    Job job = createSubmittableJob(getConf(), args);
     boolean isJobSuccessful = job.waitForCompletion(true);
     if(isJobSuccessful){
       // Flush all the regions of the table

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
index d4394eb..5c11bb9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
@@ -30,7 +30,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -54,7 +53,6 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -649,10 +647,8 @@ public class ImportTsv extends Configured implements Tool {
 
   @Override
   public int run(String[] args) throws Exception {
-    setConf(HBaseConfiguration.create(getConf()));
-    String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs();
-    if (otherArgs.length < 2) {
-      usage("Wrong number of arguments: " + otherArgs.length);
+    if (args.length < 2) {
+      usage("Wrong number of arguments: " + args.length);
       return -1;
     }
 
@@ -717,12 +713,12 @@ public class ImportTsv extends Configured implements Tool {
     // system time
     getConf().setLong(TIMESTAMP_CONF_KEY, timstamp);
 
-    Job job = createSubmittableJob(getConf(), otherArgs);
+    Job job = createSubmittableJob(getConf(), args);
     return job.waitForCompletion(true) ? 0 : 1;
   }
 
   public static void main(String[] args) throws Exception {
-    int status = ToolRunner.run(new ImportTsv(), args);
+    int status = ToolRunner.run(new Configuration(), new ImportTsv(), args);
     System.exit(status);
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
index 5a506e1..7df2f47 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
@@ -22,6 +22,8 @@ import java.io.IOException;
 import java.util.Set;
 import java.util.TreeSet;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -35,9 +37,9 @@ import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.filter.FirstKeyValueMatchingQualifiersFilter;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
-import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -49,10 +51,13 @@ import org.apache.hadoop.util.ToolRunner;
 @InterfaceStability.Stable
 public class RowCounter extends Configured implements Tool {
 
+  private static final Log LOG = LogFactory.getLog(RowCounter.class);
+
   /** Name of this 'program'. */
   static final String NAME = "rowcounter";
 
   private final static String JOB_NAME_CONF_KEY = "mapreduce.job.name";
+  private final static String EXPECTED_COUNT_KEY = RowCounter.class.getName() + ".expected_count";
 
   /**
    * Mapper that runs the count.
@@ -103,6 +108,7 @@ public class RowCounter extends Configured implements Tool {
     final String rangeSwitch = "--range=";
     final String startTimeArgKey = "--starttime=";
     final String endTimeArgKey = "--endtime=";
+    final String expectedCountArg = "--expected-count=";
 
     // First argument is table name, starting from second
     for (int i = 1; i < args.length; i++) {
@@ -128,6 +134,11 @@ public class RowCounter extends Configured implements Tool {
         endTime = Long.parseLong(args[i].substring(endTimeArgKey.length()));
         continue;
       }
+      if (args[i].startsWith(expectedCountArg)) {
+        conf.setLong(EXPECTED_COUNT_KEY,
+            Long.parseLong(args[i].substring(expectedCountArg.length())));
+        continue;
+      }
       else {
         // if no switch, assume column names
         sb.append(args[i]);
@@ -183,8 +194,9 @@ public class RowCounter extends Configured implements Tool {
     printUsage();
   }
 
-  /*
-   * Prints usage without error message
+  /**
+   * Prints usage without error message.
+   * Note that we don't document --expected-count, because it's intended for tests.
    */
   private static void printUsage() {
     System.err.println("Usage: RowCounter [options] <tablename> " +
@@ -197,16 +209,25 @@ public class RowCounter extends Configured implements Tool {
 
   @Override
   public int run(String[] args) throws Exception {
-    String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs();
-    if (otherArgs.length < 1) {
+    if (args.length < 1) {
       printUsage("Wrong number of parameters: " + args.length);
       return -1;
     }
-    Job job = createSubmittableJob(getConf(), otherArgs);
+    Job job = createSubmittableJob(getConf(), args);
     if (job == null) {
       return -1;
     }
-    return (job.waitForCompletion(true) ? 0 : 1);
+    boolean success = job.waitForCompletion(true);
+    final long expectedCount = getConf().getLong(EXPECTED_COUNT_KEY, -1);
+    if (success && expectedCount != -1) {
+      final Counter counter = job.getCounters().findCounter(RowCounterMapper.Counters.ROWS);
+      success = expectedCount == counter.getValue();
+      if (!success) {
+        LOG.error("Failing job because count of '" + counter.getValue() +
+            "' does not match expected count of '" + expectedCount + "'");
+      }
+    }
+    return (success ? 0 : 1);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
index fabd203..713ca40 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
@@ -45,7 +45,6 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -210,13 +209,6 @@ public class WALPlayer extends Configured implements Tool {
     }
   }
 
-  /**
-   * @param conf The {@link Configuration} to use.
-   */
-  public WALPlayer(Configuration conf) {
-    super(conf);
-  }
-
   void setupTime(Configuration conf, String option) throws IOException {
     String val = conf.get(option);
     if (null == val) return;
@@ -335,18 +327,17 @@ public class WALPlayer extends Configured implements Tool {
    * @throws Exception When running the job fails.
    */
   public static void main(String[] args) throws Exception {
-    int ret = ToolRunner.run(new WALPlayer(HBaseConfiguration.create()), args);
+    int ret = ToolRunner.run(HBaseConfiguration.create(), new WALPlayer(), args);
     System.exit(ret);
   }
 
   @Override
   public int run(String[] args) throws Exception {
-    String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs();
-    if (otherArgs.length < 2) {
-      usage("Wrong number of arguments: " + otherArgs.length);
+    if (args.length < 2) {
+      usage("Wrong number of arguments: " + args.length);
       System.exit(-1);
     }
-    Job job = createSubmittableJob(otherArgs);
+    Job job = createSubmittableJob(args);
     return job.waitForCompletion(true) ? 0 : 1;
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-server/src/main/java/org/apache/hadoop/hbase/util/Merge.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/Merge.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/Merge.java
index 0a9243d..f190fa1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/Merge.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/Merge.java
@@ -43,7 +43,6 @@ import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.io.WritableComparator;
-import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -61,20 +60,7 @@ public class Merge extends Configured implements Tool {
   private TableName tableName;               // Name of table
   private volatile byte [] region1;        // Name of region 1
   private volatile byte [] region2;        // Name of region 2
-  private volatile HRegionInfo mergeInfo;
-
-  /** default constructor */
-  public Merge() {
-    super();
-  }
-
-  /**
-   * @param conf configuration
-   */
-  public Merge(Configuration conf) {
-    this.mergeInfo = null;
-    setConf(conf);
-  }
+  private volatile HRegionInfo mergeInfo = null;
 
   @Override
   public int run(String[] args) throws Exception {
@@ -226,26 +212,23 @@ public class Merge extends Configured implements Tool {
     meta.delete(delete);
   }
 
-  /*
-   * Parse given arguments including generic arguments and assign table name and regions names.
+  /**
+   * Parse given arguments and assign the table name and region names.
+   * (generic args are handled by ToolRunner.)
    *
    * @param args the arguments to parse
    *
    * @throws IOException
    */
   private int parseArgs(String[] args) throws IOException {
-    GenericOptionsParser parser =
-      new GenericOptionsParser(getConf(), args);
-
-    String[] remainingArgs = parser.getRemainingArgs();
-    if (remainingArgs.length != 3) {
+    if (args.length != 3) {
       usage();
       return -1;
     }
-    tableName = TableName.valueOf(remainingArgs[0]);
+    tableName = TableName.valueOf(args[0]);
 
-    region1 = Bytes.toBytesBinary(remainingArgs[1]);
-    region2 = Bytes.toBytesBinary(remainingArgs[2]);
+    region1 = Bytes.toBytesBinary(args[1]);
+    region2 = Bytes.toBytesBinary(args[2]);
     int status = 0;
     if (notInTable(tableName, region1) || notInTable(tableName, region2)) {
       status = -1;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
index 22bc330..6b23e37 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
@@ -32,7 +32,6 @@ import org.apache.hadoop.hbase.testclassification.MapReduceTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LauncherSecurityManager;
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -231,17 +230,12 @@ public class TestCellCounter {
   }
 
 
-  private boolean runCount(String[] args) throws IOException, InterruptedException,
-      ClassNotFoundException {
+  private boolean runCount(String[] args) throws Exception {
    // need to make a copy of the configuration to make sure
    // different temp dirs are used.
-    GenericOptionsParser opts = new GenericOptionsParser(
-        new Configuration(UTIL.getConfiguration()), args);
-    Configuration configuration = opts.getConfiguration();
-    args = opts.getRemainingArgs();
-    Job job = CellCounter.createSubmittableJob(configuration, args);
-    job.waitForCompletion(false);
-    return job.isSuccessful();
+    int status = ToolRunner.run(new Configuration(UTIL.getConfiguration()), new CellCounter(),
+        args);
+    return status == 0;
   }
 
   /**
@@ -327,4 +321,4 @@ public class TestCellCounter {
     assertEquals("CellCounter should exit with -1 as output directory is not specified.", -1,
       ToolRunner.run(HBaseConfiguration.create(), new CellCounter(), args));
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
index 4b11abb..c96d7c4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.testclassification.MapReduceTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LauncherSecurityManager;
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.ToolRunner;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -79,40 +79,40 @@ public class TestCopyTable {
     final byte[] FAMILY = Bytes.toBytes("family");
     final byte[] COLUMN1 = Bytes.toBytes("c1");
 
-    Table t1 = TEST_UTIL.createTable(TABLENAME1, FAMILY);
-    Table t2 = TEST_UTIL.createTable(TABLENAME2, FAMILY);
+    try (Table t1 = TEST_UTIL.createTable(TABLENAME1, FAMILY);
+         Table t2 = TEST_UTIL.createTable(TABLENAME2, FAMILY);) {
+      // put rows into the first table
+      for (int i = 0; i < 10; i++) {
+        Put p = new Put(Bytes.toBytes("row" + i));
+        p.add(FAMILY, COLUMN1, COLUMN1);
+        t1.put(p);
+      }
 
-    // put rows into the first table
-    for (int i = 0; i < 10; i++) {
-      Put p = new Put(Bytes.toBytes("row" + i));
-      p.add(FAMILY, COLUMN1, COLUMN1);
-      t1.put(p);
-    }
+      CopyTable copy = new CopyTable();
 
-    CopyTable copy = new CopyTable(TEST_UTIL.getConfiguration());
+      int code;
+      if (bulkload) {
+        code = ToolRunner.run(new Configuration(TEST_UTIL.getConfiguration()),
+            copy, new String[] { "--new.name=" + TABLENAME2.getNameAsString(),
+            "--bulkload", TABLENAME1.getNameAsString() });
+      } else {
+        code = ToolRunner.run(new Configuration(TEST_UTIL.getConfiguration()),
+            copy, new String[] { "--new.name=" + TABLENAME2.getNameAsString(),
+            TABLENAME1.getNameAsString() });
+      }
+      assertEquals("copy job failed", 0, code);
 
-    int code;
-    if (bulkload) {
-      code = copy.run(new String[] { "--new.name=" + TABLENAME2.getNameAsString(),
-          "--bulkload", TABLENAME1.getNameAsString() });
-    } else {
-      code = copy.run(new String[] { "--new.name=" + TABLENAME2.getNameAsString(),
-          TABLENAME1.getNameAsString() });
-    }
-    assertEquals("copy job failed", 0, code);
-
-    // verify the data was copied into table 2
-    for (int i = 0; i < 10; i++) {
-      Get g = new Get(Bytes.toBytes("row" + i));
-      Result r = t2.get(g);
-      assertEquals(1, r.size());
-      assertTrue(CellUtil.matchingQualifier(r.rawCells()[0], COLUMN1));
+      // verify the data was copied into table 2
+      for (int i = 0; i < 10; i++) {
+        Get g = new Get(Bytes.toBytes("row" + i));
+        Result r = t2.get(g);
+        assertEquals(1, r.size());
+        assertTrue(CellUtil.matchingQualifier(r.rawCells()[0], COLUMN1));
+      }
+    } finally {
+      TEST_UTIL.deleteTable(TABLENAME1);
+      TEST_UTIL.deleteTable(TABLENAME2);
     }
-    
-    t1.close();
-    t2.close();
-    TEST_UTIL.deleteTable(TABLENAME1);
-    TEST_UTIL.deleteTable(TABLENAME2);
   }
 
   /**
@@ -156,10 +156,11 @@ public class TestCopyTable {
     p.add(FAMILY, COLUMN1, COLUMN1);
     t1.put(p);
 
-    CopyTable copy = new CopyTable(TEST_UTIL.getConfiguration());
+    CopyTable copy = new CopyTable();
     assertEquals(
       0,
-      copy.run(new String[] { "--new.name=" + TABLENAME2, "--startrow=row1",
+      ToolRunner.run(new Configuration(TEST_UTIL.getConfiguration()),
+          copy, new String[] { "--new.name=" + TABLENAME2, "--startrow=row1",
           "--stoprow=row2", TABLENAME1.getNameAsString() }));
 
     // verify the data was copied into table 2
@@ -253,14 +254,9 @@ public class TestCopyTable {
     assertTrue(data.toString().contains("Usage:"));
   }
 
-  private boolean runCopy(String[] args) throws IOException, InterruptedException,
-      ClassNotFoundException {
-    GenericOptionsParser opts = new GenericOptionsParser(
-        new Configuration(TEST_UTIL.getConfiguration()), args);
-    Configuration configuration = opts.getConfiguration();
-    args = opts.getRemainingArgs();
-    Job job = new CopyTable(configuration).createSubmittableJob(args);
-    job.waitForCompletion(false);
-    return job.isSuccessful();
+  private boolean runCopy(String[] args) throws Exception {
+    int status = ToolRunner.run(new Configuration(TEST_UTIL.getConfiguration()), new CopyTable(),
+        args);
+    return status == 0;
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index baabc0b..1109ae2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -34,6 +34,8 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -68,14 +70,16 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LauncherSecurityManager;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper.Context;
-import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.ToolRunner;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
@@ -84,6 +88,7 @@ import org.mockito.stubbing.Answer;
  */
 @Category({VerySlowMapReduceTests.class, MediumTests.class})
 public class TestImportExport {
+  private static final Log LOG = LogFactory.getLog(TestImportExport.class);
   private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
   private static final byte[] ROW1 = Bytes.toBytes("row1");
   private static final byte[] ROW2 = Bytes.toBytes("row2");
@@ -111,6 +116,14 @@ public class TestImportExport {
     UTIL.shutdownMiniCluster();
   }
 
+  @Rule
+  public final TestName name = new TestName();
+
+  @Before
+  public void announce() {
+    LOG.info("Running " + name.getMethodName());
+  }
+
   @Before
   @After
   public void cleanup() throws Exception {
@@ -126,14 +139,10 @@ public class TestImportExport {
    * @throws InterruptedException
    * @throws ClassNotFoundException
    */
-  boolean runExport(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
+  boolean runExport(String[] args) throws Exception {
    // need to make a copy of the configuration to make sure different temp dirs are used.
-    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
-    Configuration conf = opts.getConfiguration();
-    args = opts.getRemainingArgs();
-    Job job = Export.createSubmittableJob(conf, args);
-    job.waitForCompletion(false);
-    return job.isSuccessful();
+    int status = ToolRunner.run(new Configuration(UTIL.getConfiguration()), new Export(), args);
+    return status == 0;
   }
 
   /**
@@ -144,14 +153,10 @@ public class TestImportExport {
    * @throws InterruptedException
    * @throws ClassNotFoundException
    */
-  boolean runImport(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
+  boolean runImport(String[] args) throws Exception {
    // need to make a copy of the configuration to make sure different temp dirs are used.
-    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
-    Configuration conf = opts.getConfiguration();
-    args = opts.getRemainingArgs();
-    Job job = Import.createSubmittableJob(conf, args);
-    job.waitForCompletion(false);
-    return job.isSuccessful();
+    int status = ToolRunner.run(new Configuration(UTIL.getConfiguration()), new Import(), args);
+    return status == 0;
   }
 
   /**
@@ -617,7 +622,7 @@ public class TestImportExport {
   }
 
   @Test
-  public void testDurability() throws IOException, InterruptedException, ClassNotFoundException {
+  public void testDurability() throws Exception {
     // Create an export table.
     String exportTableName = "exporttestDurability";
     Table exportTable = UTIL.createTable(TableName.valueOf(exportTableName), FAMILYA, 3);
@@ -687,7 +692,7 @@ public class TestImportExport {
 
     @Override
     public void visitLogEntryBeforeWrite(HTableDescriptor htd, WALKey logKey, WALEdit logEdit) {
-      if (tableName.equalsIgnoreCase(htd.getNameAsString())) {
+      if (tableName.equalsIgnoreCase(htd.getNameAsString()) && (!logEdit.isMetaEdit())) {
         isVisited = true;
       }
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
index 7918274..5208ffb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
@@ -56,7 +56,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.Utils.OutputFileUtils.OutputFilesFilter;
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.AfterClass;
@@ -223,12 +222,18 @@ public class TestImportTsv implements Configurable {
             "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + bulkOutputPath.toString(), table,
             INPUT_FILE
             };
-    GenericOptionsParser opts = new GenericOptionsParser(util.getConfiguration(), args);
-    args = opts.getRemainingArgs();
-    Job job = ImportTsv.createSubmittableJob(util.getConfiguration(), args);
-    assertTrue(job.getMapperClass().equals(TsvImporterTextMapper.class));
-    assertTrue(job.getReducerClass().equals(TextSortReducer.class));
-    assertTrue(job.getMapOutputValueClass().equals(Text.class));
+    assertEquals("running test job configuration failed.", 0, ToolRunner.run(
+        new Configuration(util.getConfiguration()),
+        new ImportTsv() {
+          @Override
+          public int run(String[] args) throws Exception {
+            Job job = createSubmittableJob(getConf(), args);
+            assertTrue(job.getMapperClass().equals(TsvImporterTextMapper.class));
+            assertTrue(job.getReducerClass().equals(TextSortReducer.class));
+            assertTrue(job.getMapOutputValueClass().equals(Text.class));
+            return 0;
+          }
+        }, args));
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
index 0ac9d9f..80af874 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
@@ -42,9 +42,7 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.mapreduce.RowCounter.RowCounterMapper;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LauncherSecurityManager;
-import org.apache.hadoop.mapreduce.Counter;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.ToolRunner;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -207,16 +205,9 @@ public class TestRowCounter {
    * @throws Exception
    */
   private void runRowCount(String[] args, int expectedCount) throws Exception {
-    GenericOptionsParser opts = new GenericOptionsParser(
-        TEST_UTIL.getConfiguration(), args);
-    Configuration conf = opts.getConfiguration();
-    args = opts.getRemainingArgs();
-    Job job = RowCounter.createSubmittableJob(conf, args);
-    job.waitForCompletion(true);
-    assertTrue(job.isSuccessful());
-    Counter counter = job.getCounters().findCounter(
-        RowCounterMapper.Counters.ROWS);
-    assertEquals(expectedCount, counter.getValue());
+    final RowCounter counter = new RowCounter();
+    assertEquals("job failed due to error or miscount (see log output).", 0,
+        ToolRunner.run(TEST_UTIL.getConfiguration(), counter, args));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
index 68cf8ba..e524f38 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
@@ -54,6 +54,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LauncherSecurityManager;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.Mapper.Context;
+import org.apache.hadoop.util.ToolRunner;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -113,12 +114,13 @@ public class TestWALPlayer {
         .getRootDir(), HConstants.HREGION_LOGDIR_NAME).toString();
 
     Configuration configuration= TEST_UTIL.getConfiguration();
-    WALPlayer player = new WALPlayer(configuration);
+    WALPlayer player = new WALPlayer();
     String optionName="_test_.name";
     configuration.set(optionName, "1000");
     player.setupTime(configuration, optionName);
     assertEquals(1000,configuration.getLong(optionName,0));
-    assertEquals(0, player.run(new String[] {walInputDir, TABLENAME1.getNameAsString(),
+    assertEquals(0, ToolRunner.run(configuration, player,
+        new String[] {walInputDir, TABLENAME1.getNameAsString(),
         TABLENAME2.getNameAsString() }));
 
     

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
index 6400186..8688c61 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
@@ -203,7 +203,7 @@ public class TestMergeTool extends HBaseTestCase {
   private HRegion mergeAndVerify(final String msg, final String regionName1,
     final String regionName2, final WAL log, final int upperbound)
   throws Exception {
-    Merge merger = new Merge(this.conf);
+    Merge merger = new Merge();
     LOG.info(msg);
     LOG.info("fs2=" + this.conf.get("fs.defaultFS"));
     int errCode = ToolRunner.run(this.conf, merger,

http://git-wip-us.apache.org/repos/asf/hbase/blob/a016b23e/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
index 66ecc18..7a808e0 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
@@ -49,6 +49,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.filter.ParseFilter;
@@ -63,7 +64,8 @@ import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.net.DNS;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
-import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 import org.apache.thrift.TException;
 import org.apache.thrift.TProcessor;
 import org.apache.thrift.protocol.TBinaryProtocol;
@@ -91,7 +93,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
  */
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
 @SuppressWarnings({ "rawtypes", "unchecked" })
-public class ThriftServer {
+public class ThriftServer extends Configured implements Tool {
   private static final Log log = LogFactory.getLog(ThriftServer.class);
 
   /**
@@ -142,10 +144,8 @@ public class ThriftServer {
 
   private static CommandLine parseArguments(Configuration conf, Options options, String[] args)
       throws ParseException, IOException {
-    GenericOptionsParser genParser = new GenericOptionsParser(conf, args);
-    String[] remainingArgs = genParser.getRemainingArgs();
     CommandLineParser parser = new PosixParser();
-    return parser.parse(options, remainingArgs);
+    return parser.parse(options, args);
   }
 
   private static TProtocolFactory getTProtocolFactory(boolean isCompact) {
@@ -304,13 +304,19 @@ public class ThriftServer {
 
   /**
    * Start up the Thrift2 server.
-   *
-   * @param args
    */
   public static void main(String[] args) throws Exception {
+    final Configuration conf = HBaseConfiguration.create();
+    // for now, the only time we return here is on an argument error.
+    final int status = ToolRunner.run(conf, new ThriftServer(), args);
+    System.exit(status);
+  }
+
+  @Override
+  public int run(String[] args) throws Exception {
+    final Configuration conf = getConf();
     TServer server = null;
     Options options = getOptions();
-    Configuration conf = HBaseConfiguration.create();
     CommandLine cmd = parseArguments(conf, options, args);
     int workerThreads = 0;
 
@@ -321,7 +327,7 @@ public class ThriftServer {
     List<?> argList = cmd.getArgList();
     if (cmd.hasOption("help") || !argList.contains("start") || argList.contains("stop")) {
       printUsage();
-      System.exit(1);
+      return 1;
     }
 
     // Get address to bind
@@ -485,5 +491,7 @@ public class ThriftServer {
           return null;
         }
       });
+    // when tserver.stop() eventually happens, we'll get here.
+    return 0;
   }
 }

