hbase-commits mailing list archives

From st...@apache.org
Subject hbase git commit: HBASE-7541 Convert all tests that use HBaseTestingUtility.createMultiRegions to HBA.createTable (Jonathan Lawlor)
Date Wed, 14 Jan 2015 06:10:24 GMT
Repository: hbase
Updated Branches:
  refs/heads/master 172324528 -> 608025ae6


HBASE-7541 Convert all tests that use HBaseTestingUtility.createMultiRegions to HBA.createTable (Jonathan Lawlor)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/608025ae
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/608025ae
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/608025ae

Branch: refs/heads/master
Commit: 608025ae6755752de7d1a9f159ddb9e05c060902
Parents: 1723245
Author: stack <stack@apache.org>
Authored: Tue Jan 13 22:10:12 2015 -0800
Committer: stack <stack@apache.org>
Committed: Tue Jan 13 22:10:12 2015 -0800

----------------------------------------------------------------------
 .../hadoop/hbase/HBaseTestingUtility.java       | 288 ++++++++++---------
 .../hadoop/hbase/TestFullLogReconstruction.java |   5 +-
 .../hadoop/hbase/TestGlobalMemStoreSize.java    |  17 +-
 .../hadoop/hbase/TestMetaTableAccessor.java     |   9 +-
 .../apache/hadoop/hbase/client/TestAdmin2.java  |   3 +-
 .../hadoop/hbase/client/TestFromClientSide.java |  69 ++---
 .../org/apache/hadoop/hbase/client/TestHCM.java |  13 +-
 .../hadoop/hbase/client/TestMetaScanner.java    |  16 +-
 .../hadoop/hbase/client/TestMultiParallel.java  |   3 +-
 .../coprocessor/TestAggregateProtocol.java      |  13 +-
 .../TestBigDecimalColumnInterpreter.java        |  13 +-
 .../TestDoubleColumnInterpreter.java            |   5 +-
 .../hbase/coprocessor/TestMasterObserver.java   |   3 +-
 ...gionServerCoprocessorExceptionWithAbort.java |  17 +-
 ...ionServerCoprocessorExceptionWithRemove.java |  19 +-
 .../hbase/mapreduce/TestHFileOutputFormat.java  |  38 +--
 .../hbase/mapreduce/TestHFileOutputFormat2.java |  59 ++--
 .../mapreduce/TestMultiTableInputFormat.java    |   4 +-
 .../mapreduce/TestMultithreadedTableMapper.java |  18 +-
 .../mapreduce/TestTableInputFormatScan1.java    |   2 +-
 .../mapreduce/TestTableInputFormatScanBase.java |   4 +-
 .../hbase/mapreduce/TestTableMapReduceBase.java |   4 +-
 .../master/TestDistributedLogSplitting.java     |  17 +-
 .../TestMasterRestartAfterDisablingTable.java   |  10 +-
 .../hbase/master/TestMasterTransitions.java     |  12 +-
 .../hadoop/hbase/master/TestRollingRestart.java |   9 +-
 .../regionserver/TestRegionFavoredNodes.java    |   5 +-
 .../regionserver/TestServerCustomProtocol.java  |  12 +-
 28 files changed, 361 insertions(+), 326 deletions(-)
----------------------------------------------------------------------
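
For readers skimming the diff: the patch removes the createMultiRegions helpers, which manufactured extra regions by rewriting rows in hbase:meta after table creation, and instead adds createTable/createMultiRegionTable overloads that hand split keys straight to HBaseAdmin.createTable. A minimal, illustrative sketch of the conversion applied throughout the tests below (the class and table names here are hypothetical, not part of the patch):

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.util.Bytes;

public class CreateMultiRegionTableExample {
  public static void main(String[] args) throws Exception {
    HBaseTestingUtility util = new HBaseTestingUtility();
    util.startMiniCluster();
    TableName name = TableName.valueOf("exampleTable");
    byte[] family = Bytes.toBytes("family");

    // Old pattern removed by this commit:
    //   HTable table = util.createTable(name, family);
    //   int regionCount = util.createMultiRegions(table, family);

    // New pattern: the table is created pre-split, so the regions exist up front.
    HTable table = util.createMultiRegionTable(name, family);

    // createMultiRegions used to return the region count; converted tests now read
    // it from the table's RegionLocator instead.
    int regionCount;
    try (RegionLocator locator = table.getRegionLocator()) {
      regionCount = locator.getStartKeys().length;
    }
    System.out.println("regions: " + regionCount);

    table.close();
    util.shutdownMiniCluster();
  }
}
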


http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 5ac5f96..f981185 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -17,6 +17,34 @@
  */
 package org.apache.hadoop.hbase;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import java.net.InetAddress;
+import java.net.ServerSocket;
+import java.net.Socket;
+import java.net.UnknownHostException;
+import java.security.MessageDigest;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableSet;
+import java.util.Random;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -94,34 +122,6 @@ import org.apache.zookeeper.WatchedEvent;
 import org.apache.zookeeper.ZooKeeper;
 import org.apache.zookeeper.ZooKeeper.States;
 
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.lang.reflect.Field;
-import java.lang.reflect.Modifier;
-import java.net.InetAddress;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.net.UnknownHostException;
-import java.security.MessageDigest;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableSet;
-import java.util.Random;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.UUID;
-import java.util.concurrent.TimeUnit;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
 /**
  * Facility for testing HBase. Replacement for
  * old HBaseTestCase and HBaseClusterTestCase functionality.
@@ -1243,6 +1243,24 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
     return createTable(tableName, new byte[][]{family});
   }
 
+  /**
+   * Create a table with multiple regions.
+   * @param tableName
+   * @param family
+   * @param numRegions
+   * @return An HTable instance for the created table.
+   * @throws IOException
+   */
+  public HTable createMultiRegionTable(TableName tableName, byte[] family, int numRegions)
+      throws IOException {
+    if (numRegions < 3) throw new IOException("Must create at least 3 regions");
+    byte[] startKey = Bytes.toBytes("aaaaa");
+    byte[] endKey = Bytes.toBytes("zzzzz");
+    byte[][] splitKeys = Bytes.split(startKey, endKey, numRegions - 3);
+
+    return createTable(tableName, new byte[][] { family }, splitKeys);
+  }
+
 
   /**
    * Create a table.
@@ -1261,13 +1279,36 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * Create a table.
    * @param tableName
    * @param families
-   * @return An HT
-   * able instance for the created table.
+   * @return An HTable instance for the created table.
    * @throws IOException
    */
   public HTable createTable(TableName tableName, byte[][] families)
   throws IOException {
-    return createTable(tableName, families, new Configuration(getConfiguration()));
+    return createTable(tableName, families, (byte[][]) null);
+  }
+
+  /**
+   * Create a table with multiple regions.
+   * @param tableName
+   * @param families
+   * @return An HTable instance for the created table.
+   * @throws IOException
+   */
+  public HTable createMultiRegionTable(TableName tableName, byte[][] families) throws IOException {
+    return createTable(tableName, families, KEYS_FOR_HBA_CREATE_TABLE);
+  }
+
+  /**
+   * Create a table.
+   * @param tableName
+   * @param families
+   * @param splitKeys
+   * @return An HTable instance for the created table.
+   * @throws IOException
+   */
+  public HTable createTable(TableName tableName, byte[][] families, byte[][] splitKeys)
+      throws IOException {
+    return createTable(tableName, families, splitKeys, new Configuration(getConfiguration()));
   }
 
   public HTable createTable(byte[] tableName, byte[][] families,
@@ -1307,7 +1348,21 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    */
   public HTable createTable(HTableDescriptor htd, byte[][] families, Configuration c)
   throws IOException {
-    for(byte[] family : families) {
+    return createTable(htd, families, (byte[][]) null, c);
+  }
+
+  /**
+   * Create a table.
+   * @param htd
+   * @param families
+   * @param splitKeys
+   * @param c Configuration to use
+   * @return An HTable instance for the created table.
+   * @throws IOException
+   */
+  public HTable createTable(HTableDescriptor htd, byte[][] families, byte[][] splitKeys,
+      Configuration c) throws IOException {
+    for (byte[] family : families) {
       HColumnDescriptor hcd = new HColumnDescriptor(family);
       // Disable blooms (they are on by default as of 0.95) but we disable them here because
       // tests have hard coded counts of what to expect in block cache, etc., and blooms being
@@ -1315,10 +1370,11 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
       hcd.setBloomFilterType(BloomType.NONE);
       htd.addFamily(hcd);
     }
-    getHBaseAdmin().createTable(htd);
-    // HBaseAdmin only waits for regions to appear in hbase:meta we should wait until they are assigned
+    getHBaseAdmin().createTable(htd, splitKeys);
+    // HBaseAdmin only waits for regions to appear in hbase:meta we should wait until they are
+    // assigned
     waitUntilAllRegionsAssigned(htd.getTableName());
-    return (HTable)getConnection().getTable(htd.getTableName());
+    return (HTable) getConnection().getTable(htd.getTableName());
   }
 
   /**
@@ -1347,7 +1403,21 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
   public HTable createTable(TableName tableName, byte[][] families,
       final Configuration c)
   throws IOException {
-    return createTable(new HTableDescriptor(tableName), families, c);
+    return createTable(tableName, families, (byte[][]) null, c);
+  }
+
+  /**
+   * Create a table.
+   * @param tableName
+   * @param families
+   * @param splitKeys
+   * @param c Configuration to use
+   * @return An HTable instance for the created table.
+   * @throws IOException
+   */
+  public HTable createTable(TableName tableName, byte[][] families, byte[][] splitKeys,
+      final Configuration c) throws IOException {
+    return createTable(new HTableDescriptor(tableName), families, splitKeys, c);
   }
 
   /**
@@ -1471,22 +1541,50 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
   public HTable createTable(TableName tableName, byte[][] families,
       int numVersions)
   throws IOException {
+    return createTable(tableName, families, numVersions, (byte[][]) null);
+  }
+
+  /**
+   * Create a table.
+   * @param tableName
+   * @param families
+   * @param numVersions
+   * @param splitKeys
+   * @return An HTable instance for the created table.
+   * @throws IOException
+   */
+  public HTable createTable(TableName tableName, byte[][] families, int numVersions,
+      byte[][] splitKeys) throws IOException {
     HTableDescriptor desc = new HTableDescriptor(tableName);
     for (byte[] family : families) {
       HColumnDescriptor hcd = new HColumnDescriptor(family).setMaxVersions(numVersions);
       desc.addFamily(hcd);
     }
-    getHBaseAdmin().createTable(desc);
+    getHBaseAdmin().createTable(desc, splitKeys);
     // HBaseAdmin only waits for regions to appear in hbase:meta we should wait until they are assigned
     waitUntilAllRegionsAssigned(tableName);
     return (HTable) getConnection().getTable(tableName);
   }
 
   /**
+   * Create a table with multiple regions.
+   * @param tableName
+   * @param families
+   * @param numVersions
+   * @return An HTable instance for the created table.
+   * @throws IOException
+   */
+  public HTable createMultiRegionTable(TableName tableName, byte[][] families, int numVersions)
+      throws IOException {
+    return createTable(tableName, families, numVersions, KEYS_FOR_HBA_CREATE_TABLE);
+  }
+
+  /**
    * Create a table.
    * @param tableName
    * @param families
    * @param numVersions
+   * @param blockSize
    * @return An HTable instance for the created table.
    * @throws IOException
    */
@@ -1501,6 +1599,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @param tableName
    * @param families
    * @param numVersions
+   * @param blockSize
    * @return An HTable instance for the created table.
    * @throws IOException
    */
@@ -1591,6 +1690,17 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
   }
 
   /**
+   * Create a table with multiple regions.
+   * @param tableName
+   * @param family
+   * @return An HTable instance for the created table.
+   * @throws IOException
+   */
+  public HTable createMultiRegionTable(TableName tableName, byte[] family) throws IOException {
+    return createTable(tableName, family, KEYS_FOR_HBA_CREATE_TABLE);
+  }
+
+  /**
    * Create a table.
    * @param tableName
    * @param families
@@ -2122,19 +2232,6 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
     return digest.toString();
   }
 
-  /**
-   * Creates many regions names "aaa" to "zzz".
-   *
-   * @param table  The table to use for the data.
-   * @param columnFamily  The family to insert the data into.
-   * @return count of regions created.
-   * @throws IOException When creating the regions fails.
-   */
-  public int createMultiRegions(HTable table, byte[] columnFamily)
-  throws IOException {
-    return createMultiRegions(getConfiguration(), table, columnFamily);
-  }
-
   /** All the row values for the data loaded by {@link #loadTable(HTable, byte[])} */
   public static final byte[][] ROWS = new byte[(int) Math.pow('z' - 'a' + 1, 3)][3]; // ~52KB
   static {
@@ -2176,97 +2273,6 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
   };
 
   /**
-   * Creates many regions names "aaa" to "zzz".
-   * @param c Configuration to use.
-   * @param table  The table to use for the data.
-   * @param columnFamily  The family to insert the data into.
-   * @return count of regions created.
-   * @throws IOException When creating the regions fails.
-   */
-  public int createMultiRegions(final Configuration c, final HTable table,
-      final byte[] columnFamily)
-  throws IOException {
-    return createMultiRegions(c, table, columnFamily, KEYS);
-  }
-
-  /**
-   * Creates the specified number of regions in the specified table.
-   * @param c
-   * @param table
-   * @param family
-   * @param numRegions
-   * @return
-   * @throws IOException
-   */
-  public int createMultiRegions(final Configuration c, final HTable table,
-      final byte [] family, int numRegions)
-  throws IOException {
-    if (numRegions < 3) throw new IOException("Must create at least 3 regions");
-    byte [] startKey = Bytes.toBytes("aaaaa");
-    byte [] endKey = Bytes.toBytes("zzzzz");
-    byte [][] splitKeys = Bytes.split(startKey, endKey, numRegions - 3);
-    byte [][] regionStartKeys = new byte[splitKeys.length+1][];
-    System.arraycopy(splitKeys, 0, regionStartKeys, 1, splitKeys.length);
-    regionStartKeys[0] = HConstants.EMPTY_BYTE_ARRAY;
-    return createMultiRegions(c, table, family, regionStartKeys);
-  }
-
-  public int createMultiRegions(final Configuration c, final HTable table,
-      final byte[] columnFamily, byte [][] startKeys)
-  throws IOException {
-    Arrays.sort(startKeys, Bytes.BYTES_COMPARATOR);
-    try (Table meta = new HTable(c, TableName.META_TABLE_NAME)) {
-      HTableDescriptor htd = table.getTableDescriptor();
-      if(!htd.hasFamily(columnFamily)) {
-        HColumnDescriptor hcd = new HColumnDescriptor(columnFamily);
-        htd.addFamily(hcd);
-      }
-      // remove empty region - this is tricky as the mini cluster during the test
-      // setup already has the "<tablename>,,123456789" row with an empty start
-      // and end key. Adding the custom regions below adds those blindly,
-      // including the new start region from empty to "bbb". lg
-      List<byte[]> rows = getMetaTableRows(htd.getTableName());
-      String regionToDeleteInFS = table
-          .getRegionsInRange(Bytes.toBytes(""), Bytes.toBytes("")).get(0)
-          .getRegionInfo().getEncodedName();
-      List<HRegionInfo> newRegions = new ArrayList<HRegionInfo>(startKeys.length);
-      // add custom ones
-      int count = 0;
-      for (int i = 0; i < startKeys.length; i++) {
-        int j = (i + 1) % startKeys.length;
-        HRegionInfo hri = new HRegionInfo(table.getName(),
-          startKeys[i], startKeys[j]);
-        MetaTableAccessor.addRegionToMeta(meta, hri);
-        newRegions.add(hri);
-        count++;
-      }
-      // see comment above, remove "old" (or previous) single region
-      for (byte[] row : rows) {
-        LOG.info("createMultiRegions: deleting meta row -> " +
-          Bytes.toStringBinary(row));
-        meta.delete(new Delete(row));
-      }
-      // remove the "old" region from FS
-      Path tableDir = new Path(getDefaultRootDirPath().toString()
-          + System.getProperty("file.separator") + htd.getTableName()
-          + System.getProperty("file.separator") + regionToDeleteInFS);
-      FileSystem.get(c).delete(tableDir, true);
-      // flush cache of regions
-      HConnection conn = table.getConnection();
-      conn.clearRegionCache();
-      // assign all the new regions IF table is enabled.
-      Admin admin = conn.getAdmin();
-      if (admin.isTableEnabled(table.getName())) {
-        for(HRegionInfo hri : newRegions) {
-          admin.assign(hri.getRegionName());
-        }
-      }
-
-      return count;
-    }
-  }
-
-  /**
    * Create rows in hbase:meta for regions of the specified table with the specified
    * start keys.  The first startKey should be a 0 length byte array if you
    * want to form a proper range of regions.

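A note on the arithmetic in the new createMultiRegionTable(TableName, byte[], int numRegions) above: Bytes.split(start, end, n) returns the n intermediate keys plus the two endpoints, so asking for numRegions - 3 splits between "aaaaa" and "zzzzz" yields numRegions - 1 split keys, and a table created with n split keys has n + 1 regions, i.e. exactly numRegions (the two extra regions being the ranges before "aaaaa" and after "zzzzz"). This mirrors the removed createMultiRegions(c, table, family, numRegions), which prepended an empty start key to the same split set. A quick, illustrative check (standalone, not from the patch):

import org.apache.hadoop.hbase.util.Bytes;

public class SplitKeyArithmetic {
  public static void main(String[] args) {
    int numRegions = 10;
    // Same call as in the new createMultiRegionTable(TableName, byte[], int).
    byte[][] splitKeys =
        Bytes.split(Bytes.toBytes("aaaaa"), Bytes.toBytes("zzzzz"), numRegions - 3);
    // (numRegions - 3) intermediate keys + 2 endpoints = numRegions - 1 split keys,
    // and createTable(tableName, families, splitKeys) makes splitKeys.length + 1 regions.
    System.out.println(splitKeys.length);      // 9
    System.out.println(splitKeys.length + 1);  // 10 regions
  }
}
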
http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFullLogReconstruction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFullLogReconstruction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFullLogReconstruction.java
index 59ddfd7..d5b6a9c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFullLogReconstruction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestFullLogReconstruction.java
@@ -91,10 +91,7 @@ public class TestFullLogReconstruction {
    */
   @Test (timeout=300000)
   public void testReconstruction() throws Exception {
-
-    HTable table = TEST_UTIL.createTable(TABLE_NAME, FAMILY);
-
-    TEST_UTIL.createMultiRegions(table, Bytes.toBytes("family"));
+    HTable table = TEST_UTIL.createMultiRegionTable(TABLE_NAME, FAMILY);
 
     // Load up the table with simple rows and count them
     int initialCount = TEST_UTIL.loadTable(table, FAMILY);

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java
index 7be5074..d8178f0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java
@@ -18,18 +18,21 @@
  */
 package org.apache.hadoop.hbase;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
-import java.util.List;
 import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -73,9 +76,11 @@ public class TestGlobalMemStoreSize {
     byte [] table = Bytes.toBytes("TestGlobalMemStoreSize");
     byte [] family = Bytes.toBytes("family");
     LOG.info("Creating table with " + regionNum + " regions");
-    HTable ht = TEST_UTIL.createTable(TableName.valueOf(table), family);
-    int numRegions = TEST_UTIL.createMultiRegions(conf, ht, family,
-        regionNum);
+    HTable ht = TEST_UTIL.createMultiRegionTable(TableName.valueOf(table), family, regionNum);
+    int numRegions = -1;
+    try (RegionLocator r = ht.getRegionLocator()) {
+      numRegions = r.getStartKeys().length;
+    }
     assertEquals(regionNum,numRegions);
     waitForAllRegionsAssigned();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java
index eadebd3..969394d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java
@@ -33,9 +33,9 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.HConnectionManager;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -87,8 +87,11 @@ public class TestMetaTableAccessor {
     final TableName name =
         TableName.valueOf("testRetrying");
     LOG.info("Started " + name);
-    HTable t = UTIL.createTable(name, HConstants.CATALOG_FAMILY);
-    int regionCount = UTIL.createMultiRegions(t, HConstants.CATALOG_FAMILY);
+    HTable t = UTIL.createMultiRegionTable(name, HConstants.CATALOG_FAMILY);
+    int regionCount = -1;
+    try (RegionLocator r = t.getRegionLocator()) {
+      regionCount = r.getStartKeys().length;
+    }
     // Test it works getting a region from just made user table.
     final List<HRegionInfo> regions =
       testGettingTableRegions(connection, name, regionCount);

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
index bdf7de2..a352c4e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java
@@ -716,8 +716,7 @@ public class TestAdmin2 {
 
     final TableName tableName = TableName.valueOf("testGetRegion");
     LOG.info("Started " + tableName);
-    HTable t = TEST_UTIL.createTable(tableName, HConstants.CATALOG_FAMILY);
-    TEST_UTIL.createMultiRegions(t, HConstants.CATALOG_FAMILY);
+    HTable t = TEST_UTIL.createMultiRegionTable(tableName, HConstants.CATALOG_FAMILY);
 
     HRegionLocation regionLocation = t.getRegionLocation("mmm");
     HRegionInfo region = regionLocation.getRegionInfo();

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 5dd691a..0ebafaf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -27,6 +27,22 @@ import static org.junit.Assert.assertSame;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.UUID;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.atomic.AtomicReference;
+
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -89,23 +105,6 @@ import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-
-import java.io.IOException;
-import java.lang.reflect.Method;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableMap;
-import java.util.UUID;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.atomic.AtomicReference;
-
 /**
  * Run tests that use the HBase clients; {@link HTable}.
  * Sets up the HBase mini cluster once at start and runs through all client tests.
@@ -4985,17 +4984,18 @@ public class TestFromClientSide {
 
     // Set up test table:
     // Create table:
-    HTable ht = TEST_UTIL.createTable(TABLENAME, FAMILY);
-
-    // Create multiple regions for this table
-    int numOfRegions = TEST_UTIL.createMultiRegions(ht, FAMILY);
-    // Create 3 rows in the table, with rowkeys starting with "z*" so that
+    HTable ht = TEST_UTIL.createMultiRegionTable(TABLENAME, FAMILY);
+    int numOfRegions = -1;
+    try (RegionLocator r = ht.getRegionLocator()) {
+      numOfRegions = r.getStartKeys().length;
+    }
+    // Create 3 rows in the table, with rowkeys starting with "zzz*" so that
     // scan are forced to hit all the regions.
-    Put put1 = new Put(Bytes.toBytes("z1"));
+    Put put1 = new Put(Bytes.toBytes("zzz1"));
     put1.add(FAMILY, QUALIFIER, VALUE);
-    Put put2 = new Put(Bytes.toBytes("z2"));
+    Put put2 = new Put(Bytes.toBytes("zzz2"));
     put2.add(FAMILY, QUALIFIER, VALUE);
-    Put put3 = new Put(Bytes.toBytes("z3"));
+    Put put3 = new Put(Bytes.toBytes("zzz3"));
     put3.add(FAMILY, QUALIFIER, VALUE);
     ht.put(Arrays.asList(put1, put2, put3));
 
@@ -5245,9 +5245,12 @@ public class TestFromClientSide {
     byte [] startKey = Bytes.toBytes("ddc");
     byte [] endKey = Bytes.toBytes("mmm");
     TableName TABLE = TableName.valueOf("testGetRegionsInRange");
-    HTable table = TEST_UTIL.createTable(TABLE, new byte[][] {FAMILY}, 10);
-    int numOfRegions = TEST_UTIL.createMultiRegions(table, FAMILY);
-    assertEquals(25, numOfRegions);
+    HTable table = TEST_UTIL.createMultiRegionTable(TABLE, new byte[][] { FAMILY }, 10);
+    int numOfRegions = -1;
+    try (RegionLocator r = table.getRegionLocator()) {
+      numOfRegions = r.getStartKeys().length;
+    }
+    assertEquals(26, numOfRegions);
 
     // Get the regions in this range
     List<HRegionLocation> regionsList = table.getRegionsInRange(startKey,
@@ -5270,22 +5273,22 @@ public class TestFromClientSide {
 
     // Empty end key
     regionsList = table.getRegionsInRange(startKey, HConstants.EMPTY_END_ROW);
-    assertEquals(20, regionsList.size());
+    assertEquals(21, regionsList.size());
 
     // Both start and end keys empty
     regionsList = table.getRegionsInRange(HConstants.EMPTY_START_ROW,
       HConstants.EMPTY_END_ROW);
-    assertEquals(25, regionsList.size());
+    assertEquals(26, regionsList.size());
 
     // Change the end key to somewhere in the last block
-    endKey = Bytes.toBytes("yyz");
+    endKey = Bytes.toBytes("zzz1");
     regionsList = table.getRegionsInRange(startKey, endKey);
-    assertEquals(20, regionsList.size());
+    assertEquals(21, regionsList.size());
 
     // Change the start key to somewhere in the first block
     startKey = Bytes.toBytes("aac");
     regionsList = table.getRegionsInRange(startKey, endKey);
-    assertEquals(25, regionsList.size());
+    assertEquals(26, regionsList.size());
 
     // Make start and end key the same
     startKey = endKey = Bytes.toBytes("ccc");

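The expected counts in testGetRegionsInRange rise from 25 to 26 because the table is now pre-split with KEYS_FOR_HBA_CREATE_TABLE rather than rewritten in meta. Assuming that constant holds the 25 keys "bbb" through "zzz" (the old KEYS ran from the empty start key up to "yyy"), the 25 split keys produce 26 regions; the extra region comes from the added "zzz" split, which is also why the test rows move from "z1" to "zzz1" so that they still land in the last region. A trivial, hypothetical check of that count:

public class ExpectedRegionCount {
  public static void main(String[] args) {
    int assumedSplitKeys = 25;           // "bbb" .. "zzz", assumed contents of KEYS_FOR_HBA_CREATE_TABLE
    int regions = assumedSplitKeys + 1;  // n split keys always give n + 1 regions
    System.out.println(regions);         // 26, matching the updated assertions
  }
}
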
http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
index a86e4fa..4117eb2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
@@ -52,8 +52,6 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.testclassification.FlakeyTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
@@ -71,6 +69,8 @@ import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
+import org.apache.hadoop.hbase.testclassification.FlakeyTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.JVMClusterUtil;
@@ -601,13 +601,12 @@ public class TestHCM {
    */
   @Test
   public void testRegionCaching() throws Exception{
-    TEST_UTIL.createTable(TABLE_NAME, FAM_NAM).close();
+    TEST_UTIL.createMultiRegionTable(TABLE_NAME, FAM_NAM).close();
     Configuration conf =  new Configuration(TEST_UTIL.getConfiguration());
     conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1);
     Connection connection = ConnectionFactory.createConnection(conf);
     final HTable table = (HTable) connection.getTable(TABLE_NAME);
 
-    TEST_UTIL.createMultiRegions(table, FAM_NAM);
     TEST_UTIL.waitUntilAllRegionsAssigned(table.getName());
     Put put = new Put(ROW);
     put.add(FAM_NAM, ROW, ROW);
@@ -809,8 +808,7 @@ public class TestHCM {
    */
   @Test(timeout = 60000)
   public void testCacheSeqNums() throws Exception{
-    HTable table = TEST_UTIL.createTable(TABLE_NAME2, FAM_NAM);
-    TEST_UTIL.createMultiRegions(table, FAM_NAM);
+    HTable table = TEST_UTIL.createMultiRegionTable(TABLE_NAME2, FAM_NAM);
     Put put = new Put(ROW);
     put.add(FAM_NAM, ROW, ROW);
     table.put(put);
@@ -1023,9 +1021,8 @@ public class TestHCM {
 
   @Test (timeout=30000)
   public void testMulti() throws Exception {
-    HTable table = TEST_UTIL.createTable(TABLE_NAME3, FAM_NAM);
+    HTable table = TEST_UTIL.createMultiRegionTable(TABLE_NAME3, FAM_NAM);
      try {
-       TEST_UTIL.createMultiRegions(table, FAM_NAM);
        ConnectionManager.HConnectionImplementation conn =
            ( ConnectionManager.HConnectionImplementation)table.getConnection();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java
index 70e2c33..eaab61b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java
@@ -32,13 +32,12 @@ import java.util.Random;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.MetaTableAccessor;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -73,14 +72,11 @@ public class TestMetaScanner {
     setUp();
     final TableName TABLENAME = TableName.valueOf("testMetaScanner");
     final byte[] FAMILY = Bytes.toBytes("family");
-    TEST_UTIL.createTable(TABLENAME, FAMILY);
-    Configuration conf = TEST_UTIL.getConfiguration();
+    final byte[][] SPLIT_KEYS =
+        new byte[][] { Bytes.toBytes("region_a"), Bytes.toBytes("region_b") };
+
+    TEST_UTIL.createTable(TABLENAME, FAMILY, SPLIT_KEYS);
     HTable table = (HTable) connection.getTable(TABLENAME);
-    TEST_UTIL.createMultiRegions(conf, table, FAMILY,
-        new byte[][]{
-          HConstants.EMPTY_START_ROW,
-          Bytes.toBytes("region_a"),
-          Bytes.toBytes("region_b")});
     // Make sure all the regions are deployed
     TEST_UTIL.countRows(table);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
index 03d310f..bab78ab 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
@@ -75,8 +75,7 @@ public class TestMultiParallel {
     //((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.ALL);
     //((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
     UTIL.startMiniCluster(slaves);
-    HTable t = UTIL.createTable(TEST_TABLE, Bytes.toBytes(FAMILY));
-    UTIL.createMultiRegions(t, Bytes.toBytes(FAMILY));
+    HTable t = UTIL.createMultiRegionTable(TEST_TABLE, Bytes.toBytes(FAMILY));
     UTIL.waitTableEnabled(TEST_TABLE);
     t.close();
     CONNECTION = ConnectionFactory.createConnection(UTIL.getConfiguration());

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
index e1a7a8d..860f6e2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
@@ -23,11 +23,13 @@ import static org.junit.Assert.assertEquals;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.coprocessor.AggregationClient;
 import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -80,10 +82,9 @@ public class TestAggregateProtocol {
         "org.apache.hadoop.hbase.coprocessor.AggregateImplementation");
 
     util.startMiniCluster(2);
-    HTable table = util.createTable(TEST_TABLE, TEST_FAMILY);
-    util.createMultiRegions(util.getConfiguration(), table, TEST_FAMILY,
-        new byte[][] { HConstants.EMPTY_BYTE_ARRAY, ROWS[rowSeperator1],
-            ROWS[rowSeperator2] });
+    final byte[][] SPLIT_KEYS = new byte[][] { ROWS[rowSeperator1],
+        ROWS[rowSeperator2] };
+    HTable table = util.createTable(TEST_TABLE, TEST_FAMILY, SPLIT_KEYS);
     /**
      * The testtable has one CQ which is always populated and one variable CQ
      * for each row rowkey1: CF:CQ CF:CQ1 rowKey2: CF:CQ CF:CQ2

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java
index 7e2d96e..ac75660 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java
@@ -18,15 +18,19 @@
 package org.apache.hadoop.hbase.coprocessor;
 
 import static org.junit.Assert.assertEquals;
+
 import java.math.BigDecimal;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.coprocessor.AggregationClient;
 import org.apache.hadoop.hbase.client.coprocessor.BigDecimalColumnInterpreter;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -78,9 +82,8 @@ public class TestBigDecimalColumnInterpreter {
       "org.apache.hadoop.hbase.coprocessor.AggregateImplementation");
 
     util.startMiniCluster(2);
-    HTable table = util.createTable(TEST_TABLE, TEST_FAMILY);
-    util.createMultiRegions(util.getConfiguration(), table, TEST_FAMILY, new byte[][] {
-        HConstants.EMPTY_BYTE_ARRAY, ROWS[rowSeperator1], ROWS[rowSeperator2] });
+    final byte[][] SPLIT_KEYS = new byte[][] { ROWS[rowSeperator1], ROWS[rowSeperator2] };
+    HTable table = util.createTable(TEST_TABLE, TEST_FAMILY, SPLIT_KEYS);
     /**
      * The testtable has one CQ which is always populated and one variable CQ for each row rowkey1:
      * CF:CQ CF:CQ1 rowKey2: CF:CQ CF:CQ2

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
index 8669a6c..baea95d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
@@ -80,9 +80,8 @@ public class TestDoubleColumnInterpreter {
       "org.apache.hadoop.hbase.coprocessor.AggregateImplementation");
 
     util.startMiniCluster(2);
-    HTable table = util.createTable(TEST_TABLE, TEST_FAMILY);
-    util.createMultiRegions(util.getConfiguration(), table, TEST_FAMILY, new byte[][] {
-        HConstants.EMPTY_BYTE_ARRAY, ROWS[rowSeperator1], ROWS[rowSeperator2] });
+    final byte[][] SPLIT_KEYS = new byte[][] { ROWS[rowSeperator1], ROWS[rowSeperator2] };
+    HTable table = util.createTable(TEST_TABLE, TEST_FAMILY, SPLIT_KEYS);
     /**
      * The testtable has one CQ which is always populated and one variable CQ for each row rowkey1:
      * CF:CQ CF:CQ1 rowKey2: CF:CQ CF:CQ2

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
index 3f5fe9c..094555e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
@@ -1524,10 +1524,9 @@ public class TestMasterObserver {
     cp.enableBypass(false);
     cp.resetStates();
 
-    HTable table = UTIL.createTable(TEST_TABLE, TEST_FAMILY);
+    HTable table = UTIL.createMultiRegionTable(TEST_TABLE, TEST_FAMILY);
 
     try {
-      UTIL.createMultiRegions(table, TEST_FAMILY);
       UTIL.waitUntilAllRegionsAssigned(TEST_TABLE);
 
       NavigableMap<HRegionInfo, ServerName> regions = table.getRegionLocations();

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
index 469dd4e..61908ea 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
@@ -19,27 +19,31 @@
 
 package org.apache.hadoop.hbase.coprocessor;
 
+import static org.junit.Assert.fail;
+
 import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Waiter.Predicate;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import static org.junit.Assert.*;
-
 /**
  * Tests unhandled exceptions thrown by coprocessors running on a regionserver..
  * Expected result is that the regionserver will abort with an informative
@@ -93,8 +97,7 @@ public class TestRegionServerCoprocessorExceptionWithAbort {
       // hosts the region we attempted to write to) to abort.
       final byte[] TEST_FAMILY = Bytes.toBytes("aaa");
 
-      HTable table = TEST_UTIL.createTable(TABLE_NAME, TEST_FAMILY);
-      TEST_UTIL.createMultiRegions(table, TEST_FAMILY);
+      HTable table = TEST_UTIL.createMultiRegionTable(TABLE_NAME, TEST_FAMILY);
       TEST_UTIL.waitUntilAllRegionsAssigned(TABLE_NAME);
 
       // Note which regionServer will abort (after put is attempted).

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java
index af1cd59..ba18e41 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java
@@ -19,25 +19,28 @@
 
 package org.apache.hadoop.hbase.coprocessor;
 
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import static org.junit.Assert.*;
-
 /**
  * Tests unhandled exceptions thrown by coprocessors running on regionserver.
  * Expected result is that the region server will remove the buggy coprocessor from
@@ -91,12 +94,10 @@ public class TestRegionServerCoprocessorExceptionWithRemove {
     // execute, which will set the rsZKNodeDeleted flag to true, which will
     // pass this test.
 
-    TableName TEST_TABLE =
-        TableName.valueOf("observed_table");
+    TableName TEST_TABLE = TableName.valueOf("observed_table");
     byte[] TEST_FAMILY = Bytes.toBytes("aaa");
 
-    HTable table = TEST_UTIL.createTable(TEST_TABLE, TEST_FAMILY);
-    TEST_UTIL.createMultiRegions(table, TEST_FAMILY);
+    HTable table = TEST_UTIL.createMultiRegionTable(TEST_TABLE, TEST_FAMILY);
     TEST_UTIL.waitUntilAllRegionsAssigned(TEST_TABLE);
     // Note which regionServer that should survive the buggy coprocessor's
     // prePut().

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
index 92a7b42..ecea98e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
@@ -24,7 +24,6 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNotSame;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-import static org.mockito.Mockito.when;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -54,11 +53,9 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.HadoopShims;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.PerformanceEvaluation;
-import org.apache.hadoop.hbase.TableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HRegionLocator;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionLocator;
@@ -359,6 +356,16 @@ public class TestHFileOutputFormat  {
     return ret;
   }
 
+  private byte[][] generateRandomSplitKeys(int numKeys) {
+    Random random = new Random();
+    byte[][] ret = new byte[numKeys][];
+    for (int i = 0; i < numKeys; i++) {
+      ret[i] =
+          PerformanceEvaluation.generateData(random, PerformanceEvaluation.DEFAULT_VALUE_LENGTH);
+    }
+    return ret;
+  }
+
   @Test
   public void testMRIncrementalLoad() throws Exception {
     LOG.info("\nStarting test testMRIncrementalLoad\n");
@@ -375,17 +382,19 @@ public class TestHFileOutputFormat  {
       boolean shouldChangeRegions) throws Exception {
     util = new HBaseTestingUtility();
     Configuration conf = util.getConfiguration();
-    byte[][] startKeys = generateRandomStartKeys(5);
+    byte[][] splitKeys = generateRandomSplitKeys(4);
     HBaseAdmin admin = null;
     try {
       util.startMiniCluster();
       Path testDir = util.getDataTestDirOnTestFS("testLocalMRIncrementalLoad");
       admin = util.getHBaseAdmin();
-      HTable table = util.createTable(TABLE_NAME, FAMILIES);
+      HTable table = util.createTable(TABLE_NAME, FAMILIES, splitKeys);
       assertEquals("Should start with empty table",
           0, util.countRows(table));
-      int numRegions = util.createMultiRegions(
-          util.getConfiguration(), table, FAMILIES[0], startKeys);
+      int numRegions = -1;
+      try(RegionLocator r = table.getRegionLocator()) {
+        numRegions = r.getStartKeys().length;
+      }
       assertEquals("Should make 5 regions", numRegions, 5);
 
       // Generate the bulk load files
@@ -416,10 +425,9 @@ public class TestHFileOutputFormat  {
           Threads.sleep(200);
           LOG.info("Waiting on table to finish disabling");
         }
-        byte[][] newStartKeys = generateRandomStartKeys(15);
-        util.createMultiRegions(
-            util.getConfiguration(), table, FAMILIES[0], newStartKeys);
-        admin.enableTable(table.getName());
+        util.deleteTable(table.getName());
+        byte[][] newSplitKeys = generateRandomSplitKeys(14);
+        table = util.createTable(TABLE_NAME, FAMILIES, newSplitKeys);
         while (table.getRegionLocations().size() != 15 ||
             !admin.isTableAvailable(table.getName())) {
           Thread.sleep(200);
@@ -1057,12 +1065,8 @@ public class TestHFileOutputFormat  {
     util = new HBaseTestingUtility(conf);
     if ("newtable".equals(args[0])) {
       TableName tname = TableName.valueOf(args[1]);
-      HTable table = util.createTable(tname, FAMILIES);
-      HBaseAdmin admin = new HBaseAdmin(conf);
-      admin.disableTable(tname);
-      byte[][] startKeys = generateRandomStartKeys(5);
-      util.createMultiRegions(conf, table, FAMILIES[0], startKeys);
-      admin.enableTable(tname);
+      byte[][] splitKeys = generateRandomSplitKeys(4);
+      HTable table = util.createTable(tname, FAMILIES, splitKeys);
     } else if ("incremental".equals(args[0])) {
       TableName tname = TableName.valueOf(args[1]);
       HTable table = (HTable) util.getConnection().getTable(tname);

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 827d47b..0f60f3b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -25,6 +25,16 @@ import static org.junit.Assert.assertNotSame;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.Callable;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -80,16 +90,6 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
 
-import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Random;
-import java.util.Set;
-import java.util.concurrent.Callable;
-
 /**
  * Simple test for {@link CellSortReducer} and {@link HFileOutputFormat2}.
  * Sets up and runs a mapreduce job that writes hfile output.
@@ -358,6 +358,16 @@ public class TestHFileOutputFormat2  {
     return ret;
   }
 
+  private byte[][] generateRandomSplitKeys(int numKeys) {
+    Random random = new Random();
+    byte[][] ret = new byte[numKeys][];
+    for (int i = 0; i < numKeys; i++) {
+      ret[i] =
+          PerformanceEvaluation.generateData(random, PerformanceEvaluation.DEFAULT_VALUE_LENGTH);
+    }
+    return ret;
+  }
+
   @Test
   public void testMRIncrementalLoad() throws Exception {
     LOG.info("\nStarting test testMRIncrementalLoad\n");
@@ -374,15 +384,18 @@ public class TestHFileOutputFormat2  {
       boolean shouldChangeRegions) throws Exception {
     util = new HBaseTestingUtility();
     Configuration conf = util.getConfiguration();
-    byte[][] startKeys = generateRandomStartKeys(5);
+    byte[][] splitKeys = generateRandomSplitKeys(4);
     util.startMiniCluster();
-    try (HTable table = util.createTable(TABLE_NAME, FAMILIES);
-        Admin admin = table.getConnection().getAdmin()) {
+    try {
+      HTable table = util.createTable(TABLE_NAME, FAMILIES, splitKeys);
+      Admin admin = table.getConnection().getAdmin();
       Path testDir = util.getDataTestDirOnTestFS("testLocalMRIncrementalLoad");
       assertEquals("Should start with empty table",
           0, util.countRows(table));
-      int numRegions = util.createMultiRegions(
-          util.getConfiguration(), table, FAMILIES[0], startKeys);
+      int numRegions = -1;
+      try (RegionLocator r = table.getRegionLocator()) {
+        numRegions = r.getStartKeys().length;
+      }
       assertEquals("Should make 5 regions", numRegions, 5);
 
       // Generate the bulk load files
@@ -413,10 +426,10 @@ public class TestHFileOutputFormat2  {
           Threads.sleep(200);
           LOG.info("Waiting on table to finish disabling");
         }
-        byte[][] newStartKeys = generateRandomStartKeys(15);
-        util.createMultiRegions(
-            util.getConfiguration(), table, FAMILIES[0], newStartKeys);
-        admin.enableTable(table.getName());
+        util.deleteTable(table.getName());
+        byte[][] newSplitKeys = generateRandomSplitKeys(14);
+        table = util.createTable(TABLE_NAME, FAMILIES, newSplitKeys);
+
         while (table.getRegionLocator().getAllRegionLocations().size() != 15 ||
             !admin.isTableAvailable(table.getName())) {
           Thread.sleep(200);
@@ -1061,12 +1074,8 @@ public class TestHFileOutputFormat2  {
     util = new HBaseTestingUtility(conf);
     if ("newtable".equals(args[0])) {
       TableName tname = TableName.valueOf(args[1]);
-      try (HTable table = util.createTable(tname, FAMILIES);
-           Admin admin = table.getConnection().getAdmin()) {
-        admin.disableTable(tname);
-        byte[][] startKeys = generateRandomStartKeys(5);
-        util.createMultiRegions(conf, table, FAMILIES[0], startKeys);
-        admin.enableTable(tname);
+      byte[][] splitKeys = generateRandomSplitKeys(4);
+      try (HTable table = util.createTable(tname, FAMILIES, splitKeys)) {
       }
     } else if ("incremental".equals(args[0])) {
       TableName tname = TableName.valueOf(args[1]);
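
For reference, the conversion pattern used throughout this file is to pre-split the table at creation time (four split points give the five regions the old generateRandomStartKeys(5) produced, since the first start key was the empty key) and then read the region count back through a RegionLocator. Below is a minimal, self-contained sketch of that pattern; the table name, family, and split points are illustrative and assume the hbase-server test jars on the classpath, they are not part of the patch:

    import org.apache.hadoop.hbase.HBaseTestingUtility;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.RegionLocator;
    import org.apache.hadoop.hbase.util.Bytes;

    public class PreSplitTableSketch {
      public static void main(String[] args) throws Exception {
        HBaseTestingUtility util = new HBaseTestingUtility();
        util.startMiniCluster();
        try {
          // Four explicit split points yield five regions.
          byte[][] splitKeys = new byte[][] {
              Bytes.toBytes("bbb"), Bytes.toBytes("ddd"),
              Bytes.toBytes("fff"), Bytes.toBytes("hhh") };
          HTable table = util.createTable(TableName.valueOf("demo"),
              new byte[][] { Bytes.toBytes("info") }, splitKeys);
          int numRegions;
          try (RegionLocator locator = table.getRegionLocator()) {
            numRegions = locator.getStartKeys().length;  // one start key per region
          }
          System.out.println("regions: " + numRegions);  // expect 5
          table.close();
        } finally {
          util.shutdownMiniCluster();
        }
      }
    }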

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java
index a46e76a..3226cc6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java
@@ -77,8 +77,8 @@ public class TestMultiTableInputFormat {
     // create and fill table
     for (int i = 0; i < 3; i++) {
       try (HTable table =
-          TEST_UTIL.createTable(TableName.valueOf(TABLE_NAME + String.valueOf(i)), INPUT_FAMILY)) {
-        TEST_UTIL.createMultiRegions(TEST_UTIL.getConfiguration(), table, INPUT_FAMILY, 4);
+          TEST_UTIL.createMultiRegionTable(TableName.valueOf(TABLE_NAME + String.valueOf(i)),
+            INPUT_FAMILY, 4)) {
         TEST_UTIL.loadTable(table, INPUT_FAMILY, false);
       }
     }
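
The remaining conversions lean on the new HBaseTestingUtility#createMultiRegionTable helper instead of explicit split keys. A short sketch of how the replacement reads; tableName stands in for the per-iteration name built in the loop above, and TEST_UTIL/INPUT_FAMILY are the test's own fields, so this is illustrative rather than a drop-in line:

    // One call creates the table already pre-split into four regions,
    // replacing the old createTable() + createMultiRegions() pair.
    try (HTable table = TEST_UTIL.createMultiRegionTable(tableName, INPUT_FAMILY, 4)) {
      TEST_UTIL.loadTable(table, INPUT_FAMILY, false);  // fill it exactly as before
    }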

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
index 99684e9..6180632 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hbase.mapreduce;
 
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 import java.io.File;
 import java.io.IOException;
 import java.util.Iterator;
@@ -25,10 +28,13 @@ import java.util.NavigableMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
@@ -46,9 +52,6 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import static org.junit.Assert.fail;
-import static org.junit.Assert.assertTrue;
-
 /**
  * Test Map/Reduce job over HBase tables. The map/reduce process we're testing
  * on our tables is simple - take every row in the table, reverse the value of
@@ -67,8 +70,9 @@ public class TestMultithreadedTableMapper {
   @BeforeClass
   public static void beforeClass() throws Exception {
     UTIL.startMiniCluster();
-    HTable table = UTIL.createTable(MULTI_REGION_TABLE_NAME, new byte[][] {INPUT_FAMILY, OUTPUT_FAMILY});
-    UTIL.createMultiRegions(table, INPUT_FAMILY);
+    HTable table =
+        UTIL.createMultiRegionTable(MULTI_REGION_TABLE_NAME, new byte[][] { INPUT_FAMILY,
+            OUTPUT_FAMILY });
     UTIL.loadTable(table, INPUT_FAMILY, false);
     UTIL.startMiniMapReduceCluster();
     UTIL.waitUntilAllRegionsAssigned(MULTI_REGION_TABLE_NAME);

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan1.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan1.java
index 0503f19..7d8a895 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan1.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan1.java
@@ -111,7 +111,7 @@ public class TestTableInputFormatScan1 extends TestTableInputFormatScanBase {
    */
   @Test
   public void testGetSplits() throws IOException, InterruptedException, ClassNotFoundException {
-    testNumOfSplits("-1", 50);
+    testNumOfSplits("-1", 52);
     testNumOfSplits("100", 1);
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
index eb42092..c4b0c74 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
@@ -22,7 +22,6 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
-import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.NavigableMap;
@@ -83,8 +82,7 @@ public abstract class TestTableInputFormatScanBase {
     // start mini hbase cluster
     TEST_UTIL.startMiniCluster(3);
     // create and fill table
-    table = TEST_UTIL.createTable(TableName.valueOf(TABLE_NAME), INPUT_FAMILY);
-    TEST_UTIL.createMultiRegions(table, INPUT_FAMILY);
+    table = TEST_UTIL.createMultiRegionTable(TableName.valueOf(TABLE_NAME), INPUT_FAMILY);
     TEST_UTIL.loadTable(table, INPUT_FAMILY, false);
     // start MR cluster
     TEST_UTIL.startMiniMapReduceCluster();

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
index b42966c..2972222 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
@@ -77,8 +77,8 @@ public abstract class TestTableMapReduceBase {
   public static void beforeClass() throws Exception {
     UTIL.startMiniCluster();
     HTable table =
-        UTIL.createTable(MULTI_REGION_TABLE_NAME, new byte[][] { INPUT_FAMILY, OUTPUT_FAMILY });
-    UTIL.createMultiRegions(table, INPUT_FAMILY);
+        UTIL.createMultiRegionTable(MULTI_REGION_TABLE_NAME, new byte[][] { INPUT_FAMILY,
+            OUTPUT_FAMILY });
     UTIL.loadTable(table, INPUT_FAMILY, false);
     UTIL.startMiniMapReduceCluster();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
index 793d299..f56811e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
@@ -76,6 +76,7 @@ import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.NonceGenerator;
 import org.apache.hadoop.hbase.client.PerClientRandomNonceGenerator;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
 import org.apache.hadoop.hbase.client.Table;
@@ -91,11 +92,6 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-import org.apache.hadoop.hbase.wal.DefaultWALProvider;
-import org.apache.hadoop.hbase.wal.WAL;
-import org.apache.hadoop.hbase.wal.WALFactory;
-import org.apache.hadoop.hbase.wal.WALKey;
-import org.apache.hadoop.hbase.wal.WALSplitter;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -104,6 +100,10 @@ import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;
 import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
 import org.apache.hadoop.hbase.util.Threads;
+import org.apache.hadoop.hbase.wal.DefaultWALProvider;
+import org.apache.hadoop.hbase.wal.WAL;
+import org.apache.hadoop.hbase.wal.WALFactory;
+import org.apache.hadoop.hbase.wal.WALSplitter;
 import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
@@ -1433,8 +1433,11 @@ public class TestDistributedLogSplitting {
     TableName table = TableName.valueOf(tname);
     byte [] family = Bytes.toBytes(fname);
     LOG.info("Creating table with " + nrs + " regions");
-    HTable ht = TEST_UTIL.createTable(table, family);
-    int numRegions = TEST_UTIL.createMultiRegions(conf, ht, family, nrs);
+    HTable ht = TEST_UTIL.createMultiRegionTable(table, family, nrs);
+    int numRegions = -1;
+    try (RegionLocator r = ht.getRegionLocator()) {
+      numRegions = r.getStartKeys().length;
+    }
     assertEquals(nrs, numRegions);
       LOG.info("Waiting for no more RIT\n");
     blockUntilNoRIT(zkw, master);

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java
index 1594f80..20e0e54 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java
@@ -32,8 +32,8 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.TableState;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -67,9 +67,11 @@ public class TestMasterRestartAfterDisablingTable {
     TableName table = TableName.valueOf("tableRestart");
     byte[] family = Bytes.toBytes("family");
     log("Creating table with " + NUM_REGIONS_TO_CREATE + " regions");
-    HTable ht = TEST_UTIL.createTable(table, family);
-    int numRegions = TEST_UTIL.createMultiRegions(conf, ht, family,
-        NUM_REGIONS_TO_CREATE);
+    HTable ht = TEST_UTIL.createMultiRegionTable(table, family, NUM_REGIONS_TO_CREATE);
+    int numRegions = -1;
+    try (RegionLocator r = ht.getRegionLocator()) {
+      numRegions = r.getStartKeys().length;
+    }
     numRegions += 1; // catalogs
     log("Waiting for no more RIT\n");
     TEST_UTIL.waitUntilNoRegionsInTransition(60000);

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
index 7ee69f9..a09c5f2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
@@ -22,16 +22,17 @@ import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -64,9 +65,12 @@ public class TestMasterTransitions {
     TEST_UTIL.getConfiguration().setBoolean("dfs.support.append", true);
     TEST_UTIL.startMiniCluster(2);
     // Create a table of three families.  This will assign a region.
-    TEST_UTIL.createTable(TABLENAME, FAMILIES);
+    TEST_UTIL.createMultiRegionTable(TABLENAME, FAMILIES);
     HTable t = (HTable) TEST_UTIL.getConnection().getTable(TABLENAME);
-    int countOfRegions = TEST_UTIL.createMultiRegions(t, getTestFamily());
+    int countOfRegions = -1;
+    try (RegionLocator r = t.getRegionLocator()) {
+      countOfRegions = r.getStartKeys().length;
+    }
     TEST_UTIL.waitUntilAllRegionsAssigned(TABLENAME);
     addToEachStartKey(countOfRegions);
     t.close();

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java
index d58b689..21a5d00 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
@@ -75,9 +76,11 @@ public class  TestRollingRestart {
     TableName table = TableName.valueOf("tableRestart");
     byte [] family = Bytes.toBytes("family");
     log("Creating table with " + NUM_REGIONS_TO_CREATE + " regions");
-    HTable ht = TEST_UTIL.createTable(table, family);
-    int numRegions = TEST_UTIL.createMultiRegions(conf, ht, family,
-        NUM_REGIONS_TO_CREATE);
+    HTable ht = TEST_UTIL.createMultiRegionTable(table, family, NUM_REGIONS_TO_CREATE);
+    int numRegions = -1;
+    try (RegionLocator r = ht.getRegionLocator()) {
+      numRegions = r.getStartKeys().length;
+    }
     numRegions += 1; // catalogs
     log("Waiting for no more RIT\n");
     TEST_UTIL.waitUntilNoRegionsInTransition(60000);

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionFavoredNodes.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionFavoredNodes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionFavoredNodes.java
index c89c0df..8e0bd21 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionFavoredNodes.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionFavoredNodes.java
@@ -31,8 +31,8 @@ import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
@@ -72,8 +72,7 @@ public class TestRegionFavoredNodes {
       return;
     }
     TEST_UTIL.startMiniCluster(REGION_SERVERS);
-    table = TEST_UTIL.createTable(TABLE_NAME, COLUMN_FAMILY);
-    TEST_UTIL.createMultiRegions(table, COLUMN_FAMILY);
+    table = TEST_UTIL.createMultiRegionTable(TABLE_NAME, COLUMN_FAMILY);
     TEST_UTIL.waitUntilAllRegionsAssigned(TABLE_NAME);
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/608025ae/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
index f5cf33d..5ce4456 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
@@ -31,13 +31,10 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionLocator;
@@ -59,6 +56,8 @@ import org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.NoopRes
 import org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingRequest;
 import org.apache.hadoop.hbase.coprocessor.protobuf.generated.PingProtos.PingResponse;
 import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.After;
 import org.junit.AfterClass;
@@ -156,9 +155,8 @@ public class TestServerCustomProtocol {
 
   @Before
   public void before()  throws Exception {
-    HTable table = util.createTable(TEST_TABLE, TEST_FAMILY);
-    util.createMultiRegions(util.getConfiguration(), table, TEST_FAMILY,
-      new byte[][]{ HConstants.EMPTY_BYTE_ARRAY, ROW_B, ROW_C});
+    final byte[][] SPLIT_KEYS = new byte[][] { ROW_B, ROW_C };
+    HTable table = util.createTable(TEST_TABLE, TEST_FAMILY, SPLIT_KEYS);
 
     Put puta = new Put( ROW_A );
     puta.add(TEST_FAMILY, Bytes.toBytes("col1"), Bytes.toBytes(1));
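
A note on the key arrays, since the element counts differ between the old and new calls: createMultiRegions() took region start keys, the first of which was HConstants.EMPTY_BYTE_ARRAY, while createTable() takes split keys, where the empty start key is implicit. The two-element SPLIT_KEYS array above therefore still yields three regions, exactly as the old three-element start-key array did. A small sketch of that equivalence, reusing the test's own util, TEST_TABLE, TEST_FAMILY, ROW_B, ROW_C and its JUnit static imports:

    // Old start keys { EMPTY_BYTE_ARRAY, ROW_B, ROW_C }  -> 3 regions
    // New split keys { ROW_B, ROW_C }                    -> 3 regions:
    //   [ "", ROW_B ), [ ROW_B, ROW_C ), [ ROW_C, "" )
    HTable table = util.createTable(TEST_TABLE, TEST_FAMILY, new byte[][] { ROW_B, ROW_C });
    try (RegionLocator locator = table.getRegionLocator()) {
      assertEquals(3, locator.getStartKeys().length);
    }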

