From: stack@apache.org
To: commits@hbase.apache.org
Date: Thu, 25 Jun 2015 21:42:52 -0000
Subject: [4/6] hbase git commit: HBASE-13893 Replace HTable with Table in client tests (Jurriaan Mous)

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
index 43ba242..908fcdf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
@@ -64,7 +64,7 @@ public class TestHTableMultiplexer {
     TEST_UTIL.shutdownMiniCluster();
   }

-  private static void checkExistence(HTable htable, byte[] row, byte[] family, byte[] quality)
+  private static void checkExistence(Table htable, byte[] row, byte[] family, byte[] quality)
       throws Exception {
     // verify that the Get returns the correct result
     Result r;
@@ -93,63 +93,65 @@ public class TestHTableMultiplexer {
     HTableMultiplexer multiplexer = new HTableMultiplexer(TEST_UTIL.getConfiguration(),
         PER_REGIONSERVER_QUEUE_SIZE);

-    HTable htable1 =
+    Table htable1 =
         TEST_UTIL.createTable(TABLE_1, new byte[][] { FAMILY }, VERSION,
           Bytes.toBytes("aaaaa"), Bytes.toBytes("zzzzz"), NUM_REGIONS);
-    HTable htable2 =
+    Table htable2 =
         TEST_UTIL.createTable(TABLE_2, new byte[][] { FAMILY }, VERSION,
           Bytes.toBytes("aaaaa"), Bytes.toBytes("zzzzz"), NUM_REGIONS);
     TEST_UTIL.waitUntilAllRegionsAssigned(TABLE_1);
     TEST_UTIL.waitUntilAllRegionsAssigned(TABLE_2);

-    byte[][] startRows = htable1.getStartKeys();
-    byte[][] endRows = htable1.getEndKeys();
-
-    // SinglePut case
-    for (int i = 0; i < NUM_REGIONS; i++) {
-      byte [] row = startRows[i];
-      if (row == null || row.length <= 0) continue;
-      Put put = new Put(row).add(FAMILY, QUALIFIER, VALUE1);
-      success = multiplexer.put(TABLE_1, put);
-      assertTrue("multiplexer.put returns", success);
-
-      put = new Put(row).add(FAMILY, QUALIFIER, VALUE1);
-      success = multiplexer.put(TABLE_2, put);
-      assertTrue("multiplexer.put failed", success);
-
-      LOG.info("Put for " + Bytes.toStringBinary(startRows[i]) + " @ iteration " + (i + 1));
+    try (RegionLocator rl = TEST_UTIL.getConnection().getRegionLocator(TABLE_1)) {
+      byte[][] startRows = rl.getStartKeys();
+      byte[][] endRows = rl.getEndKeys();
+
+      // SinglePut case
+      for (int i = 0; i < NUM_REGIONS; i++) {
+        byte [] row = startRows[i];
+        if (row == null || row.length <= 0) continue;
+        Put put = new Put(row).add(FAMILY, QUALIFIER, VALUE1);
+        success = multiplexer.put(TABLE_1, put);
+        assertTrue("multiplexer.put returns", success);
+
+        put = new Put(row).add(FAMILY, QUALIFIER, VALUE1);
+        success = multiplexer.put(TABLE_2, put);
+        assertTrue("multiplexer.put failed", success);
+
+        LOG.info("Put for " + Bytes.toStringBinary(startRows[i]) + " @ iteration " + (i + 1));
+
+        // verify that the Get returns the correct result
+        checkExistence(htable1, startRows[i], FAMILY, QUALIFIER);
+        checkExistence(htable2, startRows[i], FAMILY, QUALIFIER);
+      }
+
+      // MultiPut case
+      List<Put> multiput = new ArrayList<Put>();
+      for (int i = 0; i < NUM_REGIONS; i++) {
+        byte [] row = endRows[i];
+        if (row == null || row.length <= 0) continue;
+        Put put = new Put(row);
+        put.add(FAMILY, QUALIFIER, VALUE2);
+        multiput.add(put);
+      }
+      failedPuts = multiplexer.put(TABLE_1, multiput);
+      assertTrue(failedPuts == null);

       // verify that the Get returns the correct result
-      checkExistence(htable1, startRows[i], FAMILY, QUALIFIER);
-      checkExistence(htable2, startRows[i], FAMILY, QUALIFIER);
-    }
-
-    // MultiPut case
-    List<Put> multiput = new ArrayList<Put>();
-    for (int i = 0; i < NUM_REGIONS; i++) {
-      byte [] row = endRows[i];
-      if (row == null || row.length <= 0) continue;
-      Put put = new Put(row);
-      put.add(FAMILY, QUALIFIER, VALUE2);
-      multiput.add(put);
-    }
-    failedPuts = multiplexer.put(TABLE_1, multiput);
-    assertTrue(failedPuts == null);
-
-    // verify that the Get returns the correct result
-    for (int i = 0; i < NUM_REGIONS; i++) {
-      byte [] row = endRows[i];
-      if (row == null || row.length <= 0) continue;
-      Get get = new Get(row);
-      get.addColumn(FAMILY, QUALIFIER);
-      Result r;
-      int nbTry = 0;
-      do {
-        assertTrue(nbTry++ < 50);
-        Thread.sleep(100);
-        r = htable1.get(get);
-      } while (r == null || r.getValue(FAMILY, QUALIFIER) == null ||
-          Bytes.compareTo(VALUE2, r.getValue(FAMILY, QUALIFIER)) != 0);
+      for (int i = 0; i < NUM_REGIONS; i++) {
+        byte [] row = endRows[i];
+        if (row == null || row.length <= 0) continue;
+        Get get = new Get(row);
+        get.addColumn(FAMILY, QUALIFIER);
+        Result r;
+        int nbTry = 0;
+        do {
+          assertTrue(nbTry++ < 50);
+          Thread.sleep(100);
+          r = htable1.get(get);
+        } while (r == null || r.getValue(FAMILY, QUALIFIER) == null ||
+            Bytes.compareTo(VALUE2, r.getValue(FAMILY, QUALIFIER)) != 0);
+      }
     }
   }
 }
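The hunk above is the template for the rest of this patch: region-boundary lookups move off the HTable instance and onto a RegionLocator checked out from the shared Connection and closed by try-with-resources. A minimal standalone sketch of that pattern (the helper name, the byte[][][] packaging, and "conn"/"tn" are illustrative, not part of the patch):

    // imports: java.io.IOException, org.apache.hadoop.hbase.TableName,
    //          org.apache.hadoop.hbase.client.Connection, org.apache.hadoop.hbase.client.RegionLocator
    static byte[][][] boundaryKeys(Connection conn, TableName tn) throws IOException {
      // The locator is short-lived and closed here; the Connection stays shared.
      try (RegionLocator locator = conn.getRegionLocator(tn)) {
        return new byte[][][] { locator.getStartKeys(), locator.getEndKeys() };
      }
    }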
http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
index 4091e58..b71e881 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
@@ -64,7 +64,7 @@ public class TestHTableMultiplexerFlushCache {
     TEST_UTIL.shutdownMiniCluster();
   }

-  private static void checkExistence(final HTable htable, final byte[] row, final byte[] family,
+  private static void checkExistence(final Table htable, final byte[] row, final byte[] family,
       final byte[] quality, final byte[] value) throws Exception {
     // verify that the Get returns the correct result
@@ -86,31 +86,33 @@ public class TestHTableMultiplexerFlushCache {
   public void testOnRegionChange() throws Exception {
     TableName TABLE = TableName.valueOf("testOnRegionChange");
     final int NUM_REGIONS = 10;
-    HTable htable = TEST_UTIL.createTable(TABLE, new byte[][] { FAMILY }, 3,
+    Table htable = TEST_UTIL.createTable(TABLE, new byte[][] { FAMILY }, 3,
       Bytes.toBytes("aaaaa"), Bytes.toBytes("zzzzz"), NUM_REGIONS);

     HTableMultiplexer multiplexer = new HTableMultiplexer(TEST_UTIL.getConfiguration(),
       PER_REGIONSERVER_QUEUE_SIZE);

-    byte[][] startRows = htable.getStartKeys();
-    byte[] row = startRows[1];
-    assertTrue("2nd region should not start with empty row", row != null && row.length > 0);
+    try (RegionLocator r = TEST_UTIL.getConnection().getRegionLocator(TABLE)) {
+      byte[][] startRows = r.getStartKeys();
+      byte[] row = startRows[1];
+      assertTrue("2nd region should not start with empty row", row != null && row.length > 0);

-    Put put = new Put(row).add(FAMILY, QUALIFIER1, VALUE1);
-    assertTrue("multiplexer.put returns", multiplexer.put(TABLE, put));
-
-    checkExistence(htable, row, FAMILY, QUALIFIER1, VALUE1);
+      Put put = new Put(row).add(FAMILY, QUALIFIER1, VALUE1);
+      assertTrue("multiplexer.put returns", multiplexer.put(TABLE, put));
+
+      checkExistence(htable, row, FAMILY, QUALIFIER1, VALUE1);

-    // Now let's shutdown the regionserver and let regions moved to other servers.
-    HRegionLocation loc = htable.getRegionLocation(row);
-    MiniHBaseCluster hbaseCluster = TEST_UTIL.getHBaseCluster();
-    hbaseCluster.stopRegionServer(loc.getServerName());
-    TEST_UTIL.waitUntilAllRegionsAssigned(TABLE);
+      // Now let's shutdown the regionserver and let regions moved to other servers.
+      HRegionLocation loc = r.getRegionLocation(row);
+      MiniHBaseCluster hbaseCluster = TEST_UTIL.getHBaseCluster();
+      hbaseCluster.stopRegionServer(loc.getServerName());
+      TEST_UTIL.waitUntilAllRegionsAssigned(TABLE);

-    // put with multiplexer.
-    put = new Put(row).add(FAMILY, QUALIFIER2, VALUE2);
-    assertTrue("multiplexer.put returns", multiplexer.put(TABLE, put));
+      // put with multiplexer.
+      put = new Put(row).addColumn(FAMILY, QUALIFIER2, VALUE2);
+      assertTrue("multiplexer.put returns", multiplexer.put(TABLE, put));

-    checkExistence(htable, row, FAMILY, QUALIFIER2, VALUE2);
+      checkExistence(htable, row, FAMILY, QUALIFIER2, VALUE2);
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
index b145109..0e5bd9c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
@@ -157,19 +157,19 @@ public class TestMetaWithReplicas {
     byte[] data = ZKUtil.getData(zkw, primaryMetaZnode);
     ServerName primary = ServerName.parseFrom(data);

-    byte[] TABLE = Bytes.toBytes("testShutdownHandling");
+    TableName TABLE = TableName.valueOf("testShutdownHandling");
     byte[][] FAMILIES = new byte[][] { Bytes.toBytes("foo") };
     if (util.getHBaseAdmin().tableExists(TABLE)) {
       util.getHBaseAdmin().disableTable(TABLE);
       util.getHBaseAdmin().deleteTable(TABLE);
     }
-    Table htable = util.createTable(TABLE, FAMILIES, conf);
+    Table htable = util.createTable(TABLE, FAMILIES);

     util.getHBaseAdmin().flush(TableName.META_TABLE_NAME);
     Thread.sleep(conf.getInt(StorefileRefresherChore.REGIONSERVER_STOREFILE_REFRESH_PERIOD,
         30000) * 6);
     Connection c = ConnectionFactory.createConnection(util.getConfiguration());
-    List<HRegionInfo> regions = MetaTableAccessor.getTableRegions(c, TableName.valueOf(TABLE));
+    List<HRegionInfo> regions = MetaTableAccessor.getTableRegions(c, TABLE);
     HRegionLocation hrl = MetaTableAccessor.getRegionLocation(c, regions.get(0));
     // Ensure that the primary server for test table is not the same one as the primary
     // of the meta region since we will be killing the srv holding the meta's primary...
@@ -198,11 +198,11 @@ public class TestMetaWithReplicas {
     }
     ((ClusterConnection)c).clearRegionCache();
     htable.close();
-    htable = c.getTable(TableName.valueOf(TABLE));
+    htable = c.getTable(TABLE);
     byte[] row = "test".getBytes();
     Put put = new Put(row);
     put.add("foo".getBytes(), row, row);
-    BufferedMutator m = c.getBufferedMutator(TableName.valueOf(TABLE));
+    BufferedMutator m = c.getBufferedMutator(TABLE);
     m.mutate(put);
     m.flush();
     // Try to do a get of the row that was just put
@@ -217,22 +217,22 @@ public class TestMetaWithReplicas {
     ((ClusterConnection)c).clearRegionCache();
     htable.close();
     conf.setBoolean(HConstants.USE_META_REPLICAS, false);
-    htable = c.getTable(TableName.valueOf(TABLE));
+    htable = c.getTable(TABLE);
     r = htable.get(get);
     assertTrue(Arrays.equals(r.getRow(), row));
   }

   @Test
   public void testMetaLookupThreadPoolCreated() throws Exception {
-    byte[] TABLE = Bytes.toBytes("testMetaLookupThreadPoolCreated");
+    TableName TABLE = TableName.valueOf("testMetaLookupThreadPoolCreated");
     byte[][] FAMILIES = new byte[][] { Bytes.toBytes("foo") };
     if (TEST_UTIL.getHBaseAdmin().tableExists(TABLE)) {
       TEST_UTIL.getHBaseAdmin().disableTable(TABLE);
       TEST_UTIL.getHBaseAdmin().deleteTable(TABLE);
     }
-    Table htable = TEST_UTIL.createTable(TABLE, FAMILIES, TEST_UTIL.getConfiguration());
+    Table htable = TEST_UTIL.createTable(TABLE, FAMILIES);
     byte[] row = "test".getBytes();
-    ConnectionImplementation c = ((ConnectionImplementation)((HTable)htable).connection);
+    ConnectionImplementation c = ((ConnectionImplementation) TEST_UTIL.getConnection());
     // check that metalookup pool would get created
     c.relocateRegion(TABLE, row);
     ExecutorService ex = c.getCurrentMetaLookupPool();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
index 2958834..95faf1a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
@@ -75,7 +75,7 @@ public class TestMultiParallel {
     //((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.ALL);
     //((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
     UTIL.startMiniCluster(slaves);
-    HTable t = UTIL.createMultiRegionTable(TEST_TABLE, Bytes.toBytes(FAMILY));
+    Table t = UTIL.createMultiRegionTable(TEST_TABLE, Bytes.toBytes(FAMILY));
     UTIL.waitTableEnabled(TEST_TABLE);
     t.close();
     CONNECTION = ConnectionFactory.createConnection(UTIL.getConfiguration());
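The TestMetaWithReplicas hunks above also show the write-path half of the migration: byte[] table names become TableName, and buffered writes go through a BufferedMutator handed out by the Connection. A standalone sketch of that flow (table, family, and row names here are made up for illustration):

    // imports: java.io.IOException, org.apache.hadoop.hbase.TableName,
    //          org.apache.hadoop.hbase.client.{BufferedMutator, Connection, Put},
    //          org.apache.hadoop.hbase.util.Bytes
    static void bufferedWrite(Connection conn) throws IOException {
      TableName tn = TableName.valueOf("t1");  // replaces byte[] TABLE = Bytes.toBytes("t1")
      try (BufferedMutator m = conn.getBufferedMutator(tn)) {
        Put put = new Put(Bytes.toBytes("row1"));
        put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
        m.mutate(put);  // buffered client-side
        m.flush();      // pushed to the cluster
      }
    }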
http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
index 91a8673..e6bde4e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
@@ -142,10 +142,10 @@ public class TestReplicaWithCluster {
     HTableDescriptor hdt = HTU.createTableDescriptor("testCreateDeleteTable");
     hdt.setRegionReplication(NB_SERVERS);
     hdt.addCoprocessor(SlowMeCopro.class.getName());
-    Table table = HTU.createTable(hdt, new byte[][]{f}, HTU.getConfiguration());
+    Table table = HTU.createTable(hdt, new byte[][]{f}, null);

     Put p = new Put(row);
-    p.add(f, row, row);
+    p.addColumn(f, row, row);
     table.put(p);

     Get g = new Get(row);
@@ -174,11 +174,11 @@ public class TestReplicaWithCluster {
     HTableDescriptor hdt = HTU.createTableDescriptor("testChangeTable");
     hdt.setRegionReplication(NB_SERVERS);
     hdt.addCoprocessor(SlowMeCopro.class.getName());
-    Table table = HTU.createTable(hdt, new byte[][]{f}, HTU.getConfiguration());
+    Table table = HTU.createTable(hdt, new byte[][]{f}, null);

     // basic test: it should work.
     Put p = new Put(row);
-    p.add(f, row, row);
+    p.addColumn(f, row, row);
     table.put(p);

     Get g = new Get(row);
@@ -314,7 +314,7 @@ public class TestReplicaWithCluster {
     HTableDescriptor hdt = HTU.createTableDescriptor("testBulkLoad");
     hdt.setRegionReplication(NB_SERVERS);
     hdt.addCoprocessor(SlowMeCopro.class.getName());
-    Table table = HTU.createTable(hdt, new byte[][]{f}, HTU.getConfiguration());
+    Table table = HTU.createTable(hdt, new byte[][]{f}, null);

     // create hfiles to load.
     LOG.debug("Creating test data");

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
index bfc1230..37e98e8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestReplicasClient.java
@@ -85,7 +85,7 @@ public class TestReplicasClient {
   }

   private static final int NB_SERVERS = 1;
-  private static HTable table = null;
+  private static Table table = null;
   private static final byte[] row = TestReplicasClient.class.getName().getBytes();

   private static HRegionInfo hriPrimary;
@@ -177,9 +177,11 @@ public class TestReplicasClient {
     // Create table then get the single region for our new table.
     HTableDescriptor hdt = HTU.createTableDescriptor(TestReplicasClient.class.getSimpleName());
     hdt.addCoprocessor(SlowMeCopro.class.getName());
-    table = HTU.createTable(hdt, new byte[][]{f}, HTU.getConfiguration());
+    table = HTU.createTable(hdt, new byte[][]{f}, null);

-    hriPrimary = table.getRegionLocation(row, false).getRegionInfo();
+    try (RegionLocator locator = HTU.getConnection().getRegionLocator(hdt.getTableName())) {
+      hriPrimary = locator.getRegionLocation(row, false).getRegionInfo();
+    }

     // mock a secondary region info to open
     hriSecondary = new HRegionInfo(hriPrimary.getTable(), hriPrimary.getStartKey(),
@@ -547,8 +549,7 @@ public class TestReplicasClient {

     Thread.sleep(1000 + REFRESH_PERIOD * 2);

-    AsyncProcess ap = ((ClusterConnection) HTU.getHBaseAdmin().getConnection())
-        .getAsyncProcess();
+    AsyncProcess ap = ((ClusterConnection) HTU.getConnection()).getAsyncProcess();

     // Make primary slowdown
     SlowMeCopro.getCdl().set(new CountDownLatch(1));
@@ -563,8 +564,10 @@ public class TestReplicasClient {
     g.setConsistency(Consistency.TIMELINE);
     gets.add(g);
     Object[] results = new Object[2];
-    AsyncRequestFuture reqs = ap.submitAll(table.getPool(), table.getName(),
-        gets, null, results);
+
+    AsyncRequestFuture reqs = ap.submitAll(
+        HTable.getDefaultExecutor(HTU.getConfiguration()),
+        table.getName(), gets, null, results);
     reqs.waitUntilDone();
     // verify we got the right results back
     for (Object r : results) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultSizeEstimation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultSizeEstimation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultSizeEstimation.java
index 1f3a95b..24494dd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultSizeEstimation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResultSizeEstimation.java
@@ -71,8 +71,7 @@ public class TestResultSizeEstimation {
     TableName TABLE = TableName.valueOf("testResultSizeEstimation");
     byte[][] FAMILIES = new byte[][] { FAMILY };

-    Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
-    HTable table = TEST_UTIL.createTable(TABLE, FAMILIES, conf);
+    Table table = TEST_UTIL.createTable(TABLE, FAMILIES);
     Put p = new Put(ROW1);
     p.add(new KeyValue(ROW1, FAMILY, QUALIFIER, Long.MAX_VALUE, VALUE));
     table.put(p);
@@ -102,8 +101,7 @@ public class TestResultSizeEstimation {
     TableName TABLE = TableName.valueOf("testResultSizeEstimationWithTags");
     byte[][] FAMILIES = new byte[][] { FAMILY };

-    Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
-    HTable table = TEST_UTIL.createTable(TABLE, FAMILIES, conf);
+    Table table = TEST_UTIL.createTable(TABLE, FAMILIES);
     Put p = new Put(ROW1);
     p.add(new KeyValue(ROW1, FAMILY, QUALIFIER, Long.MAX_VALUE, VALUE,
       new Tag[] { new Tag((byte)1, new byte[TAG_DATA_SIZE]) } ));
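The TestReplicasClient hunk above shows the single-row variant of the locator pattern: HTable.getRegionLocation(row) becomes RegionLocator.getRegionLocation(row). A compact sketch of that lookup (helper and parameter names are illustrative):

    // imports: java.io.IOException, org.apache.hadoop.hbase.HRegionLocation,
    //          org.apache.hadoop.hbase.TableName,
    //          org.apache.hadoop.hbase.client.{Connection, RegionLocator}
    static HRegionLocation locate(Connection conn, TableName tn, byte[] row) throws IOException {
      try (RegionLocator locator = conn.getRegionLocator(tn)) {
        // getRegionLocation(row, true) would force a reload instead of using the cache
        return locator.getRegionLocation(row);
      }
    }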
http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
index 5afc226..54963ae 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
@@ -297,9 +297,9 @@ public class TestScannersFromClientSide {
     }

     assertTrue("Expected row count: " + expectedRowCount + " Actual row count: " + rowCount,
-      expectedRowCount == rowCount);
+        expectedRowCount == rowCount);
     assertTrue("Expected cell count: " + expectedCellCount + " Actual cell count: " + cellCount,
-      expectedCellCount == cellCount);
+        expectedCellCount == cellCount);
     scanner.close();
   }

@@ -310,7 +310,7 @@ public class TestScannersFromClientSide {
    */
   @Test
   public void testGetMaxResults() throws Exception {
-    byte [] TABLE = Bytes.toBytes("testGetMaxResults");
+    TableName TABLE = TableName.valueOf("testGetMaxResults");
     byte [][] FAMILIES = HTestConst.makeNAscii(FAMILY, 3);
     byte [][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 20);
@@ -430,7 +430,7 @@
    */
   @Test
   public void testScanMaxResults() throws Exception {
-    byte [] TABLE = Bytes.toBytes("testScanLimit");
+    TableName TABLE = TableName.valueOf("testScanLimit");
     byte [][] ROWS = HTestConst.makeNAscii(ROW, 2);
     byte [][] FAMILIES = HTestConst.makeNAscii(FAMILY, 3);
     byte [][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 10);
@@ -480,7 +480,7 @@
    */
   @Test
   public void testGetRowOffset() throws Exception {
-    byte [] TABLE = Bytes.toBytes("testGetRowOffset");
+    TableName TABLE = TableName.valueOf("testGetRowOffset");
     byte [][] FAMILIES = HTestConst.makeNAscii(FAMILY, 3);
     byte [][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 20);
@@ -579,7 +579,7 @@
     TableName TABLE = TableName.valueOf("testScanOnReopenedRegion");
     byte [][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 2);

-    HTable ht = TEST_UTIL.createTable(TABLE, FAMILY);
+    Table ht = TEST_UTIL.createTable(TABLE, FAMILY);

     Put put;
     Scan scan;
@@ -599,7 +599,11 @@
     scan = new Scan(ROW);
     scanner = ht.getScanner(scan);

-    HRegionLocation loc = ht.getRegionLocation(ROW);
+    HRegionLocation loc;
+
+    try (RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(TABLE)) {
+      loc = locator.getRegionLocation(ROW);
+    }
     HRegionInfo hri = loc.getRegionInfo();
     MiniHBaseCluster cluster = TEST_UTIL.getMiniHBaseCluster();
     byte[] regionName = hri.getRegionName();
@@ -649,12 +653,12 @@
    */
   @Test
   public void testAsyncScanner() throws Exception {
-    byte [] TABLE = Bytes.toBytes("testAsyncScan");
+    TableName TABLE = TableName.valueOf("testAsyncScan");
     byte [][] ROWS = HTestConst.makeNAscii(ROW, 2);
     byte [][] FAMILIES = HTestConst.makeNAscii(FAMILY, 3);
     byte [][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, 10);

-    HTable ht = TEST_UTIL.createTable(TABLE, FAMILIES);
+    Table ht = TEST_UTIL.createTable(TABLE, FAMILIES);

     Put put;
     Scan scan;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
index 095b3c6..e6656fd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
@@ -182,7 +182,7 @@ public class TestSnapshotCloneIndependence {
     runTestSnapshotDeleteIndependent(true);
   }

-  private static void waitOnSplit(final HTable t, int originalCount) throws Exception {
+  private static void waitOnSplit(Connection c, final Table t, int originalCount) throws Exception {
     for (int i = 0; i < 200; i++) {
       try {
         Thread.sleep(50);
@@ -190,8 +190,10 @@ public class TestSnapshotCloneIndependence {
         // Restore the interrupted status
         Thread.currentThread().interrupt();
       }
-      if (t.getAllRegionLocations().size() > originalCount) {
-        return;
+      try (RegionLocator locator = c.getRegionLocator(t.getName())) {
+        if (locator.getAllRegionLocations().size() > originalCount) {
+          return;
+        }
       }
     }
     throw new Exception("Split did not increase the number of regions");
@@ -276,7 +278,7 @@ public class TestSnapshotCloneIndependence {
     final long startTime = System.currentTimeMillis();
     final TableName localTableName = TableName.valueOf(STRING_TABLE_NAME + startTime);

-    HTable original = UTIL.createTable(localTableName, TEST_FAM);
+    Table original = UTIL.createTable(localTableName, TEST_FAM);
     UTIL.loadTable(original, TEST_FAM);
     final int loadedTableCount = UTIL.countRows(original);
     System.out.println("Original table has: " + loadedTableCount + " rows");
@@ -298,7 +300,7 @@ public class TestSnapshotCloneIndependence {
     admin.cloneSnapshot(snapshotName, cloneTableName);

     // Verify that region information is the same pre-split
-    original.clearRegionCache();
+    ((ClusterConnection) UTIL.getConnection()).clearRegionCache();
     List<HRegionInfo> originalTableHRegions = admin.getTableRegions(localTableName);
     final int originalRegionCount = originalTableHRegions.size();
@@ -309,7 +311,7 @@ public class TestSnapshotCloneIndependence {
     // Split a region on the parent table
     admin.splitRegion(originalTableHRegions.get(0).getRegionName());
-    waitOnSplit(original, originalRegionCount);
+    waitOnSplit(UTIL.getConnection(), original, originalRegionCount);

     // Verify that the cloned table region is not split
     final int cloneTableRegionCount2 = admin.getTableRegions(cloneTableName).size();
@@ -332,7 +334,7 @@ public class TestSnapshotCloneIndependence {
     final long startTime = System.currentTimeMillis();
     final TableName localTableName = TableName.valueOf(STRING_TABLE_NAME + startTime);

-    HTable original = UTIL.createTable(localTableName, TEST_FAM);
+    Table original = UTIL.createTable(localTableName, TEST_FAM);
     UTIL.loadTable(original, TEST_FAM);

     final String snapshotNameAsString = "snapshot_" + localTableName;
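The waitOnSplit rewrite above is the patch's polling idiom: region counts come from RegionLocator.getAllRegionLocations() rather than HTable. A self-contained sketch of the same loop outside the test class (names and the 200 x 50 ms budget mirror the hunk, but the helper itself is illustrative):

    // imports: java.io.IOException, org.apache.hadoop.hbase.TableName,
    //          org.apache.hadoop.hbase.client.{Connection, RegionLocator}
    static void waitForRegionCountAbove(Connection conn, TableName tn, int originalCount)
        throws IOException, InterruptedException {
      for (int i = 0; i < 200; i++) {
        Thread.sleep(50);
        try (RegionLocator locator = conn.getRegionLocator(tn)) {
          if (locator.getAllRegionLocations().size() > originalCount) {
            return;  // split became visible
          }
        }
      }
      throw new IllegalStateException("Split did not increase the number of regions");
    }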
http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
index 079a588..287a9e1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
@@ -97,7 +97,7 @@ public class TestSnapshotFromClient {
   public void setup() throws Exception {
     HTableDescriptor htd = new HTableDescriptor(TABLE_NAME);
     htd.setRegionReplication(getNumReplicas());
-    UTIL.createTable(htd, new byte[][]{TEST_FAM}, UTIL.getConfiguration());
+    UTIL.createTable(htd, new byte[][]{TEST_FAM}, null);
   }

   protected int getNumReplicas() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
index 860f6e2..dfa0898 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.coprocessor.AggregationClient;
 import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -84,7 +85,7 @@ public class TestAggregateProtocol {
     util.startMiniCluster(2);
     final byte[][] SPLIT_KEYS = new byte[][] { ROWS[rowSeperator1],
         ROWS[rowSeperator2] };
-    HTable table = util.createTable(TEST_TABLE, TEST_FAMILY, SPLIT_KEYS);
+    Table table = util.createTable(TEST_TABLE, TEST_FAMILY, SPLIT_KEYS);
     /**
      * The testtable has one CQ which is always populated and one variable CQ
      * for each row rowkey1: CF:CQ CF:CQ1 rowKey2: CF:CQ CF:CQ2

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java
index ac75660..2cbd790 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.coprocessor.AggregationClient;
 import org.apache.hadoop.hbase.client.coprocessor.BigDecimalColumnInterpreter;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -83,7 +84,7 @@ public class TestBigDecimalColumnInterpreter {
     util.startMiniCluster(2);
     final byte[][] SPLIT_KEYS = new byte[][] { ROWS[rowSeperator1], ROWS[rowSeperator2] };
-    HTable table = util.createTable(TEST_TABLE, TEST_FAMILY, SPLIT_KEYS);
+    Table table = util.createTable(TEST_TABLE, TEST_FAMILY, SPLIT_KEYS);
     /**
      * The testtable has one CQ which is always populated and one variable CQ for each row rowkey1:
      * CF:CQ CF:CQ1 rowKey2: CF:CQ CF:CQ2
@@ -92,11 +93,11 @@ public class TestBigDecimalColumnInterpreter {
       Put put = new Put(ROWS[i]);
       put.setDurability(Durability.SKIP_WAL);
       BigDecimal bd = new BigDecimal(i);
-      put.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(bd));
+      put.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(bd));
       table.put(put);
       Put p2 = new Put(ROWS[i]);
       put.setDurability(Durability.SKIP_WAL);
-      p2.add(TEST_FAMILY, Bytes.add(TEST_MULTI_CQ, Bytes.toBytes(bd)),
+      p2.addColumn(TEST_FAMILY, Bytes.add(TEST_MULTI_CQ, Bytes.toBytes(bd)),
         Bytes.toBytes(bd.multiply(new BigDecimal("0.10"))));
       table.put(p2);
     }
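The Put hunks above show the mutation-API half of the change in isolation: the deprecated Put.add(family, qualifier, value) becomes Put.addColumn(family, qualifier, value), independent of the HTable-to-Table swap. A minimal sketch (the table handle and the "f"/"q"/"v" names are illustrative):

    // imports: java.io.IOException, org.apache.hadoop.hbase.client.{Put, Table},
    //          org.apache.hadoop.hbase.util.Bytes
    static void putOne(Table table, byte[] row) throws IOException {
      Put put = new Put(row);
      // addColumn replaces the deprecated add(byte[], byte[], byte[])
      put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
      table.put(put);
    }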
http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
index 1ca7676..8301264 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
@@ -26,11 +26,14 @@ import static org.junit.Assert.fail;

 import java.io.IOException;
 import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 import java.util.NavigableMap;
 import java.util.TreeMap;

+import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -173,9 +176,12 @@ public class TestCoprocessorEndpoint {

   @Test
   public void testCoprocessorService() throws Throwable {
-    HTable table = (HTable) util.getConnection().getTable(TEST_TABLE);
-    NavigableMap<HRegionInfo, ServerName> regions = table.getRegionLocations();
+    Table table = util.getConnection().getTable(TEST_TABLE);
+    List<HRegionLocation> regions;
+    try(RegionLocator rl = util.getConnection().getRegionLocator(TEST_TABLE)) {
+      regions = rl.getAllRegionLocations();
+    }

     final TestProtos.EchoRequestProto request =
         TestProtos.EchoRequestProto.newBuilder().setMessage("hello").build();
     final Map<byte[], String> results = Collections.synchronizedMap(
@@ -208,9 +214,9 @@ public class TestCoprocessorEndpoint {
       LOG.info("Got value "+e.getValue()+" for region "+Bytes.toStringBinary(e.getKey()));
     }
     assertEquals(3, results.size());
-    for (HRegionInfo info : regions.navigableKeySet()) {
-      LOG.info("Region info is "+info.getRegionNameAsString());
-      assertTrue(results.containsKey(info.getRegionName()));
+    for (HRegionLocation info : regions) {
+      LOG.info("Region info is "+info.getRegionInfo().getRegionNameAsString());
+      assertTrue(results.containsKey(info.getRegionInfo().getRegionName()));
     }

     results.clear();
@@ -247,8 +253,11 @@ public class TestCoprocessorEndpoint {

   @Test
   public void testCoprocessorServiceNullResponse() throws Throwable {
-    HTable table = (HTable) util.getConnection().getTable(TEST_TABLE);
-    NavigableMap<HRegionInfo, ServerName> regions = table.getRegionLocations();
+    Table table = util.getConnection().getTable(TEST_TABLE);
+    List<HRegionLocation> regions;
+    try(RegionLocator rl = util.getConnection().getRegionLocator(TEST_TABLE)) {
+      regions = rl.getAllRegionLocations();
+    }

     final TestProtos.EchoRequestProto request =
         TestProtos.EchoRequestProto.newBuilder().setMessage("hello").build();
@@ -273,7 +282,8 @@ public class TestCoprocessorEndpoint {
       LOG.info("Got value "+e.getValue()+" for region "+Bytes.toStringBinary(e.getKey()));
     }
     assertEquals(3, results.size());
-    for (HRegionInfo info : regions.navigableKeySet()) {
+    for (HRegionLocation region : regions) {
+      HRegionInfo info = region.getRegionInfo();
       LOG.info("Region info is "+info.getRegionNameAsString());
       assertTrue(results.containsKey(info.getRegionName()));
       assertNull(results.get(info.getRegionName()));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
index baea95d..a1e67fe 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.coprocessor.AggregationClient;
 import org.apache.hadoop.hbase.client.coprocessor.DoubleColumnInterpreter;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -81,7 +82,7 @@ public class TestDoubleColumnInterpreter {
     util.startMiniCluster(2);
     final byte[][] SPLIT_KEYS = new byte[][] { ROWS[rowSeperator1], ROWS[rowSeperator2] };
-    HTable table = util.createTable(TEST_TABLE, TEST_FAMILY, SPLIT_KEYS);
+    Table table = util.createTable(TEST_TABLE, TEST_FAMILY, SPLIT_KEYS);
     /**
      * The testtable has one CQ which is always populated and one variable CQ for each row rowkey1:
      * CF:CQ CF:CQ1 rowKey2: CF:CQ CF:CQ2

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
index 1f95a53..dbba63b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -45,6 +46,8 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.RegionLocator;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.master.AssignmentManager;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
@@ -1612,15 +1615,15 @@ public class TestMasterObserver {
     cp.enableBypass(false);
     cp.resetStates();

-    HTable table = UTIL.createMultiRegionTable(tableName, TEST_FAMILY);
+    Table table = UTIL.createMultiRegionTable(tableName, TEST_FAMILY);

-    try {
+    try (RegionLocator r = UTIL.getConnection().getRegionLocator(tableName)) {
       UTIL.waitUntilAllRegionsAssigned(tableName);

-      NavigableMap<HRegionInfo, ServerName> regions = table.getRegionLocations();
-      Map.Entry<HRegionInfo, ServerName> firstGoodPair = null;
-      for (Map.Entry<HRegionInfo, ServerName> e: regions.entrySet()) {
-        if (e.getValue() != null) {
+      List<HRegionLocation> regions = r.getAllRegionLocations();
+      HRegionLocation firstGoodPair = null;
+      for (HRegionLocation e: regions) {
+        if (e.getServerName() != null) {
           firstGoodPair = e;
           break;
         }
@@ -1630,7 +1633,7 @@ public class TestMasterObserver {
       // Try to force a move
       Collection<ServerName> servers = master.getClusterStatus().getServers();
       String destName = null;
-      String serverNameForFirstRegion = firstGoodPair.getValue().toString();
+      String serverNameForFirstRegion = firstGoodPair.getServerName().toString();
       LOG.info("serverNameForFirstRegion=" + serverNameForFirstRegion);
       ServerName masterServerName = master.getServerName();
       boolean found = false;
@@ -1647,7 +1650,7 @@ public class TestMasterObserver {
       assertTrue("Found server", found);
       LOG.info("Found " + destName);
       master.getMasterRpcServices().moveRegion(null, RequestConverter.buildMoveRegionRequest(
-        firstGoodPair.getKey().getEncodedNameAsBytes(),Bytes.toBytes(destName)));
+        firstGoodPair.getRegionInfo().getEncodedNameAsBytes(),Bytes.toBytes(destName)));
       assertTrue("Coprocessor should have been called on region move",
         cp.wasMoveCalled());
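The TestMasterObserver hunk above illustrates the data-structure shift behind many of these rewrites: HTable.getRegionLocations() returned a NavigableMap<HRegionInfo, ServerName>, while RegionLocator.getAllRegionLocations() returns a flat List<HRegionLocation> that carries both halves of the old map entry. A sketch of the firstGoodPair idiom in the new shape (helper name is illustrative):

    // imports: java.io.IOException, org.apache.hadoop.hbase.HRegionLocation,
    //          org.apache.hadoop.hbase.TableName,
    //          org.apache.hadoop.hbase.client.{Connection, RegionLocator}
    static HRegionLocation firstAssignedRegion(Connection conn, TableName tn) throws IOException {
      try (RegionLocator locator = conn.getRegionLocator(tn)) {
        for (HRegionLocation loc : locator.getAllRegionLocations()) {
          if (loc.getServerName() != null) {  // was Map.Entry.getValue() != null
            return loc;
          }
        }
      }
      return null;  // no region currently assigned
    }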
http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
index d0e561f..c710d3c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
@@ -53,6 +53,7 @@ import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.RowMutations;
@@ -140,9 +141,8 @@ public class TestRegionObserverInterface {
         true, true, true, false, true, true, true });

     verifyMethodResult(SimpleRegionObserver.class,
-        new String[] {"getCtPreOpen", "getCtPostOpen", "getCtPreClose", "getCtPostClose"},
-        tableName,
-        new Integer[] {1, 1, 0, 0});
+        new String[] { "getCtPreOpen", "getCtPostOpen", "getCtPreClose", "getCtPostClose" },
+        tableName, new Integer[] { 1, 1, 0, 0 });

     Get get = new Get(ROW);
     get.addColumn(A, A);
@@ -151,11 +151,9 @@ public class TestRegionObserverInterface {
     table.get(get);

     verifyMethodResult(SimpleRegionObserver.class,
-        new String[] {"hadPreGet", "hadPostGet", "hadPrePut", "hadPostPut",
-        "hadDelete", "hadPrePreparedDeleteTS"},
-        tableName,
-        new Boolean[] {true, true, true, true, false, false}
-    );
+        new String[] { "hadPreGet", "hadPostGet", "hadPrePut", "hadPostPut", "hadDelete",
+            "hadPrePreparedDeleteTS" }, tableName,
+        new Boolean[] { true, true, true, true, false, false });

     Delete delete = new Delete(ROW);
     delete.deleteColumn(A, A);
@@ -182,7 +180,7 @@
   @Test (timeout=300000)
   public void testRowMutation() throws IOException {
     TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + ".testRowMutation");
-    Table table = util.createTable(tableName, new byte[][] {A, B, C});
+    Table table = util.createTable(tableName, new byte[][] { A, B, C });
     try {
       verifyMethodResult(SimpleRegionObserver.class,
         new String[] {"hadPreGet", "hadPostGet", "hadPrePut", "hadPostPut",
@@ -219,7 +217,7 @@
   @Test (timeout=300000)
   public void testIncrementHook() throws IOException {
     TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + ".testIncrementHook");
-    Table table = util.createTable(tableName, new byte[][] {A, B, C});
+    Table table = util.createTable(tableName, new byte[][] { A, B, C });
     try {
       Increment inc = new Increment(Bytes.toBytes(0));
       inc.addColumn(A, A, 1);
@@ -254,11 +252,8 @@
     p = new Put(Bytes.toBytes(0));
     p.add(A, A, A);
     verifyMethodResult(SimpleRegionObserver.class,
-        new String[] {"hadPreCheckAndPut",
-            "hadPreCheckAndPutAfterRowLock", "hadPostCheckAndPut"},
-        tableName,
-        new Boolean[] {false, false, false}
-    );
+        new String[] { "hadPreCheckAndPut", "hadPreCheckAndPutAfterRowLock",
+            "hadPostCheckAndPut" }, tableName, new Boolean[] { false, false, false });
     table.checkAndPut(Bytes.toBytes(0), A, A, A, p);
     verifyMethodResult(SimpleRegionObserver.class,
       new String[] {"hadPreCheckAndPut",
@@ -304,7 +299,7 @@
   @Test (timeout=300000)
   public void testAppendHook() throws IOException {
     TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + ".testAppendHook");
-    Table table = util.createTable(tableName, new byte[][] {A, B, C});
+    Table table = util.createTable(tableName, new byte[][] { A, B, C });
     try {
       Append app = new Append(Bytes.toBytes(0));
       app.add(A, A, A);
@@ -337,11 +332,8 @@
     util.waitUntilAllRegionsAssigned(tableName);

     verifyMethodResult(SimpleRegionObserver.class,
-        new String[] {"hadPreGet", "hadPostGet", "wasScannerNextCalled",
-            "wasScannerCloseCalled"},
-        tableName,
-        new Boolean[] {false, false, false, false}
-    );
+        new String[] { "hadPreGet", "hadPostGet", "wasScannerNextCalled", "wasScannerCloseCalled" },
+        tableName, new Boolean[] { false, false, false, false });

     Table table = util.getConnection().getTable(tableName);
     Put put = new Put(ROW);
@@ -562,8 +554,8 @@
     String testName = TestRegionObserverInterface.class.getName()+".bulkLoadHFileTest";
     TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + ".bulkLoadHFileTest");
     Configuration conf = util.getConfiguration();
-    HTable table = util.createTable(tableName, new byte[][] {A, B, C});
-    try {
+    Table table = util.createTable(tableName, new byte[][] {A, B, C});
+    try (RegionLocator locator = util.getConnection().getRegionLocator(tableName)) {
       verifyMethodResult(SimpleRegionObserver.class,
         new String[] {"hadPreBulkLoadHFile", "hadPostBulkLoadHFile"},
         tableName,
@@ -574,10 +566,10 @@
       final Path dir = util.getDataTestDirOnTestFS(testName).makeQualified(fs);
       Path familyDir = new Path(dir, Bytes.toString(A));

-      createHFile(util.getConfiguration(), fs, new Path(familyDir,Bytes.toString(A)), A, A);
+      createHFile(util.getConfiguration(), fs, new Path(familyDir, Bytes.toString(A)), A, A);

       // Bulk load
-      new LoadIncrementalHFiles(conf).doBulkLoad(dir, table);
+      new LoadIncrementalHFiles(conf).doBulkLoad(dir, util.getHBaseAdmin(), table, locator);

       verifyMethodResult(SimpleRegionObserver.class,
         new String[] {"hadPreBulkLoadHFile", "hadPostBulkLoadHFile"},
@@ -595,21 +587,22 @@ public class TestRegionObserverInterface {
   public void testRecovery() throws Exception {
     LOG.info(TestRegionObserverInterface.class.getName() +".testRecovery");
     TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + ".testRecovery");
-    HTable table = util.createTable(tableName, new byte[][] {A, B, C});
-    try {
+    Table table = util.createTable(tableName, new byte[][] {A, B, C});
+    try (RegionLocator locator = util.getConnection().getRegionLocator(tableName)) {
+
       JVMClusterUtil.RegionServerThread rs1 = cluster.startRegionServer();
       ServerName sn2 = rs1.getRegionServer().getServerName();
-      String regEN = table.getRegionLocations().firstEntry().getKey().getEncodedName();
+      String regEN = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();

       util.getHBaseAdmin().move(regEN.getBytes(), sn2.getServerName().getBytes());
-      while (!sn2.equals(table.getRegionLocations().firstEntry().getValue() )){
+      while (!sn2.equals(locator.getAllRegionLocations().get(0).getServerName())){
         Thread.sleep(100);
       }

       Put put = new Put(ROW);
-      put.add(A, A, A);
-      put.add(B, B, B);
-      put.add(C, C, C);
+      put.addColumn(A, A, A);
+      put.addColumn(B, B, B);
+      put.addColumn(C, C, C);
       table.put(put);

       verifyMethodResult(SimpleRegionObserver.class,
@@ -646,46 +639,48 @@ public class TestRegionObserverInterface {
   public void testLegacyRecovery() throws Exception {
     LOG.info(TestRegionObserverInterface.class.getName() +".testLegacyRecovery");
     TableName tableName = TableName.valueOf(TEST_TABLE.getNameAsString() + ".testLegacyRecovery");
-    HTable table = util.createTable(tableName, new byte[][] {A, B, C});
+    Table table = util.createTable(tableName, new byte[][] {A, B, C});
     try {
-      JVMClusterUtil.RegionServerThread rs1 = cluster.startRegionServer();
-      ServerName sn2 = rs1.getRegionServer().getServerName();
-      String regEN = table.getRegionLocations().firstEntry().getKey().getEncodedName();
+      try (RegionLocator locator = util.getConnection().getRegionLocator(tableName)) {
+        JVMClusterUtil.RegionServerThread rs1 = cluster.startRegionServer();
+        ServerName sn2 = rs1.getRegionServer().getServerName();
+        String regEN = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();
+
+        util.getHBaseAdmin().move(regEN.getBytes(), sn2.getServerName().getBytes());
+        while (!sn2.equals(locator.getAllRegionLocations().get(0).getServerName())) {
+          Thread.sleep(100);
+        }

-      util.getHBaseAdmin().move(regEN.getBytes(), sn2.getServerName().getBytes());
-      while (!sn2.equals(table.getRegionLocations().firstEntry().getValue() )){
-        Thread.sleep(100);
+        Put put = new Put(ROW);
+        put.add(A, A, A);
+        put.add(B, B, B);
+        put.add(C, C, C);
+        table.put(put);
+
+        verifyMethodResult(SimpleRegionObserver.Legacy.class,
+            new String[] {"hadPreGet", "hadPostGet", "hadPrePut", "hadPostPut",
+                "hadPreBatchMutate", "hadPostBatchMutate", "hadDelete"},
+            tableName,
+            new Boolean[] {false, false, true, true, true, true, false}
+        );
+
+        verifyMethodResult(SimpleRegionObserver.Legacy.class,
+            new String[] {"getCtPreWALRestore", "getCtPostWALRestore", "getCtPrePut",
+                "getCtPostPut", "getCtPreWALRestoreDeprecated", "getCtPostWALRestoreDeprecated"},
+            tableName,
+            new Integer[] {0, 0, 1, 1, 0, 0});
+
+        cluster.killRegionServer(rs1.getRegionServer().getServerName());
+        Threads.sleep(1000); // Let the kill soak in.
+        util.waitUntilAllRegionsAssigned(tableName);
+        LOG.info("All regions assigned");
+
+        verifyMethodResult(SimpleRegionObserver.Legacy.class,
+            new String[] {"getCtPreWALRestore", "getCtPostWALRestore", "getCtPrePut",
+                "getCtPostPut", "getCtPreWALRestoreDeprecated", "getCtPostWALRestoreDeprecated"},
+            tableName,
+            new Integer[]{1, 1, 0, 0, 1, 1});
      }
-
-      Put put = new Put(ROW);
-      put.add(A, A, A);
-      put.add(B, B, B);
-      put.add(C, C, C);
-      table.put(put);
-
-      verifyMethodResult(SimpleRegionObserver.Legacy.class,
-          new String[] {"hadPreGet", "hadPostGet", "hadPrePut", "hadPostPut",
-          "hadPreBatchMutate", "hadPostBatchMutate", "hadDelete"},
-          tableName,
-          new Boolean[] {false, false, true, true, true, true, false}
-      );
-
-      verifyMethodResult(SimpleRegionObserver.Legacy.class,
-          new String[] {"getCtPreWALRestore", "getCtPostWALRestore", "getCtPrePut", "getCtPostPut",
-          "getCtPreWALRestoreDeprecated", "getCtPostWALRestoreDeprecated"},
-          tableName,
-          new Integer[] {0, 0, 1, 1, 0, 0});
-
-      cluster.killRegionServer(rs1.getRegionServer().getServerName());
-      Threads.sleep(1000); // Let the kill soak in.
-      util.waitUntilAllRegionsAssigned(tableName);
-      LOG.info("All regions assigned");
-
-      verifyMethodResult(SimpleRegionObserver.Legacy.class,
-          new String[] {"getCtPreWALRestore", "getCtPostWALRestore", "getCtPrePut", "getCtPostPut",
-          "getCtPreWALRestoreDeprecated", "getCtPostWALRestoreDeprecated"},
-          tableName,
-          new Integer[]{1, 1, 0, 0, 1, 1});
     } finally {
       util.deleteTable(tableName);
       table.close();
@@ -696,27 +691,28 @@ public class TestRegionObserverInterface {
   public void testPreWALRestoreSkip() throws Exception {
     LOG.info(TestRegionObserverInterface.class.getName() + ".testPreWALRestoreSkip");
     TableName tableName = TableName.valueOf(SimpleRegionObserver.TABLE_SKIPPED);
-    HTable table = util.createTable(tableName, new byte[][] { A, B, C });
+    Table table = util.createTable(tableName, new byte[][] { A, B, C });

-    JVMClusterUtil.RegionServerThread rs1 = cluster.startRegionServer();
-    ServerName sn2 = rs1.getRegionServer().getServerName();
-    String regEN = table.getRegionLocations().firstEntry().getKey().getEncodedName();
+    try (RegionLocator locator = util.getConnection().getRegionLocator(tableName)) {
+      JVMClusterUtil.RegionServerThread rs1 = cluster.startRegionServer();
+      ServerName sn2 = rs1.getRegionServer().getServerName();
+      String regEN = locator.getAllRegionLocations().get(0).getRegionInfo().getEncodedName();

-    util.getHBaseAdmin().move(regEN.getBytes(), sn2.getServerName().getBytes());
-    while (!sn2.equals(table.getRegionLocations().firstEntry().getValue())) {
-      Thread.sleep(100);
-    }
+      util.getHBaseAdmin().move(regEN.getBytes(), sn2.getServerName().getBytes());
+      while (!sn2.equals(locator.getAllRegionLocations().get(0).getServerName())) {
+        Thread.sleep(100);
+      }

-    Put put = new Put(ROW);
-    put.add(A, A, A);
-    put.add(B, B, B);
-    put.add(C, C, C);
-    table.put(put);
-    table.flushCommits();
+      Put put = new Put(ROW);
+      put.add(A, A, A);
+      put.add(B, B, B);
+      put.add(C, C, C);
+      table.put(put);

-    cluster.killRegionServer(rs1.getRegionServer().getServerName());
-    Threads.sleep(20000); // just to be sure that the kill has fully started.
-    util.waitUntilAllRegionsAssigned(tableName);
+      cluster.killRegionServer(rs1.getRegionServer().getServerName());
+      Threads.sleep(20000); // just to be sure that the kill has fully started.
+      util.waitUntilAllRegionsAssigned(tableName);
+    }

     verifyMethodResult(SimpleRegionObserver.class, new String[] { "getCtPreWALRestore",
         "getCtPostWALRestore", "getCtPreWALRestoreDeprecated", "getCtPostWALRestoreDeprecated"},
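The testPreWALRestoreSkip hunk above also drops table.flushCommits(): the Table interface has no client-side write buffer, so each Table.put() is sent immediately, and code that relied on explicit flushing moves to BufferedMutator. A short sketch of the replacement, assuming an open Connection (names are illustrative):

    // imports: java.io.IOException, org.apache.hadoop.hbase.TableName,
    //          org.apache.hadoop.hbase.client.{BufferedMutator, Connection, Put},
    //          org.apache.hadoop.hbase.util.Bytes
    static void writeWithExplicitFlush(Connection conn) throws IOException {
      try (BufferedMutator mutator = conn.getBufferedMutator(TableName.valueOf("t1"))) {
        Put put = new Put(Bytes.toBytes("row1"));
        put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
        mutator.mutate(put);
        mutator.flush();  // the analogue of the removed HTable.flushCommits()
      }
    }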
// We may get an exception if we retry, http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java index ba18e41..ce76fea 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.testclassification.CoprocessorTests; @@ -97,7 +98,7 @@ public class TestRegionServerCoprocessorExceptionWithRemove { TableName TEST_TABLE = TableName.valueOf("observed_table"); byte[] TEST_FAMILY = Bytes.toBytes("aaa"); - HTable table = TEST_UTIL.createMultiRegionTable(TEST_TABLE, TEST_FAMILY); + Table table = TEST_UTIL.createMultiRegionTable(TEST_TABLE, TEST_FAMILY); TEST_UTIL.waitUntilAllRegionsAssigned(TEST_TABLE); // Note which regionServer that should survive the buggy coprocessor's // prePut(). @@ -108,12 +109,10 @@ public class TestRegionServerCoprocessorExceptionWithRemove { try { final byte[] ROW = Bytes.toBytes("aaa"); Put put = new Put(ROW); - put.add(TEST_FAMILY, ROW, ROW); + put.addColumn(TEST_FAMILY, ROW, ROW); table.put(put); - table.flushCommits(); // We may need two puts to reliably get an exception table.put(put); - table.flushCommits(); } catch (IOException e) { threwIOE = true; } finally { http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java ---------------------------------------------------------------------- diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java index 9f62335..cf131f8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java @@ -29,11 +29,13 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValueUtil; +import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; @@ -51,7 +53,7 @@ public class TestMultiRowRangeFilter { private byte[] family = Bytes.toBytes("family"); private byte[] 
http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java
index 9f62335..cf131f8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java
@@ -29,11 +29,13 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -51,7 +53,7 @@ public class TestMultiRowRangeFilter {
   private byte[] family = Bytes.toBytes("family");
   private byte[] qf = Bytes.toBytes("qf");
   private byte[] value = Bytes.toBytes("val");
-  private byte[] tableName;
+  private TableName tableName;
   private int numRows = 100;
 
   /**
@@ -218,8 +220,8 @@ public class TestMultiRowRangeFilter {
   @Test
   public void testMultiRowRangeFilterWithRangeOverlap() throws IOException {
-    tableName = Bytes.toBytes("testMultiRowRangeFilterWithRangeOverlap");
-    HTable ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
+    tableName = TableName.valueOf("testMultiRowRangeFilterWithRangeOverlap");
+    Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
     generateRows(numRows, ht, family, qf, value);

     Scan scan = new Scan();
@@ -246,8 +248,8 @@ public class TestMultiRowRangeFilter {
   @Test
   public void testMultiRowRangeFilterWithoutRangeOverlap() throws IOException {
-    tableName = Bytes.toBytes("testMultiRowRangeFilterWithoutRangeOverlap");
-    HTable ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
+    tableName = TableName.valueOf("testMultiRowRangeFilterWithoutRangeOverlap");
+    Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
     generateRows(numRows, ht, family, qf, value);

     Scan scan = new Scan();
@@ -273,8 +275,8 @@ public class TestMultiRowRangeFilter {
   @Test
   public void testMultiRowRangeFilterWithEmptyStartRow() throws IOException {
-    tableName = Bytes.toBytes("testMultiRowRangeFilterWithEmptyStartRow");
-    HTable ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
+    tableName = TableName.valueOf("testMultiRowRangeFilterWithEmptyStartRow");
+    Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
     generateRows(numRows, ht, family, qf, value);
     Scan scan = new Scan();
     scan.setMaxVersions();
@@ -295,8 +297,8 @@ public class TestMultiRowRangeFilter {
   @Test
   public void testMultiRowRangeFilterWithEmptyStopRow() throws IOException {
-    tableName = Bytes.toBytes("testMultiRowRangeFilterWithEmptyStopRow");
-    HTable ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
+    tableName = TableName.valueOf("testMultiRowRangeFilterWithEmptyStopRow");
+    Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
     generateRows(numRows, ht, family, qf, value);
     Scan scan = new Scan();
     scan.setMaxVersions();
@@ -316,8 +318,8 @@ public class TestMultiRowRangeFilter {
   @Test
   public void testMultiRowRangeFilterWithInclusive() throws IOException {
-    tableName = Bytes.toBytes("testMultiRowRangeFilterWithInclusive");
-    HTable ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
+    tableName = TableName.valueOf("testMultiRowRangeFilterWithInclusive");
+    Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
     generateRows(numRows, ht, family, qf, value);

     Scan scan = new Scan();
@@ -344,8 +346,8 @@ public class TestMultiRowRangeFilter {
   @Test
   public void testMultiRowRangeFilterWithExclusive() throws IOException {
-    tableName = Bytes.toBytes("testMultiRowRangeFilterWithExclusive");
-    HTable ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
+    tableName = TableName.valueOf("testMultiRowRangeFilterWithExclusive");
+    Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
     generateRows(numRows, ht, family, qf, value);

     Scan scan = new Scan();
@@ -370,8 +372,8 @@ public class TestMultiRowRangeFilter {
   @Test
   public void testMultiRowRangeWithFilterListAndOperator() throws IOException {
-    tableName = Bytes.toBytes("TestMultiRowRangeFilterWithFilterListAndOperator");
-    HTable ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
+    tableName = TableName.valueOf("TestMultiRowRangeFilterWithFilterListAndOperator");
+    Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
     generateRows(numRows, ht, family, qf, value);

     Scan scan = new Scan();
@@ -405,8 +407,8 @@ public class TestMultiRowRangeFilter {
   @Test
   public void testMultiRowRangeWithFilterListOrOperator() throws IOException {
-    tableName = Bytes.toBytes("TestMultiRowRangeFilterWithFilterListOrOperator");
-    HTable ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
+    tableName = TableName.valueOf("TestMultiRowRangeFilterWithFilterListOrOperator");
+    Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE);
     generateRows(numRows, ht, family, qf, value);

     Scan scan = new Scan();
@@ -440,18 +442,18 @@ public class TestMultiRowRangeFilter {
     ht.close();
   }

-  private void generateRows(int numberOfRows, HTable ht, byte[] family, byte[] qf, byte[] value)
+  private void generateRows(int numberOfRows, Table ht, byte[] family, byte[] qf, byte[] value)
       throws IOException {
     for (int i = 0; i < numberOfRows; i++) {
       byte[] row = Bytes.toBytes(i);
       Put p = new Put(row);
-      p.add(family, qf, value);
+      p.addColumn(family, qf, value);
       ht.put(p);
     }
     TEST_UTIL.flush();
   }

-  private List getScanResult(byte[] startRow, byte[] stopRow, HTable ht) throws IOException {
+  private List getScanResult(byte[] startRow, byte[] stopRow, Table ht) throws IOException {
     Scan scan = new Scan();
     scan.setMaxVersions();
     if(!Bytes.toString(startRow).isEmpty()) {
@@ -471,7 +473,7 @@ public class TestMultiRowRangeFilter {
     return kvList;
   }

-  private int getResultsSize(HTable ht, Scan scan) throws IOException {
+  private int getResultsSize(Table ht, Scan scan) throws IOException {
     ResultScanner scanner = ht.getScanner(scan);
     List results = new ArrayList();
     Result r;
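Besides swapping HTable for Table, this file moves the tableName field from a raw byte[] to the typed TableName, so TEST_UTIL.createTable() receives a proper table handle. As background for the hunks above, here is a minimal sketch of the MultiRowRangeFilter being tested; the range bounds are illustrative, and the constructor is assumed to declare IOException as in the 1.x filter API:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.MultiRowRangeFilter;
    import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RangeScanSketch {
      // Build a Scan that covers two disjoint row ranges in a single pass,
      // over keys of the shape generateRows() writes with Bytes.toBytes(int).
      static Scan twoRangeScan() throws IOException {
        List<RowRange> ranges = new ArrayList<RowRange>();
        ranges.add(new RowRange(Bytes.toBytes(10), true, Bytes.toBytes(20), false));
        ranges.add(new RowRange(Bytes.toBytes(65), true, Bytes.toBytes(75), false));
        Scan scan = new Scan();
        scan.setFilter(new MultiRowRangeFilter(ranges));
        return scan;
      }
    }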
http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
index c648a8b..a7cb4a3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
@@ -19,11 +19,15 @@ package org.apache.hadoop.hbase.io.encoding;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.List;
 import java.util.Map;
 import java.util.NavigableMap;

 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.client.RegionLocator;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.ServerName;
@@ -75,7 +79,7 @@ public class TestLoadAndSwitchEncodeOnDisk extends
     HColumnDescriptor hcd = getColumnDesc(admin);
     System.err.println("\nDisabling encode-on-disk. Old column descriptor: " + hcd + "\n");
-    HTable t = (HTable) TEST_UTIL.getConnection().getTable(TABLE);
+    Table t = TEST_UTIL.getConnection().getTable(TABLE);
     assertAllOnLine(t);

     admin.disableTable(TABLE);
@@ -92,7 +96,7 @@ public class TestLoadAndSwitchEncodeOnDisk extends
     assertAllOnLine(t);

     System.err.println("\nCompacting the table\n");
-    admin.majorCompact(TABLE.getName());
+    admin.majorCompact(TABLE);
     // Wait until compaction completes
     Threads.sleepWithoutInterrupt(5000);
     HRegionServer rs = TEST_UTIL.getMiniHBaseCluster().getRegionServer(0);
@@ -103,10 +107,13 @@ public class TestLoadAndSwitchEncodeOnDisk extends
     System.err.println("\nDone with the test, shutting down the cluster\n");
   }

-  private void assertAllOnLine(final HTable t) throws IOException {
-    NavigableMap regions = t.getRegionLocations();
-    for (Map.Entry e: regions.entrySet()) {
-      byte [] startkey = e.getKey().getStartKey();
+  private void assertAllOnLine(final Table t) throws IOException {
+    List regions;
+    try(RegionLocator rl = TEST_UTIL.getConnection().getRegionLocator(t.getName())) {
+      regions = rl.getAllRegionLocations();
+    }
+    for (HRegionLocation e: regions) {
+      byte [] startkey = e.getRegionInfo().getStartKey();
       Scan s = new Scan(startkey);
       ResultScanner scanner = t.getScanner(s);
       Result r = scanner.next();
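The assertAllOnLine() rewrite is the structural change here: region locations no longer come from the table handle (HTable.getRegionLocations() is gone) but from a RegionLocator opened on the Connection. A sketch of the pattern, assuming an already-open Connection; the class and method names are illustrative:

    import java.io.IOException;
    import java.util.List;

    import org.apache.hadoop.hbase.HRegionLocation;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.RegionLocator;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RegionWalkSketch {
      // Enumerate a table's regions through RegionLocator; each start key can
      // then seed a per-region Scan, as assertAllOnLine() does above.
      static void printStartKeys(Connection conn, TableName name) throws IOException {
        try (RegionLocator locator = conn.getRegionLocator(name)) {
          List<HRegionLocation> regions = locator.getAllRegionLocations();
          for (HRegionLocation loc : regions) {
            byte[] startKey = loc.getRegionInfo().getStartKey();
            System.out.println(Bytes.toStringBinary(startKey));
          }
        }
      }
    }

Note also the Admin change in the same file: majorCompact() now takes the TableName directly instead of its byte[] name.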
http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
index 6129b26..e77425a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.NullWritable;
@@ -77,7 +78,7 @@ public abstract class MultiTableInputFormatTestBase {
     TEST_UTIL.startMiniCluster(3);
     // create and fill table
     for (String tableName : TABLES) {
-      try (HTable table =
+      try (Table table =
           TEST_UTIL.createMultiRegionTable(TableName.valueOf(tableName),
               INPUT_FAMILY, 4)) {
         TEST_UTIL.loadTable(table, INPUT_FAMILY, false);
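The setup loop now binds the created table to the Table interface inside try-with-resources, which closes the client handle after loading while leaving the table itself online. Isolated to a single table, the create-and-load step looks like the sketch below; the family name is illustrative, and the region count simply mirrors the code above. It assumes a mini cluster has already been started, as in the surrounding test base:

    import org.apache.hadoop.hbase.HBaseTestingUtility;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CreateAndLoadSketch {
      static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
      static final byte[] INPUT_FAMILY = Bytes.toBytes("contents");  // illustrative

      // Create a pre-split test table and fill it with the utility's test rows;
      // the Table handle closes automatically when the block exits.
      static void createAndLoad(String tableName) throws Exception {
        try (Table table = TEST_UTIL.createMultiRegionTable(
            TableName.valueOf(tableName), INPUT_FAMILY, 4)) {
          TEST_UTIL.loadTable(table, INPUT_FAMILY, false);
        }
      }
    }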
http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
index 6b23e37..2d3d6ef 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
@@ -80,38 +81,37 @@ public class TestCellCounter {
    */
   @Test (timeout=300000)
   public void testCellCounter() throws Exception {
-    String sourceTable = "sourceTable";
+    TableName sourceTable = TableName.valueOf("sourceTable");
     byte[][] families = { FAMILY_A, FAMILY_B };
-    Table t = UTIL.createTable(Bytes.toBytes(sourceTable), families);
+    Table t = UTIL.createTable(sourceTable, families);
     try{
-    Put p = new Put(ROW1);
-    p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
-    p.add(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
-    p.add(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
-    t.put(p);
-    p = new Put(ROW2);
-    p.add(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
-    p.add(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
-    p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
-    t.put(p);
-    String[] args = { sourceTable, FQ_OUTPUT_DIR.toString(), ";", "^row1" };
-    runCount(args);
-    FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
-    "part-r-00000");
-    String data = IOUtils.toString(inputStream);
-    inputStream.close();
-    assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
-    assertTrue(data.contains("Total Qualifiers across all Rows" + "\t" + "2"));
-    assertTrue(data.contains("Total ROWS" + "\t" + "1"));
-    assertTrue(data.contains("b;q" + "\t" + "1"));
-    assertTrue(data.contains("a;q" + "\t" + "1"));
-    assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
-    assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
+      Put p = new Put(ROW1);
+      p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
+      p.add(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
+      p.add(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
+      t.put(p);
+      p = new Put(ROW2);
+      p.add(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
+      p.add(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
+      p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
+      t.put(p);
+      String[] args = { sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1" };
+      runCount(args);
+      FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
+          "part-r-00000");
+      String data = IOUtils.toString(inputStream);
+      inputStream.close();
+      assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
+      assertTrue(data.contains("Total Qualifiers across all Rows" + "\t" + "2"));
+      assertTrue(data.contains("Total ROWS" + "\t" + "1"));
+      assertTrue(data.contains("b;q" + "\t" + "1"));
+      assertTrue(data.contains("a;q" + "\t" + "1"));
+      assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
+      assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
     }finally{
       t.close();
       FileUtil.fullyDelete(new File(OUTPUT_DIR));
     }
-
   }

   /**
@@ -119,9 +119,9 @@ public class TestCellCounter {
    */
   @Test (timeout=300000)
   public void testCellCounterStartTimeRange() throws Exception {
-    String sourceTable = "testCellCounterStartTimeRange";
+    TableName sourceTable = TableName.valueOf("testCellCounterStartTimeRange");
     byte[][] families = { FAMILY_A, FAMILY_B };
-    Table t = UTIL.createTable(Bytes.toBytes(sourceTable), families);
+    Table t = UTIL.createTable(sourceTable, families);
     try{
       Put p = new Put(ROW1);
       p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
@@ -134,7 +134,7 @@ public class TestCellCounter {
       p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
       t.put(p);
       String[] args = {
-        sourceTable, FQ_OUTPUT_DIR.toString(), ";", "^row1", "--starttime=" + now,
+        sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1", "--starttime=" + now,
         "--endtime=" + now + 2 };
       runCount(args);
       FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
@@ -159,9 +159,9 @@ public class TestCellCounter {
    */
   @Test (timeout=300000)
   public void testCellCounteEndTimeRange() throws Exception {
-    String sourceTable = "testCellCounterEndTimeRange";
+    TableName sourceTable = TableName.valueOf("testCellCounterEndTimeRange");
     byte[][] families = { FAMILY_A, FAMILY_B };
-    Table t = UTIL.createTable(Bytes.toBytes(sourceTable), families);
+    Table t = UTIL.createTable(sourceTable, families);
     try{
       Put p = new Put(ROW1);
       p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
@@ -174,7 +174,8 @@ public class TestCellCounter {
       p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
       t.put(p);
       String[] args = {
-        sourceTable, FQ_OUTPUT_DIR.toString(), ";", "^row1", "--endtime=" + now + 1 };
+        sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "^row1",
+        "--endtime=" + now + 1 };
       runCount(args);
       FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + File.separator +
           "part-r-00000");
@@ -198,9 +199,9 @@ public class TestCellCounter {
    */
   @Test (timeout=300000)
   public void testCellCounteOutOfTimeRange() throws Exception {
-    String sourceTable = "testCellCounterOutTimeRange";
+    TableName sourceTable = TableName.valueOf("testCellCounterOutTimeRange");
     byte[][] families = { FAMILY_A, FAMILY_B };
-    Table t = UTIL.createTable(Bytes.toBytes(sourceTable), families);
+    Table t = UTIL.createTable(sourceTable, families);
     try{
       Put p = new Put(ROW1);
       p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
@@ -213,7 +214,7 @@ public class TestCellCounter {
       p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
       t.put(p);
       String[] args = {
-        sourceTable, FQ_OUTPUT_DIR.toString(), ";", "--starttime=" + now + 1,
+        sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(), ";", "--starttime=" + now + 1,
         "--endtime=" + now + 2 };

       runCount(args);
@@ -275,14 +276,14 @@ public class TestCellCounter {
    */
   @Test(timeout = 300000)
   public void testCellCounterForCompleteTable() throws Exception {
-    String sourceTable = "testCellCounterForCompleteTable";
+    TableName sourceTable = TableName.valueOf("testCellCounterForCompleteTable");
     String outputPath = OUTPUT_DIR + sourceTable;
     LocalFileSystem localFileSystem = new LocalFileSystem();
     Path outputDir = new Path(outputPath).makeQualified(localFileSystem.getUri(),
         localFileSystem.getWorkingDirectory());
     byte[][] families = { FAMILY_A, FAMILY_B };
-    Table t = UTIL.createTable(Bytes.toBytes(sourceTable), families);
+    Table t = UTIL.createTable(sourceTable, families);
     try {
       Put p = new Put(ROW1);
       p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
@@ -294,7 +295,7 @@ public class TestCellCounter {
       p.add(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
       p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
       t.put(p);
-      String[] args = { sourceTable, outputDir.toString(), ";" };
+      String[] args = { sourceTable.getNameAsString(), outputDir.toString(), ";" };
       runCount(args);
       FileInputStream inputStream = new FileInputStream(outputPath + File.separator +
           "part-r-00000");
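TestCellCounter keeps the typed TableName for createTable() and converts with getNameAsString() only at the CLI boundary, since CellCounter parses plain string arguments. The argument layout the tests hand to runCount() looks like the following sketch; the output path is illustrative. Note the parentheses around (now + 2): in the diff itself, "--endtime=" + now + 2 appends "2" by string concatenation rather than adding numerically, a quirk the patch preserves as-is:

    import org.apache.hadoop.hbase.TableName;

    public class CellCounterArgsSketch {
      public static void main(String[] unused) {
        TableName sourceTable = TableName.valueOf("sourceTable");
        long now = System.currentTimeMillis();
        String[] args = {
            sourceTable.getNameAsString(),  // table to count
            "/tmp/cellcounter-out",         // output directory (illustrative)
            ";",                            // output delimiter
            "^row1",                        // optional row regex
            "--starttime=" + now,
            "--endtime=" + (now + 2)        // parenthesized to add numerically
        };
        for (String arg : args) {
          System.out.println(arg);
        }
      }
    }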
http://git-wip-us.apache.org/repos/asf/hbase/blob/b5b58530/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
index c96d7c4..118395b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
@@ -189,13 +189,13 @@ public class TestCopyTable {
    */
   @Test
   public void testRenameFamily() throws Exception {
-    String sourceTable = "sourceTable";
-    String targetTable = "targetTable";
+    TableName sourceTable = TableName.valueOf("sourceTable");
+    TableName targetTable = TableName.valueOf("targetTable");

     byte[][] families = { FAMILY_A, FAMILY_B };

-    Table t = TEST_UTIL.createTable(Bytes.toBytes(sourceTable), families);
-    Table t2 = TEST_UTIL.createTable(Bytes.toBytes(targetTable), families);
+    Table t = TEST_UTIL.createTable(sourceTable, families);
+    Table t2 = TEST_UTIL.createTable(targetTable, families);
     Put p = new Put(ROW1);
     p.add(FAMILY_A, QUALIFIER, Bytes.toBytes("Data11"));
     p.add(FAMILY_B, QUALIFIER, Bytes.toBytes("Data12"));
@@ -210,7 +210,7 @@ public class TestCopyTable {
     long currentTime = System.currentTimeMillis();
     String[] args = new String[] { "--new.name=" + targetTable, "--families=a:b", "--all.cells",
         "--starttime=" + (currentTime - 100000), "--endtime=" + (currentTime + 100000),
-        "--versions=1", sourceTable };
+        "--versions=1", sourceTable.getNameAsString() };

     assertNull(t2.get(new Get(ROW1)).getRow());

     assertTrue(runCopy(args));
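The CopyTable invocation keeps its shape; only the final positional argument changes, since sourceTable is now a TableName ("--new.name=" + targetTable works because TableName.toString() yields the name as a string). For reference, a sketch of what the test's args encode; the flag readings are from CopyTable's usage text, not from this patch:

    import org.apache.hadoop.hbase.TableName;

    public class CopyTableArgsSketch {
      public static void main(String[] unused) {
        TableName sourceTable = TableName.valueOf("sourceTable");
        TableName targetTable = TableName.valueOf("targetTable");
        long currentTime = System.currentTimeMillis();
        String[] args = {
            "--new.name=" + targetTable,  // destination table
            "--families=a:b",             // copy family "a" into family "b"
            "--all.cells",                // also copy delete markers/deleted cells
            "--starttime=" + (currentTime - 100000),
            "--endtime=" + (currentTime + 100000),
            "--versions=1",
            sourceTable.getNameAsString() // source table, as a plain string
        };
        for (String arg : args) {
          System.out.println(arg);
        }
      }
    }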