hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From st...@apache.org
Subject [1/2] hbase git commit: HBASE-15875 Remove HTable references and HTableInterface
Date Tue, 31 May 2016 02:57:01 GMT
Repository: hbase
Updated Branches:
  refs/heads/master 47176049f -> a1f0c1cbb


http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
index 190a1d2..4723fa8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
@@ -176,45 +176,56 @@ public class TestHCM {
 
     TableName tableName = TableName.valueOf("testClusterConnection");
     TEST_UTIL.createTable(tableName, FAM_NAM).close();
-    HTable t = (HTable)con1.getTable(tableName, otherPool);
-    // make sure passing a pool to the getTable does not trigger creation of an internal pool
-    assertNull("Internal Thread pool should be null",
-        ((ConnectionImplementation) con1).getCurrentBatchPool());
-    // table should use the pool passed
-    assertTrue(otherPool == t.getPool());
-    t.close();
-
-    t = (HTable)con2.getTable(tableName);
-    // table should use the connectin's internal pool
-    assertTrue(otherPool == t.getPool());
-    t.close();
+    Table table = con1.getTable(tableName, otherPool);
 
-    t = (HTable)con2.getTable(tableName);
-    // try other API too
-    assertTrue(otherPool == t.getPool());
-    t.close();
-
-    t = (HTable)con2.getTable(tableName);
-    // try other API too
-    assertTrue(otherPool == t.getPool());
-    t.close();
+    ExecutorService pool = null;
 
-    t = (HTable)con1.getTable(tableName);
-    ExecutorService pool = ((ConnectionImplementation)con1).getCurrentBatchPool();
-    // make sure an internal pool was created
-    assertNotNull("An internal Thread pool should have been created", pool);
-    // and that the table is using it
-    assertTrue(t.getPool() == pool);
-    t.close();
-
-    t = (HTable)con1.getTable(tableName);
-    // still using the *same* internal pool
-    assertTrue(t.getPool() == pool);
-    t.close();
+    if(table instanceof HTable) {
+      HTable t = (HTable) table;
+      // make sure passing a pool to the getTable does not trigger creation of an internal pool
+      assertNull("Internal Thread pool should be null",
+        ((ConnectionImplementation) con1).getCurrentBatchPool());
+      // table should use the pool passed
+      assertTrue(otherPool == t.getPool());
+      t.close();
+
+      t = (HTable) con2.getTable(tableName);
+      // table should use the connection's internal pool
+      assertTrue(otherPool == t.getPool());
+      t.close();
+
+      t = (HTable) con2.getTable(tableName);
+      // try other API too
+      assertTrue(otherPool == t.getPool());
+      t.close();
+
+      t = (HTable) con2.getTable(tableName);
+      // try other API too
+      assertTrue(otherPool == t.getPool());
+      t.close();
+
+      t = (HTable) con1.getTable(tableName);
+      pool = ((ConnectionImplementation) con1).getCurrentBatchPool();
+      // make sure an internal pool was created
+      assertNotNull("An internal Thread pool should have been created", pool);
+      // and that the table is using it
+      assertTrue(t.getPool() == pool);
+      t.close();
+
+      t = (HTable) con1.getTable(tableName);
+      // still using the *same* internal pool
+      assertTrue(t.getPool() == pool);
+      t.close();
+    } else {
+      table.close();
+    }
 
     con1.close();
+
     // if the pool was created on demand it should be closed upon connection close
-    assertTrue(pool.isShutdown());
+    if(pool != null) {
+      assertTrue(pool.isShutdown());
+    }
 
     con2.close();
     // if the pool is passed, it is not closed
@@ -316,30 +327,27 @@ public class TestHCM {
   public void testOperationTimeout() throws Exception {
     HTableDescriptor hdt = TEST_UTIL.createTableDescriptor("HCM-testOperationTimeout");
     hdt.addCoprocessor(SleepAndFailFirstTime.class.getName());
-    Table t = TEST_UTIL.createTable(hdt, new byte[][]{FAM_NAM});
-    if (t instanceof HTable) {
-      HTable table = (HTable) t;
-      table.setRpcTimeout(Integer.MAX_VALUE);
-      // Check that it works if the timeout is big enough
-      table.setOperationTimeout(120 * 1000);
+    Table table = TEST_UTIL.createTable(hdt, new byte[][]{FAM_NAM});
+    table.setRpcTimeout(Integer.MAX_VALUE);
+    // Check that it works if the timeout is big enough
+    table.setOperationTimeout(120 * 1000);
+    table.get(new Get(FAM_NAM));
+
+    // Resetting and retrying. Will fail this time, not enough time for the second try
+    SleepAndFailFirstTime.ct.set(0);
+    try {
+      table.setOperationTimeout(30 * 1000);
       table.get(new Get(FAM_NAM));
-
-      // Resetting and retrying. Will fail this time, not enough time for the second try
-      SleepAndFailFirstTime.ct.set(0);
-      try {
-        table.setOperationTimeout(30 * 1000);
-        table.get(new Get(FAM_NAM));
-        Assert.fail("We expect an exception here");
-      } catch (SocketTimeoutException e) {
-        // The client has a CallTimeout class, but it's not shared.We're not very clean today,
-        //  in the general case you can expect the call to stop, but the exception may vary.
-        // In this test however, we're sure that it will be a socket timeout.
-        LOG.info("We received an exception, as expected ", e);
-      } catch (IOException e) {
-        Assert.fail("Wrong exception:" + e.getMessage());
-      } finally {
-        table.close();
-      }
+      Assert.fail("We expect an exception here");
+    } catch (SocketTimeoutException e) {
+      // The client has a CallTimeout class, but it's not shared. We're not very clean today,
+      //  in the general case you can expect the call to stop, but the exception may vary.
+      // In this test however, we're sure that it will be a socket timeout.
+      LOG.info("We received an exception, as expected ", e);
+    } catch (IOException e) {
+      Assert.fail("Wrong exception:" + e.getMessage());
+    } finally {
+      table.close();
     }
   }
 
@@ -350,11 +358,9 @@ public class TestHCM {
     Configuration c = new Configuration(TEST_UTIL.getConfiguration());
 
     try (Table t = TEST_UTIL.createTable(hdt, new byte[][] { FAM_NAM }, c)) {
-      assert t instanceof HTable;
-      HTable table = (HTable) t;
-      table.setRpcTimeout(SleepCoprocessor.SLEEP_TIME / 2);
-      table.setOperationTimeout(SleepCoprocessor.SLEEP_TIME * 100);
-      table.get(new Get(FAM_NAM));
+      t.setRpcTimeout(SleepCoprocessor.SLEEP_TIME / 2);
+      t.setOperationTimeout(SleepCoprocessor.SLEEP_TIME * 100);
+      t.get(new Get(FAM_NAM));
     }
   }
 
@@ -373,29 +379,26 @@ public class TestHCM {
     c.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 4000);
 
     Connection connection = ConnectionFactory.createConnection(c);
-    Table t = connection.getTable(TableName.valueOf("HCM-testRpcRetryingCallerSleep"));
-    if (t instanceof HTable) {
-      HTable table = (HTable) t;
-      table.setOperationTimeout(8000);
-      // Check that it works. Because 2s + 3s * RETRY_BACKOFF[0] + 2s < 8s
-      table.get(new Get(FAM_NAM));
+    Table table = connection.getTable(TableName.valueOf("HCM-testRpcRetryingCallerSleep"));
+    table.setOperationTimeout(8000);
+    // Check that it works. Because 2s + 3s * RETRY_BACKOFF[0] + 2s < 8s
+    table.get(new Get(FAM_NAM));
 
-      // Resetting and retrying.
-      SleepAndFailFirstTime.ct.set(0);
-      try {
-        table.setOperationTimeout(6000);
-        // Will fail this time. After sleep, there are not enough time for second retry
-        // Beacuse 2s + 3s + 2s > 6s
-        table.get(new Get(FAM_NAM));
-        Assert.fail("We expect an exception here");
-      } catch (SocketTimeoutException e) {
-        LOG.info("We received an exception, as expected ", e);
-      } catch (IOException e) {
-        Assert.fail("Wrong exception:" + e.getMessage());
-      } finally {
-        table.close();
-        connection.close();
-      }
+    // Resetting and retrying.
+    SleepAndFailFirstTime.ct.set(0);
+    try {
+      table.setOperationTimeout(6000);
+      // Will fail this time. After sleep, there are not enough time for second retry
+      // Because 2s + 3s + 2s > 6s
+      table.get(new Get(FAM_NAM));
+      Assert.fail("We expect an exception here");
+    } catch (SocketTimeoutException e) {
+      LOG.info("We received an exception, as expected ", e);
+    } catch (IOException e) {
+      Assert.fail("Wrong exception:" + e.getMessage());
+    } finally {
+      table.close();
+      connection.close();
     }
   }
 
@@ -404,7 +407,7 @@ public class TestHCM {
     long pauseTime;
     long baseTime = 100;
     TableName tableName = TableName.valueOf("HCM-testCallableSleep");
-    HTable table = TEST_UTIL.createTable(tableName, FAM_NAM);
+    Table table = TEST_UTIL.createTable(tableName, FAM_NAM);
     RegionServerCallable<Object> regionServerCallable = new RegionServerCallable<Object>(
         TEST_UTIL.getConnection(), tableName, ROW) {
       public Object call(int timeout) throws IOException {
@@ -882,15 +885,21 @@ public class TestHCM {
   public void testConnectionManagement() throws Exception{
     Table table0 = TEST_UTIL.createTable(TABLE_NAME1, FAM_NAM);
     Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration());
-    HTable table = (HTable) conn.getTable(TABLE_NAME1);
+    Table table = conn.getTable(TABLE_NAME1);
     table.close();
     assertFalse(conn.isClosed());
-    assertFalse(table.getPool().isShutdown());
-    table = (HTable) conn.getTable(TABLE_NAME1);
+    if(table instanceof HTable) {
+      assertFalse(((HTable) table).getPool().isShutdown());
+    }
+    table = conn.getTable(TABLE_NAME1);
     table.close();
-    assertFalse(table.getPool().isShutdown());
+    if(table instanceof HTable) {
+      assertFalse(((HTable) table).getPool().isShutdown());
+    }
     conn.close();
-    assertTrue(table.getPool().isShutdown());
+    if(table instanceof HTable) {
+      assertTrue(((HTable) table).getPool().isShutdown());
+    }
     table0.close();
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
index 9be6b6c..8c54880 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
@@ -128,7 +128,7 @@ public class TestHTableMultiplexerFlushCache {
     // Region cache (and not just tearing down the entire connection).
     TableName TABLE = TableName.valueOf("testOnRegionMove");
     final int NUM_REGIONS = 10;
-    HTable htable = TEST_UTIL.createTable(TABLE, new byte[][] { FAMILY }, 3,
+    Table htable = TEST_UTIL.createTable(TABLE, new byte[][] { FAMILY }, 3,
       Bytes.toBytes("aaaaa"), Bytes.toBytes("zzzzz"), NUM_REGIONS);
 
     HTableMultiplexer multiplexer = new HTableMultiplexer(TEST_UTIL.getConfiguration(),

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestLeaseRenewal.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestLeaseRenewal.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestLeaseRenewal.java
index 7170299..d8bc591 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestLeaseRenewal.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestLeaseRenewal.java
@@ -95,7 +95,7 @@ public class TestLeaseRenewal {
 
   @Test
   public void testLeaseRenewal() throws Exception {
-    HTable table = TEST_UTIL.createTable(
+    Table table = TEST_UTIL.createTable(
       TableName.valueOf("testLeaseRenewal"), FAMILY);
     Put p = new Put(ROW_BYTES);
     p.addColumn(FAMILY, COL_QUAL, VAL_BYTES);

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
index 5446570..7bd4f93 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
@@ -50,7 +50,6 @@ import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionLocator;
@@ -376,7 +375,7 @@ public class TestRegionObserverInterface {
   @Test (timeout=300000)
   public void testHBASE14489() throws IOException {
     TableName tableName = TableName.valueOf("testHBASE14489");
-    HTable table = util.createTable(tableName, new byte[][] { A });
+    Table table = util.createTable(tableName, new byte[][] { A });
     Put put = new Put(ROW);
     put.addColumn(A, A, A);
     table.put(put);

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 59173ad..12761d3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -66,7 +66,6 @@ import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
@@ -611,7 +610,7 @@ public class TestHFileOutputFormat2  {
       Configuration conf = new Configuration(this.util.getConfiguration());
       Map<String, Compression.Algorithm> familyToCompression =
           getMockColumnFamiliesForCompression(numCfs);
-      Table table = Mockito.mock(HTable.class);
+      Table table = Mockito.mock(Table.class);
       setupMockColumnFamiliesForCompression(table, familyToCompression);
       HFileOutputFormat2.configureCompression(conf, table.getTableDescriptor());
 
@@ -682,7 +681,7 @@ public class TestHFileOutputFormat2  {
       Configuration conf = new Configuration(this.util.getConfiguration());
       Map<String, BloomType> familyToBloomType =
           getMockColumnFamiliesForBloomType(numCfs);
-      Table table = Mockito.mock(HTable.class);
+      Table table = Mockito.mock(Table.class);
       setupMockColumnFamiliesForBloomType(table,
           familyToBloomType);
       HFileOutputFormat2.configureBloomType(table.getTableDescriptor(), conf);
@@ -753,7 +752,7 @@ public class TestHFileOutputFormat2  {
       Configuration conf = new Configuration(this.util.getConfiguration());
       Map<String, Integer> familyToBlockSize =
           getMockColumnFamiliesForBlockSize(numCfs);
-      Table table = Mockito.mock(HTable.class);
+      Table table = Mockito.mock(Table.class);
       setupMockColumnFamiliesForBlockSize(table,
           familyToBlockSize);
       HFileOutputFormat2.configureBlockSize(table.getTableDescriptor(), conf);
@@ -828,7 +827,7 @@ public class TestHFileOutputFormat2  {
       Configuration conf = new Configuration(this.util.getConfiguration());
       Map<String, DataBlockEncoding> familyToDataBlockEncoding =
           getMockColumnFamiliesForDataBlockEncoding(numCfs);
-      Table table = Mockito.mock(HTable.class);
+      Table table = Mockito.mock(Table.class);
       setupMockColumnFamiliesForDataBlockEncoding(table,
           familyToDataBlockEncoding);
       HTableDescriptor tableDescriptor = table.getTableDescriptor();

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
index 6ec2e95..f8bc6ab 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
@@ -28,8 +28,8 @@ import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.TableNamespaceManager;
 import org.apache.hadoop.hbase.master.normalizer.NormalizationPlan.PlanType;
@@ -104,7 +104,7 @@ public class TestSimpleRegionNormalizerOnCluster {
     MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
     HMaster m = cluster.getMaster();
 
-    try (HTable ht = TEST_UTIL.createMultiRegionTable(TABLENAME, FAMILYNAME, 5)) {
+    try (Table ht = TEST_UTIL.createMultiRegionTable(TABLENAME, FAMILYNAME, 5)) {
       // Need to get sorted list of regions here
       List<HRegion> generatedRegions = TEST_UTIL.getHBaseCluster().getRegions(TABLENAME);
       Collections.sort(generatedRegions, new Comparator<HRegion>() {
@@ -182,7 +182,7 @@ public class TestSimpleRegionNormalizerOnCluster {
     HMaster m = cluster.getMaster();
 
     // create 5 regions with sizes to trigger merge of small regions
-    try (HTable ht = TEST_UTIL.createMultiRegionTable(TABLENAME, FAMILYNAME, 5)) {
+    try (Table ht = TEST_UTIL.createMultiRegionTable(TABLENAME, FAMILYNAME, 5)) {
       // Need to get sorted list of regions here
       List<HRegion> generatedRegions = TEST_UTIL.getHBaseCluster().getRegions(TABLENAME);
       Collections.sort(generatedRegions, new Comparator<HRegion>() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
index 54dbe9b..68b0ba3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCorruptedRegionStoreFile.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -92,13 +91,11 @@ public class TestCorruptedRegionStoreFile {
         table.put(put);
 
         if ((rowCount++ % ROW_PER_FILE) == 0) {
-          // flush it
-          ((HTable)table).flushCommits();
-          UTIL.getHBaseAdmin().flush(tableName);
+          UTIL.getAdmin().flush(tableName);
         }
       }
     } finally {
-      UTIL.getHBaseAdmin().flush(tableName);
+      UTIL.getAdmin().flush(tableName);
       table.close();
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
index 2087097..18796bd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
@@ -558,9 +558,8 @@ public class TestRegionServerMetrics {
     htd.addFamily(hcd);
     Connection connection = ConnectionFactory.createConnection(conf);
     Admin admin = connection.getAdmin();
-    HTable t = TEST_UTIL.createTable(htd, new byte[0][0], conf);
+    Table t = TEST_UTIL.createTable(htd, new byte[0][0], conf);
     Region region = rs.getOnlineRegions(tableName).get(0);
-    t.setAutoFlush(true, true);
     for (int insertCount = 0; insertCount < numHfiles; insertCount++) {
       Put p = new Put(Bytes.toBytes(insertCount));
       p.addColumn(cf, qualifier, val);

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java
index 8908c71..ae6b036 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.regionserver.DefaultStoreEngine;
@@ -76,7 +75,7 @@ public class TestFlushWithThroughputController {
       admin.disableTable(tableName);
       admin.deleteTable(tableName);
     }
-    HTable table = TEST_UTIL.createTable(tableName, family);
+    Table table = TEST_UTIL.createTable(tableName, family);
     Random rand = new Random();
     for (int i = 0; i < 10; i++) {
       for (int j = 0; j < 10; j++) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
index da01fb9..bf46b03 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
@@ -39,7 +39,6 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -53,7 +52,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.JVMClusterUtil;
 import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
-import org.apache.hadoop.hbase.wal.FSHLogProvider;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.wal.WALFactory;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -135,7 +133,6 @@ public class TestLogRolling extends AbstractTestLogRolling {
 
     admin.createTable(desc);
     Table table = TEST_UTIL.getConnection().getTable(desc.getTableName());
-    assertTrue(((HTable) table).isAutoFlush());
 
     server = TEST_UTIL.getRSForFirstRegionInTable(desc.getTableName());
     HRegionInfo region = server.getOnlineRegions(desc.getTableName()).get(0).getRegionInfo();

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
index faac8eb..9382bd4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
 import org.apache.hadoop.hbase.ipc.FifoRpcScheduler;
@@ -235,11 +235,11 @@ public class TestTokenAuthentication {
         public Configuration getConfiguration() { return conf; }
 
         @Override
-        public HTableInterface getTable(TableName tableName) throws IOException
+        public Table getTable(TableName tableName) throws IOException
           { return null; }
 
         @Override
-        public HTableInterface getTable(TableName tableName, ExecutorService service)
+        public Table getTable(TableName tableName, ExecutorService service)
             throws IOException {
           return null;
         }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
index efca102..755e5ba 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
@@ -19,20 +19,14 @@
 
 package org.apache.hadoop.hbase.tool;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.master.HMaster;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.zookeeper.ZKUtil;
-import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.log4j.Appender;
 import org.apache.log4j.LogManager;
@@ -87,7 +81,7 @@ public class TestCanaryTool {
   @Test
   public void testBasicCanaryWorks() throws Exception {
     TableName tableName = TableName.valueOf("testTable");
-    HTable table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
+    Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY });
     // insert some test rows
     for (int i=0; i<1000; i++) {
       byte[] iBytes = Bytes.toBytes(i);

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
index 23b999e..77443e1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
@@ -32,7 +32,6 @@ import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.client.Get;
 
 import org.apache.hadoop.hbase.client.Consistency;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
index 84cc47d..cdf814c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReaderWithACL.java
@@ -26,7 +26,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.security.User;
@@ -69,7 +68,7 @@ public class MultiThreadedReaderWithACL extends MultiThreadedReader {
     }
 
     @Override
-    protected HTableInterface createTable() throws IOException {
+    protected Table createTable() throws IOException {
       return null;
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
index 0f3baf9..6c816cf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
@@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
index 756f612..bf27dde 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdaterWithACL.java
@@ -31,7 +31,6 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
@@ -84,7 +83,7 @@ public class MultiThreadedUpdaterWithACL extends MultiThreadedUpdater {
     }
 
     @Override
-    protected HTableInterface createTable() throws IOException {
+    protected Table createTable() throws IOException {
       return null;
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
index 32a06bb..d53ab25 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
@@ -33,7 +33,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
 import org.apache.hadoop.hbase.client.Table;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
index d3cba2b..4806288 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriterWithACL.java
@@ -26,7 +26,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
 import org.apache.hadoop.hbase.client.Table;
@@ -71,7 +70,7 @@ public class MultiThreadedWriterWithACL extends MultiThreadedWriter {
     }
 
     @Override
-    protected HTableInterface createTable() throws IOException {
+    protected Table createTable() throws IOException {
       return null;
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a1f0c1cb/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
index 9f60d61..9dea9a5 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
@@ -74,7 +74,7 @@ import org.apache.thrift.TException;
 
 /**
  * This class is a glue object that connects Thrift RPC calls to the HBase client API primarily
- * defined in the HTableInterface.
+ * defined in the Table interface.
  */
 @InterfaceAudience.Private
 @SuppressWarnings("deprecation")


Mime
View raw message