From: enis@apache.org
To: commits@hbase.apache.org
Date: Tue, 09 Sep 2014 06:40:51 -0000
Subject: [8/8] git commit: HBASE-11679 Replace HTable with HTableInterface where backwards-compatible (Carter)

HBASE-11679 Replace HTable with HTableInterface where backwards-compatible (Carter)

Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4995ed8a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4995ed8a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4995ed8a

Branch: refs/heads/master
Commit: 4995ed8a029feb8ccac8054f56f23261a6918add
Parents: 71e6ff4
Author: Enis Soztutar
Authored: Mon Sep 8 23:39:34 2014 -0700
Committer: Enis Soztutar
Committed: Mon Sep 8 23:39:34 2014 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hbase/MetaTableAccessor.java | 29 +-
 .../org/apache/hadoop/hbase/client/HTable.java | 3 +-
 .../apache/hadoop/hbase/client/HTableUtil.java | 5 +-
 .../client/coprocessor/AggregationClient.java | 35 ++--
 .../coprocessor/SecureBulkLoadClient.java | 6 +-
 .../security/access/AccessControlClient.java | 10 +-
 .../security/visibility/VisibilityClient.java | 7 +-
 .../hbase/client/TestClientNoCluster.java | 10 +-
 .../hbase/client/TestSnapshotFromAdmin.java | 6 +-
 .../example/TestBulkDeleteProtocol.java | 23 ++-
 .../example/TestRowCountEndpoint.java | 7 +-
 .../TestZooKeeperScanPolicyObserver.java | 3 +-
 .../hadoop/hbase/DistributedHBaseCluster.java | 3 +-
 .../hbase/IntegrationTestLazyCfLoading.java | 3 +-
 .../mapreduce/IntegrationTestImportTsv.java | 3 +-
 .../hadoop/hbase/mttr/IntegrationTestMTTR.java | 8 +-
 .../test/IntegrationTestBigLinkedList.java | 14 +-
 ...egrationTestBigLinkedListWithVisibility.java | 10 +-
 .../test/IntegrationTestLoadAndVerify.java | 2 +-
 ...stTimeBoundedRequestsWithRegionReplicas.java | 4 +-
 ...tionTestWithCellVisibilityLoadAndVerify.java | 3 +-
 .../trace/IntegrationTestSendTraceRequests.java | 5 +-
 .../apache/hadoop/hbase/LocalHBaseCluster.java | 3 +-
 .../hbase/coprocessor/CoprocessorHost.java | 1 +
 .../hadoop/hbase/mapred/HRegionPartitioner.java | 3 +-
 .../hadoop/hbase/mapred/TableOutputFormat.java | 5 +-
 .../hadoop/hbase/mapred/TableRecordReader.java | 3 +-
 .../hbase/mapred/TableRecordReaderImpl.java | 5 +-
 .../DefaultVisibilityExpressionResolver.java | 3 +-
 .../hbase/mapreduce/HFileOutputFormat.java | 9 +-
 .../hbase/mapreduce/HFileOutputFormat2.java | 12 +-
 .../hbase/mapreduce/HRegionPartitioner.java | 3 +-
 .../hadoop/hbase/mapreduce/ImportTsv.java | 3 +-
 .../hbase/mapreduce/LoadIncrementalHFiles.java | 11 +-
 .../mapreduce/MultiTableInputFormatBase.java | 3 +-
 .../hbase/mapreduce/TableInputFormatBase.java | 2 -
 .../hbase/mapreduce/TableOutputFormat.java | 5 +-
 .../hbase/mapreduce/TableRecordReader.java | 3 +-
 .../hbase/mapreduce/TableRecordReaderImpl.java | 5 +-
 .../replication/VerifyReplication.java | 3 +-
 .../hadoop/hbase/master/HMasterCommandLine.java | 4 +-
 .../hbase/master/RegionPlacementMaintainer.java | 5 +-
 .../hbase/master/TableNamespaceManager.java | 11 +-
 .../balancer/FavoredNodeAssignmentHelper.java | 3 +-
 .../master/handler/ModifyTableHandler.java | 3 +-
 .../regionserver/ReplicationSink.java | 4 +-
 .../apache/hadoop/hbase/rest/RESTServlet.java | 4 +-
 .../apache/hadoop/hbase/rest/RowResource.java | 12 +-
 .../hadoop/hbase/rest/RowResultGenerator.java | 4 +-
 .../hbase/rest/ScannerResultGenerator.java | 4 +-
 .../hadoop/hbase/rest/SchemaResource.java | 4 +-
 .../apache/hadoop/hbase/rest/TableResource.java | 4 +-
 .../security/access/AccessControlLists.java | 15 +-
 .../hadoop/hbase/security/token/TokenUtil.java | 3 +-
 .../org/apache/hadoop/hbase/tool/Canary.java | 9 +-
 .../org/apache/hadoop/hbase/util/HBaseFsck.java | 8 +-
 .../hadoop/hbase/util/HBaseFsckRepair.java | 12 +-
 .../org/apache/hadoop/hbase/util/HMerge.java | 6 +-
 .../hadoop/hbase/util/RegionSizeCalculator.java | 4 +-
 .../org/apache/hadoop/hbase/HBaseTestCase.java | 5 +-
 .../hadoop/hbase/HBaseTestingUtility.java | 33 +--
 .../hadoop/hbase/PerformanceEvaluation.java | 6 +-
 .../hadoop/hbase/ScanPerformanceEvaluation.java | 3 +-
 .../apache/hadoop/hbase/TestAcidGuarantees.java | 7 +-
 .../hadoop/hbase/TestHBaseTestingUtility.java | 6 +-
 .../hadoop/hbase/TestMetaTableAccessor.java | 5 +-
 .../apache/hadoop/hbase/TestMultiVersions.java | 8 +-
 .../org/apache/hadoop/hbase/TestNamespace.java | 3 +-
 .../hadoop/hbase/TestRegionRebalancing.java | 6 +-
 .../org/apache/hadoop/hbase/TestZooKeeper.java | 16 +-
 .../apache/hadoop/hbase/client/TestAdmin.java | 20 +-
 .../client/TestClientOperationInterrupt.java | 6 +-
 .../client/TestClientScannerRPCTimeout.java | 4 +-
 .../hadoop/hbase/client/TestClientTimeouts.java | 2 +-
 .../hadoop/hbase/client/TestFromClientSide.java | 180 ++++++++---------
 .../hbase/client/TestFromClientSide3.java | 10 +-
 .../hbase/client/TestHBaseAdminNoCluster.java | 3 +-
 .../org/apache/hadoop/hbase/client/TestHCM.java | 12 +-
 .../hadoop/hbase/client/TestMultiParallel.java | 22 +-
 .../hbase/client/TestMultipleTimestamps.java | 32 +--
 .../hadoop/hbase/client/TestPutWithDelete.java | 2 +-
 .../hbase/client/TestReplicaWithCluster.java | 12 +-
 .../hadoop/hbase/client/TestReplicasClient.java | 2 +-
 .../hbase/client/TestRpcControllerFactory.java | 4 +-
 .../hadoop/hbase/client/TestScannerTimeout.java | 10 +-
 .../client/TestScannersFromClientSide.java | 8 +-
 .../client/TestSnapshotCloneIndependence.java | 2 +-
 .../hbase/client/TestSnapshotMetadata.java | 6 +-
 .../hbase/client/TestTimestampsFilter.java | 16 +-
 .../hadoop/hbase/constraint/TestConstraint.java | 11 +-
 .../TestBatchCoprocessorEndpoint.java | 16 +-
 .../coprocessor/TestCoprocessorEndpoint.java | 11 +-
 .../hbase/coprocessor/TestHTableWrapper.java | 4 +-
 .../coprocessor/TestOpenTableInCoprocessor.java | 13 +-
 .../coprocessor/TestRegionObserverBypass.java | 9 +-
 .../TestRegionObserverInterface.java | 21 +-
 .../TestRegionObserverScannerOpenHook.java | 3 +-
 .../coprocessor/TestRegionServerObserver.java | 3 +-
 .../coprocessor/TestRowProcessorEndpoint.java | 8 +-
 .../hbase/filter/TestColumnRangeFilter.java | 7 +-
 .../hbase/filter/TestFilterWithScanLimits.java | 5 +-
 .../hadoop/hbase/filter/TestFilterWrapper.java | 6 +-
 .../TestFuzzyRowAndColumnRangeFilter.java | 8 +-
 .../hadoop/hbase/fs/TestBlockReorder.java | 4 +-
 .../hbase/io/encoding/TestChangingEncoding.java | 5 +-
 .../hbase/io/encoding/TestPrefixTree.java | 3 +-
 .../hbase/mapred/TestTableInputFormat.java | 38 ++--
 .../hbase/mapred/TestTableMapReduceUtil.java | 10 +-
 .../hadoop/hbase/mapreduce/TestCellCounter.java | 4 +-
 .../hadoop/hbase/mapreduce/TestCopyTable.java | 14 +-
 .../hbase/mapreduce/TestHFileOutputFormat.java | 28 +--
 .../hbase/mapreduce/TestHFileOutputFormat2.java | 28 +--
 .../hbase/mapreduce/TestImportExport.java | 19 +-
 .../TestImportTSVWithOperationAttributes.java | 6 +-
 .../TestImportTSVWithVisibilityLabels.java | 8 +-
 .../hadoop/hbase/mapreduce/TestImportTsv.java | 3 +-
 .../mapreduce/TestLoadIncrementalHFiles.java | 4 +-
 .../TestLoadIncrementalHFilesSplitRecovery.java | 5 +-
 .../mapreduce/TestMultithreadedTableMapper.java | 5 +-
 .../hadoop/hbase/mapreduce/TestRowCounter.java | 6 +-
 .../hbase/mapreduce/TestTableMapReduceBase.java | 5 +-
 .../hbase/mapreduce/TestTimeRangeMapRed.java | 12 +-
 .../hadoop/hbase/mapreduce/TestWALPlayer.java | 6 +-
 .../hbase/master/TestAssignmentListener.java | 3 +-
 .../master/TestAssignmentManagerOnCluster.java | 27 +--
 .../master/TestDistributedLogSplitting.java | 25 +--
 .../hadoop/hbase/master/TestMasterFailover.java | 4 +-
 .../TestMasterOperationsForRegionReplicas.java | 6 +-
 .../TestMasterRestartAfterDisablingTable.java | 3 +-
 .../hbase/master/TestMasterTransitions.java | 5 +-
 .../hbase/master/TestRegionPlacement.java | 4 +-
 .../hbase/regionserver/TestCompactionState.java | 8 +-
 .../regionserver/TestEncryptionKeyRotation.java | 3 +-
 .../TestEncryptionRandomKeying.java | 3 +-
 .../TestEndToEndSplitTransaction.java | 15 +-
 .../hbase/regionserver/TestFSErrorsExposed.java | 3 +-
 .../hadoop/hbase/regionserver/TestHRegion.java | 4 +-
 .../regionserver/TestHRegionOnCluster.java | 4 +-
 .../hbase/regionserver/TestJoinedScanners.java | 5 +-
 .../TestRegionMergeTransactionOnCluster.java | 18 +-
 .../hbase/regionserver/TestRegionReplicas.java | 3 +-
 .../regionserver/TestRegionServerMetrics.java | 10 +-
 .../regionserver/TestSCVFWithMiniCluster.java | 4 +-
 .../regionserver/TestScannerWithBulkload.java | 3 +-
 .../regionserver/TestServerCustomProtocol.java | 22 +-
 .../TestSplitTransactionOnCluster.java | 20 +-
 .../hadoop/hbase/regionserver/TestTags.java | 11 +-
 .../regionserver/wal/TestHLogFiltering.java | 4 +-
 .../regionserver/wal/TestLogRollAbort.java | 4 +-
 .../regionserver/wal/TestLogRollPeriod.java | 10 +-
 .../hbase/regionserver/wal/TestLogRolling.java | 23 ++-
 .../hbase/regionserver/wal/TestWALReplay.java | 3 +-
 .../replication/TestMasterReplication.java | 26 +--
 .../replication/TestMultiSlaveReplication.java | 21 +-
 .../replication/TestPerTableCFReplication.java | 31 +--
 .../hbase/replication/TestReplicationBase.java | 8 +-
 .../replication/TestReplicationSyncUpTool.java | 6 +-
 .../replication/TestReplicationWithTags.java | 8 +-
 .../TestRegionReplicaReplicationEndpoint.java | 12 +-
 .../regionserver/TestReplicationSink.java | 6 +-
 .../hadoop/hbase/rest/TestGzipFilter.java | 5 +-
 .../hadoop/hbase/rest/TestScannerResource.java | 4 +-
 .../hbase/rest/TestScannersWithFilters.java | 4 +-
 .../hbase/rest/TestScannersWithLabels.java | 4 +-
 .../hbase/rest/client/TestRemoteTable.java | 6 +-
 .../hbase/security/access/SecureTestUtil.java | 15 +-
 .../access/TestAccessControlFilter.java | 11 +-
 .../security/access/TestAccessController.java | 65 +++---
 .../security/access/TestAccessController2.java | 3 +-
 .../access/TestCellACLWithMultipleVersions.java | 60 +++---
 .../hbase/security/access/TestCellACLs.java | 34 ++--
 .../security/access/TestNamespaceCommands.java | 7 +-
 .../access/TestScanEarlyTermination.java | 12 +-
 .../security/access/TestTablePermissions.java | 3 +-
 .../ExpAsStringVisibilityLabelServiceImpl.java | 3 +-
 .../TestEnforcingScanLabelGenerator.java | 5 +-
 .../visibility/TestVisibilityLabels.java | 39 ++--
 .../visibility/TestVisibilityLabelsWithACL.java | 9 +-
 ...ibilityLabelsWithDefaultVisLabelService.java | 4 +-
 .../TestVisibilityLabelsWithDeletes.java | 199 ++++++++++---------
 .../TestVisibilityLabelsWithSLGStack.java | 4 +-
 .../TestVisibilityWithCheckAuths.java | 13 +-
 .../hbase/snapshot/SnapshotTestingUtils.java | 6 +-
 .../hbase/snapshot/TestExportSnapshot.java | 10 +-
 .../hadoop/hbase/trace/TestHTraceHooks.java | 4 +-
 .../apache/hadoop/hbase/util/LoadTestTool.java | 3 +-
 .../hadoop/hbase/util/MultiThreadedReader.java | 9 +-
 .../hbase/util/MultiThreadedReaderWithACL.java | 7 +-
 .../hadoop/hbase/util/MultiThreadedUpdater.java | 11 +-
 .../hbase/util/MultiThreadedUpdaterWithACL.java | 13 +-
 .../hadoop/hbase/util/MultiThreadedWriter.java | 5 +-
 .../hbase/util/MultiThreadedWriterBase.java | 4 +-
 .../hbase/util/MultiThreadedWriterWithACL.java | 7 +-
 .../hadoop/hbase/util/RestartMetaTest.java | 3 +-
 .../hbase/util/TestCoprocessorScanPolicy.java | 5 +-
 .../apache/hadoop/hbase/util/TestHBaseFsck.java | 37 ++--
 .../hbase/util/TestHBaseFsckEncryption.java | 3 +-
 .../hadoop/hbase/util/TestMergeTable.java | 3 +-
 .../util/TestMiniClusterLoadSequential.java | 5 +-
 .../hbase/util/TestProcessBasedCluster.java | 3 +-
 .../hbase/util/TestRegionSizeCalculator.java | 11 +-
 .../util/hbck/OfflineMetaRebuildTestCore.java | 13 +-
 .../hadoop/hbase/thrift/IncrementCoalescer.java | 4 +-
 .../thrift2/ThriftHBaseServiceHandler.java | 33 +--
 .../hadoop/hbase/thrift2/TestHTablePool.java | 53 ++--
 .../thrift2/TestThriftHBaseServiceHandler.java | 3 +-
 ...TestThriftHBaseServiceHandlerWithLabels.java | 3 +-
 207 files changed, 1226 insertions(+), 1105 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
index c63e4c6..af9b587 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
@@ -174,7 +175,7 @@ public class MetaTableAccessor {
    * @throws IOException
    * @SuppressWarnings("deprecation")
    */
-  private static HTable getHTable(final HConnection hConnection,
+  private static Table getHTable(final HConnection hConnection,
       final TableName tableName)
   throws IOException {
     // We used to pass whole CatalogTracker in here, now we just pass in HConnection
@@ -190,7 +191,7 @@ public class MetaTableAccessor {
    * @return An {@link HTable} for <code>hbase:meta</code>
    * @throws IOException
    */
-  static HTable getMetaHTable(final HConnection hConnection)
+  static Table getMetaHTable(final HConnection hConnection)
   throws IOException {
     return getHTable(hConnection, TableName.META_TABLE_NAME);
   }
@@ -200,7 +201,7 @@ public class MetaTableAccessor {
    * @param g Get to run
    * @throws IOException
    */
-  private static Result get(final HTable t, final Get g) throws IOException {
+  private static Result get(final Table t, final Get g) throws IOException {
     try {
       return t.get(g);
     } finally {
@@ -599,7 +600,7 @@ public class MetaTableAccessor {
       scan.setCaching(caching);
     }
     scan.addFamily(HConstants.CATALOG_FAMILY);
-    HTable metaTable = getMetaHTable(hConnection);
+    Table metaTable = getMetaHTable(hConnection);
     ResultScanner scanner = null;
     try {
       scanner = metaTable.getScanner(scan);
@@ -948,7 +949,7 @@ public class MetaTableAccessor {
    * @param p put to make
    * @throws IOException
    */
-  private static void put(final HTable t, final Put p) throws IOException {
+  private static void put(final Table t, final Put p) throws IOException {
     try {
       t.put(p);
     } finally {
@@ -964,7 +965,7 @@ public class MetaTableAccessor {
    */
   public static void putsToMetaTable(final HConnection hConnection, final List<Put> ps)
     throws IOException {
-    HTable t = getMetaHTable(hConnection);
+    Table t = getMetaHTable(hConnection);
     try {
       t.put(ps);
     } finally {
@@ -993,7 +994,7 @@ public class MetaTableAccessor {
    */
   public static void deleteFromMetaTable(final HConnection hConnection, final List<Delete> deletes)
     throws IOException {
-    HTable t = getMetaHTable(hConnection);
+    Table t = getMetaHTable(hConnection);
     try {
       t.delete(deletes);
     } finally {
@@ -1036,7 +1037,7 @@ public class MetaTableAccessor {
   public static void mutateMetaTable(final HConnection hConnection,
                                      final List<Mutation> mutations)
     throws IOException {
-    HTable t = getMetaHTable(hConnection);
+    Table t = getMetaHTable(hConnection);
     try {
       t.batch(mutations);
     } catch (InterruptedException e) {
@@ -1068,7 +1069,7 @@ public class MetaTableAccessor {
    * @param regionInfo region information
    * @throws IOException if problem connecting or updating meta
    */
-  public static void addRegionToMeta(HTable meta, HRegionInfo regionInfo) throws IOException {
+  public static void addRegionToMeta(Table meta, HRegionInfo regionInfo) throws IOException {
     addRegionToMeta(meta, regionInfo, null, null);
   }
@@ -1085,7 +1086,7 @@ public class MetaTableAccessor {
    * @param splitB second split daughter of the parent regionInfo
    * @throws IOException if problem connecting or updating meta
    */
-  public static void addRegionToMeta(HTable meta, HRegionInfo regionInfo,
+  public static void addRegionToMeta(Table meta, HRegionInfo regionInfo,
       HRegionInfo splitA, HRegionInfo splitB) throws IOException {
     Put put = makePutFromRegionInfo(regionInfo);
     addDaughtersToPut(put, splitA, splitB);
@@ -1109,7 +1110,7 @@ public class MetaTableAccessor {
    */
   public static void addRegionToMeta(HConnection hConnection, HRegionInfo regionInfo,
       HRegionInfo splitA, HRegionInfo splitB) throws IOException {
-    HTable meta = getMetaHTable(hConnection);
+    Table meta = getMetaHTable(hConnection);
     try {
       addRegionToMeta(meta, regionInfo, splitA, splitB);
     } finally {
@@ -1168,7 +1169,7 @@ public class MetaTableAccessor {
    */
   public static void mergeRegions(final HConnection hConnection, HRegionInfo mergedRegion,
       HRegionInfo regionA, HRegionInfo regionB, ServerName sn) throws IOException {
-    HTable meta = getMetaHTable(hConnection);
+    Table meta = getMetaHTable(hConnection);
     try {
       HRegionInfo copyOfMerged = new HRegionInfo(mergedRegion);
@@ -1208,7 +1209,7 @@ public class MetaTableAccessor {
   public static void splitRegion(final HConnection hConnection,
       HRegionInfo parent, HRegionInfo splitA, HRegionInfo splitB,
       ServerName sn) throws IOException {
-    HTable meta = getMetaHTable(hConnection);
+    Table meta = getMetaHTable(hConnection);
     try {
       HRegionInfo copyOfParent = new HRegionInfo(parent);
       copyOfParent.setOffline(true);
@@ -1235,7 +1236,7 @@ public class MetaTableAccessor {
   /**
    * Performs an atomic multi-Mutate operation against the given table.
    */
-  private static void multiMutate(HTable table, byte[] row, Mutation... mutations)
+  private static void multiMutate(Table table, byte[] row, Mutation... mutations)
       throws IOException {
     CoprocessorRpcChannel channel = table.coprocessorService(row);
     MultiRowMutationProtos.MutateRowsRequest.Builder mmrBuilder
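The hunks above all apply the same refactoring: parameters, locals, and return types move from the concrete HTable class to the new Table interface, while construction still goes through HTable. A minimal sketch of that pattern, assuming the 0.99-era client API this diff targets (the class and method names below are illustrative, not part of the patch):

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.Table;

    final class MetaHelpers {
      // Accepting Table instead of HTable lets callers pass any implementation
      // (a plain HTable, a pooled table, a coprocessor-wrapped table) unchanged.
      static Result getAndClose(Table t, Get g) throws IOException {
        try {
          return t.get(g);   // Table declares the same data operations HTable had
        } finally {
          t.close();         // Table is Closeable, so cleanup is identical
        }
      }
    }
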
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index 8a68c56..40d46c4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -22,7 +22,6 @@ import java.io.Closeable;
 import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.LinkedList;
 import java.util.List;
@@ -1779,7 +1778,7 @@ public class HTable implements HTableInterface, RegionLocator {
    * @throws IOException
    */
   public static void main(String[] args) throws IOException {
-    HTable t = new HTable(HBaseConfiguration.create(), args[0]);
+    Table t = new HTable(HBaseConfiguration.create(), args[0]);
     try {
       System.out.println(t.get(new Get(Bytes.toBytes(args[1]))));
     } finally {

http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableUtil.java
index 27aec10..04b3fea 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableUtil.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.client;

 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HRegionLocation;

 import java.io.IOException;
@@ -102,7 +101,7 @@ public class HTableUtil {
   }

-  private static Map<String, List<Put>> createRsPutMap(HTable htable, List<Put> puts) throws IOException {
+  private static Map<String, List<Put>> createRsPutMap(RegionLocator htable, List<Put> puts) throws IOException {
     Map<String, List<Put>> putMap = new HashMap<String, List<Put>>();
     for (Put put: puts) {
@@ -118,7 +117,7 @@ public class HTableUtil {
     return putMap;
   }

-  private static Map<String, List<Row>> createRsRowMap(HTable htable, List<Row> rows) throws IOException {
+  private static Map<String, List<Row>> createRsRowMap(RegionLocator htable, List<Row> rows) throws IOException {
     Map<String, List<Row>> rowMap = new HashMap<String, List<Row>>();
     for (Row row: rows) {
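HTableUtil's helpers now take RegionLocator rather than HTable: per the HTable.java hunk above, HTable implements both HTableInterface and RegionLocator, so utilities that only need row-to-region mapping can ask for the narrower type. A sketch of the idea under that assumption (groupPutsByServer is a hypothetical name, not the patch's code):

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import org.apache.hadoop.hbase.HRegionLocation;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.RegionLocator;

    final class RegionGrouping {
      // Group puts by the server hosting their target region, using only the
      // RegionLocator slice of HTable's functionality.
      static Map<String, List<Put>> groupPutsByServer(RegionLocator locator, List<Put> puts)
          throws IOException {
        Map<String, List<Put>> byServer = new HashMap<String, List<Put>>();
        for (Put put : puts) {
          HRegionLocation location = locator.getRegionLocation(put.getRow());
          String hostnamePort = location.getHostnamePort();
          List<Put> group = byServer.get(hostnamePort);
          if (group == null) {
            group = new ArrayList<Put>();
            byServer.put(hostnamePort, group);
          }
          group.add(put);
        }
        return byServer;
      }
    }
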
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
index 3b37238..9058289 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.coprocessor.ColumnInterpreter;
 import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
@@ -101,7 +102,7 @@ public class AggregationClient {
   public <R, S, P extends Message, Q extends Message, T extends Message> R max(
       final TableName tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
       throws Throwable {
-    HTable table = null;
+    Table table = null;
     try {
       table = new HTable(conf, tableName);
       return max(table, ci, scan);
@@ -125,7 +126,7 @@ public class AggregationClient {
    * & propagated to it.
    */
   public <R, S, P extends Message, Q extends Message, T extends Message>
-  R max(final HTable table, final ColumnInterpreter<R, S, P, Q, T> ci,
+  R max(final Table table, final ColumnInterpreter<R, S, P, Q, T> ci,
       final Scan scan) throws Throwable {
     final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false);
     class MaxCallBack implements Batch.Callback<R> {
@@ -196,7 +197,7 @@ public class AggregationClient {
   public <R, S, P extends Message, Q extends Message, T extends Message> R min(
       final TableName tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
       throws Throwable {
-    HTable table = null;
+    Table table = null;
     try {
       table = new HTable(conf, tableName);
       return min(table, ci, scan);
@@ -218,7 +219,7 @@ public class AggregationClient {
    * @throws Throwable
    */
   public <R, S, P extends Message, Q extends Message, T extends Message>
-  R min(final HTable table, final ColumnInterpreter<R, S, P, Q, T> ci,
+  R min(final Table table, final ColumnInterpreter<R, S, P, Q, T> ci,
       final Scan scan) throws Throwable {
     final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false);
     class MinCallBack implements Batch.Callback<R> {
@@ -276,7 +277,7 @@ public class AggregationClient {
   public <R, S, P extends Message, Q extends Message, T extends Message> long rowCount(
       final TableName tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
       throws Throwable {
-    HTable table = null;
+    Table table = null;
    try {
      table = new HTable(conf, tableName);
      return rowCount(table, ci, scan);
@@ -301,7 +302,7 @@ public class AggregationClient {
    * @throws Throwable
    */
   public <R, S, P extends Message, Q extends Message, T extends Message>
-  long rowCount(final HTable table,
+  long rowCount(final Table table,
       final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan) throws Throwable {
     final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, true);
     class RowNumCallback implements Batch.Callback<Long> {
@@ -350,7 +351,7 @@ public class AggregationClient {
   public <R, S, P extends Message, Q extends Message, T extends Message> S sum(
       final TableName tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
       throws Throwable {
-    HTable table = null;
+    Table table = null;
     try {
       table = new HTable(conf, tableName);
       return sum(table, ci, scan);
@@ -371,7 +372,7 @@ public class AggregationClient {
    * @throws Throwable
    */
   public <R, S, P extends Message, Q extends Message, T extends Message>
-  S sum(final HTable table, final ColumnInterpreter<R, S, P, Q, T> ci,
+  S sum(final Table table, final ColumnInterpreter<R, S, P, Q, T> ci,
       final Scan scan) throws Throwable {
     final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false);
@@ -423,7 +424,7 @@ public class AggregationClient {
   private <R, S, P extends Message, Q extends Message, T extends Message> Pair<S, Long> getAvgArgs(
       final TableName tableName, final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan)
       throws Throwable {
-    HTable table = null;
+    Table table = null;
     try {
       table = new HTable(conf, tableName);
       return getAvgArgs(table, ci, scan);
@@ -443,7 +444,7 @@ public class AggregationClient {
    * @throws Throwable
    */
   private <R, S, P extends Message, Q extends Message, T extends Message>
-  Pair<S, Long> getAvgArgs(final HTable table,
+  Pair<S, Long> getAvgArgs(final Table table,
       final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan) throws Throwable {
     final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false);
     class AvgCallBack implements Batch.Callback<Pair<S, Long>> {
@@ -523,7 +524,7 @@ public class AggregationClient {
    * @throws Throwable
    */
   public <R, S, P extends Message, Q extends Message, T extends Message> double avg(
-      final HTable table, final ColumnInterpreter<R, S, P, Q, T> ci, Scan scan) throws Throwable {
+      final Table table, final ColumnInterpreter<R, S, P, Q, T> ci, Scan scan) throws Throwable {
     Pair<S, Long> p = getAvgArgs(table, ci, scan);
     return ci.divideForAvg(p.getFirst(), p.getSecond());
   }
@@ -540,7 +541,7 @@ public class AggregationClient {
    * @throws Throwable
    */
   private <R, S, P extends Message, Q extends Message, T extends Message>
-  Pair<List<S>, Long> getStdArgs(final HTable table,
+  Pair<List<S>, Long> getStdArgs(final Table table,
       final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan) throws Throwable {
     final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false);
     class StdCallback implements Batch.Callback<Pair<List<S>, Long>> {
@@ -614,7 +615,7 @@ public class AggregationClient {
   public <R, S, P extends Message, Q extends Message, T extends Message>
   double std(final TableName tableName, ColumnInterpreter<R, S, P, Q, T> ci,
       Scan scan) throws Throwable {
-    HTable table = null;
+    Table table = null;
     try {
       table = new HTable(conf, tableName);
       return std(table, ci, scan);
@@ -638,7 +639,7 @@ public class AggregationClient {
    * @throws Throwable
    */
   public <R, S, P extends Message, Q extends Message, T extends Message> double std(
-      final HTable table, ColumnInterpreter<R, S, P, Q, T> ci, Scan scan) throws Throwable {
+      final Table table, ColumnInterpreter<R, S, P, Q, T> ci, Scan scan) throws Throwable {
     Pair<List<S>, Long> p = getStdArgs(table, ci, scan);
     double res = 0d;
     double avg = ci.divideForAvg(p.getFirst().get(0), p.getSecond());
@@ -662,7 +663,7 @@ public class AggregationClient {
    */
   private <R, S, P extends Message, Q extends Message, T extends Message>
   Pair<NavigableMap<byte[], List<S>>, List<S>>
-  getMedianArgs(final HTable table,
+  getMedianArgs(final Table table,
       final ColumnInterpreter<R, S, P, Q, T> ci, final Scan scan) throws Throwable {
     final AggregateRequest requestArg = validateArgAndGetPB(scan, ci, false);
     final NavigableMap<byte[], List<S>> map =
@@ -727,7 +728,7 @@ public class AggregationClient {
   public <R, S, P extends Message, Q extends Message, T extends Message>
   R median(final TableName tableName, ColumnInterpreter<R, S, P, Q, T> ci,
       Scan scan) throws Throwable {
-    HTable table = null;
+    Table table = null;
     try {
       table = new HTable(conf, tableName);
       return median(table, ci, scan);
@@ -749,7 +750,7 @@ public class AggregationClient {
    * @throws Throwable
    */
   public <R, S, P extends Message, Q extends Message, T extends Message>
-  R median(final HTable table, ColumnInterpreter<R, S, P, Q, T> ci,
+  R median(final Table table, ColumnInterpreter<R, S, P, Q, T> ci,
       Scan scan) throws Throwable {
     Pair<NavigableMap<byte[], List<S>>, List<S>> p = getMedianArgs(table, ci, scan);
     byte[] startRow = null;
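AggregationClient keeps a two-level shape per operation: the TableName overload opens an HTable internally, delegates to the Table-typed overload, and closes the table in a finally block. The same shape, sketched with a hypothetical count operation (CountClient and doCount are illustrative names, not part of the patch):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Table;

    public class CountClient {
      private final Configuration conf;

      public CountClient(Configuration conf) {
        this.conf = conf;
      }

      // Convenience overload: owns the table lifecycle.
      public long count(TableName tableName) throws Throwable {
        Table table = null;
        try {
          table = new HTable(conf, tableName);
          return count(table);
        } finally {
          if (table != null) {
            table.close();
          }
        }
      }

      // Core overload: works against any Table the caller manages.
      public long count(Table table) throws Throwable {
        return doCount(table);
      }

      private long doCount(Table table) throws IOException {
        return 0L; // placeholder for the real per-table work
      }
    }
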
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java
index 48986b1..c4ce866 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java
@@ -21,12 +21,12 @@ package org.apache.hadoop.hbase.client.coprocessor;
 import static org.apache.hadoop.hbase.HConstants.EMPTY_START_ROW;
 import static org.apache.hadoop.hbase.HConstants.LAST_ROW;

+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
@@ -47,9 +47,9 @@ import java.util.List;
  */
 @InterfaceAudience.Private
 public class SecureBulkLoadClient {
-  private HTable table;
+  private Table table;

-  public SecureBulkLoadClient(HTable table) {
+  public SecureBulkLoadClient(Table table) {
     this.table = table;
   }
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
index 35c1412..87db23d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
@@ -32,8 +32,10 @@ import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.ZooKeeperConnectionException;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
@@ -70,7 +72,7 @@ public class AccessControlClient {
   public static GrantResponse grant(Configuration conf, final TableName tableName,
       final String userName, final byte[] family, final byte[] qual,
       final AccessControlProtos.Permission.Action... actions) throws Throwable {
-    HTable ht = null;
+    Table ht = null;
     try {
       TableName aclTableName =
           TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "acl");
@@ -150,7 +152,7 @@ public class AccessControlClient {
   public static RevokeResponse revoke(Configuration conf, final String username,
       final TableName tableName, final byte[] family, final byte[] qualifier,
       final AccessControlProtos.Permission.Action... actions) throws Throwable {
-    HTable ht = null;
+    Table ht = null;
     try {
       TableName aclTableName =
           TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "acl");
@@ -211,8 +213,8 @@ public class AccessControlClient {
   public static List<UserPermission> getUserPermissions(Configuration conf, String tableRegex)
       throws Throwable {
     List<UserPermission> permList = new ArrayList<UserPermission>();
-    HTable ht = null;
-    HBaseAdmin ha = null;
+    Table ht = null;
+    Admin ha = null;
     try {
       TableName aclTableName =
           TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "acl");
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
index 8a17994..b1f9c91 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
@@ -22,6 +22,7 @@ import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LA
 import java.io.IOException;
 import java.util.Map;

+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -72,7 +73,7 @@ public class VisibilityClient {
    */
   public static VisibilityLabelsResponse addLabels(Configuration conf, final String[] labels)
       throws Throwable {
-    HTable ht = null;
+    Table ht = null;
     try {
       ht = new HTable(conf, LABELS_TABLE_NAME.getName());
       Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse> callable =
@@ -126,7 +127,7 @@ public class VisibilityClient {
    * @throws Throwable
    */
   public static GetAuthsResponse getAuths(Configuration conf, final String user) throws Throwable {
-    HTable ht = null;
+    Table ht = null;
     try {
       ht = new HTable(conf, LABELS_TABLE_NAME.getName());
       Batch.Call<VisibilityLabelsService, GetAuthsResponse> callable =
@@ -168,7 +169,7 @@ public class VisibilityClient {
   private static VisibilityLabelsResponse setOrClearAuths(Configuration conf,
       final String[] auths, final String user, final boolean setOrClear)
       throws IOException, ServiceException, Throwable {
-    HTable ht = null;
+    Table ht = null;
     try {
       ht = new HTable(conf, LABELS_TABLE_NAME.getName());
       Batch.Call<VisibilityLabelsService, VisibilityLabelsResponse> callable =
localConfig.set("hbase.client.connection.impl", RpcTimeoutConnection.class.getName()); - HTable table = new HTable(localConfig, TableName.META_TABLE_NAME); + Table table = new HTable(localConfig, TableName.META_TABLE_NAME); Throwable t = null; LOG.info("Start"); try { @@ -187,7 +187,7 @@ public class TestClientNoCluster extends Configured implements Tool { // and it has expired. Otherwise, if this functionality is broke, all retries will be run -- // all ten of them -- and we'll get the RetriesExhaustedException exception. localConfig.setInt(HConstants.HBASE_CLIENT_META_OPERATION_TIMEOUT, pause - 1); - HTable table = new HTable(localConfig, TableName.META_TABLE_NAME); + Table table = new HTable(localConfig, TableName.META_TABLE_NAME); Throwable t = null; try { // An exists call turns into a get w/ a flag. @@ -219,7 +219,7 @@ public class TestClientNoCluster extends Configured implements Tool { // Go against meta else we will try to find first region for the table on construction which // means we'll have to do a bunch more mocking. Tests that go against meta only should be // good for a bit of testing. - HTable table = new HTable(this.conf, TableName.META_TABLE_NAME); + Table table = new HTable(this.conf, TableName.META_TABLE_NAME); ResultScanner scanner = table.getScanner(HConstants.CATALOG_FAMILY); try { Result result = null; @@ -239,7 +239,7 @@ public class TestClientNoCluster extends Configured implements Tool { // Go against meta else we will try to find first region for the table on construction which // means we'll have to do a bunch more mocking. Tests that go against meta only should be // good for a bit of testing. - HTable table = new HTable(this.conf, TableName.META_TABLE_NAME); + Table table = new HTable(this.conf, TableName.META_TABLE_NAME); ResultScanner scanner = table.getScanner(HConstants.CATALOG_FAMILY); try { Result result = null; @@ -700,7 +700,7 @@ public class TestClientNoCluster extends Configured implements Tool { * @throws IOException */ static void cycle(int id, final Configuration c, final HConnection sharedConnection) throws IOException { - HTableInterface table = sharedConnection.getTable(BIG_USER_TABLE); + Table table = sharedConnection.getTable(BIG_USER_TABLE); table.setAutoFlushTo(false); long namespaceSpan = c.getLong("hbase.test.namespace.span", 1000000); long startTime = System.currentTimeMillis(); http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java ---------------------------------------------------------------------- diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java index 1619467..572128a 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java @@ -99,7 +99,7 @@ public class TestSnapshotFromAdmin { builder.build(), builder.build(), builder.build(), builder.setDone(true).build()); // setup the admin and run the test - HBaseAdmin admin = new HBaseAdmin(mockConnection); + Admin admin = new HBaseAdmin(mockConnection); String snapshot = "snapshot"; TableName table = TableName.valueOf("table"); // get start time @@ -122,7 +122,7 @@ public class TestSnapshotFromAdmin { .mock(ConnectionManager.HConnectionImplementation.class); Configuration conf = HBaseConfiguration.create(); 
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
index 1619467..572128a 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
@@ -99,7 +99,7 @@ public class TestSnapshotFromAdmin {
         builder.build(), builder.build(), builder.build(), builder.setDone(true).build());

     // setup the admin and run the test
-    HBaseAdmin admin = new HBaseAdmin(mockConnection);
+    Admin admin = new HBaseAdmin(mockConnection);
     String snapshot = "snapshot";
     TableName table = TableName.valueOf("table");
     // get start time
@@ -122,7 +122,7 @@ public class TestSnapshotFromAdmin {
         .mock(ConnectionManager.HConnectionImplementation.class);
     Configuration conf = HBaseConfiguration.create();
     Mockito.when(mockConnection.getConfiguration()).thenReturn(conf);
-    HBaseAdmin admin = new HBaseAdmin(mockConnection);
+    Admin admin = new HBaseAdmin(mockConnection);
     SnapshotDescription.Builder builder = SnapshotDescription.newBuilder();
     // check that invalid snapshot names fail
     failSnapshotStart(admin, builder.setName(HConstants.SNAPSHOT_DIR_NAME).build());
@@ -152,7 +152,7 @@ public class TestSnapshotFromAdmin {
     admin.snapshot(builder.setName("snapshot").setTable("table").build());
   }

-  private void failSnapshotStart(HBaseAdmin admin, SnapshotDescription snapshot) throws IOException {
+  private void failSnapshotStart(Admin admin, SnapshotDescription snapshot) throws IOException {
     try {
       admin.snapshot(snapshot);
       fail("Snapshot should not have succeed with name:" + snapshot.getName());
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
index 8dbb16c..16327ba 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest;
@@ -78,7 +79,7 @@ public class TestBulkDeleteProtocol {
   // @Ignore @Test
   public void testBulkDeleteEndpoint() throws Throwable {
     byte[] tableName = Bytes.toBytes("testBulkDeleteEndpoint");
-    HTable ht = createTable(tableName);
+    Table ht = createTable(tableName);
     List<Put> puts = new ArrayList<Put>(100);
     for (int j = 0; j < 100; j++) {
       byte[] rowkey = Bytes.toBytes(j);
@@ -102,7 +103,7 @@ public class TestBulkDeleteProtocol {
       throws Throwable {
     byte[] tableName = Bytes
         .toBytes("testBulkDeleteEndpointWhenRowBatchSizeLessThanRowsToDeleteFromARegion");
-    HTable ht = createTable(tableName);
+    Table ht = createTable(tableName);
     List<Put> puts = new ArrayList<Put>(100);
     for (int j = 0; j < 100; j++) {
       byte[] rowkey = Bytes.toBytes(j);
@@ -123,7 +124,7 @@ public class TestBulkDeleteProtocol {
   private long invokeBulkDeleteProtocol(byte[] tableName, final Scan scan, final int rowBatchSize,
       final DeleteType deleteType, final Long timeStamp) throws Throwable {
-    HTable ht = new HTable(TEST_UTIL.getConfiguration(), tableName);
+    Table ht = new HTable(TEST_UTIL.getConfiguration(), tableName);
     long noOfDeletedRows = 0L;
     Batch.Call<BulkDeleteService, BulkDeleteResponse> callable =
       new Batch.Call<BulkDeleteService, BulkDeleteResponse>() {
@@ -155,7 +156,7 @@ public class TestBulkDeleteProtocol {
   // @Ignore @Test
   public void testBulkDeleteWithConditionBasedDelete() throws Throwable {
     byte[] tableName = Bytes.toBytes("testBulkDeleteWithConditionBasedDelete");
-    HTable ht = createTable(tableName);
+    Table ht = createTable(tableName);
     List<Put> puts = new ArrayList<Put>(100);
     for (int j = 0; j < 100; j++) {
       byte[] rowkey = Bytes.toBytes(j);
@@ -185,7 +186,7 @@ public class TestBulkDeleteProtocol {
   // @Ignore @Test
   public void testBulkDeleteColumn() throws Throwable {
     byte[] tableName = Bytes.toBytes("testBulkDeleteColumn");
-    HTable ht = createTable(tableName);
+    Table ht = createTable(tableName);
     List<Put> puts = new ArrayList<Put>(100);
     for (int j = 0; j < 100; j++) {
       byte[] rowkey = Bytes.toBytes(j);
@@ -218,7 +219,7 @@ public class TestBulkDeleteProtocol {
     htd.addFamily(new HColumnDescriptor(FAMILY1));
     htd.addFamily(new HColumnDescriptor(FAMILY2));
     TEST_UTIL.getHBaseAdmin().createTable(htd, Bytes.toBytes(0), Bytes.toBytes(120), 5);
-    HTable ht = new HTable(TEST_UTIL.getConfiguration(), tableName);
+    Table ht = new HTable(TEST_UTIL.getConfiguration(), tableName);
     List<Put> puts = new ArrayList<Put>(100);
     for (int j = 0; j < 100; j++) {
       Put put = new Put(Bytes.toBytes(j));
@@ -245,7 +246,7 @@ public class TestBulkDeleteProtocol {
   // @Ignore @Test
   public void testBulkDeleteColumnVersion() throws Throwable {
     byte[] tableName = Bytes.toBytes("testBulkDeleteColumnVersion");
-    HTable ht = createTable(tableName);
+    Table ht = createTable(tableName);
     List<Put> puts = new ArrayList<Put>(100);
     for (int j = 0; j < 100; j++) {
       Put put = new Put(Bytes.toBytes(j));
@@ -293,7 +294,7 @@ public class TestBulkDeleteProtocol {
   // @Ignore @Test
   public void testBulkDeleteColumnVersionBasedOnTS() throws Throwable {
     byte[] tableName = Bytes.toBytes("testBulkDeleteColumnVersionBasedOnTS");
-    HTable ht = createTable(tableName);
+    Table ht = createTable(tableName);
     List<Put> puts = new ArrayList<Put>(100);
     for (int j = 0; j < 100; j++) {
       Put put = new Put(Bytes.toBytes(j));
@@ -340,7 +341,7 @@ public class TestBulkDeleteProtocol {
   // @Ignore @Test
   public void testBulkDeleteWithNumberOfVersions() throws Throwable {
     byte[] tableName = Bytes.toBytes("testBulkDeleteWithNumberOfVersions");
-    HTable ht = createTable(tableName);
+    Table ht = createTable(tableName);
     List<Put> puts = new ArrayList<Put>(100);
     for (int j = 0; j < 100; j++) {
       Put put = new Put(Bytes.toBytes(j));
@@ -422,13 +423,13 @@ public class TestBulkDeleteProtocol {
     ht.close();
   }

-  private HTable createTable(byte[] tableName) throws IOException {
+  private Table createTable(byte[] tableName) throws IOException {
     HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
     HColumnDescriptor hcd = new HColumnDescriptor(FAMILY1);
     hcd.setMaxVersions(10);// Just setting 10 as I am not testing with more than 10 versions here
     htd.addFamily(hcd);
     TEST_UTIL.getHBaseAdmin().createTable(htd, Bytes.toBytes(0), Bytes.toBytes(120), 5);
-    HTable ht = new HTable(TEST_UTIL.getConfiguration(), tableName);
+    Table ht = new HTable(TEST_UTIL.getConfiguration(), tableName);
     return ht;
   }
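These tests drive a coprocessor endpoint, and coprocessorService is declared on the Table interface, so typing ht as Table costs them nothing. A sketch of that invocation style, assuming the BulkDeleteService/BulkDeleteRequest/BulkDeleteResponse stubs generated from the example's BulkDelete.proto; the wrapper class and its method are illustrative:

    import java.io.IOException;
    import java.util.Map;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.client.coprocessor.Batch;
    import org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest;
    import org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse;
    import org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteService;
    import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
    import org.apache.hadoop.hbase.ipc.ServerRpcController;

    final class BulkDeleteInvoker {
      // Invoke the endpoint once per region overlapping [startRow, stopRow).
      static long invoke(Table ht, Scan scan, final BulkDeleteRequest request) throws Throwable {
        Batch.Call<BulkDeleteService, BulkDeleteResponse> callable =
            new Batch.Call<BulkDeleteService, BulkDeleteResponse>() {
              @Override
              public BulkDeleteResponse call(BulkDeleteService service) throws IOException {
                ServerRpcController controller = new ServerRpcController();
                BlockingRpcCallback<BulkDeleteResponse> callback =
                    new BlockingRpcCallback<BulkDeleteResponse>();
                service.delete(controller, request, callback); // endpoint RPC
                return callback.get();
              }
            };
        // coprocessorService is on Table, so any implementation works here.
        Map<byte[], BulkDeleteResponse> responses =
            ht.coprocessorService(BulkDeleteService.class, scan.getStartRow(), scan.getStopRow(), callable);
        long deleted = 0;
        for (BulkDeleteResponse response : responses.values()) {
          deleted += response.getRowsDeleted();
        }
        return deleted;
      }
    }
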
import org.apache.hadoop.hbase.client.coprocessor.Batch;

(See the reconstructed TestRowCountEndpoint diff below.)

http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
index fb04b4d..cc16483 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRowCountEndpoint.java
@@ -23,16 +23,13 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos;
 import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.Ignore;
 import org.junit.experimental.categories.Category;

 import java.io.IOException;
@@ -72,7 +69,7 @@ public class TestRowCountEndpoint {
   // @Ignore @Test
   public void testEndpoint() throws Throwable {
-    HTable table = new HTable(CONF, TEST_TABLE);
+    Table table = new HTable(CONF, TEST_TABLE);

     // insert some test rows
     for (int i=0; i<5; i++) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java
----------------------------------------------------------------------
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java
index 824910a..af51504 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -74,7 +75,7 @@ public class TestZooKeeperScanPolicyObserver {
       .setTimeToLive(1);
     desc.addFamily(hcd);
     TEST_UTIL.getHBaseAdmin().createTable(desc);
-    HTable t = new HTable(new Configuration(TEST_UTIL.getConfiguration()), tableName);
+    Table t = new HTable(new Configuration(TEST_UTIL.getConfiguration()), tableName);
     long now = EnvironmentEdgeManager.currentTime();
     ZooKeeperWatcher zkw = new ZooKeeperWatcher(TEST_UTIL.getConfiguration(), "test", null);
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
index 9ae00f9..f1f5f93 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
@@ -25,6 +25,7 @@ import java.util.List;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ClusterManager.ServiceType;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HConnectionManager;
@@ -45,7 +46,7 @@ import com.google.common.collect.Sets;
 @InterfaceAudience.Private
 public class DistributedHBaseCluster extends HBaseCluster {

-  private HBaseAdmin admin;
+  private Admin admin;

   private ClusterManager clusterManager;

http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java
index a1e306d..750376a 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestLazyCfLoading.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
@@ -222,7 +223,7 @@ public class IntegrationTestLazyCfLoading {
     long maxRuntime = conf.getLong(timeoutKey, DEFAULT_TIMEOUT_MINUTES);
     long serverCount = util.getHBaseClusterInterface().getClusterStatus().getServersSize();
     long keysToWrite = serverCount * KEYS_TO_WRITE_PER_SERVER;
-    HTable table = new HTable(conf, TABLE_NAME);
+    Table table = new HTable(conf, TABLE_NAME);

     // Create multi-threaded writer and start it. We write multiple columns/CFs and verify
     // their integrity, therefore multi-put is necessary.

http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
index 766c66f..e99677b 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
@@ -153,7 +154,7 @@ public class IntegrationTestImportTsv implements Configurable, Tool {
     assertEquals("Loading HFiles failed.", 0,
         ToolRunner.run(new LoadIncrementalHFiles(new Configuration(getConf())), args));

-    HTable table = null;
+    Table table = null;
     Scan scan = new Scan() {{
       setCacheBlocks(false);
       setCaching(1000);
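The administrative half of the change has the same shape: fields and locals move to the Admin interface while construction still goes through HBaseAdmin. A minimal sketch, assuming the same 0.99-era API; the table and family names are illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.HBaseAdmin;

    public class EnsureTable {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        Admin admin = new HBaseAdmin(conf); // declared as Admin, constructed as HBaseAdmin
        try {
          TableName name = TableName.valueOf("usertable");
          if (!admin.tableExists(name)) {
            HTableDescriptor htd = new HTableDescriptor(name);
            htd.addFamily(new HColumnDescriptor("f"));
            admin.createTable(htd);
          }
        } finally {
          admin.close();
        }
      }
    }
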
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
index 65e1026..55cc70e 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.chaos.actions.RestartActiveMasterAction;
 import org.apache.hadoop.hbase.chaos.actions.RestartRsHoldingMetaAction;
 import org.apache.hadoop.hbase.chaos.actions.RestartRsHoldingTableAction;
 import org.apache.hadoop.hbase.chaos.factories.MonkeyConstants;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
@@ -56,6 +57,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.RetriesExhaustedException;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
 import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
 import org.apache.hadoop.hbase.ipc.FatalConnectionException;
@@ -461,7 +463,7 @@ public class IntegrationTestMTTR {
    */
   static class PutCallable extends TimingCallable {

-    private final HTable table;
+    private final Table table;

     public PutCallable(Future<?> f) throws IOException {
       super(f);
@@ -488,7 +490,7 @@ public class IntegrationTestMTTR {
    * supplied future returns. Returns the max time taken to scan.
    */
   static class ScanCallable extends TimingCallable {
-    private final HTable table;
+    private final Table table;

     public ScanCallable(Future<?> f) throws IOException {
       super(f);
@@ -531,7 +533,7 @@ public class IntegrationTestMTTR {

     @Override
     protected boolean doAction() throws Exception {
-      HBaseAdmin admin = null;
+      Admin admin = null;
       try {
         admin = new HBaseAdmin(util.getConfiguration());
         ClusterStatus status = admin.getClusterStatus();
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index 3adef26..c709f0d 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -50,6 +50,7 @@ import org.apache.hadoop.hbase.IntegrationTestingUtility;
 import org.apache.hadoop.hbase.IntegrationTests;
 import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HConnection;
@@ -60,6 +61,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.ScannerCallable;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.mapreduce.TableMapper;
@@ -448,7 +450,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {

     protected void createSchema() throws IOException {
       Configuration conf = getConf();
-      HBaseAdmin admin = new HBaseAdmin(conf);
+      Admin admin = new HBaseAdmin(conf);
       TableName tableName = getTableName(conf);
       try {
         if (!admin.tableExists(tableName)) {
@@ -873,7 +875,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
         System.exit(-1);
       }

-      HTable table = new HTable(getConf(), getTableName(getConf()));
+      Table table = new HTable(getConf(), getTableName(getConf()));
       Scan scan = new Scan();
       scan.setBatch(10000);
@@ -923,7 +925,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {

       org.apache.hadoop.hbase.client.Delete delete
         = new org.apache.hadoop.hbase.client.Delete(val);

-      HTable table = new HTable(getConf(), getTableName(getConf()));
+      Table table = new HTable(getConf(), getTableName(getConf()));

       table.delete(delete);
       table.flushCommits();
@@ -969,7 +971,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
       byte[] startKey = isSpecificStart ? Bytes.toBytesBinary(cmd.getOptionValue('s')) : null;
       int logEvery = cmd.hasOption('l') ? Integer.parseInt(cmd.getOptionValue('l')) : 1;

-      HTable table = new HTable(getConf(), getTableName(getConf()));
+      Table table = new HTable(getConf(), getTableName(getConf()));
       long numQueries = 0;
       // If isSpecificStart is set, only walk one list from that particular node.
       // Note that in case of circular (or P-shaped) list it will walk forever, as is
@@ -1005,7 +1007,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
       return 0;
     }

-    private static CINode findStartNode(HTable table, byte[] startKey) throws IOException {
+    private static CINode findStartNode(Table table, byte[] startKey) throws IOException {
       Scan scan = new Scan();
       scan.setStartRow(startKey);
       scan.setBatch(1);
@@ -1028,7 +1030,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
       return null;
     }

-    private CINode getNode(byte[] row, HTable table, CINode node) throws IOException {
+    private CINode getNode(byte[] row, Table table, CINode node) throws IOException {
       Get get = new Get(row);
       get.addColumn(FAMILY_NAME, COLUMN_PREV);
       Result result = table.get(get);
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
index 0da5107..6bab237 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
@@ -448,7 +448,7 @@ public void cleanUpCluster() throws Exception {
     HTableDescriptor htd = new HTableDescriptor(table);
     htd.addFamily(new HColumnDescriptor(TEST_FAMILY));

-    HBaseAdmin admin = new HBaseAdmin(getConf());
+    Admin admin = new HBaseAdmin(getConf());
     if (doLoad) {
       admin.createTable(htd, Bytes.toBytes(0L), Bytes.toBytes(-1L), numPresplits);
       doLoad(getConf(), htd);

http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
index 5ca0e36..63bd42f 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java
@@ -39,8 +39,8 @@ import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Consistency;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.regionserver.StorefileRefresherChore;
 import org.apache.hadoop.hbase.util.LoadTestTool;
 import org.apache.hadoop.hbase.util.MultiThreadedReader;
@@ -326,7 +326,7 @@ public class IntegrationTestTimeBoundedRequestsWithRegionReplicas extends Integr
     @Override
     protected void verifyResultsAndUpdateMetrics(boolean verify, Get[] gets, long elapsedNano,
-        Result[] results, HTableInterface table, boolean isNullExpected)
+        Result[] results, Table table, boolean isNullExpected)
         throws IOException {
       super.verifyResultsAndUpdateMetrics(verify, gets, elapsedNano, results, table, isNullExpected);
       for (Result r : results) {
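Here the change reaches an inherited signature: the override now takes Table where it previously took HTableInterface, which can only compile because the superclass method in MultiThreadedReader is adjusted in step elsewhere in this commit. The resulting shape of such an override:

  @Override
  protected void verifyResultsAndUpdateMetrics(boolean verify, Get[] gets, long elapsedNano,
      Result[] results, Table table, boolean isNullExpected) throws IOException {
    super.verifyResultsAndUpdateMetrics(verify, gets, elapsedNano, results, table, isNullExpected);
    // subclass-specific checks on results follow here
  }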
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
index f4e4250..fc3ae3c 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestWithCellVisibilityLoadAndVerify.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
 import org.apache.hadoop.hbase.IntegrationTests;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
@@ -369,7 +370,7 @@ public class IntegrationTestWithCellVisibilityLoadAndVerify extends IntegrationT
     HTableDescriptor htd = new HTableDescriptor(getTablename());
     htd.addFamily(new HColumnDescriptor(TEST_FAMILY));

-    HBaseAdmin admin = new HBaseAdmin(getConf());
+    Admin admin = new HBaseAdmin(getConf());
     try {
       admin.createTable(htd, Bytes.toBytes(0L), Bytes.toBytes(-1L), numPresplits);
     } finally {

http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
index 7961eb9..2ec5838 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/trace/IntegrationTestSendTraceRequests.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.ToolRunner;
@@ -123,7 +124,7 @@ public class IntegrationTestSendTraceRequests extends AbstractHBaseTool {
       ResultScanner rs = null;
       try {
         innerScope = Trace.startSpan("Scan", Sampler.ALWAYS);
-        HTable ht = new HTable(util.getConfiguration(), tableName);
+        Table ht = new HTable(util.getConfiguration(), tableName);
         Scan s = new Scan();
         s.setStartRow(Bytes.toBytes(rowKeyQueue.take()));
         s.setBatch(7);
@@ -171,7 +172,7 @@
       public void run() {

-        HTable ht = null;
+        Table ht = null;
         try {
           ht = new HTable(util.getConfiguration(), tableName);
         } catch (IOException e) {
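Reads change only in the declared type; Scan, ResultScanner, and Result were already interface-level APIs. A compact read path through a Table, assuming the imports in the hunks above; the start row and batch size are hypothetical:

  static void scanSome(Configuration conf, TableName tableName) throws IOException {
    Table ht = new HTable(conf, tableName);
    try {
      Scan s = new Scan();
      s.setStartRow(Bytes.toBytes("row100")); // hypothetical start row
      s.setBatch(7);
      ResultScanner rs = ht.getScanner(s);
      try {
        for (Result r : rs) {
          // process each Result
        }
      } finally {
        rs.close();
      }
    } finally {
      ht.close();
    }
  }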
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
index a30819d..d4a87d3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
@@ -29,6 +29,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.security.User;
@@ -462,7 +463,7 @@ public class LocalHBaseCluster {
     Configuration conf = HBaseConfiguration.create();
     LocalHBaseCluster cluster = new LocalHBaseCluster(conf);
     cluster.startup();
-    HBaseAdmin admin = new HBaseAdmin(conf);
+    Admin admin = new HBaseAdmin(conf);
     HTableDescriptor htd =
       new HTableDescriptor(TableName.valueOf(cluster.getClass().getName()));
     admin.createTable(htd);

http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
index ab76cd4..155990e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
@@ -47,6 +47,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.HTableWrapper;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CoprocessorClassLoader;
 import org.apache.hadoop.hbase.util.SortedCopyOnWriteSet;

http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
index b6419b8..3af2351 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapred.JobConf;
@@ -46,7 +47,7 @@ import org.apache.hadoop.mapred.Partitioner;
 public class HRegionPartitioner implements Partitioner {
   private static final Log LOG = LogFactory.getLog(HRegionPartitioner.class);
-  private HTable table;
+  private RegionLocator table;
   private byte[][] startKeys;

   public void configure(JobConf job) {
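The partitioner change is different in kind: the field narrows to RegionLocator because the class only ever asks for region boundaries, and in this codebase HTable also implements RegionLocator, so construction is unchanged. A sketch of the kind of lookup the partitioner performs; the method name is illustrative:

  static int regionForRow(Configuration conf, TableName tableName, byte[] row)
      throws IOException {
    RegionLocator locator = new HTable(conf, tableName); // HTable implements RegionLocator here
    byte[][] startKeys = locator.getStartKeys();         // one entry per region
    for (int i = startKeys.length - 1; i >= 0; i--) {
      if (Bytes.compareTo(row, startKeys[i]) >= 0) {
        return i; // last region whose start key is <= row
      }
    }
    return 0;
  }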
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
index 327e404..e9d3932 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.mapred.InvalidJobConfException;
@@ -56,14 +57,14 @@ FileOutputFormat {
    */
   protected static class TableRecordWriter implements RecordWriter {
-    private HTable m_table;
+    private Table m_table;

     /**
      * Instantiate a TableRecordWriter with the HBase HClient for writing.
      *
      * @param table
      */
-    public TableRecordWriter(HTable table) {
+    public TableRecordWriter(Table table) {
       m_table = table;
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java
index 7713180..a9496a1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.mapred.RecordReader;
@@ -62,7 +63,7 @@ implements RecordReader {
   /**
    * @param htable the {@link HTable} to scan.
    */
-  public void setHTable(HTable htable) {
+  public void setHTable(Table htable) {
     this.recordReaderImpl.setHTable(htable);
   }
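With the writer holding a Table, the old mapred output format no longer cares how the handle was created. A trimmed sketch of the class after this change, assuming the org.apache.hadoop.mapred imports already present in the file; the write and close bodies are paraphrased, not part of the hunk:

  protected static class TableRecordWriter
      implements RecordWriter<ImmutableBytesWritable, Put> {
    private Table m_table;

    public TableRecordWriter(Table table) {
      m_table = table;
    }

    public void write(ImmutableBytesWritable key, Put value) throws IOException {
      m_table.put(new Put(value)); // paraphrased body
    }

    public void close(Reporter reporter) throws IOException {
      m_table.flushCommits(); // paraphrased; flushCommits is still on Table at this point
    }
  }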
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
index f4043f4..7517c1f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.ScannerCallable;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
@@ -52,7 +53,7 @@ public class TableRecordReaderImpl {
   private byte [] lastSuccessfulRow;
   private Filter trrRowFilter;
   private ResultScanner scanner;
-  private HTable htable;
+  private Table htable;
   private byte [][] trrInputColumns;
   private long timestamp;
   private int rowcount;
@@ -116,7 +117,7 @@ public class TableRecordReaderImpl {
   /**
    * @param htable the {@link HTable} to scan.
    */
-  public void setHTable(HTable htable) {
+  public void setHTable(Table htable) {
     Configuration conf = htable.getConfiguration();
     logScannerActivity = conf.getBoolean(
       ScannerCallable.LOG_SCANNER_ACTIVITY, false);

http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java
index 62bfba4..d09601d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/DefaultVisibilityExpressionResolver.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.security.visibility.Authorizations;
 import org.apache.hadoop.hbase.security.visibility.VisibilityLabelOrdinalProvider;
 import org.apache.hadoop.hbase.security.visibility.VisibilityUtils;
@@ -66,7 +67,7 @@ public class DefaultVisibilityExpressionResolver implements VisibilityExpression
   public void init() {
     // Reading all the labels and ordinals.
     // This scan should be done by a user with global_admin privileges. Ensure that it works.
-    HTable labelsTable = null;
+    Table labelsTable = null;
     try {
       labelsTable = new HTable(conf, LABELS_TABLE_NAME);
     } catch (TableNotFoundException e) {
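Note that setHTable keeps its historical name while now accepting any Table, and the implementation still reads the connection's Configuration through the interface. A caller sketch for the mapred reader; the table name is hypothetical:

  static TableRecordReader newReader(Configuration conf) throws IOException {
    TableRecordReader reader = new TableRecordReader();
    reader.setHTable(new HTable(conf, TableName.valueOf("t1"))); // any Table would do
    return reader;
  }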
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java
index df063a4..24ca013 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -160,7 +161,7 @@ public class HFileOutputFormat extends FileOutputFormat
-  private static List<ImmutableBytesWritable> getRegionStartKeys(HTable table)
+  private static List<ImmutableBytesWritable> getRegionStartKeys(RegionLocator table)
       throws IOException {
     byte[][] byteKeys = table.getStartKeys();
     ArrayList<ImmutableBytesWritable> ret =
@@ -544,7 +546,7 @@ public class HFileOutputFormat2
       value="RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")
   @VisibleForTesting
   static void configureCompression(
-      HTable table, Configuration conf) throws IOException {
+      Table table, Configuration conf) throws IOException {
     StringBuilder compressionConfigValue = new StringBuilder();
     HTableDescriptor tableDescriptor = table.getTableDescriptor();
     if(tableDescriptor == null){
@@ -578,7 +580,7 @@ public class HFileOutputFormat2
    */
   @VisibleForTesting
   static void configureBlockSize(
-      HTable table, Configuration conf) throws IOException {
+      Table table, Configuration conf) throws IOException {
     StringBuilder blockSizeConfigValue = new StringBuilder();
     HTableDescriptor tableDescriptor = table.getTableDescriptor();
     if (tableDescriptor == null) {
@@ -612,7 +614,7 @@ public class HFileOutputFormat2
    */
   @VisibleForTesting
   static void configureBloomType(
-      HTable table, Configuration conf) throws IOException {
+      Table table, Configuration conf) throws IOException {
     HTableDescriptor tableDescriptor = table.getTableDescriptor();
     if (tableDescriptor == null) {
       // could happen with mock table instance
@@ -647,7 +649,7 @@ public class HFileOutputFormat2
    * on failure to read column family descriptors
    */
   @VisibleForTesting
-  static void configureDataBlockEncoding(HTable table,
+  static void configureDataBlockEncoding(Table table,
       Configuration conf) throws IOException {
     HTableDescriptor tableDescriptor = table.getTableDescriptor();
     if (tableDescriptor == null) {
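The HFileOutputFormat hunks split responsibilities the same way: the configure* helpers read only the table descriptor, so Table suffices, while getRegionStartKeys needs only split points, so it takes a RegionLocator. In outline, condensed from the hunks above; the configureCompression body is elided to a comment:

  static void configureCompression(Table table, Configuration conf) throws IOException {
    HTableDescriptor tableDescriptor = table.getTableDescriptor();
    // ... serialize per-family compression settings into conf ...
  }

  private static List<ImmutableBytesWritable> getRegionStartKeys(RegionLocator table)
      throws IOException {
    byte[][] byteKeys = table.getStartKeys();
    ArrayList<ImmutableBytesWritable> ret =
        new ArrayList<ImmutableBytesWritable>(byteKeys.length);
    for (byte[] byteKey : byteKeys) {
      ret.add(new ImmutableBytesWritable(byteKey));
    }
    return ret;
  }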
http://git-wip-us.apache.org/repos/asf/hbase/blob/4995ed8a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
index 02727cc..e3e9dd0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapreduce.Partitioner;
@@ -53,7 +54,7 @@ implements Configurable {
   private static final Log LOG = LogFactory.getLog(HRegionPartitioner.class);
   private Configuration conf = null;
-  private HTable table;
+  private RegionLocator table;
   private byte[][] startKeys;

   /**