Return-Path: X-Original-To: archive-asf-public-internal@cust-asf2.ponee.io Delivered-To: archive-asf-public-internal@cust-asf2.ponee.io Received: from cust-asf.ponee.io (cust-asf.ponee.io [163.172.22.183]) by cust-asf2.ponee.io (Postfix) with ESMTP id A1B3A200D58 for ; Sat, 11 Nov 2017 17:12:42 +0100 (CET) Received: by cust-asf.ponee.io (Postfix) id A0781160C07; Sat, 11 Nov 2017 16:12:42 +0000 (UTC) Delivered-To: archive-asf-public@cust-asf.ponee.io Received: from mail.apache.org (hermes.apache.org [140.211.11.3]) by cust-asf.ponee.io (Postfix) with SMTP id 93184160C06 for ; Sat, 11 Nov 2017 17:12:40 +0100 (CET) Received: (qmail 53344 invoked by uid 500); 11 Nov 2017 16:12:37 -0000 Mailing-List: contact commits-help@hbase.apache.org; run by ezmlm Precedence: bulk List-Help: List-Unsubscribe: List-Post: List-Id: Reply-To: dev@hbase.apache.org Delivered-To: mailing list commits@hbase.apache.org Received: (qmail 50752 invoked by uid 99); 11 Nov 2017 16:12:35 -0000 Received: from git1-us-west.apache.org (HELO git1-us-west.apache.org) (140.211.11.23) by apache.org (qpsmtpd/0.29) with ESMTP; Sat, 11 Nov 2017 16:12:35 +0000 Received: by git1-us-west.apache.org (ASF Mail Server at git1-us-west.apache.org, from userid 33) id D6A56F5C3D; Sat, 11 Nov 2017 16:12:32 +0000 (UTC) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit From: git-site-role@apache.org To: commits@hbase.apache.org Date: Sat, 11 Nov 2017 16:12:33 -0000 Message-Id: <81fd842602db4a21bd9f8cc9ea77125c@git.apache.org> In-Reply-To: <2e5a9a7ec0574144b1b2c9efdce1ba85@git.apache.org> References: <2e5a9a7ec0574144b1b2c9efdce1ba85@git.apache.org> X-Mailer: ASF-Git Admin Mailer Subject: [02/51] [partial] hbase-site git commit: Published site at . archived-at: Sat, 11 Nov 2017 16:12:42 -0000 http://git-wip-us.apache.org/repos/asf/hbase-site/blob/809180c4/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html ---------------------------------------------------------------------- diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html index 6e18b1c..774bdc9 100644 --- a/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html +++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/HBaseAdmin.DeleteTableFuture.html @@ -26,93 +26,93 @@ 018 */ 019package org.apache.hadoop.hbase.client; 020 -021import java.io.Closeable; -022import java.io.IOException; -023import java.io.InterruptedIOException; -024import java.util.ArrayList; -025import java.util.Arrays; -026import java.util.Collection; -027import java.util.EnumSet; -028import java.util.HashMap; -029import java.util.Iterator; -030import java.util.LinkedList; -031import java.util.List; -032import java.util.Map; -033import java.util.Set; -034import java.util.TreeMap; -035import java.util.concurrent.Callable; -036import java.util.concurrent.ExecutionException; -037import java.util.concurrent.Future; -038import java.util.concurrent.TimeUnit; -039import java.util.concurrent.TimeoutException; -040import java.util.concurrent.atomic.AtomicInteger; -041import java.util.concurrent.atomic.AtomicReference; -042import java.util.regex.Pattern; -043import java.util.stream.Collectors; -044 -045import org.apache.commons.logging.Log; -046import org.apache.commons.logging.LogFactory; -047import org.apache.hadoop.conf.Configuration; -048import org.apache.hadoop.hbase.Abortable; -049import 
org.apache.hadoop.hbase.CacheEvictionStats; -050import org.apache.hadoop.hbase.CacheEvictionStatsBuilder; -051import org.apache.hadoop.hbase.ClusterStatus; -052import org.apache.hadoop.hbase.ClusterStatus.Option; -053import org.apache.hadoop.hbase.CompoundConfiguration; -054import org.apache.hadoop.hbase.DoNotRetryIOException; -055import org.apache.hadoop.hbase.HBaseConfiguration; -056import org.apache.hadoop.hbase.HColumnDescriptor; -057import org.apache.hadoop.hbase.HConstants; -058import org.apache.hadoop.hbase.HRegionInfo; -059import org.apache.hadoop.hbase.HRegionLocation; -060import org.apache.hadoop.hbase.HTableDescriptor; -061import org.apache.hadoop.hbase.MasterNotRunningException; -062import org.apache.hadoop.hbase.MetaTableAccessor; -063import org.apache.hadoop.hbase.NamespaceDescriptor; -064import org.apache.hadoop.hbase.NamespaceNotFoundException; -065import org.apache.hadoop.hbase.NotServingRegionException; -066import org.apache.hadoop.hbase.RegionLoad; -067import org.apache.hadoop.hbase.RegionLocations; -068import org.apache.hadoop.hbase.ServerName; -069import org.apache.hadoop.hbase.TableExistsException; -070import org.apache.hadoop.hbase.TableName; -071import org.apache.hadoop.hbase.TableNotDisabledException; -072import org.apache.hadoop.hbase.TableNotFoundException; -073import org.apache.hadoop.hbase.UnknownRegionException; -074import org.apache.hadoop.hbase.ZooKeeperConnectionException; -075import org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper; -076import org.apache.hadoop.hbase.client.replication.TableCFs; -077import org.apache.hadoop.hbase.client.security.SecurityCapability; -078import org.apache.hadoop.hbase.exceptions.TimeoutIOException; -079import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; -080import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; -081import org.apache.hadoop.hbase.ipc.HBaseRpcController; -082import org.apache.hadoop.hbase.ipc.RpcControllerFactory; -083import org.apache.hadoop.hbase.quotas.QuotaFilter; -084import org.apache.hadoop.hbase.quotas.QuotaRetriever; -085import org.apache.hadoop.hbase.quotas.QuotaSettings; -086import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException; -087import org.apache.hadoop.hbase.replication.ReplicationException; -088import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; -089import org.apache.hadoop.hbase.replication.ReplicationPeerDescription; -090import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils; -091import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException; -092import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException; -093import org.apache.hadoop.hbase.snapshot.SnapshotCreationException; -094import org.apache.hadoop.hbase.snapshot.UnknownSnapshotException; -095import org.apache.hadoop.hbase.util.Addressing; -096import org.apache.hadoop.hbase.util.Bytes; -097import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -098import org.apache.hadoop.hbase.util.ForeignExceptionUtil; -099import org.apache.hadoop.hbase.util.Pair; -100import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker; -101import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; -102import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; -103import org.apache.hadoop.ipc.RemoteException; -104import org.apache.hadoop.util.StringUtils; -105import org.apache.yetus.audience.InterfaceAudience; -106import org.apache.yetus.audience.InterfaceStability; -107import org.apache.zookeeper.KeeperException; +021import com.google.protobuf.Descriptors; +022import 
com.google.protobuf.Message; +023import com.google.protobuf.RpcController; +024 +025import java.io.Closeable; +026import java.io.IOException; +027import java.io.InterruptedIOException; +028import java.util.ArrayList; +029import java.util.Arrays; +030import java.util.Collection; +031import java.util.EnumSet; +032import java.util.HashMap; +033import java.util.Iterator; +034import java.util.LinkedList; +035import java.util.List; +036import java.util.Map; +037import java.util.Set; +038import java.util.TreeMap; +039import java.util.concurrent.Callable; +040import java.util.concurrent.ExecutionException; +041import java.util.concurrent.Future; +042import java.util.concurrent.TimeUnit; +043import java.util.concurrent.TimeoutException; +044import java.util.concurrent.atomic.AtomicInteger; +045import java.util.concurrent.atomic.AtomicReference; +046import java.util.regex.Pattern; +047import java.util.stream.Collectors; +048 +049import org.apache.commons.logging.Log; +050import org.apache.commons.logging.LogFactory; +051import org.apache.hadoop.conf.Configuration; +052import org.apache.hadoop.hbase.Abortable; +053import org.apache.hadoop.hbase.CacheEvictionStats; +054import org.apache.hadoop.hbase.CacheEvictionStatsBuilder; +055import org.apache.hadoop.hbase.ClusterStatus; +056import org.apache.hadoop.hbase.ClusterStatus.Option; +057import org.apache.hadoop.hbase.CompoundConfiguration; +058import org.apache.hadoop.hbase.DoNotRetryIOException; +059import org.apache.hadoop.hbase.HBaseConfiguration; +060import org.apache.hadoop.hbase.HColumnDescriptor; +061import org.apache.hadoop.hbase.HConstants; +062import org.apache.hadoop.hbase.HRegionInfo; +063import org.apache.hadoop.hbase.HRegionLocation; +064import org.apache.hadoop.hbase.HTableDescriptor; +065import org.apache.hadoop.hbase.MasterNotRunningException; +066import org.apache.hadoop.hbase.MetaTableAccessor; +067import org.apache.hadoop.hbase.NamespaceDescriptor; +068import org.apache.hadoop.hbase.NamespaceNotFoundException; +069import org.apache.hadoop.hbase.NotServingRegionException; +070import org.apache.hadoop.hbase.RegionLoad; +071import org.apache.hadoop.hbase.RegionLocations; +072import org.apache.hadoop.hbase.ServerName; +073import org.apache.hadoop.hbase.TableExistsException; +074import org.apache.hadoop.hbase.TableName; +075import org.apache.hadoop.hbase.TableNotDisabledException; +076import org.apache.hadoop.hbase.TableNotFoundException; +077import org.apache.hadoop.hbase.UnknownRegionException; +078import org.apache.hadoop.hbase.ZooKeeperConnectionException; +079import org.apache.hadoop.hbase.client.replication.ReplicationSerDeHelper; +080import org.apache.hadoop.hbase.client.replication.TableCFs; +081import org.apache.hadoop.hbase.client.security.SecurityCapability; +082import org.apache.hadoop.hbase.exceptions.TimeoutIOException; +083import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; +084import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; +085import org.apache.hadoop.hbase.ipc.HBaseRpcController; +086import org.apache.hadoop.hbase.ipc.RpcControllerFactory; +087import org.apache.hadoop.hbase.quotas.QuotaFilter; +088import org.apache.hadoop.hbase.quotas.QuotaRetriever; +089import org.apache.hadoop.hbase.quotas.QuotaSettings; +090import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException; +091import org.apache.hadoop.hbase.replication.ReplicationException; +092import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; +093import org.apache.hadoop.hbase.replication.ReplicationPeerDescription; +094import 
org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils; +095import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException; +096import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException; +097import org.apache.hadoop.hbase.snapshot.SnapshotCreationException; +098import org.apache.hadoop.hbase.snapshot.UnknownSnapshotException; +099import org.apache.hadoop.hbase.util.Addressing; +100import org.apache.hadoop.hbase.util.Bytes; +101import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; +102import org.apache.hadoop.hbase.util.ForeignExceptionUtil; +103import org.apache.hadoop.hbase.util.Pair; +104import org.apache.hadoop.ipc.RemoteException; +105import org.apache.hadoop.util.StringUtils; +106import org.apache.yetus.audience.InterfaceAudience; +107import org.apache.yetus.audience.InterfaceStability; 108 109import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; 110import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; @@ -214,4209 +214,4109 @@ 206import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse; 207import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos; 208 -209import com.google.protobuf.Descriptors; -210import com.google.protobuf.Message; -211import com.google.protobuf.RpcController; -212 -213/** -214 * HBaseAdmin is no longer a client API. It is marked InterfaceAudience.Private indicating that -215 * this is an HBase-internal class as defined in -216 * https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/InterfaceClassification.html -217 * There are no guarantees for backwards source / binary compatibility and methods or class can -218 * change or go away without deprecation. -219 * Use {@link Connection#getAdmin()} to obtain an instance of {@link Admin} instead of constructing -220 * an HBaseAdmin directly. -221 * -222 * <p>Connection should be an <i>unmanaged</i> connection obtained via -223 * {@link ConnectionFactory#createConnection(Configuration)} -224 * -225 * @see ConnectionFactory -226 * @see Connection -227 * @see Admin -228 */ -229@InterfaceAudience.Private -230@InterfaceStability.Evolving -231public class HBaseAdmin implements Admin { -232 private static final Log LOG = LogFactory.getLog(HBaseAdmin.class); +209/** +210 * HBaseAdmin is no longer a client API. It is marked InterfaceAudience.Private indicating that +211 * this is an HBase-internal class as defined in +212 * https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/InterfaceClassification.html +213 * There are no guarantees for backwards source / binary compatibility and methods or class can +214 * change or go away without deprecation. +215 * Use {@link Connection#getAdmin()} to obtain an instance of {@link Admin} instead of constructing +216 * an HBaseAdmin directly. 
+217 * +218 * <p>Connection should be an <i>unmanaged</i> connection obtained via +219 * {@link ConnectionFactory#createConnection(Configuration)} +220 * +221 * @see ConnectionFactory +222 * @see Connection +223 * @see Admin +224 */ +225@InterfaceAudience.Private +226@InterfaceStability.Evolving +227public class HBaseAdmin implements Admin { +228 private static final Log LOG = LogFactory.getLog(HBaseAdmin.class); +229 +230 private static final String ZK_IDENTIFIER_PREFIX = "hbase-admin-on-"; +231 +232 private ClusterConnection connection; 233 -234 private static final String ZK_IDENTIFIER_PREFIX = "hbase-admin-on-"; -235 -236 private ClusterConnection connection; -237 -238 private volatile Configuration conf; -239 private final long pause; -240 private final int numRetries; -241 private final int syncWaitTimeout; -242 private boolean aborted; -243 private int operationTimeout; -244 private int rpcTimeout; -245 -246 private RpcRetryingCallerFactory rpcCallerFactory; -247 private RpcControllerFactory rpcControllerFactory; -248 -249 private NonceGenerator ng; -250 -251 @Override -252 public int getOperationTimeout() { -253 return operationTimeout; -254 } +234 private volatile Configuration conf; +235 private final long pause; +236 private final int numRetries; +237 private final int syncWaitTimeout; +238 private boolean aborted; +239 private int operationTimeout; +240 private int rpcTimeout; +241 +242 private RpcRetryingCallerFactory rpcCallerFactory; +243 private RpcControllerFactory rpcControllerFactory; +244 +245 private NonceGenerator ng; +246 +247 @Override +248 public int getOperationTimeout() { +249 return operationTimeout; +250 } +251 +252 HBaseAdmin(ClusterConnection connection) throws IOException { +253 this.conf = connection.getConfiguration(); +254 this.connection = connection; 255 -256 HBaseAdmin(ClusterConnection connection) throws IOException { -257 this.conf = connection.getConfiguration(); -258 this.connection = connection; -259 -260 // TODO: receive ConnectionConfiguration here rather than re-parsing these configs every time. 
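Per the class Javadoc above, HBaseAdmin is InterfaceAudience.Private and client code is expected to reach Admin through ConnectionFactory and Connection#getAdmin() rather than by constructing HBaseAdmin itself. A minimal sketch of that recommended path (the table name "t1" is hypothetical, and the calls throw IOException):

    Configuration conf = HBaseConfiguration.create();
    // Connection and Admin are both Closeable, so try-with-resources releases them.
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Admin admin = conn.getAdmin()) {
      TableName table = TableName.valueOf("t1");
      if (admin.tableExists(table)) {
        admin.disableTable(table);   // a table must be disabled before it can be deleted
        admin.deleteTable(table);
      }
    }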
-261 this.pause = this.conf.getLong(HConstants.HBASE_CLIENT_PAUSE, -262 HConstants.DEFAULT_HBASE_CLIENT_PAUSE); -263 this.numRetries = this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, -264 HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER); -265 this.operationTimeout = this.conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, -266 HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT); -267 this.rpcTimeout = this.conf.getInt(HConstants.HBASE_RPC_TIMEOUT_KEY, -268 HConstants.DEFAULT_HBASE_RPC_TIMEOUT); -269 this.syncWaitTimeout = this.conf.getInt( -270 "hbase.client.sync.wait.timeout.msec", 10 * 60000); // 10min -271 -272 this.rpcCallerFactory = connection.getRpcRetryingCallerFactory(); -273 this.rpcControllerFactory = connection.getRpcControllerFactory(); -274 -275 this.ng = this.connection.getNonceGenerator(); -276 } -277 -278 @Override -279 public void abort(String why, Throwable e) { -280 // Currently does nothing but throw the passed message and exception -281 this.aborted = true; -282 throw new RuntimeException(why, e); -283 } -284 -285 @Override -286 public boolean isAborted() { -287 return this.aborted; -288 } -289 -290 @Override -291 public boolean abortProcedure(final long procId, final boolean mayInterruptIfRunning) -292 throws IOException { -293 return get(abortProcedureAsync(procId, mayInterruptIfRunning), this.syncWaitTimeout, -294 TimeUnit.MILLISECONDS); -295 } -296 -297 @Override -298 public Future<Boolean> abortProcedureAsync(final long procId, final boolean mayInterruptIfRunning) -299 throws IOException { -300 Boolean abortProcResponse = -301 executeCallable(new MasterCallable<AbortProcedureResponse>(getConnection(), -302 getRpcControllerFactory()) { -303 @Override -304 protected AbortProcedureResponse rpcCall() throws Exception { -305 AbortProcedureRequest abortProcRequest = -306 AbortProcedureRequest.newBuilder().setProcId(procId).build(); -307 return master.abortProcedure(getRpcController(), abortProcRequest); -308 } -309 }).getIsProcedureAborted(); -310 return new AbortProcedureFuture(this, procId, abortProcResponse); -311 } -312 -313 @Override -314 public List<TableDescriptor> listTableDescriptors() throws IOException { -315 return listTableDescriptors((Pattern)null, false); -316 } -317 -318 @Override -319 public List<TableDescriptor> listTableDescriptors(Pattern pattern) throws IOException { -320 return listTableDescriptors(pattern, false); -321 } -322 -323 @Override -324 public List<TableDescriptor> listTableDescriptors(Pattern pattern, boolean includeSysTables) throws IOException { -325 return executeCallable(new MasterCallable<List<TableDescriptor>>(getConnection(), -326 getRpcControllerFactory()) { -327 @Override -328 protected List<TableDescriptor> rpcCall() throws Exception { -329 GetTableDescriptorsRequest req = -330 RequestConverter.buildGetTableDescriptorsRequest(pattern, includeSysTables); -331 return ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(), -332 req)); -333 } -334 }); -335 } -336 -337 @Override -338 public TableDescriptor getDescriptor(TableName tableName) throws TableNotFoundException, IOException { -339 return getTableDescriptor(tableName, getConnection(), rpcCallerFactory, rpcControllerFactory, -340 operationTimeout, rpcTimeout); -341 } -342 -343 @Override -344 public void modifyTable(TableDescriptor td) throws IOException { -345 get(modifyTableAsync(td), syncWaitTimeout, TimeUnit.MILLISECONDS); -346 } -347 -348 @Override -349 public Future<Void> modifyTableAsync(TableDescriptor td) throws IOException { -350 
ModifyTableResponse response = executeCallable( -351 new MasterCallable<ModifyTableResponse>(getConnection(), getRpcControllerFactory()) { -352 @Override -353 protected ModifyTableResponse rpcCall() throws Exception { -354 setPriority(td.getTableName()); -355 ModifyTableRequest request = RequestConverter.buildModifyTableRequest( -356 td.getTableName(), td, ng.getNonceGroup(), ng.newNonce()); -357 return master.modifyTable(getRpcController(), request); -358 } -359 }); -360 return new ModifyTableFuture(this, td.getTableName(), response); -361 } -362 -363 @Override -364 public List<TableDescriptor> listTableDescriptorsByNamespace(byte[] name) throws IOException { -365 return executeCallable(new MasterCallable<List<TableDescriptor>>(getConnection(), -366 getRpcControllerFactory()) { -367 @Override -368 protected List<TableDescriptor> rpcCall() throws Exception { -369 return master.listTableDescriptorsByNamespace(getRpcController(), -370 ListTableDescriptorsByNamespaceRequest.newBuilder() -371 .setNamespaceName(Bytes.toString(name)).build()) -372 .getTableSchemaList() -373 .stream() -374 .map(ProtobufUtil::toTableDescriptor) -375 .collect(Collectors.toList()); -376 } -377 }); -378 } -379 -380 @Override -381 public List<TableDescriptor> listTableDescriptors(List<TableName> tableNames) throws IOException { -382 return executeCallable(new MasterCallable<List<TableDescriptor>>(getConnection(), -383 getRpcControllerFactory()) { -384 @Override -385 protected List<TableDescriptor> rpcCall() throws Exception { -386 GetTableDescriptorsRequest req = -387 RequestConverter.buildGetTableDescriptorsRequest(tableNames); -388 return ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(), req)); -389 } -390 }); -391 } -392 -393 @Override -394 public List<RegionInfo> getRegions(final ServerName sn) throws IOException { -395 AdminService.BlockingInterface admin = this.connection.getAdmin(sn); -396 // TODO: There is no timeout on this controller. Set one! -397 HBaseRpcController controller = rpcControllerFactory.newController(); -398 return ProtobufUtil.getOnlineRegions(controller, admin); -399 } -400 -401 @Override -402 public List<RegionInfo> getRegions(final TableName tableName) throws IOException { -403 ZooKeeperWatcher zookeeper = -404 new ZooKeeperWatcher(conf, ZK_IDENTIFIER_PREFIX + connection.toString(), -405 new ThrowableAbortable()); -406 try { -407 if (TableName.META_TABLE_NAME.equals(tableName)) { -408 return new MetaTableLocator().getMetaRegions(zookeeper); -409 } else { -410 return MetaTableAccessor.getTableRegions(connection, tableName, true); -411 } -412 } finally { -413 zookeeper.close(); -414 } -415 } +256 // TODO: receive ConnectionConfiguration here rather than re-parsing these configs every time. 
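The HBaseAdmin constructor shown on both sides of this hunk derives its retry pause, retry count, and timeouts from the client Configuration on every instantiation, which is what the TODO above about ConnectionConfiguration refers to. A sketch of tuning those same keys before the connection is created, using the HConstants names that appear in this source; the numeric values are arbitrary examples:

    Configuration conf = HBaseConfiguration.create();
    conf.setLong(HConstants.HBASE_CLIENT_PAUSE, 200);                 // ms to pause between retries
    conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 10);
    conf.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 60_000);   // ms per admin operation
    conf.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 30_000);            // ms per individual RPC
    conf.setInt("hbase.client.sync.wait.timeout.msec", 10 * 60_000);  // wait applied to *Async futures
    Connection conn = ConnectionFactory.createConnection(conf);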
+257 this.pause = this.conf.getLong(HConstants.HBASE_CLIENT_PAUSE, +258 HConstants.DEFAULT_HBASE_CLIENT_PAUSE); +259 this.numRetries = this.conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, +260 HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER); +261 this.operationTimeout = this.conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, +262 HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT); +263 this.rpcTimeout = this.conf.getInt(HConstants.HBASE_RPC_TIMEOUT_KEY, +264 HConstants.DEFAULT_HBASE_RPC_TIMEOUT); +265 this.syncWaitTimeout = this.conf.getInt( +266 "hbase.client.sync.wait.timeout.msec", 10 * 60000); // 10min +267 +268 this.rpcCallerFactory = connection.getRpcRetryingCallerFactory(); +269 this.rpcControllerFactory = connection.getRpcControllerFactory(); +270 +271 this.ng = this.connection.getNonceGenerator(); +272 } +273 +274 @Override +275 public void abort(String why, Throwable e) { +276 // Currently does nothing but throw the passed message and exception +277 this.aborted = true; +278 throw new RuntimeException(why, e); +279 } +280 +281 @Override +282 public boolean isAborted() { +283 return this.aborted; +284 } +285 +286 @Override +287 public boolean abortProcedure(final long procId, final boolean mayInterruptIfRunning) +288 throws IOException { +289 return get(abortProcedureAsync(procId, mayInterruptIfRunning), this.syncWaitTimeout, +290 TimeUnit.MILLISECONDS); +291 } +292 +293 @Override +294 public Future<Boolean> abortProcedureAsync(final long procId, final boolean mayInterruptIfRunning) +295 throws IOException { +296 Boolean abortProcResponse = +297 executeCallable(new MasterCallable<AbortProcedureResponse>(getConnection(), +298 getRpcControllerFactory()) { +299 @Override +300 protected AbortProcedureResponse rpcCall() throws Exception { +301 AbortProcedureRequest abortProcRequest = +302 AbortProcedureRequest.newBuilder().setProcId(procId).build(); +303 return master.abortProcedure(getRpcController(), abortProcRequest); +304 } +305 }).getIsProcedureAborted(); +306 return new AbortProcedureFuture(this, procId, abortProcResponse); +307 } +308 +309 @Override +310 public List<TableDescriptor> listTableDescriptors() throws IOException { +311 return listTableDescriptors((Pattern)null, false); +312 } +313 +314 @Override +315 public List<TableDescriptor> listTableDescriptors(Pattern pattern) throws IOException { +316 return listTableDescriptors(pattern, false); +317 } +318 +319 @Override +320 public List<TableDescriptor> listTableDescriptors(Pattern pattern, boolean includeSysTables) throws IOException { +321 return executeCallable(new MasterCallable<List<TableDescriptor>>(getConnection(), +322 getRpcControllerFactory()) { +323 @Override +324 protected List<TableDescriptor> rpcCall() throws Exception { +325 GetTableDescriptorsRequest req = +326 RequestConverter.buildGetTableDescriptorsRequest(pattern, includeSysTables); +327 return ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(), +328 req)); +329 } +330 }); +331 } +332 +333 @Override +334 public TableDescriptor getDescriptor(TableName tableName) throws TableNotFoundException, IOException { +335 return getTableDescriptor(tableName, getConnection(), rpcCallerFactory, rpcControllerFactory, +336 operationTimeout, rpcTimeout); +337 } +338 +339 @Override +340 public void modifyTable(TableDescriptor td) throws IOException { +341 get(modifyTableAsync(td), syncWaitTimeout, TimeUnit.MILLISECONDS); +342 } +343 +344 @Override +345 public Future<Void> modifyTableAsync(TableDescriptor td) throws IOException { +346 
ModifyTableResponse response = executeCallable( +347 new MasterCallable<ModifyTableResponse>(getConnection(), getRpcControllerFactory()) { +348 @Override +349 protected ModifyTableResponse rpcCall() throws Exception { +350 setPriority(td.getTableName()); +351 ModifyTableRequest request = RequestConverter.buildModifyTableRequest( +352 td.getTableName(), td, ng.getNonceGroup(), ng.newNonce()); +353 return master.modifyTable(getRpcController(), request); +354 } +355 }); +356 return new ModifyTableFuture(this, td.getTableName(), response); +357 } +358 +359 @Override +360 public List<TableDescriptor> listTableDescriptorsByNamespace(byte[] name) throws IOException { +361 return executeCallable(new MasterCallable<List<TableDescriptor>>(getConnection(), +362 getRpcControllerFactory()) { +363 @Override +364 protected List<TableDescriptor> rpcCall() throws Exception { +365 return master.listTableDescriptorsByNamespace(getRpcController(), +366 ListTableDescriptorsByNamespaceRequest.newBuilder() +367 .setNamespaceName(Bytes.toString(name)).build()) +368 .getTableSchemaList() +369 .stream() +370 .map(ProtobufUtil::toTableDescriptor) +371 .collect(Collectors.toList()); +372 } +373 }); +374 } +375 +376 @Override +377 public List<TableDescriptor> listTableDescriptors(List<TableName> tableNames) throws IOException { +378 return executeCallable(new MasterCallable<List<TableDescriptor>>(getConnection(), +379 getRpcControllerFactory()) { +380 @Override +381 protected List<TableDescriptor> rpcCall() throws Exception { +382 GetTableDescriptorsRequest req = +383 RequestConverter.buildGetTableDescriptorsRequest(tableNames); +384 return ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(), req)); +385 } +386 }); +387 } +388 +389 @Override +390 public List<RegionInfo> getRegions(final ServerName sn) throws IOException { +391 AdminService.BlockingInterface admin = this.connection.getAdmin(sn); +392 // TODO: There is no timeout on this controller. Set one! 
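The synchronous modifyTable above is just modifyTableAsync plus a bounded wait of syncWaitTimeout; a caller that wants its own deadline can hold the Future itself. A sketch, assuming an Admin named admin and a TableDescriptor named td obtained elsewhere:

    // modifyTableAsync submits the modify-table procedure to the master and returns
    // immediately; the Future completes when that procedure finishes.
    Future<Void> pending = admin.modifyTableAsync(td);
    pending.get(5, TimeUnit.MINUTES);   // hypothetical deadline; throws TimeoutException if exceeded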
+393 HBaseRpcController controller = rpcControllerFactory.newController(); +394 return ProtobufUtil.getOnlineRegions(controller, admin); +395 } +396 +397 @Override +398 public List<RegionInfo> getRegions(TableName tableName) throws IOException { +399 if (TableName.isMetaTableName(tableName)) { +400 return Arrays.asList(RegionInfoBuilder.FIRST_META_REGIONINFO); +401 } else { +402 return MetaTableAccessor.getTableRegions(connection, tableName, true); +403 } +404 } +405 +406 private static class AbortProcedureFuture extends ProcedureFuture<Boolean> { +407 private boolean isAbortInProgress; +408 +409 public AbortProcedureFuture( +410 final HBaseAdmin admin, +411 final Long procId, +412 final Boolean abortProcResponse) { +413 super(admin, procId); +414 this.isAbortInProgress = abortProcResponse; +415 } 416 -417 private static class AbortProcedureFuture extends ProcedureFuture<Boolean> { -418 private boolean isAbortInProgress; -419 -420 public AbortProcedureFuture( -421 final HBaseAdmin admin, -422 final Long procId, -423 final Boolean abortProcResponse) { -424 super(admin, procId); -425 this.isAbortInProgress = abortProcResponse; -426 } +417 @Override +418 public Boolean get(long timeout, TimeUnit unit) +419 throws InterruptedException, ExecutionException, TimeoutException { +420 if (!this.isAbortInProgress) { +421 return false; +422 } +423 super.get(timeout, unit); +424 return true; +425 } +426 } 427 -428 @Override -429 public Boolean get(long timeout, TimeUnit unit) -430 throws InterruptedException, ExecutionException, TimeoutException { -431 if (!this.isAbortInProgress) { -432 return false; -433 } -434 super.get(timeout, unit); -435 return true; -436 } -437 } -438 -439 /** @return Connection used by this object. */ -440 @Override -441 public Connection getConnection() { -442 return connection; -443 } -444 -445 @Override -446 public boolean tableExists(final TableName tableName) throws IOException { -447 return executeCallable(new RpcRetryingCallable<Boolean>() { -448 @Override -449 protected Boolean rpcCall(int callTimeout) throws Exception { -450 return MetaTableAccessor.tableExists(connection, tableName); -451 } -452 }); -453 } -454 -455 @Override -456 public HTableDescriptor[] listTables() throws IOException { -457 return listTables((Pattern)null, false); -458 } -459 -460 @Override -461 public HTableDescriptor[] listTables(Pattern pattern) throws IOException { -462 return listTables(pattern, false); -463 } -464 -465 @Override -466 public HTableDescriptor[] listTables(String regex) throws IOException { -467 return listTables(Pattern.compile(regex), false); -468 } -469 -470 @Override -471 public HTableDescriptor[] listTables(final Pattern pattern, final boolean includeSysTables) -472 throws IOException { -473 return executeCallable(new MasterCallable<HTableDescriptor[]>(getConnection(), -474 getRpcControllerFactory()) { -475 @Override -476 protected HTableDescriptor[] rpcCall() throws Exception { -477 GetTableDescriptorsRequest req = -478 RequestConverter.buildGetTableDescriptorsRequest(pattern, includeSysTables); -479 return ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(), -480 req)).stream().map(ImmutableHTableDescriptor::new).toArray(HTableDescriptor[]::new); -481 } -482 }); +428 /** @return Connection used by this object. 
*/ +429 @Override +430 public Connection getConnection() { +431 return connection; +432 } +433 +434 @Override +435 public boolean tableExists(final TableName tableName) throws IOException { +436 return executeCallable(new RpcRetryingCallable<Boolean>() { +437 @Override +438 protected Boolean rpcCall(int callTimeout) throws Exception { +439 return MetaTableAccessor.tableExists(connection, tableName); +440 } +441 }); +442 } +443 +444 @Override +445 public HTableDescriptor[] listTables() throws IOException { +446 return listTables((Pattern)null, false); +447 } +448 +449 @Override +450 public HTableDescriptor[] listTables(Pattern pattern) throws IOException { +451 return listTables(pattern, false); +452 } +453 +454 @Override +455 public HTableDescriptor[] listTables(String regex) throws IOException { +456 return listTables(Pattern.compile(regex), false); +457 } +458 +459 @Override +460 public HTableDescriptor[] listTables(final Pattern pattern, final boolean includeSysTables) +461 throws IOException { +462 return executeCallable(new MasterCallable<HTableDescriptor[]>(getConnection(), +463 getRpcControllerFactory()) { +464 @Override +465 protected HTableDescriptor[] rpcCall() throws Exception { +466 GetTableDescriptorsRequest req = +467 RequestConverter.buildGetTableDescriptorsRequest(pattern, includeSysTables); +468 return ProtobufUtil.toTableDescriptorList(master.getTableDescriptors(getRpcController(), +469 req)).stream().map(ImmutableHTableDescriptor::new).toArray(HTableDescriptor[]::new); +470 } +471 }); +472 } +473 +474 @Override +475 public HTableDescriptor[] listTables(String regex, boolean includeSysTables) +476 throws IOException { +477 return listTables(Pattern.compile(regex), includeSysTables); +478 } +479 +480 @Override +481 public TableName[] listTableNames() throws IOException { +482 return listTableNames((Pattern)null, false); 483 } 484 485 @Override -486 public HTableDescriptor[] listTables(String regex, boolean includeSysTables) -487 throws IOException { -488 return listTables(Pattern.compile(regex), includeSysTables); -489 } -490 -491 @Override -492 public TableName[] listTableNames() throws IOException { -493 return listTableNames((Pattern)null, false); -494 } -495 -496 @Override -497 public TableName[] listTableNames(Pattern pattern) throws IOException { -498 return listTableNames(pattern, false); -499 } -500 -501 @Override -502 public TableName[] listTableNames(String regex) throws IOException { -503 return listTableNames(Pattern.compile(regex), false); -504 } -505 -506 @Override -507 public TableName[] listTableNames(final Pattern pattern, final boolean includeSysTables) -508 throws IOException { -509 return executeCallable(new MasterCallable<TableName[]>(getConnection(), -510 getRpcControllerFactory()) { -511 @Override -512 protected TableName[] rpcCall() throws Exception { -513 GetTableNamesRequest req = -514 RequestConverter.buildGetTableNamesRequest(pattern, includeSysTables); -515 return ProtobufUtil.getTableNameArray(master.getTableNames(getRpcController(), req) -516 .getTableNamesList()); -517 } -518 }); -519 } -520 -521 @Override -522 public TableName[] listTableNames(final String regex, final boolean includeSysTables) -523 throws IOException { -524 return listTableNames(Pattern.compile(regex), includeSysTables); -525 } -526 -527 @Override -528 public HTableDescriptor getTableDescriptor(final TableName tableName) throws IOException { -529 return getHTableDescriptor(tableName, getConnection(), rpcCallerFactory, rpcControllerFactory, -530 operationTimeout, 
rpcTimeout); -531 } -532 -533 static TableDescriptor getTableDescriptor(final TableName tableName, Connection connection, -534 RpcRetryingCallerFactory rpcCallerFactory, final RpcControllerFactory rpcControllerFactory, -535 int operationTimeout, int rpcTimeout) throws IOException { -536 if (tableName == null) return null; -537 TableDescriptor td = -538 executeCallable(new MasterCallable<TableDescriptor>(connection, rpcControllerFactory) { -539 @Override -540 protected TableDescriptor rpcCall() throws Exception { -541 GetTableDescriptorsRequest req = -542 RequestConverter.buildGetTableDescriptorsRequest(tableName); -543 GetTableDescriptorsResponse htds = master.getTableDescriptors(getRpcController(), req); -544 if (!htds.getTableSchemaList().isEmpty()) { -545 return ProtobufUtil.toTableDescriptor(htds.getTableSchemaList().get(0)); -546 } -547 return null; -548 } -549 }, rpcCallerFactory, operationTimeout, rpcTimeout); -550 if (td != null) { -551 return td; -552 } -553 throw new TableNotFoundException(tableName.getNameAsString()); -554 } -555 -556 /** -557 * @deprecated since 2.0 version and will be removed in 3.0 version. -558 * use {@link #getTableDescriptor(TableName, -559 * Connection, RpcRetryingCallerFactory,RpcControllerFactory,int,int)} -560 */ -561 @Deprecated -562 static HTableDescriptor getHTableDescriptor(final TableName tableName, Connection connection, -563 RpcRetryingCallerFactory rpcCallerFactory, final RpcControllerFactory rpcControllerFactory, -564 int operationTimeout, int rpcTimeout) throws IOException { -565 if (tableName == null) return null; -566 HTableDescriptor htd = -567 executeCallable(new MasterCallable<HTableDescriptor>(connection, rpcControllerFactory) { -568 @Override -569 protected HTableDescriptor rpcCall() throws Exception { -570 GetTableDescriptorsRequest req = -571 RequestConverter.buildGetTableDescriptorsRequest(tableName); -572 GetTableDescriptorsResponse htds = master.getTableDescriptors(getRpcController(), req); -573 if (!htds.getTableSchemaList().isEmpty()) { -574 return new ImmutableHTableDescriptor(ProtobufUtil.toTableDescriptor(htds.getTableSchemaList().get(0))); -575 } -576 return null; -577 } -578 }, rpcCallerFactory, operationTimeout, rpcTimeout); -579 if (htd != null) { -580 return new ImmutableHTableDescriptor(htd); -581 } -582 throw new TableNotFoundException(tableName.getNameAsString()); -583 } -584 -585 private long getPauseTime(int tries) { -586 int triesCount = tries; -587 if (triesCount >= HConstants.RETRY_BACKOFF.length) { -588 triesCount = HConstants.RETRY_BACKOFF.length - 1; -589 } -590 return this.pause * HConstants.RETRY_BACKOFF[triesCount]; -591 } -592 -593 @Override -594 public void createTable(TableDescriptor desc) -595 throws IOException { -596 createTable(desc, null); -597 } -598 -599 @Override -600 public void createTable(TableDescriptor desc, byte [] startKey, -601 byte [] endKey, int numRegions) -602 throws IOException { -603 if(numRegions < 3) { -604 throw new IllegalArgumentException("Must create at least three regions"); -605 } else if(Bytes.compareTo(startKey, endKey) >= 0) { -606 throw new IllegalArgumentException("Start key must be smaller than end key"); -607 } -608 if (numRegions == 3) { -609 createTable(desc, new byte[][]{startKey, endKey}); -610 return; -611 } -612 byte [][] splitKeys = Bytes.split(startKey, endKey, numRegions - 3); -613 if(splitKeys == null || splitKeys.length != numRegions - 1) { -614 throw new IllegalArgumentException("Unable to split key range into enough regions"); -615 } -616 
createTable(desc, splitKeys); -617 } -618 -619 @Override -620 public void createTable(final TableDescriptor desc, byte [][] splitKeys) -621 throws IOException { -622 get(createTableAsync(desc, splitKeys), syncWaitTimeout, TimeUnit.MILLISECONDS); -623 } -624 -625 @Override -626 public Future<Void> createTableAsync(final TableDescriptor desc, final byte[][] splitKeys) -627 throws IOException { -628 if (desc.getTableName() == null) { -629 throw new IllegalArgumentException("TableName cannot be null"); -630 } -631 if (splitKeys != null && splitKeys.length > 0) { -632 Arrays.sort(splitKeys, Bytes.BYTES_COMPARATOR); -633 // Verify there are no duplicate split keys -634 byte[] lastKey = null; -635 for (byte[] splitKey : splitKeys) { -636 if (Bytes.compareTo(splitKey, HConstants.EMPTY_BYTE_ARRAY) == 0) {
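The createTable overloads above give two ways to pre-split a new table: pass explicit split keys, or pass a start key, end key, and region count and let Bytes.split() derive the numRegions - 1 keys (hence the requirement of at least three regions and startKey < endKey). A sketch of the explicit-keys form, with hypothetical table and family names and assuming the HBase 2.x TableDescriptorBuilder API:

    TableDescriptor desc = TableDescriptorBuilder.newBuilder(TableName.valueOf("t1"))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf"))
        .build();
    byte[][] splits = { Bytes.toBytes("g"), Bytes.toBytes("p") };   // N split keys -> N + 1 regions
    admin.createTable(desc, splits);
    // The (startKey, endKey, numRegions) overload shown above does the equivalent,
    // deriving its split keys with Bytes.split(startKey, endKey, numRegions - 3).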