From: busbey@apache.org
To: commits@hbase.apache.org
Reply-To: dev@hbase.apache.org
Date: Mon, 11 Sep 2017 07:42:53 -0000
Subject: [26/50] [abbrv] hbase git commit: HBASE-18106 Redo ProcedureInfo and LockInfo

HBASE-18106 Redo ProcedureInfo and LockInfo

Main changes:
- ProcedureInfo and LockInfo were removed; we use JSON instead of them
- Procedure and LockedResource are their server-side equivalents
- The Procedure protobuf's state_data field became obsolete; it is kept only
  for reading previously written WALs
- The Procedure protobuf contains a state_message field, which stores the
  internal state messages (Any type instead of bytes)
- Procedure.serializeStateData and deserializeStateData were changed slightly
- Procedures' internal states are available on the client side
- Procedures are displayed on the web UI and in the shell in the following
  jruby format:
  { ID => '1', PARENT_ID => '-1', PARAMETERS => [ ..extra state information.. ] }
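As a quick illustration of the reworked client API, here is a minimal sketch of
fetching both lists through the synchronous Admin interface. Only
getProcedures()/getLocks() come from this commit; the connection boilerplate is
standard HBase client setup:

import java.io.IOException;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class ListProceduresAsJson {
  public static void main(String[] args) throws IOException {
    try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
        Admin admin = connection.getAdmin()) {
      // Both methods now return raw JSON strings instead of typed arrays.
      String proceduresJson = admin.getProcedures(); // formerly ProcedureInfo[] listProcedures()
      String locksJson = admin.getLocks();           // formerly LockInfo[] listLocks()
      System.out.println(proceduresJson);
      System.out.println(locksJson);
    }
  }
}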
Signed-off-by: Michael Stack

Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/359fed7b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/359fed7b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/359fed7b

Branch: refs/heads/HBASE-18467
Commit: 359fed7b4b62e4dc7818a211735630cf0e10fc14
Parents: 87c3ae9
Author: Balazs Meszaros
Authored: Wed Sep 6 15:21:32 2017 +0200
Committer: Michael Stack
Committed: Fri Sep 8 10:24:04 2017 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hbase/client/Admin.java | 14 +-
 .../apache/hadoop/hbase/client/AsyncAdmin.java | 12 +-
 .../hadoop/hbase/client/AsyncHBaseAdmin.java | 10 +-
 .../hbase/client/ConnectionImplementation.java | 12 +-
 .../apache/hadoop/hbase/client/HBaseAdmin.java | 48 +-
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java | 92 ++--
 .../client/ShortCircuitMasterConnection.java | 139 +++++-
 .../protobuf/ProtobufMessageConverter.java | 159 +++++++
 .../hbase/shaded/protobuf/ProtobufUtil.java | 197 +-------
 .../hbase/shaded/protobuf/TestProtobufUtil.java | 466 +++++++++++++++++++
 .../org/apache/hadoop/hbase/ProcedureInfo.java | 215 ---------
 .../hadoop/hbase/procedure2/LockInfo.java | 128 -----
 .../apache/hadoop/hbase/util/JRubyFormat.java | 151 ++++++
 .../hadoop/hbase/util/TestJRubyFormat.java | 54 +++
 .../hadoop/hbase/procedure2/LockType.java | 26 ++
 .../hadoop/hbase/procedure2/LockedResource.java | 69 +++
 .../hbase/procedure2/LockedResourceType.java | 26 ++
 .../hadoop/hbase/procedure2/Procedure.java | 12 +-
 .../hbase/procedure2/ProcedureExecutor.java | 42 +-
 .../procedure2/ProcedureInMemoryChore.java | 12 +-
 .../hbase/procedure2/ProcedureScheduler.java | 8 +-
 .../procedure2/ProcedureStateSerializer.java | 28 ++
 .../hadoop/hbase/procedure2/ProcedureUtil.java | 194 ++++----
 .../hbase/procedure2/SequentialProcedure.java | 13 +-
 .../procedure2/SimpleProcedureScheduler.java | 5 +-
 .../hbase/procedure2/StateMachineProcedure.java | 12 +-
 .../procedure2/ProcedureTestingUtility.java | 33 +-
 .../hbase/procedure2/TestProcedureEvents.java | 26 +-
 .../hbase/procedure2/TestProcedureRecovery.java | 21 +-
 .../procedure2/TestProcedureReplayOrder.java | 16 +-
 .../procedure2/TestProcedureSuspended.java | 8 +-
 .../hbase/procedure2/TestProcedureToString.java | 9 +-
 .../hbase/procedure2/TestProcedureUtil.java | 27 +-
 .../hbase/procedure2/TestYieldProcedures.java | 9 +-
 .../store/wal/TestWALProcedureStore.java | 21 +-
 .../src/main/protobuf/LockService.proto | 21 +-
 .../src/main/protobuf/Master.proto | 18 +-
 .../src/main/protobuf/Procedure.proto | 4 +-
 .../hbase/rsgroup/RSGroupAdminServer.java | 11 +-
 .../hbase/coprocessor/MasterObserver.java | 33 +-
 .../master/ExpiredMobFileCleanerChore.java | 4 +-
 .../org/apache/hadoop/hbase/master/HMaster.java | 30 +-
 .../hbase/master/MasterCoprocessorHost.java | 25 +-
 .../hbase/master/MasterMobCompactionThread.java | 4 +-
 .../hadoop/hbase/master/MasterRpcServices.java | 156 ++++++-
 .../hadoop/hbase/master/MasterServices.java | 12 +-
 .../hadoop/hbase/master/MobCompactionChore.java | 4 +-
 .../master/assignment/AssignProcedure.java | 14 +-
 .../assignment/GCMergedRegionsProcedure.java | 18 +-
 .../master/assignment/GCRegionProcedure.java | 18 +-
 .../assignment/MergeTableRegionsProcedure.java | 24 +-
 .../master/assignment/MoveRegionProcedure.java | 18 +-
 .../assignment/SplitTableRegionProcedure.java | 19 +-
 .../master/assignment/UnassignProcedure.java | 18 +-
 .../hbase/master/locking/LockManager.java | 33 +-
 .../hbase/master/locking/LockProcedure.java | 17 +-
 .../AbstractStateMachineRegionProcedure.java | 21 +-
 .../procedure/AddColumnFamilyProcedure.java | 17 +-
 .../procedure/CloneSnapshotProcedure.java | 17 +-
 .../procedure/CreateNamespaceProcedure.java | 18 +-
 .../master/procedure/CreateTableProcedure.java | 17 +-
 .../procedure/DeleteColumnFamilyProcedure.java | 17 +-
 .../procedure/DeleteNamespaceProcedure.java | 18 +-
 .../master/procedure/DeleteTableProcedure.java | 17 +-
 .../master/procedure/DisableTableProcedure.java | 18 +-
 .../master/procedure/EnableTableProcedure.java | 18 +-
 .../procedure/MasterProcedureScheduler.java | 111 ++---
 .../procedure/ModifyColumnFamilyProcedure.java | 18 +-
 .../procedure/ModifyNamespaceProcedure.java | 18 +-
 .../master/procedure/ModifyTableProcedure.java | 17 +-
 .../master/procedure/ProcedureDescriber.java | 83 ++++
 .../master/procedure/RecoverMetaProcedure.java | 18 +-
 .../procedure/RestoreSnapshotProcedure.java | 17 +-
 .../master/procedure/ServerCrashProcedure.java | 18 +-
 .../procedure/TruncateTableProcedure.java | 17 +-
 .../master/snapshot/TakeSnapshotHandler.java | 11 +-
 .../hbase/security/access/AccessController.java | 32 +-
 .../hbase-webapps/master/procedures.jsp | 43 +-
 .../apache/hadoop/hbase/client/TestAdmin2.java | 14 +-
 .../client/TestAsyncProcedureAdminApi.java | 25 +-
 .../hbase/coprocessor/TestMasterObserver.java | 75 +--
 .../hbase/master/MockNoopMasterServices.java | 9 +-
 .../hbase/master/locking/TestLockManager.java | 25 +-
 .../hbase/master/locking/TestLockProcedure.java | 50 +-
 .../procedure/TestMasterProcedureScheduler.java | 141 +++---
 .../master/procedure/TestProcedureAdmin.java | 12 +-
 .../hbase/procedure/TestProcedureDescriber.java | 83 ++++
 .../hadoop/hbase/protobuf/TestProtobufUtil.java | 41 --
 .../security/access/TestAccessController.java | 50 +-
 .../hbase/shaded/protobuf/TestProtobufUtil.java | 460 ------------
 hbase-shell/src/main/ruby/hbase/admin.rb | 4 +-
 .../src/main/ruby/shell/commands/list_locks.rb | 31 +-
 .../main/ruby/shell/commands/list_procedures.rb | 14 +-
 .../hbase/client/TestReplicationShell.java | 2 +-
 .../client/procedure/ShellTestProcedure.java | 87 ++++
 .../src/test/ruby/shell/list_locks_test.rb | 298 +++++-----
 .../src/test/ruby/shell/list_procedures_test.rb | 68 +++
 97 files changed, 2754 insertions(+), 2131 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index b19c107..26384c9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -36,7 +36,6 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.NamespaceNotFoundException;
-import org.apache.hadoop.hbase.ProcedureInfo;
 import org.apache.hadoop.hbase.RegionLoad;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableExistsException;
@@ -46,7 +45,6 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.replication.TableCFs;
 import org.apache.hadoop.hbase.client.security.SecurityCapability;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-import org.apache.hadoop.hbase.procedure2.LockInfo;
 import org.apache.hadoop.hbase.quotas.QuotaFilter;
 import org.apache.hadoop.hbase.quotas.QuotaRetriever;
 import org.apache.hadoop.hbase.quotas.QuotaSettings;
@@ -1454,19 +1452,19 @@ public interface Admin extends Abortable, Closeable {
     final boolean mayInterruptIfRunning) throws IOException;

   /**
-   * List procedures
-   * @return procedure list
+   * Get procedures.
+   * @return procedure list in JSON
    * @throws IOException
    */
-  ProcedureInfo[] listProcedures()
+  String getProcedures()
       throws IOException;

   /**
-   * List locks.
-   * @return lock list
+   * Get locks.
+   * @return lock list in JSON
    * @throws IOException if a remote or network exception occurs
    */
-  LockInfo[] listLocks()
+  String getLocks()
       throws IOException;

   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
index bdd047d..923a573 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
@@ -29,13 +29,11 @@ import java.util.regex.Pattern;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.ClusterStatus.Options;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.ProcedureInfo;
 import org.apache.hadoop.hbase.RegionLoad;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.procedure2.LockInfo;
 import org.apache.hadoop.hbase.quotas.QuotaFilter;
 import org.apache.hadoop.hbase.quotas.QuotaSettings;
 import org.apache.hadoop.hbase.client.RawAsyncTable.CoprocessorCallable;
@@ -804,15 +802,15 @@ public interface AsyncAdmin {

   /**
    * List procedures
-   * @return procedure list wrapped by {@link CompletableFuture}
+   * @return procedure list JSON wrapped by {@link CompletableFuture}
    */
-  CompletableFuture<List<ProcedureInfo>> listProcedures();
+  CompletableFuture<String> getProcedures();

   /**
-   * List procedure locks.
-   * @return lock list wrapped by {@link CompletableFuture}
+   * List locks.
+   * @return lock list JSON wrapped by {@link CompletableFuture}
    */
-  CompletableFuture<List<LockInfo>> listProcedureLocks();
+  CompletableFuture<String> getLocks();

   /**
    * Mark a region server as draining to prevent additional regions from getting assigned to it.
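For comparison, a sketch of the same calls through the asynchronous client. It
assumes the ConnectionFactory.createAsyncConnection entry point available on
this branch:

import java.util.concurrent.CompletableFuture;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.AsyncAdmin;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class AsyncLocksAsJson {
  public static void main(String[] args) throws Exception {
    try (AsyncConnection connection =
        ConnectionFactory.createAsyncConnection(HBaseConfiguration.create()).get()) {
      AsyncAdmin admin = connection.getAdmin();
      // The futures now complete with JSON strings rather than
      // List<ProcedureInfo> / List<LockInfo>.
      CompletableFuture<String> procedures = admin.getProcedures();
      CompletableFuture<String> locks = admin.getLocks();
      System.out.println(procedures.get());
      System.out.println(locks.get());
    }
  }
}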
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
index df077fc..b0b6ba6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java
@@ -38,7 +38,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.ClusterStatus.Options;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.ProcedureInfo;
 import org.apache.hadoop.hbase.RegionLoad;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -47,7 +46,6 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.RawAsyncTable.CoprocessorCallable;
 import org.apache.hadoop.hbase.client.replication.TableCFs;
 import org.apache.hadoop.hbase.client.security.SecurityCapability;
-import org.apache.hadoop.hbase.procedure2.LockInfo;
 import org.apache.hadoop.hbase.quotas.QuotaFilter;
 import org.apache.hadoop.hbase.quotas.QuotaSettings;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
@@ -469,13 +467,13 @@ public class AsyncHBaseAdmin implements AsyncAdmin {
   }

   @Override
-  public CompletableFuture<List<ProcedureInfo>> listProcedures() {
-    return wrap(rawAdmin.listProcedures());
+  public CompletableFuture<String> getProcedures() {
+    return wrap(rawAdmin.getProcedures());
   }

   @Override
-  public CompletableFuture<List<LockInfo>> listProcedureLocks() {
-    return wrap(rawAdmin.listProcedureLocks());
+  public CompletableFuture<String> getLocks() {
+    return wrap(rawAdmin.getLocks());
   }

   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
index fcd7c22..b274371 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
@@ -1294,17 +1294,17 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
   }

   @Override
-  public MasterProtos.ListProceduresResponse listProcedures(
+  public MasterProtos.GetProceduresResponse getProcedures(
       RpcController controller,
-      MasterProtos.ListProceduresRequest request) throws ServiceException {
-    return stub.listProcedures(controller, request);
+      MasterProtos.GetProceduresRequest request) throws ServiceException {
+    return stub.getProcedures(controller, request);
   }

   @Override
-  public MasterProtos.ListLocksResponse listLocks(
+  public MasterProtos.GetLocksResponse getLocks(
       RpcController controller,
-      MasterProtos.ListLocksRequest request) throws ServiceException {
-    return stub.listLocks(controller, request);
+      MasterProtos.GetLocksRequest request) throws ServiceException {
+    return stub.getLocks(controller, request);
   }

   @Override
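Because the RPC layer now hands back plain JSON, callers can post-process the
result with any JSON library. A hypothetical consumer using gson is sketched
below; the field names ("procId", "state", ...) follow JsonFormat's camelCase
rendering of ProcedureProtos.Procedure and are an assumption here, as is the
sample input:

import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonParser;

public class ParseProceduresJson {
  public static void main(String[] args) {
    // Sample shape only; real input would come from Admin.getProcedures().
    String proceduresJson = "[{\"procId\":\"1\",\"className\":\"org.example.DemoProcedure\","
        + "\"state\":\"SUCCESS\"}]";
    JsonArray procedures = new JsonParser().parse(proceduresJson).getAsJsonArray();
    for (JsonElement procedure : procedures) {
      String procId = procedure.getAsJsonObject().get("procId").getAsString();
      String state = procedure.getAsJsonObject().get("state").getAsString();
      System.out.println(procId + " -> " + state);
    }
  }
}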
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java ---------------------------------------------------------------------- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index c699676..5866439 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -59,7 +59,6 @@ import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.NamespaceNotFoundException; import org.apache.hadoop.hbase.NotServingRegionException; -import org.apache.hadoop.hbase.ProcedureInfo; import org.apache.hadoop.hbase.RegionLoad; import org.apache.hadoop.hbase.RegionLocations; import org.apache.hadoop.hbase.ServerName; @@ -79,7 +78,6 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.ipc.RpcControllerFactory; -import org.apache.hadoop.hbase.procedure2.LockInfo; import org.apache.hadoop.hbase.quotas.QuotaFilter; import org.apache.hadoop.hbase.quotas.QuotaRetriever; import org.apache.hadoop.hbase.quotas.QuotaSettings; @@ -108,7 +106,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema; -import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse; @@ -134,9 +131,13 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProced import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest; @@ -149,10 +150,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedur import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest; -import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest; -import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest; -import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest; @@ -180,7 +178,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMaster import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest; -import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos; @@ -2216,40 +2213,27 @@ public class HBaseAdmin implements Admin { } @Override - public ProcedureInfo[] listProcedures() throws IOException { - return executeCallable(new MasterCallable(getConnection(), + public String getProcedures() throws IOException { + return executeCallable(new MasterCallable(getConnection(), getRpcControllerFactory()) { @Override - protected ProcedureInfo[] rpcCall() throws Exception { - List procList = master.listProcedures( - getRpcController(), ListProceduresRequest.newBuilder().build()).getProcedureList(); - ProcedureInfo[] procInfoList = new ProcedureInfo[procList.size()]; - for (int i = 0; i < procList.size(); i++) { - procInfoList[i] = ProtobufUtil.toProcedureInfo(procList.get(i)); - } - return procInfoList; + protected String rpcCall() throws Exception { + GetProceduresRequest request = GetProceduresRequest.newBuilder().build(); + GetProceduresResponse response = master.getProcedures(getRpcController(), request); + return ProtobufUtil.toProcedureJson(response.getProcedureList()); } }); } @Override - public LockInfo[] listLocks() throws IOException { - return executeCallable(new MasterCallable(getConnection(), + public String getLocks() throws IOException { + return executeCallable(new MasterCallable(getConnection(), getRpcControllerFactory()) { @Override - protected LockInfo[] rpcCall() throws Exception { - ListLocksRequest request = ListLocksRequest.newBuilder().build(); - ListLocksResponse response = master.listLocks(getRpcController(), request); - List locksProto = response.getLockList(); - - LockInfo[] locks = new LockInfo[locksProto.size()]; - - for (int i = 0; i < locks.length; i++) { - LockServiceProtos.LockInfo lockProto = locksProto.get(i); - locks[i] = ProtobufUtil.toLockInfo(lockProto); - } - - return 
locks; + protected String rpcCall() throws Exception { + GetLocksRequest request = GetLocksRequest.newBuilder().build(); + GetLocksResponse response = master.getLocks(getRpcController(), request); + return ProtobufUtil.toLockJson(response.getLockList()); } }); } http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java ---------------------------------------------------------------------- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java index c5eb168..47ca32f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java @@ -37,38 +37,29 @@ import java.util.function.BiConsumer; import java.util.function.Function; import java.util.regex.Pattern; import java.util.stream.Collectors; - -import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.Message; -import com.google.protobuf.RpcChannel; - -import org.apache.hadoop.hbase.shaded.io.netty.util.Timeout; -import org.apache.hadoop.hbase.shaded.io.netty.util.TimerTask; - import java.util.stream.Stream; import org.apache.commons.io.IOUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.AsyncMetaTableAccessor; import org.apache.hadoop.hbase.ClusterStatus; +import org.apache.hadoop.hbase.ClusterStatus.Options; +import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.MetaTableAccessor.QueryType; -import org.apache.hadoop.hbase.ProcedureInfo; +import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.RegionLoad; import org.apache.hadoop.hbase.RegionLocations; import org.apache.hadoop.hbase.ServerName; -import org.apache.hadoop.hbase.NamespaceDescriptor; -import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableExistsException; import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.AsyncMetaTableAccessor; import org.apache.hadoop.hbase.TableNotDisabledException; import org.apache.hadoop.hbase.TableNotEnabledException; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.UnknownRegionException; -import org.apache.hadoop.hbase.ClusterStatus.Options; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.AdminRequestCallerBuilder; import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.MasterRequestCallerBuilder; @@ -80,7 +71,6 @@ import org.apache.hadoop.hbase.client.replication.TableCFs; import org.apache.hadoop.hbase.client.security.SecurityCapability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.ipc.HBaseRpcController; -import org.apache.hadoop.hbase.procedure2.LockInfo; import org.apache.hadoop.hbase.quotas.QuotaFilter; import org.apache.hadoop.hbase.quotas.QuotaSettings; import org.apache.hadoop.hbase.quotas.QuotaTableUtil; @@ -88,13 +78,13 @@ import org.apache.hadoop.hbase.replication.ReplicationException; import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.io.netty.util.Timeout; +import org.apache.hadoop.hbase.shaded.io.netty.util.TimerTask; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesResponse; -import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest; -import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest; @@ -112,8 +102,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerR import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription; -import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType; +import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest; @@ -124,40 +114,44 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceReq import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse; -import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest; -import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse; -import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest; -import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse; -import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest; -import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse; -import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest; -import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest; @@ -176,12 +170,8 
@@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrM import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse; -import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest; -import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse; -import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest; -import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse; @@ -253,6 +243,10 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.ForeignExceptionUtil; import org.apache.hadoop.hbase.util.Pair; +import com.google.common.annotations.VisibleForTesting; +import com.google.protobuf.Message; +import com.google.protobuf.RpcChannel; + /** * The implementation of AsyncAdmin. * @since 2.0.0 @@ -1403,6 +1397,7 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin { return future; } + @Override public CompletableFuture addReplicationPeer(String peerId, ReplicationPeerConfig peerConfig) { return this @@ -1448,6 +1443,7 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin { .call(); } + @Override public CompletableFuture getReplicationPeerConfig(String peerId) { return this . newMasterCaller() @@ -1976,27 +1972,26 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin { } @Override - public CompletableFuture> listProcedures() { + public CompletableFuture getProcedures() { return this - .> newMasterCaller() + . newMasterCaller() .action( (controller, stub) -> this - .> call( - controller, stub, ListProceduresRequest.newBuilder().build(), - (s, c, req, done) -> s.listProcedures(c, req, done), - resp -> resp.getProcedureList().stream().map(ProtobufUtil::toProcedureInfo) - .collect(Collectors.toList()))).call(); + . call( + controller, stub, GetProceduresRequest.newBuilder().build(), + (s, c, req, done) -> s.getProcedures(c, req, done), + resp -> ProtobufUtil.toProcedureJson(resp.getProcedureList()))).call(); } @Override - public CompletableFuture> listProcedureLocks() { + public CompletableFuture getLocks() { return this - .> newMasterCaller() + . newMasterCaller() .action( - (controller, stub) -> this.> call( - controller, stub, ListLocksRequest.newBuilder().build(), - (s, c, req, done) -> s.listLocks(c, req, done), resp -> resp.getLockList().stream() - .map(ProtobufUtil::toLockInfo).collect(Collectors.toList()))).call(); + (controller, stub) -> this. 
call( + controller, stub, GetLocksRequest.newBuilder().build(), + (s, c, req, done) -> s.getLocks(c, req, done), + resp -> ProtobufUtil.toLockJson(resp.getLockList()))).call(); } @Override @@ -2220,6 +2215,7 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin { super(admin, tableName); } + @Override String getOperationType() { return "CREATE"; } @@ -2231,6 +2227,7 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin { super(admin, tableName); } + @Override String getOperationType() { return "DELETE"; } @@ -2248,6 +2245,7 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin { super(admin, tableName); } + @Override String getOperationType() { return "TRUNCATE"; } @@ -2259,6 +2257,7 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin { super(admin, tableName); } + @Override String getOperationType() { return "ENABLE"; } @@ -2270,6 +2269,7 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin { super(admin, tableName); } + @Override String getOperationType() { return "DISABLE"; } @@ -2281,6 +2281,7 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin { super(admin, tableName); } + @Override String getOperationType() { return "ADD_COLUMN_FAMILY"; } @@ -2292,6 +2293,7 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin { super(admin, tableName); } + @Override String getOperationType() { return "DELETE_COLUMN_FAMILY"; } @@ -2303,6 +2305,7 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin { super(admin, tableName); } + @Override String getOperationType() { return "MODIFY_COLUMN_FAMILY"; } @@ -2314,6 +2317,7 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin { super(admin, namespaceName); } + @Override String getOperationType() { return "CREATE_NAMESPACE"; } @@ -2325,6 +2329,7 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin { super(admin, namespaceName); } + @Override String getOperationType() { return "DELETE_NAMESPACE"; } @@ -2336,6 +2341,7 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin { super(admin, namespaceName); } + @Override String getOperationType() { return "MODIFY_NAMESPACE"; } @@ -2347,6 +2353,7 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin { super(admin, tableName); } + @Override String getOperationType() { return "MERGE_REGIONS"; } @@ -2358,6 +2365,7 @@ public class RawAsyncHBaseAdmin implements AsyncAdmin { super(admin, tableName); } + @Override String getOperationType() { return "SPLIT_REGION"; } http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java ---------------------------------------------------------------------- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java index a8050d4..17b21cf 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java @@ -22,7 +22,132 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse; -import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.*; +import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetLocksResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest; +import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProceduresResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse; +import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse; +import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest; @@ -189,15 +314,15 @@ public class ShortCircuitMasterConnection implements MasterKeepAliveConnection { } @Override - public ListProceduresResponse listProcedures(RpcController controller, - ListProceduresRequest request) throws ServiceException { - return stub.listProcedures(controller, request); + public GetProceduresResponse getProcedures(RpcController controller, + GetProceduresRequest request) throws ServiceException { + return stub.getProcedures(controller, request); } @Override - public ListLocksResponse listLocks(RpcController controller, - ListLocksRequest request) throws ServiceException { - return stub.listLocks(controller, request); + public GetLocksResponse getLocks(RpcController controller, + GetLocksRequest request) throws ServiceException { + return stub.getLocks(controller, request); } @Override http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufMessageConverter.java ---------------------------------------------------------------------- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufMessageConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufMessageConverter.java new file mode 100644 index 0000000..4ad78f5 --- /dev/null +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufMessageConverter.java @@ -0,0 +1,159 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.protobuf; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.shaded.com.google.gson.JsonArray; +import org.apache.hadoop.hbase.shaded.com.google.gson.JsonElement; +import org.apache.hadoop.hbase.shaded.com.google.gson.JsonObject; +import org.apache.hadoop.hbase.shaded.com.google.gson.JsonParser; +import org.apache.hadoop.hbase.shaded.com.google.gson.JsonPrimitive; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.util.JsonFormat; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.util.JsonFormat.TypeRegistry; +import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos; +import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos; +import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos; + +/** + * This class converts PB Messages to various representations, like: + *
+ * <ul>
+ * <li>JSON string: {@link #toJsonString(MessageOrBuilder)}</li>
+ * <li>JSON object (gson): {@link #toJsonElement(MessageOrBuilder)}</li>
+ * <li>Java objects (Boolean, Number, String, List, Map):
+ * {@link #toJavaObject(JsonElement)}</li>
+ * </ul>
+ */ +@InterfaceAudience.Private +public class ProtobufMessageConverter { + private static final String TYPE_KEY = "@type"; + + private static final JsonFormat.Printer jsonPrinter; + + static { + TypeRegistry.Builder builder = TypeRegistry.newBuilder(); + builder + .add(BytesValue.getDescriptor()) + .add(LockServiceProtos.getDescriptor().getMessageTypes()) + .add(MasterProcedureProtos.getDescriptor().getMessageTypes()) + .add(ProcedureProtos.getDescriptor().getMessageTypes()); + TypeRegistry typeRegistry = builder.build(); + jsonPrinter = JsonFormat.printer() + .usingTypeRegistry(typeRegistry) + .omittingInsignificantWhitespace(); + } + + private ProtobufMessageConverter() { + } + + public static String toJsonString(MessageOrBuilder messageOrBuilder) + throws InvalidProtocolBufferException { + return jsonPrinter.print(messageOrBuilder); + } + + private static void removeTypeFromJson(JsonElement json) { + if (json.isJsonArray()) { + for (JsonElement child: json.getAsJsonArray()) { + removeTypeFromJson(child); + } + } else if (json.isJsonObject()) { + Iterator> iterator = + json.getAsJsonObject().entrySet().iterator(); + + while (iterator.hasNext()) { + Entry entry = iterator.next(); + if (TYPE_KEY.equals(entry.getKey())) { + iterator.remove(); + } else { + removeTypeFromJson(entry.getValue()); + } + } + } + } + + public static JsonElement toJsonElement(MessageOrBuilder messageOrBuilder) + throws InvalidProtocolBufferException { + return toJsonElement(messageOrBuilder, true); + } + + public static JsonElement toJsonElement(MessageOrBuilder messageOrBuilder, + boolean removeType) throws InvalidProtocolBufferException { + String jsonString = toJsonString(messageOrBuilder); + JsonParser parser = new JsonParser(); + JsonElement element = parser.parse(jsonString); + if (removeType) { + removeTypeFromJson(element); + } + return element; + } + + private static Object toJavaObject(JsonElement element) { + if (element.isJsonNull()) { + return null; + } else if (element.isJsonPrimitive()) { + JsonPrimitive primitive = element.getAsJsonPrimitive(); + if (primitive.isBoolean()) { + return primitive.getAsBoolean(); + } else if (primitive.isNumber()) { + return primitive.getAsNumber(); + } else if (primitive.isString()) { + return primitive.getAsString(); + } else { + return null; + } + } else if (element.isJsonArray()) { + JsonArray array = element.getAsJsonArray(); + List list = new ArrayList<>(); + + for (JsonElement arrayElement : array) { + Object javaObject = toJavaObject(arrayElement); + list.add(javaObject); + } + + return list; + } else if (element.isJsonObject()) { + JsonObject object = element.getAsJsonObject(); + Map map = new LinkedHashMap<>(); + + for (Entry entry: object.entrySet()) { + Object javaObject = toJavaObject(entry.getValue()); + map.put(entry.getKey(), javaObject); + } + + return map; + } else { + return null; + } + } + + public static Object toJavaObject(MessageOrBuilder messageOrBuilder) + throws InvalidProtocolBufferException { + JsonElement element = toJsonElement(messageOrBuilder); + return toJavaObject(element); + } +} http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java ---------------------------------------------------------------------- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java index e97b78d..71cd674 100644 --- 
http://git-wip-us.apache.org/repos/asf/hbase/blob/359fed7b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index e97b78d..71cd674 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -57,8 +57,6 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
-import org.apache.hadoop.hbase.ProcedureInfo;
-import org.apache.hadoop.hbase.ProcedureState;
 import org.apache.hadoop.hbase.ServerLoad;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
@@ -94,8 +92,8 @@ import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.io.LimitInputStream;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.master.RegionState;
-import org.apache.hadoop.hbase.procedure2.LockInfo;
 import org.apache.hadoop.hbase.protobuf.ProtobufMagic;
+import org.apache.hadoop.hbase.protobuf.ProtobufMessageConverter;
 import org.apache.hadoop.hbase.quotas.QuotaScope;
 import org.apache.hadoop.hbase.quotas.QuotaType;
 import org.apache.hadoop.hbase.quotas.SpaceViolationPolicy;
@@ -104,6 +102,8 @@ import org.apache.hadoop.hbase.replication.ReplicationLoadSink;
 import org.apache.hadoop.hbase.replication.ReplicationLoadSource;
 import org.apache.hadoop.hbase.security.visibility.Authorizations;
 import org.apache.hadoop.hbase.security.visibility.CellVisibility;
+import org.apache.hadoop.hbase.shaded.com.google.gson.JsonArray;
+import org.apache.hadoop.hbase.shaded.com.google.gson.JsonElement;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
@@ -164,7 +164,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDe
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest;
@@ -182,9 +181,7 @@ import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.DynamicClassLoader;
 import org.apache.hadoop.hbase.util.ExceptionUtil;
-import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
 import org.apache.hadoop.hbase.util.Methods;
-import org.apache.hadoop.hbase.util.NonceKey;
 import org.apache.hadoop.hbase.util.VersionInfo;

 import org.apache.hadoop.ipc.RemoteException;
@@ -202,7 +199,6 @@ import org.apache.hadoop.ipc.RemoteException;
     value="DP_CREATE_CLASSLOADER_INSIDE_DO_PRIVILEGED",
     justification="None. Address sometime.")
 @InterfaceAudience.Private // TODO: some clients (Hive, etc) use this class
 public final class ProtobufUtil {
-
   private ProtobufUtil() {
   }
@@ -3240,175 +3236,32 @@ public final class ProtobufUtil {
   }

   /**
-   * @return Convert the current {@link ProcedureInfo} into a Protocol Buffers Procedure
-   *         instance.
-   */
-  public static ProcedureProtos.Procedure toProtoProcedure(ProcedureInfo procedure) {
-    ProcedureProtos.Procedure.Builder builder = ProcedureProtos.Procedure.newBuilder();
-
-    builder.setClassName(procedure.getProcName());
-    builder.setProcId(procedure.getProcId());
-    builder.setSubmittedTime(procedure.getSubmittedTime());
-    builder.setState(ProcedureProtos.ProcedureState.valueOf(procedure.getProcState().name()));
-    builder.setLastUpdate(procedure.getLastUpdate());
-
-    if (procedure.hasParentId()) {
-      builder.setParentId(procedure.getParentId());
-    }
-
-    if (procedure.hasOwner()) {
-      builder.setOwner(procedure.getProcOwner());
-    }
-
-    if (procedure.isFailed()) {
-      builder.setException(ForeignExceptionUtil.toProtoForeignException(procedure.getException()));
-    }
-
-    if (procedure.hasResultData()) {
-      builder.setResult(UnsafeByteOperations.unsafeWrap(procedure.getResult()));
-    }
-
-    return builder.build();
-  }
-
-  /**
-   * Helper to convert the protobuf object.
-   * @return Convert the current Protocol Buffers Procedure to {@link ProcedureInfo}
-   *         instance.
+   * Helper to convert a list of protobuf Procedures to a JSON string.
+   * @return JSON string rendering of the given Protocol Buffers Procedures
    */
-  public static ProcedureInfo toProcedureInfo(ProcedureProtos.Procedure procedureProto) {
-    NonceKey nonceKey = null;
-
-    if (procedureProto.getNonce() != HConstants.NO_NONCE) {
-      nonceKey = new NonceKey(procedureProto.getNonceGroup(), procedureProto.getNonce());
-    }
-
-    return new ProcedureInfo(procedureProto.getProcId(), procedureProto.getClassName(),
-        procedureProto.hasOwner() ? procedureProto.getOwner() : null,
-        ProcedureState.valueOf(procedureProto.getState().name()),
-        procedureProto.hasParentId() ? procedureProto.getParentId() : -1, nonceKey,
-        procedureProto.hasException() ?
-          ForeignExceptionUtil.toIOException(procedureProto.getException()) : null,
-        procedureProto.getLastUpdate(), procedureProto.getSubmittedTime(),
-        procedureProto.hasResult() ?
-          procedureProto.getResult().toByteArray() : null);
-  }
-
-  public static LockServiceProtos.ResourceType toProtoResourceType(
-      LockInfo.ResourceType resourceType) {
-    switch (resourceType) {
-    case SERVER:
-      return LockServiceProtos.ResourceType.RESOURCE_TYPE_SERVER;
-    case NAMESPACE:
-      return LockServiceProtos.ResourceType.RESOURCE_TYPE_NAMESPACE;
-    case TABLE:
-      return LockServiceProtos.ResourceType.RESOURCE_TYPE_TABLE;
-    case REGION:
-      return LockServiceProtos.ResourceType.RESOURCE_TYPE_REGION;
-    default:
-      throw new IllegalArgumentException("Unknown resource type: " + resourceType);
-    }
-  }
-
-  public static LockInfo.ResourceType toResourceType(
-      LockServiceProtos.ResourceType resourceTypeProto) {
-    switch (resourceTypeProto) {
-    case RESOURCE_TYPE_SERVER:
-      return LockInfo.ResourceType.SERVER;
-    case RESOURCE_TYPE_NAMESPACE:
-      return LockInfo.ResourceType.NAMESPACE;
-    case RESOURCE_TYPE_TABLE:
-      return LockInfo.ResourceType.TABLE;
-    case RESOURCE_TYPE_REGION:
-      return LockInfo.ResourceType.REGION;
-    default:
-      throw new IllegalArgumentException("Unknown resource type: " + resourceTypeProto);
-    }
-  }
-
-  public static LockServiceProtos.LockType toProtoLockType(
-      LockInfo.LockType lockType) {
-    return LockServiceProtos.LockType.valueOf(lockType.name());
-  }
-
-  public static LockInfo.LockType toLockType(
-      LockServiceProtos.LockType lockTypeProto) {
-    return LockInfo.LockType.valueOf(lockTypeProto.name());
-  }
-
-  public static LockServiceProtos.WaitingProcedure toProtoWaitingProcedure(
-      LockInfo.WaitingProcedure waitingProcedure) {
-    LockServiceProtos.WaitingProcedure.Builder builder = LockServiceProtos.WaitingProcedure.newBuilder();
-
-    ProcedureProtos.Procedure procedureProto =
-        toProtoProcedure(waitingProcedure.getProcedure());
-
-    builder
-        .setLockType(toProtoLockType(waitingProcedure.getLockType()))
-        .setProcedure(procedureProto);
-
-    return builder.build();
-  }
-
-  public static LockInfo.WaitingProcedure toWaitingProcedure(
-      LockServiceProtos.WaitingProcedure waitingProcedureProto) {
-    LockInfo.WaitingProcedure waiting = new LockInfo.WaitingProcedure();
-
-    waiting.setLockType(toLockType(waitingProcedureProto.getLockType()));
-
-    ProcedureInfo procedure =
-        toProcedureInfo(waitingProcedureProto.getProcedure());
-    waiting.setProcedure(procedure);
-
-    return waiting;
-  }
-
-  public static LockServiceProtos.LockInfo toProtoLockInfo(LockInfo lock)
-  {
-    LockServiceProtos.LockInfo.Builder builder = LockServiceProtos.LockInfo.newBuilder();
-
-    builder
-        .setResourceType(toProtoResourceType(lock.getResourceType()))
-        .setResourceName(lock.getResourceName())
-        .setLockType(toProtoLockType(lock.getLockType()));
-
-    ProcedureInfo exclusiveLockOwnerProcedure = lock.getExclusiveLockOwnerProcedure();
-
-    if (exclusiveLockOwnerProcedure != null) {
-      Procedure exclusiveLockOwnerProcedureProto =
-          toProtoProcedure(lock.getExclusiveLockOwnerProcedure());
-      builder.setExclusiveLockOwnerProcedure(exclusiveLockOwnerProcedureProto);
-    }
-
-    builder.setSharedLockCount(lock.getSharedLockCount());
-
-    for (LockInfo.WaitingProcedure waitingProcedure : lock.getWaitingProcedures()) {
-      builder.addWaitingProcedures(toProtoWaitingProcedure(waitingProcedure));
+  public static String toProcedureJson(List<ProcedureProtos.Procedure> procProtos) {
+    JsonArray procJsons = new JsonArray(procProtos.size());
+    for (ProcedureProtos.Procedure procProto : procProtos) {
+      try {
+        JsonElement procJson = ProtobufMessageConverter.toJsonElement(procProto);
+        procJsons.add(procJson);
+      } catch (InvalidProtocolBufferException e) {
+        procJsons.add(e.toString());
+      }
     }
-
-    return builder.build();
+    return procJsons.toString();
   }

-  public static LockInfo toLockInfo(LockServiceProtos.LockInfo lockProto)
-  {
-    LockInfo lock = new LockInfo();
-
-    lock.setResourceType(toResourceType(lockProto.getResourceType()));
-    lock.setResourceName(lockProto.getResourceName());
-    lock.setLockType(toLockType(lockProto.getLockType()));
-
-    if (lockProto.hasExclusiveLockOwnerProcedure()) {
-      ProcedureInfo exclusiveLockOwnerProcedureProto =
-          toProcedureInfo(lockProto.getExclusiveLockOwnerProcedure());
-
-      lock.setExclusiveLockOwnerProcedure(exclusiveLockOwnerProcedureProto);
-    }
-
-    lock.setSharedLockCount(lockProto.getSharedLockCount());
-
-    for (LockServiceProtos.WaitingProcedure waitingProcedureProto : lockProto.getWaitingProceduresList()) {
-      lock.addWaitingProcedure(toWaitingProcedure(waitingProcedureProto));
+  public static String toLockJson(List<LockServiceProtos.LockedResource> lockedResourceProtos) {
+    JsonArray lockedResourceJsons = new JsonArray(lockedResourceProtos.size());
+    for (LockServiceProtos.LockedResource lockedResourceProto : lockedResourceProtos) {
+      try {
+        JsonElement lockedResourceJson =
+            ProtobufMessageConverter.toJsonElement(lockedResourceProto);
+        lockedResourceJsons.add(lockedResourceJson);
+      } catch (InvalidProtocolBufferException e) {
+        lockedResourceJsons.add(e.toString());
+      }
     }
-
-    return lock;
+    return lockedResourceJsons.toString();
   }
 }
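A similarly hypothetical driver for the two helpers above (the class name and the single-element input list are invented; in the patch these methods produce the JSON surfaced for procedures and locks):

    import java.util.Collections;
    import java.util.List;
    import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;

    public class ProcedureJsonDemo {
      public static void main(String[] args) {
        // One made-up, already-finished procedure.
        List<ProcedureProtos.Procedure> procs = Collections.singletonList(
            ProcedureProtos.Procedure.newBuilder()
                .setClassName("org.apache.hadoop.hbase.master.procedure.DisableTableProcedure")
                .setProcId(42)
                .setSubmittedTime(0L)
                .setState(ProcedureProtos.ProcedureState.SUCCESS)
                .setLastUpdate(0L)
                .build());
        // Prints a JSON array with one object; no exception handling is needed
        // here because toProcedureJson catches conversion failures per element.
        System.out.println(ProtobufUtil.toProcedureJson(procs));
      }
    }

The per-element try/catch in both helpers is worth noting: a message that fails JSON conversion contributes its exception string to the output array instead of failing the whole call, so one malformed procedure or lock cannot blank the entire listing in the shell or web UI.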