From: mbertozzi@apache.org
To: commits@hbase.apache.org
Reply-To: dev@hbase.apache.org
Date: Tue, 24 Mar 2015 17:00:37 -0000
Subject: [4/8] hbase git commit: HBASE-13202 Procedure v2 - core framework

http://git-wip-us.apache.org/repos/asf/hbase/blob/e3db688a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ProcedureProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ProcedureProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ProcedureProtos.java
new file mode 100644
index 0000000..2826a0e
--- /dev/null
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ProcedureProtos.java
@@ -0,0 +1,7219 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: Procedure.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class ProcedureProtos {
+  private ProcedureProtos() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  /**
+   * Protobuf enum {@code ProcedureState}
+   */
+  public enum ProcedureState
+      implements com.google.protobuf.ProtocolMessageEnum {
+    /**
+     * INITIALIZING = 1;
+     *
+     *
+     * Procedure in construction, not yet added to the executor
+     * 
+ */ + INITIALIZING(0, 1), + /** + * RUNNABLE = 2; + * + *
+     * Procedure added to the executor, and ready to be executed
+     * 
+ */ + RUNNABLE(1, 2), + /** + * WAITING = 3; + * + *
+     * The procedure is waiting on children to be completed
+     * 
+ */ + WAITING(2, 3), + /** + * WAITING_TIMEOUT = 4; + * + *
+     * The procedure is waiting on a timeout or an external event
+     * 
+ */ + WAITING_TIMEOUT(3, 4), + /** + * ROLLEDBACK = 5; + * + *
+     * The procedure failed and was rolled back
+     * 
+ */ + ROLLEDBACK(4, 5), + /** + * FINISHED = 6; + * + *
+     * The procedure completed successfully
+     * 
+ */ + FINISHED(5, 6), + ; + + /** + * INITIALIZING = 1; + * + *
+     * Procedure in construction, not yet added to the executor
+     * 
+ */ + public static final int INITIALIZING_VALUE = 1; + /** + * RUNNABLE = 2; + * + *
+     * Procedure added to the executor, and ready to be executed
+     * 
+ */ + public static final int RUNNABLE_VALUE = 2; + /** + * WAITING = 3; + * + *
+     * The procedure is waiting on children to be completed
+     * 
+ */ + public static final int WAITING_VALUE = 3; + /** + * WAITING_TIMEOUT = 4; + * + *
+     * The procedure is waiting on a timeout or an external event
+     * 
+ */ + public static final int WAITING_TIMEOUT_VALUE = 4; + /** + * ROLLEDBACK = 5; + * + *
+     * The procedure failed and was rolled back
+     * 
+ */ + public static final int ROLLEDBACK_VALUE = 5; + /** + * FINISHED = 6; + * + *
+     * The procedure completed successfully
+     * 
+ */ + public static final int FINISHED_VALUE = 6; + + + public final int getNumber() { return value; } + + public static ProcedureState valueOf(int value) { + switch (value) { + case 1: return INITIALIZING; + case 2: return RUNNABLE; + case 3: return WAITING; + case 4: return WAITING_TIMEOUT; + case 5: return ROLLEDBACK; + case 6: return FINISHED; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public ProcedureState findValueByNumber(int number) { + return ProcedureState.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.getDescriptor().getEnumTypes().get(0); + } + + private static final ProcedureState[] VALUES = values(); + + public static ProcedureState valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private ProcedureState(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:ProcedureState) + } + + public interface ProcedureOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string class_name = 1; + /** + * required string class_name = 1; + * + *
+     * internal "static" state
+     * 
+ */ + boolean hasClassName(); + /** + * required string class_name = 1; + * + *
+     * internal "static" state
+     * 
+ */ + java.lang.String getClassName(); + /** + * required string class_name = 1; + * + *
+     * internal "static" state
+     * 
+ */ + com.google.protobuf.ByteString + getClassNameBytes(); + + // optional uint64 parent_id = 2; + /** + * optional uint64 parent_id = 2; + * + *
+     * parent if not a root-procedure, otherwise not set
+     * 
+ */ + boolean hasParentId(); + /** + * optional uint64 parent_id = 2; + * + *
+     * parent if not a root-procedure, otherwise not set
+     * 
+ */ + long getParentId(); + + // required uint64 proc_id = 3; + /** + * required uint64 proc_id = 3; + */ + boolean hasProcId(); + /** + * required uint64 proc_id = 3; + */ + long getProcId(); + + // required uint64 start_time = 4; + /** + * required uint64 start_time = 4; + */ + boolean hasStartTime(); + /** + * required uint64 start_time = 4; + */ + long getStartTime(); + + // optional string owner = 5; + /** + * optional string owner = 5; + */ + boolean hasOwner(); + /** + * optional string owner = 5; + */ + java.lang.String getOwner(); + /** + * optional string owner = 5; + */ + com.google.protobuf.ByteString + getOwnerBytes(); + + // required .ProcedureState state = 6; + /** + * required .ProcedureState state = 6; + * + *
+     * internal "runtime" state
+     * 
+ */ + boolean hasState(); + /** + * required .ProcedureState state = 6; + * + *
+     * internal "runtime" state
+     * 
+ */ + org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState getState(); + + // repeated uint32 stack_id = 7; + /** + * repeated uint32 stack_id = 7; + * + *
+     * stack indices in case the procedure was running
+     * 
+ */ + java.util.List getStackIdList(); + /** + * repeated uint32 stack_id = 7; + * + *
+     * stack indices in case the procedure was running
+     * 
+ */ + int getStackIdCount(); + /** + * repeated uint32 stack_id = 7; + * + *
+     * stack indices in case the procedure was running
+     * 
+ */ + int getStackId(int index); + + // required uint64 last_update = 8; + /** + * required uint64 last_update = 8; + */ + boolean hasLastUpdate(); + /** + * required uint64 last_update = 8; + */ + long getLastUpdate(); + + // optional uint32 timeout = 9; + /** + * optional uint32 timeout = 9; + */ + boolean hasTimeout(); + /** + * optional uint32 timeout = 9; + */ + int getTimeout(); + + // optional .ForeignExceptionMessage exception = 10; + /** + * optional .ForeignExceptionMessage exception = 10; + * + *
+     * user state/results
+     * 
+ */ + boolean hasException(); + /** + * optional .ForeignExceptionMessage exception = 10; + * + *
+     * user state/results
+     * 
+ */ + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getException(); + /** + * optional .ForeignExceptionMessage exception = 10; + * + *
+     * user state/results
+     * 
+ */ + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder getExceptionOrBuilder(); + + // optional bytes result = 11; + /** + * optional bytes result = 11; + * + *
+     * opaque (user) result structure
+     * 
+ */ + boolean hasResult(); + /** + * optional bytes result = 11; + * + *
+     * opaque (user) result structure
+     * 
+ */ + com.google.protobuf.ByteString getResult(); + + // optional bytes state_data = 12; + /** + * optional bytes state_data = 12; + * + *
+     * opaque (user) procedure internal-state
+     * 
+ */ + boolean hasStateData(); + /** + * optional bytes state_data = 12; + * + *
+     * opaque (user) procedure internal-state
+     * 
+ */ + com.google.protobuf.ByteString getStateData(); + } + /** + * Protobuf type {@code Procedure} + * + *
+   **
+   * Procedure metadata, serialized by the ProcedureStore so that the old state can be recovered.
+   * 
+ */ + public static final class Procedure extends + com.google.protobuf.GeneratedMessage + implements ProcedureOrBuilder { + // Use Procedure.newBuilder() to construct. + private Procedure(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private Procedure(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final Procedure defaultInstance; + public static Procedure getDefaultInstance() { + return defaultInstance; + } + + public Procedure getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Procedure( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + className_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + parentId_ = input.readUInt64(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + procId_ = input.readUInt64(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + startTime_ = input.readUInt64(); + break; + } + case 42: { + bitField0_ |= 0x00000010; + owner_ = input.readBytes(); + break; + } + case 48: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState value = org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(6, rawValue); + } else { + bitField0_ |= 0x00000020; + state_ = value; + } + break; + } + case 56: { + if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + stackId_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000040; + } + stackId_.add(input.readUInt32()); + break; + } + case 58: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + if (!((mutable_bitField0_ & 0x00000040) == 0x00000040) && input.getBytesUntilLimit() > 0) { + stackId_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000040; + } + while (input.getBytesUntilLimit() > 0) { + stackId_.add(input.readUInt32()); + } + input.popLimit(limit); + break; + } + case 64: { + bitField0_ |= 0x00000040; + lastUpdate_ = input.readUInt64(); + break; + } + case 72: { + bitField0_ |= 0x00000080; + timeout_ = input.readUInt32(); + break; + } + case 82: { + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder subBuilder = null; + if (((bitField0_ & 0x00000100) == 0x00000100)) { + subBuilder = exception_.toBuilder(); + } + exception_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(exception_); + exception_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000100; + break; + } + case 90: 
{ + bitField0_ |= 0x00000200; + result_ = input.readBytes(); + break; + } + case 98: { + bitField0_ |= 0x00000400; + stateData_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + stackId_ = java.util.Collections.unmodifiableList(stackId_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.internal_static_Procedure_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.internal_static_Procedure_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.class, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public Procedure parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Procedure(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required string class_name = 1; + public static final int CLASS_NAME_FIELD_NUMBER = 1; + private java.lang.Object className_; + /** + * required string class_name = 1; + * + *
+     * internal "static" state
+     * 
+ */ + public boolean hasClassName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string class_name = 1; + * + *
+     * internal "static" state
+     * 
+ */ + public java.lang.String getClassName() { + java.lang.Object ref = className_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + className_ = s; + } + return s; + } + } + /** + * required string class_name = 1; + * + *
+     * internal "static" state
+     * 
+ */ + public com.google.protobuf.ByteString + getClassNameBytes() { + java.lang.Object ref = className_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + className_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional uint64 parent_id = 2; + public static final int PARENT_ID_FIELD_NUMBER = 2; + private long parentId_; + /** + * optional uint64 parent_id = 2; + * + *
+     * parent if not a root-procedure, otherwise not set
+     * 
+ */ + public boolean hasParentId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional uint64 parent_id = 2; + * + *
+     * parent if not a root-procedure, otherwise not set
+     * 
+ */ + public long getParentId() { + return parentId_; + } + + // required uint64 proc_id = 3; + public static final int PROC_ID_FIELD_NUMBER = 3; + private long procId_; + /** + * required uint64 proc_id = 3; + */ + public boolean hasProcId() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * required uint64 proc_id = 3; + */ + public long getProcId() { + return procId_; + } + + // required uint64 start_time = 4; + public static final int START_TIME_FIELD_NUMBER = 4; + private long startTime_; + /** + * required uint64 start_time = 4; + */ + public boolean hasStartTime() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * required uint64 start_time = 4; + */ + public long getStartTime() { + return startTime_; + } + + // optional string owner = 5; + public static final int OWNER_FIELD_NUMBER = 5; + private java.lang.Object owner_; + /** + * optional string owner = 5; + */ + public boolean hasOwner() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional string owner = 5; + */ + public java.lang.String getOwner() { + java.lang.Object ref = owner_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + owner_ = s; + } + return s; + } + } + /** + * optional string owner = 5; + */ + public com.google.protobuf.ByteString + getOwnerBytes() { + java.lang.Object ref = owner_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + owner_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required .ProcedureState state = 6; + public static final int STATE_FIELD_NUMBER = 6; + private org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState state_; + /** + * required .ProcedureState state = 6; + * + *
+     * internal "runtime" state
+     * 
+ */ + public boolean hasState() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * required .ProcedureState state = 6; + * + *
+     * internal "runtime" state
+     * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState getState() { + return state_; + } + + // repeated uint32 stack_id = 7; + public static final int STACK_ID_FIELD_NUMBER = 7; + private java.util.List stackId_; + /** + * repeated uint32 stack_id = 7; + * + *
+     * stack indices in case the procedure was running
+     * 
+ */ + public java.util.List + getStackIdList() { + return stackId_; + } + /** + * repeated uint32 stack_id = 7; + * + *
+     * stack indices in case the procedure was running
+     * 
+ */ + public int getStackIdCount() { + return stackId_.size(); + } + /** + * repeated uint32 stack_id = 7; + * + *
+     * stack indices in case the procedure was running
+     * 
+ */ + public int getStackId(int index) { + return stackId_.get(index); + } + + // required uint64 last_update = 8; + public static final int LAST_UPDATE_FIELD_NUMBER = 8; + private long lastUpdate_; + /** + * required uint64 last_update = 8; + */ + public boolean hasLastUpdate() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + /** + * required uint64 last_update = 8; + */ + public long getLastUpdate() { + return lastUpdate_; + } + + // optional uint32 timeout = 9; + public static final int TIMEOUT_FIELD_NUMBER = 9; + private int timeout_; + /** + * optional uint32 timeout = 9; + */ + public boolean hasTimeout() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + /** + * optional uint32 timeout = 9; + */ + public int getTimeout() { + return timeout_; + } + + // optional .ForeignExceptionMessage exception = 10; + public static final int EXCEPTION_FIELD_NUMBER = 10; + private org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage exception_; + /** + * optional .ForeignExceptionMessage exception = 10; + * + *
+     * user state/results
+     * 
+ */ + public boolean hasException() { + return ((bitField0_ & 0x00000100) == 0x00000100); + } + /** + * optional .ForeignExceptionMessage exception = 10; + * + *
+     * user state/results
+     * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getException() { + return exception_; + } + /** + * optional .ForeignExceptionMessage exception = 10; + * + *
+     * user state/results
+     * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder getExceptionOrBuilder() { + return exception_; + } + + // optional bytes result = 11; + public static final int RESULT_FIELD_NUMBER = 11; + private com.google.protobuf.ByteString result_; + /** + * optional bytes result = 11; + * + *
+     * opaque (user) result structure
+     * 
+ */ + public boolean hasResult() { + return ((bitField0_ & 0x00000200) == 0x00000200); + } + /** + * optional bytes result = 11; + * + *
+     * opaque (user) result structure
+     * 
+ */ + public com.google.protobuf.ByteString getResult() { + return result_; + } + + // optional bytes state_data = 12; + public static final int STATE_DATA_FIELD_NUMBER = 12; + private com.google.protobuf.ByteString stateData_; + /** + * optional bytes state_data = 12; + * + *
+     * opaque (user) procedure internal-state
+     * 
+ */ + public boolean hasStateData() { + return ((bitField0_ & 0x00000400) == 0x00000400); + } + /** + * optional bytes state_data = 12; + * + *
+     * opaque (user) procedure internal-state
+     * 
+ */ + public com.google.protobuf.ByteString getStateData() { + return stateData_; + } + + private void initFields() { + className_ = ""; + parentId_ = 0L; + procId_ = 0L; + startTime_ = 0L; + owner_ = ""; + state_ = org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState.INITIALIZING; + stackId_ = java.util.Collections.emptyList(); + lastUpdate_ = 0L; + timeout_ = 0; + exception_ = org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance(); + result_ = com.google.protobuf.ByteString.EMPTY; + stateData_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasClassName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasProcId()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasStartTime()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasState()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasLastUpdate()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, parentId_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, procId_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeUInt64(4, startTime_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBytes(5, getOwnerBytes()); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeEnum(6, state_.getNumber()); + } + for (int i = 0; i < stackId_.size(); i++) { + output.writeUInt32(7, stackId_.get(i)); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + output.writeUInt64(8, lastUpdate_); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + output.writeUInt32(9, timeout_); + } + if (((bitField0_ & 0x00000100) == 0x00000100)) { + output.writeMessage(10, exception_); + } + if (((bitField0_ & 0x00000200) == 0x00000200)) { + output.writeBytes(11, result_); + } + if (((bitField0_ & 0x00000400) == 0x00000400)) { + output.writeBytes(12, stateData_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getClassNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, parentId_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, procId_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(4, startTime_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(5, getOwnerBytes()); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(6, state_.getNumber()); + } + { + int dataSize = 0; + for (int i = 
0; i < stackId_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeUInt32SizeNoTag(stackId_.get(i)); + } + size += dataSize; + size += 1 * getStackIdList().size(); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(8, lastUpdate_); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(9, timeout_); + } + if (((bitField0_ & 0x00000100) == 0x00000100)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(10, exception_); + } + if (((bitField0_ & 0x00000200) == 0x00000200)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(11, result_); + } + if (((bitField0_ & 0x00000400) == 0x00000400)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(12, stateData_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure other = (org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure) obj; + + boolean result = true; + result = result && (hasClassName() == other.hasClassName()); + if (hasClassName()) { + result = result && getClassName() + .equals(other.getClassName()); + } + result = result && (hasParentId() == other.hasParentId()); + if (hasParentId()) { + result = result && (getParentId() + == other.getParentId()); + } + result = result && (hasProcId() == other.hasProcId()); + if (hasProcId()) { + result = result && (getProcId() + == other.getProcId()); + } + result = result && (hasStartTime() == other.hasStartTime()); + if (hasStartTime()) { + result = result && (getStartTime() + == other.getStartTime()); + } + result = result && (hasOwner() == other.hasOwner()); + if (hasOwner()) { + result = result && getOwner() + .equals(other.getOwner()); + } + result = result && (hasState() == other.hasState()); + if (hasState()) { + result = result && + (getState() == other.getState()); + } + result = result && getStackIdList() + .equals(other.getStackIdList()); + result = result && (hasLastUpdate() == other.hasLastUpdate()); + if (hasLastUpdate()) { + result = result && (getLastUpdate() + == other.getLastUpdate()); + } + result = result && (hasTimeout() == other.hasTimeout()); + if (hasTimeout()) { + result = result && (getTimeout() + == other.getTimeout()); + } + result = result && (hasException() == other.hasException()); + if (hasException()) { + result = result && getException() + .equals(other.getException()); + } + result = result && (hasResult() == other.hasResult()); + if (hasResult()) { + result = result && getResult() + .equals(other.getResult()); + } + result = result && (hasStateData() == other.hasStateData()); + if (hasStateData()) { + result = result && getStateData() + .equals(other.getStateData()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 
0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasClassName()) { + hash = (37 * hash) + CLASS_NAME_FIELD_NUMBER; + hash = (53 * hash) + getClassName().hashCode(); + } + if (hasParentId()) { + hash = (37 * hash) + PARENT_ID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getParentId()); + } + if (hasProcId()) { + hash = (37 * hash) + PROC_ID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getProcId()); + } + if (hasStartTime()) { + hash = (37 * hash) + START_TIME_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getStartTime()); + } + if (hasOwner()) { + hash = (37 * hash) + OWNER_FIELD_NUMBER; + hash = (53 * hash) + getOwner().hashCode(); + } + if (hasState()) { + hash = (37 * hash) + STATE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getState()); + } + if (getStackIdCount() > 0) { + hash = (37 * hash) + STACK_ID_FIELD_NUMBER; + hash = (53 * hash) + getStackIdList().hashCode(); + } + if (hasLastUpdate()) { + hash = (37 * hash) + LAST_UPDATE_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLastUpdate()); + } + if (hasTimeout()) { + hash = (37 * hash) + TIMEOUT_FIELD_NUMBER; + hash = (53 * hash) + getTimeout(); + } + if (hasException()) { + hash = (37 * hash) + EXCEPTION_FIELD_NUMBER; + hash = (53 * hash) + getException().hashCode(); + } + if (hasResult()) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResult().hashCode(); + } + if (hasStateData()) { + hash = (37 * hash) + STATE_DATA_FIELD_NUMBER; + hash = (53 * hash) + getStateData().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure parseDelimitedFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code Procedure} + * + *
+     **
+     * Procedure metadata, serialized by the ProcedureStore so that the old state can be recovered.
+     * 
+ */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.internal_static_Procedure_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.internal_static_Procedure_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.class, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getExceptionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + className_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + parentId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + procId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + startTime_ = 0L; + bitField0_ = (bitField0_ & ~0x00000008); + owner_ = ""; + bitField0_ = (bitField0_ & ~0x00000010); + state_ = org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState.INITIALIZING; + bitField0_ = (bitField0_ & ~0x00000020); + stackId_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000040); + lastUpdate_ = 0L; + bitField0_ = (bitField0_ & ~0x00000080); + timeout_ = 0; + bitField0_ = (bitField0_ & ~0x00000100); + if (exceptionBuilder_ == null) { + exception_ = org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance(); + } else { + exceptionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000200); + result_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000400); + stateData_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000800); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.internal_static_Procedure_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure build() { + org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure result = new org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure(this); + int from_bitField0_ = 
bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.className_ = className_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.parentId_ = parentId_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.procId_ = procId_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.startTime_ = startTime_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.owner_ = owner_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000020; + } + result.state_ = state_; + if (((bitField0_ & 0x00000040) == 0x00000040)) { + stackId_ = java.util.Collections.unmodifiableList(stackId_); + bitField0_ = (bitField0_ & ~0x00000040); + } + result.stackId_ = stackId_; + if (((from_bitField0_ & 0x00000080) == 0x00000080)) { + to_bitField0_ |= 0x00000040; + } + result.lastUpdate_ = lastUpdate_; + if (((from_bitField0_ & 0x00000100) == 0x00000100)) { + to_bitField0_ |= 0x00000080; + } + result.timeout_ = timeout_; + if (((from_bitField0_ & 0x00000200) == 0x00000200)) { + to_bitField0_ |= 0x00000100; + } + if (exceptionBuilder_ == null) { + result.exception_ = exception_; + } else { + result.exception_ = exceptionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000400) == 0x00000400)) { + to_bitField0_ |= 0x00000200; + } + result.result_ = result_; + if (((from_bitField0_ & 0x00000800) == 0x00000800)) { + to_bitField0_ |= 0x00000400; + } + result.stateData_ = stateData_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance()) return this; + if (other.hasClassName()) { + bitField0_ |= 0x00000001; + className_ = other.className_; + onChanged(); + } + if (other.hasParentId()) { + setParentId(other.getParentId()); + } + if (other.hasProcId()) { + setProcId(other.getProcId()); + } + if (other.hasStartTime()) { + setStartTime(other.getStartTime()); + } + if (other.hasOwner()) { + bitField0_ |= 0x00000010; + owner_ = other.owner_; + onChanged(); + } + if (other.hasState()) { + setState(other.getState()); + } + if (!other.stackId_.isEmpty()) { + if (stackId_.isEmpty()) { + stackId_ = other.stackId_; + bitField0_ = (bitField0_ & ~0x00000040); + } else { + ensureStackIdIsMutable(); + stackId_.addAll(other.stackId_); + } + onChanged(); + } + if (other.hasLastUpdate()) { + setLastUpdate(other.getLastUpdate()); + } + if (other.hasTimeout()) { + setTimeout(other.getTimeout()); + } + if (other.hasException()) { + mergeException(other.getException()); + } + if (other.hasResult()) { + setResult(other.getResult()); + } + if (other.hasStateData()) { + setStateData(other.getStateData()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasClassName()) { + + return false; + } + if (!hasProcId()) { + + return false; + } + if 
(!hasStartTime()) { + + return false; + } + if (!hasState()) { + + return false; + } + if (!hasLastUpdate()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required string class_name = 1; + private java.lang.Object className_ = ""; + /** + * required string class_name = 1; + * + *
+       * internal "static" state
+       * 
+ */ + public boolean hasClassName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string class_name = 1; + * + *
+       * internal "static" state
+       * 
+ */ + public java.lang.String getClassName() { + java.lang.Object ref = className_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + className_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string class_name = 1; + * + *
+       * internal "static" state
+       * 
+ */ + public com.google.protobuf.ByteString + getClassNameBytes() { + java.lang.Object ref = className_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + className_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string class_name = 1; + * + *
+       * internal "static" state
+       * 
+ */ + public Builder setClassName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + className_ = value; + onChanged(); + return this; + } + /** + * required string class_name = 1; + * + *
+       * internal "static" state
+       * 
+ */ + public Builder clearClassName() { + bitField0_ = (bitField0_ & ~0x00000001); + className_ = getDefaultInstance().getClassName(); + onChanged(); + return this; + } + /** + * required string class_name = 1; + * + *
+       * internal "static" state
+       * 
+ */ + public Builder setClassNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + className_ = value; + onChanged(); + return this; + } + + // optional uint64 parent_id = 2; + private long parentId_ ; + /** + * optional uint64 parent_id = 2; + * + *
+       * parent if not a root-procedure, otherwise not set
+       * 
+ */ + public boolean hasParentId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional uint64 parent_id = 2; + * + *
+       * parent if not a root-procedure, otherwise not set
+       * 
+ */ + public long getParentId() { + return parentId_; + } + /** + * optional uint64 parent_id = 2; + * + *
+       * parent if not a root-procedure, otherwise not set
+       * 
+ */ + public Builder setParentId(long value) { + bitField0_ |= 0x00000002; + parentId_ = value; + onChanged(); + return this; + } + /** + * optional uint64 parent_id = 2; + * + *
+       * parent if not a root-procedure, otherwise not set
+       * 
+ */ + public Builder clearParentId() { + bitField0_ = (bitField0_ & ~0x00000002); + parentId_ = 0L; + onChanged(); + return this; + } + + // required uint64 proc_id = 3; + private long procId_ ; + /** + * required uint64 proc_id = 3; + */ + public boolean hasProcId() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * required uint64 proc_id = 3; + */ + public long getProcId() { + return procId_; + } + /** + * required uint64 proc_id = 3; + */ + public Builder setProcId(long value) { + bitField0_ |= 0x00000004; + procId_ = value; + onChanged(); + return this; + } + /** + * required uint64 proc_id = 3; + */ + public Builder clearProcId() { + bitField0_ = (bitField0_ & ~0x00000004); + procId_ = 0L; + onChanged(); + return this; + } + + // required uint64 start_time = 4; + private long startTime_ ; + /** + * required uint64 start_time = 4; + */ + public boolean hasStartTime() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * required uint64 start_time = 4; + */ + public long getStartTime() { + return startTime_; + } + /** + * required uint64 start_time = 4; + */ + public Builder setStartTime(long value) { + bitField0_ |= 0x00000008; + startTime_ = value; + onChanged(); + return this; + } + /** + * required uint64 start_time = 4; + */ + public Builder clearStartTime() { + bitField0_ = (bitField0_ & ~0x00000008); + startTime_ = 0L; + onChanged(); + return this; + } + + // optional string owner = 5; + private java.lang.Object owner_ = ""; + /** + * optional string owner = 5; + */ + public boolean hasOwner() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * optional string owner = 5; + */ + public java.lang.String getOwner() { + java.lang.Object ref = owner_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + owner_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string owner = 5; + */ + public com.google.protobuf.ByteString + getOwnerBytes() { + java.lang.Object ref = owner_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + owner_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string owner = 5; + */ + public Builder setOwner( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + owner_ = value; + onChanged(); + return this; + } + /** + * optional string owner = 5; + */ + public Builder clearOwner() { + bitField0_ = (bitField0_ & ~0x00000010); + owner_ = getDefaultInstance().getOwner(); + onChanged(); + return this; + } + /** + * optional string owner = 5; + */ + public Builder setOwnerBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + owner_ = value; + onChanged(); + return this; + } + + // required .ProcedureState state = 6; + private org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState state_ = org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState.INITIALIZING; + /** + * required .ProcedureState state = 6; + * + *
+       * internal "runtime" state
+       * 
+ */ + public boolean hasState() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * required .ProcedureState state = 6; + * + *
+       * internal "runtime" state
+       * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState getState() { + return state_; + } + /** + * required .ProcedureState state = 6; + * + *
+       * internal "runtime" state
+       * 
+ */ + public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000020; + state_ = value; + onChanged(); + return this; + } + /** + * required .ProcedureState state = 6; + * + *
+       * internal "runtime" state
+       * 
+ */ + public Builder clearState() { + bitField0_ = (bitField0_ & ~0x00000020); + state_ = org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureState.INITIALIZING; + onChanged(); + return this; + } + + // repeated uint32 stack_id = 7; + private java.util.List stackId_ = java.util.Collections.emptyList(); + private void ensureStackIdIsMutable() { + if (!((bitField0_ & 0x00000040) == 0x00000040)) { + stackId_ = new java.util.ArrayList(stackId_); + bitField0_ |= 0x00000040; + } + } + /** + * repeated uint32 stack_id = 7; + * + *
+       * stack indices in case the procedure was running
+       * 
+ */ + public java.util.List + getStackIdList() { + return java.util.Collections.unmodifiableList(stackId_); + } + /** + * repeated uint32 stack_id = 7; + * + *
+       * stack indices in case the procedure was running
+       * 
+ */ + public int getStackIdCount() { + return stackId_.size(); + } + /** + * repeated uint32 stack_id = 7; + * + *
+       * stack indices in case the procedure was running
+       * 
+ */ + public int getStackId(int index) { + return stackId_.get(index); + } + /** + * repeated uint32 stack_id = 7; + * + *
+       * stack indices in case the procedure was running
+       * 
+ */ + public Builder setStackId( + int index, int value) { + ensureStackIdIsMutable(); + stackId_.set(index, value); + onChanged(); + return this; + } + /** + * repeated uint32 stack_id = 7; + * + *
+       * stack indices in case the procedure was running
+       * 
+ */ + public Builder addStackId(int value) { + ensureStackIdIsMutable(); + stackId_.add(value); + onChanged(); + return this; + } + /** + * repeated uint32 stack_id = 7; + * + *
+       * stack indices in case the procedure was running
+       * 
+ */ + public Builder addAllStackId( + java.lang.Iterable values) { + ensureStackIdIsMutable(); + super.addAll(values, stackId_); + onChanged(); + return this; + } + /** + * repeated uint32 stack_id = 7; + * + *
+       * stack indices in case the procedure was running
+       * 
+ */ + public Builder clearStackId() { + stackId_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000040); + onChanged(); + return this; + } + + // required uint64 last_update = 8; + private long lastUpdate_ ; + /** + * required uint64 last_update = 8; + */ + public boolean hasLastUpdate() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + /** + * required uint64 last_update = 8; + */ + public long getLastUpdate() { + return lastUpdate_; + } + /** + * required uint64 last_update = 8; + */ + public Builder setLastUpdate(long value) { + bitField0_ |= 0x00000080; + lastUpdate_ = value; + onChanged(); + return this; + } + /** + * required uint64 last_update = 8; + */ + public Builder clearLastUpdate() { + bitField0_ = (bitField0_ & ~0x00000080); + lastUpdate_ = 0L; + onChanged(); + return this; + } + + // optional uint32 timeout = 9; + private int timeout_ ; + /** + * optional uint32 timeout = 9; + */ + public boolean hasTimeout() { + return ((bitField0_ & 0x00000100) == 0x00000100); + } + /** + * optional uint32 timeout = 9; + */ + public int getTimeout() { + return timeout_; + } + /** + * optional uint32 timeout = 9; + */ + public Builder setTimeout(int value) { + bitField0_ |= 0x00000100; + timeout_ = value; + onChanged(); + return this; + } + /** + * optional uint32 timeout = 9; + */ + public Builder clearTimeout() { + bitField0_ = (bitField0_ & ~0x00000100); + timeout_ = 0; + onChanged(); + return this; + } + + // optional .ForeignExceptionMessage exception = 10; + private org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage exception_ = org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder> exceptionBuilder_; + /** + * optional .ForeignExceptionMessage exception = 10; + * + *
+       * user state/results
+       * 
+ */ + public boolean hasException() { + return ((bitField0_ & 0x00000200) == 0x00000200); + } + /** + * optional .ForeignExceptionMessage exception = 10; + * + *
+       * user state/results
+       * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getException() { + if (exceptionBuilder_ == null) { + return exception_; + } else { + return exceptionBuilder_.getMessage(); + } + } + /** + * optional .ForeignExceptionMessage exception = 10; + * + *
+       * user state/results
+       * 
+ */ + public Builder setException(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage value) { + if (exceptionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + exception_ = value; + onChanged(); + } else { + exceptionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000200; + return this; + } + /** + * optional .ForeignExceptionMessage exception = 10; + * + *
+       * user state/results
+       * 
+ */ + public Builder setException( + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder builderForValue) { + if (exceptionBuilder_ == null) { + exception_ = builderForValue.build(); + onChanged(); + } else { + exceptionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000200; + return this; + } + /** + * optional .ForeignExceptionMessage exception = 10; + * + *
+       * user state/results
+       * 
+ */ + public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage value) { + if (exceptionBuilder_ == null) { + if (((bitField0_ & 0x00000200) == 0x00000200) && + exception_ != org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance()) { + exception_ = + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.newBuilder(exception_).mergeFrom(value).buildPartial(); + } else { + exception_ = value; + } + onChanged(); + } else { + exceptionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000200; + return this; + } + /** + * optional .ForeignExceptionMessage exception = 10; + * + *
+       * user state/results
+       * 
+ */ + public Builder clearException() { + if (exceptionBuilder_ == null) { + exception_ = org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance(); + onChanged(); + } else { + exceptionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000200); + return this; + } + /** + * optional .ForeignExceptionMessage exception = 10; + * + *
+       * user state/results
+       * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder getExceptionBuilder() { + bitField0_ |= 0x00000200; + onChanged(); + return getExceptionFieldBuilder().getBuilder(); + } + /** + * optional .ForeignExceptionMessage exception = 10; + * + *
+       * user state/results
+       * 
+ */ + public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder getExceptionOrBuilder() { + if (exceptionBuilder_ != null) { + return exceptionBuilder_.getMessageOrBuilder(); + } else { + return exception_; + } + } + /** + * optional .ForeignExceptionMessage exception = 10; + * + *
+       * user state/results
+       * 
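The exception accessors above follow the standard optional sub-message pattern, backed by a SingleFieldBuilder once getExceptionBuilder() is first used. A minimal sketch of recording and reading back a failure is shown below; the helper names are invented, and the ForeignExceptionMessage is assumed to have been converted from the caught Throwable elsewhere:

    // import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage;
    // import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure;

    // Hypothetical helper: attach a failure to the procedure being persisted.
    static void markFailed(Procedure.Builder builder, ForeignExceptionMessage error) {
      builder.setException(error);   // optional .ForeignExceptionMessage exception = 10
    }

    // Hypothetical helper: read the failure back, if any.
    static ForeignExceptionMessage getFailure(Procedure proc) {
      return proc.hasException() ? proc.getException() : null;
    }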
+ */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder> + getExceptionFieldBuilder() { + if (exceptionBuilder_ == null) { + exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder>( + exception_, + getParentForChildren(), + isClean()); + exception_ = null; + } + return exceptionBuilder_; + } + + // optional bytes result = 11; + private com.google.protobuf.ByteString result_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes result = 11; + * + *
+       * opaque (user) result structure
+       * 
+ */ + public boolean hasResult() { + return ((bitField0_ & 0x00000400) == 0x00000400); + } + /** + * optional bytes result = 11; + * + *
+       * opaque (user) result structure
+       * 
+ */ + public com.google.protobuf.ByteString getResult() { + return result_; + } + /** + * optional bytes result = 11; + * + *
+       * opaque (user) result structure
+       * 
+ */ + public Builder setResult(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000400; + result_ = value; + onChanged(); + return this; + } + /** + * optional bytes result = 11; + * + *
+       * opaque (user) result structure
+       * 
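Because result is an opaque byte field, the generated setter only accepts a ByteString; whatever structure the user-level code wants to keep has to be serialized first. A small sketch, with an invented helper name and a payload assumed to come from the procedure's own serializer:

    // import com.google.protobuf.ByteString;
    // import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure;

    // Hypothetical helper: store (or clear) the caller-defined result payload.
    static void setOpaqueResult(Procedure.Builder builder, byte[] payload) {
      if (payload != null) {
        builder.setResult(ByteString.copyFrom(payload)); // optional bytes result = 11
      } else {
        builder.clearResult();                           // drop any previous result
      }
    }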
+ */ + public Builder clearResult() { + bitField0_ = (bitField0_ & ~0x00000400); + result_ = getDefaultInstance().getResult(); + onChanged(); + return this; + } + + // optional bytes state_data = 12; + private com.google.protobuf.ByteString stateData_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes state_data = 12; + * + *
+       * opaque (user) procedure internal-state
+       * 
+ */ + public boolean hasStateData() { + return ((bitField0_ & 0x00000800) == 0x00000800); + } + /** + * optional bytes state_data = 12; + * + *
+       * opaque (user) procedure internal-state
+       * 
+ */ + public com.google.protobuf.ByteString getStateData() { + return stateData_; + } + /** + * optional bytes state_data = 12; + * + *
+       * opaque (user) procedure internal-state
+       * 
+ */ + public Builder setStateData(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000800; + stateData_ = value; + onChanged(); + return this; + } + /** + * optional bytes state_data = 12; + * + *
+       * opaque (user) procedure internal-state
+       * 
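state_data works the same way as result: an opaque byte field whose content is owned by the procedure implementation. One plausible usage, sketched here with an invented helper, is to store another protobuf message such as the SequentialProcedureData type defined further down in this file:

    // import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos;

    // Hypothetical helper: serialize procedure-internal state into state_data.
    static void storeInternalState(ProcedureProtos.Procedure.Builder builder,
                                   boolean executed) {
      ProcedureProtos.SequentialProcedureData state =
          ProcedureProtos.SequentialProcedureData.newBuilder()
              .setExecuted(executed)                // required bool executed = 1
              .build();
      builder.setStateData(state.toByteString());   // optional bytes state_data = 12
    }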
+ */ + public Builder clearStateData() { + bitField0_ = (bitField0_ & ~0x00000800); + stateData_ = getDefaultInstance().getStateData(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Procedure) + } + + static { + defaultInstance = new Procedure(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Procedure) + } + + public interface SequentialProcedureDataOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool executed = 1; + /** + * required bool executed = 1; + */ + boolean hasExecuted(); + /** + * required bool executed = 1; + */ + boolean getExecuted(); + } + /** + * Protobuf type {@code SequentialProcedureData} + * + *
+   **
+   * SequentialProcedure data
+   * 
+ */ + public static final class SequentialProcedureData extends + com.google.protobuf.GeneratedMessage + implements SequentialProcedureDataOrBuilder { + // Use SequentialProcedureData.newBuilder() to construct. + private SequentialProcedureData(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private SequentialProcedureData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final SequentialProcedureData defaultInstance; + public static SequentialProcedureData getDefaultInstance() { + return defaultInstance; + } + + public SequentialProcedureData getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private SequentialProcedureData( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + executed_ = input.readBool(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.internal_static_SequentialProcedureData_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.internal_static_SequentialProcedureData_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData.class, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public SequentialProcedureData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new SequentialProcedureData(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required bool executed = 1; + public static final int EXECUTED_FIELD_NUMBER = 1; + private boolean executed_; + /** + * required bool executed = 1; + */ + public boolean hasExecuted() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required bool executed = 1; + */ + public boolean getExecuted() { + return 
executed_; + } + + private void initFields() { + executed_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasExecuted()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, executed_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, executed_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData other = (org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData) obj; + + boolean result = true; + result = result && (hasExecuted() == other.hasExecuted()); + if (hasExecuted()) { + result = result && (getExecuted() + == other.getExecuted()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasExecuted()) { + hash = (37 * hash) + EXECUTED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getExecuted()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code SequentialProcedureData} + * + *
+     **
+     * SequentialProcedure data
+     * 
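Putting the generated pieces of SequentialProcedureData together, a serialize/parse round trip looks roughly like the sketch below (the wrapper method is only there to make the checked InvalidProtocolBufferException explicit; it is not part of this patch):

    // import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData;

    static void roundTrip() throws com.google.protobuf.InvalidProtocolBufferException {
      SequentialProcedureData data = SequentialProcedureData.newBuilder()
          .setExecuted(true)                        // required bool executed = 1
          .build();                                 // build() fails if executed is unset

      com.google.protobuf.ByteString bytes = data.toByteString();

      SequentialProcedureData copy = SequentialProcedureData.parseFrom(bytes);
      assert copy.getExecuted();
    }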
+ */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureDataOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.internal_static_SequentialProcedureData_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.internal_static_SequentialProcedureData_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData.class, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + executed_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.internal_static_SequentialProcedureData_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData build() { + org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData result = new org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.executed_ = executed_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.SequentialProcedureData.getDefaultInstance()) return this; + if (other.hasExecuted()) { + 
setExecuted(other.getExecuted()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasExecuted()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.g