From: stack@apache.org
To: commits@hbase.apache.org
Reply-To: dev@hbase.apache.org
Date: Thu, 29 Sep 2016 19:37:19 -0000
Message-Id: <9e59cbf1f304438fbe293a774d5e25c6@git.apache.org>
In-Reply-To: <7ac0ae966da84a9f878a0e8d498da8a0@git.apache.org>
Subject: [05/51] [partial] hbase git commit: HBASE-16264 Figure how to deal with endpoints and shaded pb Shade our protobufs. Do it in a manner that makes it so we can still have in our API references to com.google.protobuf (and in REST). The c.g.p in API is for

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java
new file mode 100644
index 0000000..63553af
--- /dev/null
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java
@@ -0,0 +1,9239 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: RegionServerStatus.proto
+
+package org.apache.hadoop.hbase.shaded.protobuf.generated;
+
+public final class RegionServerStatusProtos {
+  private RegionServerStatusProtos() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface RegionServerStartupRequestOrBuilder
+    extends com.google.protobuf.MessageOrBuilder {
+
+    // required uint32 port = 1;
+    /**
+     * required uint32 port = 1;
+     *
+     *
+     ** Port number this regionserver is up on 
+     * 
+ */ + boolean hasPort(); + /** + * required uint32 port = 1; + * + *
+     ** Port number this regionserver is up on 
+     * 
+ */ + int getPort(); + + // required uint64 server_start_code = 2; + /** + * required uint64 server_start_code = 2; + * + *
+     ** This servers' startcode 
+     * 
+ */ + boolean hasServerStartCode(); + /** + * required uint64 server_start_code = 2; + * + *
+     ** This servers' startcode 
+     * 
+ */ + long getServerStartCode(); + + // required uint64 server_current_time = 3; + /** + * required uint64 server_current_time = 3; + * + *
+     ** Current time of the region server in ms 
+     * 
+ */ + boolean hasServerCurrentTime(); + /** + * required uint64 server_current_time = 3; + * + *
+     ** Current time of the region server in ms 
+     * 
+ */ + long getServerCurrentTime(); + + // optional string use_this_hostname_instead = 4; + /** + * optional string use_this_hostname_instead = 4; + * + *
+     ** hostname for region server, optional 
+     * 
+ */ + boolean hasUseThisHostnameInstead(); + /** + * optional string use_this_hostname_instead = 4; + * + *
+     ** hostname for region server, optional 
+     * 
+ */ + java.lang.String getUseThisHostnameInstead(); + /** + * optional string use_this_hostname_instead = 4; + * + *
+     ** hostname for region server, optional 
+     * 
+ */ + com.google.protobuf.ByteString + getUseThisHostnameInsteadBytes(); + } + /** + * Protobuf type {@code hbase.pb.RegionServerStartupRequest} + */ + public static final class RegionServerStartupRequest extends + com.google.protobuf.GeneratedMessage + implements RegionServerStartupRequestOrBuilder { + // Use RegionServerStartupRequest.newBuilder() to construct. + private RegionServerStartupRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private RegionServerStartupRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final RegionServerStartupRequest defaultInstance; + public static RegionServerStartupRequest getDefaultInstance() { + return defaultInstance; + } + + public RegionServerStartupRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RegionServerStartupRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + port_ = input.readUInt32(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + serverStartCode_ = input.readUInt64(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + serverCurrentTime_ = input.readUInt64(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + useThisHostnameInstead_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RegionServerStartupRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new RegionServerStartupRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required uint32 port = 1; + public static final int PORT_FIELD_NUMBER = 1; + private int port_; + /** + * required uint32 port = 1; + * + *
+     ** Port number this regionserver is up on 
+     * 
+ */ + public boolean hasPort() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required uint32 port = 1; + * + *
+     ** Port number this regionserver is up on 
+     * 
+ */ + public int getPort() { + return port_; + } + + // required uint64 server_start_code = 2; + public static final int SERVER_START_CODE_FIELD_NUMBER = 2; + private long serverStartCode_; + /** + * required uint64 server_start_code = 2; + * + *
+     ** This servers' startcode 
+     * 
+ */ + public boolean hasServerStartCode() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required uint64 server_start_code = 2; + * + *
+     ** This servers' startcode 
+     * 
+ */ + public long getServerStartCode() { + return serverStartCode_; + } + + // required uint64 server_current_time = 3; + public static final int SERVER_CURRENT_TIME_FIELD_NUMBER = 3; + private long serverCurrentTime_; + /** + * required uint64 server_current_time = 3; + * + *
+     ** Current time of the region server in ms 
+     * 
+ */ + public boolean hasServerCurrentTime() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * required uint64 server_current_time = 3; + * + *
+     ** Current time of the region server in ms 
+     * 
+ */ + public long getServerCurrentTime() { + return serverCurrentTime_; + } + + // optional string use_this_hostname_instead = 4; + public static final int USE_THIS_HOSTNAME_INSTEAD_FIELD_NUMBER = 4; + private java.lang.Object useThisHostnameInstead_; + /** + * optional string use_this_hostname_instead = 4; + * + *
+     ** hostname for region server, optional 
+     * 
+ */ + public boolean hasUseThisHostnameInstead() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional string use_this_hostname_instead = 4; + * + *
+     ** hostname for region server, optional 
+     * 
+ */ + public java.lang.String getUseThisHostnameInstead() { + java.lang.Object ref = useThisHostnameInstead_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + useThisHostnameInstead_ = s; + } + return s; + } + } + /** + * optional string use_this_hostname_instead = 4; + * + *
+     ** hostname for region server, optional 
+     * 
+ */ + public com.google.protobuf.ByteString + getUseThisHostnameInsteadBytes() { + java.lang.Object ref = useThisHostnameInstead_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + useThisHostnameInstead_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + port_ = 0; + serverStartCode_ = 0L; + serverCurrentTime_ = 0L; + useThisHostnameInstead_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasPort()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasServerStartCode()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasServerCurrentTime()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt32(1, port_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, serverStartCode_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, serverCurrentTime_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(4, getUseThisHostnameInsteadBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(1, port_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, serverStartCode_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, serverCurrentTime_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, getUseThisHostnameInsteadBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) obj; + + boolean result = true; + result = result && (hasPort() == other.hasPort()); + if (hasPort()) { + result = result && (getPort() + == other.getPort()); + } + result = result && (hasServerStartCode() == other.hasServerStartCode()); + if (hasServerStartCode()) { + result = result && (getServerStartCode() + == other.getServerStartCode()); + } + result = result && (hasServerCurrentTime() == other.hasServerCurrentTime()); + if 
(hasServerCurrentTime()) { + result = result && (getServerCurrentTime() + == other.getServerCurrentTime()); + } + result = result && (hasUseThisHostnameInstead() == other.hasUseThisHostnameInstead()); + if (hasUseThisHostnameInstead()) { + result = result && getUseThisHostnameInstead() + .equals(other.getUseThisHostnameInstead()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasPort()) { + hash = (37 * hash) + PORT_FIELD_NUMBER; + hash = (53 * hash) + getPort(); + } + if (hasServerStartCode()) { + hash = (37 * hash) + SERVER_START_CODE_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getServerStartCode()); + } + if (hasServerCurrentTime()) { + hash = (37 * hash) + SERVER_CURRENT_TIME_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getServerCurrentTime()); + } + if (hasUseThisHostnameInstead()) { + hash = (37 * hash) + USE_THIS_HOSTNAME_INSTEAD_FIELD_NUMBER; + hash = (53 * hash) + getUseThisHostnameInstead().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.RegionServerStartupRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + port_ = 0; + bitField0_ = (bitField0_ & ~0x00000001); + serverStartCode_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + serverCurrentTime_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + useThisHostnameInstead_ = ""; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupRequest_descriptor; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest 
getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest build() { + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest buildPartial() { + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest result = new org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.port_ = port_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.serverStartCode_ = serverStartCode_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.serverCurrentTime_ = serverCurrentTime_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.useThisHostnameInstead_ = useThisHostnameInstead_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest.getDefaultInstance()) return this; + if (other.hasPort()) { + setPort(other.getPort()); + } + if (other.hasServerStartCode()) { + setServerStartCode(other.getServerStartCode()); + } + if (other.hasServerCurrentTime()) { + setServerCurrentTime(other.getServerCurrentTime()); + } + if (other.hasUseThisHostnameInstead()) { + bitField0_ |= 0x00000008; + useThisHostnameInstead_ = other.useThisHostnameInstead_; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasPort()) { + + return false; + } + if (!hasServerStartCode()) { + + return false; + } + if (!hasServerCurrentTime()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { 
+ mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required uint32 port = 1; + private int port_ ; + /** + * required uint32 port = 1; + * + *
+       ** Port number this regionserver is up on 
+       * 
+ */ + public boolean hasPort() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required uint32 port = 1; + * + *
+       ** Port number this regionserver is up on 
+       * 
+ */ + public int getPort() { + return port_; + } + /** + * required uint32 port = 1; + * + *
+       ** Port number this regionserver is up on 
+       * 
+ */ + public Builder setPort(int value) { + bitField0_ |= 0x00000001; + port_ = value; + onChanged(); + return this; + } + /** + * required uint32 port = 1; + * + *
+       ** Port number this regionserver is up on 
+       * 
+ */ + public Builder clearPort() { + bitField0_ = (bitField0_ & ~0x00000001); + port_ = 0; + onChanged(); + return this; + } + + // required uint64 server_start_code = 2; + private long serverStartCode_ ; + /** + * required uint64 server_start_code = 2; + * + *
+       ** This servers' startcode 
+       * 
+ */ + public boolean hasServerStartCode() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required uint64 server_start_code = 2; + * + *
+       ** This servers' startcode 
+       * 
+ */ + public long getServerStartCode() { + return serverStartCode_; + } + /** + * required uint64 server_start_code = 2; + * + *
+       ** This servers' startcode 
+       * 
+ */ + public Builder setServerStartCode(long value) { + bitField0_ |= 0x00000002; + serverStartCode_ = value; + onChanged(); + return this; + } + /** + * required uint64 server_start_code = 2; + * + *
+       ** This servers' startcode 
+       * 
+ */ + public Builder clearServerStartCode() { + bitField0_ = (bitField0_ & ~0x00000002); + serverStartCode_ = 0L; + onChanged(); + return this; + } + + // required uint64 server_current_time = 3; + private long serverCurrentTime_ ; + /** + * required uint64 server_current_time = 3; + * + *
+       ** Current time of the region server in ms 
+       * 
+ */ + public boolean hasServerCurrentTime() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * required uint64 server_current_time = 3; + * + *
+       ** Current time of the region server in ms 
+       * 
+ */ + public long getServerCurrentTime() { + return serverCurrentTime_; + } + /** + * required uint64 server_current_time = 3; + * + *
+       ** Current time of the region server in ms 
+       * 
+ */ + public Builder setServerCurrentTime(long value) { + bitField0_ |= 0x00000004; + serverCurrentTime_ = value; + onChanged(); + return this; + } + /** + * required uint64 server_current_time = 3; + * + *
+       ** Current time of the region server in ms 
+       * 
+ */ + public Builder clearServerCurrentTime() { + bitField0_ = (bitField0_ & ~0x00000004); + serverCurrentTime_ = 0L; + onChanged(); + return this; + } + + // optional string use_this_hostname_instead = 4; + private java.lang.Object useThisHostnameInstead_ = ""; + /** + * optional string use_this_hostname_instead = 4; + * + *
+       ** hostname for region server, optional 
+       * 
+ */ + public boolean hasUseThisHostnameInstead() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * optional string use_this_hostname_instead = 4; + * + *
+       ** hostname for region server, optional 
+       * 
+ */ + public java.lang.String getUseThisHostnameInstead() { + java.lang.Object ref = useThisHostnameInstead_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + useThisHostnameInstead_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string use_this_hostname_instead = 4; + * + *
+       ** hostname for region server, optional 
+       * 
+ */ + public com.google.protobuf.ByteString + getUseThisHostnameInsteadBytes() { + java.lang.Object ref = useThisHostnameInstead_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + useThisHostnameInstead_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string use_this_hostname_instead = 4; + * + *
+       ** hostname for region server, optional 
+       * 
+ */ + public Builder setUseThisHostnameInstead( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + useThisHostnameInstead_ = value; + onChanged(); + return this; + } + /** + * optional string use_this_hostname_instead = 4; + * + *
+       ** hostname for region server, optional 
+       * 
+ */ + public Builder clearUseThisHostnameInstead() { + bitField0_ = (bitField0_ & ~0x00000008); + useThisHostnameInstead_ = getDefaultInstance().getUseThisHostnameInstead(); + onChanged(); + return this; + } + /** + * optional string use_this_hostname_instead = 4; + * + *
+       ** hostname for region server, optional 
+       * 
+ */ + public Builder setUseThisHostnameInsteadBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + useThisHostnameInstead_ = value; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionServerStartupRequest) + } + + static { + defaultInstance = new RegionServerStartupRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerStartupRequest) + } + + public interface RegionServerStartupResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .hbase.pb.NameStringPair map_entries = 1; + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * 
+ */ + java.util.List + getMapEntriesList(); + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * 
+ */ + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index); + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * 
+ */ + int getMapEntriesCount(); + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * 
+ */ + java.util.List + getMapEntriesOrBuilderList(); + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * 
+ */ + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder( + int index); + } + /** + * Protobuf type {@code hbase.pb.RegionServerStartupResponse} + */ + public static final class RegionServerStartupResponse extends + com.google.protobuf.GeneratedMessage + implements RegionServerStartupResponseOrBuilder { + // Use RegionServerStartupResponse.newBuilder() to construct. + private RegionServerStartupResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private RegionServerStartupResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final RegionServerStartupResponse defaultInstance; + public static RegionServerStartupResponse getDefaultInstance() { + return defaultInstance; + } + + public RegionServerStartupResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RegionServerStartupResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + mapEntries_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + mapEntries_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + mapEntries_ = java.util.Collections.unmodifiableList(mapEntries_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RegionServerStartupResponse parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionServerStartupResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .hbase.pb.NameStringPair map_entries = 1; + public static final int MAP_ENTRIES_FIELD_NUMBER = 1; + private java.util.List mapEntries_; + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * 
+ */ + public java.util.List getMapEntriesList() { + return mapEntries_; + } + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * 
+ */ + public java.util.List + getMapEntriesOrBuilderList() { + return mapEntries_; + } + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * 
+ */ + public int getMapEntriesCount() { + return mapEntries_.size(); + } + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * 
+ */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index) { + return mapEntries_.get(index); + } + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+     **
+     * Configuration for the regionserver to use: e.g. filesystem,
+     * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+     * etc
+     * 
+ */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder( + int index) { + return mapEntries_.get(index); + } + + private void initFields() { + mapEntries_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getMapEntriesCount(); i++) { + if (!getMapEntries(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < mapEntries_.size(); i++) { + output.writeMessage(1, mapEntries_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < mapEntries_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, mapEntries_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse other = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) obj; + + boolean result = true; + result = result && getMapEntriesList() + .equals(other.getMapEntriesList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getMapEntriesCount() > 0) { + hash = (37 * hash) + MAP_ENTRIES_FIELD_NUMBER; + hash = (53 * hash) + getMapEntriesList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + 
public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.RegionServerStartupResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.class, 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getMapEntriesFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (mapEntriesBuilder_ == null) { + mapEntries_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + mapEntriesBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerStartupResponse_descriptor; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse build() { + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse buildPartial() { + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse result = new org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse(this); + int from_bitField0_ = bitField0_; + if (mapEntriesBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + mapEntries_ = java.util.Collections.unmodifiableList(mapEntries_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.mapEntries_ = mapEntries_; + } else { + result.mapEntries_ = mapEntriesBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse.getDefaultInstance()) return this; + if (mapEntriesBuilder_ == null) { + if (!other.mapEntries_.isEmpty()) { + if (mapEntries_.isEmpty()) { + mapEntries_ = other.mapEntries_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureMapEntriesIsMutable(); + 
mapEntries_.addAll(other.mapEntries_); + } + onChanged(); + } + } else { + if (!other.mapEntries_.isEmpty()) { + if (mapEntriesBuilder_.isEmpty()) { + mapEntriesBuilder_.dispose(); + mapEntriesBuilder_ = null; + mapEntries_ = other.mapEntries_; + bitField0_ = (bitField0_ & ~0x00000001); + mapEntriesBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getMapEntriesFieldBuilder() : null; + } else { + mapEntriesBuilder_.addAllMessages(other.mapEntries_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getMapEntriesCount(); i++) { + if (!getMapEntries(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .hbase.pb.NameStringPair map_entries = 1; + private java.util.List mapEntries_ = + java.util.Collections.emptyList(); + private void ensureMapEntriesIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + mapEntries_ = new java.util.ArrayList(mapEntries_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> mapEntriesBuilder_; + + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+ */ + public java.util.List getMapEntriesList() { + if (mapEntriesBuilder_ == null) { + return java.util.Collections.unmodifiableList(mapEntries_); + } else { + return mapEntriesBuilder_.getMessageList(); + } + } + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+ */ + public int getMapEntriesCount() { + if (mapEntriesBuilder_ == null) { + return mapEntries_.size(); + } else { + return mapEntriesBuilder_.getCount(); + } + } + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+ */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair getMapEntries(int index) { + if (mapEntriesBuilder_ == null) { + return mapEntries_.get(index); + } else { + return mapEntriesBuilder_.getMessage(index); + } + } + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+ */ + public Builder setMapEntries( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { + if (mapEntriesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMapEntriesIsMutable(); + mapEntries_.set(index, value); + onChanged(); + } else { + mapEntriesBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+ */ + public Builder setMapEntries( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { + if (mapEntriesBuilder_ == null) { + ensureMapEntriesIsMutable(); + mapEntries_.set(index, builderForValue.build()); + onChanged(); + } else { + mapEntriesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+ */ + public Builder addMapEntries(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { + if (mapEntriesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMapEntriesIsMutable(); + mapEntries_.add(value); + onChanged(); + } else { + mapEntriesBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+ */ + public Builder addMapEntries( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair value) { + if (mapEntriesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureMapEntriesIsMutable(); + mapEntries_.add(index, value); + onChanged(); + } else { + mapEntriesBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+ */ + public Builder addMapEntries( + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { + if (mapEntriesBuilder_ == null) { + ensureMapEntriesIsMutable(); + mapEntries_.add(builderForValue.build()); + onChanged(); + } else { + mapEntriesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+ */ + public Builder addMapEntries( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { + if (mapEntriesBuilder_ == null) { + ensureMapEntriesIsMutable(); + mapEntries_.add(index, builderForValue.build()); + onChanged(); + } else { + mapEntriesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+ */ + public Builder addAllMapEntries( + java.lang.Iterable values) { + if (mapEntriesBuilder_ == null) { + ensureMapEntriesIsMutable(); + super.addAll(values, mapEntries_); + onChanged(); + } else { + mapEntriesBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+ */ + public Builder clearMapEntries() { + if (mapEntriesBuilder_ == null) { + mapEntries_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + mapEntriesBuilder_.clear(); + } + return this; + } + /** + * repeated .hbase.pb.NameStringPair map_entries = 1; + * + *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+       */
+      public Builder removeMapEntries(int index) {
+        if (mapEntriesBuilder_ == null) {
+          ensureMapEntriesIsMutable();
+          mapEntries_.remove(index);
+          onChanged();
+        } else {
+          mapEntriesBuilder_.remove(index);
+        }
+        return this;
+      }
+      /**
+       * repeated .hbase.pb.NameStringPair map_entries = 1;
+       *
+       *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder getMapEntriesBuilder(
+          int index) {
+        return getMapEntriesFieldBuilder().getBuilder(index);
+      }
+      /**
+       * repeated .hbase.pb.NameStringPair map_entries = 1;
+       *
+       *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getMapEntriesOrBuilder(
+          int index) {
+        if (mapEntriesBuilder_ == null) {
+          return mapEntries_.get(index);  } else {
+          return mapEntriesBuilder_.getMessageOrBuilder(index);
+        }
+      }
+      /**
+       * repeated .hbase.pb.NameStringPair map_entries = 1;
+       *
+       *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+       */
+      public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
+           getMapEntriesOrBuilderList() {
+        if (mapEntriesBuilder_ != null) {
+          return mapEntriesBuilder_.getMessageOrBuilderList();
+        } else {
+          return java.util.Collections.unmodifiableList(mapEntries_);
+        }
+      }
+      /**
+       * repeated .hbase.pb.NameStringPair map_entries = 1;
+       *
+       *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder addMapEntriesBuilder() {
+        return getMapEntriesFieldBuilder().addBuilder(
+            org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
+      }
+      /**
+       * repeated .hbase.pb.NameStringPair map_entries = 1;
+       *
+       *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+       */
+      public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder addMapEntriesBuilder(
+          int index) {
+        return getMapEntriesFieldBuilder().addBuilder(
+            index, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
+      }
+      /**
+       * repeated .hbase.pb.NameStringPair map_entries = 1;
+       *
+       *
+       **
+       * Configuration for the regionserver to use: e.g. filesystem,
+       * hbase rootdir, the hostname to use creating the RegionServer ServerName,
+       * etc
+       * 
+       */
+      public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder>
+           getMapEntriesBuilderList() {
+        return getMapEntriesFieldBuilder().getBuilderList();
+      }
+      private com.google.protobuf.RepeatedFieldBuilder<
+          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
+          getMapEntriesFieldBuilder() {
+        if (mapEntriesBuilder_ == null) {
+          mapEntriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>(
+                  mapEntries_,
+                  ((bitField0_ & 0x00000001) == 0x00000001),
+                  getParentForChildren(),
+                  isClean());
+          mapEntries_ = null;
+        }
+        return mapEntriesBuilder_;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hbase.pb.RegionServerStartupResponse)
+    }
+
+    static {
+      defaultInstance = new RegionServerStartupResponse(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.RegionServerStartupResponse)
+  }
+
+  public interface RegionServerReportRequestOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required .hbase.pb.ServerName server = 1;
+    /**
+     * required .hbase.pb.ServerName server = 1;
+     */
+    boolean hasServer();
+    /**
+     * required .hbase.pb.ServerName server = 1;
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer();
+    /**
+     * required .hbase.pb.ServerName server = 1;
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder();
+
+    // optional .hbase.pb.ServerLoad load = 2;
+    /**
+     * optional .hbase.pb.ServerLoad load = 2;
+     *
+     *
+     ** load the server is under 
+     * 
+     */
+    boolean hasLoad();
+    /**
+     * optional .hbase.pb.ServerLoad load = 2;
+     *
+     *
+     ** load the server is under 
+     * 
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad getLoad();
+    /**
+     * optional .hbase.pb.ServerLoad load = 2;
+     *
+     *
+     ** load the server is under 
+     * 
+     */
+    org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder getLoadOrBuilder();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.RegionServerReportRequest}
+   */
+  public static final class RegionServerReportRequest extends
+      com.google.protobuf.GeneratedMessage
+      implements RegionServerReportRequestOrBuilder {
+    // Use RegionServerReportRequest.newBuilder() to construct.
+    private RegionServerReportRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private RegionServerReportRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final RegionServerReportRequest defaultInstance;
+    public static RegionServerReportRequest getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public RegionServerReportRequest getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private RegionServerReportRequest(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000001) == 0x00000001)) {
+                subBuilder = server_.toBuilder();
+              }
+              server_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(server_);
+                server_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000001;
+              break;
+            }
+            case 18: {
+              org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.Builder subBuilder = null;
+              if (((bitField0_ & 0x00000002) == 0x00000002)) {
+                subBuilder = load_.toBuilder();
+              }
+              load_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(load_);
+                load_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00000002;
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportRequest_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportRequest_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<RegionServerReportRequest> PARSER =
+        new com.google.protobuf.AbstractParser<RegionServerReportRequest>() {
+      public RegionServerReportRequest parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new RegionServerReportRequest(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<RegionServerReportRequest> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required .hbase.pb.ServerName server = 1;
+    public static final int SERVER_FIELD_NUMBER = 1;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName server_;
+    /**
+     * required .hbase.pb.ServerName server = 1;
+     */
+    public boolean hasServer() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * required .hbase.pb.ServerName server = 1;
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServer() {
+      return server_;
+    }
+    /**
+     * required .hbase.pb.ServerName server = 1;
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
+      return server_;
+    }
+
+    // optional .hbase.pb.ServerLoad load = 2;
+    public static final int LOAD_FIELD_NUMBER = 2;
+    private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad load_;
+    /**
+     * optional .hbase.pb.ServerLoad load = 2;
+     *
+     *
+     ** load the server is under 
+     * 
+     */
+    public boolean hasLoad() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * optional .hbase.pb.ServerLoad load = 2;
+     *
+     *
+     ** load the server is under 
+     * 
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad getLoad() {
+      return load_;
+    }
+    /**
+     * optional .hbase.pb.ServerLoad load = 2;
+     *
+     *
+     ** load the server is under 
+     * 
+     */
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoadOrBuilder getLoadOrBuilder() {
+      return load_;
+    }
+
+    private void initFields() {
+      server_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
+      load_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad.getDefaultInstance();
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasServer()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (!getServer().isInitialized()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      if (hasLoad()) {
+        if (!getLoad().isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeMessage(1, server_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeMessage(2, load_);
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(1, server_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(2, load_);
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+        return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest other = (org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest) obj;
+
+      boolean result = true;
+      result = result && (hasServer() == other.hasServer());
+      if (hasServer()) {
+        result = result && getServer()
+            .equals(other.getServer());
+      }
+      result = result && (hasLoad() == other.hasLoad());
+      if (hasLoad()) {
+        result = result && getLoad()
+            .equals(other.getLoad());
+      }
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (hasServer()) {
+        hash = (37 * hash) + SERVER_FIELD_NUMBER;
+        hash = (53 * hash) + getServer().hashCode();
+      }
+      if (hasLoad()) {
+        hash = (37 * hash) + LOAD_FIELD_NUMBER;
+        hash = (53 * hash) + getLoad().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hbase.pb.RegionServerReportRequest}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+        implements org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequestOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportRequest_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.internal_static_hbase_pb_RegionServerReportRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.class, org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
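As a usage sketch (not part of the generated file above), the snippet below shows roughly how a caller could assemble a RegionServerReportRequest with these shaded classes and round-trip it through the wire format. The setServer()/setLoad() mutators and the ServerName builder accessors (setHostName/setPort/setStartCode) are assumed from the usual protoc builder pattern and from HBaseProtos/ClusterStatusProtos, which sit outside this diff; treat them as illustrative rather than definitive.

import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest;

public class RegionServerReportRequestSketch {
  public static void main(String[] args) throws Exception {
    // 'server' is the required field 1; the ServerName accessors used here are assumed
    // from HBaseProtos (host_name/port/start_code), which is not part of this diff.
    HBaseProtos.ServerName server = HBaseProtos.ServerName.newBuilder()
        .setHostName("rs-1.example.org")           // hypothetical hostname
        .setPort(16020)
        .setStartCode(System.currentTimeMillis())
        .build();

    // 'load' is the optional field 2; an empty ServerLoad is enough for this sketch.
    ClusterStatusProtos.ServerLoad load =
        ClusterStatusProtos.ServerLoad.newBuilder().build();

    // setServer/setLoad follow the standard generated-builder naming for these fields.
    RegionServerReportRequest request = RegionServerReportRequest.newBuilder()
        .setServer(server)
        .setLoad(load)
        .build();                                  // fails if required 'server' were missing

    // Round-trip using the parseFrom overloads generated in this file.
    byte[] wire = request.toByteArray();
    RegionServerReportRequest copy = RegionServerReportRequest.parseFrom(wire);
    System.out.println(copy.hasLoad() + " " + copy.getServer().getPort());
  }
}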