From: tedyu@apache.org
To: commits@hbase.apache.org
Date: Sat, 16 Sep 2017 16:34:37 -0000
Message-Id: <078fc31facd74eb99cca492c814a50a5@git.apache.org>
In-Reply-To: <8c85217baaad4707b312195172ad3281@git.apache.org>
References: <8c85217baaad4707b312195172ad3281@git.apache.org>
Subject: [2/3] hbase git commit: HBASE-18131 Add an hbase shell command to clear deadserver list in ServerManager

http://git-wip-us.apache.org/repos/asf/hbase/blob/90158955/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
index b3ae957..da111a4 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProtos.java
@@ -60631,6 +60631,2507 @@ public final class MasterProtos {
   // @@protoc_insertion_point(class_scope:hbase.pb.SecurityCapabilitiesResponse)
 }
+  public interface ListDeadServersRequestOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+  }
+  /**
+   * Protobuf type {@code hbase.pb.ListDeadServersRequest}
+   */
+  public static final class ListDeadServersRequest extends
+      com.google.protobuf.GeneratedMessage
+      implements ListDeadServersRequestOrBuilder {
+    // Use ListDeadServersRequest.newBuilder() to construct.
+ private ListDeadServersRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private ListDeadServersRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final ListDeadServersRequest defaultInstance; + public static ListDeadServersRequest getDefaultInstance() { + return defaultInstance; + } + + public ListDeadServersRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ListDeadServersRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListDeadServersRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListDeadServersRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ListDeadServersRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListDeadServersRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } 
+ + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest parseFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.ListDeadServersRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListDeadServersRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListDeadServersRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListDeadServersRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.ListDeadServersRequest) + } + + static { + defaultInstance = new ListDeadServersRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.ListDeadServersRequest) + } + + public interface ListDeadServersResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .hbase.pb.ServerName server_name = 1; + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + java.util.List + getServerNameList(); + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(int index); + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + int getServerNameCount(); + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + java.util.List + getServerNameOrBuilderList(); + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder( + int index); + } + /** + * Protobuf type {@code hbase.pb.ListDeadServersResponse} + */ + public static final class ListDeadServersResponse extends + com.google.protobuf.GeneratedMessage + implements ListDeadServersResponseOrBuilder { + // Use ListDeadServersResponse.newBuilder() to construct. 
+ private ListDeadServersResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private ListDeadServersResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final ListDeadServersResponse defaultInstance; + public static ListDeadServersResponse getDefaultInstance() { + return defaultInstance; + } + + public ListDeadServersResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ListDeadServersResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + serverName_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + serverName_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + serverName_ = java.util.Collections.unmodifiableList(serverName_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListDeadServersResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListDeadServersResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ListDeadServersResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ListDeadServersResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .hbase.pb.ServerName server_name = 1; + public static final int SERVER_NAME_FIELD_NUMBER = 1; + private java.util.List serverName_; + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public 
java.util.List getServerNameList() { + return serverName_; + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public java.util.List + getServerNameOrBuilderList() { + return serverName_; + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public int getServerNameCount() { + return serverName_.size(); + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(int index) { + return serverName_.get(index); + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder( + int index) { + return serverName_.get(index); + } + + private void initFields() { + serverName_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getServerNameCount(); i++) { + if (!getServerName(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < serverName_.size(); i++) { + output.writeMessage(1, serverName_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < serverName_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, serverName_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse) obj; + + boolean result = true; + result = result && getServerNameList() + .equals(other.getServerNameList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getServerNameCount() > 0) { + hash = (37 * hash) + SERVER_NAME_FIELD_NUMBER; + hash = (53 * hash) + getServerNameList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse 
parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.ListDeadServersResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListDeadServersResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListDeadServersResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getServerNameFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (serverNameBuilder_ == null) { + serverName_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + serverNameBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ListDeadServersResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse build() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse(this); + int from_bitField0_ = bitField0_; + if (serverNameBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + serverName_ = java.util.Collections.unmodifiableList(serverName_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.serverName_ = serverName_; + } else { + result.serverName_ = serverNameBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse.getDefaultInstance()) return this; + if (serverNameBuilder_ == null) { + if (!other.serverName_.isEmpty()) { + if (serverName_.isEmpty()) { + serverName_ = other.serverName_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureServerNameIsMutable(); + serverName_.addAll(other.serverName_); + } + onChanged(); + } + } else { + if 
(!other.serverName_.isEmpty()) { + if (serverNameBuilder_.isEmpty()) { + serverNameBuilder_.dispose(); + serverNameBuilder_ = null; + serverName_ = other.serverName_; + bitField0_ = (bitField0_ & ~0x00000001); + serverNameBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getServerNameFieldBuilder() : null; + } else { + serverNameBuilder_.addAllMessages(other.serverName_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getServerNameCount(); i++) { + if (!getServerName(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListDeadServersResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // repeated .hbase.pb.ServerName server_name = 1; + private java.util.List serverName_ = + java.util.Collections.emptyList(); + private void ensureServerNameIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + serverName_ = new java.util.ArrayList(serverName_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; + + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public java.util.List getServerNameList() { + if (serverNameBuilder_ == null) { + return java.util.Collections.unmodifiableList(serverName_); + } else { + return serverNameBuilder_.getMessageList(); + } + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public int getServerNameCount() { + if (serverNameBuilder_ == null) { + return serverName_.size(); + } else { + return serverNameBuilder_.getCount(); + } + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(int index) { + if (serverNameBuilder_ == null) { + return serverName_.get(index); + } else { + return serverNameBuilder_.getMessage(index); + } + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public Builder setServerName( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverNameBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureServerNameIsMutable(); + serverName_.set(index, value); + onChanged(); + } else { + serverNameBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public Builder setServerName( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (serverNameBuilder_ == null) { + ensureServerNameIsMutable(); + serverName_.set(index, builderForValue.build()); + onChanged(); + } else { + 
serverNameBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public Builder addServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverNameBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureServerNameIsMutable(); + serverName_.add(value); + onChanged(); + } else { + serverNameBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public Builder addServerName( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverNameBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureServerNameIsMutable(); + serverName_.add(index, value); + onChanged(); + } else { + serverNameBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public Builder addServerName( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (serverNameBuilder_ == null) { + ensureServerNameIsMutable(); + serverName_.add(builderForValue.build()); + onChanged(); + } else { + serverNameBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public Builder addServerName( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (serverNameBuilder_ == null) { + ensureServerNameIsMutable(); + serverName_.add(index, builderForValue.build()); + onChanged(); + } else { + serverNameBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public Builder addAllServerName( + java.lang.Iterable values) { + if (serverNameBuilder_ == null) { + ensureServerNameIsMutable(); + super.addAll(values, serverName_); + onChanged(); + } else { + serverNameBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public Builder clearServerName() { + if (serverNameBuilder_ == null) { + serverName_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + serverNameBuilder_.clear(); + } + return this; + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public Builder removeServerName(int index) { + if (serverNameBuilder_ == null) { + ensureServerNameIsMutable(); + serverName_.remove(index); + onChanged(); + } else { + serverNameBuilder_.remove(index); + } + return this; + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder( + int index) { + return getServerNameFieldBuilder().getBuilder(index); + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder( + int index) { + if (serverNameBuilder_ == null) { + return serverName_.get(index); } else { + return serverNameBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public java.util.List + getServerNameOrBuilderList() { + if (serverNameBuilder_ != null) { + return serverNameBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(serverName_); 
+ } + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addServerNameBuilder() { + return getServerNameFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addServerNameBuilder( + int index) { + return getServerNameFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public java.util.List + getServerNameBuilderList() { + return getServerNameFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getServerNameFieldBuilder() { + if (serverNameBuilder_ == null) { + serverNameBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + serverName_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + serverName_ = null; + } + return serverNameBuilder_; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.ListDeadServersResponse) + } + + static { + defaultInstance = new ListDeadServersResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.ListDeadServersResponse) + } + + public interface ClearDeadServersRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .hbase.pb.ServerName server_name = 1; + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + java.util.List + getServerNameList(); + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(int index); + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + int getServerNameCount(); + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + java.util.List + getServerNameOrBuilderList(); + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder( + int index); + } + /** + * Protobuf type {@code hbase.pb.ClearDeadServersRequest} + */ + public static final class ClearDeadServersRequest extends + com.google.protobuf.GeneratedMessage + implements ClearDeadServersRequestOrBuilder { + // Use ClearDeadServersRequest.newBuilder() to construct. 
+ private ClearDeadServersRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private ClearDeadServersRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final ClearDeadServersRequest defaultInstance; + public static ClearDeadServersRequest getDefaultInstance() { + return defaultInstance; + } + + public ClearDeadServersRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ClearDeadServersRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + serverName_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + serverName_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { + serverName_ = java.util.Collections.unmodifiableList(serverName_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ClearDeadServersRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ClearDeadServersRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public ClearDeadServersRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ClearDeadServersRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + // repeated .hbase.pb.ServerName server_name = 1; + public static final int SERVER_NAME_FIELD_NUMBER = 1; + private java.util.List serverName_; + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public 
java.util.List getServerNameList() { + return serverName_; + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public java.util.List + getServerNameOrBuilderList() { + return serverName_; + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public int getServerNameCount() { + return serverName_.size(); + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(int index) { + return serverName_.get(index); + } + /** + * repeated .hbase.pb.ServerName server_name = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder( + int index) { + return serverName_.get(index); + } + + private void initFields() { + serverName_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getServerNameCount(); i++) { + if (!getServerName(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < serverName_.size(); i++) { + output.writeMessage(1, serverName_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < serverName_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, serverName_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest) obj; + + boolean result = true; + result = result && getServerNameList() + .equals(other.getServerNameList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getServerNameCount() > 0) { + hash = (37 * hash) + SERVER_NAME_FIELD_NUMBER; + hash = (53 * hash) + getServerNameList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest 
parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.ClearDeadServersRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ClearDeadServersRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ClearDeadServersRequest_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          getServerNameFieldBuilder();
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        if (serverNameBuilder_ == null) {
+          serverName_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000001);
+        } else {
+          serverNameBuilder_.clear();
+        }
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ClearDeadServersRequest_descriptor;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest getDefaultInstanceForType() {
+        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest.getDefaultInstance();
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest build() {
+        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest buildPartial() {
+        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest(this);
+        int from_bitField0_ = bitField0_;
+        if (serverNameBuilder_ == null) {
+          if (((bitField0_ & 0x00000001) == 0x00000001)) {
+            serverName_ = java.util.Collections.unmodifiableList(serverName_);
+            bitField0_ = (bitField0_ & ~0x00000001);
+          }
+          result.serverName_ = serverName_;
+        } else {
+          result.serverName_ = serverNameBuilder_.build();
+        }
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest) {
+          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest other) {
+        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest.getDefaultInstance()) return this;
+        if (serverNameBuilder_ == null) {
+          if (!other.serverName_.isEmpty()) {
+            if (serverName_.isEmpty()) {
+              serverName_ = other.serverName_;
+              bitField0_ = (bitField0_ & ~0x00000001);
+            } else {
+              ensureServerNameIsMutable();
+              serverName_.addAll(other.serverName_);
+            }
+            onChanged();
+          }
+        } else {
+          if (!other.serverName_.isEmpty()) {
+            if (serverNameBuilder_.isEmpty()) {
+              serverNameBuilder_.dispose();
+              serverNameBuilder_ = null;
+              serverName_ = other.serverName_;
+              bitField0_ = (bitField0_ & ~0x00000001);
+              serverNameBuilder_ =
+                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+                   getServerNameFieldBuilder() : null;
+            } else {
+              serverNameBuilder_.addAllMessages(other.serverName_);
+            }
+          }
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        for (int i = 0; i < getServerNameCount(); i++) {
+          if (!getServerName(i).isInitialized()) {
+
+            return false;
+          }
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersRequest) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+      private int bitField0_;
+
+      // repeated .hbase.pb.ServerName server_name = 1;
+      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> serverName_ =
+        java.util.Collections.emptyList();
+      private void ensureServerNameIsMutable() {
+        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+          serverName_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>(serverName_);
+          bitField0_ |= 0x00000001;
+        }
+      }
+
+      private com.google.protobuf.RepeatedFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_;
+
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getServerNameList() {
+        if (serverNameBuilder_ == null) {
+          return java.util.Collections.unmodifiableList(serverName_);
+        } else {
+          return serverNameBuilder_.getMessageList();
+        }
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public int getServerNameCount() {
+        if (serverNameBuilder_ == null) {
+          return serverName_.size();
+        } else {
+          return serverNameBuilder_.getCount();
+        }
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(int index) {
+        if (serverNameBuilder_ == null) {
+          return serverName_.get(index);
+        } else {
+          return serverNameBuilder_.getMessage(index);
+        }
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public Builder setServerName(
+          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
+        if (serverNameBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureServerNameIsMutable();
+          serverName_.set(index, value);
+          onChanged();
+        } else {
+          serverNameBuilder_.setMessage(index, value);
+        }
+        return this;
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public Builder setServerName(
+          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
+        if (serverNameBuilder_ == null) {
+          ensureServerNameIsMutable();
+          serverName_.set(index, builderForValue.build());
+          onChanged();
+        } else {
+          serverNameBuilder_.setMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public Builder addServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
+        if (serverNameBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureServerNameIsMutable();
+          serverName_.add(value);
+          onChanged();
+        } else {
+          serverNameBuilder_.addMessage(value);
+        }
+        return this;
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public Builder addServerName(
+          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
+        if (serverNameBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureServerNameIsMutable();
+          serverName_.add(index, value);
+          onChanged();
+        } else {
+          serverNameBuilder_.addMessage(index, value);
+        }
+        return this;
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public Builder addServerName(
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
+        if (serverNameBuilder_ == null) {
+          ensureServerNameIsMutable();
+          serverName_.add(builderForValue.build());
+          onChanged();
+        } else {
+          serverNameBuilder_.addMessage(builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public Builder addServerName(
+          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
+        if (serverNameBuilder_ == null) {
+          ensureServerNameIsMutable();
+          serverName_.add(index, builderForValue.build());
+          onChanged();
+        } else {
+          serverNameBuilder_.addMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public Builder addAllServerName(
+          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> values) {
+        if (serverNameBuilder_ == null) {
+          ensureServerNameIsMutable();
+          super.addAll(values, serverName_);
+          onChanged();
+        } else {
+          serverNameBuilder_.addAllMessages(values);
+        }
+        return this;
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public Builder clearServerName() {
+        if (serverNameBuilder_ == null) {
+          serverName_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00000001);
+          onChanged();
+        } else {
+          serverNameBuilder_.clear();
+        }
+        return this;
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public Builder removeServerName(int index) {
+        if (serverNameBuilder_ == null) {
+          ensureServerNameIsMutable();
+          serverName_.remove(index);
+          onChanged();
+        } else {
+          serverNameBuilder_.remove(index);
+        }
+        return this;
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder(
+          int index) {
+        return getServerNameFieldBuilder().getBuilder(index);
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(
+          int index) {
+        if (serverNameBuilder_ == null) {
+          return serverName_.get(index);  } else {
+          return serverNameBuilder_.getMessageOrBuilder(index);
+        }
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
+           getServerNameOrBuilderList() {
+        if (serverNameBuilder_ != null) {
+          return serverNameBuilder_.getMessageOrBuilderList();
+        } else {
+          return java.util.Collections.unmodifiableList(serverName_);
+        }
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addServerNameBuilder() {
+        return getServerNameFieldBuilder().addBuilder(
+            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance());
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addServerNameBuilder(
+          int index) {
+        return getServerNameFieldBuilder().addBuilder(
+            index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance());
+      }
+      /**
+       * repeated .hbase.pb.ServerName server_name = 1;
+       */
+      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder>
+           getServerNameBuilderList() {
+        return getServerNameFieldBuilder().getBuilderList();
+      }
+      private com.google.protobuf.RepeatedFieldBuilder<
+          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
+          getServerNameFieldBuilder() {
+        if (serverNameBuilder_ == null) {
+          serverNameBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
+                  serverName_,
+                  ((bitField0_ & 0x00000001) == 0x00000001),
+                  getParentForChildren(),
+                  isClean());
+          serverName_ = null;
+        }
+        return serverNameBuilder_;
+      }
+
+      // @@protoc_insertion_point(builder_scope:hbase.pb.ClearDeadServersRequest)
+    }
+
+    static {
+      defaultInstance = new ClearDeadServersRequest(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.ClearDeadServersRequest)
+  }
+
+  public interface ClearDeadServersResponseOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // repeated .hbase.pb.ServerName server_name = 1;
+    /**
+     * repeated .hbase.pb.ServerName server_name = 1;
+     */
+    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>
+        getServerNameList();
+    /**
+     * repeated .hbase.pb.ServerName server_name = 1;
+     */
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(int index);
+    /**
+     * repeated .hbase.pb.ServerName server_name = 1;
+     */
+    int getServerNameCount();
+    /**
+     * repeated .hbase.pb.ServerName server_name = 1;
+     */
+    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
+        getServerNameOrBuilderList();
+    /**
+     * repeated .hbase.pb.ServerName server_name = 1;
+     */
+    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(
+        int index);
+  }
+  /**
+   * Protobuf type {@code hbase.pb.ClearDeadServersResponse}
+   */
+  public static final class ClearDeadServersResponse extends
+      com.google.protobuf.GeneratedMessage
+      implements ClearDeadServersResponseOrBuilder {
+    // Use ClearDeadServersResponse.newBuilder() to construct.
+    private ClearDeadServersResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private ClearDeadServersResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final ClearDeadServersResponse defaultInstance;
+    public static ClearDeadServersResponse getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public ClearDeadServersResponse getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private ClearDeadServersResponse(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 10: {
+              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+                serverName_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>();
+                mutable_bitField0_ |= 0x00000001;
+              }
+              serverName_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry));
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+          serverName_ = java.util.Collections.unmodifiableList(serverName_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ClearDeadServersResponse_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ClearDeadServersResponse_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<ClearDeadServersResponse> PARSER =
+        new com.google.protobuf.AbstractParser<ClearDeadServersResponse>() {
+      public ClearDeadServersResponse parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new ClearDeadServersResponse(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<ClearDeadServersResponse> getParserForType() {
+      return PARSER;
+    }
+
+    // repeated .hbase.pb.ServerName server_name = 1;
+    public static final int SERVER_NAME_FIELD_NUMBER = 1;
+    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> serverName_;
+    /**
+     * repeated .hbase.pb.ServerName server_name = 1;
+     */
+    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getServerNameList() {
+      return serverName_;
+    }
+    /**
+     * repeated .hbase.pb.ServerName server_name = 1;
+     */
+    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
+        getServerNameOrBuilderList() {
+      return serverName_;
+    }
+    /**
+     * repeated .hbase.pb.ServerName server_name = 1;
+     */
+    public int getServerNameCount() {
+      return serverName_.size();
+    }
+    /**
+     * repeated .hbase.pb.ServerName server_name = 1;
+     */
+    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(int index) {
+      return serverName_.get(index);
+    }
+    /**
+     * repeated .hbase.pb.ServerName server_name = 1;
+     */
+    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(
+        int index) {
+      return serverName_.get(index);
+    }
+
+    private void initFields() {
+      serverName_ = java.util.Collections.emptyList();
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      for (int i = 0; i < getServerNameCount(); i++) {
+        if (!getServerName(i).isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      for (int i = 0; i < serverName_.size(); i++) {
+        output.writeMessage(1, serverName_.get(i));
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      for (int i = 0; i < serverName_.size(); i++) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(1, serverName_.get(i));
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    @java.lang.Override
+    public boolean equals(final java.lang.Object obj) {
+      if (obj == this) {
+       return true;
+      }
+      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse)) {
+        return super.equals(obj);
+      }
+      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse) obj;
+
+      boolean result = true;
+      result = result && getServerNameList()
+          .equals(other.getServerNameList());
+      result = result &&
+          getUnknownFields().equals(other.getUnknownFields());
+      return result;
+    }
+
+    private int memoizedHashCode = 0;
+    @java.lang.Override
+    public int hashCode() {
+      if (memoizedHashCode != 0) {
+        return memoizedHashCode;
+      }
+      int hash = 41;
+      hash = (19 * hash) + getDescriptorForType().hashCode();
+      if (getServerNameCount() > 0) {
+        hash = (37 * hash) + SERVER_NAME_FIELD_NUMBER;
+        hash = (53 * hash) + getServerNameList().hashCode();
+      }
+      hash = (29 * hash) + getUnknownFields().hashCode();
+      memoizedHashCode = hash;
+      return hash;
+    }
+
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code hbase.pb.ClearDeadServersResponse}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponseOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ClearDeadServersResponse_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_hbase_pb_ClearDeadServersResponse_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse.Builder.class);
+      }
+
+      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ClearDeadServersResponse.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent par